forked from repo-mirrors/dbt-core
Compare commits
887 Commits
jerco/sql-
...
arky/add-p
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b29709b4d7 | ||
|
|
23b16ad6d2 | ||
|
|
fdeccfaf24 | ||
|
|
fecde23da5 | ||
|
|
b1d931337e | ||
|
|
39542336b8 | ||
|
|
799588cada | ||
|
|
f392add4b8 | ||
|
|
49560bf2a2 | ||
|
|
44b3ed5ae9 | ||
|
|
6235145641 | ||
|
|
ff5cb7ba51 | ||
|
|
1e2b9ae962 | ||
|
|
8cab58d248 | ||
|
|
0d645c227f | ||
|
|
fb6c349677 | ||
|
|
eeb057085c | ||
|
|
121371f4a4 | ||
|
|
a32713198b | ||
|
|
a1b067c683 | ||
|
|
cbfc6a8baf | ||
|
|
9765596247 | ||
|
|
1b1a291fae | ||
|
|
867534c1f4 | ||
|
|
6d8b6459eb | ||
|
|
203bd8defd | ||
|
|
949680a5ce | ||
|
|
015c490b63 | ||
|
|
95a916936e | ||
|
|
961d69d8c2 | ||
|
|
be4d0a5b88 | ||
|
|
5310d3715c | ||
|
|
6bdf983e0b | ||
|
|
6604b9ca31 | ||
|
|
305241fe86 | ||
|
|
2d686b73fd | ||
|
|
30def98ed9 | ||
|
|
b78d23f68d | ||
|
|
4ffd633e40 | ||
|
|
07c3dcd21c | ||
|
|
fd233eac62 | ||
|
|
d8f38ca48b | ||
|
|
7740bd6b45 | ||
|
|
a57fdf008e | ||
|
|
a8e3afe8af | ||
|
|
44572e72f0 | ||
|
|
54b1e5699c | ||
|
|
ee7bc24903 | ||
|
|
15ef88d2ed | ||
|
|
7c56d72b46 | ||
|
|
5d28e4744e | ||
|
|
746ca7d149 | ||
|
|
a58b5ee8fb | ||
|
|
7fbfd53c3e | ||
|
|
4c44c29ee4 | ||
|
|
8ee0fe0a64 | ||
|
|
307a618ea8 | ||
|
|
ce07ce58e1 | ||
|
|
7ea51df6ae | ||
|
|
fe463c79fe | ||
|
|
d7d6843c5f | ||
|
|
adcf8bcbb3 | ||
|
|
5d937802f1 | ||
|
|
8c201e88a7 | ||
|
|
b8bc264731 | ||
|
|
9c6fbff0c3 | ||
|
|
5c7aa7f9ce | ||
|
|
1af94dedad | ||
|
|
2e7c968419 | ||
|
|
05b0820a9e | ||
|
|
d4e620eb50 | ||
|
|
0f52505dbe | ||
|
|
cb754fd97b | ||
|
|
e01d4c0a6e | ||
|
|
7a6bedaae3 | ||
|
|
22145e7e5f | ||
|
|
b3ac41ff9a | ||
|
|
036b95e5b2 | ||
|
|
2ce0c5ccf5 | ||
|
|
7156cc5c1d | ||
|
|
fcd30b1de2 | ||
|
|
a84fa50166 | ||
|
|
6a1e3a6db8 | ||
|
|
b37e5b5198 | ||
|
|
f9d4e9e03d | ||
|
|
9c97d30702 | ||
|
|
9836f7bdef | ||
|
|
b07ff7aebd | ||
|
|
aecbb4564c | ||
|
|
779663b39c | ||
|
|
7934af2974 | ||
|
|
533988233e | ||
|
|
8bc0e77a1d | ||
|
|
1c93c9bb58 | ||
|
|
6d7b32977c | ||
|
|
bf15466bec | ||
|
|
fb1ebe48f0 | ||
|
|
de65697ff9 | ||
|
|
ecf90d689e | ||
|
|
4cdeff11cd | ||
|
|
9ff2f6e430 | ||
|
|
73a0dc6d14 | ||
|
|
0a1c73e0fd | ||
|
|
8653ffc5a4 | ||
|
|
86583a350f | ||
|
|
fafab5d557 | ||
|
|
39e0c22353 | ||
|
|
f767943fb2 | ||
|
|
ae97831ebf | ||
|
|
f16bae0ab9 | ||
|
|
b947b2bc7e | ||
|
|
7068688181 | ||
|
|
38c0600982 | ||
|
|
83d163add5 | ||
|
|
d46e8855ef | ||
|
|
60524c0f8e | ||
|
|
ca73a2aa15 | ||
|
|
4a833a4272 | ||
|
|
f9abeca231 | ||
|
|
5f9e527768 | ||
|
|
6f51de4cb5 | ||
|
|
cb64682d33 | ||
|
|
98d1a94b60 | ||
|
|
a89da7ca88 | ||
|
|
2d237828ae | ||
|
|
f4253da72a | ||
|
|
919822e583 | ||
|
|
444c787729 | ||
|
|
3b63dd9f11 | ||
|
|
84166bf457 | ||
|
|
dd445e1fde | ||
|
|
6a22ec1b2e | ||
|
|
587bbcbf0d | ||
|
|
8e1c4ec116 | ||
|
|
dc35f56baa | ||
|
|
60d116b5b5 | ||
|
|
4dbc4a41c4 | ||
|
|
89541faec9 | ||
|
|
79bd98560b | ||
|
|
7917bd5033 | ||
|
|
05b0ebb184 | ||
|
|
e1d7a53325 | ||
|
|
7a06d354aa | ||
|
|
9dd5ab90bf | ||
|
|
45d614533f | ||
|
|
00a531d9d6 | ||
|
|
fd301a38db | ||
|
|
9c7e01dbca | ||
|
|
1ac6df0996 | ||
|
|
38ca4fce25 | ||
|
|
7e3a6eec96 | ||
|
|
ac16a55c64 | ||
|
|
620ca40b85 | ||
|
|
aa11cf2956 | ||
|
|
feb06e2107 | ||
|
|
a3d40e0abf | ||
|
|
7c1bd91d0a | ||
|
|
70a132d059 | ||
|
|
1fdebc660b | ||
|
|
0516192d69 | ||
|
|
f99be58217 | ||
|
|
3b6222e516 | ||
|
|
b88e60f8dd | ||
|
|
9373c4d1e4 | ||
|
|
0fe3ee8eca | ||
|
|
0d71a32aa2 | ||
|
|
0f223663bb | ||
|
|
c25d0c9f9c | ||
|
|
4a4b7beeb9 | ||
|
|
265e09dc93 | ||
|
|
87ea28fe84 | ||
|
|
af0f786f2e | ||
|
|
50528a009d | ||
|
|
f6e5582370 | ||
|
|
dea3181d96 | ||
|
|
5f7ae2fd4c | ||
|
|
4f249b8652 | ||
|
|
df23f68dd4 | ||
|
|
4b091cee9e | ||
|
|
dcb5acdf29 | ||
|
|
7fbeced315 | ||
|
|
47e7b1cc80 | ||
|
|
8f998c218e | ||
|
|
41c0797d7a | ||
|
|
3f2cba0dec | ||
|
|
b60c67d107 | ||
|
|
630cd3aba0 | ||
|
|
05595f5920 | ||
|
|
29f2cfc48d | ||
|
|
43d949c5cc | ||
|
|
58312f1816 | ||
|
|
dffbb6a659 | ||
|
|
272beb21a9 | ||
|
|
d34c511fa5 | ||
|
|
2945619eb8 | ||
|
|
078a83679a | ||
|
|
881437e890 | ||
|
|
40aca4bc17 | ||
|
|
0de046dfbe | ||
|
|
5a7b73be26 | ||
|
|
35f8ceb7f1 | ||
|
|
19d6dab973 | ||
|
|
810ef7f556 | ||
|
|
fd7306643f | ||
|
|
f1dddaa6e9 | ||
|
|
a7eb89d645 | ||
|
|
c56a9b2b7f | ||
|
|
17a8f462dd | ||
|
|
e3498bdaa5 | ||
|
|
d2f963e20e | ||
|
|
d53bb37186 | ||
|
|
9874f9e004 | ||
|
|
2739d5f4c4 | ||
|
|
d07603b288 | ||
|
|
723ac9493d | ||
|
|
de75777ede | ||
|
|
75703c10ee | ||
|
|
1722079a43 | ||
|
|
f5aea191d1 | ||
|
|
b2418b0634 | ||
|
|
aac034d9ba | ||
|
|
ada8860e48 | ||
|
|
a87275a4ca | ||
|
|
0891aef8d7 | ||
|
|
add924221a | ||
|
|
ba40d07ea3 | ||
|
|
57e9096816 | ||
|
|
6fedfe0ece | ||
|
|
121fa5793f | ||
|
|
a88f640395 | ||
|
|
74419b0e86 | ||
|
|
2ddf296a8e | ||
|
|
6b42a712a8 | ||
|
|
c3230d3374 | ||
|
|
602535fe71 | ||
|
|
f9b28bcaed | ||
|
|
922c75344b | ||
|
|
2caf87c247 | ||
|
|
f2a3535c3f | ||
|
|
a500e60b7f | ||
|
|
c7ebc8935f | ||
|
|
56f8f8a329 | ||
|
|
828d723512 | ||
|
|
b450a5754e | ||
|
|
2971b9a027 | ||
|
|
3c54959829 | ||
|
|
87e25e8692 | ||
|
|
6ac5c90a0b | ||
|
|
a58fb24e2b | ||
|
|
9ce593c47f | ||
|
|
c9d4051136 | ||
|
|
26f3518cea | ||
|
|
49eed67ab0 | ||
|
|
7a4d3bd2dc | ||
|
|
2afb4ccd68 | ||
|
|
f38d5ad8e2 | ||
|
|
7e1f04c667 | ||
|
|
ef2ba39dcf | ||
|
|
7045e11aa0 | ||
|
|
a9016c37f5 | ||
|
|
fe62ab8ec5 | ||
|
|
893daedc42 | ||
|
|
44be13b006 | ||
|
|
a5131ecc7d | ||
|
|
ce5d02569f | ||
|
|
4fc7456000 | ||
|
|
28e3412556 | ||
|
|
86fe510bcf | ||
|
|
eaedbd3187 | ||
|
|
b31fcc4edf | ||
|
|
edb5634b9a | ||
|
|
ad21458e10 | ||
|
|
622bc43ced | ||
|
|
e5d99da0bc | ||
|
|
618499b379 | ||
|
|
bca361acf9 | ||
|
|
567e2ca2be | ||
|
|
474143466f | ||
|
|
050161c78f | ||
|
|
ab496af1f0 | ||
|
|
c3c2b27e97 | ||
|
|
5789d717ba | ||
|
|
14e2c3ec21 | ||
|
|
b718c537a7 | ||
|
|
6992151081 | ||
|
|
bf5ed39db3 | ||
|
|
f573870232 | ||
|
|
da4a90aa11 | ||
|
|
2cfc386773 | ||
|
|
ae485f996a | ||
|
|
73ff497200 | ||
|
|
9a7305d43f | ||
|
|
ca23148908 | ||
|
|
8225a009b5 | ||
|
|
9605b76178 | ||
|
|
137dd9aa1b | ||
|
|
a203fe866a | ||
|
|
4186f99b74 | ||
|
|
6db899eddd | ||
|
|
8ea20b4ba2 | ||
|
|
3f76f82c88 | ||
|
|
6cbf66db58 | ||
|
|
8cd11b380f | ||
|
|
814eb65d59 | ||
|
|
f24452a3ab | ||
|
|
30503697f2 | ||
|
|
90902689c3 | ||
|
|
5a0e776cff | ||
|
|
9368e7a6a1 | ||
|
|
c02ddf8c0e | ||
|
|
64b8a12a42 | ||
|
|
e895fe9e4b | ||
|
|
8d987521dd | ||
|
|
4aafc5ef4a | ||
|
|
24ca76ea58 | ||
|
|
b681908ee2 | ||
|
|
72076b3fe5 | ||
|
|
0683c59dcd | ||
|
|
8019498f09 | ||
|
|
6234aec7d2 | ||
|
|
edd8059eb3 | ||
|
|
e3be347768 | ||
|
|
597acf1fa1 | ||
|
|
effa1a0813 | ||
|
|
726800be57 | ||
|
|
8b79747908 | ||
|
|
ec5d31de0e | ||
|
|
5d61ebbfdb | ||
|
|
0ef9931d19 | ||
|
|
a2213abbc0 | ||
|
|
915585c36e | ||
|
|
5ddd40885e | ||
|
|
58d1bccd26 | ||
|
|
70c26f5c74 | ||
|
|
ac962a4a31 | ||
|
|
bb2d062cc5 | ||
|
|
7667784985 | ||
|
|
05ecfbcc3a | ||
|
|
e06ae97068 | ||
|
|
ed50877c4f | ||
|
|
6b5e38ee28 | ||
|
|
63a1bf9adb | ||
|
|
2c7238fbb4 | ||
|
|
b1d597109f | ||
|
|
7617eece3a | ||
|
|
8ce92b56d7 | ||
|
|
21fae1c4a4 | ||
|
|
c952d44ec5 | ||
|
|
971b38c26b | ||
|
|
b7884facbf | ||
|
|
57ce461067 | ||
|
|
b1b830643e | ||
|
|
3cee9d16fa | ||
|
|
c647706ac2 | ||
|
|
7b33ffb1bd | ||
|
|
f38cbc4feb | ||
|
|
480e0e55c5 | ||
|
|
e5c468bb93 | ||
|
|
605c72e86e | ||
|
|
aad46ac5a8 | ||
|
|
d85618ef26 | ||
|
|
1250f23c44 | ||
|
|
daea7d59a7 | ||
|
|
4575757c2a | ||
|
|
d7a2f77705 | ||
|
|
4a4b89606b | ||
|
|
1ebe2e7118 | ||
|
|
f1087e57bf | ||
|
|
250537ba58 | ||
|
|
ccc7222868 | ||
|
|
311a57a21e | ||
|
|
b7c45de6b1 | ||
|
|
c53c3cf181 | ||
|
|
a77d325c8a | ||
|
|
dd41384d82 | ||
|
|
aa55fb2d30 | ||
|
|
864f4efb8b | ||
|
|
83c5a8c24b | ||
|
|
57aef33fb3 | ||
|
|
6d78e5e640 | ||
|
|
f54a876f65 | ||
|
|
8bbae7926b | ||
|
|
db2b12021e | ||
|
|
8b2c9bf39d | ||
|
|
298bf8a1d4 | ||
|
|
77748571b4 | ||
|
|
8ce4c289c5 | ||
|
|
abbece8876 | ||
|
|
3ad40372e6 | ||
|
|
c6d0e7c926 | ||
|
|
bc015843d4 | ||
|
|
df64511feb | ||
|
|
db0981afe7 | ||
|
|
dcf6544f93 | ||
|
|
c2c8959fee | ||
|
|
ccb4fa26cd | ||
|
|
d0b5d752df | ||
|
|
4c63b630de | ||
|
|
9c0b62b4f5 | ||
|
|
e08eede5e2 | ||
|
|
05e53d4143 | ||
|
|
b2ea2b8b25 | ||
|
|
2245d8d710 | ||
|
|
d9424cc710 | ||
|
|
0503c141b7 | ||
|
|
1a6e4a00c7 | ||
|
|
42b7caae19 | ||
|
|
622e5fd71d | ||
|
|
59d773ea7e | ||
|
|
84bf5b4620 | ||
|
|
726c4d6c58 | ||
|
|
acc88d47a3 | ||
|
|
0a74594d09 | ||
|
|
d2f3cdd6de | ||
|
|
92d1ef8482 | ||
|
|
a8abc49632 | ||
|
|
d6ac340df0 | ||
|
|
c653330911 | ||
|
|
82d9b2fa87 | ||
|
|
3f96fad4f9 | ||
|
|
c2c4757a2b | ||
|
|
08b2d94ccd | ||
|
|
7fa61f0816 | ||
|
|
c65ba11ae6 | ||
|
|
b0651b13b5 | ||
|
|
a34521ec07 | ||
|
|
da47b90503 | ||
|
|
d27016a4e7 | ||
|
|
db99e2f68d | ||
|
|
cbb9117ab9 | ||
|
|
e2ccf011d9 | ||
|
|
17014bfad3 | ||
|
|
92b7166c10 | ||
|
|
7b464b8a49 | ||
|
|
5c765bf3e2 | ||
|
|
93619a9a37 | ||
|
|
a181cee6ae | ||
|
|
a0ade13f5a | ||
|
|
9823a56e1d | ||
|
|
3aeab73740 | ||
|
|
9801eebc58 | ||
|
|
11c622230c | ||
|
|
f0349488ed | ||
|
|
c85be323f5 | ||
|
|
6954c4df1b | ||
|
|
30a1595f72 | ||
|
|
f841a7ca76 | ||
|
|
07a004b301 | ||
|
|
b05582de39 | ||
|
|
fa7c4d19f0 | ||
|
|
1913eac5ed | ||
|
|
066346faa2 | ||
|
|
0a03355ceb | ||
|
|
43e24c5ae6 | ||
|
|
89d111a5f6 | ||
|
|
e1b5e68904 | ||
|
|
065ab2ebc2 | ||
|
|
20c95a4993 | ||
|
|
c40b488cb4 | ||
|
|
585e7c59e8 | ||
|
|
7077c47551 | ||
|
|
f789b2535a | ||
|
|
2bfc6917e2 | ||
|
|
d74ae19523 | ||
|
|
1c7c23ac73 | ||
|
|
86e8722cd8 | ||
|
|
53127daad8 | ||
|
|
91b20b7482 | ||
|
|
7a61602738 | ||
|
|
dd4b47d8b1 | ||
|
|
eb200b4687 | ||
|
|
0fc080d222 | ||
|
|
5da63602b3 | ||
|
|
457ff3ef48 | ||
|
|
0dbdecef10 | ||
|
|
b13b0e9492 | ||
|
|
b9fdfd9e36 | ||
|
|
4d6352db14 | ||
|
|
9eb82c6497 | ||
|
|
89cc89dfdf | ||
|
|
2b0f6597a4 | ||
|
|
294def205f | ||
|
|
34fa703466 | ||
|
|
ab3f8dcbfd | ||
|
|
02c20477b9 | ||
|
|
d9a4ee126a | ||
|
|
5b31cc4266 | ||
|
|
9bb1250869 | ||
|
|
94d6d19fb4 | ||
|
|
d43c070007 | ||
|
|
9ef236601b | ||
|
|
9d6f961d2b | ||
|
|
5453840950 | ||
|
|
d453964546 | ||
|
|
748a932811 | ||
|
|
8217ad4722 | ||
|
|
cc5a38ec5a | ||
|
|
6ef3fbbf76 | ||
|
|
76fd12c7cd | ||
|
|
9ecb6e50e4 | ||
|
|
ce9d0afb8a | ||
|
|
c39ea807e8 | ||
|
|
1e35339389 | ||
|
|
304797b099 | ||
|
|
b9bdb775ab | ||
|
|
b0909b8f5d | ||
|
|
5d278dacf1 | ||
|
|
df93858b4b | ||
|
|
e8da84fb9e | ||
|
|
7e90e067af | ||
|
|
5e4e917de5 | ||
|
|
05dc0212e7 | ||
|
|
c00052cbfb | ||
|
|
3d54a83822 | ||
|
|
fafd5edbda | ||
|
|
8478262580 | ||
|
|
83b1fee062 | ||
|
|
0fbbc896b2 | ||
|
|
ce1aaec31d | ||
|
|
0544b08543 | ||
|
|
1809852a0d | ||
|
|
88d2ee4813 | ||
|
|
bef6edb942 | ||
|
|
99f27de934 | ||
|
|
9c91f3a7bd | ||
|
|
1b6fed2ffd | ||
|
|
0721f2c1b7 | ||
|
|
b9a35da118 | ||
|
|
60f80056b1 | ||
|
|
540c3b79aa | ||
|
|
77be2e4fdf | ||
|
|
16f529e1d4 | ||
|
|
ebfcf2a9ef | ||
|
|
67a8138b65 | ||
|
|
e91863de59 | ||
|
|
85d0b5afc7 | ||
|
|
1fbcaa4484 | ||
|
|
481235a943 | ||
|
|
2289e45571 | ||
|
|
b5d303f12a | ||
|
|
c3be975783 | ||
|
|
47c2edb42a | ||
|
|
b3440417ad | ||
|
|
020f639c7a | ||
|
|
55db15aba8 | ||
|
|
bce0e7c096 | ||
|
|
44b457c191 | ||
|
|
7d7066466d | ||
|
|
517576c088 | ||
|
|
987764858b | ||
|
|
a235abd176 | ||
|
|
9297e4d55c | ||
|
|
a0ec0b6f9d | ||
|
|
eae98677b9 | ||
|
|
66ac107409 | ||
|
|
39c5c42215 | ||
|
|
9f280a8469 | ||
|
|
73116fb816 | ||
|
|
f02243506d | ||
|
|
d5e9ce1797 | ||
|
|
4e786184d2 | ||
|
|
930bd3541e | ||
|
|
6c76137da4 | ||
|
|
68d06d8a9c | ||
|
|
d0543c9242 | ||
|
|
cfad27f963 | ||
|
|
c3ccbe3357 | ||
|
|
8e28f5906e | ||
|
|
d23285b4ba | ||
|
|
a42748433d | ||
|
|
be4a91a0fe | ||
|
|
8145eed603 | ||
|
|
fc00239f36 | ||
|
|
77dfec7214 | ||
|
|
7b73264ec8 | ||
|
|
1916784287 | ||
|
|
1ec54abdc4 | ||
|
|
5efc4aa066 | ||
|
|
c2856017a1 | ||
|
|
847c0b9644 | ||
|
|
17b82661d2 | ||
|
|
6c8609499a | ||
|
|
53ae325576 | ||
|
|
a7670a3ab9 | ||
|
|
ff2f1f42c3 | ||
|
|
35f7975d8f | ||
|
|
a9c8bc0e0a | ||
|
|
73aebd8159 | ||
|
|
9b84b6e2e8 | ||
|
|
095997913e | ||
|
|
6de1d29cf9 | ||
|
|
87db12d05b | ||
|
|
dcc70f314f | ||
|
|
dcd6ef733b | ||
|
|
85e415f50f | ||
|
|
2c684247e9 | ||
|
|
3d09531cda | ||
|
|
fc1227e0b1 | ||
|
|
dc96352493 | ||
|
|
725cf81af6 | ||
|
|
558468e854 | ||
|
|
95ad1ca4f8 | ||
|
|
02a69c8f4f | ||
|
|
7dbdfc88e0 | ||
|
|
2002791ec1 | ||
|
|
29d96bd6bf | ||
|
|
d01245133a | ||
|
|
23c8ac230c | ||
|
|
43d9ee3470 | ||
|
|
50fe25d230 | ||
|
|
a79960fa64 | ||
|
|
fa4f9d3d97 | ||
|
|
73385720b4 | ||
|
|
c2ab2971b0 | ||
|
|
0e60fc1078 | ||
|
|
4f2fef1ece | ||
|
|
3562637984 | ||
|
|
17aca39e1c | ||
|
|
59744f18bb | ||
|
|
f1326f526c | ||
|
|
834ac716fd | ||
|
|
0487b96098 | ||
|
|
dbd36f06e4 | ||
|
|
38ada8a68e | ||
|
|
e58edaab2d | ||
|
|
c202e005cd | ||
|
|
8129862b3c | ||
|
|
4e8aa007cf | ||
|
|
fe88bfabbf | ||
|
|
5328a64df2 | ||
|
|
87c9974be1 | ||
|
|
f3f509da92 | ||
|
|
5e8dcec2c5 | ||
|
|
56783446db | ||
|
|
207cc0383d | ||
|
|
49ecd6a6a4 | ||
|
|
c109f39d82 | ||
|
|
fd778dceb5 | ||
|
|
e402241e0e | ||
|
|
a6c37c948d | ||
|
|
fd886cb7dd | ||
|
|
b089a471b7 | ||
|
|
ae294b643b | ||
|
|
0bd6df0d1b | ||
|
|
7b1d61c956 | ||
|
|
646a0c704f | ||
|
|
bbf4fc30a5 | ||
|
|
6baaa2bcb0 | ||
|
|
13a595722a | ||
|
|
3680b6ad0e | ||
|
|
4c29d48d1c | ||
|
|
e00eb9aa3a | ||
|
|
f5a94fc774 | ||
|
|
b98af4ce17 | ||
|
|
b0f8d3d2f1 | ||
|
|
6c4577f44e | ||
|
|
89ee5962f5 | ||
|
|
a096202b28 | ||
|
|
7da7c2d692 | ||
|
|
1db48b3cca | ||
|
|
567847a5b0 | ||
|
|
9894c04d38 | ||
|
|
b26280d1cf | ||
|
|
cfece2cf51 | ||
|
|
79da002c3c | ||
|
|
e3f827513f | ||
|
|
10b2a7e7ff | ||
|
|
82c8d6a7a8 | ||
|
|
c994717cbc | ||
|
|
e3452b9a8f | ||
|
|
e95e36d63b | ||
|
|
74f7416144 | ||
|
|
1feeb804f4 | ||
|
|
0f6e4f0e32 | ||
|
|
2b44c2b456 | ||
|
|
2bb31ade39 | ||
|
|
0ce12405c0 | ||
|
|
b8c13e05db | ||
|
|
64268d2f9b | ||
|
|
8c8be68701 | ||
|
|
1df713fee9 | ||
|
|
758afd4071 | ||
|
|
0f9200d356 | ||
|
|
5f59ff1254 | ||
|
|
49e7bdbef9 | ||
|
|
5466fa5575 | ||
|
|
f8f21ee707 | ||
|
|
436737dde5 | ||
|
|
7f8d9a7af9 | ||
|
|
d80de82316 | ||
|
|
0d02446e07 | ||
|
|
a9e71b3907 | ||
|
|
739fb98d0e | ||
|
|
348769fa80 | ||
|
|
7efb6ab62d | ||
|
|
a3b018fd3b | ||
|
|
4d6208be64 | ||
|
|
3aab9befcf | ||
|
|
e5ac9df069 | ||
|
|
34960d8d61 | ||
|
|
94a7cfa58d | ||
|
|
eb72dbf32a | ||
|
|
9eb411f7b7 | ||
|
|
32415e3659 | ||
|
|
7886924c07 | ||
|
|
40b55ed65a | ||
|
|
4f5b9e686c | ||
|
|
95284aff68 | ||
|
|
063ff9c254 | ||
|
|
26b33e668d | ||
|
|
26ac9d57d0 | ||
|
|
7bd861a351 | ||
|
|
15c97f009a | ||
|
|
5153023100 | ||
|
|
c879083bc9 | ||
|
|
05bf27c958 | ||
|
|
a7ff003d4f | ||
|
|
2547e4f55e | ||
|
|
b43fc76701 | ||
|
|
48464a22a4 | ||
|
|
c3891d78e4 | ||
|
|
69ce6779e1 | ||
|
|
a206cfce65 | ||
|
|
3f54f30349 | ||
|
|
1071a4681d | ||
|
|
2548ba9936 | ||
|
|
999ed0b74c | ||
|
|
eef7bca005 | ||
|
|
5686cab5a0 | ||
|
|
99bc292588 | ||
|
|
a1ee348a6f | ||
|
|
2048a1af6f | ||
|
|
71223dc253 | ||
|
|
e03d35a9fc | ||
|
|
f988f76fcc | ||
|
|
0cacfd0f88 | ||
|
|
c25260e5dd | ||
|
|
c521fa6b74 | ||
|
|
f304b4b2da | ||
|
|
064d890172 | ||
|
|
febbd978b5 | ||
|
|
0d7e87fac6 | ||
|
|
3500528506 | ||
|
|
c42221fcf3 | ||
|
|
f49f28c331 | ||
|
|
dc964c43d9 | ||
|
|
60e491b3c1 | ||
|
|
3bfce2bac9 | ||
|
|
d63ad4cd82 | ||
|
|
d5608dca32 | ||
|
|
e7031f2d74 | ||
|
|
68a2996788 | ||
|
|
f5f0a7f908 | ||
|
|
1cfc0851ca | ||
|
|
d257d0b44c | ||
|
|
f8d347e5f8 | ||
|
|
a02db03f45 | ||
|
|
6e8388c653 | ||
|
|
6572b7e0a5 | ||
|
|
26bb5c3484 | ||
|
|
83f4992073 | ||
|
|
8392023e9f | ||
|
|
309efaa141 | ||
|
|
a5993fc866 | ||
|
|
5b1bc72ae1 | ||
|
|
72b6a80b07 | ||
|
|
e48f7ab32e | ||
|
|
16dc2be556 | ||
|
|
eea872c319 | ||
|
|
189c06dbb1 | ||
|
|
74662d1527 | ||
|
|
75f3e8cb74 | ||
|
|
aeee1c23a6 | ||
|
|
e50678c914 | ||
|
|
ae62f5708c | ||
|
|
cda88d1948 | ||
|
|
e7218d3e99 | ||
|
|
2c42fb436c | ||
|
|
a9e1a0e00a | ||
|
|
0d8e061a3d | ||
|
|
7532420eef | ||
|
|
03b17ff401 | ||
|
|
fc1fc2d5e9 | ||
|
|
7e43f36bb1 | ||
|
|
72c17c4464 | ||
|
|
3996a69861 | ||
|
|
aa8115aa5e | ||
|
|
ab0c3510eb | ||
|
|
4480d05cfb | ||
|
|
788694ec5b | ||
|
|
fb5bb7fff3 | ||
|
|
c270a77552 | ||
|
|
a2e040f389 | ||
|
|
a4376b96d8 | ||
|
|
ed5df342ca | ||
|
|
96f063e077 | ||
|
|
ee8f81de6a | ||
|
|
935edc70aa | ||
|
|
28c44a9be7 | ||
|
|
a2b3602485 | ||
|
|
3733817488 | ||
|
|
c5fb6c275a | ||
|
|
f633e9936f | ||
|
|
4e57c51c7a | ||
|
|
6267572ba7 | ||
|
|
32e1924c3b | ||
|
|
55af3c78d7 | ||
|
|
bdff19d909 | ||
|
|
f87c7819fb | ||
|
|
33694f3772 | ||
|
|
ebfc18408b | ||
|
|
6958f4f12e | ||
|
|
1f898c859a | ||
|
|
ce0bcc08a6 | ||
|
|
d1ae9dd37f | ||
|
|
31a3f2bdee | ||
|
|
1390715590 | ||
|
|
d09459c980 | ||
|
|
979e1c74bf | ||
|
|
7d0fccd63f | ||
|
|
37b8b65aad | ||
|
|
0211668361 | ||
|
|
f8c8322bb4 | ||
|
|
14c2bd9959 | ||
|
|
8db6bac1db | ||
|
|
080dd41876 | ||
|
|
8e9702cec5 | ||
|
|
5ff81c244e | ||
|
|
cfe81e81fd | ||
|
|
365414b5fc | ||
|
|
ec46be7368 | ||
|
|
f23a403468 | ||
|
|
15ad34e415 | ||
|
|
bacc891703 | ||
|
|
a2e167761c | ||
|
|
cce8fda06c | ||
|
|
dd4ac1ba4a | ||
|
|
401ebc2768 | ||
|
|
83612a98b7 | ||
|
|
827eae2750 | ||
|
|
3a3bedcd8e | ||
|
|
c1dfb4e6e6 | ||
|
|
5852f17f0b | ||
|
|
a94156703d | ||
|
|
2b3fb7a5d0 | ||
|
|
5f2a43864f | ||
|
|
88fbc94db2 | ||
|
|
6c277b5fe1 | ||
|
|
40e64b238c | ||
|
|
581bf51574 | ||
|
|
899b0ef224 | ||
|
|
3ade206e86 | ||
|
|
58bd750007 | ||
|
|
0ec829a096 | ||
|
|
7f953a6d48 | ||
|
|
0b92f04683 | ||
|
|
3f37a43a8c | ||
|
|
204d53516a | ||
|
|
5071b00baa | ||
|
|
81118d904a | ||
|
|
69cdc4148e | ||
|
|
1c71bf414d | ||
|
|
7cf57ae72d | ||
|
|
1b6f95fef4 | ||
|
|
38940eeeea | ||
|
|
6c950bad7c | ||
|
|
5e681929ae | ||
|
|
ea5a9da71e | ||
|
|
9c5ee59e19 | ||
|
|
55b1d3a191 | ||
|
|
a968aa7725 | ||
|
|
5e0a765917 | ||
|
|
0aeb9976f4 | ||
|
|
30a7da8112 | ||
|
|
f6a9dae422 | ||
|
|
62a7163334 | ||
|
|
e2f0467f5d | ||
|
|
3e3ecb1c3f | ||
|
|
27511d807f | ||
|
|
15077d087c | ||
|
|
5b01cc0c22 | ||
|
|
d1bcff865d | ||
|
|
0fce63665c | ||
|
|
1183e85eb4 | ||
|
|
3b86243f04 | ||
|
|
c251dae75e | ||
|
|
ecfd77f1ca |
@@ -1,13 +1,19 @@
|
|||||||
[bumpversion]
|
[bumpversion]
|
||||||
current_version = 1.0.1
|
current_version = 1.7.0a1
|
||||||
parse = (?P<major>\d+)
|
parse = (?P<major>[\d]+) # major version number
|
||||||
\.(?P<minor>\d+)
|
\.(?P<minor>[\d]+) # minor version number
|
||||||
\.(?P<patch>\d+)
|
\.(?P<patch>[\d]+) # patch version number
|
||||||
((?P<prekind>a|b|rc)
|
(?P<prerelease> # optional pre-release - ex: a1, b2, rc25
|
||||||
(?P<pre>\d+) # pre-release version num
|
(?P<prekind>a|b|rc) # pre-release type
|
||||||
|
(?P<num>[\d]+) # pre-release version number
|
||||||
)?
|
)?
|
||||||
|
( # optional nightly release indicator
|
||||||
|
\.(?P<nightly>dev[0-9]+) # ex: .dev02142023
|
||||||
|
)? # expected matches: `1.15.0`, `1.5.0a11`, `1.5.0a1.dev123`, `1.5.0.dev123457`, expected failures: `1`, `1.5`, `1.5.2-a1`, `text1.5.0`
|
||||||
serialize =
|
serialize =
|
||||||
{major}.{minor}.{patch}{prekind}{pre}
|
{major}.{minor}.{patch}{prekind}{num}.{nightly}
|
||||||
|
{major}.{minor}.{patch}.{nightly}
|
||||||
|
{major}.{minor}.{patch}{prekind}{num}
|
||||||
{major}.{minor}.{patch}
|
{major}.{minor}.{patch}
|
||||||
commit = False
|
commit = False
|
||||||
tag = False
|
tag = False
|
||||||
@@ -21,19 +27,21 @@ values =
|
|||||||
rc
|
rc
|
||||||
final
|
final
|
||||||
|
|
||||||
[bumpversion:part:pre]
|
[bumpversion:part:num]
|
||||||
first_value = 1
|
first_value = 1
|
||||||
|
|
||||||
[bumpversion:file:setup.py]
|
[bumpversion:part:nightly]
|
||||||
|
|
||||||
[bumpversion:file:core/setup.py]
|
[bumpversion:file:core/setup.py]
|
||||||
|
|
||||||
[bumpversion:file:core/dbt/version.py]
|
[bumpversion:file:core/dbt/version.py]
|
||||||
|
|
||||||
[bumpversion:file:core/scripts/create_adapter_plugins.py]
|
|
||||||
|
|
||||||
[bumpversion:file:plugins/postgres/setup.py]
|
[bumpversion:file:plugins/postgres/setup.py]
|
||||||
|
|
||||||
[bumpversion:file:plugins/postgres/dbt/adapters/postgres/__version__.py]
|
[bumpversion:file:plugins/postgres/dbt/adapters/postgres/__version__.py]
|
||||||
|
|
||||||
[bumpversion:file:docker/Dockerfile]
|
[bumpversion:file:docker/Dockerfile]
|
||||||
|
|
||||||
|
[bumpversion:file:tests/adapter/setup.py]
|
||||||
|
|
||||||
|
[bumpversion:file:tests/adapter/dbt/tests/adapter/__version__.py]
|
||||||
|
|||||||
@@ -2,6 +2,13 @@
|
|||||||
|
|
||||||
For information on prior major and minor releases, see their changelogs:
|
For information on prior major and minor releases, see their changelogs:
|
||||||
|
|
||||||
|
|
||||||
|
* [1.6](https://github.com/dbt-labs/dbt-core/blob/1.6.latest/CHANGELOG.md)
|
||||||
|
* [1.5](https://github.com/dbt-labs/dbt-core/blob/1.5.latest/CHANGELOG.md)
|
||||||
|
* [1.4](https://github.com/dbt-labs/dbt-core/blob/1.4.latest/CHANGELOG.md)
|
||||||
|
* [1.3](https://github.com/dbt-labs/dbt-core/blob/1.3.latest/CHANGELOG.md)
|
||||||
|
* [1.2](https://github.com/dbt-labs/dbt-core/blob/1.2.latest/CHANGELOG.md)
|
||||||
|
* [1.1](https://github.com/dbt-labs/dbt-core/blob/1.1.latest/CHANGELOG.md)
|
||||||
* [1.0](https://github.com/dbt-labs/dbt-core/blob/1.0.latest/CHANGELOG.md)
|
* [1.0](https://github.com/dbt-labs/dbt-core/blob/1.0.latest/CHANGELOG.md)
|
||||||
* [0.21](https://github.com/dbt-labs/dbt-core/blob/0.21.latest/CHANGELOG.md)
|
* [0.21](https://github.com/dbt-labs/dbt-core/blob/0.21.latest/CHANGELOG.md)
|
||||||
* [0.20](https://github.com/dbt-labs/dbt-core/blob/0.20.latest/CHANGELOG.md)
|
* [0.20](https://github.com/dbt-labs/dbt-core/blob/0.20.latest/CHANGELOG.md)
|
||||||
|
|||||||
@@ -1,31 +0,0 @@
|
|||||||
## dbt-core 1.1.0 (TBD)
|
|
||||||
|
|
||||||
### Features
|
|
||||||
- Added Support for Semantic Versioning ([#4644](https://github.com/dbt-labs/dbt-core/pull/4644))
|
|
||||||
- New Dockerfile to support specific db adapters and platforms. See docker/README.md for details ([#4495](https://github.com/dbt-labs/dbt-core/issues/4495), [#4487](https://github.com/dbt-labs/dbt-core/pull/4487))
|
|
||||||
- Allow unique_key to take a list ([#2479](https://github.com/dbt-labs/dbt-core/issues/2479), [#4618](https://github.com/dbt-labs/dbt-core/pull/4618))
|
|
||||||
- Add `--quiet` global flag and `print` Jinja function ([#3451](https://github.com/dbt-labs/dbt-core/issues/3451), [#4701](https://github.com/dbt-labs/dbt-core/pull/4701))
|
|
||||||
|
|
||||||
### Fixes
|
|
||||||
- User wasn't asked for permission to overwite a profile entry when running init inside an existing project ([#4375](https://github.com/dbt-labs/dbt-core/issues/4375), [#4447](https://github.com/dbt-labs/dbt-core/pull/4447))
|
|
||||||
- Add project name validation to `dbt init` ([#4490](https://github.com/dbt-labs/dbt-core/issues/4490),[#4536](https://github.com/dbt-labs/dbt-core/pull/4536))
|
|
||||||
- Allow override of string and numeric types for adapters. ([#4603](https://github.com/dbt-labs/dbt-core/issues/4603))
|
|
||||||
- A change in secret environment variables won't trigger a full reparse [#4650](https://github.com/dbt-labs/dbt-core/issues/4650) [4665](https://github.com/dbt-labs/dbt-core/pull/4665)
|
|
||||||
- Fix misspellings and typos in docstrings ([#4545](https://github.com/dbt-labs/dbt-core/pull/4545))
|
|
||||||
|
|
||||||
### Under the hood
|
|
||||||
- Testing cleanup ([#4496](https://github.com/dbt-labs/dbt-core/pull/4496), [#4509](https://github.com/dbt-labs/dbt-core/pull/4509))
|
|
||||||
- Clean up test deprecation warnings ([#3988](https://github.com/dbt-labs/dbt-core/issue/3988), [#4556](https://github.com/dbt-labs/dbt-core/pull/4556))
|
|
||||||
- Use mashumaro for serialization in event logging ([#4504](https://github.com/dbt-labs/dbt-core/issues/4504), [#4505](https://github.com/dbt-labs/dbt-core/pull/4505))
|
|
||||||
- Drop support for Python 3.7.0 + 3.7.1 ([#4584](https://github.com/dbt-labs/dbt-core/issues/4584), [#4585](https://github.com/dbt-labs/dbt-core/pull/4585), [#4643](https://github.com/dbt-labs/dbt-core/pull/4643))
|
|
||||||
- Re-format codebase (except tests) using pre-commit hooks ([#3195](https://github.com/dbt-labs/dbt-core/issues/3195), [#4697](https://github.com/dbt-labs/dbt-core/pull/4697))
|
|
||||||
- Add deps module README ([#4686](https://github.com/dbt-labs/dbt-core/pull/4686/))
|
|
||||||
- Initial conversion of tests to pytest ([#4690](https://github.com/dbt-labs/dbt-core/issues/4690), [#4691](https://github.com/dbt-labs/dbt-core/pull/4691))
|
|
||||||
- Fix errors in Windows for tests/functions ([#4781](https://github.com/dbt-labs/dbt-core/issues/4781), [#4767](https://github.com/dbt-labs/dbt-core/pull/4767))
|
|
||||||
|
|
||||||
Contributors:
|
|
||||||
- [@NiallRees](https://github.com/NiallRees) ([#4447](https://github.com/dbt-labs/dbt-core/pull/4447))
|
|
||||||
- [@alswang18](https://github.com/alswang18) ([#4644](https://github.com/dbt-labs/dbt-core/pull/4644))
|
|
||||||
- [@emartens](https://github.com/ehmartens) ([#4701](https://github.com/dbt-labs/dbt-core/pull/4701))
|
|
||||||
- [@mdesmet](https://github.com/mdesmet) ([#4604](https://github.com/dbt-labs/dbt-core/pull/4604))
|
|
||||||
- [@kazanzhy](https://github.com/kazanzhy) ([#4545](https://github.com/dbt-labs/dbt-core/pull/4545))
|
|
||||||
@@ -26,6 +26,12 @@ changie batch <version> --move-dir '<version>' --prerelease 'rc1'
|
|||||||
changie merge
|
changie merge
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Example
|
||||||
|
```
|
||||||
|
changie batch 1.0.5 --move-dir '1.0.5' --prerelease 'rc1'
|
||||||
|
changie merge
|
||||||
|
```
|
||||||
|
|
||||||
#### Final Release Workflow
|
#### Final Release Workflow
|
||||||
These commands batch up changes in `/.changes/unreleased` as well as `/.changes/<version>` to be included in this final release and delete all prereleases. This rolls all prereleases up into a single final release. All `yaml` files in `/unreleased` and `<version>` will be deleted at this point.
|
These commands batch up changes in `/.changes/unreleased` as well as `/.changes/<version>` to be included in this final release and delete all prereleases. This rolls all prereleases up into a single final release. All `yaml` files in `/unreleased` and `<version>` will be deleted at this point.
|
||||||
|
|
||||||
@@ -34,7 +40,14 @@ changie batch <version> --include '<version>' --remove-prereleases
|
|||||||
changie merge
|
changie merge
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Example
|
||||||
|
```
|
||||||
|
changie batch 1.0.5 --include '1.0.5' --remove-prereleases
|
||||||
|
changie merge
|
||||||
|
```
|
||||||
|
|
||||||
### A Note on Manual Edits & Gotchas
|
### A Note on Manual Edits & Gotchas
|
||||||
- Changie generates markdown files in the `.changes` directory that are parsed together with the `changie merge` command. Every time `changie merge` is run, it regenerates the entire file. For this reason, any changes made directly to `CHANGELOG.md` will be overwritten on the next run of `changie merge`.
|
- Changie generates markdown files in the `.changes` directory that are parsed together with the `changie merge` command. Every time `changie merge` is run, it regenerates the entire file. For this reason, any changes made directly to `CHANGELOG.md` will be overwritten on the next run of `changie merge`.
|
||||||
- If changes need to be made to the `CHANGELOG.md`, make the changes to the relevant `<version>.md` file located in the `/.changes` directory. You will then run `changie merge` to regenerate the `CHANGELOG.MD`.
|
- If changes need to be made to the `CHANGELOG.md`, make the changes to the relevant `<version>.md` file located in the `/.changes` directory. You will then run `changie merge` to regenerate the `CHANGELOG.MD`.
|
||||||
- Do not run `changie batch` again on released versions. Our final release workflow deletes all of the yaml files associated with individual changes. If for some reason modifications to the `CHANGELOG.md` are required after we've generated the final release `CHANGELOG.md`, the modifications need to be done manually to the `<version>.md` file in the `/.changes` directory.
|
- Do not run `changie batch` again on released versions. Our final release workflow deletes all of the yaml files associated with individual changes. If for some reason modifications to the `CHANGELOG.md` are required after we've generated the final release `CHANGELOG.md`, the modifications need to be done manually to the `<version>.md` file in the `/.changes` directory.
|
||||||
|
- changie can modify, create and delete files depending on the command you run. This is expected. Be sure to commit everything that has been modified and deleted.
|
||||||
|
|||||||
@@ -3,4 +3,4 @@
|
|||||||
- This file provides a full account of all changes to `dbt-core` and `dbt-postgres`
|
- This file provides a full account of all changes to `dbt-core` and `dbt-postgres`
|
||||||
- Changes are listed under the (pre)release in which they first appear. Subsequent releases include changes from previous releases.
|
- Changes are listed under the (pre)release in which they first appear. Subsequent releases include changes from previous releases.
|
||||||
- "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
|
- "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
|
||||||
- Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](CONTRIBUTING.md)
|
- Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)
|
||||||
|
|||||||
6
.changes/unreleased/Dependencies-20230621-005752.yaml
Normal file
6
.changes/unreleased/Dependencies-20230621-005752.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: "Dependencies"
|
||||||
|
body: "Bump mypy from 1.3.0 to 1.4.0"
|
||||||
|
time: 2023-06-21T00:57:52.00000Z
|
||||||
|
custom:
|
||||||
|
Author: dependabot[bot]
|
||||||
|
PR: 7912
|
||||||
6
.changes/unreleased/Docs-20230715-200907.yaml
Normal file
6
.changes/unreleased/Docs-20230715-200907.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Docs
|
||||||
|
body: Corrected spelling of "Partiton"
|
||||||
|
time: 2023-07-15T20:09:07.057361092+02:00
|
||||||
|
custom:
|
||||||
|
Author: pgoslatara
|
||||||
|
Issue: "8100"
|
||||||
6
.changes/unreleased/Docs-20230718-192422.yaml
Normal file
6
.changes/unreleased/Docs-20230718-192422.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Docs
|
||||||
|
body: Remove static SQL codeblock for metrics
|
||||||
|
time: 2023-07-18T19:24:22.155323+02:00
|
||||||
|
custom:
|
||||||
|
Author: marcodamore
|
||||||
|
Issue: "436"
|
||||||
6
.changes/unreleased/Fixes-20230625-142731.yaml
Normal file
6
.changes/unreleased/Fixes-20230625-142731.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: Fixed double-underline
|
||||||
|
time: 2023-06-25T14:27:31.231253719+08:00
|
||||||
|
custom:
|
||||||
|
Author: lllong33
|
||||||
|
Issue: "5301"
|
||||||
6
.changes/unreleased/Fixes-20230718-125518.yaml
Normal file
6
.changes/unreleased/Fixes-20230718-125518.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: Enable converting deprecation warnings to errors
|
||||||
|
time: 2023-07-18T12:55:18.03914-04:00
|
||||||
|
custom:
|
||||||
|
Author: michelleark
|
||||||
|
Issue: "8130"
|
||||||
6
.changes/unreleased/Fixes-20230720-122723.yaml
Normal file
6
.changes/unreleased/Fixes-20230720-122723.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: Add status to Parse Inline Error
|
||||||
|
time: 2023-07-20T12:27:23.085084-07:00
|
||||||
|
custom:
|
||||||
|
Author: ChenyuLInx
|
||||||
|
Issue: "8173"
|
||||||
6
.changes/unreleased/Fixes-20230720-161513.yaml
Normal file
6
.changes/unreleased/Fixes-20230720-161513.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: Ensure `warn_error_options` get serialized in `invocation_args_dict`
|
||||||
|
time: 2023-07-20T16:15:13.761813-07:00
|
||||||
|
custom:
|
||||||
|
Author: QMalcolm
|
||||||
|
Issue: "7694"
|
||||||
6
.changes/unreleased/Fixes-20230720-170112.yaml
Normal file
6
.changes/unreleased/Fixes-20230720-170112.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: Stop detecting materialization macros based on macro name
|
||||||
|
time: 2023-07-20T17:01:12.496238-07:00
|
||||||
|
custom:
|
||||||
|
Author: QMalcolm
|
||||||
|
Issue: "6231"
|
||||||
6
.changes/unreleased/Fixes-20230720-172422.yaml
Normal file
6
.changes/unreleased/Fixes-20230720-172422.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: Update `dbt deps` download retry logic to handle `EOFError` exceptions
|
||||||
|
time: 2023-07-20T17:24:22.969951-07:00
|
||||||
|
custom:
|
||||||
|
Author: QMalcolm
|
||||||
|
Issue: "6653"
|
||||||
6
.changes/unreleased/Fixes-20230726-104448.yaml
Normal file
6
.changes/unreleased/Fixes-20230726-104448.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: Improve handling of CTE injection with ephemeral models
|
||||||
|
time: 2023-07-26T10:44:48.888451-04:00
|
||||||
|
custom:
|
||||||
|
Author: gshank
|
||||||
|
Issue: "8213"
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
kind: Under the Hood
|
|
||||||
body: Automate changelog generation with changie
|
|
||||||
time: 2022-02-18T16:13:19.882436-06:00
|
|
||||||
custom:
|
|
||||||
Author: emmyoop
|
|
||||||
Issue: "4652"
|
|
||||||
PR: "4743"
|
|
||||||
6
.changes/unreleased/Under the Hood-20230719-124611.yaml
Normal file
6
.changes/unreleased/Under the Hood-20230719-124611.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Under the Hood
|
||||||
|
body: Refactor flaky test pp_versioned_models
|
||||||
|
time: 2023-07-19T12:46:11.972481-04:00
|
||||||
|
custom:
|
||||||
|
Author: gshank
|
||||||
|
Issue: "7781"
|
||||||
6
.changes/unreleased/Under the Hood-20230719-163334.yaml
Normal file
6
.changes/unreleased/Under the Hood-20230719-163334.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Under the Hood
|
||||||
|
body: format exception from dbtPlugin.initialize
|
||||||
|
time: 2023-07-19T16:33:34.586377-04:00
|
||||||
|
custom:
|
||||||
|
Author: michelleark
|
||||||
|
Issue: "8152"
|
||||||
6
.changes/unreleased/Under the Hood-20230724-150654.yaml
Normal file
6
.changes/unreleased/Under the Hood-20230724-150654.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Under the Hood
|
||||||
|
body: A way to control maxBytes for a single dbt.log file
|
||||||
|
time: 2023-07-24T15:06:54.263822-07:00
|
||||||
|
custom:
|
||||||
|
Author: ChenyuLInx
|
||||||
|
Issue: "8199"
|
||||||
7
.changes/unreleased/Under the Hood-20230725-102609.yaml
Normal file
7
.changes/unreleased/Under the Hood-20230725-102609.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Under the Hood
|
||||||
|
body: Ref expressions with version can now be processed by the latest version of the
|
||||||
|
high-performance dbt-extractor library.
|
||||||
|
time: 2023-07-25T10:26:09.902878-04:00
|
||||||
|
custom:
|
||||||
|
Author: peterallenwebb
|
||||||
|
Issue: "7688"
|
||||||
144
.changie.yaml
Executable file → Normal file
144
.changie.yaml
Executable file → Normal file
@@ -4,47 +4,137 @@ headerPath: header.tpl.md
|
|||||||
versionHeaderPath: ""
|
versionHeaderPath: ""
|
||||||
changelogPath: CHANGELOG.md
|
changelogPath: CHANGELOG.md
|
||||||
versionExt: md
|
versionExt: md
|
||||||
|
envPrefix: "CHANGIE_"
|
||||||
versionFormat: '## dbt-core {{.Version}} - {{.Time.Format "January 02, 2006"}}'
|
versionFormat: '## dbt-core {{.Version}} - {{.Time.Format "January 02, 2006"}}'
|
||||||
kindFormat: '### {{.Kind}}'
|
kindFormat: '### {{.Kind}}'
|
||||||
changeFormat: '- {{.Body}} ([#{{.Custom.Issue}}](https://github.com/dbt-labs/dbt-core/issues/{{.Custom.Issue}}), [#{{.Custom.PR}}](https://github.com/dbt-labs/dbt-core/pull/{{.Custom.PR}}))'
|
changeFormat: |-
|
||||||
|
{{- $IssueList := list }}
|
||||||
|
{{- $changes := splitList " " $.Custom.Issue }}
|
||||||
|
{{- range $issueNbr := $changes }}
|
||||||
|
{{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/issues/nbr)" | replace "nbr" $issueNbr }}
|
||||||
|
{{- $IssueList = append $IssueList $changeLink }}
|
||||||
|
{{- end -}}
|
||||||
|
- {{.Body}} ({{ range $index, $element := $IssueList }}{{if $index}}, {{end}}{{$element}}{{end}})
|
||||||
|
|
||||||
kinds:
|
kinds:
|
||||||
- label: Fixes
|
- label: Breaking Changes
|
||||||
- label: Features
|
- label: Features
|
||||||
- label: Under the Hood
|
- label: Fixes
|
||||||
- label: Breaking Changes
|
- label: Docs
|
||||||
- label: Docs
|
changeFormat: |-
|
||||||
- label: Dependencies
|
{{- $IssueList := list }}
|
||||||
|
{{- $changes := splitList " " $.Custom.Issue }}
|
||||||
|
{{- range $issueNbr := $changes }}
|
||||||
|
{{- $changeLink := "[dbt-docs/#nbr](https://github.com/dbt-labs/dbt-docs/issues/nbr)" | replace "nbr" $issueNbr }}
|
||||||
|
{{- $IssueList = append $IssueList $changeLink }}
|
||||||
|
{{- end -}}
|
||||||
|
- {{.Body}} ({{ range $index, $element := $IssueList }}{{if $index}}, {{end}}{{$element}}{{end}})
|
||||||
|
- label: Under the Hood
|
||||||
|
- label: Dependencies
|
||||||
|
changeFormat: |-
|
||||||
|
{{- $PRList := list }}
|
||||||
|
{{- $changes := splitList " " $.Custom.PR }}
|
||||||
|
{{- range $pullrequest := $changes }}
|
||||||
|
{{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $pullrequest }}
|
||||||
|
{{- $PRList = append $PRList $changeLink }}
|
||||||
|
{{- end -}}
|
||||||
|
- {{.Body}} ({{ range $index, $element := $PRList }}{{if $index}}, {{end}}{{$element}}{{end}})
|
||||||
|
skipGlobalChoices: true
|
||||||
|
additionalChoices:
|
||||||
|
- key: Author
|
||||||
|
label: GitHub Username(s) (separated by a single space if multiple)
|
||||||
|
type: string
|
||||||
|
minLength: 3
|
||||||
|
- key: PR
|
||||||
|
label: GitHub Pull Request Number (separated by a single space if multiple)
|
||||||
|
type: string
|
||||||
|
minLength: 1
|
||||||
|
- label: Security
|
||||||
|
changeFormat: |-
|
||||||
|
{{- $PRList := list }}
|
||||||
|
{{- $changes := splitList " " $.Custom.PR }}
|
||||||
|
{{- range $pullrequest := $changes }}
|
||||||
|
{{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $pullrequest }}
|
||||||
|
{{- $PRList = append $PRList $changeLink }}
|
||||||
|
{{- end -}}
|
||||||
|
- {{.Body}} ({{ range $index, $element := $PRList }}{{if $index}}, {{end}}{{$element}}{{end}})
|
||||||
|
skipGlobalChoices: true
|
||||||
|
additionalChoices:
|
||||||
|
- key: Author
|
||||||
|
label: GitHub Username(s) (separated by a single space if multiple)
|
||||||
|
type: string
|
||||||
|
minLength: 3
|
||||||
|
- key: PR
|
||||||
|
label: GitHub Pull Request Number (separated by a single space if multiple)
|
||||||
|
type: string
|
||||||
|
minLength: 1
|
||||||
|
|
||||||
|
newlines:
|
||||||
|
afterChangelogHeader: 1
|
||||||
|
afterKind: 1
|
||||||
|
afterChangelogVersion: 1
|
||||||
|
beforeKind: 1
|
||||||
|
endOfVersion: 1
|
||||||
|
|
||||||
custom:
|
custom:
|
||||||
- key: Author
|
- key: Author
|
||||||
label: GitHub Name
|
label: GitHub Username(s) (separated by a single space if multiple)
|
||||||
type: string
|
type: string
|
||||||
minLength: 3
|
minLength: 3
|
||||||
- key: Issue
|
- key: Issue
|
||||||
label: GitHub Issue Number
|
label: GitHub Issue Number (separated by a single space if multiple)
|
||||||
type: int
|
type: string
|
||||||
minLength: 4
|
minLength: 1
|
||||||
- key: PR
|
|
||||||
label: GitHub Pull Request Number
|
|
||||||
type: int
|
|
||||||
minLength: 4
|
|
||||||
footerFormat: |
|
footerFormat: |
|
||||||
Contributors:
|
|
||||||
{{- $contributorDict := dict }}
|
{{- $contributorDict := dict }}
|
||||||
{{- $core_team := list "emmyoop" "nathaniel-may" "gshank" "leahwicz" "ChenyuLInx" "stu-k" "iknox-fa" "VersusFacit" "McKnight-42" "jtcohen6" }}
|
{{- /* ensure all names in this list are all lowercase for later matching purposes */}}
|
||||||
|
{{- $core_team := splitList " " .Env.CORE_TEAM }}
|
||||||
|
{{- /* ensure we always skip snyk and dependabot in addition to the core team */}}
|
||||||
|
{{- $maintainers := list "dependabot[bot]" "snyk-bot"}}
|
||||||
|
{{- range $team_member := $core_team }}
|
||||||
|
{{- $team_member_lower := lower $team_member }}
|
||||||
|
{{- $maintainers = append $maintainers $team_member_lower }}
|
||||||
|
{{- end }}
|
||||||
{{- range $change := .Changes }}
|
{{- range $change := .Changes }}
|
||||||
{{- $author := $change.Custom.Author }}
|
{{- $authorList := splitList " " $change.Custom.Author }}
|
||||||
{{- if not (has $author $core_team)}}
|
{{- /* loop through all authors for a single changelog */}}
|
||||||
{{- $pr := $change.Custom.PR }}
|
{{- range $author := $authorList }}
|
||||||
{{- if hasKey $contributorDict $author }}
|
{{- $authorLower := lower $author }}
|
||||||
{{- $prList := get $contributorDict $author }}
|
{{- /* we only want to include non-core team contributors */}}
|
||||||
{{- $prList = append $prList $pr }}
|
{{- if not (has $authorLower $maintainers)}}
|
||||||
{{- $contributorDict := set $contributorDict $author $prList }}
|
{{- $changeList := splitList " " $change.Custom.Author }}
|
||||||
|
{{- $IssueList := list }}
|
||||||
|
{{- $changeLink := $change.Kind }}
|
||||||
|
{{- if or (eq $change.Kind "Dependencies") (eq $change.Kind "Security") }}
|
||||||
|
{{- $changes := splitList " " $change.Custom.PR }}
|
||||||
|
{{- range $issueNbr := $changes }}
|
||||||
|
{{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $issueNbr }}
|
||||||
|
{{- $IssueList = append $IssueList $changeLink }}
|
||||||
|
{{- end -}}
|
||||||
{{- else }}
|
{{- else }}
|
||||||
{{- $prList := list $change.Custom.PR }}
|
{{- $changes := splitList " " $change.Custom.Issue }}
|
||||||
{{- $contributorDict := set $contributorDict $author $prList }}
|
{{- range $issueNbr := $changes }}
|
||||||
|
{{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/issues/nbr)" | replace "nbr" $issueNbr }}
|
||||||
|
{{- $IssueList = append $IssueList $changeLink }}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end }}
|
||||||
|
{{- /* check if this contributor has other changes associated with them already */}}
|
||||||
|
{{- if hasKey $contributorDict $author }}
|
||||||
|
{{- $contributionList := get $contributorDict $author }}
|
||||||
|
{{- $contributionList = concat $contributionList $IssueList }}
|
||||||
|
{{- $contributorDict := set $contributorDict $author $contributionList }}
|
||||||
|
{{- else }}
|
||||||
|
{{- $contributionList := $IssueList }}
|
||||||
|
{{- $contributorDict := set $contributorDict $author $contributionList }}
|
||||||
{{- end }}
|
{{- end }}
|
||||||
{{- end}}
|
{{- end}}
|
||||||
|
{{- end}}
|
||||||
{{- end }}
|
{{- end }}
|
||||||
|
{{- /* no indentation here for formatting so the final markdown doesn't have unneeded indentations */}}
|
||||||
|
{{- if $contributorDict}}
|
||||||
|
### Contributors
|
||||||
{{- range $k,$v := $contributorDict }}
|
{{- range $k,$v := $contributorDict }}
|
||||||
- [{{$k}}](https://github.com/{{$k}}) ({{ range $index, $element := $v }}{{if $index}}, {{end}}[#{{$element}}](https://github.com/dbt-labs/dbt-core/pull/{{$element}}){{end}})
|
- [@{{$k}}](https://github.com/{{$k}}) ({{ range $index, $element := $v }}{{if $index}}, {{end}}{{$element}}{{end}})
|
||||||
|
{{- end }}
|
||||||
{{- end }}
|
{{- end }}
|
||||||
|
|||||||
2
.flake8
2
.flake8
@@ -9,4 +9,4 @@ ignore =
|
|||||||
E203 # makes Flake8 work like black
|
E203 # makes Flake8 work like black
|
||||||
E741
|
E741
|
||||||
E501 # long line checking is done in black
|
E501 # long line checking is done in black
|
||||||
exclude = test
|
exclude = test/
|
||||||
|
|||||||
2
.git-blame-ignore-revs
Normal file
2
.git-blame-ignore-revs
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
# Reformatting dbt-core via black, flake8, mypy, and assorted pre-commit hooks.
|
||||||
|
43e3fc22c4eae4d3d901faba05e33c40f1f1dc5a
|
||||||
6
.gitattributes
vendored
Normal file
6
.gitattributes
vendored
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
core/dbt/include/index.html binary
|
||||||
|
tests/functional/artifacts/data/state/*/manifest.json binary
|
||||||
|
core/dbt/docs/build/html/searchindex.js binary
|
||||||
|
core/dbt/docs/build/html/index.html binary
|
||||||
|
performance/runner/Cargo.lock binary
|
||||||
|
core/dbt/events/types_pb2.py binary
|
||||||
53
.github/CODEOWNERS
vendored
53
.github/CODEOWNERS
vendored
@@ -11,33 +11,50 @@
|
|||||||
|
|
||||||
# As a default for areas with no assignment,
|
# As a default for areas with no assignment,
|
||||||
# the core team as a whole will be assigned
|
# the core team as a whole will be assigned
|
||||||
* @dbt-labs/core
|
* @dbt-labs/core-team
|
||||||
|
|
||||||
# Changes to GitHub configurations including Actions
|
### OSS Tooling Guild
|
||||||
/.github/ @leahwicz
|
|
||||||
|
|
||||||
# Language core modules
|
/.github/ @dbt-labs/guild-oss-tooling
|
||||||
/core/dbt/config/ @dbt-labs/core-language
|
.bumpversion.cfg @dbt-labs/guild-oss-tooling
|
||||||
/core/dbt/context/ @dbt-labs/core-language
|
|
||||||
/core/dbt/contracts/ @dbt-labs/core-language
|
|
||||||
/core/dbt/deps/ @dbt-labs/core-language
|
|
||||||
/core/dbt/parser/ @dbt-labs/core-language
|
|
||||||
|
|
||||||
# Execution core modules
|
.changie.yaml @dbt-labs/guild-oss-tooling
|
||||||
/core/dbt/events/ @dbt-labs/core-execution @dbt-labs/core-language # eventually remove language but they have knowledge here now
|
|
||||||
/core/dbt/graph/ @dbt-labs/core-execution
|
|
||||||
/core/dbt/task/ @dbt-labs/core-execution
|
|
||||||
|
|
||||||
# Adapter interface, scaffold, Postgres plugin
|
pre-commit-config.yaml @dbt-labs/guild-oss-tooling
|
||||||
|
pytest.ini @dbt-labs/guild-oss-tooling
|
||||||
|
tox.ini @dbt-labs/guild-oss-tooling
|
||||||
|
|
||||||
|
pyproject.toml @dbt-labs/guild-oss-tooling
|
||||||
|
requirements.txt @dbt-labs/guild-oss-tooling
|
||||||
|
dev_requirements.txt @dbt-labs/guild-oss-tooling
|
||||||
|
/core/setup.py @dbt-labs/guild-oss-tooling
|
||||||
|
/core/MANIFEST.in @dbt-labs/guild-oss-tooling
|
||||||
|
|
||||||
|
### ADAPTERS
|
||||||
|
|
||||||
|
# Adapter interface ("base" + "sql" adapter defaults, cache)
|
||||||
/core/dbt/adapters @dbt-labs/core-adapters
|
/core/dbt/adapters @dbt-labs/core-adapters
|
||||||
/core/scripts/create_adapter_plugin.py @dbt-labs/core-adapters
|
|
||||||
/plugins/ @dbt-labs/core-adapters
|
|
||||||
|
|
||||||
# Global project: default macros, including generic tests + materializations
|
# Global project (default macros + materializations), starter project
|
||||||
/core/dbt/include/global_project @dbt-labs/core-execution @dbt-labs/core-adapters
|
/core/dbt/include @dbt-labs/core-adapters
|
||||||
|
|
||||||
|
# Postgres plugin
|
||||||
|
/plugins/ @dbt-labs/core-adapters
|
||||||
|
/plugins/postgres/setup.py @dbt-labs/core-adapters @dbt-labs/guild-oss-tooling
|
||||||
|
|
||||||
|
# Functional tests for adapter plugins
|
||||||
|
/tests/adapter @dbt-labs/core-adapters
|
||||||
|
|
||||||
|
### TESTS
|
||||||
|
|
||||||
|
# Overlapping ownership for vast majority of unit + functional tests
|
||||||
|
|
||||||
# Perf regression testing framework
|
# Perf regression testing framework
|
||||||
# This excludes the test project files itself since those aren't specific
|
# This excludes the test project files itself since those aren't specific
|
||||||
# framework changes (excluded by not setting an owner next to it- no owner)
|
# framework changes (excluded by not setting an owner next to it- no owner)
|
||||||
/performance @nathaniel-may
|
/performance @nathaniel-may
|
||||||
/performance/projects
|
/performance/projects
|
||||||
|
|
||||||
|
### ARTIFACTS
|
||||||
|
|
||||||
|
/schemas/dbt @dbt-labs/cloud-artifacts
|
||||||
|
|||||||
30
.github/ISSUE_TEMPLATE/bug-report.yml
vendored
30
.github/ISSUE_TEMPLATE/bug-report.yml
vendored
@@ -9,23 +9,33 @@ body:
|
|||||||
Thanks for taking the time to fill out this bug report!
|
Thanks for taking the time to fill out this bug report!
|
||||||
- type: checkboxes
|
- type: checkboxes
|
||||||
attributes:
|
attributes:
|
||||||
label: Is there an existing issue for this?
|
label: Is this a new bug in dbt-core?
|
||||||
description: Please search to see if an issue already exists for the bug you encountered.
|
description: >
|
||||||
|
In other words, is this an error, flaw, failure or fault in our software?
|
||||||
|
|
||||||
|
If this is a bug that broke existing functionality that used to work, please open a regression issue.
|
||||||
|
If this is a bug in an adapter plugin, please open an issue in the adapter's repository.
|
||||||
|
If this is a bug experienced while using dbt Cloud, please report to [support](mailto:support@getdbt.com).
|
||||||
|
If this is a request for help or troubleshooting code in your own dbt project, please join our [dbt Community Slack](https://www.getdbt.com/community/join-the-community/) or open a [Discussion question](https://github.com/dbt-labs/docs.getdbt.com/discussions).
|
||||||
|
|
||||||
|
Please search to see if an issue already exists for the bug you encountered.
|
||||||
options:
|
options:
|
||||||
- label: I have searched the existing issues
|
- label: I believe this is a new bug in dbt-core
|
||||||
|
required: true
|
||||||
|
- label: I have searched the existing issues, and I could not find an existing issue for this bug
|
||||||
required: true
|
required: true
|
||||||
- type: textarea
|
- type: textarea
|
||||||
attributes:
|
attributes:
|
||||||
label: Current Behavior
|
label: Current Behavior
|
||||||
description: A concise description of what you're experiencing.
|
description: A concise description of what you're experiencing.
|
||||||
validations:
|
validations:
|
||||||
required: false
|
required: true
|
||||||
- type: textarea
|
- type: textarea
|
||||||
attributes:
|
attributes:
|
||||||
label: Expected Behavior
|
label: Expected Behavior
|
||||||
description: A concise description of what you expected to happen.
|
description: A concise description of what you expected to happen.
|
||||||
validations:
|
validations:
|
||||||
required: false
|
required: true
|
||||||
- type: textarea
|
- type: textarea
|
||||||
attributes:
|
attributes:
|
||||||
label: Steps To Reproduce
|
label: Steps To Reproduce
|
||||||
@@ -36,7 +46,7 @@ body:
|
|||||||
3. Run '...'
|
3. Run '...'
|
||||||
4. See error...
|
4. See error...
|
||||||
validations:
|
validations:
|
||||||
required: false
|
required: true
|
||||||
- type: textarea
|
- type: textarea
|
||||||
id: logs
|
id: logs
|
||||||
attributes:
|
attributes:
|
||||||
@@ -52,8 +62,8 @@ body:
|
|||||||
description: |
|
description: |
|
||||||
examples:
|
examples:
|
||||||
- **OS**: Ubuntu 20.04
|
- **OS**: Ubuntu 20.04
|
||||||
- **Python**: 3.7.2 (`python --version`)
|
- **Python**: 3.9.12 (`python3 --version`)
|
||||||
- **dbt**: 0.21.0 (`dbt --version`)
|
- **dbt-core**: 1.1.1 (`dbt --version`)
|
||||||
value: |
|
value: |
|
||||||
- OS:
|
- OS:
|
||||||
- Python:
|
- Python:
|
||||||
@@ -64,13 +74,15 @@ body:
|
|||||||
- type: dropdown
|
- type: dropdown
|
||||||
id: database
|
id: database
|
||||||
attributes:
|
attributes:
|
||||||
label: What database are you using dbt with?
|
label: Which database adapter are you using with dbt?
|
||||||
|
description: If the bug is specific to the database or adapter, please open the issue in that adapter's repository instead
|
||||||
multiple: true
|
multiple: true
|
||||||
options:
|
options:
|
||||||
- postgres
|
- postgres
|
||||||
- redshift
|
- redshift
|
||||||
- snowflake
|
- snowflake
|
||||||
- bigquery
|
- bigquery
|
||||||
|
- spark
|
||||||
- other (mention it in "Additional Context")
|
- other (mention it in "Additional Context")
|
||||||
validations:
|
validations:
|
||||||
required: false
|
required: false
|
||||||
|
|||||||
19
.github/ISSUE_TEMPLATE/config.yml
vendored
19
.github/ISSUE_TEMPLATE/config.yml
vendored
@@ -1,4 +1,14 @@
|
|||||||
|
blank_issues_enabled: false
|
||||||
contact_links:
|
contact_links:
|
||||||
|
- name: Ask the community for help
|
||||||
|
url: https://github.com/dbt-labs/docs.getdbt.com/discussions
|
||||||
|
about: Need help troubleshooting? Check out our guide on how to ask
|
||||||
|
- name: Contact dbt Cloud support
|
||||||
|
url: mailto:support@getdbt.com
|
||||||
|
about: Are you using dbt Cloud? Contact our support team for help!
|
||||||
|
- name: Participate in Discussions
|
||||||
|
url: https://github.com/dbt-labs/dbt-core/discussions
|
||||||
|
about: Do you have a Big Idea for dbt? Read open discussions, or start a new one
|
||||||
- name: Create an issue for dbt-redshift
|
- name: Create an issue for dbt-redshift
|
||||||
url: https://github.com/dbt-labs/dbt-redshift/issues/new/choose
|
url: https://github.com/dbt-labs/dbt-redshift/issues/new/choose
|
||||||
about: Report a bug or request a feature for dbt-redshift
|
about: Report a bug or request a feature for dbt-redshift
|
||||||
@@ -8,9 +18,6 @@ contact_links:
|
|||||||
- name: Create an issue for dbt-snowflake
|
- name: Create an issue for dbt-snowflake
|
||||||
url: https://github.com/dbt-labs/dbt-snowflake/issues/new/choose
|
url: https://github.com/dbt-labs/dbt-snowflake/issues/new/choose
|
||||||
about: Report a bug or request a feature for dbt-snowflake
|
about: Report a bug or request a feature for dbt-snowflake
|
||||||
- name: Ask a question or get support
|
- name: Create an issue for dbt-spark
|
||||||
url: https://docs.getdbt.com/docs/guides/getting-help
|
url: https://github.com/dbt-labs/dbt-spark/issues/new/choose
|
||||||
about: Ask a question or request support
|
about: Report a bug or request a feature for dbt-spark
|
||||||
- name: Questions on Stack Overflow
|
|
||||||
url: https://stackoverflow.com/questions/tagged/dbt
|
|
||||||
about: Look at questions/answers at Stack Overflow
|
|
||||||
|
|||||||
22
.github/ISSUE_TEMPLATE/feature-request.yml
vendored
22
.github/ISSUE_TEMPLATE/feature-request.yml
vendored
@@ -1,22 +1,32 @@
|
|||||||
name: ✨ Feature
|
name: ✨ Feature
|
||||||
description: Suggest an idea for dbt
|
description: Propose a straightforward extension of dbt functionality
|
||||||
title: "[Feature] <title>"
|
title: "[Feature] <title>"
|
||||||
labels: ["enhancement", "triage"]
|
labels: ["enhancement", "triage"]
|
||||||
body:
|
body:
|
||||||
- type: markdown
|
- type: markdown
|
||||||
attributes:
|
attributes:
|
||||||
value: |
|
value: |
|
||||||
Thanks for taking the time to fill out this feature requests!
|
Thanks for taking the time to fill out this feature request!
|
||||||
- type: checkboxes
|
- type: checkboxes
|
||||||
attributes:
|
attributes:
|
||||||
label: Is there an existing feature request for this?
|
label: Is this your first time submitting a feature request?
|
||||||
description: Please search to see if an issue already exists for the feature you would like.
|
description: >
|
||||||
|
We want to make sure that features are distinct and discoverable,
|
||||||
|
so that other members of the community can find them and offer their thoughts.
|
||||||
|
|
||||||
|
Issues are the right place to request straightforward extensions of existing dbt functionality.
|
||||||
|
For "big ideas" about future capabilities of dbt, we ask that you open a
|
||||||
|
[discussion](https://github.com/dbt-labs/dbt-core/discussions) in the "Ideas" category instead.
|
||||||
options:
|
options:
|
||||||
- label: I have searched the existing issues
|
- label: I have read the [expectations for open source contributors](https://docs.getdbt.com/docs/contributing/oss-expectations)
|
||||||
|
required: true
|
||||||
|
- label: I have searched the existing issues, and I could not find an existing issue for this feature
|
||||||
|
required: true
|
||||||
|
- label: I am requesting a straightforward extension of existing dbt functionality, rather than a Big Idea better suited to a discussion
|
||||||
required: true
|
required: true
|
||||||
- type: textarea
|
- type: textarea
|
||||||
attributes:
|
attributes:
|
||||||
label: Describe the Feature
|
label: Describe the feature
|
||||||
description: A clear and concise description of what you want to happen.
|
description: A clear and concise description of what you want to happen.
|
||||||
validations:
|
validations:
|
||||||
required: true
|
required: true
|
||||||
|
|||||||
40
.github/ISSUE_TEMPLATE/implementation-ticket.yml
vendored
Normal file
40
.github/ISSUE_TEMPLATE/implementation-ticket.yml
vendored
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
name: 🛠️ Implementation
|
||||||
|
description: This is an implementation ticket intended for use by the maintainers of dbt-core
|
||||||
|
title: "[<project>] <title>"
|
||||||
|
labels: ["user_docs"]
|
||||||
|
body:
|
||||||
|
- type: markdown
|
||||||
|
attributes:
|
||||||
|
value: This is an implementation ticket intended for use by the maintainers of dbt-core
|
||||||
|
- type: checkboxes
|
||||||
|
attributes:
|
||||||
|
label: Housekeeping
|
||||||
|
description: >
|
||||||
|
A couple friendly reminders:
|
||||||
|
1. Remove the `user_docs` label if the scope of this work does not require changes to https://docs.getdbt.com/docs: no end-user interface (e.g. yml spec, CLI, error messages, etc) or functional changes
|
||||||
|
2. Link any blocking issues in the "Blocked on" field under the "Core devs & maintainers" project.
|
||||||
|
options:
|
||||||
|
- label: I am a maintainer of dbt-core
|
||||||
|
required: true
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Short description
|
||||||
|
description: |
|
||||||
|
Describe the scope of the ticket, a high-level implementation approach and any tradeoffs to consider
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Acceptance critera
|
||||||
|
description: |
|
||||||
|
What is the definition of done for this ticket? Include any relevant edge cases and/or test cases
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Context
|
||||||
|
description: |
|
||||||
|
Provide the "why", motivation, and alternative approaches considered -- linking to previous refinement issues, spikes, Notion docs as appropriate
|
||||||
|
validations:
|
||||||
|
validations:
|
||||||
|
required: false
|
||||||
93
.github/ISSUE_TEMPLATE/regression-report.yml
vendored
Normal file
93
.github/ISSUE_TEMPLATE/regression-report.yml
vendored
Normal file
@@ -0,0 +1,93 @@
|
|||||||
|
name: ☣️ Regression
|
||||||
|
description: Report a regression you've observed in a newer version of dbt
|
||||||
|
title: "[Regression] <title>"
|
||||||
|
labels: ["bug", "regression", "triage"]
|
||||||
|
body:
|
||||||
|
- type: markdown
|
||||||
|
attributes:
|
||||||
|
value: |
|
||||||
|
Thanks for taking the time to fill out this regression report!
|
||||||
|
- type: checkboxes
|
||||||
|
attributes:
|
||||||
|
label: Is this a regression in a recent version of dbt-core?
|
||||||
|
description: >
|
||||||
|
A regression is when documented functionality works as expected in an older version of dbt-core,
|
||||||
|
and no longer works after upgrading to a newer version of dbt-core
|
||||||
|
options:
|
||||||
|
- label: I believe this is a regression in dbt-core functionality
|
||||||
|
required: true
|
||||||
|
- label: I have searched the existing issues, and I could not find an existing issue for this regression
|
||||||
|
required: true
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Current Behavior
|
||||||
|
description: A concise description of what you're experiencing.
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Expected/Previous Behavior
|
||||||
|
description: A concise description of what you expected to happen.
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Steps To Reproduce
|
||||||
|
description: Steps to reproduce the behavior.
|
||||||
|
placeholder: |
|
||||||
|
1. In this environment...
|
||||||
|
2. With this config...
|
||||||
|
3. Run '...'
|
||||||
|
4. See error...
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
- type: textarea
|
||||||
|
id: logs
|
||||||
|
attributes:
|
||||||
|
label: Relevant log output
|
||||||
|
description: |
|
||||||
|
If applicable, log output to help explain your problem.
|
||||||
|
render: shell
|
||||||
|
validations:
|
||||||
|
required: false
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Environment
|
||||||
|
description: |
|
||||||
|
examples:
|
||||||
|
- **OS**: Ubuntu 20.04
|
||||||
|
- **Python**: 3.9.12 (`python3 --version`)
|
||||||
|
- **dbt-core (working version)**: 1.1.1 (`dbt --version`)
|
||||||
|
- **dbt-core (regression version)**: 1.2.0 (`dbt --version`)
|
||||||
|
value: |
|
||||||
|
- OS:
|
||||||
|
- Python:
|
||||||
|
- dbt (working version):
|
||||||
|
- dbt (regression version):
|
||||||
|
render: markdown
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
- type: dropdown
|
||||||
|
id: database
|
||||||
|
attributes:
|
||||||
|
label: Which database adapter are you using with dbt?
|
||||||
|
description: If the regression is specific to the database or adapter, please open the issue in that adapter's repository instead
|
||||||
|
multiple: true
|
||||||
|
options:
|
||||||
|
- postgres
|
||||||
|
- redshift
|
||||||
|
- snowflake
|
||||||
|
- bigquery
|
||||||
|
- spark
|
||||||
|
- other (mention it in "Additional Context")
|
||||||
|
validations:
|
||||||
|
required: false
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Additional Context
|
||||||
|
description: |
|
||||||
|
Links? References? Anything that will give us more context about the issue you are encountering!
|
||||||
|
|
||||||
|
Tip: You can attach images or log files by clicking this area to highlight it and then dragging files in.
|
||||||
|
validations:
|
||||||
|
required: false
|
||||||
216
.github/_README.md
vendored
Normal file
216
.github/_README.md
vendored
Normal file
@@ -0,0 +1,216 @@
|
|||||||
|
<!-- GitHub will publish this readme on the main repo page if the name is `README.md` so we've added the leading underscore to prevent this -->
|
||||||
|
<!-- Do not rename this file `README.md` -->
|
||||||
|
<!-- See https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-readmes -->
|
||||||
|
|
||||||
|
## What are GitHub Actions?
|
||||||
|
|
||||||
|
GitHub Actions are used for many different purposes. We use them to run tests in CI, validate PRs are in an expected state, and automate processes.
|
||||||
|
|
||||||
|
- [Overview of GitHub Actions](https://docs.github.com/en/actions/learn-github-actions/understanding-github-actions)
|
||||||
|
- [What's a workflow?](https://docs.github.com/en/actions/using-workflows/about-workflows)
|
||||||
|
- [GitHub Actions guides](https://docs.github.com/en/actions/guides)
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
## Where do actions and workflows live
|
||||||
|
|
||||||
|
We try to maintain actions that are shared across repositories in a single place so that necessary changes can be made in a single place.
|
||||||
|
|
||||||
|
[dbt-labs/actions](https://github.com/dbt-labs/actions/) is the central repository of actions and workflows we use across repositories.
|
||||||
|
|
||||||
|
GitHub Actions also live locally within a repository. The workflows can be found at `.github/workflows` from the root of the repository. These should be specific to that code base.
|
||||||
|
|
||||||
|
Note: We are actively moving actions into the central Action repository so there is currently some duplication across repositories.
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
## Basics of Using Actions
|
||||||
|
|
||||||
|
### Viewing Output
|
||||||
|
|
||||||
|
- View the detailed action output for your PR in the **Checks** tab of the PR. This only shows the most recent run. You can also view high level **Checks** output at the bottom on the PR.
|
||||||
|
|
||||||
|
- View _all_ action output for a repository from the [**Actions**](https://github.com/dbt-labs/dbt-core/actions) tab. Workflow results last 1 year. Artifacts last 90 days, unless specified otherwise in individual workflows.
|
||||||
|
|
||||||
|
This view often shows what seem like duplicates of the same workflow. This occurs when files are renamed but the workflow name has not changed. These are in fact _not_ duplicates.
|
||||||
|
|
||||||
|
You can see the branch the workflow runs from in this view. It is listed in the table between the workflow name and the time/duration of the run. When blank, the workflow is running in the context of the `main` branch.
|
||||||
|
|
||||||
|
### How to view what workflow file is being referenced from a run
|
||||||
|
|
||||||
|
- When viewing the output of a specific workflow run, click the 3 dots at the top right of the display. There will be an option to `View workflow file`.
|
||||||
|
|
||||||
|
### How to manually run a workflow
|
||||||
|
|
||||||
|
- If a workflow has the `on: workflow_dispatch` trigger, it can be manually triggered
|
||||||
|
- From the [**Actions**](https://github.com/dbt-labs/dbt-core/actions) tab, find the workflow you want to run, select it and fill in any inputs required. That's it!
|
||||||
|
|
||||||
|
### How to re-run jobs
|
||||||
|
|
||||||
|
- Some actions cannot be rerun in the GitHub UI. Namely the snyk checks and the cla check. Snyk checks are rerun by closing and reopening the PR. You can retrigger the cla check by commenting on the PR with `@cla-bot check`
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
## General Standards
|
||||||
|
|
||||||
|
### Permissions
|
||||||
|
- By default, workflows have read permissions in the repository for the contents scope only when no permissions are explicitly set.
|
||||||
|
- It is best practice to always define the permissions explicitly. This will allow actions to continue to work when the default permissions on the repository are changed. It also allows explicit grants of the least permissions possible.
|
||||||
|
- There are a lot of permissions available. [Read up on them](https://docs.github.com/en/actions/using-jobs/assigning-permissions-to-jobs) if you're unsure what to use.
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
pull-requests: write
|
||||||
|
```
|
||||||
|
|
||||||
|
### Secrets
|
||||||
|
- When to use a [Personal Access Token (PAT)](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token) vs the [GITHUB_TOKEN](https://docs.github.com/en/actions/security-guides/automatic-token-authentication) generated for the action?
|
||||||
|
|
||||||
|
The `GITHUB_TOKEN` is used by default. In most cases it is sufficient for what you need.
|
||||||
|
|
||||||
|
If you expect the workflow to result in a commit to that should retrigger workflows, you will need to use a Personal Access Token for the bot to commit the file. When using the GITHUB_TOKEN, the resulting commit will not trigger another GitHub Actions Workflow run. This is due to limitations set by GitHub. See [the docs](https://docs.github.com/en/actions/security-guides/automatic-token-authentication#using-the-github_token-in-a-workflow) for a more detailed explanation.
|
||||||
|
|
||||||
|
For example, we must use a PAT in our workflow to commit a new changelog yaml file for bot PRs. Once the file has been committed to the branch, it should retrigger the check to validate that a changelog exists on the PR. Otherwise, it would stay in a failed state since the check would never retrigger.
|
||||||
|
|
||||||
|
### Triggers
|
||||||
|
You can configure your workflows to run when specific activity on GitHub happens, at a scheduled time, or when an event outside of GitHub occurs. Read more details in the [GitHub docs](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows).
|
||||||
|
|
||||||
|
These triggers are under the `on` key of the workflow and more than one can be listed.
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- "main"
|
||||||
|
- "*.latest"
|
||||||
|
- "releases/*"
|
||||||
|
pull_request:
|
||||||
|
# catch when the PR is opened with the label or when the label is added
|
||||||
|
types: [opened, labeled]
|
||||||
|
workflow_dispatch:
|
||||||
|
```
|
||||||
|
|
||||||
|
Some triggers of note that we use:
|
||||||
|
|
||||||
|
- `push` - Runs your workflow when you push a commit or tag.
|
||||||
|
- `pull_request` - Runs your workflow when activity on a pull request in the workflow's repository occurs. Takes in a list of activity types (opened, labeled, etc) if appropriate.
|
||||||
|
- `pull_request_target` - Same as `pull_request` but runs in the context of the PR target branch.
|
||||||
|
- `workflow_call` - used with reusable workflows. Triggered by another workflow calling it.
|
||||||
|
- `workflow_dispatch` - Gives the ability to manually trigger a workflow from the GitHub API, GitHub CLI, or GitHub browser interface.
|
||||||
|
|
||||||
|
|
||||||
|
### Basic Formatting
|
||||||
|
- Add a description of what your workflow does at the top in this format
|
||||||
|
|
||||||
|
```
|
||||||
|
# **what?**
|
||||||
|
# Describe what the action does.
|
||||||
|
|
||||||
|
# **why?**
|
||||||
|
# Why does this action exist?
|
||||||
|
|
||||||
|
# **when?**
|
||||||
|
# How/when will it be triggered?
|
||||||
|
```
|
||||||
|
|
||||||
|
- Leave blank lines between steps and jobs
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
jobs:
|
||||||
|
dependency_changelog:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Get File Name Timestamp
|
||||||
|
id: filename_time
|
||||||
|
uses: nanzm/get-time-action@v1.1
|
||||||
|
with:
|
||||||
|
format: 'YYYYMMDD-HHmmss'
|
||||||
|
|
||||||
|
- name: Get File Content Timestamp
|
||||||
|
id: file_content_time
|
||||||
|
uses: nanzm/get-time-action@v1.1
|
||||||
|
with:
|
||||||
|
format: 'YYYY-MM-DDTHH:mm:ss.000000-05:00'
|
||||||
|
|
||||||
|
- name: Generate Filepath
|
||||||
|
id: fp
|
||||||
|
run: |
|
||||||
|
FILEPATH=.changes/unreleased/Dependencies-${{ steps.filename_time.outputs.time }}.yaml
|
||||||
|
echo "FILEPATH=$FILEPATH" >> $GITHUB_OUTPUT
|
||||||
|
```
|
||||||
|
|
||||||
|
- Print out all variables you will reference as the first step of a job. This allows for easier debugging. The first job should log all inputs. Subsequent jobs should reference outputs of other jobs, if present.
|
||||||
|
|
||||||
|
When possible, generate variables at the top of your workflow in a single place to reference later. This is not always strictly possible since you may generate a value to be used later mid-workflow.
|
||||||
|
|
||||||
|
Be sure to use quotes around these logs so special characters are not interpreted.
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
job1:
|
||||||
|
- name: "[DEBUG] Print Variables"
|
||||||
|
run: |
|
||||||
|
echo "all variables defined as inputs"
|
||||||
|
echo "The last commit sha in the release: ${{ inputs.sha }}"
|
||||||
|
echo "The release version number: ${{ inputs.version_number }}"
|
||||||
|
echo "The changelog_path: ${{ inputs.changelog_path }}"
|
||||||
|
echo "The build_script_path: ${{ inputs.build_script_path }}"
|
||||||
|
echo "The s3_bucket_name: ${{ inputs.s3_bucket_name }}"
|
||||||
|
echo "The package_test_command: ${{ inputs.package_test_command }}"
|
||||||
|
|
||||||
|
# collect all the variables that need to be used in subsequent jobs
|
||||||
|
- name: Set Variables
|
||||||
|
id: variables
|
||||||
|
run: |
|
||||||
|
echo "important_path='performance/runner/Cargo.toml'" >> $GITHUB_OUTPUT
|
||||||
|
echo "release_id=${{github.event.inputs.release_id}}" >> $GITHUB_OUTPUT
|
||||||
|
echo "open_prs=${{github.event.inputs.open_prs}}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
job2:
|
||||||
|
needs: [job1]
|
||||||
|
- name: "[DEBUG] Print Variables"
|
||||||
|
run: |
|
||||||
|
echo "all variables defined in job1 > Set Variables > outputs"
|
||||||
|
echo "important_path: ${{ needs.job1.outputs.important_path }}"
|
||||||
|
echo "release_id: ${{ needs.job1.outputs.release_id }}"
|
||||||
|
echo "open_prs: ${{ needs.job1.outputs.open_prs }}"
|
||||||
|
```
|
||||||
|
|
||||||
|
- When it's not obvious what something does, add a comment!
|
||||||
|
|
||||||
|
___
|
||||||
|
|
||||||
|
## Tips
|
||||||
|
|
||||||
|
### Context
|
||||||
|
- The [GitHub CLI](https://cli.github.com/) is available in the default runners
|
||||||
|
- Actions run in your context. ie, using an action from the marketplace that uses the GITHUB_TOKEN uses the GITHUB_TOKEN generated by your workflow run.
|
||||||
|
|
||||||
|
### Actions from the Marketplace
|
||||||
|
- Don’t use external actions for things that can easily be accomplished manually.
|
||||||
|
- Always read through what an external action does before using it! Often an action in the GitHub Actions Marketplace can be replaced with a few lines in bash. This is much more maintainable (and won’t change under us) and clear as to what’s actually happening. It also prevents any unexpected changes from an external dependency.
|
||||||
|
- Pin actions _we don't control_ to tags.
|
||||||
|
|
||||||
|
### Connecting to AWS
|
||||||
|
- Authenticate with the aws managed workflow
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
- name: Configure AWS credentials from Test account
|
||||||
|
uses: aws-actions/configure-aws-credentials@v2
|
||||||
|
with:
|
||||||
|
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||||
|
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||||
|
aws-region: us-east-1
|
||||||
|
```
|
||||||
|
|
||||||
|
- Then access with the aws command that comes installed on the action runner machines
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
- name: Copy Artifacts from S3 via CLI
|
||||||
|
run: aws s3 cp ${{ env.s3_bucket }} . --recursive
|
||||||
|
```
|
||||||
|
|
||||||
|
### Testing
|
||||||
|
|
||||||
|
- Depending on what your action does, you may be able to use [`act`](https://github.com/nektos/act) to test the action locally. Some features of GitHub Actions do not work with `act`, among those are reusable workflows. If you can't use `act`, you'll have to push your changes up before being able to test. This can be slow.
|
||||||
2
.github/actions/latest-wrangler/README.md
vendored
2
.github/actions/latest-wrangler/README.md
vendored
@@ -35,7 +35,7 @@ jobs:
|
|||||||
build:
|
build:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v1
|
- uses: actions/checkout@v3
|
||||||
- name: Wrangle latest tag
|
- name: Wrangle latest tag
|
||||||
id: is_latest
|
id: is_latest
|
||||||
uses: ./.github/actions/latest-wrangler
|
uses: ./.github/actions/latest-wrangler
|
||||||
|
|||||||
@@ -13,7 +13,7 @@ jobs:
|
|||||||
build:
|
build:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v1
|
- uses: actions/checkout@v3
|
||||||
- name: Wrangle latest tag
|
- name: Wrangle latest tag
|
||||||
id: is_latest
|
id: is_latest
|
||||||
uses: ./.github/actions/latest-wrangler
|
uses: ./.github/actions/latest-wrangler
|
||||||
|
|||||||
13
.github/actions/latest-wrangler/main.py
vendored
13
.github/actions/latest-wrangler/main.py
vendored
@@ -28,10 +28,11 @@ if __name__ == "__main__":
|
|||||||
if package_request.status_code == 404:
|
if package_request.status_code == 404:
|
||||||
if halt_on_missing:
|
if halt_on_missing:
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
else:
|
|
||||||
# everything is the latest if the package doesn't exist
|
# everything is the latest if the package doesn't exist
|
||||||
print(f"::set-output name=latest::{True}")
|
github_output = os.environ.get("GITHUB_OUTPUT")
|
||||||
print(f"::set-output name=minor_latest::{True}")
|
with open(github_output, "at", encoding="utf-8") as gh_output:
|
||||||
|
gh_output.write("latest=True")
|
||||||
|
gh_output.write("minor_latest=True")
|
||||||
sys.exit(0)
|
sys.exit(0)
|
||||||
|
|
||||||
# TODO: verify package meta is "correct"
|
# TODO: verify package meta is "correct"
|
||||||
@@ -91,5 +92,7 @@ if __name__ == "__main__":
|
|||||||
latest = is_latest(pre_rel, new_version, current_latest)
|
latest = is_latest(pre_rel, new_version, current_latest)
|
||||||
minor_latest = is_latest(pre_rel, new_version, current_minor_latest)
|
minor_latest = is_latest(pre_rel, new_version, current_minor_latest)
|
||||||
|
|
||||||
print(f"::set-output name=latest::{latest}")
|
github_output = os.environ.get("GITHUB_OUTPUT")
|
||||||
print(f"::set-output name=minor_latest::{minor_latest}")
|
with open(github_output, "at", encoding="utf-8") as gh_output:
|
||||||
|
gh_output.write(f"latest={latest}")
|
||||||
|
gh_output.write(f"minor_latest={minor_latest}")
|
||||||
|
|||||||
24
.github/pull_request_template.md
vendored
24
.github/pull_request_template.md
vendored
@@ -1,21 +1,35 @@
|
|||||||
resolves #
|
resolves #
|
||||||
|
[docs](https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose) dbt-labs/docs.getdbt.com/#
|
||||||
|
|
||||||
<!---
|
<!---
|
||||||
Include the number of the issue addressed by this PR above if applicable.
|
Include the number of the issue addressed by this PR above if applicable.
|
||||||
PRs for code changes without an associated issue *will not be merged*.
|
PRs for code changes without an associated issue *will not be merged*.
|
||||||
See CONTRIBUTING.md for more information.
|
See CONTRIBUTING.md for more information.
|
||||||
|
|
||||||
|
Include the number of the docs issue that was opened for this PR. If
|
||||||
|
this change has no user-facing implications, "N/A" suffices instead. New
|
||||||
|
docs tickets can be created by clicking the link above or by going to
|
||||||
|
https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose.
|
||||||
-->
|
-->
|
||||||
|
|
||||||
### Description
|
### Problem
|
||||||
|
|
||||||
<!---
|
<!---
|
||||||
Describe the Pull Request here. Add any references and info to help reviewers
|
Describe the problem this PR is solving. What is the application state
|
||||||
understand your changes. Include any tradeoffs you considered.
|
before this PR is merged?
|
||||||
|
-->
|
||||||
|
|
||||||
|
### Solution
|
||||||
|
|
||||||
|
<!---
|
||||||
|
Describe the way this PR solves the above problem. Add as much detail as you
|
||||||
|
can to help reviewers understand your changes. Include any alternatives and
|
||||||
|
tradeoffs you considered.
|
||||||
-->
|
-->
|
||||||
|
|
||||||
### Checklist
|
### Checklist
|
||||||
|
|
||||||
- [ ] I have signed the [CLA](https://docs.getdbt.com/docs/contributor-license-agreements)
|
- [ ] I have read [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md) and understand what's expected of me
|
||||||
- [ ] I have run this code in development and it appears to resolve the stated issue
|
- [ ] I have run this code in development and it appears to resolve the stated issue
|
||||||
- [ ] This PR includes tests, or tests are not required/relevant for this PR
|
- [ ] This PR includes tests, or tests are not required/relevant for this PR
|
||||||
- [ ] I have added information about my change to be included in the [CHANGELOG](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#Adding-CHANGELOG-Entry).
|
- [ ] This PR has no interface changes (e.g. macros, cli, logs, json artifacts, config files, adapter interface, etc) or this PR has already received feedback and approval from Product or DX
|
||||||
|
|||||||
18
.github/workflows/backport.yml
vendored
18
.github/workflows/backport.yml
vendored
@@ -13,22 +13,28 @@
|
|||||||
# This automates the backporting process
|
# This automates the backporting process
|
||||||
|
|
||||||
# **when?**
|
# **when?**
|
||||||
# Once a PR is "Squash and merge"'d and it has been correctly labeled
|
# Once a PR is "Squash and merge"'d, by adding a backport label, this is triggered
|
||||||
# according to the naming convention.
|
|
||||||
|
|
||||||
name: Backport
|
name: Backport
|
||||||
on:
|
on:
|
||||||
pull_request:
|
pull_request:
|
||||||
types:
|
types:
|
||||||
- closed
|
|
||||||
- labeled
|
- labeled
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
pull-requests: write
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
backport:
|
backport:
|
||||||
runs-on: ubuntu-18.04
|
|
||||||
name: Backport
|
name: Backport
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
# Only react to merged PRs for security reasons.
|
||||||
|
# See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target.
|
||||||
|
if: >
|
||||||
|
github.event.pull_request.merged
|
||||||
|
&& contains(github.event.label.name, 'backport')
|
||||||
steps:
|
steps:
|
||||||
- name: Backport
|
- uses: tibdex/backport@v2.0.3
|
||||||
uses: tibdex/backport@v1.1.1
|
|
||||||
with:
|
with:
|
||||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|||||||
61
.github/workflows/bot-changelog.yml
vendored
Normal file
61
.github/workflows/bot-changelog.yml
vendored
Normal file
@@ -0,0 +1,61 @@
|
|||||||
|
# **what?**
|
||||||
|
# When bots create a PR, this action will add a corresponding changie yaml file to that
|
||||||
|
# PR when a specific label is added.
|
||||||
|
#
|
||||||
|
# The file is created off a template:
|
||||||
|
#
|
||||||
|
# kind: <per action matrix>
|
||||||
|
# body: <PR title>
|
||||||
|
# time: <current timestamp>
|
||||||
|
# custom:
|
||||||
|
# Author: <PR User Login (generally the bot)>
|
||||||
|
# Issue: 4904
|
||||||
|
# PR: <PR number>
|
||||||
|
#
|
||||||
|
# **why?**
|
||||||
|
# Automate changelog generation for more visibility with automated bot PRs.
|
||||||
|
#
|
||||||
|
# **when?**
|
||||||
|
# Once a PR is created, label should be added to PR before or after creation. You can also
|
||||||
|
# manually trigger this by adding the appropriate label at any time.
|
||||||
|
#
|
||||||
|
# **how to add another bot?**
|
||||||
|
# Add the label and changie kind to the include matrix. That's it!
|
||||||
|
#
|
||||||
|
|
||||||
|
name: Bot Changelog
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
# catch when the PR is opened with the label or when the label is added
|
||||||
|
types: [labeled]
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
pull-requests: read
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
generate_changelog:
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
include:
|
||||||
|
- label: "dependencies"
|
||||||
|
changie_kind: "Dependencies"
|
||||||
|
- label: "snyk"
|
||||||
|
changie_kind: "Security"
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
|
||||||
|
- name: Create and commit changelog on bot PR
|
||||||
|
if: ${{ contains(github.event.pull_request.labels.*.name, matrix.label) }}
|
||||||
|
id: bot_changelog
|
||||||
|
uses: emmyoop/changie_bot@v1.1.0
|
||||||
|
with:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.FISHTOWN_BOT_PAT }}
|
||||||
|
commit_author_name: "Github Build Bot"
|
||||||
|
commit_author_email: "<buildbot@fishtownanalytics.com>"
|
||||||
|
commit_message: "Add automated changelog yaml from template for bot PR"
|
||||||
|
changie_kind: ${{ matrix.changie_kind }}
|
||||||
|
label: ${{ matrix.label }}
|
||||||
|
custom_changelog_string: "custom:\n Author: ${{ github.event.pull_request.user.login }}\n PR: ${{ github.event.pull_request.number }}"
|
||||||
62
.github/workflows/changelog-check.yml
vendored
62
.github/workflows/changelog-check.yml
vendored
@@ -1,62 +0,0 @@
|
|||||||
# **what?**
|
|
||||||
# Checks that a file has been committed under the /.changes directory
|
|
||||||
# as a new CHANGELOG entry. Cannot check for a specific filename as
|
|
||||||
# it is dynamically generated by change type and timestamp.
|
|
||||||
# This workflow should not require any secrets since it runs for PRs
|
|
||||||
# from forked repos.
|
|
||||||
# By default, secrets are not passed to workflows running from
|
|
||||||
# a forked repo.
|
|
||||||
|
|
||||||
# **why?**
|
|
||||||
# Ensure code change gets reflected in the CHANGELOG.
|
|
||||||
|
|
||||||
# **when?**
|
|
||||||
# This will run for all PRs going into main and *.latest.
|
|
||||||
|
|
||||||
name: Check Changelog Entry
|
|
||||||
|
|
||||||
on:
|
|
||||||
pull_request:
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
defaults:
|
|
||||||
run:
|
|
||||||
shell: bash
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
pull-requests: write
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
changelog:
|
|
||||||
name: changelog
|
|
||||||
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Check if changelog file was added
|
|
||||||
# https://github.com/marketplace/actions/paths-changes-filter
|
|
||||||
# For each filter, it sets output variable named by the filter to the text:
|
|
||||||
# 'true' - if any of changed files matches any of filter rules
|
|
||||||
# 'false' - if none of changed files matches any of filter rules
|
|
||||||
# also, returns:
|
|
||||||
# `changes` - JSON array with names of all filters matching any of the changed files
|
|
||||||
uses: dorny/paths-filter@v2
|
|
||||||
id: filter
|
|
||||||
with:
|
|
||||||
token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
filters: |
|
|
||||||
changelog:
|
|
||||||
- added: '.changes/unreleased/**.yaml'
|
|
||||||
- name: Check a file has been added to .changes/unreleased if required
|
|
||||||
uses: actions/github-script@v6
|
|
||||||
if: steps.filter.outputs.changelog == 'false' && !contains( github.event.pull_request.labels.*.name, 'Skip Changelog')
|
|
||||||
with:
|
|
||||||
script: |
|
|
||||||
github.rest.issues.createComment({
|
|
||||||
issue_number: context.issue.number,
|
|
||||||
owner: context.repo.owner,
|
|
||||||
repo: context.repo.repo,
|
|
||||||
body: "Thank you for your pull request! We could not find a changelog entry for this change. For details on how to document a change, see [the contributing guide](CONTRIBUTING.md)."
|
|
||||||
})
|
|
||||||
core.setFailed('Changelog entry required to merge.')
|
|
||||||
40
.github/workflows/changelog-existence.yml
vendored
Normal file
40
.github/workflows/changelog-existence.yml
vendored
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
# **what?**
|
||||||
|
# Checks that a file has been committed under the /.changes directory
|
||||||
|
# as a new CHANGELOG entry. Cannot check for a specific filename as
|
||||||
|
# it is dynamically generated by change type and timestamp.
|
||||||
|
# This workflow should not require any secrets since it runs for PRs
|
||||||
|
# from forked repos.
|
||||||
|
# By default, secrets are not passed to workflows running from
|
||||||
|
# a forked repo.
|
||||||
|
|
||||||
|
# **why?**
|
||||||
|
# Ensure code change gets reflected in the CHANGELOG.
|
||||||
|
|
||||||
|
# **when?**
|
||||||
|
# This will run for all PRs going into main and *.latest. It will
|
||||||
|
# run when they are opened, reopened, when any label is added or removed
|
||||||
|
# and when new code is pushed to the branch. The action will then get
|
||||||
|
# skipped if the 'Skip Changelog' label is present is any of the labels.
|
||||||
|
|
||||||
|
name: Check Changelog Entry
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
types: [opened, reopened, labeled, unlabeled, synchronize]
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
pull-requests: write
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
changelog:
|
||||||
|
uses: dbt-labs/actions/.github/workflows/changelog-existence.yml@main
|
||||||
|
with:
|
||||||
|
changelog_comment: 'Thank you for your pull request! We could not find a changelog entry for this change. For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry).'
|
||||||
|
skip_label: 'Skip Changelog'
|
||||||
|
secrets: inherit
|
||||||
41
.github/workflows/cut-release-branch.yml
vendored
Normal file
41
.github/workflows/cut-release-branch.yml
vendored
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
# **what?**
|
||||||
|
# Cuts a new `*.latest` branch
|
||||||
|
# Also cleans up all files in `.changes/unreleased` and `.changes/previous version` on
|
||||||
|
# `main` and bumps `main` to the input version.
|
||||||
|
|
||||||
|
# **why?**
|
||||||
|
# Generally reduces the workload of engineers and reduces error. Allow automation.
|
||||||
|
|
||||||
|
# **when?**
|
||||||
|
# This will run when called manually.
|
||||||
|
|
||||||
|
name: Cut new release branch
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
version_to_bump_main:
|
||||||
|
description: 'The alpha version main should bump to (ex. 1.6.0a1)'
|
||||||
|
required: true
|
||||||
|
new_branch_name:
|
||||||
|
description: 'The full name of the new branch (ex. 1.5.latest)'
|
||||||
|
required: true
|
||||||
|
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
cut_branch:
|
||||||
|
name: "Cut branch and clean up main for dbt-core"
|
||||||
|
uses: dbt-labs/actions/.github/workflows/cut-release-branch.yml@main
|
||||||
|
with:
|
||||||
|
version_to_bump_main: ${{ inputs.version_to_bump_main }}
|
||||||
|
new_branch_name: ${{ inputs.new_branch_name }}
|
||||||
|
PR_title: "Cleanup main after cutting new ${{ inputs.new_branch_name }} branch"
|
||||||
|
PR_body: "All adapter PRs will fail CI until the dbt-core PR has been merged due to release version conflicts."
|
||||||
|
secrets:
|
||||||
|
FISHTOWN_BOT_PAT: ${{ secrets.FISHTOWN_BOT_PAT }}
|
||||||
4
.github/workflows/jira-creation.yml
vendored
4
.github/workflows/jira-creation.yml
vendored
@@ -18,8 +18,8 @@ permissions:
|
|||||||
issues: write
|
issues: write
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
call-label-action:
|
call-creation-action:
|
||||||
uses: dbt-labs/jira-actions/.github/workflows/jira-creation.yml@main
|
uses: dbt-labs/actions/.github/workflows/jira-creation-actions.yml@main
|
||||||
secrets:
|
secrets:
|
||||||
JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
|
JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
|
||||||
JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
|
JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
|
||||||
|
|||||||
2
.github/workflows/jira-label.yml
vendored
2
.github/workflows/jira-label.yml
vendored
@@ -19,7 +19,7 @@ permissions:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
call-label-action:
|
call-label-action:
|
||||||
uses: dbt-labs/jira-actions/.github/workflows/jira-label.yml@main
|
uses: dbt-labs/actions/.github/workflows/jira-label-actions.yml@main
|
||||||
secrets:
|
secrets:
|
||||||
JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
|
JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
|
||||||
JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
|
JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
|
||||||
|
|||||||
7
.github/workflows/jira-transition.yml
vendored
7
.github/workflows/jira-transition.yml
vendored
@@ -15,9 +15,12 @@ on:
|
|||||||
issues:
|
issues:
|
||||||
types: [closed, deleted, reopened]
|
types: [closed, deleted, reopened]
|
||||||
|
|
||||||
|
# no special access is needed
|
||||||
|
permissions: read-all
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
call-label-action:
|
call-transition-action:
|
||||||
uses: dbt-labs/jira-actions/.github/workflows/jira-transition.yml@main
|
uses: dbt-labs/actions/.github/workflows/jira-transition-actions.yml@main
|
||||||
secrets:
|
secrets:
|
||||||
JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
|
JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
|
||||||
JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
|
JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
|
||||||
|
|||||||
166
.github/workflows/main.yml
vendored
166
.github/workflows/main.yml
vendored
@@ -33,28 +33,33 @@ defaults:
|
|||||||
run:
|
run:
|
||||||
shell: bash
|
shell: bash
|
||||||
|
|
||||||
|
# top-level adjustments can be made here
|
||||||
|
env:
|
||||||
|
# number of parallel processes to spawn for python integration testing
|
||||||
|
PYTHON_INTEGRATION_TEST_WORKERS: ${{ vars.PYTHON_INTEGRATION_TEST_WORKERS }}
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
code-quality:
|
code-quality:
|
||||||
name: code-quality
|
name: code-quality
|
||||||
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
timeout-minutes: 10
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Check out the repository
|
- name: Check out the repository
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Set up Python
|
- name: Set up Python
|
||||||
uses: actions/setup-python@v2
|
uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: '3.8'
|
||||||
|
|
||||||
- name: Install python dependencies
|
- name: Install python dependencies
|
||||||
run: |
|
run: |
|
||||||
pip install --user --upgrade pip
|
python -m pip install --user --upgrade pip
|
||||||
pip --version
|
python -m pip --version
|
||||||
pip install pre-commit
|
make dev
|
||||||
pre-commit --version
|
|
||||||
pip install mypy==0.782
|
|
||||||
mypy --version
|
mypy --version
|
||||||
pip install -r editable-requirements.txt
|
|
||||||
dbt --version
|
dbt --version
|
||||||
|
|
||||||
- name: Run pre-commit hooks
|
- name: Run pre-commit hooks
|
||||||
@@ -64,30 +69,30 @@ jobs:
|
|||||||
name: unit test / python ${{ matrix.python-version }}
|
name: unit test / python ${{ matrix.python-version }}
|
||||||
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
timeout-minutes: 10
|
||||||
|
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
python-version: [3.7, 3.8, 3.9]
|
python-version: ["3.8", "3.9", "3.10", "3.11"]
|
||||||
|
|
||||||
env:
|
env:
|
||||||
TOXENV: "unit"
|
TOXENV: "unit"
|
||||||
PYTEST_ADDOPTS: "-v --color=yes --csv unit_results.csv"
|
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Check out the repository
|
- name: Check out the repository
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Set up Python ${{ matrix.python-version }}
|
- name: Set up Python ${{ matrix.python-version }}
|
||||||
uses: actions/setup-python@v2
|
uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.python-version }}
|
python-version: ${{ matrix.python-version }}
|
||||||
|
|
||||||
- name: Install python dependencies
|
- name: Install python dependencies
|
||||||
run: |
|
run: |
|
||||||
pip install --user --upgrade pip
|
python -m pip install --user --upgrade pip
|
||||||
pip --version
|
python -m pip --version
|
||||||
pip install tox
|
python -m pip install tox
|
||||||
tox --version
|
tox --version
|
||||||
|
|
||||||
- name: Run tox
|
- name: Run tox
|
||||||
@@ -96,41 +101,83 @@ jobs:
|
|||||||
- name: Get current date
|
- name: Get current date
|
||||||
if: always()
|
if: always()
|
||||||
id: date
|
id: date
|
||||||
run: echo "::set-output name=date::$(date +'%Y-%m-%dT%H_%M_%S')" #no colons allowed for artifacts
|
run: |
|
||||||
|
CURRENT_DATE=$(date +'%Y-%m-%dT%H_%M_%S') # no colons allowed for artifacts
|
||||||
|
echo "date=$CURRENT_DATE" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
- uses: actions/upload-artifact@v2
|
- name: Upload Unit Test Coverage to Codecov
|
||||||
if: always()
|
if: ${{ matrix.python-version == '3.11' }}
|
||||||
with:
|
uses: codecov/codecov-action@v3
|
||||||
name: unit_results_${{ matrix.python-version }}-${{ steps.date.outputs.date }}.csv
|
env:
|
||||||
path: unit_results.csv
|
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||||
|
|
||||||
|
integration-metadata:
|
||||||
|
name: integration test metadata generation
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
split-groups: ${{ steps.generate-split-groups.outputs.split-groups }}
|
||||||
|
include: ${{ steps.generate-include.outputs.include }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: generate split-groups
|
||||||
|
id: generate-split-groups
|
||||||
|
run: |
|
||||||
|
MATRIX_JSON="["
|
||||||
|
for B in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
|
||||||
|
MATRIX_JSON+=$(sed 's/^/"/;s/$/"/' <<< "${B}")
|
||||||
|
done
|
||||||
|
MATRIX_JSON="${MATRIX_JSON//\"\"/\", \"}"
|
||||||
|
MATRIX_JSON+="]"
|
||||||
|
echo "split-groups=${MATRIX_JSON}"
|
||||||
|
echo "split-groups=${MATRIX_JSON}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: generate include
|
||||||
|
id: generate-include
|
||||||
|
run: |
|
||||||
|
INCLUDE=('"python-version":"3.8","os":"windows-latest"' '"python-version":"3.8","os":"macos-latest"' )
|
||||||
|
INCLUDE_GROUPS="["
|
||||||
|
for include in ${INCLUDE[@]}; do
|
||||||
|
for group in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
|
||||||
|
INCLUDE_GROUPS+=$(sed 's/$/, /' <<< "{\"split-group\":\"${group}\",${include}}")
|
||||||
|
done
|
||||||
|
done
|
||||||
|
INCLUDE_GROUPS=$(echo $INCLUDE_GROUPS | sed 's/,*$//g')
|
||||||
|
INCLUDE_GROUPS+="]"
|
||||||
|
echo "include=${INCLUDE_GROUPS}"
|
||||||
|
echo "include=${INCLUDE_GROUPS}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
integration:
|
integration:
|
||||||
name: integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}
|
name: (${{ matrix.split-group }}) integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}
|
||||||
|
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
|
timeout-minutes: 30
|
||||||
|
needs:
|
||||||
|
- integration-metadata
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
python-version: [3.7, 3.8, 3.9]
|
python-version: ["3.8", "3.9", "3.10", "3.11"]
|
||||||
os: [ubuntu-latest]
|
os: [ubuntu-20.04]
|
||||||
include:
|
split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
|
||||||
- python-version: 3.8
|
include: ${{ fromJson(needs.integration-metadata.outputs.include) }}
|
||||||
os: windows-latest
|
|
||||||
- python-version: 3.8
|
|
||||||
os: macos-latest
|
|
||||||
|
|
||||||
env:
|
env:
|
||||||
TOXENV: integration
|
TOXENV: integration
|
||||||
PYTEST_ADDOPTS: "-v --color=yes -n4 --csv integration_results.csv"
|
|
||||||
DBT_INVOCATION_ENV: github-actions
|
DBT_INVOCATION_ENV: github-actions
|
||||||
|
DBT_TEST_USER_1: dbt_test_user_1
|
||||||
|
DBT_TEST_USER_2: dbt_test_user_2
|
||||||
|
DBT_TEST_USER_3: dbt_test_user_3
|
||||||
|
DD_CIVISIBILITY_AGENTLESS_ENABLED: true
|
||||||
|
DD_API_KEY: ${{ secrets.DATADOG_API_KEY }}
|
||||||
|
DD_SITE: datadoghq.com
|
||||||
|
DD_ENV: ci
|
||||||
|
DD_SERVICE: ${{ github.event.repository.name }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Check out the repository
|
- name: Check out the repository
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Set up Python ${{ matrix.python-version }}
|
- name: Set up Python ${{ matrix.python-version }}
|
||||||
uses: actions/setup-python@v2
|
uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.python-version }}
|
python-version: ${{ matrix.python-version }}
|
||||||
|
|
||||||
@@ -148,30 +195,43 @@ jobs:
|
|||||||
|
|
||||||
- name: Install python tools
|
- name: Install python tools
|
||||||
run: |
|
run: |
|
||||||
pip install --user --upgrade pip
|
python -m pip install --user --upgrade pip
|
||||||
pip --version
|
python -m pip --version
|
||||||
pip install tox
|
python -m pip install tox
|
||||||
tox --version
|
tox --version
|
||||||
|
|
||||||
- name: Run tests
|
- name: Run tests
|
||||||
run: tox
|
run: tox -- --ddtrace
|
||||||
|
env:
|
||||||
|
PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}
|
||||||
|
|
||||||
- name: Get current date
|
- name: Get current date
|
||||||
if: always()
|
if: always()
|
||||||
id: date
|
id: date
|
||||||
run: echo "::set-output name=date::$(date +'%Y_%m_%dT%H_%M_%S')" #no colons allowed for artifacts
|
run: |
|
||||||
|
CURRENT_DATE=$(date +'%Y-%m-%dT%H_%M_%S') # no colons allowed for artifacts
|
||||||
|
echo "date=$CURRENT_DATE" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
- uses: actions/upload-artifact@v2
|
- uses: actions/upload-artifact@v3
|
||||||
if: always()
|
if: always()
|
||||||
with:
|
with:
|
||||||
name: logs_${{ matrix.python-version }}_${{ matrix.os }}_${{ steps.date.outputs.date }}
|
name: logs_${{ matrix.python-version }}_${{ matrix.os }}_${{ steps.date.outputs.date }}
|
||||||
path: ./logs
|
path: ./logs
|
||||||
|
|
||||||
- uses: actions/upload-artifact@v2
|
- name: Upload Integration Test Coverage to Codecov
|
||||||
if: always()
|
if: ${{ matrix.python-version == '3.11' }}
|
||||||
with:
|
uses: codecov/codecov-action@v3
|
||||||
name: integration_results_${{ matrix.python-version }}_${{ matrix.os }}_${{ steps.date.outputs.date }}.csv
|
env:
|
||||||
path: integration_results.csv
|
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||||
|
|
||||||
|
integration-report:
|
||||||
|
name: integration test suite
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: integration
|
||||||
|
steps:
|
||||||
|
- name: "[Notification] Integration test suite passes"
|
||||||
|
run: |
|
||||||
|
echo "::notice title="Integration test suite passes""
|
||||||
|
|
||||||
build:
|
build:
|
||||||
name: build packages
|
name: build packages
|
||||||
@@ -180,18 +240,18 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Check out the repository
|
- name: Check out the repository
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Set up Python
|
- name: Set up Python
|
||||||
uses: actions/setup-python@v2
|
uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: 3.8
|
python-version: '3.8'
|
||||||
|
|
||||||
- name: Install python dependencies
|
- name: Install python dependencies
|
||||||
run: |
|
run: |
|
||||||
pip install --user --upgrade pip
|
python -m pip install --user --upgrade pip
|
||||||
pip install --upgrade setuptools wheel twine check-wheel-contents
|
python -m pip install --upgrade setuptools wheel twine check-wheel-contents
|
||||||
pip --version
|
python -m pip --version
|
||||||
|
|
||||||
- name: Build distributions
|
- name: Build distributions
|
||||||
run: ./scripts/build-dist.sh
|
run: ./scripts/build-dist.sh
|
||||||
@@ -209,7 +269,7 @@ jobs:
|
|||||||
|
|
||||||
- name: Install wheel distributions
|
- name: Install wheel distributions
|
||||||
run: |
|
run: |
|
||||||
find ./dist/*.whl -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/
|
find ./dist/*.whl -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/
|
||||||
|
|
||||||
- name: Check wheel distributions
|
- name: Check wheel distributions
|
||||||
run: |
|
run: |
|
||||||
@@ -218,7 +278,7 @@ jobs:
|
|||||||
- name: Install source distributions
|
- name: Install source distributions
|
||||||
# ignore dbt-1.0.0, which intentionally raises an error when installed from source
|
# ignore dbt-1.0.0, which intentionally raises an error when installed from source
|
||||||
run: |
|
run: |
|
||||||
find ./dist/dbt-[a-z]*.gz -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/
|
find ./dist/dbt-[a-z]*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/
|
||||||
|
|
||||||
- name: Check source distributions
|
- name: Check source distributions
|
||||||
run: |
|
run: |
|
||||||
|
|||||||
265
.github/workflows/model_performance.yml
vendored
Normal file
265
.github/workflows/model_performance.yml
vendored
Normal file
@@ -0,0 +1,265 @@
|
|||||||
|
# **what?**
|
||||||
|
# This workflow models the performance characteristics of a point in time in dbt.
|
||||||
|
# It runs specific dbt commands on committed projects multiple times to create and
|
||||||
|
# commit information about the distribution to the current branch. For more information
|
||||||
|
# see the readme in the performance module at /performance/README.md.
|
||||||
|
#
|
||||||
|
# **why?**
|
||||||
|
# When developing new features, we can take quick performance samples and compare
|
||||||
|
# them against the commited baseline measurements produced by this workflow to detect
|
||||||
|
# some performance regressions at development time before they reach users.
|
||||||
|
#
|
||||||
|
# **when?**
|
||||||
|
# This is only run once directly after each release (for non-prereleases). If for some
|
||||||
|
# reason the results of a run are not satisfactory, it can also be triggered manually.
|
||||||
|
|
||||||
|
name: Model Performance Characteristics
|
||||||
|
|
||||||
|
on:
|
||||||
|
# runs after non-prereleases are published.
|
||||||
|
release:
|
||||||
|
types: [released]
|
||||||
|
# run manually from the actions tab
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
release_id:
|
||||||
|
description: 'dbt version to model (must be non-prerelease in Pypi)'
|
||||||
|
type: string
|
||||||
|
required: true
|
||||||
|
|
||||||
|
env:
|
||||||
|
RUNNER_CACHE_PATH: performance/runner/target/release/runner
|
||||||
|
|
||||||
|
# both jobs need to write
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
pull-requests: write
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
set-variables:
|
||||||
|
name: Setting Variables
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
cache_key: ${{ steps.variables.outputs.cache_key }}
|
||||||
|
release_id: ${{ steps.semver.outputs.base-version }}
|
||||||
|
release_branch: ${{ steps.variables.outputs.release_branch }}
|
||||||
|
steps:
|
||||||
|
|
||||||
|
# explicitly checkout the performance runner from main regardless of which
|
||||||
|
# version we are modeling.
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
ref: main
|
||||||
|
|
||||||
|
- name: Parse version into parts
|
||||||
|
id: semver
|
||||||
|
uses: dbt-labs/actions/parse-semver@v1
|
||||||
|
with:
|
||||||
|
version: ${{ github.event.inputs.release_id || github.event.release.tag_name }}
|
||||||
|
|
||||||
|
# collect all the variables that need to be used in subsequent jobs
|
||||||
|
- name: Set variables
|
||||||
|
id: variables
|
||||||
|
run: |
|
||||||
|
# create a cache key that will be used in the next job. without this the
|
||||||
|
# next job would have to checkout from main and hash the files itself.
|
||||||
|
echo "cache_key=${{ runner.os }}-${{ hashFiles('performance/runner/Cargo.toml')}}-${{ hashFiles('performance/runner/src/*') }}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
branch_name="${{steps.semver.outputs.major}}.${{steps.semver.outputs.minor}}.latest"
|
||||||
|
echo "release_branch=$branch_name" >> $GITHUB_OUTPUT
|
||||||
|
echo "release branch is inferred to be ${branch_name}"
|
||||||
|
|
||||||
|
latest-runner:
|
||||||
|
name: Build or Fetch Runner
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: [set-variables]
|
||||||
|
env:
|
||||||
|
RUSTFLAGS: "-D warnings"
|
||||||
|
steps:
|
||||||
|
- name: '[DEBUG] print variables'
|
||||||
|
run: |
|
||||||
|
echo "all variables defined in set-variables"
|
||||||
|
echo "cache_key: ${{ needs.set-variables.outputs.cache_key }}"
|
||||||
|
echo "release_id: ${{ needs.set-variables.outputs.release_id }}"
|
||||||
|
echo "release_branch: ${{ needs.set-variables.outputs.release_branch }}"
|
||||||
|
|
||||||
|
# explicitly checkout the performance runner from main regardless of which
|
||||||
|
# version we are modeling.
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
ref: main
|
||||||
|
|
||||||
|
# attempts to access a previously cached runner
|
||||||
|
- uses: actions/cache@v3
|
||||||
|
id: cache
|
||||||
|
with:
|
||||||
|
path: ${{ env.RUNNER_CACHE_PATH }}
|
||||||
|
key: ${{ needs.set-variables.outputs.cache_key }}
|
||||||
|
|
||||||
|
- name: Fetch Rust Toolchain
|
||||||
|
if: steps.cache.outputs.cache-hit != 'true'
|
||||||
|
uses: actions-rs/toolchain@v1
|
||||||
|
with:
|
||||||
|
profile: minimal
|
||||||
|
toolchain: stable
|
||||||
|
override: true
|
||||||
|
|
||||||
|
- name: Add fmt
|
||||||
|
if: steps.cache.outputs.cache-hit != 'true'
|
||||||
|
run: rustup component add rustfmt
|
||||||
|
|
||||||
|
- name: Cargo fmt
|
||||||
|
if: steps.cache.outputs.cache-hit != 'true'
|
||||||
|
uses: actions-rs/cargo@v1
|
||||||
|
with:
|
||||||
|
command: fmt
|
||||||
|
args: --manifest-path performance/runner/Cargo.toml --all -- --check
|
||||||
|
|
||||||
|
- name: Test
|
||||||
|
if: steps.cache.outputs.cache-hit != 'true'
|
||||||
|
uses: actions-rs/cargo@v1
|
||||||
|
with:
|
||||||
|
command: test
|
||||||
|
args: --manifest-path performance/runner/Cargo.toml
|
||||||
|
|
||||||
|
- name: Build (optimized)
|
||||||
|
if: steps.cache.outputs.cache-hit != 'true'
|
||||||
|
uses: actions-rs/cargo@v1
|
||||||
|
with:
|
||||||
|
command: build
|
||||||
|
args: --release --manifest-path performance/runner/Cargo.toml
|
||||||
|
# the cache action automatically caches this binary at the end of the job
|
||||||
|
|
||||||
|
model:
|
||||||
|
# depends on `latest-runner` as a separate job so that failures in this job do not prevent
|
||||||
|
# a successfully tested and built binary from being cached.
|
||||||
|
needs: [set-variables, latest-runner]
|
||||||
|
name: Model a release
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
|
||||||
|
- name: '[DEBUG] print variables'
|
||||||
|
run: |
|
||||||
|
echo "all variables defined in set-variables"
|
||||||
|
echo "cache_key: ${{ needs.set-variables.outputs.cache_key }}"
|
||||||
|
echo "release_id: ${{ needs.set-variables.outputs.release_id }}"
|
||||||
|
echo "release_branch: ${{ needs.set-variables.outputs.release_branch }}"
|
||||||
|
|
||||||
|
- name: Setup Python
|
||||||
|
uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: "3.8"
|
||||||
|
|
||||||
|
- name: Install dbt
|
||||||
|
run: pip install dbt-postgres==${{ needs.set-variables.outputs.release_id }}
|
||||||
|
|
||||||
|
- name: Install Hyperfine
|
||||||
|
run: wget https://github.com/sharkdp/hyperfine/releases/download/v1.11.0/hyperfine_1.11.0_amd64.deb && sudo dpkg -i hyperfine_1.11.0_amd64.deb
|
||||||
|
|
||||||
|
# explicitly checkout main to get the latest project definitions
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
ref: main
|
||||||
|
|
||||||
|
# this was built in the previous job so it will be there.
|
||||||
|
- name: Fetch Runner
|
||||||
|
uses: actions/cache@v3
|
||||||
|
id: cache
|
||||||
|
with:
|
||||||
|
path: ${{ env.RUNNER_CACHE_PATH }}
|
||||||
|
key: ${{ needs.set-variables.outputs.cache_key }}
|
||||||
|
|
||||||
|
- name: Move Runner
|
||||||
|
run: mv performance/runner/target/release/runner performance/app
|
||||||
|
|
||||||
|
- name: Change Runner Permissions
|
||||||
|
run: chmod +x ./performance/app
|
||||||
|
|
||||||
|
- name: '[DEBUG] ls baseline directory before run'
|
||||||
|
run: ls -R performance/baselines/
|
||||||
|
|
||||||
|
# `${{ github.workspace }}` is used to pass the absolute path
|
||||||
|
- name: Create directories
|
||||||
|
run: |
|
||||||
|
mkdir ${{ github.workspace }}/performance/tmp/
|
||||||
|
mkdir -p performance/baselines/${{ needs.set-variables.outputs.release_id }}/
|
||||||
|
|
||||||
|
# Run modeling with taking 20 samples
|
||||||
|
- name: Run Measurement
|
||||||
|
run: |
|
||||||
|
performance/app model -v ${{ needs.set-variables.outputs.release_id }} -b ${{ github.workspace }}/performance/baselines/ -p ${{ github.workspace }}/performance/projects/ -t ${{ github.workspace }}/performance/tmp/ -n 20
|
||||||
|
|
||||||
|
- name: '[DEBUG] ls baseline directory after run'
|
||||||
|
run: ls -R performance/baselines/
|
||||||
|
|
||||||
|
- uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: baseline
|
||||||
|
path: performance/baselines/${{ needs.set-variables.outputs.release_id }}/
|
||||||
|
|
||||||
|
create-pr:
|
||||||
|
name: Open PR for ${{ matrix.base-branch }}
|
||||||
|
|
||||||
|
# depends on `model` as a separate job so that the baseline can be committed to more than one branch
|
||||||
|
# i.e. release branch and main
|
||||||
|
needs: [set-variables, latest-runner, model]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
include:
|
||||||
|
- base-branch: refs/heads/main
|
||||||
|
target-branch: performance-bot/main_${{ needs.set-variables.outputs.release_id }}_${{GITHUB.RUN_ID}}
|
||||||
|
- base-branch: refs/heads/${{ needs.set-variables.outputs.release_branch }}
|
||||||
|
target-branch: performance-bot/release_${{ needs.set-variables.outputs.release_id }}_${{GITHUB.RUN_ID}}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: '[DEBUG] print variables'
|
||||||
|
run: |
|
||||||
|
echo "all variables defined in set-variables"
|
||||||
|
echo "cache_key: ${{ needs.set-variables.outputs.cache_key }}"
|
||||||
|
echo "release_id: ${{ needs.set-variables.outputs.release_id }}"
|
||||||
|
echo "release_branch: ${{ needs.set-variables.outputs.release_branch }}"
|
||||||
|
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
ref: ${{ matrix.base-branch }}
|
||||||
|
|
||||||
|
- name: Create PR branch
|
||||||
|
run: |
|
||||||
|
git checkout -b ${{ matrix.target-branch }}
|
||||||
|
git push origin ${{ matrix.target-branch }}
|
||||||
|
git branch --set-upstream-to=origin/${{ matrix.target-branch }} ${{ matrix.target-branch }}
|
||||||
|
|
||||||
|
- uses: actions/download-artifact@v3
|
||||||
|
with:
|
||||||
|
name: baseline
|
||||||
|
path: performance/baselines/${{ needs.set-variables.outputs.release_id }}
|
||||||
|
|
||||||
|
- name: '[DEBUG] ls baselines after artifact download'
|
||||||
|
run: ls -R performance/baselines/
|
||||||
|
|
||||||
|
- name: Commit baseline
|
||||||
|
uses: EndBug/add-and-commit@v9
|
||||||
|
with:
|
||||||
|
add: 'performance/baselines/*'
|
||||||
|
author_name: 'Github Build Bot'
|
||||||
|
author_email: 'buildbot@fishtownanalytics.com'
|
||||||
|
message: 'adding performance baseline for ${{ needs.set-variables.outputs.release_id }}'
|
||||||
|
push: 'origin origin/${{ matrix.target-branch }}'
|
||||||
|
|
||||||
|
- name: Create Pull Request
|
||||||
|
uses: peter-evans/create-pull-request@v5
|
||||||
|
with:
|
||||||
|
author: 'Github Build Bot <buildbot@fishtownanalytics.com>'
|
||||||
|
base: ${{ matrix.base-branch }}
|
||||||
|
branch: '${{ matrix.target-branch }}'
|
||||||
|
title: 'Adding performance modeling for ${{needs.set-variables.outputs.release_id}} to ${{ matrix.base-branch }}'
|
||||||
|
body: 'Committing perf results for tracking for the ${{needs.set-variables.outputs.release_id}}'
|
||||||
|
labels: |
|
||||||
|
Skip Changelog
|
||||||
|
Performance
|
||||||
109
.github/workflows/nightly-release.yml
vendored
Normal file
109
.github/workflows/nightly-release.yml
vendored
Normal file
@@ -0,0 +1,109 @@
|
|||||||
|
# **what?**
|
||||||
|
# Nightly releases to GitHub and PyPI. This workflow produces the following outcome:
|
||||||
|
# - generate and validate data for night release (commit SHA, version number, release branch);
|
||||||
|
# - pass data to release workflow;
|
||||||
|
# - night release will be pushed to GitHub as a draft release;
|
||||||
|
# - night build will be pushed to test PyPI;
|
||||||
|
#
|
||||||
|
# **why?**
|
||||||
|
# Ensure an automated and tested release process for nightly builds
|
||||||
|
#
|
||||||
|
# **when?**
|
||||||
|
# This workflow runs on schedule or can be run manually on demand.
|
||||||
|
|
||||||
|
name: Nightly Test Release to GitHub and PyPI
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch: # for manual triggering
|
||||||
|
schedule:
|
||||||
|
- cron: 0 9 * * *
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: write # this is the permission that allows creating a new release
|
||||||
|
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
|
||||||
|
env:
|
||||||
|
RELEASE_BRANCH: "main"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
aggregate-release-data:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
outputs:
|
||||||
|
commit_sha: ${{ steps.resolve-commit-sha.outputs.release_commit }}
|
||||||
|
version_number: ${{ steps.nightly-release-version.outputs.number }}
|
||||||
|
release_branch: ${{ steps.release-branch.outputs.name }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: "Checkout ${{ github.repository }} Branch ${{ env.RELEASE_BRANCH }}"
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
ref: ${{ env.RELEASE_BRANCH }}
|
||||||
|
|
||||||
|
- name: "Resolve Commit To Release"
|
||||||
|
id: resolve-commit-sha
|
||||||
|
run: |
|
||||||
|
commit_sha=$(git rev-parse HEAD)
|
||||||
|
echo "release_commit=$commit_sha" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: "Get Current Version Number"
|
||||||
|
id: version-number-sources
|
||||||
|
run: |
|
||||||
|
current_version=`awk -F"current_version = " '{print $2}' .bumpversion.cfg | tr '\n' ' '`
|
||||||
|
echo "current_version=$current_version" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: "Audit Version And Parse Into Parts"
|
||||||
|
id: semver
|
||||||
|
uses: dbt-labs/actions/parse-semver@v1.1.0
|
||||||
|
with:
|
||||||
|
version: ${{ steps.version-number-sources.outputs.current_version }}
|
||||||
|
|
||||||
|
- name: "Get Current Date"
|
||||||
|
id: current-date
|
||||||
|
run: echo "date=$(date +'%m%d%Y')" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: "Generate Nightly Release Version Number"
|
||||||
|
id: nightly-release-version
|
||||||
|
run: |
|
||||||
|
number="${{ steps.semver.outputs.version }}.dev${{ steps.current-date.outputs.date }}"
|
||||||
|
echo "number=$number" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: "Audit Nightly Release Version And Parse Into Parts"
|
||||||
|
uses: dbt-labs/actions/parse-semver@v1.1.0
|
||||||
|
with:
|
||||||
|
version: ${{ steps.nightly-release-version.outputs.number }}
|
||||||
|
|
||||||
|
- name: "Set Release Branch"
|
||||||
|
id: release-branch
|
||||||
|
run: |
|
||||||
|
echo "name=${{ env.RELEASE_BRANCH }}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
log-outputs-aggregate-release-data:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: [aggregate-release-data]
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: "[DEBUG] Log Outputs"
|
||||||
|
run: |
|
||||||
|
echo commit_sha : ${{ needs.aggregate-release-data.outputs.commit_sha }}
|
||||||
|
echo version_number: ${{ needs.aggregate-release-data.outputs.version_number }}
|
||||||
|
echo release_branch: ${{ needs.aggregate-release-data.outputs.release_branch }}
|
||||||
|
|
||||||
|
release-github-pypi:
|
||||||
|
needs: [aggregate-release-data]
|
||||||
|
|
||||||
|
uses: ./.github/workflows/release.yml
|
||||||
|
with:
|
||||||
|
sha: ${{ needs.aggregate-release-data.outputs.commit_sha }}
|
||||||
|
target_branch: ${{ needs.aggregate-release-data.outputs.release_branch }}
|
||||||
|
version_number: ${{ needs.aggregate-release-data.outputs.version_number }}
|
||||||
|
build_script_path: "scripts/build-dist.sh"
|
||||||
|
env_setup_script_path: "scripts/env-setup.sh"
|
||||||
|
s3_bucket_name: "core-team-artifacts"
|
||||||
|
package_test_command: "dbt --version"
|
||||||
|
test_run: true
|
||||||
|
nightly_release: true
|
||||||
|
secrets: inherit
|
||||||
176
.github/workflows/performance.yml
vendored
176
.github/workflows/performance.yml
vendored
@@ -1,176 +0,0 @@
|
|||||||
name: Performance Regression Tests
|
|
||||||
# Schedule triggers
|
|
||||||
on:
|
|
||||||
# runs twice a day at 10:05am and 10:05pm
|
|
||||||
schedule:
|
|
||||||
- cron: "5 10,22 * * *"
|
|
||||||
# Allows you to run this workflow manually from the Actions tab
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
# checks fmt of runner code
|
|
||||||
# purposefully not a dependency of any other job
|
|
||||||
# will block merging, but not prevent developing
|
|
||||||
fmt:
|
|
||||||
name: Cargo fmt
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- uses: actions-rs/toolchain@v1
|
|
||||||
with:
|
|
||||||
profile: minimal
|
|
||||||
toolchain: stable
|
|
||||||
override: true
|
|
||||||
- run: rustup component add rustfmt
|
|
||||||
- uses: actions-rs/cargo@v1
|
|
||||||
with:
|
|
||||||
command: fmt
|
|
||||||
args: --manifest-path performance/runner/Cargo.toml --all -- --check
|
|
||||||
|
|
||||||
# runs any tests associated with the runner
|
|
||||||
# these tests make sure the runner logic is correct
|
|
||||||
test-runner:
|
|
||||||
name: Test Runner
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
env:
|
|
||||||
# turns errors into warnings
|
|
||||||
RUSTFLAGS: "-D warnings"
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- uses: actions-rs/toolchain@v1
|
|
||||||
with:
|
|
||||||
profile: minimal
|
|
||||||
toolchain: stable
|
|
||||||
override: true
|
|
||||||
- uses: actions-rs/cargo@v1
|
|
||||||
with:
|
|
||||||
command: test
|
|
||||||
args: --manifest-path performance/runner/Cargo.toml
|
|
||||||
|
|
||||||
# build an optimized binary to be used as the runner in later steps
|
|
||||||
build-runner:
|
|
||||||
needs: [test-runner]
|
|
||||||
name: Build Runner
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
env:
|
|
||||||
RUSTFLAGS: "-D warnings"
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- uses: actions-rs/toolchain@v1
|
|
||||||
with:
|
|
||||||
profile: minimal
|
|
||||||
toolchain: stable
|
|
||||||
override: true
|
|
||||||
- uses: actions-rs/cargo@v1
|
|
||||||
with:
|
|
||||||
command: build
|
|
||||||
args: --release --manifest-path performance/runner/Cargo.toml
|
|
||||||
- uses: actions/upload-artifact@v2
|
|
||||||
with:
|
|
||||||
name: runner
|
|
||||||
path: performance/runner/target/release/runner
|
|
||||||
|
|
||||||
# run the performance measurements on the current or default branch
|
|
||||||
measure-dev:
|
|
||||||
needs: [build-runner]
|
|
||||||
name: Measure Dev Branch
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: checkout dev
|
|
||||||
uses: actions/checkout@v2
|
|
||||||
- name: Setup Python
|
|
||||||
uses: actions/setup-python@v2.2.2
|
|
||||||
with:
|
|
||||||
python-version: "3.8"
|
|
||||||
- name: install dbt
|
|
||||||
run: pip install -r dev-requirements.txt -r editable-requirements.txt
|
|
||||||
- name: install hyperfine
|
|
||||||
run: wget https://github.com/sharkdp/hyperfine/releases/download/v1.11.0/hyperfine_1.11.0_amd64.deb && sudo dpkg -i hyperfine_1.11.0_amd64.deb
|
|
||||||
- uses: actions/download-artifact@v2
|
|
||||||
with:
|
|
||||||
name: runner
|
|
||||||
- name: change permissions
|
|
||||||
run: chmod +x ./runner
|
|
||||||
- name: run
|
|
||||||
run: ./runner measure -b dev -p ${{ github.workspace }}/performance/projects/
|
|
||||||
- uses: actions/upload-artifact@v2
|
|
||||||
with:
|
|
||||||
name: dev-results
|
|
||||||
path: performance/results/
|
|
||||||
|
|
||||||
# run the performance measurements on the release branch which we use
|
|
||||||
# as a performance baseline. This part takes by far the longest, so
|
|
||||||
# we do everything we can first so the job fails fast.
|
|
||||||
# -----
|
|
||||||
# we need to checkout dbt twice in this job: once for the baseline dbt
|
|
||||||
# version, and once to get the latest regression testing projects,
|
|
||||||
# metrics, and runner code from the develop or current branch so that
|
|
||||||
# the calculations match for both versions of dbt we are comparing.
|
|
||||||
measure-baseline:
|
|
||||||
needs: [build-runner]
|
|
||||||
name: Measure Baseline Branch
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: checkout latest
|
|
||||||
uses: actions/checkout@v2
|
|
||||||
with:
|
|
||||||
ref: "0.20.latest"
|
|
||||||
- name: Setup Python
|
|
||||||
uses: actions/setup-python@v2.2.2
|
|
||||||
with:
|
|
||||||
python-version: "3.8"
|
|
||||||
- name: move repo up a level
|
|
||||||
run: mkdir ${{ github.workspace }}/../baseline/ && cp -r ${{ github.workspace }} ${{ github.workspace }}/../baseline
|
|
||||||
- name: "[debug] ls new dbt location"
|
|
||||||
run: ls ${{ github.workspace }}/../baseline/dbt/
|
|
||||||
# installation creates egg-links so we have to preserve source
|
|
||||||
- name: install dbt from new location
|
|
||||||
run: cd ${{ github.workspace }}/../baseline/dbt/ && pip install -r dev-requirements.txt -r editable-requirements.txt
|
|
||||||
# checkout the current branch to get all the target projects
|
|
||||||
# this deletes the old checked out code which is why we had to copy before
|
|
||||||
- name: checkout dev
|
|
||||||
uses: actions/checkout@v2
|
|
||||||
- name: install hyperfine
|
|
||||||
run: wget https://github.com/sharkdp/hyperfine/releases/download/v1.11.0/hyperfine_1.11.0_amd64.deb && sudo dpkg -i hyperfine_1.11.0_amd64.deb
|
|
||||||
- uses: actions/download-artifact@v2
|
|
||||||
with:
|
|
||||||
name: runner
|
|
||||||
- name: change permissions
|
|
||||||
run: chmod +x ./runner
|
|
||||||
- name: run runner
|
|
||||||
run: ./runner measure -b baseline -p ${{ github.workspace }}/performance/projects/
|
|
||||||
- uses: actions/upload-artifact@v2
|
|
||||||
with:
|
|
||||||
name: baseline-results
|
|
||||||
path: performance/results/
|
|
||||||
|
|
||||||
# detect regressions on the output generated from measuring
|
|
||||||
# the two branches. Exits with non-zero code if a regression is detected.
|
|
||||||
calculate-regressions:
|
|
||||||
needs: [measure-dev, measure-baseline]
|
|
||||||
name: Compare Results
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/download-artifact@v2
|
|
||||||
with:
|
|
||||||
name: dev-results
|
|
||||||
- uses: actions/download-artifact@v2
|
|
||||||
with:
|
|
||||||
name: baseline-results
|
|
||||||
- name: "[debug] ls result files"
|
|
||||||
run: ls
|
|
||||||
- uses: actions/download-artifact@v2
|
|
||||||
with:
|
|
||||||
name: runner
|
|
||||||
- name: change permissions
|
|
||||||
run: chmod +x ./runner
|
|
||||||
- name: make results directory
|
|
||||||
run: mkdir ./final-output/
|
|
||||||
- name: run calculation
|
|
||||||
run: ./runner calculate -r ./ -o ./final-output/
|
|
||||||
# always attempt to upload the results even if there were regressions found
|
|
||||||
- uses: actions/upload-artifact@v2
|
|
||||||
if: ${{ always() }}
|
|
||||||
with:
|
|
||||||
name: final-calculations
|
|
||||||
path: ./final-output/*
|
|
||||||
31
.github/workflows/release-branch-tests.yml
vendored
Normal file
31
.github/workflows/release-branch-tests.yml
vendored
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
# **what?**
|
||||||
|
# The purpose of this workflow is to trigger CI to run for each
|
||||||
|
# release branch and main branch on a regular cadence. If the CI workflow
|
||||||
|
# fails for a branch, it will post to #dev-core-alerts to raise awareness.
|
||||||
|
|
||||||
|
# **why?**
|
||||||
|
# Ensures release branches and main are always shippable and not broken.
|
||||||
|
# Also, can catch any dependencies shifting beneath us that might
|
||||||
|
# introduce breaking changes (could also impact Cloud).
|
||||||
|
|
||||||
|
# **when?**
|
||||||
|
# Mainly on a schedule of 9:00, 13:00, 18:00 UTC everyday.
|
||||||
|
# Manual trigger can also test on demand
|
||||||
|
|
||||||
|
name: Release branch scheduled testing
|
||||||
|
|
||||||
|
on:
|
||||||
|
schedule:
|
||||||
|
- cron: '0 9,13,18 * * *' # 9:00, 13:00, 18:00 UTC
|
||||||
|
|
||||||
|
workflow_dispatch: # for manual triggering
|
||||||
|
|
||||||
|
# no special access is needed
|
||||||
|
permissions: read-all
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
run_tests:
|
||||||
|
uses: dbt-labs/actions/.github/workflows/release-branch-tests.yml@main
|
||||||
|
with:
|
||||||
|
workflows_to_run: '["main.yml"]'
|
||||||
|
secrets: inherit
|
||||||
27
.github/workflows/release-docker.yml
vendored
27
.github/workflows/release-docker.yml
vendored
@@ -12,6 +12,9 @@
|
|||||||
|
|
||||||
name: Docker release
|
name: Docker release
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
packages: write
|
||||||
|
|
||||||
on:
|
on:
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
inputs:
|
inputs:
|
||||||
@@ -33,14 +36,14 @@ jobs:
|
|||||||
latest: ${{ steps.latest.outputs.latest }}
|
latest: ${{ steps.latest.outputs.latest }}
|
||||||
minor_latest: ${{ steps.latest.outputs.minor_latest }}
|
minor_latest: ${{ steps.latest.outputs.minor_latest }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v1
|
- uses: actions/checkout@v3
|
||||||
- name: Split version
|
- name: Split version
|
||||||
id: version
|
id: version
|
||||||
run: |
|
run: |
|
||||||
IFS="." read -r MAJOR MINOR PATCH <<< ${{ github.event.inputs.version_number }}
|
IFS="." read -r MAJOR MINOR PATCH <<< ${{ github.event.inputs.version_number }}
|
||||||
echo "::set-output name=major::$MAJOR"
|
echo "major=$MAJOR" >> $GITHUB_OUTPUT
|
||||||
echo "::set-output name=minor::$MINOR"
|
echo "minor=$MINOR" >> $GITHUB_OUTPUT
|
||||||
echo "::set-output name=patch::$PATCH"
|
echo "patch=$PATCH" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
- name: Is pkg 'latest'
|
- name: Is pkg 'latest'
|
||||||
id: latest
|
id: latest
|
||||||
@@ -57,7 +60,7 @@ jobs:
|
|||||||
needs: [get_version_meta]
|
needs: [get_version_meta]
|
||||||
steps:
|
steps:
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
uses: docker/setup-buildx-action@v1
|
uses: docker/setup-buildx-action@v2
|
||||||
|
|
||||||
build_and_push:
|
build_and_push:
|
||||||
name: Build images and push to GHCR
|
name: Build images and push to GHCR
|
||||||
@@ -67,18 +70,20 @@ jobs:
|
|||||||
- name: Get docker build arg
|
- name: Get docker build arg
|
||||||
id: build_arg
|
id: build_arg
|
||||||
run: |
|
run: |
|
||||||
echo "::set-output name=build_arg_name::"$(echo ${{ github.event.inputs.package }} | sed 's/\-/_/g')
|
BUILD_ARG_NAME=$(echo ${{ github.event.inputs.package }} | sed 's/\-/_/g')
|
||||||
echo "::set-output name=build_arg_value::"$(echo ${{ github.event.inputs.package }} | sed 's/postgres/core/g')
|
BUILD_ARG_VALUE=$(echo ${{ github.event.inputs.package }} | sed 's/postgres/core/g')
|
||||||
|
echo "build_arg_name=$BUILD_ARG_NAME" >> $GITHUB_OUTPUT
|
||||||
|
echo "build_arg_value=$BUILD_ARG_VALUE" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
- name: Log in to the GHCR
|
- name: Log in to the GHCR
|
||||||
uses: docker/login-action@v1
|
uses: docker/login-action@v2
|
||||||
with:
|
with:
|
||||||
registry: ghcr.io
|
registry: ghcr.io
|
||||||
username: ${{ github.actor }}
|
username: ${{ github.actor }}
|
||||||
password: ${{ secrets.GITHUB_TOKEN }}
|
password: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
- name: Build and push MAJOR.MINOR.PATCH tag
|
- name: Build and push MAJOR.MINOR.PATCH tag
|
||||||
uses: docker/build-push-action@v2
|
uses: docker/build-push-action@v4
|
||||||
with:
|
with:
|
||||||
file: docker/Dockerfile
|
file: docker/Dockerfile
|
||||||
push: True
|
push: True
|
||||||
@@ -89,7 +94,7 @@ jobs:
|
|||||||
ghcr.io/dbt-labs/${{ github.event.inputs.package }}:${{ github.event.inputs.version_number }}
|
ghcr.io/dbt-labs/${{ github.event.inputs.package }}:${{ github.event.inputs.version_number }}
|
||||||
|
|
||||||
- name: Build and push MINOR.latest tag
|
- name: Build and push MINOR.latest tag
|
||||||
uses: docker/build-push-action@v2
|
uses: docker/build-push-action@v4
|
||||||
if: ${{ needs.get_version_meta.outputs.minor_latest == 'True' }}
|
if: ${{ needs.get_version_meta.outputs.minor_latest == 'True' }}
|
||||||
with:
|
with:
|
||||||
file: docker/Dockerfile
|
file: docker/Dockerfile
|
||||||
@@ -101,7 +106,7 @@ jobs:
|
|||||||
ghcr.io/dbt-labs/${{ github.event.inputs.package }}:${{ needs.get_version_meta.outputs.major }}.${{ needs.get_version_meta.outputs.minor }}.latest
|
ghcr.io/dbt-labs/${{ github.event.inputs.package }}:${{ needs.get_version_meta.outputs.major }}.${{ needs.get_version_meta.outputs.minor }}.latest
|
||||||
|
|
||||||
- name: Build and push latest tag
|
- name: Build and push latest tag
|
||||||
uses: docker/build-push-action@v2
|
uses: docker/build-push-action@v4
|
||||||
if: ${{ needs.get_version_meta.outputs.latest == 'True' }}
|
if: ${{ needs.get_version_meta.outputs.latest == 'True' }}
|
||||||
with:
|
with:
|
||||||
file: docker/Dockerfile
|
file: docker/Dockerfile
|
||||||
|
|||||||
338
.github/workflows/release.yml
vendored
338
.github/workflows/release.yml
vendored
@@ -1,199 +1,229 @@
|
|||||||
# **what?**
|
# **what?**
|
||||||
# Take the given commit, run unit tests specifically on that sha, build and
|
# Release workflow provides the following steps:
|
||||||
# package it, and then release to GitHub and PyPi with that specific build
|
# - checkout the given commit;
|
||||||
|
# - validate version in sources and changelog file for given version;
|
||||||
|
# - bump the version and generate a changelog if needed;
|
||||||
|
# - merge all changes to the target branch if needed;
|
||||||
|
# - run unit and integration tests against given commit;
|
||||||
|
# - build and package that SHA;
|
||||||
|
# - release it to GitHub and PyPI with that specific build;
|
||||||
|
#
|
||||||
# **why?**
|
# **why?**
|
||||||
# Ensure an automated and tested release process
|
# Ensure an automated and tested release process
|
||||||
|
#
|
||||||
# **when?**
|
# **when?**
|
||||||
# This will only run manually with a given sha and version
|
# This workflow can be run manually on demand or can be called by other workflows
|
||||||
|
|
||||||
name: Release to GitHub and PyPi
|
name: Release to GitHub and PyPI
|
||||||
|
|
||||||
on:
|
on:
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
inputs:
|
inputs:
|
||||||
sha:
|
sha:
|
||||||
description: 'The last commit sha in the release'
|
description: "The last commit sha in the release"
|
||||||
|
type: string
|
||||||
|
required: true
|
||||||
|
target_branch:
|
||||||
|
description: "The branch to release from"
|
||||||
|
type: string
|
||||||
required: true
|
required: true
|
||||||
version_number:
|
version_number:
|
||||||
description: 'The release version number (i.e. 1.0.0b1)'
|
description: "The release version number (i.e. 1.0.0b1)"
|
||||||
|
type: string
|
||||||
required: true
|
required: true
|
||||||
|
build_script_path:
|
||||||
|
description: "Build script path"
|
||||||
|
type: string
|
||||||
|
default: "scripts/build-dist.sh"
|
||||||
|
required: true
|
||||||
|
env_setup_script_path:
|
||||||
|
description: "Environment setup script path"
|
||||||
|
type: string
|
||||||
|
default: "scripts/env-setup.sh"
|
||||||
|
required: false
|
||||||
|
s3_bucket_name:
|
||||||
|
description: "AWS S3 bucket name"
|
||||||
|
type: string
|
||||||
|
default: "core-team-artifacts"
|
||||||
|
required: true
|
||||||
|
package_test_command:
|
||||||
|
description: "Package test command"
|
||||||
|
type: string
|
||||||
|
default: "dbt --version"
|
||||||
|
required: true
|
||||||
|
test_run:
|
||||||
|
description: "Test run (Publish release as draft)"
|
||||||
|
type: boolean
|
||||||
|
default: true
|
||||||
|
required: false
|
||||||
|
nightly_release:
|
||||||
|
description: "Nightly release to dev environment"
|
||||||
|
type: boolean
|
||||||
|
default: false
|
||||||
|
required: false
|
||||||
|
workflow_call:
|
||||||
|
inputs:
|
||||||
|
sha:
|
||||||
|
description: "The last commit sha in the release"
|
||||||
|
type: string
|
||||||
|
required: true
|
||||||
|
target_branch:
|
||||||
|
description: "The branch to release from"
|
||||||
|
type: string
|
||||||
|
required: true
|
||||||
|
version_number:
|
||||||
|
description: "The release version number (i.e. 1.0.0b1)"
|
||||||
|
type: string
|
||||||
|
required: true
|
||||||
|
build_script_path:
|
||||||
|
description: "Build script path"
|
||||||
|
type: string
|
||||||
|
default: "scripts/build-dist.sh"
|
||||||
|
required: true
|
||||||
|
env_setup_script_path:
|
||||||
|
description: "Environment setup script path"
|
||||||
|
type: string
|
||||||
|
default: "scripts/env-setup.sh"
|
||||||
|
required: false
|
||||||
|
s3_bucket_name:
|
||||||
|
description: "AWS S3 bucket name"
|
||||||
|
type: string
|
||||||
|
default: "core-team-artifacts"
|
||||||
|
required: true
|
||||||
|
package_test_command:
|
||||||
|
description: "Package test command"
|
||||||
|
type: string
|
||||||
|
default: "dbt --version"
|
||||||
|
required: true
|
||||||
|
test_run:
|
||||||
|
description: "Test run (Publish release as draft)"
|
||||||
|
type: boolean
|
||||||
|
default: true
|
||||||
|
required: false
|
||||||
|
nightly_release:
|
||||||
|
description: "Nightly release to dev environment"
|
||||||
|
type: boolean
|
||||||
|
default: false
|
||||||
|
required: false
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: write # this is the permission that allows creating a new release
|
||||||
|
|
||||||
defaults:
|
defaults:
|
||||||
run:
|
run:
|
||||||
shell: bash
|
shell: bash
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
unit:
|
log-inputs:
|
||||||
name: Unit test
|
name: Log Inputs
|
||||||
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
env:
|
|
||||||
TOXENV: "unit"
|
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Check out the repository
|
- name: "[DEBUG] Print Variables"
|
||||||
uses: actions/checkout@v2
|
|
||||||
with:
|
|
||||||
persist-credentials: false
|
|
||||||
ref: ${{ github.event.inputs.sha }}
|
|
||||||
|
|
||||||
- name: Set up Python
|
|
||||||
uses: actions/setup-python@v2
|
|
||||||
with:
|
|
||||||
python-version: 3.8
|
|
||||||
|
|
||||||
- name: Install python dependencies
|
|
||||||
run: |
|
run: |
|
||||||
pip install --user --upgrade pip
|
echo The last commit sha in the release: ${{ inputs.sha }}
|
||||||
pip install tox
|
echo The branch to release from: ${{ inputs.target_branch }}
|
||||||
pip --version
|
echo The release version number: ${{ inputs.version_number }}
|
||||||
tox --version
|
echo Build script path: ${{ inputs.build_script_path }}
|
||||||
|
echo Environment setup script path: ${{ inputs.env_setup_script_path }}
|
||||||
|
echo AWS S3 bucket name: ${{ inputs.s3_bucket_name }}
|
||||||
|
echo Package test command: ${{ inputs.package_test_command }}
|
||||||
|
echo Test run: ${{ inputs.test_run }}
|
||||||
|
echo Nightly release: ${{ inputs.nightly_release }}
|
||||||
|
|
||||||
- name: Run tox
|
bump-version-generate-changelog:
|
||||||
run: tox
|
name: Bump package version, Generate changelog
|
||||||
|
|
||||||
build:
|
uses: dbt-labs/dbt-release/.github/workflows/release-prep.yml@main
|
||||||
name: build packages
|
|
||||||
|
with:
|
||||||
|
sha: ${{ inputs.sha }}
|
||||||
|
version_number: ${{ inputs.version_number }}
|
||||||
|
target_branch: ${{ inputs.target_branch }}
|
||||||
|
env_setup_script_path: ${{ inputs.env_setup_script_path }}
|
||||||
|
test_run: ${{ inputs.test_run }}
|
||||||
|
nightly_release: ${{ inputs.nightly_release }}
|
||||||
|
|
||||||
|
secrets: inherit
|
||||||
|
|
||||||
|
log-outputs-bump-version-generate-changelog:
|
||||||
|
name: "[Log output] Bump package version, Generate changelog"
|
||||||
|
if: ${{ !failure() && !cancelled() }}
|
||||||
|
|
||||||
|
needs: [bump-version-generate-changelog]
|
||||||
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Check out the repository
|
- name: Print variables
|
||||||
uses: actions/checkout@v2
|
run: |
|
||||||
|
echo Final SHA : ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
|
||||||
|
echo Changelog path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}
|
||||||
|
|
||||||
|
build-test-package:
|
||||||
|
name: Build, Test, Package
|
||||||
|
if: ${{ !failure() && !cancelled() }}
|
||||||
|
needs: [bump-version-generate-changelog]
|
||||||
|
|
||||||
|
uses: dbt-labs/dbt-release/.github/workflows/build.yml@main
|
||||||
|
|
||||||
with:
|
with:
|
||||||
persist-credentials: false
|
sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
|
||||||
ref: ${{ github.event.inputs.sha }}
|
version_number: ${{ inputs.version_number }}
|
||||||
|
changelog_path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}
|
||||||
|
build_script_path: ${{ inputs.build_script_path }}
|
||||||
|
s3_bucket_name: ${{ inputs.s3_bucket_name }}
|
||||||
|
package_test_command: ${{ inputs.package_test_command }}
|
||||||
|
test_run: ${{ inputs.test_run }}
|
||||||
|
nightly_release: ${{ inputs.nightly_release }}
|
||||||
|
|
||||||
- name: Set up Python
|
secrets:
|
||||||
uses: actions/setup-python@v2
|
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||||
with:
|
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||||
python-version: 3.8
|
|
||||||
|
|
||||||
- name: Install python dependencies
|
|
||||||
run: |
|
|
||||||
pip install --user --upgrade pip
|
|
||||||
pip install --upgrade setuptools wheel twine check-wheel-contents
|
|
||||||
pip --version
|
|
||||||
|
|
||||||
- name: Build distributions
|
|
||||||
run: ./scripts/build-dist.sh
|
|
||||||
|
|
||||||
- name: Show distributions
|
|
||||||
run: ls -lh dist/
|
|
||||||
|
|
||||||
- name: Check distribution descriptions
|
|
||||||
run: |
|
|
||||||
twine check dist/*
|
|
||||||
|
|
||||||
- name: Check wheel contents
|
|
||||||
run: |
|
|
||||||
check-wheel-contents dist/*.whl --ignore W007,W008
|
|
||||||
|
|
||||||
- uses: actions/upload-artifact@v2
|
|
||||||
with:
|
|
||||||
name: dist
|
|
||||||
path: |
|
|
||||||
dist/
|
|
||||||
!dist/dbt-${{github.event.inputs.version_number}}.tar.gz
|
|
||||||
|
|
||||||
test-build:
|
|
||||||
name: verify packages
|
|
||||||
|
|
||||||
needs: [build, unit]
|
|
||||||
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Set up Python
|
|
||||||
uses: actions/setup-python@v2
|
|
||||||
with:
|
|
||||||
python-version: 3.8
|
|
||||||
|
|
||||||
- name: Install python dependencies
|
|
||||||
run: |
|
|
||||||
pip install --user --upgrade pip
|
|
||||||
pip install --upgrade wheel
|
|
||||||
pip --version
|
|
||||||
|
|
||||||
- uses: actions/download-artifact@v2
|
|
||||||
with:
|
|
||||||
name: dist
|
|
||||||
path: dist/
|
|
||||||
|
|
||||||
- name: Show distributions
|
|
||||||
run: ls -lh dist/
|
|
||||||
|
|
||||||
- name: Install wheel distributions
|
|
||||||
run: |
|
|
||||||
find ./dist/*.whl -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/
|
|
||||||
|
|
||||||
- name: Check wheel distributions
|
|
||||||
run: |
|
|
||||||
dbt --version
|
|
||||||
|
|
||||||
- name: Install source distributions
|
|
||||||
run: |
|
|
||||||
find ./dist/*.gz -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/
|
|
||||||
|
|
||||||
- name: Check source distributions
|
|
||||||
run: |
|
|
||||||
dbt --version
|
|
||||||
|
|
||||||
github-release:
|
github-release:
|
||||||
name: GitHub Release
|
name: GitHub Release
|
||||||
|
if: ${{ !failure() && !cancelled() }}
|
||||||
|
|
||||||
needs: test-build
|
needs: [bump-version-generate-changelog, build-test-package]
|
||||||
|
|
||||||
runs-on: ubuntu-latest
|
uses: dbt-labs/dbt-release/.github/workflows/github-release.yml@main
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/download-artifact@v2
|
|
||||||
with:
|
with:
|
||||||
name: dist
|
sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
|
||||||
path: '.'
|
version_number: ${{ inputs.version_number }}
|
||||||
|
changelog_path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}
|
||||||
# Need to set an output variable because env variables can't be taken as input
|
test_run: ${{ inputs.test_run }}
|
||||||
# This is needed for the next step with releasing to GitHub
|
|
||||||
- name: Find release type
|
|
||||||
id: release_type
|
|
||||||
env:
|
|
||||||
IS_PRERELEASE: ${{ contains(github.event.inputs.version_number, 'rc') || contains(github.event.inputs.version_number, 'b') }}
|
|
||||||
run: |
|
|
||||||
echo ::set-output name=isPrerelease::$IS_PRERELEASE
|
|
||||||
|
|
||||||
- name: Creating GitHub Release
|
|
||||||
uses: softprops/action-gh-release@v1
|
|
||||||
with:
|
|
||||||
name: dbt-core v${{github.event.inputs.version_number}}
|
|
||||||
tag_name: v${{github.event.inputs.version_number}}
|
|
||||||
prerelease: ${{ steps.release_type.outputs.isPrerelease }}
|
|
||||||
target_commitish: ${{github.event.inputs.sha}}
|
|
||||||
body: |
|
|
||||||
[Release notes](https://github.com/dbt-labs/dbt-core/blob/main/CHANGELOG.md)
|
|
||||||
files: |
|
|
||||||
dbt_postgres-${{github.event.inputs.version_number}}-py3-none-any.whl
|
|
||||||
dbt_core-${{github.event.inputs.version_number}}-py3-none-any.whl
|
|
||||||
dbt-postgres-${{github.event.inputs.version_number}}.tar.gz
|
|
||||||
dbt-core-${{github.event.inputs.version_number}}.tar.gz
|
|
||||||
|
|
||||||
pypi-release:
|
pypi-release:
|
||||||
name: Pypi release
|
name: PyPI Release
|
||||||
|
|
||||||
runs-on: ubuntu-latest
|
needs: [github-release]
|
||||||
|
|
||||||
needs: github-release
|
uses: dbt-labs/dbt-release/.github/workflows/pypi-release.yml@main
|
||||||
|
|
||||||
environment: PypiProd
|
|
||||||
steps:
|
|
||||||
- uses: actions/download-artifact@v2
|
|
||||||
with:
|
with:
|
||||||
name: dist
|
version_number: ${{ inputs.version_number }}
|
||||||
path: 'dist'
|
test_run: ${{ inputs.test_run }}
|
||||||
|
|
||||||
- name: Publish distribution to PyPI
|
secrets:
|
||||||
uses: pypa/gh-action-pypi-publish@v1.4.2
|
PYPI_API_TOKEN: ${{ secrets.PYPI_API_TOKEN }}
|
||||||
|
TEST_PYPI_API_TOKEN: ${{ secrets.TEST_PYPI_API_TOKEN }}
|
||||||
|
|
||||||
|
slack-notification:
|
||||||
|
name: Slack Notification
|
||||||
|
if: ${{ failure() && (!inputs.test_run || inputs.nightly_release) }}
|
||||||
|
|
||||||
|
needs:
|
||||||
|
[
|
||||||
|
bump-version-generate-changelog,
|
||||||
|
build-test-package,
|
||||||
|
github-release,
|
||||||
|
pypi-release,
|
||||||
|
]
|
||||||
|
|
||||||
|
uses: dbt-labs/dbt-release/.github/workflows/slack-post-notification.yml@main
|
||||||
with:
|
with:
|
||||||
password: ${{ secrets.PYPI_API_TOKEN }}
|
status: "failure"
|
||||||
|
|
||||||
|
secrets:
|
||||||
|
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_DEV_CORE_ALERTS }}
|
||||||
|
|||||||
11
.github/workflows/schema-check.yml
vendored
11
.github/workflows/schema-check.yml
vendored
@@ -21,6 +21,9 @@ on:
|
|||||||
- "*.latest"
|
- "*.latest"
|
||||||
- "releases/*"
|
- "releases/*"
|
||||||
|
|
||||||
|
# no special access is needed
|
||||||
|
permissions: read-all
|
||||||
|
|
||||||
env:
|
env:
|
||||||
LATEST_SCHEMA_PATH: ${{ github.workspace }}/new_schemas
|
LATEST_SCHEMA_PATH: ${{ github.workspace }}/new_schemas
|
||||||
SCHEMA_DIFF_ARTIFACT: ${{ github.workspace }}//schema_schanges.txt
|
SCHEMA_DIFF_ARTIFACT: ${{ github.workspace }}//schema_schanges.txt
|
||||||
@@ -34,17 +37,17 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Set up Python
|
- name: Set up Python
|
||||||
uses: actions/setup-python@v2
|
uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: 3.8
|
python-version: 3.8
|
||||||
|
|
||||||
- name: Checkout dbt repo
|
- name: Checkout dbt repo
|
||||||
uses: actions/checkout@v2.3.4
|
uses: actions/checkout@v3
|
||||||
with:
|
with:
|
||||||
path: ${{ env.DBT_REPO_DIRECTORY }}
|
path: ${{ env.DBT_REPO_DIRECTORY }}
|
||||||
|
|
||||||
- name: Checkout schemas.getdbt.com repo
|
- name: Checkout schemas.getdbt.com repo
|
||||||
uses: actions/checkout@v2.3.4
|
uses: actions/checkout@v3
|
||||||
with:
|
with:
|
||||||
repository: dbt-labs/schemas.getdbt.com
|
repository: dbt-labs/schemas.getdbt.com
|
||||||
ref: 'main'
|
ref: 'main'
|
||||||
@@ -80,7 +83,7 @@ jobs:
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
- name: Upload schema diff
|
- name: Upload schema diff
|
||||||
uses: actions/upload-artifact@v2.2.4
|
uses: actions/upload-artifact@v3
|
||||||
if: ${{ failure() }}
|
if: ${{ failure() }}
|
||||||
with:
|
with:
|
||||||
name: 'schema_schanges.txt'
|
name: 'schema_schanges.txt'
|
||||||
|
|||||||
16
.github/workflows/stale.yml
vendored
16
.github/workflows/stale.yml
vendored
@@ -3,16 +3,10 @@ on:
|
|||||||
schedule:
|
schedule:
|
||||||
- cron: "30 1 * * *"
|
- cron: "30 1 * * *"
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
issues: write
|
||||||
|
pull-requests: write
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
stale:
|
stale:
|
||||||
runs-on: ubuntu-latest
|
uses: dbt-labs/actions/.github/workflows/stale-bot-matrix.yml@main
|
||||||
steps:
|
|
||||||
# pinned at v4 (https://github.com/actions/stale/releases/tag/v4.0.0)
|
|
||||||
- uses: actions/stale@cdf15f641adb27a71842045a94023bef6945e3aa
|
|
||||||
with:
|
|
||||||
stale-issue-message: "This issue has been marked as Stale because it has been open for 180 days with no activity. If you would like the issue to remain open, please remove the stale label or comment on the issue, or it will be closed in 7 days."
|
|
||||||
stale-pr-message: "This PR has been marked as Stale because it has been open for 180 days with no activity. If you would like the PR to remain open, please remove the stale label or comment on the PR, or it will be closed in 7 days."
|
|
||||||
# mark issues/PRs stale when they haven't seen activity in 180 days
|
|
||||||
days-before-stale: 180
|
|
||||||
# ignore checking issues with the following labels
|
|
||||||
exempt-issue-labels: "epic,discussion"
|
|
||||||
|
|||||||
@@ -18,11 +18,41 @@ on:
|
|||||||
|
|
||||||
permissions: read-all
|
permissions: read-all
|
||||||
|
|
||||||
|
# top-level adjustments can be made here
|
||||||
|
env:
|
||||||
|
# number of parallel processes to spawn for python testing
|
||||||
|
PYTHON_INTEGRATION_TEST_WORKERS: ${{ vars.PYTHON_INTEGRATION_TEST_WORKERS }}
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
|
integration-metadata:
|
||||||
|
name: integration test metadata generation
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
split-groups: ${{ steps.generate-split-groups.outputs.split-groups }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: generate split-groups
|
||||||
|
id: generate-split-groups
|
||||||
|
run: |
|
||||||
|
MATRIX_JSON="["
|
||||||
|
for B in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
|
||||||
|
MATRIX_JSON+=$(sed 's/^/"/;s/$/"/' <<< "${B}")
|
||||||
|
done
|
||||||
|
MATRIX_JSON="${MATRIX_JSON//\"\"/\", \"}"
|
||||||
|
MATRIX_JSON+="]"
|
||||||
|
echo "split-groups=${MATRIX_JSON}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
# run the performance measurements on the current or default branch
|
# run the performance measurements on the current or default branch
|
||||||
test-schema:
|
test-schema:
|
||||||
name: Test Log Schema
|
name: Test Log Schema
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-20.04
|
||||||
|
timeout-minutes: 30
|
||||||
|
needs:
|
||||||
|
- integration-metadata
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
|
||||||
env:
|
env:
|
||||||
# turns warnings into errors
|
# turns warnings into errors
|
||||||
RUSTFLAGS: "-D warnings"
|
RUSTFLAGS: "-D warnings"
|
||||||
@@ -30,23 +60,24 @@ jobs:
|
|||||||
LOG_DIR: "/home/runner/work/dbt-core/dbt-core/logs"
|
LOG_DIR: "/home/runner/work/dbt-core/dbt-core/logs"
|
||||||
# tells integration tests to output into json format
|
# tells integration tests to output into json format
|
||||||
DBT_LOG_FORMAT: "json"
|
DBT_LOG_FORMAT: "json"
|
||||||
|
# tell eventmgr to convert logging events into bytes
|
||||||
|
DBT_TEST_BINARY_SERIALIZATION: "true"
|
||||||
|
# Additional test users
|
||||||
|
DBT_TEST_USER_1: dbt_test_user_1
|
||||||
|
DBT_TEST_USER_2: dbt_test_user_2
|
||||||
|
DBT_TEST_USER_3: dbt_test_user_3
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: checkout dev
|
- name: checkout dev
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
with:
|
with:
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
|
|
||||||
- name: Setup Python
|
- name: Setup Python
|
||||||
uses: actions/setup-python@v2.2.2
|
uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: "3.8"
|
python-version: "3.8"
|
||||||
|
|
||||||
- uses: actions-rs/toolchain@v1
|
|
||||||
with:
|
|
||||||
profile: minimal
|
|
||||||
toolchain: stable
|
|
||||||
override: true
|
|
||||||
|
|
||||||
- name: Install python dependencies
|
- name: Install python dependencies
|
||||||
run: |
|
run: |
|
||||||
pip install --user --upgrade pip
|
pip install --user --upgrade pip
|
||||||
@@ -64,10 +95,14 @@ jobs:
|
|||||||
# we actually care if these pass, because the normal test run doesn't usually include many json log outputs
|
# we actually care if these pass, because the normal test run doesn't usually include many json log outputs
|
||||||
- name: Run integration tests
|
- name: Run integration tests
|
||||||
run: tox -e integration -- -nauto
|
run: tox -e integration -- -nauto
|
||||||
|
env:
|
||||||
|
PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}
|
||||||
|
|
||||||
# apply our schema tests to every log event from the previous step
|
test-schema-report:
|
||||||
# skips any output that isn't valid json
|
name: Log Schema Test Suite
|
||||||
- uses: actions-rs/cargo@v1
|
runs-on: ubuntu-latest
|
||||||
with:
|
needs: test-schema
|
||||||
command: run
|
steps:
|
||||||
args: --manifest-path test/interop/log_parsing/Cargo.toml
|
- name: "[Notification] Log test suite passes"
|
||||||
|
run: |
|
||||||
|
echo "::notice title="Log test suite passes""
|
||||||
|
|||||||
155
.github/workflows/test-repeater.yml
vendored
Normal file
155
.github/workflows/test-repeater.yml
vendored
Normal file
@@ -0,0 +1,155 @@
|
|||||||
|
# **what?**
|
||||||
|
# This workflow will test all test(s) at the input path given number of times to determine if it's flaky or not. You can test with any supported OS/Python combination.
|
||||||
|
# This is batched in 10 to allow more test iterations faster.
|
||||||
|
|
||||||
|
# **why?**
|
||||||
|
# Testing if a test is flaky and if a previously flaky test has been fixed. This allows easy testing on supported python versions and OS combinations.
|
||||||
|
|
||||||
|
# **when?**
|
||||||
|
# This is triggered manually from dbt-core.
|
||||||
|
|
||||||
|
name: Flaky Tester
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
branch:
|
||||||
|
description: 'Branch to check out'
|
||||||
|
type: string
|
||||||
|
required: true
|
||||||
|
default: 'main'
|
||||||
|
test_path:
|
||||||
|
description: 'Path to single test to run (ex: tests/functional/retry/test_retry.py::TestRetry::test_fail_fast)'
|
||||||
|
type: string
|
||||||
|
required: true
|
||||||
|
default: 'tests/functional/...'
|
||||||
|
python_version:
|
||||||
|
description: 'Version of Python to Test Against'
|
||||||
|
type: choice
|
||||||
|
options:
|
||||||
|
- '3.8'
|
||||||
|
- '3.9'
|
||||||
|
- '3.10'
|
||||||
|
- '3.11'
|
||||||
|
os:
|
||||||
|
description: 'OS to run test in'
|
||||||
|
type: choice
|
||||||
|
options:
|
||||||
|
- 'ubuntu-latest'
|
||||||
|
- 'macos-latest'
|
||||||
|
- 'windows-latest'
|
||||||
|
num_runs_per_batch:
|
||||||
|
description: 'Max number of times to run the test per batch. We always run 10 batches.'
|
||||||
|
type: number
|
||||||
|
required: true
|
||||||
|
default: '50'
|
||||||
|
|
||||||
|
permissions: read-all
|
||||||
|
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
debug:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: "[DEBUG] Output Inputs"
|
||||||
|
run: |
|
||||||
|
echo "Branch: ${{ inputs.branch }}"
|
||||||
|
echo "test_path: ${{ inputs.test_path }}"
|
||||||
|
echo "python_version: ${{ inputs.python_version }}"
|
||||||
|
echo "os: ${{ inputs.os }}"
|
||||||
|
echo "num_runs_per_batch: ${{ inputs.num_runs_per_batch }}"
|
||||||
|
|
||||||
|
pytest:
|
||||||
|
runs-on: ${{ inputs.os }}
|
||||||
|
strategy:
|
||||||
|
# run all batches, even if one fails. This informs how flaky the test may be.
|
||||||
|
fail-fast: false
|
||||||
|
# using a matrix to speed up the jobs since the matrix will run in parallel when runners are available
|
||||||
|
matrix:
|
||||||
|
batch: ["1", "2", "3", "4", "5", "6", "7", "8", "9", "10"]
|
||||||
|
env:
|
||||||
|
PYTEST_ADDOPTS: "-v --color=yes -n4 --csv integration_results.csv"
|
||||||
|
DBT_TEST_USER_1: dbt_test_user_1
|
||||||
|
DBT_TEST_USER_2: dbt_test_user_2
|
||||||
|
DBT_TEST_USER_3: dbt_test_user_3
|
||||||
|
DD_CIVISIBILITY_AGENTLESS_ENABLED: true
|
||||||
|
DD_API_KEY: ${{ secrets.DATADOG_API_KEY }}
|
||||||
|
DD_SITE: datadoghq.com
|
||||||
|
DD_ENV: ci
|
||||||
|
DD_SERVICE: ${{ github.event.repository.name }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: "Checkout code"
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
ref: ${{ inputs.branch }}
|
||||||
|
|
||||||
|
- name: "Setup Python"
|
||||||
|
uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: "${{ inputs.python_version }}"
|
||||||
|
|
||||||
|
- name: "Setup Dev Environment"
|
||||||
|
run: make dev
|
||||||
|
|
||||||
|
- name: "Set up postgres (linux)"
|
||||||
|
if: inputs.os == 'ubuntu-latest'
|
||||||
|
run: make setup-db
|
||||||
|
|
||||||
|
# mac and windows don't use make due to limitations with docker with those runners in GitHub
|
||||||
|
- name: "Set up postgres (macos)"
|
||||||
|
if: inputs.os == 'macos-latest'
|
||||||
|
uses: ./.github/actions/setup-postgres-macos
|
||||||
|
|
||||||
|
- name: "Set up postgres (windows)"
|
||||||
|
if: inputs.os == 'windows-latest'
|
||||||
|
uses: ./.github/actions/setup-postgres-windows
|
||||||
|
|
||||||
|
- name: "Test Command"
|
||||||
|
id: command
|
||||||
|
run: |
|
||||||
|
test_command="python -m pytest ${{ inputs.test_path }}"
|
||||||
|
echo "test_command=$test_command" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: "Run test ${{ inputs.num_runs_per_batch }} times"
|
||||||
|
id: pytest
|
||||||
|
run: |
|
||||||
|
set +e
|
||||||
|
for ((i=1; i<=${{ inputs.num_runs_per_batch }}; i++))
|
||||||
|
do
|
||||||
|
echo "Running pytest iteration $i..."
|
||||||
|
python -m pytest --ddtrace ${{ inputs.test_path }}
|
||||||
|
exit_code=$?
|
||||||
|
|
||||||
|
if [[ $exit_code -eq 0 ]]; then
|
||||||
|
success=$((success + 1))
|
||||||
|
echo "Iteration $i: Success"
|
||||||
|
else
|
||||||
|
failure=$((failure + 1))
|
||||||
|
echo "Iteration $i: Failure"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo
|
||||||
|
echo "==========================="
|
||||||
|
echo "Successful runs: $success"
|
||||||
|
echo "Failed runs: $failure"
|
||||||
|
echo "==========================="
|
||||||
|
echo
|
||||||
|
done
|
||||||
|
|
||||||
|
echo "failure=$failure" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: "Success and Failure Summary: ${{ inputs.os }}/Python ${{ inputs.python_version }}"
|
||||||
|
run: |
|
||||||
|
echo "Batch: ${{ matrix.batch }}"
|
||||||
|
echo "Successful runs: ${{ steps.pytest.outputs.success }}"
|
||||||
|
echo "Failed runs: ${{ steps.pytest.outputs.failure }}"
|
||||||
|
|
||||||
|
- name: "Error for Failures"
|
||||||
|
if: ${{ steps.pytest.outputs.failure }}
|
||||||
|
run: |
|
||||||
|
echo "Batch ${{ matrix.batch }} failed ${{ steps.pytest.outputs.failure }} of ${{ inputs.num_runs_per_batch }} tests"
|
||||||
|
exit 1
|
||||||
31
.github/workflows/triage-labels.yml
vendored
Normal file
31
.github/workflows/triage-labels.yml
vendored
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
# **what?**
|
||||||
|
# When the core team triages, we sometimes need more information from the issue creator. In
|
||||||
|
# those cases we remove the `triage` label and add the `awaiting_response` label. Once we
|
||||||
|
# recieve a response in the form of a comment, we want the `awaiting_response` label removed
|
||||||
|
# in favor of the `triage` label so we are aware that the issue needs action.
|
||||||
|
|
||||||
|
# **why?**
|
||||||
|
# To help with out team triage issue tracking
|
||||||
|
|
||||||
|
# **when?**
|
||||||
|
# This will run when a comment is added to an issue and that issue has to `awaiting_response` label.
|
||||||
|
|
||||||
|
name: Update Triage Label
|
||||||
|
|
||||||
|
on: issue_comment
|
||||||
|
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
issues: write
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
triage_label:
|
||||||
|
if: contains(github.event.issue.labels.*.name, 'awaiting_response')
|
||||||
|
uses: dbt-labs/actions/.github/workflows/swap-labels.yml@main
|
||||||
|
with:
|
||||||
|
add_label: "triage"
|
||||||
|
remove_label: "awaiting_response"
|
||||||
|
secrets: inherit
|
||||||
101
.github/workflows/version-bump.yml
vendored
101
.github/workflows/version-bump.yml
vendored
@@ -1,18 +1,15 @@
|
|||||||
# **what?**
|
# **what?**
|
||||||
# This workflow will take a version number and a dry run flag. With that
|
# This workflow will take the new version number to bump to. With that
|
||||||
# it will run versionbump to update the version number everywhere in the
|
# it will run versionbump to update the version number everywhere in the
|
||||||
# code base and then generate an update Docker requirements file. If this
|
# code base and then run changie to create the corresponding changelog.
|
||||||
# is a dry run, a draft PR will open with the changes. If this isn't a dry
|
# A PR will be created with the changes that can be reviewed before committing.
|
||||||
# run, the changes will be committed to the branch this is run on.
|
|
||||||
|
|
||||||
# **why?**
|
# **why?**
|
||||||
# This is to aid in releasing dbt and making sure we have updated
|
# This is to aid in releasing dbt and making sure we have updated
|
||||||
# the versions and Docker requirements in all places.
|
# the version in all places and generated the changelog.
|
||||||
|
|
||||||
# **when?**
|
# **when?**
|
||||||
# This is triggered either manually OR
|
# This is triggered manually
|
||||||
# from the repository_dispatch event "version-bump" which is sent from
|
|
||||||
# the dbt-release repo Action
|
|
||||||
|
|
||||||
name: Version Bump
|
name: Version Bump
|
||||||
|
|
||||||
@@ -20,90 +17,12 @@ on:
|
|||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
inputs:
|
inputs:
|
||||||
version_number:
|
version_number:
|
||||||
description: 'The version number to bump to'
|
description: 'The version number to bump to (ex. 1.2.0, 1.3.0b1)'
|
||||||
required: true
|
required: true
|
||||||
is_dry_run:
|
|
||||||
description: 'Creates a draft PR to allow testing instead of committing to a branch'
|
|
||||||
required: true
|
|
||||||
default: 'true'
|
|
||||||
repository_dispatch:
|
|
||||||
types: [version-bump]
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
bump:
|
version_bump_and_changie:
|
||||||
runs-on: ubuntu-latest
|
uses: dbt-labs/actions/.github/workflows/version-bump.yml@main
|
||||||
steps:
|
|
||||||
- name: Check out the repository
|
|
||||||
uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Set version and dry run values
|
|
||||||
id: variables
|
|
||||||
env:
|
|
||||||
VERSION_NUMBER: "${{ github.event.client_payload.version_number == '' && github.event.inputs.version_number || github.event.client_payload.version_number }}"
|
|
||||||
IS_DRY_RUN: "${{ github.event.client_payload.is_dry_run == '' && github.event.inputs.is_dry_run || github.event.client_payload.is_dry_run }}"
|
|
||||||
run: |
|
|
||||||
echo Repository dispatch event version: ${{ github.event.client_payload.version_number }}
|
|
||||||
echo Repository dispatch event dry run: ${{ github.event.client_payload.is_dry_run }}
|
|
||||||
echo Workflow dispatch event version: ${{ github.event.inputs.version_number }}
|
|
||||||
echo Workflow dispatch event dry run: ${{ github.event.inputs.is_dry_run }}
|
|
||||||
echo ::set-output name=VERSION_NUMBER::$VERSION_NUMBER
|
|
||||||
echo ::set-output name=IS_DRY_RUN::$IS_DRY_RUN
|
|
||||||
|
|
||||||
- uses: actions/setup-python@v2
|
|
||||||
with:
|
with:
|
||||||
python-version: "3.8"
|
version_number: ${{ inputs.version_number }}
|
||||||
|
secrets: inherit # ok since what we are calling is internally maintained
|
||||||
- name: Install python dependencies
|
|
||||||
run: |
|
|
||||||
python3 -m venv env
|
|
||||||
source env/bin/activate
|
|
||||||
pip install --upgrade pip
|
|
||||||
|
|
||||||
- name: Create PR branch
|
|
||||||
if: ${{ steps.variables.outputs.IS_DRY_RUN == 'true' }}
|
|
||||||
run: |
|
|
||||||
git checkout -b bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_$GITHUB_RUN_ID
|
|
||||||
git push origin bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_$GITHUB_RUN_ID
|
|
||||||
git branch --set-upstream-to=origin/bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_$GITHUB_RUN_ID bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_$GITHUB_RUN_ID
|
|
||||||
|
|
||||||
# - name: Generate Docker requirements
|
|
||||||
# run: |
|
|
||||||
# source env/bin/activate
|
|
||||||
# pip install -r requirements.txt
|
|
||||||
# pip freeze -l > docker/requirements/requirements.txt
|
|
||||||
# git status
|
|
||||||
|
|
||||||
- name: Bump version
|
|
||||||
run: |
|
|
||||||
source env/bin/activate
|
|
||||||
pip install -r dev-requirements.txt
|
|
||||||
env/bin/bumpversion --allow-dirty --new-version ${{steps.variables.outputs.VERSION_NUMBER}} major
|
|
||||||
git status
|
|
||||||
|
|
||||||
- name: Commit version bump directly
|
|
||||||
uses: EndBug/add-and-commit@v7
|
|
||||||
if: ${{ steps.variables.outputs.IS_DRY_RUN == 'false' }}
|
|
||||||
with:
|
|
||||||
author_name: 'Github Build Bot'
|
|
||||||
author_email: 'buildbot@fishtownanalytics.com'
|
|
||||||
message: 'Bumping version to ${{steps.variables.outputs.VERSION_NUMBER}}'
|
|
||||||
|
|
||||||
- name: Commit version bump to branch
|
|
||||||
uses: EndBug/add-and-commit@v7
|
|
||||||
if: ${{ steps.variables.outputs.IS_DRY_RUN == 'true' }}
|
|
||||||
with:
|
|
||||||
author_name: 'Github Build Bot'
|
|
||||||
author_email: 'buildbot@fishtownanalytics.com'
|
|
||||||
message: 'Bumping version to ${{steps.variables.outputs.VERSION_NUMBER}}'
|
|
||||||
branch: 'bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_${{GITHUB.RUN_ID}}'
|
|
||||||
push: 'origin origin/bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_${{GITHUB.RUN_ID}}'
|
|
||||||
|
|
||||||
- name: Create Pull Request
|
|
||||||
uses: peter-evans/create-pull-request@v3
|
|
||||||
if: ${{ steps.variables.outputs.IS_DRY_RUN == 'true' }}
|
|
||||||
with:
|
|
||||||
author: 'Github Build Bot <buildbot@fishtownanalytics.com>'
|
|
||||||
draft: true
|
|
||||||
base: ${{github.ref}}
|
|
||||||
title: 'Bumping version to ${{steps.variables.outputs.VERSION_NUMBER}}'
|
|
||||||
branch: 'bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_${{GITHUB.RUN_ID}}'
|
|
||||||
|
|||||||
12
.gitignore
vendored
12
.gitignore
vendored
@@ -11,6 +11,8 @@ __pycache__/
|
|||||||
env*/
|
env*/
|
||||||
dbt_env/
|
dbt_env/
|
||||||
build/
|
build/
|
||||||
|
!tests/functional/build
|
||||||
|
!core/dbt/docs/build
|
||||||
develop-eggs/
|
develop-eggs/
|
||||||
dist/
|
dist/
|
||||||
downloads/
|
downloads/
|
||||||
@@ -24,8 +26,11 @@ var/
|
|||||||
*.egg-info/
|
*.egg-info/
|
||||||
.installed.cfg
|
.installed.cfg
|
||||||
*.egg
|
*.egg
|
||||||
*.mypy_cache/
|
.mypy_cache/
|
||||||
|
.dmypy.json
|
||||||
logs/
|
logs/
|
||||||
|
.user.yml
|
||||||
|
profiles.yml
|
||||||
|
|
||||||
# PyInstaller
|
# PyInstaller
|
||||||
# Usually these files are written by a python script from a template
|
# Usually these files are written by a python script from a template
|
||||||
@@ -49,6 +54,7 @@ coverage.xml
|
|||||||
*,cover
|
*,cover
|
||||||
.hypothesis/
|
.hypothesis/
|
||||||
test.env
|
test.env
|
||||||
|
makefile.test.env
|
||||||
*.pytest_cache/
|
*.pytest_cache/
|
||||||
|
|
||||||
|
|
||||||
@@ -95,3 +101,7 @@ venv/
|
|||||||
|
|
||||||
# vscode
|
# vscode
|
||||||
.vscode/
|
.vscode/
|
||||||
|
*.code-workspace
|
||||||
|
|
||||||
|
# poetry
|
||||||
|
poetry.lock
|
||||||
|
|||||||
@@ -1,12 +1,11 @@
|
|||||||
# Configuration for pre-commit hooks (see https://pre-commit.com/).
|
# Configuration for pre-commit hooks (see https://pre-commit.com/).
|
||||||
# Eventually the hooks described here will be run as tests before merging each PR.
|
# Eventually the hooks described here will be run as tests before merging each PR.
|
||||||
|
|
||||||
# TODO: remove global exclusion of tests when testing overhaul is complete
|
exclude: ^(core/dbt/docs/build/|core/dbt/events/types_pb2.py)
|
||||||
exclude: ^test/
|
|
||||||
|
|
||||||
# Force all unspecified python hooks to run python 3.8
|
# Force all unspecified python hooks to run python 3.8
|
||||||
default_language_version:
|
default_language_version:
|
||||||
python: python3.8
|
python: python3
|
||||||
|
|
||||||
repos:
|
repos:
|
||||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||||
@@ -21,21 +20,16 @@ repos:
|
|||||||
- "markdown"
|
- "markdown"
|
||||||
- id: check-case-conflict
|
- id: check-case-conflict
|
||||||
- repo: https://github.com/psf/black
|
- repo: https://github.com/psf/black
|
||||||
rev: 21.12b0
|
rev: 22.3.0
|
||||||
hooks:
|
hooks:
|
||||||
- id: black
|
- id: black
|
||||||
args:
|
|
||||||
- "--line-length=99"
|
|
||||||
- "--target-version=py38"
|
|
||||||
- id: black
|
- id: black
|
||||||
alias: black-check
|
alias: black-check
|
||||||
stages: [manual]
|
stages: [manual]
|
||||||
args:
|
args:
|
||||||
- "--line-length=99"
|
|
||||||
- "--target-version=py38"
|
|
||||||
- "--check"
|
- "--check"
|
||||||
- "--diff"
|
- "--diff"
|
||||||
- repo: https://gitlab.com/pycqa/flake8
|
- repo: https://github.com/pycqa/flake8
|
||||||
rev: 4.0.1
|
rev: 4.0.1
|
||||||
hooks:
|
hooks:
|
||||||
- id: flake8
|
- id: flake8
|
||||||
@@ -43,7 +37,7 @@ repos:
|
|||||||
alias: flake8-check
|
alias: flake8-check
|
||||||
stages: [manual]
|
stages: [manual]
|
||||||
- repo: https://github.com/pre-commit/mirrors-mypy
|
- repo: https://github.com/pre-commit/mirrors-mypy
|
||||||
rev: v0.782
|
rev: v1.4.0
|
||||||
hooks:
|
hooks:
|
||||||
- id: mypy
|
- id: mypy
|
||||||
# N.B.: Mypy is... a bit fragile.
|
# N.B.: Mypy is... a bit fragile.
|
||||||
|
|||||||
310
CHANGELOG.md
310
CHANGELOG.md
@@ -3,312 +3,20 @@
|
|||||||
- This file provides a full account of all changes to `dbt-core` and `dbt-postgres`
|
- This file provides a full account of all changes to `dbt-core` and `dbt-postgres`
|
||||||
- Changes are listed under the (pre)release in which they first appear. Subsequent releases include changes from previous releases.
|
- Changes are listed under the (pre)release in which they first appear. Subsequent releases include changes from previous releases.
|
||||||
- "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
|
- "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
|
||||||
- Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](CONTRIBUTING.md)
|
- Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)
|
||||||
|
|
||||||
|
|
||||||
## dbt-core 1.1.0 (TBD)
|
|
||||||
|
|
||||||
### Features
|
|
||||||
- Added Support for Semantic Versioning ([#4644](https://github.com/dbt-labs/dbt-core/pull/4644))
|
|
||||||
- New Dockerfile to support specific db adapters and platforms. See docker/README.md for details ([#4495](https://github.com/dbt-labs/dbt-core/issues/4495), [#4487](https://github.com/dbt-labs/dbt-core/pull/4487))
|
|
||||||
- Allow unique_key to take a list ([#2479](https://github.com/dbt-labs/dbt-core/issues/2479), [#4618](https://github.com/dbt-labs/dbt-core/pull/4618))
|
|
||||||
- Add `--quiet` global flag and `print` Jinja function ([#3451](https://github.com/dbt-labs/dbt-core/issues/3451), [#4701](https://github.com/dbt-labs/dbt-core/pull/4701))
|
|
||||||
|
|
||||||
### Fixes
|
|
||||||
- User wasn't asked for permission to overwite a profile entry when running init inside an existing project ([#4375](https://github.com/dbt-labs/dbt-core/issues/4375), [#4447](https://github.com/dbt-labs/dbt-core/pull/4447))
|
|
||||||
- Add project name validation to `dbt init` ([#4490](https://github.com/dbt-labs/dbt-core/issues/4490),[#4536](https://github.com/dbt-labs/dbt-core/pull/4536))
|
|
||||||
- Allow override of string and numeric types for adapters. ([#4603](https://github.com/dbt-labs/dbt-core/issues/4603))
|
|
||||||
- A change in secret environment variables won't trigger a full reparse [#4650](https://github.com/dbt-labs/dbt-core/issues/4650) [4665](https://github.com/dbt-labs/dbt-core/pull/4665)
|
|
||||||
- Fix misspellings and typos in docstrings ([#4545](https://github.com/dbt-labs/dbt-core/pull/4545))
|
|
||||||
|
|
||||||
### Under the hood
|
|
||||||
- Testing cleanup ([#4496](https://github.com/dbt-labs/dbt-core/pull/4496), [#4509](https://github.com/dbt-labs/dbt-core/pull/4509))
|
|
||||||
- Clean up test deprecation warnings ([#3988](https://github.com/dbt-labs/dbt-core/issue/3988), [#4556](https://github.com/dbt-labs/dbt-core/pull/4556))
|
|
||||||
- Use mashumaro for serialization in event logging ([#4504](https://github.com/dbt-labs/dbt-core/issues/4504), [#4505](https://github.com/dbt-labs/dbt-core/pull/4505))
|
|
||||||
- Drop support for Python 3.7.0 + 3.7.1 ([#4584](https://github.com/dbt-labs/dbt-core/issues/4584), [#4585](https://github.com/dbt-labs/dbt-core/pull/4585), [#4643](https://github.com/dbt-labs/dbt-core/pull/4643))
|
|
||||||
- Re-format codebase (except tests) using pre-commit hooks ([#3195](https://github.com/dbt-labs/dbt-core/issues/3195), [#4697](https://github.com/dbt-labs/dbt-core/pull/4697))
|
|
||||||
- Add deps module README ([#4686](https://github.com/dbt-labs/dbt-core/pull/4686/))
|
|
||||||
- Initial conversion of tests to pytest ([#4690](https://github.com/dbt-labs/dbt-core/issues/4690), [#4691](https://github.com/dbt-labs/dbt-core/pull/4691))
|
|
||||||
- Fix errors in Windows for tests/functions ([#4781](https://github.com/dbt-labs/dbt-core/issues/4781), [#4767](https://github.com/dbt-labs/dbt-core/pull/4767))
|
|
||||||
|
|
||||||
Contributors:
|
|
||||||
- [@NiallRees](https://github.com/NiallRees) ([#4447](https://github.com/dbt-labs/dbt-core/pull/4447))
|
|
||||||
- [@alswang18](https://github.com/alswang18) ([#4644](https://github.com/dbt-labs/dbt-core/pull/4644))
|
|
||||||
- [@emartens](https://github.com/ehmartens) ([#4701](https://github.com/dbt-labs/dbt-core/pull/4701))
|
|
||||||
- [@mdesmet](https://github.com/mdesmet) ([#4604](https://github.com/dbt-labs/dbt-core/pull/4604))
|
|
||||||
- [@kazanzhy](https://github.com/kazanzhy) ([#4545](https://github.com/dbt-labs/dbt-core/pull/4545))
|
|
||||||
|
|
||||||
|
|
||||||
## dbt-core 1.0.4 (TBD)
|
|
||||||
|
|
||||||
### Fixes
|
|
||||||
- Fix bug causing empty node level meta, snapshot config errors ([#4459](https://github.com/dbt-labs/dbt-core/issues/4459), [#4726](https://github.com/dbt-labs/dbt-core/pull/4726))
|
|
||||||
- Fix slow `dbt run` when using Postgres adapter, by deduplicating relations in `postgres_get_relations` ([#3058](https://github.com/dbt-labs/dbt-core/issues/3058), [#4521](https://github.com/dbt-labs/dbt-core/pull/4521))
|
|
||||||
- Fix partial parsing bug with multiple snapshot blocks ([#4771](https//github.com/dbt-labs/dbt-core/issues/4772), [#4773](https://github.com/dbt-labs/dbt-core/pull/4773))
|
|
||||||
- Fix lack of color output on Linux and MacOS when piping the output into another process using the shell pipe (`|`) [#4792](https://github.com/dbt-labs/dbt-core/pull/4792)
|
|
||||||
- Fixed a bug where nodes that depend on multiple macros couldn't be selected using `-s state:modified` ([#4678](https://github.com/dbt-labs/dbt-core/issues/4678))
|
|
||||||
|
|
||||||
Contributors:
|
|
||||||
- [@varun-dc ](https://github.com/varun-dc) ([#4792](https://github.com/dbt-labs/dbt-core/pull/4792))
|
|
||||||
|
|
||||||
### Docs
|
|
||||||
- Resolve errors related to operations preventing DAG from generating in the docs. Also patch a spark issue to allow search to filter accurately past the missing columns. ([#4578](https://github.com/dbt-labs/dbt-core/issues/4578), [#4763](https://github.com/dbt-labs/dbt-core/pull/4763))
|
|
||||||
|
|
||||||
## dbt-core 1.0.3 (TBD)
|
|
||||||
|
|
||||||
### Fixes
|
|
||||||
- Fix bug accessing target fields in deps and clean commands ([#4752](https://github.com/dbt-labs/dbt-core/issues/4752), [#4758](https://github.com/dbt-labs/dbt-core/issues/4758))
|
|
||||||
|
|
||||||
## dbt-core 1.0.2 (TBD)
|
|
||||||
|
|
||||||
### Fixes
|
|
||||||
- Projects created using `dbt init` now have the correct `seeds` directory created (instead of `data`) ([#4588](https://github.com/dbt-labs/dbt-core/issues/4588), [#4599](https://github.com/dbt-labs/dbt-core/pull/4589))
|
|
||||||
- Don't require a profile for dbt deps and clean commands ([#4554](https://github.com/dbt-labs/dbt-core/issues/4554), [#4610](https://github.com/dbt-labs/dbt-core/pull/4610))
|
|
||||||
- Select modified.body works correctly when new model added([#4570](https://github.com/dbt-labs/dbt-core/issues/4570), [#4631](https://github.com/dbt-labs/dbt-core/pull/4631))
|
|
||||||
- Fix bug in retry logic for bad response from hub and when there is a bad git tarball download. ([#4577](https://github.com/dbt-labs/dbt-core/issues/4577), [#4579](https://github.com/dbt-labs/dbt-core/issues/4579), [#4609](https://github.com/dbt-labs/dbt-core/pull/4609))
|
|
||||||
- Restore previous log level (DEBUG) when a test depends on a disabled resource. Still WARN if the resource is missing ([#4594](https://github.com/dbt-labs/dbt-core/issues/4594), [#4647](https://github.com/dbt-labs/dbt-core/pull/4647))
|
|
||||||
- Add project name validation to `dbt init` ([#4490](https://github.com/dbt-labs/dbt-core/issues/4490),[#4536](https://github.com/dbt-labs/dbt-core/pull/4536))
|
|
||||||
- Support click versions in the v7.x series ([#4681](https://github.com/dbt-labs/dbt-core/pull/4681))
|
|
||||||
|
|
||||||
Contributors:
|
|
||||||
* [@amirkdv](https://github.com/amirkdv) ([#4536](https://github.com/dbt-labs/dbt-core/pull/4536))
|
|
||||||
* [@twilly](https://github.com/twilly) ([#4681](https://github.com/dbt-labs/dbt-core/pull/4681))
|
|
||||||
|
|
||||||
## dbt-core 1.0.2 (TBD)
|
|
||||||
### Fixes
|
|
||||||
- adapter compability messaging added([#4438](https://github.com/dbt-labs/dbt-core/pull/4438) [#4565](https://github.com/dbt-labs/dbt-core/pull/4565))
|
|
||||||
|
|
||||||
Contributors:
|
|
||||||
* [@nkyuray](https://github.com/nkyuray) ([#4565](https://github.com/dbt-labs/dbt-core/pull/4565))
|
|
||||||
## dbt-core 1.0.1 (January 03, 2022)
|
|
||||||
|
|
||||||
|
|
||||||
## dbt-core 1.0.1rc1 (December 20, 2021)
|
|
||||||
|
|
||||||
### Fixes
|
|
||||||
- Fix wrong url in the dbt docs overview homepage ([#4442](https://github.com/dbt-labs/dbt-core/pull/4442))
|
|
||||||
- Fix redefined status param of SQLQueryStatus to typecheck the string which passes on `._message` value of `AdapterResponse` or the `str` value sent by adapter plugin. ([#4463](https://github.com/dbt-labs/dbt-core/pull/4463#issuecomment-990174166))
|
|
||||||
- Fix `DepsStartPackageInstall` event to use package name instead of version number. ([#4482](https://github.com/dbt-labs/dbt-core/pull/4482))
|
|
||||||
- Reimplement log message to use adapter name instead of the object method. ([#4501](https://github.com/dbt-labs/dbt-core/pull/4501))
|
|
||||||
- Issue better error message for incompatible schemas ([#4470](https://github.com/dbt-labs/dbt-core/pull/4442), [#4497](https://github.com/dbt-labs/dbt-core/pull/4497))
|
|
||||||
- Remove secrets from error related to packages. ([#4507](https://github.com/dbt-labs/dbt-core/pull/4507))
|
|
||||||
- Prevent coercion of boolean values (`True`, `False`) to numeric values (`0`, `1`) in query results ([#4511](https://github.com/dbt-labs/dbt-core/issues/4511), [#4512](https://github.com/dbt-labs/dbt-core/pull/4512))
|
|
||||||
- Fix error with an env_var in a project hook ([#4523](https://github.com/dbt-labs/dbt-core/issues/4523), [#4524](https://github.com/dbt-labs/dbt-core/pull/4524))
|
|
||||||
- Add additional windows compat logic for colored log output. ([#4443](https://github.com/dbt-labs/dbt-core/issues/4443))
|
|
||||||
|
|
||||||
### Docs
|
|
||||||
- Fix missing data on exposures in docs ([#4467](https://github.com/dbt-labs/dbt-core/issues/4467))
|
|
||||||
|
|
||||||
Contributors:
|
|
||||||
- [@remoyson](https://github.com/remoyson) ([#4442](https://github.com/dbt-labs/dbt-core/pull/4442))
|
|
||||||
|
|
||||||
## dbt-core 1.0.0 (December 3, 2021)
|
|
||||||
|
|
||||||
### Fixes
|
|
||||||
- Configure the CLI logger destination to use stdout instead of stderr ([#4368](https://github.com/dbt-labs/dbt-core/pull/4368))
|
|
||||||
- Make the size of `EVENT_HISTORY` configurable, via `EVENT_BUFFER_SIZE` global config ([#4411](https://github.com/dbt-labs/dbt-core/pull/4411), [#4416](https://github.com/dbt-labs/dbt-core/pull/4416))
|
|
||||||
- Change type of `log_format` in `profiles.yml` user config to be string, not boolean ([#4394](https://github.com/dbt-labs/dbt-core/pull/4394))
|
|
||||||
|
|
||||||
### Under the hood
|
|
||||||
- Only log cache events if `LOG_CACHE_EVENTS` is enabled, and disable by default. This restores previous behavior ([#4369](https://github.com/dbt-labs/dbt-core/pull/4369))
|
|
||||||
- Move event codes to be a top-level attribute of JSON-formatted logs, rather than nested in `data` ([#4381](https://github.com/dbt-labs/dbt-core/pull/4381))
|
|
||||||
- Fix failing integration test on Windows ([#4380](https://github.com/dbt-labs/dbt-core/pull/4380))
|
|
||||||
- Clean up warning messages for `clean` + `deps` ([#4366](https://github.com/dbt-labs/dbt-core/pull/4366))
|
|
||||||
- Use RFC3339 timestamps for log messages ([#4384](https://github.com/dbt-labs/dbt-core/pull/4384))
|
|
||||||
- Different text output for console (info) and file (debug) logs ([#4379](https://github.com/dbt-labs/dbt-core/pull/4379), [#4418](https://github.com/dbt-labs/dbt-core/pull/4418))
|
|
||||||
- Remove unused events. More structured `ConcurrencyLine`. Replace `\n` message starts/ends with `EmptyLine` events, and exclude `EmptyLine` from JSON-formatted output ([#4388](https://github.com/dbt-labs/dbt-core/pull/4388))
|
|
||||||
- Update `events` module README ([#4395](https://github.com/dbt-labs/dbt-core/pull/4395))
|
|
||||||
- Rework approach to JSON serialization for events with non-standard properties ([#4396](https://github.com/dbt-labs/dbt-core/pull/4396))
|
|
||||||
- Update legacy logger file name to `dbt.log.legacy` ([#4402](https://github.com/dbt-labs/dbt-core/pull/4402))
|
|
||||||
- Rollover `dbt.log` at 10 MB, and keep up to 5 backups, restoring previous behavior ([#4405](https://github.com/dbt-labs/dbt-core/pull/4405))
|
|
||||||
- Use reference keys instead of full relation objects in cache events ([#4410](https://github.com/dbt-labs/dbt-core/pull/4410))
|
|
||||||
- Add `node_type` contextual info to more events ([#4378](https://github.com/dbt-labs/dbt-core/pull/4378))
|
|
||||||
- Make `materialized` config optional in `node_type` ([#4417](https://github.com/dbt-labs/dbt-core/pull/4417))
|
|
||||||
- Stringify exception in `GenericExceptionOnRun` to support JSON serialization ([#4424](https://github.com/dbt-labs/dbt-core/pull/4424))
|
|
||||||
- Add "interop" tests for machine consumption of structured log output ([#4327](https://github.com/dbt-labs/dbt-core/pull/4327))
|
|
||||||
- Relax version specifier for `dbt-extractor` to `~=0.4.0`, to support compiled wheels for additional architectures when available ([#4427](https://github.com/dbt-labs/dbt-core/pull/4427))
|
|
||||||
|
|
||||||
## dbt-core 1.0.0rc3 (November 30, 2021)
|
|
||||||
|
|
||||||
### Fixes
|
|
||||||
- Support partial parsing of env_vars in metrics ([#4253](https://github.com/dbt-labs/dbt-core/issues/4293), [#4322](https://github.com/dbt-labs/dbt-core/pull/4322))
|
|
||||||
- Fix typo in `UnparsedSourceDefinition.__post_serialize__` ([#3545](https://github.com/dbt-labs/dbt-core/issues/3545), [#4349](https://github.com/dbt-labs/dbt-core/pull/4349))
|
|
||||||
|
|
||||||
### Under the hood
|
|
||||||
- Change some CompilationExceptions to ParsingExceptions ([#4254](http://github.com/dbt-labs/dbt-core/issues/4254), [#4328](https://github.com/dbt-core/pull/4328))
|
|
||||||
- Reorder logic for static parser sampling to speed up model parsing ([#4332](https://github.com/dbt-labs/dbt-core/pull/4332))
|
|
||||||
- Use more augmented assignment statements ([#4315](https://github.com/dbt-labs/dbt-core/issues/4315)), ([#4311](https://github.com/dbt-labs/dbt-core/pull/4331))
|
|
||||||
- Adjust logic when finding approximate matches for models and tests ([#3835](https://github.com/dbt-labs/dbt-core/issues/3835)), [#4076](https://github.com/dbt-labs/dbt-core/pull/4076))
|
|
||||||
- Restore small previous behaviors for logging: JSON formatting for first few events; `WARN`-level stdout for `list` task; include tracking events in `dbt.log` ([#4341](https://github.com/dbt-labs/dbt-core/pull/4341))
|
|
||||||
|
|
||||||
Contributors:
|
|
||||||
- [@sarah-weatherbee](https://github.com/sarah-weatherbee) ([#4331](https://github.com/dbt-labs/dbt-core/pull/4331))
|
|
||||||
- [@emilieschario](https://github.com/emilieschario) ([#4076](https://github.com/dbt-labs/dbt-core/pull/4076))
|
|
||||||
- [@sneznaj](https://github.com/sneznaj) ([#4349](https://github.com/dbt-labs/dbt-core/pull/4349))
|
|
||||||
|
|
||||||
## dbt-core 1.0.0rc2 (November 22, 2021)
|
|
||||||
|
|
||||||
### Breaking changes
|
|
||||||
- Restrict secret env vars (prefixed `DBT_ENV_SECRET_`) to `profiles.yml` + `packages.yml` _only_. Raise an exception if a secret env var is used elsewhere ([#4310](https://github.com/dbt-labs/dbt-core/issues/4310), [#4311](https://github.com/dbt-labs/dbt-core/pull/4311))
|
|
||||||
- Reorder arguments to `config.get()` so that `default` is second ([#4273](https://github.com/dbt-labs/dbt-core/issues/4273), [#4297](https://github.com/dbt-labs/dbt-core/pull/4297))
|
|
||||||
|
|
||||||
### Features
|
|
||||||
- Avoid error when missing column in YAML description ([#4151](https://github.com/dbt-labs/dbt-core/issues/4151), [#4285](https://github.com/dbt-labs/dbt-core/pull/4285))
|
|
||||||
- Allow `--defer` flag to `dbt snapshot` ([#4110](https://github.com/dbt-labs/dbt-core/issues/4110), [#4296](https://github.com/dbt-labs/dbt-core/pull/4296))
|
|
||||||
- Install prerelease packages when `version` explicitly references a prerelease version, regardless of `install-prerelease` status ([#4243](https://github.com/dbt-labs/dbt-core/issues/4243), [#4295](https://github.com/dbt-labs/dbt-core/pull/4295))
|
|
||||||
- Add data attributes to json log messages ([#4301](https://github.com/dbt-labs/dbt-core/pull/4301))
|
|
||||||
- Add event codes to all log events ([#4319](https://github.com/dbt-labs/dbt-core/pull/4319))
|
|
||||||
|
|
||||||
### Fixes
|
|
||||||
- Fix serialization error with missing quotes in metrics model ref ([#4252](https://github.com/dbt-labs/dbt-core/issues/4252), [#4287](https://github.com/dbt-labs/dbt-core/pull/4289))
|
|
||||||
- Correct definition of 'created_at' in ParsedMetric nodes ([#4298](http://github.com/dbt-labs/dbt-core/issues/4298), [#4299](https://github.com/dbt-labs/dbt-core/pull/4299))
|
|
||||||
|
|
||||||
### Fixes
|
|
||||||
- Allow specifying default in Jinja config.get with default keyword ([#4273](https://github.com/dbt-labs/dbt-core/issues/4273), [#4297](https://github.com/dbt-labs/dbt-core/pull/4297))
|
|
||||||
- Fix serialization error with missing quotes in metrics model ref ([#4252](https://github.com/dbt-labs/dbt-core/issues/4252), [#4287](https://github.com/dbt-labs/dbt-core/pull/4289))
|
|
||||||
- Correct definition of 'created_at' in ParsedMetric nodes ([#4298](https://github.com/dbt-labs/dbt-core/issues/4298), [#4299](https://github.com/dbt-labs/dbt-core/pull/4299))
|
|
||||||
|
|
||||||
### Under the hood
|
|
||||||
- Add --indirect-selection parameter to profiles.yml and builtin DBT_ env vars; stringified parameter to enable multi-modal use ([#3997](https://github.com/dbt-labs/dbt-core/issues/3997), [#4270](https://github.com/dbt-labs/dbt-core/pull/4270))
|
|
||||||
- Fix filesystem searcher test failure on Python 3.9 ([#3689](https://github.com/dbt-labs/dbt-core/issues/3689), [#4271](https://github.com/dbt-labs/dbt-core/pull/4271))
|
|
||||||
- Clean up deprecation warnings shown for `dbt_project.yml` config renames ([#4276](https://github.com/dbt-labs/dbt-core/issues/4276), [#4291](https://github.com/dbt-labs/dbt-core/pull/4291))
|
|
||||||
- Fix metrics count in compiled project stats ([#4290](https://github.com/dbt-labs/dbt-core/issues/4290), [#4292](https://github.com/dbt-labs/dbt-core/pull/4292))
|
|
||||||
- First pass at supporting more dbt tasks via python lib ([#4200](https://github.com/dbt-labs/dbt-core/pull/4200))
|
|
||||||
|
|
||||||
Contributors:
|
|
||||||
- [@kadero](https://github.com/kadero) ([#4285](https://github.com/dbt-labs/dbt-core/pull/4285), [#4296](https://github.com/dbt-labs/dbt-core/pull/4296))
|
|
||||||
- [@joellabes](https://github.com/joellabes) ([#4295](https://github.com/dbt-labs/dbt-core/pull/4295))
|
|
||||||
|
|
||||||
## dbt-core 1.0.0rc1 (November 10, 2021)
|
|
||||||
|
|
||||||
### Breaking changes
|
|
||||||
- Replace `greedy` flag/property for test selection with `indirect_selection: eager/cautious` flag/property. Set to `eager` by default. **Note:** This reverts test selection to its pre-v0.20 behavior by default. `dbt test -s my_model` _will_ select multi-parent tests, such as `relationships`, that depend on unselected resources. To achieve the behavior change in v0.20 + v0.21, set `--indirect-selection=cautious` on the CLI or `indirect_selection: cautious` in yaml selectors. ([#4082](https://github.com/dbt-labs/dbt-core/issues/4082), [#4104](https://github.com/dbt-labs/dbt-core/pull/4104))
|
|
||||||
- In v1.0.0, **`pip install dbt` will raise an explicit error.** Instead, please use `pip install dbt-<adapter>` (to use dbt with that database adapter), or `pip install dbt-core` (for core functionality). For parity with the previous behavior of `pip install dbt`, you can use: `pip install dbt-core dbt-postgres dbt-redshift dbt-snowflake dbt-bigquery` ([#4100](https://github.com/dbt-labs/dbt-core/issues/4100), [#4133](https://github.com/dbt-labs/dbt-core/pull/4133))
|
|
||||||
- Reorganize the `global_project` (macros) into smaller files with clearer names. Remove unused global macros: `column_list`, `column_list_for_create_table`, `incremental_upsert` ([#4154](https://github.com/dbt-labs/dbt-core/pull/4154))
|
|
||||||
- Introduce structured event interface, and begin conversion of all legacy logging ([#3359](https://github.com/dbt-labs/dbt-core/issues/3359), [#4055](https://github.com/dbt-labs/dbt-core/pull/4055))
|
|
||||||
- **This is a breaking change for adapter plugins, requiring a very simple migration.** See [`events` module README](core/dbt/events/README.md#adapter-maintainers) for details.
|
|
||||||
- If you maintain another kind of dbt-core plugin that makes heavy use of legacy logging, and you need time to cut over to the new event interface, you can re-enable the legacy logger via an environment variable shim, `DBT_ENABLE_LEGACY_LOGGER=True`. Be advised that we will remove this capability in a future version of dbt-core.
|
|
||||||
|
|
||||||
### Features
|
|
||||||
- Allow nullable `error_after` in source freshness ([#3874](https://github.com/dbt-labs/dbt-core/issues/3874), [#3955](https://github.com/dbt-labs/dbt-core/pull/3955))
|
|
||||||
- Add `metrics` nodes ([#4071](https://github.com/dbt-labs/dbt-core/issues/4071), [#4235](https://github.com/dbt-labs/dbt-core/pull/4235))
|
|
||||||
- Add support for `dbt init <project_name>`, and support for `skip_profile_setup` argument (`dbt init -s`) ([#4156](https://github.com/dbt-labs/dbt-core/issues/4156), [#4249](https://github.com/dbt-labs/dbt-core/pull/4249))
|
|
||||||
|
|
||||||
### Fixes
|
|
||||||
- Changes unit tests using `assertRaisesRegexp` to `assertRaisesRegex` ([#4132](https://github.com/dbt-labs/dbt-core/issues/4132), [#4136](https://github.com/dbt-labs/dbt-core/pull/4136))
|
|
||||||
- Allow retries when the answer from a `dbt deps` is `None` ([#4178](https://github.com/dbt-labs/dbt-core/issues/4178), [#4225](https://github.com/dbt-labs/dbt-core/pull/4225))
|
|
||||||
|
|
||||||
### Docs
|
|
||||||
|
|
||||||
- Fix non-alphabetical sort of Source Tables in source overview page ([docs#81](https://github.com/dbt-labs/dbt-docs/issues/81), [docs#218](https://github.com/dbt-labs/dbt-docs/pull/218))
|
|
||||||
- Add title tag to node elements in tree ([docs#202](https://github.com/dbt-labs/dbt-docs/issues/202), [docs#203](https://github.com/dbt-labs/dbt-docs/pull/203))
|
|
||||||
- Account for test rename: `schema` → `generic`, `data` → `singular`. Use `test_metadata` instead of `schema`/`data` tags to differentiate ([docs#216](https://github.com/dbt-labs/dbt-docs/issues/216), [docs#222](https://github.com/dbt-labs/dbt-docs/pull/222))
|
|
||||||
- Add `metrics` ([core#4235](https://github.com/dbt-labs/dbt-core/issues/4235), [docs#223](https://github.com/dbt-labs/dbt-docs/pull/223))
|
|
||||||
|
|
||||||
### Under the hood
|
|
||||||
- Bump artifact schema versions for 1.0.0: manifest v4, run results v4, sources v3. Notable changes: added `metrics` nodes; schema test + data test nodes are renamed to generic test + singular test nodes; freshness threshold default values ([#4191](https://github.com/dbt-labs/dbt-core/pull/4191))
|
|
||||||
- Speed up node selection by skipping `incorporate_indirect_nodes` if not needed ([#4213](https://github.com/dbt-labs/dbt-core/issues/4213), [#4214](https://github.com/dbt-labs/dbt-core/pull/4214))
|
|
||||||
- When `on_schema_change` is set, pass common columns as `dest_columns` in incremental merge macros ([#4144](https://github.com/dbt-labs/dbt-core/issues/4144), [#4170](https://github.com/dbt-labs/dbt-core/pull/4170))
|
|
||||||
- Clear adapters before registering in `lib` module config generation ([#4218](https://github.com/dbt-labs/dbt-core/pull/4218))
|
|
||||||
- Remove official support for python 3.6, which is reaching end of life on December 23, 2021 ([#4134](https://github.com/dbt-labs/dbt-core/issues/4134), [#4223](https://github.com/dbt-labs/dbt-core/pull/4223))
|
|
||||||
|
|
||||||
Contributors:
|
|
||||||
- [@kadero](https://github.com/kadero) ([#3955](https://github.com/dbt-labs/dbt-core/pull/3955), [#4249](https://github.com/dbt-labs/dbt-core/pull/4249))
|
|
||||||
- [@frankcash](https://github.com/frankcash) ([#4136](https://github.com/dbt-labs/dbt-core/pull/4136))
|
|
||||||
- [@Kayrnt](https://github.com/Kayrnt) ([#4170](https://github.com/dbt-labs/dbt-core/pull/4170))
|
|
||||||
- [@VersusFacit](https://github.com/VersusFacit) ([#4104](https://github.com/dbt-labs/dbt-core/pull/4104))
|
|
||||||
- [@joellabes](https://github.com/joellabes) ([#4104](https://github.com/dbt-labs/dbt-core/pull/4104))
|
|
||||||
- [@b-per](https://github.com/b-per) ([#4225](https://github.com/dbt-labs/dbt-core/pull/4225))
|
|
||||||
- [@salmonsd](https://github.com/salmonsd) ([docs#218](https://github.com/dbt-labs/dbt-docs/pull/218))
|
|
||||||
- [@miike](https://github.com/miike) ([docs#203](https://github.com/dbt-labs/dbt-docs/pull/203))
|
|
||||||
|
|
||||||
|
|
||||||
## dbt-core 1.0.0b2 (October 25, 2021)
|
|
||||||
|
|
||||||
### Breaking changes
|
|
||||||
|
|
||||||
- Enable `on-run-start` and `on-run-end` hooks for `dbt test`. Add `flags.WHICH` to execution context, representing current task ([#3463](https://github.com/dbt-labs/dbt-core/issues/3463), [#4004](https://github.com/dbt-labs/dbt-core/pull/4004))
|
|
||||||
|
|
||||||
### Features
|
|
||||||
- Normalize global CLI arguments/flags ([#2990](https://github.com/dbt-labs/dbt/issues/2990), [#3839](https://github.com/dbt-labs/dbt/pull/3839))
|
|
||||||
- Turns on the static parser by default and adds the flag `--no-static-parser` to disable it. ([#3377](https://github.com/dbt-labs/dbt/issues/3377), [#3939](https://github.com/dbt-labs/dbt/pull/3939))
|
|
||||||
- Generic test FQNs have changed to include the relative path, resource, and column (if applicable) where they are defined. This makes it easier to configure them from the `tests` block in `dbt_project.yml` ([#3259](https://github.com/dbt-labs/dbt/pull/3259), [#3880](https://github.com/dbt-labs/dbt/pull/3880))
|
|
||||||
- Turn on partial parsing by default ([#3867](https://github.com/dbt-labs/dbt/issues/3867), [#3989](https://github.com/dbt-labs/dbt/issues/3989))
|
|
||||||
- Add `result:<status>` selectors to automatically rerun failed tests and erroneous models. This makes it easier to rerun failed dbt jobs with a simple selector flag instead of restarting from the beginning or manually running the dbt models in scope. ([#3891](https://github.com/dbt-labs/dbt/issues/3891), [#4017](https://github.com/dbt-labs/dbt/pull/4017))
|
|
||||||
- `dbt init` is now interactive, generating profiles.yml when run inside existing project ([#3625](https://github.com/dbt-labs/dbt/pull/3625))
|
|
||||||
|
|
||||||
### Under the hood
|
|
||||||
|
|
||||||
- Fix intermittent errors in partial parsing tests ([#4060](https://github.com/dbt-labs/dbt-core/issues/4060), [#4068](https://github.com/dbt-labs/dbt-core/pull/4068))
|
|
||||||
- Make finding disabled nodes more consistent ([#4069](https://github.com/dbt-labs/dbt-core/issues/4069), [#4073](https://github.com/dbt-labs/dbt-core/pull/4073))
|
|
||||||
- Remove connection from `render_with_context` during parsing, thereby removing misleading log message ([#3137](https://github.com/dbt-labs/dbt-core/issues/3137), [#4062](https://github.com/dbt-labs/dbt-core/pull/4062))
|
|
||||||
- Wait for postgres docker container to be ready in `setup_db.sh`. ([#3876](https://github.com/dbt-labs/dbt-core/issues/3876), [#3908](https://github.com/dbt-labs/dbt-core/pull/3908))
|
|
||||||
- Prefer macros defined in the project over the ones in a package by default ([#4106](https://github.com/dbt-labs/dbt-core/issues/4106), [#4114](https://github.com/dbt-labs/dbt-core/pull/4114))
|
|
||||||
- Dependency updates ([#4079](https://github.com/dbt-labs/dbt-core/pull/4079)), ([#3532](https://github.com/dbt-labs/dbt-core/pull/3532))
|
|
||||||
- Schedule partial parsing for SQL files with env_var changes ([#3885](https://github.com/dbt-labs/dbt-core/issues/3885), [#4101](https://github.com/dbt-labs/dbt-core/pull/4101))
|
|
||||||
- Schedule partial parsing for schema files with env_var changes ([#3885](https://github.com/dbt-labs/dbt-core/issues/3885), [#4162](https://github.com/dbt-labs/dbt-core/pull/4162))
|
|
||||||
- Skip partial parsing when env_vars change in dbt_project or profile ([#3885](https://github.com/dbt-labs/dbt-core/issues/3885), [#4212](https://github.com/dbt-labs/dbt-core/pull/4212))
|
|
||||||
|
|
||||||
Contributors:
|
|
||||||
- [@sungchun12](https://github.com/sungchun12) ([#4017](https://github.com/dbt-labs/dbt/pull/4017))
|
|
||||||
- [@matt-winkler](https://github.com/matt-winkler) ([#4017](https://github.com/dbt-labs/dbt/pull/4017))
|
|
||||||
- [@NiallRees](https://github.com/NiallRees) ([#3625](https://github.com/dbt-labs/dbt/pull/3625))
|
|
||||||
- [@rvacaru](https://github.com/rvacaru) ([#3908](https://github.com/dbt-labs/dbt/pull/3908))
|
|
||||||
- [@JCZuurmond](https://github.com/jczuurmond) ([#4114](https://github.com/dbt-labs/dbt-core/pull/4114))
|
|
||||||
- [@ljhopkins2](https://github.com/ljhopkins2) ([#4079](https://github.com/dbt-labs/dbt-core/pull/4079))
|
|
||||||
|
|
||||||
## dbt-core 1.0.0b1 (October 11, 2021)
|
|
||||||
|
|
||||||
### Breaking changes
|
|
||||||
|
|
||||||
- The two type of test definitions are now "singular" and "generic" (instead of "data" and "schema", respectively). The `test_type:` selection method accepts `test_type:singular` and `test_type:generic`. (It will also accept `test_type:schema` and `test_type:data` for backwards compatibility) ([#3234](https://github.com/dbt-labs/dbt-core/issues/3234), [#3880](https://github.com/dbt-labs/dbt-core/pull/3880)). **Not backwards compatible:** The `--data` and `--schema` flags to `dbt test` are no longer supported, and tests no longer have the tags `'data'` and `'schema'` automatically applied.
|
|
||||||
- Deprecated the use of the `packages` arg `adapter.dispatch` in favor of the `macro_namespace` arg. ([#3895](https://github.com/dbt-labs/dbt-core/issues/3895))
|
|
||||||
|
|
||||||
### Features
|
|
||||||
- Normalize global CLI arguments/flags ([#2990](https://github.com/dbt-labs/dbt-core/issues/2990), [#3839](https://github.com/dbt-labs/dbt-core/pull/3839))
|
|
||||||
- Turns on the static parser by default and adds the flag `--no-static-parser` to disable it. ([#3377](https://github.com/dbt-labs/dbt-core/issues/3377), [#3939](https://github.com/dbt-labs/dbt-core/pull/3939))
|
|
||||||
- Generic test FQNs have changed to include the relative path, resource, and column (if applicable) where they are defined. This makes it easier to configure them from the `tests` block in `dbt_project.yml` ([#3259](https://github.com/dbt-labs/dbt-core/pull/3259), [#3880](https://github.com/dbt-labs/dbt-core/pull/3880))
|
|
||||||
- Turn on partial parsing by default ([#3867](https://github.com/dbt-labs/dbt-core/issues/3867), [#3989](https://github.com/dbt-labs/dbt-core/issues/3989))
|
|
||||||
- Generic test can now be added under a `generic` subfolder in the `test-paths` directory. ([#4052](https://github.com/dbt-labs/dbt-core/pull/4052))
|
|
||||||
|
|
||||||
### Fixes
|
|
||||||
- Add generic tests defined on sources to the manifest once, not twice ([#3347](https://github.com/dbt-labs/dbt/issues/3347), [#3880](https://github.com/dbt-labs/dbt/pull/3880))
|
|
||||||
- Skip partial parsing if certain macros have changed ([#3810](https://github.com/dbt-labs/dbt/issues/3810), [#3982](https://github.com/dbt-labs/dbt/pull/3982))
|
|
||||||
- Enable cataloging of unlogged Postgres tables ([#3961](https://github.com/dbt-labs/dbt/issues/3961), [#3993](https://github.com/dbt-labs/dbt/pull/3993))
|
|
||||||
- Fix multiple disabled nodes ([#4013](https://github.com/dbt-labs/dbt/issues/4013), [#4018](https://github.com/dbt-labs/dbt/pull/4018))
|
|
||||||
- Fix multiple partial parsing errors ([#3996](https://github.com/dbt-labs/dbt/issues/3996), [#4020](https://github.com/dbt-labs/dbt/pull/4020))
|
|
||||||
- Return an error instead of a warning when running with `--warn-error` and no models are selected ([#4006](https://github.com/dbt-labs/dbt/issues/4006), [#4019](https://github.com/dbt-labs/dbt/pull/4019))
|
|
||||||
- Fixed bug with `error_if` test option ([#4070](https://github.com/dbt-labs/dbt-core/pull/4070))
|
|
||||||
|
|
||||||
### Under the hood
|
|
||||||
- Enact deprecation for `materialization-return` and replace deprecation warning with an exception. ([#3896](https://github.com/dbt-labs/dbt-core/issues/3896))
|
|
||||||
- Build catalog for only relational, non-ephemeral nodes in the graph ([#3920](https://github.com/dbt-labs/dbt-core/issues/3920))
|
|
||||||
- Enact deprecation to remove the `release` arg from the `execute_macro` method. ([#3900](https://github.com/dbt-labs/dbt-core/issues/3900))
|
|
||||||
- Enact deprecation for default quoting to be True. Override for the `dbt-snowflake` adapter so it stays `False`. ([#3898](https://github.com/dbt-labs/dbt-core/issues/3898))
|
|
||||||
- Enact deprecation for object used as dictionaries when they should be dataclasses. Replace deprecation warning with an exception for the dunder methods of `__iter__` and `__len__` for all superclasses of FakeAPIObject. ([#3897](https://github.com/dbt-labs/dbt-core/issues/3897))
|
|
||||||
- Enact deprecation for `adapter-macro` and replace deprecation warning with an exception. ([#3901](https://github.com/dbt-labs/dbt-core/issues/3901))
|
|
||||||
- Add warning when trying to put a node under the wrong key. ie. A seed under models in a `schema.yml` file. ([#3899](https://github.com/dbt-labs/dbt-core/issues/3899))
|
|
||||||
- Plugins for `redshift`, `snowflake`, and `bigquery` have moved to separate repos: [`dbt-redshift`](https://github.com/dbt-labs/dbt-redshift), [`dbt-snowflake`](https://github.com/dbt-labs/dbt-snowflake), [`dbt-bigquery`](https://github.com/dbt-labs/dbt-bigquery)
|
|
||||||
- Change the default dbt packages installation directory to `dbt_packages` from `dbt_modules`. Also rename `module-path` to `packages-install-path` to allow default overrides of package install directory. Deprecation warning added for projects using the old `dbt_modules` name without specifying a `packages-install-path`. ([#3523](https://github.com/dbt-labs/dbt-core/issues/3523))
|
|
||||||
- Update the default project paths to be `analysis-paths = ['analyses']` and `test-paths = ['tests']`. Also have starter project set `analysis-paths: ['analyses']` from now on. ([#2659](https://github.com/dbt-labs/dbt-core/issues/2659))
|
|
||||||
- Define the data type of `sources` as an array of arrays of string in the manifest artifacts. ([#3966](https://github.com/dbt-labs/dbt-core/issues/3966), [#3967](https://github.com/dbt-labs/dbt-core/pull/3967))
|
|
||||||
- Marked `source-paths` and `data-paths` as deprecated keys in `dbt_project.yml` in favor of `model-paths` and `seed-paths` respectively.([#1607](https://github.com/dbt-labs/dbt-core/issues/1607))
|
|
||||||
- Surface git errors to `stdout` when cloning dbt packages from Github. ([#3167](https://github.com/dbt-labs/dbt-core/issues/3167))
|
|
||||||
|
|
||||||
Contributors:
|
|
||||||
|
|
||||||
- [@dave-connors-3](https://github.com/dave-connors-3) ([#3920](https://github.com/dbt-labs/dbt-core/pull/3922))
|
|
||||||
- [@kadero](https://github.com/kadero) ([#3952](https://github.com/dbt-labs/dbt-core/pull/3953))
|
|
||||||
- [@samlader](https://github.com/samlader) ([#3993](https://github.com/dbt-labs/dbt-core/pull/3993))
|
|
||||||
- [@yu-iskw](https://github.com/yu-iskw) ([#3967](https://github.com/dbt-labs/dbt-core/pull/3967))
|
|
||||||
- [@laxjesse](https://github.com/laxjesse) ([#4019](https://github.com/dbt-labs/dbt-core/pull/4019))
|
|
||||||
- [@gitznik](https://github.com/Gitznik) ([#4124](https://github.com/dbt-labs/dbt-core/pull/4124))
|
|
||||||
|
|
||||||
|
|
||||||
## Previous Releases
|
## Previous Releases
|
||||||
|
|
||||||
For information on prior major and minor releases, see their changelogs:
|
For information on prior major and minor releases, see their changelogs:
|
||||||
|
|
||||||
|
|
||||||
|
* [1.6](https://github.com/dbt-labs/dbt-core/blob/1.6.latest/CHANGELOG.md)
|
||||||
|
* [1.5](https://github.com/dbt-labs/dbt-core/blob/1.5.latest/CHANGELOG.md)
|
||||||
|
* [1.4](https://github.com/dbt-labs/dbt-core/blob/1.4.latest/CHANGELOG.md)
|
||||||
|
* [1.3](https://github.com/dbt-labs/dbt-core/blob/1.3.latest/CHANGELOG.md)
|
||||||
|
* [1.2](https://github.com/dbt-labs/dbt-core/blob/1.2.latest/CHANGELOG.md)
|
||||||
|
* [1.1](https://github.com/dbt-labs/dbt-core/blob/1.1.latest/CHANGELOG.md)
|
||||||
|
* [1.0](https://github.com/dbt-labs/dbt-core/blob/1.0.latest/CHANGELOG.md)
|
||||||
* [0.21](https://github.com/dbt-labs/dbt-core/blob/0.21.latest/CHANGELOG.md)
|
* [0.21](https://github.com/dbt-labs/dbt-core/blob/0.21.latest/CHANGELOG.md)
|
||||||
* [0.20](https://github.com/dbt-labs/dbt-core/blob/0.20.latest/CHANGELOG.md)
|
* [0.20](https://github.com/dbt-labs/dbt-core/blob/0.20.latest/CHANGELOG.md)
|
||||||
* [0.19](https://github.com/dbt-labs/dbt-core/blob/0.19.latest/CHANGELOG.md)
|
* [0.19](https://github.com/dbt-labs/dbt-core/blob/0.19.latest/CHANGELOG.md)
|
||||||
|
|||||||
211
CONTRIBUTING.md
211
CONTRIBUTING.md
@@ -1,79 +1,30 @@
|
|||||||
# Contributing to `dbt`
|
# Contributing to `dbt-core`
|
||||||
|
|
||||||
|
`dbt-core` is open source software. It is what it is today because community members have opened issues, provided feedback, and [contributed to the knowledge loop](https://www.getdbt.com/dbt-labs/values/). Whether you are a seasoned open source contributor or a first-time committer, we welcome and encourage you to contribute code, documentation, ideas, or problem statements to this project.
|
||||||
|
|
||||||
1. [About this document](#about-this-document)
|
1. [About this document](#about-this-document)
|
||||||
2. [Proposing a change](#proposing-a-change)
|
2. [Getting the code](#getting-the-code)
|
||||||
3. [Getting the code](#getting-the-code)
|
3. [Setting up an environment](#setting-up-an-environment)
|
||||||
4. [Setting up an environment](#setting-up-an-environment)
|
4. [Running dbt-core in development](#running-dbt-core-in-development)
|
||||||
5. [Running `dbt` in development](#running-dbt-in-development)
|
5. [Testing dbt-core](#testing)
|
||||||
6. [Testing](#testing)
|
6. [Debugging](#debugging)
|
||||||
7. [Submitting a Pull Request](#submitting-a-pull-request)
|
7. [Adding or modifying a changelog entry](#adding-or-modifying-a-changelog-entry)
|
||||||
|
8. [Submitting a Pull Request](#submitting-a-pull-request)
|
||||||
|
|
||||||
## About this document
|
## About this document
|
||||||
|
|
||||||
This document is a guide intended for folks interested in contributing to `dbt-core`. Below, we document the process by which members of the community should create issues and submit pull requests (PRs) in this repository. It is not intended as a guide for using `dbt-core`, and it assumes a certain level of familiarity with Python concepts such as virtualenvs, `pip`, python modules, filesystems, and so on. This guide assumes you are using macOS or Linux and are comfortable with the command line.
|
There are many ways to contribute to the ongoing development of `dbt-core`, such as by participating in discussions and issues. We encourage you to first read our higher-level document: ["Expectations for Open Source Contributors"](https://docs.getdbt.com/docs/contributing/oss-expectations).
|
||||||
|
|
||||||
If you're new to python development or contributing to open-source software, we encourage you to read this document from start to finish. If you get stuck, drop us a line in the `#dbt-core-development` channel on [slack](https://community.getdbt.com).
|
The rest of this document serves as a more granular guide for contributing code changes to `dbt-core` (this repository). It is not intended as a guide for using `dbt-core`, and some pieces assume a level of familiarity with Python development (virtualenvs, `pip`, etc). Specific code snippets in this guide assume you are using macOS or Linux and are comfortable with the command line.
|
||||||
|
|
||||||
#### Adapters
|
If you get stuck, we're happy to help! Drop us a line in the `#dbt-core-development` channel in the [dbt Community Slack](https://community.getdbt.com).
|
||||||
|
|
||||||
If you have an issue or code change suggestion related to a specific database [adapter](https://docs.getdbt.com/docs/available-adapters), please refer to that adapter's separate repository for those contributions.
|
### Notes
|
||||||
|
|
||||||
### Signing the CLA
|
- **Adapters:** Is your issue or proposed code change related to a specific [database adapter](https://docs.getdbt.com/docs/available-adapters)? If so, please open issues, PRs, and discussions in that adapter's repository instead. The sole exception is Postgres; the `dbt-postgres` plugin lives in this repository (`dbt-core`).
|
||||||
|
- **CLA:** Please note that anyone contributing code to `dbt-core` must sign the [Contributor License Agreement](https://docs.getdbt.com/docs/contributor-license-agreements). If you are unable to sign the CLA, the `dbt-core` maintainers will unfortunately be unable to merge any of your Pull Requests. We welcome you to participate in discussions, open issues, and comment on existing ones.
|
||||||
Please note that all contributors to `dbt-core` must sign the [Contributor License Agreement](https://docs.getdbt.com/docs/contributor-license-agreements) to have their Pull Request merged into the `dbt-core` codebase. If you are unable to sign the CLA, then the `dbt-core` maintainers will unfortunately be unable to merge your Pull Request. You are, however, welcome to open issues and comment on existing ones.
|
- **Branches:** All pull requests from community contributors should target the `main` branch (default). If the change is needed as a patch for a minor version of dbt that has already been released (or is already a release candidate), a maintainer will backport the changes in your PR to the relevant "latest" release branch (`1.0.latest`, `1.1.latest`, ...). If an issue fix applies to a release branch, that fix should be first committed to the development branch and then to the release branch (rarely release-branch fixes may not apply to `main`).
|
||||||
|
- **Releases**: Before releasing a new minor version of Core, we prepare a series of alphas and release candidates to allow users (especially employees of dbt Labs!) to test the new version in live environments. This is an important quality assurance step, as it exposes the new code to a wide variety of complicated deployments and can surface bugs before official release. Releases are accessible via pip, homebrew, and dbt Cloud.
|
||||||
## Proposing a change
|
|
||||||
|
|
||||||
`dbt-core` is Apache 2.0-licensed open source software. `dbt-core` is what it is today because community members like you have opened issues, provided feedback, and contributed to the knowledge loop for the entire community. Whether you are a seasoned open source contributor or a first-time committer, we welcome and encourage you to contribute code, documentation, ideas, or problem statements to this project.
|
|
||||||
|
|
||||||
### Defining the problem
|
|
||||||
|
|
||||||
If you have an idea for a new feature or if you've discovered a bug in `dbt-core`, the first step is to open an issue. Please check the list of [open issues](https://github.com/dbt-labs/dbt-core/issues) before creating a new one. If you find a relevant issue, please add a comment to the open issue instead of creating a new one. There are hundreds of open issues in this repository and it can be hard to know where to look for a relevant open issue. **The `dbt-core` maintainers are always happy to point contributors in the right direction**, so please err on the side of documenting your idea in a new issue if you are unsure where a problem statement belongs.
|
|
||||||
|
|
||||||
> **Note:** All community-contributed Pull Requests _must_ be associated with an open issue. If you submit a Pull Request that does not pertain to an open issue, you will be asked to create an issue describing the problem before the Pull Request can be reviewed.
|
|
||||||
|
|
||||||
### Discussing the idea
|
|
||||||
|
|
||||||
After you open an issue, a `dbt-core` maintainer will follow up by commenting on your issue (usually within 1-3 days) to explore your idea further and advise on how to implement the suggested changes. In many cases, community members will chime in with their own thoughts on the problem statement. If you as the issue creator are interested in submitting a Pull Request to address the issue, you should indicate this in the body of the issue. The `dbt-core` maintainers are _always_ happy to help contributors with the implementation of fixes and features, so please also indicate if there's anything you're unsure about or could use guidance around in the issue.
|
|
||||||
|
|
||||||
### Submitting a change
|
|
||||||
|
|
||||||
If an issue is appropriately well scoped and describes a beneficial change to the `dbt-core` codebase, then anyone may submit a Pull Request to implement the functionality described in the issue. See the sections below on how to do this.
|
|
||||||
|
|
||||||
The `dbt-core` maintainers will add a `good first issue` label if an issue is suitable for a first-time contributor. This label often means that the required code change is small, limited to one database adapter, or a net-new addition that does not impact existing functionality. You can see the list of currently open issues on the [Contribute](https://github.com/dbt-labs/dbt-core/contribute) page.
|
|
||||||
|
|
||||||
Here's a good workflow:
|
|
||||||
- Comment on the open issue, expressing your interest in contributing the required code change
|
|
||||||
- Outline your planned implementation. If you want help getting started, ask!
|
|
||||||
- Follow the steps outlined below to develop locally. Once you have opened a PR, one of the `dbt-core` maintainers will work with you to review your code.
|
|
||||||
- Add a test! Tests are crucial for both fixes and new features alike. We want to make sure that code works as intended, and that it avoids any bugs previously encountered. Currently, the best resource for understanding `dbt-core`'s [unit](test/unit) and [integration](test/integration) tests is the tests themselves. One of the maintainers can help by pointing out relevant examples.
|
|
||||||
- Check your formatting and linting with [Flake8](https://flake8.pycqa.org/en/latest/#), [Black](https://github.com/psf/black), and the rest of the hooks we have in our [pre-commit](https://pre-commit.com/) [config](https://github.com/dbt-labs/dbt-core/blob/75201be9db1cb2c6c01fa7e71a314f5e5beb060a/.pre-commit-config.yaml).
|
|
||||||
|
|
||||||
In some cases, the right resolution to an open issue might be tangential to the `dbt-core` codebase. The right path forward might be a documentation update or a change that can be made in user-space. In other cases, the issue might describe functionality that the `dbt-core` maintainers are unwilling or unable to incorporate into the `dbt-core` codebase. When it is determined that an open issue describes functionality that will not translate to a code change in the `dbt-core` repository, the issue will be tagged with the `wontfix` label (see below) and closed.
|
|
||||||
|
|
||||||
### Using issue labels
|
|
||||||
|
|
||||||
The `dbt-core` maintainers use labels to categorize open issues. Most labels describe the domain in the `dbt-core` codebase germane to the discussion.
|
|
||||||
|
|
||||||
| tag | description |
|
|
||||||
| --- | ----------- |
|
|
||||||
| [triage](https://github.com/dbt-labs/dbt-core/labels/triage) | This is a new issue which has not yet been reviewed by a `dbt-core` maintainer. This label is removed when a maintainer reviews and responds to the issue. |
|
|
||||||
| [bug](https://github.com/dbt-labs/dbt-core/labels/bug) | This issue represents a defect or regression in `dbt-core` |
|
|
||||||
| [enhancement](https://github.com/dbt-labs/dbt-core/labels/enhancement) | This issue represents net-new functionality in `dbt-core` |
|
|
||||||
| [good first issue](https://github.com/dbt-labs/dbt-core/labels/good%20first%20issue) | This issue does not require deep knowledge of the `dbt-core` codebase to implement. This issue is appropriate for a first-time contributor. |
|
|
||||||
| [help wanted](https://github.com/dbt-labs/dbt-core/labels/help%20wanted) / [discussion](https://github.com/dbt-labs/dbt-core/labels/discussion) | Conversation around this issue is ongoing, and there isn't yet a clear path forward. Input from community members is most welcome. |
|
|
||||||
| [duplicate](https://github.com/dbt-labs/dbt-core/issues/duplicate) | This issue is functionally identical to another open issue. The `dbt-core` maintainers will close this issue and encourage community members to focus conversation on the other one. |
|
|
||||||
| [snoozed](https://github.com/dbt-labs/dbt-core/labels/snoozed) | This issue describes a good idea, but one which will probably not be addressed in a six-month time horizon. The `dbt-core` maintainers will revisit these issues periodically and re-prioritize them accordingly. |
|
|
||||||
| [stale](https://github.com/dbt-labs/dbt-core/labels/stale) | This is an old issue which has not recently been updated. Stale issues will periodically be closed by `dbt-core` maintainers, but they can be re-opened if the discussion is restarted. |
|
|
||||||
| [wontfix](https://github.com/dbt-labs/dbt-core/labels/wontfix) | This issue does not require a code change in the `dbt-core` repository, or the maintainers are unwilling/unable to merge a Pull Request which implements the behavior described in the issue. |
|
|
||||||
|
|
||||||
#### Branching Strategy
|
|
||||||
|
|
||||||
`dbt-core` has three types of branches:
|
|
||||||
|
|
||||||
- **Trunks** are where active development of the next release takes place. There is one trunk named `main` at the time of writing this, and will be the default branch of the repository.
|
|
||||||
- **Release Branches** track a specific, not yet complete release of `dbt-core`. Each minor version release has a corresponding release branch. For example, the `0.11.x` series of releases has a branch called `0.11.latest`. This allows us to release new patch versions under `0.11` without necessarily needing to pull them into the latest version of `dbt-core`.
|
|
||||||
- **Feature Branches** track individual features and fixes. On completion they should be merged into the trunk branch or a specific release branch.
|
|
||||||
|
|
||||||
## Getting the code
|
## Getting the code
|
||||||
|
|
||||||
@@ -85,15 +36,17 @@ You will need `git` in order to download and modify the `dbt-core` source code.
|
|||||||
|
|
||||||
If you are not a member of the `dbt-labs` GitHub organization, you can contribute to `dbt-core` by forking the `dbt-core` repository. For a detailed overview on forking, check out the [GitHub docs on forking](https://help.github.com/en/articles/fork-a-repo). In short, you will need to:
|
If you are not a member of the `dbt-labs` GitHub organization, you can contribute to `dbt-core` by forking the `dbt-core` repository. For a detailed overview on forking, check out the [GitHub docs on forking](https://help.github.com/en/articles/fork-a-repo). In short, you will need to:
|
||||||
|
|
||||||
1. fork the `dbt-core` repository
|
1. Fork the `dbt-core` repository
|
||||||
2. clone your fork locally
|
2. Clone your fork locally
|
||||||
3. check out a new branch for your proposed changes
|
3. Check out a new branch for your proposed changes
|
||||||
4. push changes to your fork
|
4. Push changes to your fork
|
||||||
5. open a pull request against `dbt-labs/dbt` from your forked repository
|
5. Open a pull request against `dbt-labs/dbt-core` from your forked repository
|
||||||
|
|
||||||
### dbt Labs contributors
|
### dbt Labs contributors
|
||||||
|
|
||||||
If you are a member of the `dbt-labs` GitHub organization, you will have push access to the `dbt-core` repo. Rather than forking `dbt-core` to make your changes, just clone the repository, check out a new branch, and push directly to that branch.
|
If you are a member of the `dbt-labs` GitHub organization, you will have push access to the `dbt-core` repo. Rather than forking `dbt-core` to make your changes, just clone the repository, check out a new branch, and push directly to that branch. Branch names should be prefixed with `CT-XXX/` where:
|
||||||
|
* CT stands for 'core team'
|
||||||
|
* XXX stands for a JIRA ticket number
|
||||||
|
|
||||||
## Setting up an environment
|
## Setting up an environment
|
||||||
|
|
||||||
@@ -101,19 +54,21 @@ There are some tools that will be helpful to you in developing locally. While th
|
|||||||
|
|
||||||
### Tools
|
### Tools
|
||||||
|
|
||||||
A short list of tools used in `dbt-core` testing that will be helpful to your understanding:
|
These are the tools used in `dbt-core` development and testing:
|
||||||
|
|
||||||
- [`tox`](https://tox.readthedocs.io/en/latest/) to manage virtualenvs across python versions. We currently target the latest patch releases for Python 3.7, Python 3.8, and Python 3.9
|
- [`tox`](https://tox.readthedocs.io/en/latest/) to manage virtualenvs across python versions. We currently target the latest patch releases for Python 3.8, 3.9, 3.10 and 3.11
|
||||||
- [`pytest`](https://docs.pytest.org/en/latest/) to discover/run tests
|
- [`pytest`](https://docs.pytest.org/en/latest/) to define, discover, and run tests
|
||||||
- [`make`](https://users.cs.duke.edu/~ola/courses/programming/Makefiles/Makefiles.html) - but don't worry too much, nobody _really_ understands how make works and our Makefile is super simple
|
|
||||||
- [`flake8`](https://flake8.pycqa.org/en/latest/) for code linting
|
- [`flake8`](https://flake8.pycqa.org/en/latest/) for code linting
|
||||||
- [`black`](https://github.com/psf/black) for code formatting
|
- [`black`](https://github.com/psf/black) for code formatting
|
||||||
- [`mypy`](https://mypy.readthedocs.io/en/stable/) for static type checking
|
- [`mypy`](https://mypy.readthedocs.io/en/stable/) for static type checking
|
||||||
- [Github Actions](https://github.com/features/actions)
|
- [`pre-commit`](https://pre-commit.com) to easily run those checks
|
||||||
|
- [`changie`](https://changie.dev/) to create changelog entries, without merge conflicts
|
||||||
|
- [`make`](https://users.cs.duke.edu/~ola/courses/programming/Makefiles/Makefiles.html) to run multiple setup or test steps in combination. Don't worry too much, nobody _really_ understands how `make` works, and our Makefile aims to be super simple.
|
||||||
|
- [GitHub Actions](https://github.com/features/actions) for automating tests and checks, once a PR is pushed to the `dbt-core` repository
|
||||||
|
|
||||||
A deep understanding of these tools is not required to effectively contribute to `dbt-core`, but we recommend checking out the attached documentation if you're interested in learning more about them.
|
A deep understanding of these tools is not required to effectively contribute to `dbt-core`, but we recommend checking out the attached documentation if you're interested in learning more about each one.
|
||||||
|
|
||||||
#### virtual environments
|
#### Virtual environments
|
||||||
|
|
||||||
We strongly recommend using virtual environments when developing code in `dbt-core`. We recommend creating this virtualenv
|
We strongly recommend using virtual environments when developing code in `dbt-core`. We recommend creating this virtualenv
|
||||||
in the root of the `dbt-core` repository. To create a new virtualenv, run:
|
in the root of the `dbt-core` repository. To create a new virtualenv, run:
|
||||||
@@ -124,12 +79,12 @@ source env/bin/activate
|
|||||||
|
|
||||||
This will create and activate a new Python virtual environment.
|
This will create and activate a new Python virtual environment.
|
||||||
|
|
||||||
#### docker and docker-compose
|
#### Docker and `docker-compose`
|
||||||
|
|
||||||
Docker and docker-compose are both used in testing. Specific instructions for your OS can be found [here](https://docs.docker.com/get-docker/).
|
Docker and `docker-compose` are both used in testing. Specific instructions for your OS can be found [here](https://docs.docker.com/get-docker/).
|
||||||
|
|
||||||
|
|
||||||
#### postgres (optional)
|
#### Postgres (optional)
|
||||||
|
|
||||||
For testing, and later in the examples in this document, you may want to have `psql` available so you can poke around in the database and see what happened. We recommend that you use [homebrew](https://brew.sh/) for that on macOS, and your package manager on Linux. You can install any version of the postgres client that you'd like. On macOS, with homebrew setup, you can run:
|
For testing, and later in the examples in this document, you may want to have `psql` available so you can poke around in the database and see what happened. We recommend that you use [homebrew](https://brew.sh/) for that on macOS, and your package manager on Linux. You can install any version of the postgres client that you'd like. On macOS, with homebrew setup, you can run:
|
||||||
|
|
||||||
@@ -141,32 +96,37 @@ brew install postgresql
|
|||||||
|
|
||||||
### Installation
|
### Installation
|
||||||
|
|
||||||
First make sure that you set up your `virtualenv` as described in [Setting up an environment](#setting-up-an-environment). Also ensure you have the latest version of pip installed with `pip install --upgrade pip`. Next, install `dbt-core` (and its dependencies) with:
|
First make sure that you set up your `virtualenv` as described in [Setting up an environment](#setting-up-an-environment). Also ensure you have the latest version of pip installed with `pip install --upgrade pip`. Next, install `dbt-core` (and its dependencies):
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
make dev
|
make dev
|
||||||
# or
|
```
|
||||||
|
or, alternatively:
|
||||||
|
```sh
|
||||||
pip install -r dev-requirements.txt -r editable-requirements.txt
|
pip install -r dev-requirements.txt -r editable-requirements.txt
|
||||||
|
pre-commit install
|
||||||
```
|
```
|
||||||
|
|
||||||
When `dbt-core` is installed this way, any changes you make to the `dbt-core` source code will be reflected immediately in your next `dbt-core` run.
|
When installed in this way, any changes you make to your local copy of the source code will be reflected immediately in your next `dbt` run.
|
||||||
|
|
||||||
|
|
||||||
### Running `dbt-core`
|
### Running `dbt-core`
|
||||||
|
|
||||||
With your virtualenv activated, the `dbt-core` script should point back to the source code you've cloned on your machine. You can verify this by running `which dbt`. This command should show you a path to an executable in your virtualenv.
|
With your virtualenv activated, the `dbt` script should point back to the source code you've cloned on your machine. You can verify this by running `which dbt`. This command should show you a path to an executable in your virtualenv.
|
||||||
|
|
||||||
Configure your [profile](https://docs.getdbt.com/docs/configure-your-profile) as necessary to connect to your target databases. It may be a good idea to add a new profile pointing to a local postgres instance, or a specific test sandbox within your data warehouse if appropriate.
|
Configure your [profile](https://docs.getdbt.com/docs/configure-your-profile) as necessary to connect to your target databases. It may be a good idea to add a new profile pointing to a local Postgres instance, or a specific test sandbox within your data warehouse if appropriate. Make sure to create a profile before running integration tests.
|
||||||
|
|
||||||
## Testing
|
## Testing
|
||||||
|
|
||||||
Getting the `dbt-core` integration tests set up in your local environment will be very helpful as you start to make changes to your local version of `dbt-core`. The section that follows outlines some helpful tips for setting up the test environment.
|
Once you're able to manually test that your code change is working as expected, it's important to run existing automated tests, as well as adding some new ones. These tests will ensure that:
|
||||||
|
- Your code changes do not unexpectedly break other established functionality
|
||||||
|
- Your code changes can handle all known edge cases
|
||||||
|
- The functionality you're adding will _keep_ working in the future
|
||||||
|
|
||||||
Although `dbt-core` works with a number of different databases, you won't need to supply credentials for every one of these databases in your test environment. Instead you can test all dbt-core code changes with Python and Postgres.
|
Although `dbt-core` works with a number of different databases, you won't need to supply credentials for every one of these databases in your test environment. Instead, you can test most `dbt-core` code changes with Python and Postgres.
|
||||||
|
|
||||||
### Initial setup
|
### Initial setup
|
||||||
|
|
||||||
We recommend starting with `dbt-core`'s Postgres tests. These tests cover most of the functionality in `dbt-core`, are the fastest to run, and are the easiest to set up. To run the Postgres integration tests, you'll have to do one extra step of setting up the test database:
|
Postgres offers the easiest way to test most `dbt-core` functionality today. They are the fastest to run, and the easiest to set up. To run the Postgres integration tests, you'll have to do one extra step of setting up the test database:
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
make setup-db
|
make setup-db
|
||||||
@@ -192,48 +152,79 @@ make test
|
|||||||
# Runs postgres integration tests with py38 in "fail fast" mode.
|
# Runs postgres integration tests with py38 in "fail fast" mode.
|
||||||
make integration
|
make integration
|
||||||
```
|
```
|
||||||
> These make targets assume you have a local install of a recent version of [`tox`](https://tox.readthedocs.io/en/latest/) for unit/integration testing and pre-commit for code quality checks,
|
> These make targets assume you have a local installation of a recent version of [`tox`](https://tox.readthedocs.io/en/latest/) for unit/integration testing and pre-commit for code quality checks,
|
||||||
> unless you choose a Docker container to run tests. Run `make help` for more info.
|
> unless you choose a Docker container to run tests. Run `make help` for more info.
|
||||||
|
|
||||||
Check out the other targets in the Makefile to see other commonly used test
|
Check out the other targets in the Makefile to see other commonly used test
|
||||||
suites.
|
suites.
|
||||||
|
|
||||||
#### `pre-commit`
|
#### `pre-commit`
|
||||||
[`pre-commit`](https://pre-commit.com) takes care of running all code-checks for formatting and linting. Run `make dev` to install `pre-commit` in your local environment. Once this is done you can use any of the linter-based make targets as well as a git pre-commit hook that will ensure proper formatting and linting.
|
[`pre-commit`](https://pre-commit.com) takes care of running all code-checks for formatting and linting. Run `make dev` to install `pre-commit` in your local environment (we recommend running this command with a python virtual environment active). This command installs several pip executables including black, mypy, and flake8. Once this is done you can use any of the linter-based make targets as well as a git pre-commit hook that will ensure proper formatting and linting.
|
||||||
|
|
||||||
#### `tox`
|
#### `tox`
|
||||||
|
|
||||||
[`tox`](https://tox.readthedocs.io/en/latest/) takes care of managing virtualenvs and installing dependencies in order to run tests. You can also run tests in parallel, for example, you can run unit tests for Python 3.7, Python 3.8, and Python 3.9 checks in parallel with `tox -p`. Also, you can run unit tests for specific python versions with `tox -e py37`. The configuration for these tests is located in `tox.ini`.
|
[`tox`](https://tox.readthedocs.io/en/latest/) takes care of managing virtualenvs and installing dependencies in order to run tests. You can also run tests in parallel, for example, you can run unit tests for Python 3.8, Python 3.9, Python 3.10 and Python 3.11 checks in parallel with `tox -p`. Also, you can run unit tests for specific python versions with `tox -e py38`. The configuration for these tests is located in `tox.ini`.
|
||||||
|
|
||||||
#### `pytest`
|
#### `pytest`
|
||||||
|
|
||||||
Finally, you can also run a specific test or group of tests using [`pytest`](https://docs.pytest.org/en/latest/) directly. With a virtualenv
|
Finally, you can also run a specific test or group of tests using [`pytest`](https://docs.pytest.org/en/latest/) directly. With a virtualenv active and dev dependencies installed you can do things like:
|
||||||
active and dev dependencies installed you can do things like:
|
|
||||||
```sh
|
```sh
|
||||||
# run specific postgres integration tests
|
|
||||||
python -m pytest -m profile_postgres test/integration/001_simple_copy_test
|
|
||||||
# run all unit tests in a file
|
# run all unit tests in a file
|
||||||
python -m pytest test/unit/test_graph.py
|
python3 -m pytest tests/unit/test_graph.py
|
||||||
# run a specific unit test
|
# run a specific unit test
|
||||||
python -m pytest test/unit/test_graph.py::GraphTest::test__dependency_list
|
python3 -m pytest tests/unit/test_graph.py::GraphTest::test__dependency_list
|
||||||
|
# run specific Postgres functional tests
|
||||||
|
python3 -m pytest tests/functional/sources
|
||||||
```
|
```
|
||||||
> [Here](https://docs.pytest.org/en/reorganize-docs/new-docs/user/commandlineuseful.html)
|
|
||||||
> is a list of useful command-line options for `pytest` to use while developing.
|
|
||||||
|
|
||||||
## Adding CHANGELOG Entry
|
> See [pytest usage docs](https://docs.pytest.org/en/6.2.x/usage.html) for an overview of useful command-line options.
|
||||||
|
|
||||||
We use [changie](https://changie.dev) to generate `CHANGELOG` entries. Do not edit the `CHANGELOG.md` directly. Your modifications will be lost.
|
### Unit, Integration, Functional?
|
||||||
|
|
||||||
|
Here are some general rules for adding tests:
|
||||||
|
* unit tests (`tests/unit`) don’t need to access a database; "pure Python" tests should be written as unit tests
|
||||||
|
* functional tests (`tests/functional`) cover anything that interacts with a database, namely adapter functionality
|
||||||
|
|
||||||
|
## Debugging
|
||||||
|
|
||||||
|
1. The logs for a `dbt run` have stack traces and other information for debugging errors (in `logs/dbt.log` in your project directory).
|
||||||
|
2. Try using a debugger, like `ipdb`. For pytest: `--pdb --pdbcls=IPython.terminal.debugger:pdb`
|
||||||
|
3. Sometimes, it’s easier to debug on a single thread: `dbt --single-threaded run`
|
||||||
|
4. To make print statements from Jinja macros: `{{ log(msg, info=true) }}`
|
||||||
|
5. You can also add `{{ debug() }}` statements, which will drop you into some auto-generated code that the macro wrote.
|
||||||
|
6. The dbt “artifacts” are written out to the ‘target’ directory of your dbt project. They are in unformatted json, which can be hard to read. Format them with:
|
||||||
|
> python -m json.tool target/run_results.json > run_results.json
|
||||||
|
|
||||||
|
### Assorted development tips
|
||||||
|
* Append `# type: ignore` to the end of a line if you need to disable `mypy` on that line.
|
||||||
|
* Sometimes flake8 complains about lines that are actually fine, in which case you can put a comment on the line such as: # noqa or # noqa: ANNN, where ANNN is the error code that flake8 issues.
|
||||||
|
* To collect output for `CProfile`, run dbt with the `-r` option and the name of an output file, i.e. `dbt -r dbt.cprof run`. If you just want to profile parsing, you can do: `dbt -r dbt.cprof parse`. `pip` install `snakeviz` to view the output. Run `snakeviz dbt.cprof` and output will be rendered in a browser window.
|
||||||
|
|
||||||
|
## Adding or modifying a CHANGELOG Entry
|
||||||
|
|
||||||
|
We use [changie](https://changie.dev) to generate `CHANGELOG` entries. **Note:** Do not edit the `CHANGELOG.md` directly. Your modifications will be lost.
|
||||||
|
|
||||||
Follow the steps to [install `changie`](https://changie.dev/guide/installation/) for your system.
|
Follow the steps to [install `changie`](https://changie.dev/guide/installation/) for your system.
|
||||||
|
|
||||||
Once changie is installed and your PR is created, simply run `changie new` and changie will walk you through the process of creating a changelog entry. Commit the file that's created and your changelog entry is complete!
|
Once changie is installed and your PR is created for a new feature, simply run the following command and changie will walk you through the process of creating a changelog entry:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
changie new
|
||||||
|
```
|
||||||
|
|
||||||
|
Commit the file that's created and your changelog entry is complete!
|
||||||
|
|
||||||
|
If you are contributing to a feature already in progress, you will modify the changie yaml file in dbt/.changes/unreleased/ related to your change. If you need help finding this file, please ask within the discussion for the pull request!
|
||||||
|
|
||||||
|
You don't need to worry about which `dbt-core` version your change will go into. Just create the changelog entry with `changie`, and open your PR against the `main` branch. All merged changes will be included in the next minor version of `dbt-core`. The Core maintainers _may_ choose to "backport" specific changes in order to patch older minor versions. In that case, a maintainer will take care of that backport after merging your PR, before releasing the new version of `dbt-core`.
|
||||||
|
|
||||||
## Submitting a Pull Request
|
## Submitting a Pull Request
|
||||||
|
|
||||||
dbt Labs provides a CI environment to test changes to specific adapters, and periodic maintenance checks of `dbt-core` through Github Actions. For example, if you submit a pull request to the `dbt-redshift` repo, GitHub will trigger automated code checks and tests against Redshift.
|
Code can be merged into the current development branch `main` by opening a pull request. A `dbt-core` maintainer will review your PR. They may suggest code revision for style or clarity, or request that you add unit or integration test(s). These are good things! We believe that, with a little bit of help, anyone can contribute high-quality code.
|
||||||
|
|
||||||
A `dbt-core` maintainer will review your PR. They may suggest code revision for style or clarity, or request that you add unit or integration test(s). These are good things! We believe that, with a little bit of help, anyone can contribute high-quality code.
|
|
||||||
- First time contributors should note code checks + unit tests require a maintainer to approve.
|
|
||||||
|
|
||||||
|
Automated tests run via GitHub Actions. If you're a first-time contributor, all tests (including code checks and unit tests) will require a maintainer to approve. Changes in the `dbt-core` repository trigger integration tests against Postgres. dbt Labs also provides CI environments in which to test changes to other adapters, triggered by PRs in those adapters' repositories, as well as periodic maintenance checks of each adapter in concert with the latest `dbt-core` code changes.
|
||||||
|
|
||||||
Once all tests are passing and your PR has been approved, a `dbt-core` maintainer will merge your changes into the active development branch. And that's it! Happy developing :tada:
|
Once all tests are passing and your PR has been approved, a `dbt-core` maintainer will merge your changes into the active development branch. And that's it! Happy developing :tada:
|
||||||
|
|
||||||
|
Sometimes, the content license agreement auto-check bot doesn't find a user's entry in its roster. If you need to force a rerun, add `@cla-bot check` in a comment on the pull request.
|
||||||
|
|||||||
@@ -3,13 +3,13 @@
|
|||||||
# See `/docker` for a generic and production-ready docker file
|
# See `/docker` for a generic and production-ready docker file
|
||||||
##
|
##
|
||||||
|
|
||||||
FROM ubuntu:20.04
|
FROM ubuntu:22.04
|
||||||
|
|
||||||
ENV DEBIAN_FRONTEND noninteractive
|
ENV DEBIAN_FRONTEND noninteractive
|
||||||
|
|
||||||
RUN apt-get update \
|
RUN apt-get update \
|
||||||
&& apt-get install -y --no-install-recommends \
|
&& apt-get install -y --no-install-recommends \
|
||||||
software-properties-common \
|
software-properties-common gpg-agent \
|
||||||
&& add-apt-repository ppa:git-core/ppa -y \
|
&& add-apt-repository ppa:git-core/ppa -y \
|
||||||
&& apt-get dist-upgrade -y \
|
&& apt-get dist-upgrade -y \
|
||||||
&& apt-get install -y --no-install-recommends \
|
&& apt-get install -y --no-install-recommends \
|
||||||
@@ -30,22 +30,21 @@ RUN apt-get update \
|
|||||||
unixodbc-dev \
|
unixodbc-dev \
|
||||||
&& add-apt-repository ppa:deadsnakes/ppa \
|
&& add-apt-repository ppa:deadsnakes/ppa \
|
||||||
&& apt-get install -y \
|
&& apt-get install -y \
|
||||||
python \
|
python-is-python3 \
|
||||||
python-dev \
|
python-dev-is-python3 \
|
||||||
python3-pip \
|
python3-pip \
|
||||||
python3.6 \
|
|
||||||
python3.6-dev \
|
|
||||||
python3-pip \
|
|
||||||
python3.6-venv \
|
|
||||||
python3.7 \
|
|
||||||
python3.7-dev \
|
|
||||||
python3.7-venv \
|
|
||||||
python3.8 \
|
python3.8 \
|
||||||
python3.8-dev \
|
python3.8-dev \
|
||||||
python3.8-venv \
|
python3.8-venv \
|
||||||
python3.9 \
|
python3.9 \
|
||||||
python3.9-dev \
|
python3.9-dev \
|
||||||
python3.9-venv \
|
python3.9-venv \
|
||||||
|
python3.10 \
|
||||||
|
python3.10-dev \
|
||||||
|
python3.10-venv \
|
||||||
|
python3.11 \
|
||||||
|
python3.11-dev \
|
||||||
|
python3.11-venv \
|
||||||
&& apt-get clean \
|
&& apt-get clean \
|
||||||
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
|
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
|
||||||
|
|
||||||
|
|||||||
60
Makefile
60
Makefile
@@ -6,12 +6,42 @@ ifeq ($(USE_DOCKER),true)
|
|||||||
DOCKER_CMD := docker-compose run --rm test
|
DOCKER_CMD := docker-compose run --rm test
|
||||||
endif
|
endif
|
||||||
|
|
||||||
.PHONY: dev
|
#
|
||||||
dev: ## Installs dbt-* packages in develop mode along with development dependencies.
|
# To override CI_flags, create a file at this repo's root dir named `makefile.test.env`. Fill it
|
||||||
|
# with any ENV_VAR overrides required by your test environment, e.g.
|
||||||
|
# DBT_TEST_USER_1=user
|
||||||
|
# LOG_DIR="dir with a space in it"
|
||||||
|
#
|
||||||
|
# Warn: Restrict each line to one variable only.
|
||||||
|
#
|
||||||
|
ifeq (./makefile.test.env,$(wildcard ./makefile.test.env))
|
||||||
|
include ./makefile.test.env
|
||||||
|
endif
|
||||||
|
|
||||||
|
CI_FLAGS =\
|
||||||
|
DBT_TEST_USER_1=$(if $(DBT_TEST_USER_1),$(DBT_TEST_USER_1),dbt_test_user_1)\
|
||||||
|
DBT_TEST_USER_2=$(if $(DBT_TEST_USER_2),$(DBT_TEST_USER_2),dbt_test_user_2)\
|
||||||
|
DBT_TEST_USER_3=$(if $(DBT_TEST_USER_3),$(DBT_TEST_USER_3),dbt_test_user_3)\
|
||||||
|
RUSTFLAGS=$(if $(RUSTFLAGS),$(RUSTFLAGS),"-D warnings")\
|
||||||
|
LOG_DIR=$(if $(LOG_DIR),$(LOG_DIR),./logs)\
|
||||||
|
DBT_LOG_FORMAT=$(if $(DBT_LOG_FORMAT),$(DBT_LOG_FORMAT),json)
|
||||||
|
|
||||||
|
|
||||||
|
.PHONY: dev_req
|
||||||
|
dev_req: ## Installs dbt-* packages in develop mode along with only development dependencies.
|
||||||
|
@\
|
||||||
|
pip install -r dev-requirements.txt
|
||||||
|
pip install -r editable-requirements.txt
|
||||||
|
|
||||||
|
.PHONY: dev
|
||||||
|
dev: dev_req ## Installs dbt-* packages in develop mode along with development dependencies and pre-commit.
|
||||||
@\
|
@\
|
||||||
pip install -r dev-requirements.txt -r editable-requirements.txt && \
|
|
||||||
pre-commit install
|
pre-commit install
|
||||||
|
|
||||||
|
.PHONY: proto_types
|
||||||
|
proto_types: ## generates google protobuf python file from types.proto
|
||||||
|
protoc -I=./core/dbt/events --python_out=./core/dbt/events ./core/dbt/events/types.proto
|
||||||
|
|
||||||
.PHONY: mypy
|
.PHONY: mypy
|
||||||
mypy: .env ## Runs mypy against staged changes for static type checking.
|
mypy: .env ## Runs mypy against staged changes for static type checking.
|
||||||
@\
|
@\
|
||||||
@@ -34,27 +64,34 @@ lint: .env ## Runs flake8 and mypy code checks against staged changes.
|
|||||||
$(DOCKER_CMD) pre-commit run mypy-check --hook-stage manual | grep -v "INFO"
|
$(DOCKER_CMD) pre-commit run mypy-check --hook-stage manual | grep -v "INFO"
|
||||||
|
|
||||||
.PHONY: unit
|
.PHONY: unit
|
||||||
unit: .env ## Runs unit tests with py38.
|
unit: .env ## Runs unit tests with py
|
||||||
@\
|
@\
|
||||||
$(DOCKER_CMD) tox -e py38
|
$(DOCKER_CMD) tox -e py
|
||||||
|
|
||||||
.PHONY: test
|
.PHONY: test
|
||||||
test: .env ## Runs unit tests with py38 and code checks against staged changes.
|
test: .env ## Runs unit tests with py and code checks against staged changes.
|
||||||
@\
|
@\
|
||||||
$(DOCKER_CMD) tox -e py38; \
|
$(DOCKER_CMD) tox -e py; \
|
||||||
$(DOCKER_CMD) pre-commit run black-check --hook-stage manual | grep -v "INFO"; \
|
$(DOCKER_CMD) pre-commit run black-check --hook-stage manual | grep -v "INFO"; \
|
||||||
$(DOCKER_CMD) pre-commit run flake8-check --hook-stage manual | grep -v "INFO"; \
|
$(DOCKER_CMD) pre-commit run flake8-check --hook-stage manual | grep -v "INFO"; \
|
||||||
$(DOCKER_CMD) pre-commit run mypy-check --hook-stage manual | grep -v "INFO"
|
$(DOCKER_CMD) pre-commit run mypy-check --hook-stage manual | grep -v "INFO"
|
||||||
|
|
||||||
.PHONY: integration
|
.PHONY: integration
|
||||||
integration: .env ## Runs postgres integration tests with py38.
|
integration: .env ## Runs postgres integration tests with py-integration
|
||||||
@\
|
@\
|
||||||
$(DOCKER_CMD) tox -e py38-integration -- -nauto
|
$(CI_FLAGS) $(DOCKER_CMD) tox -e py-integration -- -nauto
|
||||||
|
|
||||||
.PHONY: integration-fail-fast
|
.PHONY: integration-fail-fast
|
||||||
integration-fail-fast: .env ## Runs postgres integration tests with py38 in "fail fast" mode.
|
integration-fail-fast: .env ## Runs postgres integration tests with py-integration in "fail fast" mode.
|
||||||
@\
|
@\
|
||||||
$(DOCKER_CMD) tox -e py38-integration -- -x -nauto
|
$(DOCKER_CMD) tox -e py-integration -- -x -nauto
|
||||||
|
|
||||||
|
.PHONY: interop
|
||||||
|
interop: clean
|
||||||
|
@\
|
||||||
|
mkdir $(LOG_DIR) && \
|
||||||
|
$(CI_FLAGS) $(DOCKER_CMD) tox -e py-integration -- -nauto && \
|
||||||
|
LOG_DIR=$(LOG_DIR) cargo run --manifest-path test/interop/log_parsing/Cargo.toml
|
||||||
|
|
||||||
.PHONY: setup-db
|
.PHONY: setup-db
|
||||||
setup-db: ## Setup Postgres database with docker-compose for system testing.
|
setup-db: ## Setup Postgres database with docker-compose for system testing.
|
||||||
@@ -77,6 +114,7 @@ endif
|
|||||||
clean: ## Resets development environment.
|
clean: ## Resets development environment.
|
||||||
@echo 'cleaning repo...'
|
@echo 'cleaning repo...'
|
||||||
@rm -f .coverage
|
@rm -f .coverage
|
||||||
|
@rm -f .coverage.*
|
||||||
@rm -rf .eggs/
|
@rm -rf .eggs/
|
||||||
@rm -f .env
|
@rm -f .env
|
||||||
@rm -rf .tox/
|
@rm -rf .tox/
|
||||||
|
|||||||
@@ -9,7 +9,7 @@
|
|||||||
|
|
||||||
**[dbt](https://www.getdbt.com/)** enables data analysts and engineers to transform their data using the same practices that software engineers use to build applications.
|
**[dbt](https://www.getdbt.com/)** enables data analysts and engineers to transform their data using the same practices that software engineers use to build applications.
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
## Understanding dbt
|
## Understanding dbt
|
||||||
|
|
||||||
@@ -21,7 +21,7 @@ These select statements, or "models", form a dbt project. Models frequently buil
|
|||||||
|
|
||||||
## Getting started
|
## Getting started
|
||||||
|
|
||||||
- [Install dbt](https://docs.getdbt.com/docs/installation)
|
- [Install dbt](https://docs.getdbt.com/docs/get-started/installation)
|
||||||
- Read the [introduction](https://docs.getdbt.com/docs/introduction/) and [viewpoint](https://docs.getdbt.com/docs/about/viewpoint/)
|
- Read the [introduction](https://docs.getdbt.com/docs/introduction/) and [viewpoint](https://docs.getdbt.com/docs/about/viewpoint/)
|
||||||
|
|
||||||
## Join the dbt Community
|
## Join the dbt Community
|
||||||
|
|||||||
@@ -1 +1,2 @@
|
|||||||
recursive-include dbt/include *.py *.sql *.yml *.html *.md .gitkeep .gitignore
|
recursive-include dbt/include *.py *.sql *.yml *.html *.md .gitkeep .gitignore
|
||||||
|
include dbt/py.typed
|
||||||
|
|||||||
@@ -2,50 +2,59 @@
|
|||||||
|
|
||||||
## The following are individual files in this directory.
|
## The following are individual files in this directory.
|
||||||
|
|
||||||
### deprecations.py
|
|
||||||
|
|
||||||
### flags.py
|
|
||||||
|
|
||||||
### main.py
|
|
||||||
|
|
||||||
### tracking.py
|
|
||||||
|
|
||||||
### version.py
|
|
||||||
|
|
||||||
### lib.py
|
|
||||||
|
|
||||||
### node_types.py
|
|
||||||
|
|
||||||
### helper_types.py
|
|
||||||
|
|
||||||
### links.py
|
|
||||||
|
|
||||||
### semver.py
|
|
||||||
|
|
||||||
### ui.py
|
|
||||||
|
|
||||||
### compilation.py
|
### compilation.py
|
||||||
|
|
||||||
|
### constants.py
|
||||||
|
|
||||||
### dataclass_schema.py
|
### dataclass_schema.py
|
||||||
|
|
||||||
|
### deprecations.py
|
||||||
|
|
||||||
### exceptions.py
|
### exceptions.py
|
||||||
|
|
||||||
|
### flags.py
|
||||||
|
|
||||||
|
### helper_types.py
|
||||||
|
|
||||||
### hooks.py
|
### hooks.py
|
||||||
|
|
||||||
|
### lib.py
|
||||||
|
|
||||||
|
### links.py
|
||||||
|
|
||||||
### logger.py
|
### logger.py
|
||||||
|
|
||||||
|
### main.py
|
||||||
|
|
||||||
|
### node_types.py
|
||||||
|
|
||||||
### profiler.py
|
### profiler.py
|
||||||
|
|
||||||
|
### selected_resources.py
|
||||||
|
|
||||||
|
### semver.py
|
||||||
|
|
||||||
|
### tracking.py
|
||||||
|
|
||||||
|
### ui.py
|
||||||
|
|
||||||
### utils.py
|
### utils.py
|
||||||
|
|
||||||
|
### version.py
|
||||||
|
|
||||||
|
|
||||||
## The subdirectories will be documented in a README in the subdirectory
|
## The subdirectories will be documented in a README in the subdirectory
|
||||||
* config
|
|
||||||
* include
|
|
||||||
* adapters
|
* adapters
|
||||||
* context
|
* cli
|
||||||
* deps
|
|
||||||
* graph
|
|
||||||
* task
|
|
||||||
* clients
|
* clients
|
||||||
|
* config
|
||||||
|
* context
|
||||||
|
* contracts
|
||||||
|
* deps
|
||||||
|
* docs
|
||||||
* events
|
* events
|
||||||
|
* graph
|
||||||
|
* include
|
||||||
|
* parser
|
||||||
|
* task
|
||||||
|
* tests
|
||||||
|
|||||||
7
core/dbt/__init__.py
Normal file
7
core/dbt/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
# N.B.
|
||||||
|
# This will add to the package’s __path__ all subdirectories of directories on sys.path named after the package which effectively combines both modules into a single namespace (dbt.adapters)
|
||||||
|
# The matching statement is in plugins/postgres/dbt/__init__.py
|
||||||
|
|
||||||
|
from pkgutil import extend_path
|
||||||
|
|
||||||
|
__path__ = extend_path(__path__, __name__)
|
||||||
@@ -1 +1,30 @@
|
|||||||
# Adapters README
|
# Adapters README
|
||||||
|
|
||||||
|
The Adapters module is responsible for defining database connection methods, caching information from databases, how relations are defined, and the two major connection types we have - base and sql.
|
||||||
|
|
||||||
|
# Directories
|
||||||
|
|
||||||
|
## `base`
|
||||||
|
|
||||||
|
Defines the base implementation Adapters can use to build out full functionality.
|
||||||
|
|
||||||
|
## `sql`
|
||||||
|
|
||||||
|
Defines a SQL implementation for adapters that initially inherits the above base implementation and comes with some premade methods and macros that can be overwritten as needed per adapter. (This is the most common type of adapter.)
|
||||||
|
|
||||||
|
# Files
|
||||||
|
|
||||||
|
## `cache.py`
|
||||||
|
|
||||||
|
Cached information from the database.
|
||||||
|
|
||||||
|
## `factory.py`
|
||||||
|
Defines how we generate adapter objects
|
||||||
|
|
||||||
|
## `protocol.py`
|
||||||
|
|
||||||
|
Defines various interfaces for various adapter objects. Helps mypy correctly resolve methods.
|
||||||
|
|
||||||
|
## `reference_keys.py`
|
||||||
|
|
||||||
|
Configures naming scheme for cache elements to be universal.
|
||||||
|
|||||||
7
core/dbt/adapters/__init__.py
Normal file
7
core/dbt/adapters/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
# N.B.
|
||||||
|
# This will add to the package’s __path__ all subdirectories of directories on sys.path named after the package which effectively combines both modules into a single namespace (dbt.adapters)
|
||||||
|
# The matching statement is in plugins/postgres/dbt/adapters/__init__.py
|
||||||
|
|
||||||
|
from pkgutil import extend_path
|
||||||
|
|
||||||
|
__path__ = extend_path(__path__, __name__)
|
||||||
10
core/dbt/adapters/base/README.md
Normal file
10
core/dbt/adapters/base/README.md
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
|
||||||
|
## Base adapters
|
||||||
|
|
||||||
|
### impl.py
|
||||||
|
|
||||||
|
The class `SQLAdapter` in [base/imply.py](https://github.com/dbt-labs/dbt-core/blob/main/core/dbt/adapters/base/impl.py) is a (mostly) abstract object that adapter objects inherit from. The base class scaffolds out methods that every adapter project usually should implement for smooth communication between dbt and database.
|
||||||
|
|
||||||
|
Some target databases require more or fewer methods--it all depends on what the warehouse's featureset is.
|
||||||
|
|
||||||
|
Look into the class for function-level comments.
|
||||||
@@ -1,14 +1,19 @@
|
|||||||
# these are all just exports, #noqa them so flake8 will be happy
|
# these are all just exports, #noqa them so flake8 will be happy
|
||||||
|
|
||||||
# TODO: Should we still include this in the `adapters` namespace?
|
# TODO: Should we still include this in the `adapters` namespace?
|
||||||
from dbt.contracts.connection import Credentials # noqa
|
from dbt.contracts.connection import Credentials # noqa: F401
|
||||||
from dbt.adapters.base.meta import available # noqa
|
from dbt.adapters.base.meta import available # noqa: F401
|
||||||
from dbt.adapters.base.connections import BaseConnectionManager # noqa
|
from dbt.adapters.base.connections import BaseConnectionManager # noqa: F401
|
||||||
from dbt.adapters.base.relation import ( # noqa
|
from dbt.adapters.base.relation import ( # noqa: F401
|
||||||
BaseRelation,
|
BaseRelation,
|
||||||
RelationType,
|
RelationType,
|
||||||
SchemaSearchMap,
|
SchemaSearchMap,
|
||||||
)
|
)
|
||||||
from dbt.adapters.base.column import Column # noqa
|
from dbt.adapters.base.column import Column # noqa: F401
|
||||||
from dbt.adapters.base.impl import AdapterConfig, BaseAdapter # noqa
|
from dbt.adapters.base.impl import ( # noqa: F401
|
||||||
from dbt.adapters.base.plugin import AdapterPlugin # noqa
|
AdapterConfig,
|
||||||
|
BaseAdapter,
|
||||||
|
PythonJobHelper,
|
||||||
|
ConstraintSupport,
|
||||||
|
)
|
||||||
|
from dbt.adapters.base.plugin import AdapterPlugin # noqa: F401
|
||||||
|
|||||||
@@ -2,7 +2,7 @@ from dataclasses import dataclass
|
|||||||
import re
|
import re
|
||||||
from typing import Dict, ClassVar, Any, Optional
|
from typing import Dict, ClassVar, Any, Optional
|
||||||
|
|
||||||
from dbt.exceptions import RuntimeException
|
from dbt.exceptions import DbtRuntimeError
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
@@ -12,6 +12,7 @@ class Column:
|
|||||||
"TIMESTAMP": "TIMESTAMP",
|
"TIMESTAMP": "TIMESTAMP",
|
||||||
"FLOAT": "FLOAT",
|
"FLOAT": "FLOAT",
|
||||||
"INTEGER": "INT",
|
"INTEGER": "INT",
|
||||||
|
"BOOLEAN": "BOOLEAN",
|
||||||
}
|
}
|
||||||
column: str
|
column: str
|
||||||
dtype: str
|
dtype: str
|
||||||
@@ -59,6 +60,7 @@ class Column:
|
|||||||
"float",
|
"float",
|
||||||
"double precision",
|
"double precision",
|
||||||
"float8",
|
"float8",
|
||||||
|
"double",
|
||||||
]
|
]
|
||||||
|
|
||||||
def is_integer(self) -> bool:
|
def is_integer(self) -> bool:
|
||||||
@@ -84,7 +86,7 @@ class Column:
|
|||||||
|
|
||||||
def string_size(self) -> int:
|
def string_size(self) -> int:
|
||||||
if not self.is_string():
|
if not self.is_string():
|
||||||
raise RuntimeException("Called string_size() on non-string field!")
|
raise DbtRuntimeError("Called string_size() on non-string field!")
|
||||||
|
|
||||||
if self.dtype == "text" or self.char_size is None:
|
if self.dtype == "text" or self.char_size is None:
|
||||||
# char_size should never be None. Handle it reasonably just in case
|
# char_size should never be None. Handle it reasonably just in case
|
||||||
@@ -123,7 +125,7 @@ class Column:
|
|||||||
def from_description(cls, name: str, raw_data_type: str) -> "Column":
|
def from_description(cls, name: str, raw_data_type: str) -> "Column":
|
||||||
match = re.match(r"([^(]+)(\([^)]+\))?", raw_data_type)
|
match = re.match(r"([^(]+)(\([^)]+\))?", raw_data_type)
|
||||||
if match is None:
|
if match is None:
|
||||||
raise RuntimeException(f'Could not interpret data type "{raw_data_type}"')
|
raise DbtRuntimeError(f'Could not interpret data type "{raw_data_type}"')
|
||||||
data_type, size_info = match.groups()
|
data_type, size_info = match.groups()
|
||||||
char_size = None
|
char_size = None
|
||||||
numeric_precision = None
|
numeric_precision = None
|
||||||
@@ -136,7 +138,7 @@ class Column:
|
|||||||
try:
|
try:
|
||||||
char_size = int(parts[0])
|
char_size = int(parts[0])
|
||||||
except ValueError:
|
except ValueError:
|
||||||
raise RuntimeException(
|
raise DbtRuntimeError(
|
||||||
f'Could not interpret data_type "{raw_data_type}": '
|
f'Could not interpret data_type "{raw_data_type}": '
|
||||||
f'could not convert "{parts[0]}" to an integer'
|
f'could not convert "{parts[0]}" to an integer'
|
||||||
)
|
)
|
||||||
@@ -144,14 +146,14 @@ class Column:
|
|||||||
try:
|
try:
|
||||||
numeric_precision = int(parts[0])
|
numeric_precision = int(parts[0])
|
||||||
except ValueError:
|
except ValueError:
|
||||||
raise RuntimeException(
|
raise DbtRuntimeError(
|
||||||
f'Could not interpret data_type "{raw_data_type}": '
|
f'Could not interpret data_type "{raw_data_type}": '
|
||||||
f'could not convert "{parts[0]}" to an integer'
|
f'could not convert "{parts[0]}" to an integer'
|
||||||
)
|
)
|
||||||
try:
|
try:
|
||||||
numeric_scale = int(parts[1])
|
numeric_scale = int(parts[1])
|
||||||
except ValueError:
|
except ValueError:
|
||||||
raise RuntimeException(
|
raise DbtRuntimeError(
|
||||||
f'Could not interpret data_type "{raw_data_type}": '
|
f'Could not interpret data_type "{raw_data_type}": '
|
||||||
f'could not convert "{parts[1]}" to an integer'
|
f'could not convert "{parts[1]}" to an integer'
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -1,10 +1,25 @@
|
|||||||
import abc
|
import abc
|
||||||
import os
|
import os
|
||||||
|
from time import sleep
|
||||||
|
import sys
|
||||||
|
import traceback
|
||||||
|
|
||||||
# multiprocessing.RLock is a function returning this type
|
# multiprocessing.RLock is a function returning this type
|
||||||
from multiprocessing.synchronize import RLock
|
from multiprocessing.synchronize import RLock
|
||||||
from threading import get_ident
|
from threading import get_ident
|
||||||
from typing import Dict, Tuple, Hashable, Optional, ContextManager, List, Union
|
from typing import (
|
||||||
|
Any,
|
||||||
|
Dict,
|
||||||
|
Tuple,
|
||||||
|
Hashable,
|
||||||
|
Optional,
|
||||||
|
ContextManager,
|
||||||
|
List,
|
||||||
|
Type,
|
||||||
|
Union,
|
||||||
|
Iterable,
|
||||||
|
Callable,
|
||||||
|
)
|
||||||
|
|
||||||
import agate
|
import agate
|
||||||
|
|
||||||
@@ -21,18 +36,24 @@ from dbt.contracts.graph.manifest import Manifest
|
|||||||
from dbt.adapters.base.query_headers import (
|
from dbt.adapters.base.query_headers import (
|
||||||
MacroQueryStringSetter,
|
MacroQueryStringSetter,
|
||||||
)
|
)
|
||||||
|
from dbt.events import AdapterLogger
|
||||||
from dbt.events.functions import fire_event
|
from dbt.events.functions import fire_event
|
||||||
from dbt.events.types import (
|
from dbt.events.types import (
|
||||||
NewConnection,
|
NewConnection,
|
||||||
ConnectionReused,
|
ConnectionReused,
|
||||||
|
ConnectionLeftOpenInCleanup,
|
||||||
ConnectionLeftOpen,
|
ConnectionLeftOpen,
|
||||||
ConnectionLeftOpen2,
|
ConnectionClosedInCleanup,
|
||||||
ConnectionClosed,
|
ConnectionClosed,
|
||||||
ConnectionClosed2,
|
|
||||||
Rollback,
|
Rollback,
|
||||||
RollbackFailed,
|
RollbackFailed,
|
||||||
)
|
)
|
||||||
|
from dbt.events.contextvars import get_node_info
|
||||||
from dbt import flags
|
from dbt import flags
|
||||||
|
from dbt.utils import cast_to_str
|
||||||
|
|
||||||
|
SleepTime = Union[int, float] # As taken by time.sleep.
|
||||||
|
AdapterHandle = Any # Adapter connection handle objects can be any class.
|
||||||
|
|
||||||
|
|
||||||
class BaseConnectionManager(metaclass=abc.ABCMeta):
|
class BaseConnectionManager(metaclass=abc.ABCMeta):
|
||||||
@@ -70,13 +91,13 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
|||||||
key = self.get_thread_identifier()
|
key = self.get_thread_identifier()
|
||||||
with self.lock:
|
with self.lock:
|
||||||
if key not in self.thread_connections:
|
if key not in self.thread_connections:
|
||||||
raise dbt.exceptions.InvalidConnectionException(key, list(self.thread_connections))
|
raise dbt.exceptions.InvalidConnectionError(key, list(self.thread_connections))
|
||||||
return self.thread_connections[key]
|
return self.thread_connections[key]
|
||||||
|
|
||||||
def set_thread_connection(self, conn: Connection) -> None:
|
def set_thread_connection(self, conn: Connection) -> None:
|
||||||
key = self.get_thread_identifier()
|
key = self.get_thread_identifier()
|
||||||
if key in self.thread_connections:
|
if key in self.thread_connections:
|
||||||
raise dbt.exceptions.InternalException(
|
raise dbt.exceptions.DbtInternalError(
|
||||||
"In set_thread_connection, existing connection exists for {}"
|
"In set_thread_connection, existing connection exists for {}"
|
||||||
)
|
)
|
||||||
self.thread_connections[key] = conn
|
self.thread_connections[key] = conn
|
||||||
@@ -116,57 +137,148 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
|||||||
:return: A context manager that handles exceptions raised by the
|
:return: A context manager that handles exceptions raised by the
|
||||||
underlying database.
|
underlying database.
|
||||||
"""
|
"""
|
||||||
raise dbt.exceptions.NotImplementedException(
|
raise dbt.exceptions.NotImplementedError(
|
||||||
"`exception_handler` is not implemented for this adapter!"
|
"`exception_handler` is not implemented for this adapter!"
|
||||||
)
|
)
|
||||||
|
|
||||||
def set_connection_name(self, name: Optional[str] = None) -> Connection:
|
def set_connection_name(self, name: Optional[str] = None) -> Connection:
|
||||||
conn_name: str
|
"""Called by 'acquire_connection' in BaseAdapter, which is called by
|
||||||
if name is None:
|
'connection_named', called by 'connection_for(node)'.
|
||||||
# if a name isn't specified, we'll re-use a single handle
|
Creates a connection for this thread if one doesn't already
|
||||||
# named 'master'
|
exist, and will rename an existing connection."""
|
||||||
conn_name = "master"
|
|
||||||
else:
|
|
||||||
if not isinstance(name, str):
|
|
||||||
raise dbt.exceptions.CompilerException(
|
|
||||||
f"For connection name, got {name} - not a string!"
|
|
||||||
)
|
|
||||||
assert isinstance(name, str)
|
|
||||||
conn_name = name
|
|
||||||
|
|
||||||
|
conn_name: str = "master" if name is None else name
|
||||||
|
|
||||||
|
# Get a connection for this thread
|
||||||
conn = self.get_if_exists()
|
conn = self.get_if_exists()
|
||||||
|
|
||||||
|
if conn and conn.name == conn_name and conn.state == "open":
|
||||||
|
# Found a connection and nothing to do, so just return it
|
||||||
|
return conn
|
||||||
|
|
||||||
if conn is None:
|
if conn is None:
|
||||||
|
# Create a new connection
|
||||||
conn = Connection(
|
conn = Connection(
|
||||||
type=Identifier(self.TYPE),
|
type=Identifier(self.TYPE),
|
||||||
name=None,
|
name=conn_name,
|
||||||
state=ConnectionState.INIT,
|
state=ConnectionState.INIT,
|
||||||
transaction_open=False,
|
transaction_open=False,
|
||||||
handle=None,
|
handle=None,
|
||||||
credentials=self.profile.credentials,
|
credentials=self.profile.credentials,
|
||||||
)
|
)
|
||||||
self.set_thread_connection(conn)
|
|
||||||
|
|
||||||
if conn.name == conn_name and conn.state == "open":
|
|
||||||
return conn
|
|
||||||
|
|
||||||
fire_event(NewConnection(conn_name=conn_name, conn_type=self.TYPE))
|
|
||||||
|
|
||||||
if conn.state == "open":
|
|
||||||
fire_event(ConnectionReused(conn_name=conn_name))
|
|
||||||
else:
|
|
||||||
conn.handle = LazyHandle(self.open)
|
conn.handle = LazyHandle(self.open)
|
||||||
|
# Add the connection to thread_connections for this thread
|
||||||
|
self.set_thread_connection(conn)
|
||||||
|
fire_event(
|
||||||
|
NewConnection(conn_name=conn_name, conn_type=self.TYPE, node_info=get_node_info())
|
||||||
|
)
|
||||||
|
else: # existing connection either wasn't open or didn't have the right name
|
||||||
|
if conn.state != "open":
|
||||||
|
conn.handle = LazyHandle(self.open)
|
||||||
|
if conn.name != conn_name:
|
||||||
|
orig_conn_name: str = conn.name or ""
|
||||||
conn.name = conn_name
|
conn.name = conn_name
|
||||||
|
fire_event(ConnectionReused(orig_conn_name=orig_conn_name, conn_name=conn_name))
|
||||||
|
|
||||||
return conn
|
return conn
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def retry_connection(
|
||||||
|
cls,
|
||||||
|
connection: Connection,
|
||||||
|
connect: Callable[[], AdapterHandle],
|
||||||
|
logger: AdapterLogger,
|
||||||
|
retryable_exceptions: Iterable[Type[Exception]],
|
||||||
|
retry_limit: int = 1,
|
||||||
|
retry_timeout: Union[Callable[[int], SleepTime], SleepTime] = 1,
|
||||||
|
_attempts: int = 0,
|
||||||
|
) -> Connection:
|
||||||
|
"""Given a Connection, set its handle by calling connect.
|
||||||
|
|
||||||
|
The calls to connect will be retried up to retry_limit times to deal with transient
|
||||||
|
connection errors. By default, one retry will be attempted if retryable_exceptions is set.
|
||||||
|
|
||||||
|
:param Connection connection: An instance of a Connection that needs a handle to be set,
|
||||||
|
usually when attempting to open it.
|
||||||
|
:param connect: A callable that returns the appropiate connection handle for a
|
||||||
|
given adapter. This callable will be retried retry_limit times if a subclass of any
|
||||||
|
Exception in retryable_exceptions is raised by connect.
|
||||||
|
:type connect: Callable[[], AdapterHandle]
|
||||||
|
:param AdapterLogger logger: A logger to emit messages on retry attempts or errors. When
|
||||||
|
handling expected errors, we call debug, and call warning on unexpected errors or when
|
||||||
|
all retry attempts have been exhausted.
|
||||||
|
:param retryable_exceptions: An iterable of exception classes that if raised by
|
||||||
|
connect should trigger a retry.
|
||||||
|
:type retryable_exceptions: Iterable[Type[Exception]]
|
||||||
|
:param int retry_limit: How many times to retry the call to connect. If this limit
|
||||||
|
is exceeded before a successful call, a FailedToConnectError will be raised.
|
||||||
|
Must be non-negative.
|
||||||
|
:param retry_timeout: Time to wait between attempts to connect. Can also take a
|
||||||
|
Callable that takes the number of attempts so far, beginning at 0, and returns an int
|
||||||
|
or float to be passed to time.sleep.
|
||||||
|
:type retry_timeout: Union[Callable[[int], SleepTime], SleepTime] = 1
|
||||||
|
:param int _attempts: Parameter used to keep track of the number of attempts in calling the
|
||||||
|
connect function across recursive calls. Passed as an argument to retry_timeout if it
|
||||||
|
is a Callable. This parameter should not be set by the initial caller.
|
||||||
|
:raises dbt.exceptions.FailedToConnectError: Upon exhausting all retry attempts without
|
||||||
|
successfully acquiring a handle.
|
||||||
|
:return: The given connection with its appropriate state and handle attributes set
|
||||||
|
depending on whether we successfully acquired a handle or not.
|
||||||
|
"""
|
||||||
|
timeout = retry_timeout(_attempts) if callable(retry_timeout) else retry_timeout
|
||||||
|
if timeout < 0:
|
||||||
|
raise dbt.exceptions.FailedToConnectError(
|
||||||
|
"retry_timeout cannot be negative or return a negative time."
|
||||||
|
)
|
||||||
|
|
||||||
|
if retry_limit < 0 or retry_limit > sys.getrecursionlimit():
|
||||||
|
# This guard is not perfect others may add to the recursion limit (e.g. built-ins).
|
||||||
|
connection.handle = None
|
||||||
|
connection.state = ConnectionState.FAIL
|
||||||
|
raise dbt.exceptions.FailedToConnectError("retry_limit cannot be negative")
|
||||||
|
|
||||||
|
try:
|
||||||
|
connection.handle = connect()
|
||||||
|
connection.state = ConnectionState.OPEN
|
||||||
|
return connection
|
||||||
|
|
||||||
|
except tuple(retryable_exceptions) as e:
|
||||||
|
if retry_limit <= 0:
|
||||||
|
connection.handle = None
|
||||||
|
connection.state = ConnectionState.FAIL
|
||||||
|
raise dbt.exceptions.FailedToConnectError(str(e))
|
||||||
|
|
||||||
|
logger.debug(
|
||||||
|
f"Got a retryable error when attempting to open a {cls.TYPE} connection.\n"
|
||||||
|
f"{retry_limit} attempts remaining. Retrying in {timeout} seconds.\n"
|
||||||
|
f"Error:\n{e}"
|
||||||
|
)
|
||||||
|
|
||||||
|
sleep(timeout)
|
||||||
|
return cls.retry_connection(
|
||||||
|
connection=connection,
|
||||||
|
connect=connect,
|
||||||
|
logger=logger,
|
||||||
|
retry_limit=retry_limit - 1,
|
||||||
|
retry_timeout=retry_timeout,
|
||||||
|
retryable_exceptions=retryable_exceptions,
|
||||||
|
_attempts=_attempts + 1,
|
||||||
|
)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
connection.handle = None
|
||||||
|
connection.state = ConnectionState.FAIL
|
||||||
|
raise dbt.exceptions.FailedToConnectError(str(e))
|
||||||
|
|
||||||
@abc.abstractmethod
|
@abc.abstractmethod
|
||||||
def cancel_open(self) -> Optional[List[str]]:
|
def cancel_open(self) -> Optional[List[str]]:
|
||||||
"""Cancel all open connections on the adapter. (passable)"""
|
"""Cancel all open connections on the adapter. (passable)"""
|
||||||
raise dbt.exceptions.NotImplementedException(
|
raise dbt.exceptions.NotImplementedError(
|
||||||
"`cancel_open` is not implemented for this adapter!"
|
"`cancel_open` is not implemented for this adapter!"
|
||||||
)
|
)
|
||||||
|
|
||||||
@abc.abstractclassmethod
|
@classmethod
|
||||||
|
@abc.abstractmethod
|
||||||
def open(cls, connection: Connection) -> Connection:
|
def open(cls, connection: Connection) -> Connection:
|
||||||
"""Open the given connection on the adapter and return it.
|
"""Open the given connection on the adapter and return it.
|
||||||
|
|
||||||
@@ -176,7 +288,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
|||||||
This should be thread-safe, or hold the lock if necessary. The given
|
This should be thread-safe, or hold the lock if necessary. The given
|
||||||
connection should not be in either in_use or available.
|
connection should not be in either in_use or available.
|
||||||
"""
|
"""
|
||||||
raise dbt.exceptions.NotImplementedException("`open` is not implemented for this adapter!")
|
raise dbt.exceptions.NotImplementedError("`open` is not implemented for this adapter!")
|
||||||
|
|
||||||
def release(self) -> None:
|
def release(self) -> None:
|
||||||
with self.lock:
|
with self.lock:
|
||||||
@@ -197,9 +309,9 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
|||||||
with self.lock:
|
with self.lock:
|
||||||
for connection in self.thread_connections.values():
|
for connection in self.thread_connections.values():
|
||||||
if connection.state not in {"closed", "init"}:
|
if connection.state not in {"closed", "init"}:
|
||||||
fire_event(ConnectionLeftOpen(conn_name=connection.name))
|
fire_event(ConnectionLeftOpenInCleanup(conn_name=cast_to_str(connection.name)))
|
||||||
else:
|
else:
|
||||||
fire_event(ConnectionClosed(conn_name=connection.name))
|
fire_event(ConnectionClosedInCleanup(conn_name=cast_to_str(connection.name)))
|
||||||
self.close(connection)
|
self.close(connection)
|
||||||
|
|
||||||
# garbage collect these connections
|
# garbage collect these connections
|
||||||
@@ -208,16 +320,12 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
|||||||
@abc.abstractmethod
|
@abc.abstractmethod
|
||||||
def begin(self) -> None:
|
def begin(self) -> None:
|
||||||
"""Begin a transaction. (passable)"""
|
"""Begin a transaction. (passable)"""
|
||||||
raise dbt.exceptions.NotImplementedException(
|
raise dbt.exceptions.NotImplementedError("`begin` is not implemented for this adapter!")
|
||||||
"`begin` is not implemented for this adapter!"
|
|
||||||
)
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
@abc.abstractmethod
|
||||||
def commit(self) -> None:
|
def commit(self) -> None:
|
||||||
"""Commit a transaction. (passable)"""
|
"""Commit a transaction. (passable)"""
|
||||||
raise dbt.exceptions.NotImplementedException(
|
raise dbt.exceptions.NotImplementedError("`commit` is not implemented for this adapter!")
|
||||||
"`commit` is not implemented for this adapter!"
|
|
||||||
)
|
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def _rollback_handle(cls, connection: Connection) -> None:
|
def _rollback_handle(cls, connection: Connection) -> None:
|
||||||
@@ -225,28 +333,40 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
|||||||
try:
|
try:
|
||||||
connection.handle.rollback()
|
connection.handle.rollback()
|
||||||
except Exception:
|
except Exception:
|
||||||
fire_event(RollbackFailed(conn_name=connection.name))
|
fire_event(
|
||||||
|
RollbackFailed(
|
||||||
|
conn_name=cast_to_str(connection.name),
|
||||||
|
exc_info=traceback.format_exc(),
|
||||||
|
node_info=get_node_info(),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def _close_handle(cls, connection: Connection) -> None:
|
def _close_handle(cls, connection: Connection) -> None:
|
||||||
"""Perform the actual close operation."""
|
"""Perform the actual close operation."""
|
||||||
# On windows, sometimes connection handles don't have a close() attr.
|
# On windows, sometimes connection handles don't have a close() attr.
|
||||||
if hasattr(connection.handle, "close"):
|
if hasattr(connection.handle, "close"):
|
||||||
fire_event(ConnectionClosed2(conn_name=connection.name))
|
fire_event(
|
||||||
|
ConnectionClosed(conn_name=cast_to_str(connection.name), node_info=get_node_info())
|
||||||
|
)
|
||||||
connection.handle.close()
|
connection.handle.close()
|
||||||
else:
|
else:
|
||||||
fire_event(ConnectionLeftOpen2(conn_name=connection.name))
|
fire_event(
|
||||||
|
ConnectionLeftOpen(
|
||||||
|
conn_name=cast_to_str(connection.name), node_info=get_node_info()
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def _rollback(cls, connection: Connection) -> None:
|
def _rollback(cls, connection: Connection) -> None:
|
||||||
"""Roll back the given connection."""
|
"""Roll back the given connection."""
|
||||||
if connection.transaction_open is False:
|
if connection.transaction_open is False:
|
||||||
raise dbt.exceptions.InternalException(
|
raise dbt.exceptions.DbtInternalError(
|
||||||
f"Tried to rollback transaction on connection "
|
f"Tried to rollback transaction on connection "
|
||||||
f'"{connection.name}", but it does not have one open!'
|
f'"{connection.name}", but it does not have one open!'
|
||||||
)
|
)
|
||||||
|
|
||||||
fire_event(Rollback(conn_name=connection.name))
|
fire_event(Rollback(conn_name=cast_to_str(connection.name), node_info=get_node_info()))
|
||||||
cls._rollback_handle(connection)
|
cls._rollback_handle(connection)
|
||||||
|
|
||||||
connection.transaction_open = False
|
connection.transaction_open = False
|
||||||
@@ -258,7 +378,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
|||||||
return connection
|
return connection
|
||||||
|
|
||||||
if connection.transaction_open and connection.handle:
|
if connection.transaction_open and connection.handle:
|
||||||
fire_event(Rollback(conn_name=connection.name))
|
fire_event(Rollback(conn_name=cast_to_str(connection.name), node_info=get_node_info()))
|
||||||
cls._rollback_handle(connection)
|
cls._rollback_handle(connection)
|
||||||
connection.transaction_open = False
|
connection.transaction_open = False
|
||||||
|
|
||||||
@@ -281,16 +401,14 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
|||||||
@abc.abstractmethod
|
@abc.abstractmethod
|
||||||
def execute(
|
def execute(
|
||||||
self, sql: str, auto_begin: bool = False, fetch: bool = False
|
self, sql: str, auto_begin: bool = False, fetch: bool = False
|
||||||
) -> Tuple[Union[str, AdapterResponse], agate.Table]:
|
) -> Tuple[AdapterResponse, agate.Table]:
|
||||||
"""Execute the given SQL.
|
"""Execute the given SQL.
|
||||||
|
|
||||||
:param str sql: The sql to execute.
|
:param str sql: The sql to execute.
|
||||||
:param bool auto_begin: If set, and dbt is not currently inside a
|
:param bool auto_begin: If set, and dbt is not currently inside a
|
||||||
transaction, automatically begin one.
|
transaction, automatically begin one.
|
||||||
:param bool fetch: If set, fetch results.
|
:param bool fetch: If set, fetch results.
|
||||||
:return: A tuple of the status and the results (empty if fetch=False).
|
:return: A tuple of the query status and results (empty if fetch=False).
|
||||||
:rtype: Tuple[Union[str, AdapterResponse], agate.Table]
|
:rtype: Tuple[AdapterResponse, agate.Table]
|
||||||
"""
|
"""
|
||||||
raise dbt.exceptions.NotImplementedException(
|
raise dbt.exceptions.NotImplementedError("`execute` is not implemented for this adapter!")
|
||||||
"`execute` is not implemented for this adapter!"
|
|
||||||
)
|
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -1,17 +1,17 @@
|
|||||||
from typing import List, Optional, Type
|
from typing import List, Optional, Type
|
||||||
|
|
||||||
from dbt.adapters.base import Credentials
|
from dbt.adapters.base import Credentials
|
||||||
from dbt.exceptions import CompilationException
|
from dbt.exceptions import CompilationError
|
||||||
from dbt.adapters.protocol import AdapterProtocol
|
from dbt.adapters.protocol import AdapterProtocol
|
||||||
|
|
||||||
|
|
||||||
def project_name_from_path(include_path: str) -> str:
|
def project_name_from_path(include_path: str) -> str:
|
||||||
# avoid an import cycle
|
# avoid an import cycle
|
||||||
from dbt.config.project import Project
|
from dbt.config.project import PartialProject
|
||||||
|
|
||||||
partial = Project.partial_load(include_path)
|
partial = PartialProject.from_project_root(include_path)
|
||||||
if partial.project_name is None:
|
if partial.project_name is None:
|
||||||
raise CompilationException(f"Invalid project at {include_path}: name not set!")
|
raise CompilationError(f"Invalid project at {include_path}: name not set!")
|
||||||
return partial.project_name
|
return partial.project_name
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -5,9 +5,9 @@ from dbt.clients.jinja import QueryStringGenerator
|
|||||||
|
|
||||||
from dbt.context.manifest import generate_query_header_context
|
from dbt.context.manifest import generate_query_header_context
|
||||||
from dbt.contracts.connection import AdapterRequiredConfig, QueryComment
|
from dbt.contracts.connection import AdapterRequiredConfig, QueryComment
|
||||||
from dbt.contracts.graph.compiled import CompileResultNode
|
from dbt.contracts.graph.nodes import ResultNode
|
||||||
from dbt.contracts.graph.manifest import Manifest
|
from dbt.contracts.graph.manifest import Manifest
|
||||||
from dbt.exceptions import RuntimeException
|
from dbt.exceptions import DbtRuntimeError
|
||||||
|
|
||||||
|
|
||||||
class NodeWrapper:
|
class NodeWrapper:
|
||||||
@@ -48,7 +48,7 @@ class _QueryComment(local):
|
|||||||
if isinstance(comment, str) and "*/" in comment:
|
if isinstance(comment, str) and "*/" in comment:
|
||||||
# tell the user "no" so they don't hurt themselves by writing
|
# tell the user "no" so they don't hurt themselves by writing
|
||||||
# garbage
|
# garbage
|
||||||
raise RuntimeException(f'query comment contains illegal value "*/": {comment}')
|
raise DbtRuntimeError(f'query comment contains illegal value "*/": {comment}')
|
||||||
self.query_comment = comment
|
self.query_comment = comment
|
||||||
self.append = append
|
self.append = append
|
||||||
|
|
||||||
@@ -90,7 +90,7 @@ class MacroQueryStringSetter:
|
|||||||
def reset(self):
|
def reset(self):
|
||||||
self.set("master", None)
|
self.set("master", None)
|
||||||
|
|
||||||
def set(self, name: str, node: Optional[CompileResultNode]):
|
def set(self, name: str, node: Optional[ResultNode]):
|
||||||
wrapped: Optional[NodeWrapper] = None
|
wrapped: Optional[NodeWrapper] = None
|
||||||
if node is not None:
|
if node is not None:
|
||||||
wrapped = NodeWrapper(node)
|
wrapped = NodeWrapper(node)
|
||||||
|
|||||||
@@ -1,9 +1,8 @@
|
|||||||
from collections.abc import Hashable
|
from collections.abc import Hashable
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass, field
|
||||||
from typing import Optional, TypeVar, Any, Type, Dict, Union, Iterator, Tuple, Set
|
from typing import Optional, TypeVar, Any, Type, Dict, Iterator, Tuple, Set
|
||||||
|
|
||||||
from dbt.contracts.graph.compiled import CompiledNode
|
from dbt.contracts.graph.nodes import SourceDefinition, ManifestNode, ResultNode, ParsedNode
|
||||||
from dbt.contracts.graph.parsed import ParsedSourceDefinition, ParsedNode
|
|
||||||
from dbt.contracts.relation import (
|
from dbt.contracts.relation import (
|
||||||
RelationType,
|
RelationType,
|
||||||
ComponentName,
|
ComponentName,
|
||||||
@@ -12,7 +11,11 @@ from dbt.contracts.relation import (
|
|||||||
Policy,
|
Policy,
|
||||||
Path,
|
Path,
|
||||||
)
|
)
|
||||||
from dbt.exceptions import InternalException
|
from dbt.exceptions import (
|
||||||
|
ApproximateMatchError,
|
||||||
|
DbtInternalError,
|
||||||
|
MultipleDatabasesNotAllowedError,
|
||||||
|
)
|
||||||
from dbt.node_types import NodeType
|
from dbt.node_types import NodeType
|
||||||
from dbt.utils import filter_null_values, deep_merge, classproperty
|
from dbt.utils import filter_null_values, deep_merge, classproperty
|
||||||
|
|
||||||
@@ -27,8 +30,10 @@ class BaseRelation(FakeAPIObject, Hashable):
|
|||||||
path: Path
|
path: Path
|
||||||
type: Optional[RelationType] = None
|
type: Optional[RelationType] = None
|
||||||
quote_character: str = '"'
|
quote_character: str = '"'
|
||||||
include_policy: Policy = Policy()
|
# Python 3.11 requires that these use default_factory instead of simple default
|
||||||
quote_policy: Policy = Policy()
|
# ValueError: mutable default <class 'dbt.contracts.relation.Policy'> for field include_policy is not allowed: use default_factory
|
||||||
|
include_policy: Policy = field(default_factory=lambda: Policy())
|
||||||
|
quote_policy: Policy = field(default_factory=lambda: Policy())
|
||||||
dbt_created: bool = False
|
dbt_created: bool = False
|
||||||
|
|
||||||
def _is_exactish_match(self, field: ComponentName, value: str) -> bool:
|
def _is_exactish_match(self, field: ComponentName, value: str) -> bool:
|
||||||
@@ -39,9 +44,9 @@ class BaseRelation(FakeAPIObject, Hashable):
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def _get_field_named(cls, field_name):
|
def _get_field_named(cls, field_name):
|
||||||
for field, _ in cls._get_fields():
|
for f, _ in cls._get_fields():
|
||||||
if field.name == field_name:
|
if f.name == field_name:
|
||||||
return field
|
return f
|
||||||
# this should be unreachable
|
# this should be unreachable
|
||||||
raise ValueError(f"BaseRelation has no {field_name} field!")
|
raise ValueError(f"BaseRelation has no {field_name} field!")
|
||||||
|
|
||||||
@@ -52,11 +57,11 @@ class BaseRelation(FakeAPIObject, Hashable):
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def get_default_quote_policy(cls) -> Policy:
|
def get_default_quote_policy(cls) -> Policy:
|
||||||
return cls._get_field_named("quote_policy").default
|
return cls._get_field_named("quote_policy").default_factory()
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def get_default_include_policy(cls) -> Policy:
|
def get_default_include_policy(cls) -> Policy:
|
||||||
return cls._get_field_named("include_policy").default
|
return cls._get_field_named("include_policy").default_factory()
|
||||||
|
|
||||||
def get(self, key, default=None):
|
def get(self, key, default=None):
|
||||||
"""Override `.get` to return a metadata object so we don't break
|
"""Override `.get` to return a metadata object so we don't break
|
||||||
@@ -82,7 +87,7 @@ class BaseRelation(FakeAPIObject, Hashable):
|
|||||||
|
|
||||||
if not search:
|
if not search:
|
||||||
# nothing was passed in
|
# nothing was passed in
|
||||||
raise dbt.exceptions.RuntimeException(
|
raise dbt.exceptions.DbtRuntimeError(
|
||||||
"Tried to match relation, but no search path was passed!"
|
"Tried to match relation, but no search path was passed!"
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -99,7 +104,7 @@ class BaseRelation(FakeAPIObject, Hashable):
|
|||||||
|
|
||||||
if approximate_match and not exact_match:
|
if approximate_match and not exact_match:
|
||||||
target = self.create(database=database, schema=schema, identifier=identifier)
|
target = self.create(database=database, schema=schema, identifier=identifier)
|
||||||
dbt.exceptions.approximate_relation_match(target, self)
|
raise ApproximateMatchError(target, self)
|
||||||
|
|
||||||
return exact_match
|
return exact_match
|
||||||
|
|
||||||
@@ -184,7 +189,7 @@ class BaseRelation(FakeAPIObject, Hashable):
|
|||||||
)
|
)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def create_from_source(cls: Type[Self], source: ParsedSourceDefinition, **kwargs: Any) -> Self:
|
def create_from_source(cls: Type[Self], source: SourceDefinition, **kwargs: Any) -> Self:
|
||||||
source_quoting = source.quoting.to_dict(omit_none=True)
|
source_quoting = source.quoting.to_dict(omit_none=True)
|
||||||
source_quoting.pop("column", None)
|
source_quoting.pop("column", None)
|
||||||
quote_policy = deep_merge(
|
quote_policy = deep_merge(
|
||||||
@@ -209,7 +214,7 @@ class BaseRelation(FakeAPIObject, Hashable):
|
|||||||
def create_ephemeral_from_node(
|
def create_ephemeral_from_node(
|
||||||
cls: Type[Self],
|
cls: Type[Self],
|
||||||
config: HasQuoting,
|
config: HasQuoting,
|
||||||
node: Union[ParsedNode, CompiledNode],
|
node: ManifestNode,
|
||||||
) -> Self:
|
) -> Self:
|
||||||
# Note that ephemeral models are based on the name.
|
# Note that ephemeral models are based on the name.
|
||||||
identifier = cls.add_ephemeral_prefix(node.name)
|
identifier = cls.add_ephemeral_prefix(node.name)
|
||||||
@@ -222,7 +227,7 @@ class BaseRelation(FakeAPIObject, Hashable):
|
|||||||
def create_from_node(
|
def create_from_node(
|
||||||
cls: Type[Self],
|
cls: Type[Self],
|
||||||
config: HasQuoting,
|
config: HasQuoting,
|
||||||
node: Union[ParsedNode, CompiledNode],
|
node,
|
||||||
quote_policy: Optional[Dict[str, bool]] = None,
|
quote_policy: Optional[Dict[str, bool]] = None,
|
||||||
**kwargs: Any,
|
**kwargs: Any,
|
||||||
) -> Self:
|
) -> Self:
|
||||||
@@ -243,20 +248,20 @@ class BaseRelation(FakeAPIObject, Hashable):
|
|||||||
def create_from(
|
def create_from(
|
||||||
cls: Type[Self],
|
cls: Type[Self],
|
||||||
config: HasQuoting,
|
config: HasQuoting,
|
||||||
node: Union[CompiledNode, ParsedNode, ParsedSourceDefinition],
|
node: ResultNode,
|
||||||
**kwargs: Any,
|
**kwargs: Any,
|
||||||
) -> Self:
|
) -> Self:
|
||||||
if node.resource_type == NodeType.Source:
|
if node.resource_type == NodeType.Source:
|
||||||
if not isinstance(node, ParsedSourceDefinition):
|
if not isinstance(node, SourceDefinition):
|
||||||
raise InternalException(
|
raise DbtInternalError(
|
||||||
"type mismatch, expected ParsedSourceDefinition but got {}".format(type(node))
|
"type mismatch, expected SourceDefinition but got {}".format(type(node))
|
||||||
)
|
)
|
||||||
return cls.create_from_source(node, **kwargs)
|
return cls.create_from_source(node, **kwargs)
|
||||||
else:
|
else:
|
||||||
if not isinstance(node, (ParsedNode, CompiledNode)):
|
# Can't use ManifestNode here because of parameterized generics
|
||||||
raise InternalException(
|
if not isinstance(node, (ParsedNode)):
|
||||||
"type mismatch, expected ParsedNode or CompiledNode but "
|
raise DbtInternalError(
|
||||||
"got {}".format(type(node))
|
f"type mismatch, expected ManifestNode but got {type(node)}"
|
||||||
)
|
)
|
||||||
return cls.create_from_node(config, node, **kwargs)
|
return cls.create_from_node(config, node, **kwargs)
|
||||||
|
|
||||||
@@ -323,6 +328,10 @@ class BaseRelation(FakeAPIObject, Hashable):
|
|||||||
def is_view(self) -> bool:
|
def is_view(self) -> bool:
|
||||||
return self.type == RelationType.View
|
return self.type == RelationType.View
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_materialized_view(self) -> bool:
|
||||||
|
return self.type == RelationType.MaterializedView
|
||||||
|
|
||||||
@classproperty
|
@classproperty
|
||||||
def Table(cls) -> str:
|
def Table(cls) -> str:
|
||||||
return str(RelationType.Table)
|
return str(RelationType.Table)
|
||||||
@@ -339,6 +348,10 @@ class BaseRelation(FakeAPIObject, Hashable):
|
|||||||
def External(cls) -> str:
|
def External(cls) -> str:
|
||||||
return str(RelationType.External)
|
return str(RelationType.External)
|
||||||
|
|
||||||
|
@classproperty
|
||||||
|
def MaterializedView(cls) -> str:
|
||||||
|
return str(RelationType.MaterializedView)
|
||||||
|
|
||||||
@classproperty
|
@classproperty
|
||||||
def get_relation_type(cls) -> Type[RelationType]:
|
def get_relation_type(cls) -> Type[RelationType]:
|
||||||
return RelationType
|
return RelationType
|
||||||
@@ -353,7 +366,7 @@ class InformationSchema(BaseRelation):
|
|||||||
|
|
||||||
def __post_init__(self):
|
def __post_init__(self):
|
||||||
if not isinstance(self.information_schema_view, (type(None), str)):
|
if not isinstance(self.information_schema_view, (type(None), str)):
|
||||||
raise dbt.exceptions.CompilationException(
|
raise dbt.exceptions.CompilationError(
|
||||||
"Got an invalid name: {}".format(self.information_schema_view)
|
"Got an invalid name: {}".format(self.information_schema_view)
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -437,7 +450,7 @@ class SchemaSearchMap(Dict[InformationSchema, Set[Optional[str]]]):
|
|||||||
if not allow_multiple_databases:
|
if not allow_multiple_databases:
|
||||||
seen = {r.database.lower() for r in self if r.database}
|
seen = {r.database.lower() for r in self if r.database}
|
||||||
if len(seen) > 1:
|
if len(seen) > 1:
|
||||||
dbt.exceptions.raise_compiler_error(str(seen))
|
raise MultipleDatabasesNotAllowedError(seen)
|
||||||
|
|
||||||
for information_schema_name, schema in self.search():
|
for information_schema_name, schema in self.search():
|
||||||
path = {"database": information_schema_name.database, "schema": schema}
|
path = {"database": information_schema_name.database, "schema": schema}
|
||||||
|
|||||||
@@ -2,26 +2,22 @@ import threading
|
|||||||
from copy import deepcopy
|
from copy import deepcopy
|
||||||
from typing import Any, Dict, Iterable, List, Optional, Set, Tuple
|
from typing import Any, Dict, Iterable, List, Optional, Set, Tuple
|
||||||
|
|
||||||
from dbt.adapters.reference_keys import _make_key, _ReferenceKey
|
from dbt.adapters.reference_keys import (
|
||||||
import dbt.exceptions
|
_make_ref_key,
|
||||||
from dbt.events.functions import fire_event
|
_make_ref_key_dict,
|
||||||
from dbt.events.types import (
|
_ReferenceKey,
|
||||||
AddLink,
|
|
||||||
AddRelation,
|
|
||||||
DropCascade,
|
|
||||||
DropMissingRelation,
|
|
||||||
DropRelation,
|
|
||||||
DumpAfterAddGraph,
|
|
||||||
DumpAfterRenameSchema,
|
|
||||||
DumpBeforeAddGraph,
|
|
||||||
DumpBeforeRenameSchema,
|
|
||||||
RenameSchema,
|
|
||||||
TemporaryRelation,
|
|
||||||
UncachedRelation,
|
|
||||||
UpdateReference,
|
|
||||||
)
|
)
|
||||||
|
from dbt.exceptions import (
|
||||||
|
DependentLinkNotCachedError,
|
||||||
|
NewNameAlreadyInCacheError,
|
||||||
|
NoneRelationFoundError,
|
||||||
|
ReferencedLinkNotCachedError,
|
||||||
|
TruncatedModelNameCausedCollisionError,
|
||||||
|
)
|
||||||
|
from dbt.events.functions import fire_event, fire_event_if
|
||||||
|
from dbt.events.types import CacheAction, CacheDumpGraph
|
||||||
|
from dbt.flags import get_flags
|
||||||
from dbt.utils import lowercase
|
from dbt.utils import lowercase
|
||||||
from dbt.helper_types import Lazy
|
|
||||||
|
|
||||||
|
|
||||||
def dot_separated(key: _ReferenceKey) -> str:
|
def dot_separated(key: _ReferenceKey) -> str:
|
||||||
@@ -81,7 +77,7 @@ class _CachedRelation:
|
|||||||
|
|
||||||
:return _ReferenceKey: A key for this relation.
|
:return _ReferenceKey: A key for this relation.
|
||||||
"""
|
"""
|
||||||
return _make_key(self)
|
return _make_ref_key(self)
|
||||||
|
|
||||||
def add_reference(self, referrer: "_CachedRelation"):
|
def add_reference(self, referrer: "_CachedRelation"):
|
||||||
"""Add a reference from referrer to self, indicating that if this node
|
"""Add a reference from referrer to self, indicating that if this node
|
||||||
@@ -144,11 +140,7 @@ class _CachedRelation:
|
|||||||
:raises InternalError: If the new key already exists.
|
:raises InternalError: If the new key already exists.
|
||||||
"""
|
"""
|
||||||
if new_key in self.referenced_by:
|
if new_key in self.referenced_by:
|
||||||
dbt.exceptions.raise_cache_inconsistent(
|
raise NewNameAlreadyInCacheError(old_key, new_key)
|
||||||
'in rename of "{}" -> "{}", new name is in the cache already'.format(
|
|
||||||
old_key, new_key
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
if old_key not in self.referenced_by:
|
if old_key not in self.referenced_by:
|
||||||
return
|
return
|
||||||
@@ -237,7 +229,7 @@ class RelationsCache:
|
|||||||
# self.relations or any cache entry's referenced_by during iteration
|
# self.relations or any cache entry's referenced_by during iteration
|
||||||
# it's a runtime error!
|
# it's a runtime error!
|
||||||
with self.lock:
|
with self.lock:
|
||||||
return {dot_separated(k): v.dump_graph_entry() for k, v in self.relations.items()}
|
return {dot_separated(k): str(v.dump_graph_entry()) for k, v in self.relations.items()}
|
||||||
|
|
||||||
def _setdefault(self, relation: _CachedRelation):
|
def _setdefault(self, relation: _CachedRelation):
|
||||||
"""Add a relation to the cache, or return it if it already exists.
|
"""Add a relation to the cache, or return it if it already exists.
|
||||||
@@ -264,21 +256,17 @@ class RelationsCache:
|
|||||||
if referenced is None:
|
if referenced is None:
|
||||||
return
|
return
|
||||||
if referenced is None:
|
if referenced is None:
|
||||||
dbt.exceptions.raise_cache_inconsistent(
|
raise ReferencedLinkNotCachedError(referenced_key)
|
||||||
"in add_link, referenced link key {} not in cache!".format(referenced_key)
|
|
||||||
)
|
|
||||||
|
|
||||||
dependent = self.relations.get(dependent_key)
|
dependent = self.relations.get(dependent_key)
|
||||||
if dependent is None:
|
if dependent is None:
|
||||||
dbt.exceptions.raise_cache_inconsistent(
|
raise DependentLinkNotCachedError(dependent_key)
|
||||||
"in add_link, dependent link key {} not in cache!".format(dependent_key)
|
|
||||||
)
|
|
||||||
|
|
||||||
assert dependent is not None # we just raised!
|
assert dependent is not None # we just raised!
|
||||||
|
|
||||||
referenced.add_reference(dependent)
|
referenced.add_reference(dependent)
|
||||||
|
|
||||||
# TODO: Is this dead code? I can't seem to find it grepping the codebase.
|
# This is called in plugins/postgres/dbt/adapters/postgres/impl.py
|
||||||
def add_link(self, referenced, dependent):
|
def add_link(self, referenced, dependent):
|
||||||
"""Add a link between two relations to the database. If either relation
|
"""Add a link between two relations to the database. If either relation
|
||||||
does not exist, it will be added as an "external" relation.
|
does not exist, it will be added as an "external" relation.
|
||||||
@@ -293,13 +281,18 @@ class RelationsCache:
|
|||||||
:param BaseRelation dependent: The dependent model.
|
:param BaseRelation dependent: The dependent model.
|
||||||
:raises InternalError: If either entry does not exist.
|
:raises InternalError: If either entry does not exist.
|
||||||
"""
|
"""
|
||||||
ref_key = _make_key(referenced)
|
ref_key = _make_ref_key(referenced)
|
||||||
dep_key = _make_key(dependent)
|
dep_key = _make_ref_key(dependent)
|
||||||
if (ref_key.database, ref_key.schema) not in self:
|
if (ref_key.database, ref_key.schema) not in self:
|
||||||
# if we have not cached the referenced schema at all, we must be
|
# if we have not cached the referenced schema at all, we must be
|
||||||
# referring to a table outside our control. There's no need to make
|
# referring to a table outside our control. There's no need to make
|
||||||
# a link - we will never drop the referenced relation during a run.
|
# a link - we will never drop the referenced relation during a run.
|
||||||
fire_event(UncachedRelation(dep_key=dep_key, ref_key=ref_key))
|
fire_event(
|
||||||
|
CacheAction(
|
||||||
|
ref_key=ref_key._asdict(),
|
||||||
|
ref_key_2=dep_key._asdict(),
|
||||||
|
)
|
||||||
|
)
|
||||||
return
|
return
|
||||||
if ref_key not in self.relations:
|
if ref_key not in self.relations:
|
||||||
# Insert a dummy "external" relation.
|
# Insert a dummy "external" relation.
|
||||||
@@ -309,7 +302,13 @@ class RelationsCache:
|
|||||||
# Insert a dummy "external" relation.
|
# Insert a dummy "external" relation.
|
||||||
dependent = dependent.replace(type=referenced.External)
|
dependent = dependent.replace(type=referenced.External)
|
||||||
self.add(dependent)
|
self.add(dependent)
|
||||||
fire_event(AddLink(dep_key=dep_key, ref_key=ref_key))
|
fire_event(
|
||||||
|
CacheAction(
|
||||||
|
action="add_link",
|
||||||
|
ref_key=dep_key._asdict(),
|
||||||
|
ref_key_2=ref_key._asdict(),
|
||||||
|
)
|
||||||
|
)
|
||||||
with self.lock:
|
with self.lock:
|
||||||
self._add_link(ref_key, dep_key)
|
self._add_link(ref_key, dep_key)
|
||||||
|
|
||||||
@@ -319,13 +318,20 @@ class RelationsCache:
|
|||||||
|
|
||||||
:param BaseRelation relation: The underlying relation.
|
:param BaseRelation relation: The underlying relation.
|
||||||
"""
|
"""
|
||||||
|
flags = get_flags()
|
||||||
cached = _CachedRelation(relation)
|
cached = _CachedRelation(relation)
|
||||||
fire_event(AddRelation(relation=_make_key(cached)))
|
fire_event_if(
|
||||||
fire_event(DumpBeforeAddGraph(dump=Lazy.defer(lambda: self.dump_graph())))
|
flags.LOG_CACHE_EVENTS,
|
||||||
|
lambda: CacheDumpGraph(before_after="before", action="adding", dump=self.dump_graph()),
|
||||||
|
)
|
||||||
|
fire_event(CacheAction(action="add_relation", ref_key=_make_ref_key_dict(cached)))
|
||||||
|
|
||||||
with self.lock:
|
with self.lock:
|
||||||
self._setdefault(cached)
|
self._setdefault(cached)
|
||||||
fire_event(DumpAfterAddGraph(dump=Lazy.defer(lambda: self.dump_graph())))
|
fire_event_if(
|
||||||
|
flags.LOG_CACHE_EVENTS,
|
||||||
|
lambda: CacheDumpGraph(before_after="after", action="adding", dump=self.dump_graph()),
|
||||||
|
)
|
||||||
|
|
||||||
def _remove_refs(self, keys):
|
def _remove_refs(self, keys):
|
||||||
"""Removes all references to all entries in keys. This does not
|
"""Removes all references to all entries in keys. This does not
|
||||||
@@ -340,19 +346,6 @@ class RelationsCache:
|
|||||||
for cached in self.relations.values():
|
for cached in self.relations.values():
|
||||||
cached.release_references(keys)
|
cached.release_references(keys)
|
||||||
|
|
||||||
def _drop_cascade_relation(self, dropped_key):
|
|
||||||
"""Drop the given relation and cascade it appropriately to all
|
|
||||||
dependent relations.
|
|
||||||
|
|
||||||
:param _CachedRelation dropped: An existing _CachedRelation to drop.
|
|
||||||
"""
|
|
||||||
if dropped_key not in self.relations:
|
|
||||||
fire_event(DropMissingRelation(relation=dropped_key))
|
|
||||||
return
|
|
||||||
consequences = self.relations[dropped_key].collect_consequences()
|
|
||||||
fire_event(DropCascade(dropped=dropped_key, consequences=consequences))
|
|
||||||
self._remove_refs(consequences)
|
|
||||||
|
|
||||||
def drop(self, relation):
|
def drop(self, relation):
|
||||||
"""Drop the named relation and cascade it appropriately to all
|
"""Drop the named relation and cascade it appropriately to all
|
||||||
dependent relations.
|
dependent relations.
|
||||||
@@ -364,10 +357,22 @@ class RelationsCache:
|
|||||||
:param str schema: The schema of the relation to drop.
|
:param str schema: The schema of the relation to drop.
|
||||||
:param str identifier: The identifier of the relation to drop.
|
:param str identifier: The identifier of the relation to drop.
|
||||||
"""
|
"""
|
||||||
dropped_key = _make_key(relation)
|
dropped_key = _make_ref_key(relation)
|
||||||
fire_event(DropRelation(dropped=dropped_key))
|
dropped_key_msg = _make_ref_key_dict(relation)
|
||||||
|
fire_event(CacheAction(action="drop_relation", ref_key=dropped_key_msg))
|
||||||
with self.lock:
|
with self.lock:
|
||||||
self._drop_cascade_relation(dropped_key)
|
if dropped_key not in self.relations:
|
||||||
|
fire_event(CacheAction(action="drop_missing_relation", ref_key=dropped_key_msg))
|
||||||
|
return
|
||||||
|
consequences = self.relations[dropped_key].collect_consequences()
|
||||||
|
# convert from a list of _ReferenceKeys to a list of ReferenceKeyMsgs
|
||||||
|
consequence_msgs = [key._asdict() for key in consequences]
|
||||||
|
fire_event(
|
||||||
|
CacheAction(
|
||||||
|
action="drop_cascade", ref_key=dropped_key_msg, ref_list=consequence_msgs
|
||||||
|
)
|
||||||
|
)
|
||||||
|
self._remove_refs(consequences)
|
||||||
|
|
||||||
def _rename_relation(self, old_key, new_relation):
|
def _rename_relation(self, old_key, new_relation):
|
||||||
"""Rename a relation named old_key to new_key, updating references.
|
"""Rename a relation named old_key to new_key, updating references.
|
||||||
@@ -383,14 +388,20 @@ class RelationsCache:
|
|||||||
relation = self.relations.pop(old_key)
|
relation = self.relations.pop(old_key)
|
||||||
new_key = new_relation.key()
|
new_key = new_relation.key()
|
||||||
|
|
||||||
# relaton has to rename its innards, so it needs the _CachedRelation.
|
# relation has to rename its innards, so it needs the _CachedRelation.
|
||||||
relation.rename(new_relation)
|
relation.rename(new_relation)
|
||||||
# update all the relations that refer to it
|
# update all the relations that refer to it
|
||||||
for cached in self.relations.values():
|
for cached in self.relations.values():
|
||||||
if cached.is_referenced_by(old_key):
|
if cached.is_referenced_by(old_key):
|
||||||
fire_event(
|
fire_event(
|
||||||
UpdateReference(old_key=old_key, new_key=new_key, cached_key=cached.key())
|
CacheAction(
|
||||||
|
action="update_reference",
|
||||||
|
ref_key=_make_ref_key_dict(old_key),
|
||||||
|
ref_key_2=_make_ref_key_dict(new_key),
|
||||||
|
ref_key_3=_make_ref_key_dict(cached.key()),
|
||||||
)
|
)
|
||||||
|
)
|
||||||
|
|
||||||
cached.rename_key(old_key, new_key)
|
cached.rename_key(old_key, new_key)
|
||||||
|
|
||||||
self.relations[new_key] = relation
|
self.relations[new_key] = relation
|
||||||
@@ -413,14 +424,12 @@ class RelationsCache:
|
|||||||
:raises InternalError: If the new key is already present.
|
:raises InternalError: If the new key is already present.
|
||||||
"""
|
"""
|
||||||
if new_key in self.relations:
|
if new_key in self.relations:
|
||||||
dbt.exceptions.raise_cache_inconsistent(
|
# Tell user when collision caused by model names truncated during
|
||||||
"in rename, new key {} already in cache: {}".format(
|
# materialization.
|
||||||
new_key, list(self.relations.keys())
|
raise TruncatedModelNameCausedCollisionError(new_key, self.relations)
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
if old_key not in self.relations:
|
if old_key not in self.relations:
|
||||||
fire_event(TemporaryRelation(key=old_key))
|
fire_event(CacheAction(action="temporary_relation", ref_key=old_key._asdict()))
|
||||||
return False
|
return False
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@@ -436,11 +445,20 @@ class RelationsCache:
|
|||||||
:param BaseRelation new: The new relation name information.
|
:param BaseRelation new: The new relation name information.
|
||||||
:raises InternalError: If the new key is already present.
|
:raises InternalError: If the new key is already present.
|
||||||
"""
|
"""
|
||||||
old_key = _make_key(old)
|
old_key = _make_ref_key(old)
|
||||||
new_key = _make_key(new)
|
new_key = _make_ref_key(new)
|
||||||
fire_event(RenameSchema(old_key=old_key, new_key=new_key))
|
fire_event(
|
||||||
|
CacheAction(
|
||||||
fire_event(DumpBeforeRenameSchema(dump=Lazy.defer(lambda: self.dump_graph())))
|
action="rename_relation",
|
||||||
|
ref_key=old_key._asdict(),
|
||||||
|
ref_key_2=new_key._asdict(),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
flags = get_flags()
|
||||||
|
fire_event_if(
|
||||||
|
flags.LOG_CACHE_EVENTS,
|
||||||
|
lambda: CacheDumpGraph(before_after="before", action="rename", dump=self.dump_graph()),
|
||||||
|
)
|
||||||
|
|
||||||
with self.lock:
|
with self.lock:
|
||||||
if self._check_rename_constraints(old_key, new_key):
|
if self._check_rename_constraints(old_key, new_key):
|
||||||
@@ -448,7 +466,10 @@ class RelationsCache:
|
|||||||
else:
|
else:
|
||||||
self._setdefault(_CachedRelation(new))
|
self._setdefault(_CachedRelation(new))
|
||||||
|
|
||||||
fire_event(DumpAfterRenameSchema(dump=Lazy.defer(lambda: self.dump_graph())))
|
fire_event_if(
|
||||||
|
flags.LOG_CACHE_EVENTS,
|
||||||
|
lambda: CacheDumpGraph(before_after="after", action="rename", dump=self.dump_graph()),
|
||||||
|
)
|
||||||
|
|
||||||
def get_relations(self, database: Optional[str], schema: Optional[str]) -> List[Any]:
|
def get_relations(self, database: Optional[str], schema: Optional[str]) -> List[Any]:
|
||||||
"""Case-insensitively yield all relations matching the given schema.
|
"""Case-insensitively yield all relations matching the given schema.
|
||||||
@@ -467,9 +488,7 @@ class RelationsCache:
|
|||||||
]
|
]
|
||||||
|
|
||||||
if None in results:
|
if None in results:
|
||||||
dbt.exceptions.raise_cache_inconsistent(
|
raise NoneRelationFoundError()
|
||||||
"in get_relations, a None relation was found in the cache!"
|
|
||||||
)
|
|
||||||
return results
|
return results
|
||||||
|
|
||||||
def clear(self):
|
def clear(self):
|
||||||
@@ -496,6 +515,6 @@ class RelationsCache:
|
|||||||
"""
|
"""
|
||||||
for relation in to_remove:
|
for relation in to_remove:
|
||||||
# it may have been cascaded out already
|
# it may have been cascaded out already
|
||||||
drop_key = _make_key(relation)
|
drop_key = _make_ref_key(relation)
|
||||||
if drop_key in self.relations:
|
if drop_key in self.relations:
|
||||||
self.drop(drop_key)
|
self.drop(drop_key)
|
||||||
|
|||||||
@@ -1,23 +1,19 @@
|
|||||||
import threading
|
import threading
|
||||||
from pathlib import Path
|
import traceback
|
||||||
|
from contextlib import contextmanager
|
||||||
from importlib import import_module
|
from importlib import import_module
|
||||||
from typing import Type, Dict, Any, List, Optional, Set
|
from pathlib import Path
|
||||||
|
from typing import Any, Dict, List, Optional, Set, Type
|
||||||
|
|
||||||
from dbt.exceptions import RuntimeException, InternalException
|
|
||||||
from dbt.include.global_project import (
|
|
||||||
PACKAGE_PATH as GLOBAL_PROJECT_PATH,
|
|
||||||
PROJECT_NAME as GLOBAL_PROJECT_NAME,
|
|
||||||
)
|
|
||||||
from dbt.events.functions import fire_event
|
|
||||||
from dbt.events.types import AdapterImportError, PluginLoadError
|
|
||||||
from dbt.contracts.connection import Credentials, AdapterRequiredConfig
|
|
||||||
from dbt.adapters.protocol import (
|
|
||||||
AdapterProtocol,
|
|
||||||
AdapterConfig,
|
|
||||||
RelationProtocol,
|
|
||||||
)
|
|
||||||
from dbt.adapters.base.plugin import AdapterPlugin
|
from dbt.adapters.base.plugin import AdapterPlugin
|
||||||
|
from dbt.adapters.protocol import AdapterConfig, AdapterProtocol, RelationProtocol
|
||||||
|
from dbt.contracts.connection import AdapterRequiredConfig, Credentials
|
||||||
|
from dbt.events.functions import fire_event
|
||||||
|
from dbt.events.types import AdapterImportError, PluginLoadError, AdapterRegistered
|
||||||
|
from dbt.exceptions import DbtInternalError, DbtRuntimeError
|
||||||
|
from dbt.include.global_project import PACKAGE_PATH as GLOBAL_PROJECT_PATH
|
||||||
|
from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME
|
||||||
|
from dbt.semver import VersionSpecifier
|
||||||
|
|
||||||
Adapter = AdapterProtocol
|
Adapter = AdapterProtocol
|
||||||
|
|
||||||
@@ -39,7 +35,7 @@ class AdapterContainer:
|
|||||||
names = ", ".join(self.plugins.keys())
|
names = ", ".join(self.plugins.keys())
|
||||||
|
|
||||||
message = f"Invalid adapter type {name}! Must be one of {names}"
|
message = f"Invalid adapter type {name}! Must be one of {names}"
|
||||||
raise RuntimeException(message)
|
raise DbtRuntimeError(message)
|
||||||
|
|
||||||
def get_adapter_class_by_name(self, name: str) -> Type[Adapter]:
|
def get_adapter_class_by_name(self, name: str) -> Type[Adapter]:
|
||||||
plugin = self.get_plugin_by_name(name)
|
plugin = self.get_plugin_by_name(name)
|
||||||
@@ -64,18 +60,18 @@ class AdapterContainer:
|
|||||||
# if we failed to import the target module in particular, inform
|
# if we failed to import the target module in particular, inform
|
||||||
# the user about it via a runtime error
|
# the user about it via a runtime error
|
||||||
if exc.name == "dbt.adapters." + name:
|
if exc.name == "dbt.adapters." + name:
|
||||||
fire_event(AdapterImportError(exc=exc))
|
fire_event(AdapterImportError(exc=str(exc)))
|
||||||
raise RuntimeException(f"Could not find adapter type {name}!")
|
raise DbtRuntimeError(f"Could not find adapter type {name}!")
|
||||||
# otherwise, the error had to have come from some underlying
|
# otherwise, the error had to have come from some underlying
|
||||||
# library. Log the stack trace.
|
# library. Log the stack trace.
|
||||||
|
|
||||||
fire_event(PluginLoadError())
|
fire_event(PluginLoadError(exc_info=traceback.format_exc()))
|
||||||
raise
|
raise
|
||||||
plugin: AdapterPlugin = mod.Plugin
|
plugin: AdapterPlugin = mod.Plugin
|
||||||
plugin_type = plugin.adapter.type()
|
plugin_type = plugin.adapter.type()
|
||||||
|
|
||||||
if plugin_type != name:
|
if plugin_type != name:
|
||||||
raise RuntimeException(
|
raise DbtRuntimeError(
|
||||||
f"Expected to find adapter with type named {name}, got "
|
f"Expected to find adapter with type named {name}, got "
|
||||||
f"adapter with type {plugin_type}"
|
f"adapter with type {plugin_type}"
|
||||||
)
|
)
|
||||||
@@ -94,7 +90,13 @@ class AdapterContainer:
|
|||||||
def register_adapter(self, config: AdapterRequiredConfig) -> None:
|
def register_adapter(self, config: AdapterRequiredConfig) -> None:
|
||||||
adapter_name = config.credentials.type
|
adapter_name = config.credentials.type
|
||||||
adapter_type = self.get_adapter_class_by_name(adapter_name)
|
adapter_type = self.get_adapter_class_by_name(adapter_name)
|
||||||
|
adapter_version = import_module(f".{adapter_name}.__version__", "dbt.adapters").version
|
||||||
|
adapter_version_specifier = VersionSpecifier.from_version_string(
|
||||||
|
adapter_version
|
||||||
|
).to_version_string()
|
||||||
|
fire_event(
|
||||||
|
AdapterRegistered(adapter_name=adapter_name, adapter_version=adapter_version_specifier)
|
||||||
|
)
|
||||||
with self.lock:
|
with self.lock:
|
||||||
if adapter_name in self.adapters:
|
if adapter_name in self.adapters:
|
||||||
# this shouldn't really happen...
|
# this shouldn't really happen...
|
||||||
@@ -137,11 +139,9 @@ class AdapterContainer:
|
|||||||
try:
|
try:
|
||||||
plugin = self.plugins[plugin_name]
|
plugin = self.plugins[plugin_name]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
raise InternalException(f"No plugin found for {plugin_name}") from None
|
raise DbtInternalError(f"No plugin found for {plugin_name}") from None
|
||||||
plugins.append(plugin)
|
plugins.append(plugin)
|
||||||
seen.add(plugin_name)
|
seen.add(plugin_name)
|
||||||
if plugin.dependencies is None:
|
|
||||||
continue
|
|
||||||
for dep in plugin.dependencies:
|
for dep in plugin.dependencies:
|
||||||
if dep not in seen:
|
if dep not in seen:
|
||||||
plugin_names.append(dep)
|
plugin_names.append(dep)
|
||||||
@@ -158,13 +158,16 @@ class AdapterContainer:
|
|||||||
try:
|
try:
|
||||||
path = self.packages[package_name]
|
path = self.packages[package_name]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
raise InternalException(f"No internal package listing found for {package_name}")
|
raise DbtInternalError(f"No internal package listing found for {package_name}")
|
||||||
paths.append(path)
|
paths.append(path)
|
||||||
return paths
|
return paths
|
||||||
|
|
||||||
def get_adapter_type_names(self, name: Optional[str]) -> List[str]:
|
def get_adapter_type_names(self, name: Optional[str]) -> List[str]:
|
||||||
return [p.adapter.type() for p in self.get_adapter_plugins(name)]
|
return [p.adapter.type() for p in self.get_adapter_plugins(name)]
|
||||||
|
|
||||||
|
def get_adapter_constraint_support(self, name: Optional[str]) -> List[str]:
|
||||||
|
return self.lookup_adapter(name).CONSTRAINT_SUPPORT # type: ignore
|
||||||
|
|
||||||
|
|
||||||
FACTORY: AdapterContainer = AdapterContainer()
|
FACTORY: AdapterContainer = AdapterContainer()
|
||||||
|
|
||||||
@@ -219,3 +222,16 @@ def get_adapter_package_names(name: Optional[str]) -> List[str]:
|
|||||||
|
|
||||||
def get_adapter_type_names(name: Optional[str]) -> List[str]:
|
def get_adapter_type_names(name: Optional[str]) -> List[str]:
|
||||||
return FACTORY.get_adapter_type_names(name)
|
return FACTORY.get_adapter_type_names(name)
|
||||||
|
|
||||||
|
|
||||||
|
def get_adapter_constraint_support(name: Optional[str]) -> List[str]:
|
||||||
|
return FACTORY.get_adapter_constraint_support(name)
|
||||||
|
|
||||||
|
|
||||||
|
@contextmanager
|
||||||
|
def adapter_management():
|
||||||
|
reset_adapters()
|
||||||
|
try:
|
||||||
|
yield
|
||||||
|
finally:
|
||||||
|
cleanup_connections()
|
||||||
|
|||||||
@@ -7,9 +7,7 @@ from typing import (
|
|||||||
List,
|
List,
|
||||||
Generic,
|
Generic,
|
||||||
TypeVar,
|
TypeVar,
|
||||||
ClassVar,
|
|
||||||
Tuple,
|
Tuple,
|
||||||
Union,
|
|
||||||
Dict,
|
Dict,
|
||||||
Any,
|
Any,
|
||||||
)
|
)
|
||||||
@@ -18,8 +16,7 @@ from typing_extensions import Protocol
|
|||||||
import agate
|
import agate
|
||||||
|
|
||||||
from dbt.contracts.connection import Connection, AdapterRequiredConfig, AdapterResponse
|
from dbt.contracts.connection import Connection, AdapterRequiredConfig, AdapterResponse
|
||||||
from dbt.contracts.graph.compiled import CompiledNode, ManifestNode, NonSourceCompiledNode
|
from dbt.contracts.graph.nodes import ResultNode, ManifestNode
|
||||||
from dbt.contracts.graph.parsed import ParsedNode, ParsedSourceDefinition
|
|
||||||
from dbt.contracts.graph.model_config import BaseConfig
|
from dbt.contracts.graph.model_config import BaseConfig
|
||||||
from dbt.contracts.graph.manifest import Manifest
|
from dbt.contracts.graph.manifest import Manifest
|
||||||
from dbt.contracts.relation import Policy, HasQuoting
|
from dbt.contracts.relation import Policy, HasQuoting
|
||||||
@@ -49,11 +46,7 @@ class RelationProtocol(Protocol):
|
|||||||
...
|
...
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def create_from(
|
def create_from(cls: Type[Self], config: HasQuoting, node: ResultNode) -> Self:
|
||||||
cls: Type[Self],
|
|
||||||
config: HasQuoting,
|
|
||||||
node: Union[CompiledNode, ParsedNode, ParsedSourceDefinition],
|
|
||||||
) -> Self:
|
|
||||||
...
|
...
|
||||||
|
|
||||||
|
|
||||||
@@ -66,7 +59,7 @@ class CompilerProtocol(Protocol):
|
|||||||
node: ManifestNode,
|
node: ManifestNode,
|
||||||
manifest: Manifest,
|
manifest: Manifest,
|
||||||
extra_context: Optional[Dict[str, Any]] = None,
|
extra_context: Optional[Dict[str, Any]] = None,
|
||||||
) -> NonSourceCompiledNode:
|
) -> ManifestNode:
|
||||||
...
|
...
|
||||||
|
|
||||||
|
|
||||||
@@ -88,10 +81,13 @@ class AdapterProtocol( # type: ignore[misc]
|
|||||||
Compiler_T,
|
Compiler_T,
|
||||||
],
|
],
|
||||||
):
|
):
|
||||||
AdapterSpecificConfigs: ClassVar[Type[AdapterConfig_T]]
|
# N.B. Technically these are ClassVars, but mypy doesn't support putting type vars in a
|
||||||
Column: ClassVar[Type[Column_T]]
|
# ClassVar due to the restrictiveness of PEP-526
|
||||||
Relation: ClassVar[Type[Relation_T]]
|
# See: https://github.com/python/mypy/issues/5144
|
||||||
ConnectionManager: ClassVar[Type[ConnectionManager_T]]
|
AdapterSpecificConfigs: Type[AdapterConfig_T]
|
||||||
|
Column: Type[Column_T]
|
||||||
|
Relation: Type[Relation_T]
|
||||||
|
ConnectionManager: Type[ConnectionManager_T]
|
||||||
connections: ConnectionManager_T
|
connections: ConnectionManager_T
|
||||||
|
|
||||||
def __init__(self, config: AdapterRequiredConfig):
|
def __init__(self, config: AdapterRequiredConfig):
|
||||||
@@ -155,7 +151,7 @@ class AdapterProtocol( # type: ignore[misc]
|
|||||||
|
|
||||||
def execute(
|
def execute(
|
||||||
self, sql: str, auto_begin: bool = False, fetch: bool = False
|
self, sql: str, auto_begin: bool = False, fetch: bool = False
|
||||||
) -> Tuple[Union[str, AdapterResponse], agate.Table]:
|
) -> Tuple[AdapterResponse, agate.Table]:
|
||||||
...
|
...
|
||||||
|
|
||||||
def get_compiler(self) -> Compiler_T:
|
def get_compiler(self) -> Compiler_T:
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
# this module exists to resolve circular imports with the events module
|
# this module exists to resolve circular imports with the events module
|
||||||
|
|
||||||
from collections import namedtuple
|
from collections import namedtuple
|
||||||
from typing import Optional
|
from typing import Any, Optional
|
||||||
|
|
||||||
|
|
||||||
_ReferenceKey = namedtuple("_ReferenceKey", "database schema identifier")
|
_ReferenceKey = namedtuple("_ReferenceKey", "database schema identifier")
|
||||||
@@ -14,7 +14,12 @@ def lowercase(value: Optional[str]) -> Optional[str]:
|
|||||||
return value.lower()
|
return value.lower()
|
||||||
|
|
||||||
|
|
||||||
def _make_key(relation) -> _ReferenceKey:
|
# For backwards compatibility. New code should use _make_ref_key
|
||||||
|
def _make_key(relation: Any) -> _ReferenceKey:
|
||||||
|
return _make_ref_key(relation)
|
||||||
|
|
||||||
|
|
||||||
|
def _make_ref_key(relation: Any) -> _ReferenceKey:
|
||||||
"""Make _ReferenceKeys with lowercase values for the cache so we don't have
|
"""Make _ReferenceKeys with lowercase values for the cache so we don't have
|
||||||
to keep track of quoting
|
to keep track of quoting
|
||||||
"""
|
"""
|
||||||
@@ -22,3 +27,11 @@ def _make_key(relation) -> _ReferenceKey:
|
|||||||
return _ReferenceKey(
|
return _ReferenceKey(
|
||||||
lowercase(relation.database), lowercase(relation.schema), lowercase(relation.identifier)
|
lowercase(relation.database), lowercase(relation.schema), lowercase(relation.identifier)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _make_ref_key_dict(relation: Any):
|
||||||
|
return {
|
||||||
|
"database": relation.database,
|
||||||
|
"schema": relation.schema,
|
||||||
|
"identifier": relation.identifier,
|
||||||
|
}
|
||||||
|
|||||||
25
core/dbt/adapters/relation_configs/README.md
Normal file
25
core/dbt/adapters/relation_configs/README.md
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
# RelationConfig
|
||||||
|
This package serves as an initial abstraction for managing the inspection of existing relations and determining
|
||||||
|
changes on those relations. It arose from the materialized view work and is currently only supporting
|
||||||
|
materialized views for Postgres and Redshift as well as dynamic tables for Snowflake. There are three main
|
||||||
|
classes in this package.
|
||||||
|
|
||||||
|
## RelationConfigBase
|
||||||
|
This is a very small class that only has a `from_dict()` method and a default `NotImplementedError()`. At some
|
||||||
|
point this could be replaced by a more robust framework, like `mashumaro` or `pydantic`.
|
||||||
|
|
||||||
|
## RelationConfigChange
|
||||||
|
This class inherits from `RelationConfigBase` ; however, this can be thought of as a separate class. The subclassing
|
||||||
|
merely points to the idea that both classes would likely inherit from the same class in a `mashumaro` or
|
||||||
|
`pydantic` implementation. This class is much more restricted in attribution. It should really only
|
||||||
|
ever need an `action` and a `context`. This can be though of as being analogous to a web request. You need to
|
||||||
|
know what you're doing (`action`: 'create' = GET, 'drop' = DELETE, etc.) and the information (`context`) needed
|
||||||
|
to make the change. In our scenarios, the context tends to be an instance of `RelationConfigBase` corresponding
|
||||||
|
to the new state.
|
||||||
|
|
||||||
|
## RelationConfigValidationMixin
|
||||||
|
This mixin provides optional validation mechanics that can be applied to either `RelationConfigBase` or
|
||||||
|
`RelationConfigChange` subclasses. A validation rule is a combination of a `validation_check`, something
|
||||||
|
that should evaluate to `True`, and an optional `validation_error`, an instance of `DbtRuntimeError`
|
||||||
|
that should be raised in the event the `validation_check` fails. While optional, it's recommended that
|
||||||
|
the `validation_error` be provided for clearer transparency to the end user.
|
||||||
12
core/dbt/adapters/relation_configs/__init__.py
Normal file
12
core/dbt/adapters/relation_configs/__init__.py
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
from dbt.adapters.relation_configs.config_base import ( # noqa: F401
|
||||||
|
RelationConfigBase,
|
||||||
|
RelationResults,
|
||||||
|
)
|
||||||
|
from dbt.adapters.relation_configs.config_change import ( # noqa: F401
|
||||||
|
RelationConfigChangeAction,
|
||||||
|
RelationConfigChange,
|
||||||
|
)
|
||||||
|
from dbt.adapters.relation_configs.config_validation import ( # noqa: F401
|
||||||
|
RelationConfigValidationMixin,
|
||||||
|
RelationConfigValidationRule,
|
||||||
|
)
|
||||||
44
core/dbt/adapters/relation_configs/config_base.py
Normal file
44
core/dbt/adapters/relation_configs/config_base.py
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Union, Dict
|
||||||
|
|
||||||
|
import agate
|
||||||
|
from dbt.utils import filter_null_values
|
||||||
|
|
||||||
|
|
||||||
|
"""
|
||||||
|
This is what relation metadata from the database looks like. It's a dictionary because there will be
|
||||||
|
multiple grains of data for a single object. For example, a materialized view in Postgres has base level information,
|
||||||
|
like name. But it also can have multiple indexes, which needs to be a separate query. It might look like this:
|
||||||
|
|
||||||
|
{
|
||||||
|
"base": agate.Row({"table_name": "table_abc", "query": "select * from table_def"})
|
||||||
|
"indexes": agate.Table("rows": [
|
||||||
|
agate.Row({"name": "index_a", "columns": ["column_a"], "type": "hash", "unique": False}),
|
||||||
|
agate.Row({"name": "index_b", "columns": ["time_dim_a"], "type": "btree", "unique": False}),
|
||||||
|
])
|
||||||
|
}
|
||||||
|
"""
|
||||||
|
RelationResults = Dict[str, Union[agate.Row, agate.Table]]
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
|
||||||
|
class RelationConfigBase:
|
||||||
|
@classmethod
|
||||||
|
def from_dict(cls, kwargs_dict) -> "RelationConfigBase":
|
||||||
|
"""
|
||||||
|
This assumes the subclass of `RelationConfigBase` is flat, in the sense that no attribute is
|
||||||
|
itself another subclass of `RelationConfigBase`. If that's not the case, this should be overriden
|
||||||
|
to manually manage that complexity.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
kwargs_dict: the dict representation of this instance
|
||||||
|
|
||||||
|
Returns: the `RelationConfigBase` representation associated with the provided dict
|
||||||
|
"""
|
||||||
|
return cls(**filter_null_values(kwargs_dict)) # type: ignore
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def _not_implemented_error(cls) -> NotImplementedError:
|
||||||
|
return NotImplementedError(
|
||||||
|
"This relation type has not been fully configured for this adapter."
|
||||||
|
)
|
||||||
23
core/dbt/adapters/relation_configs/config_change.py
Normal file
23
core/dbt/adapters/relation_configs/config_change.py
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Hashable
|
||||||
|
|
||||||
|
from dbt.adapters.relation_configs.config_base import RelationConfigBase
|
||||||
|
from dbt.dataclass_schema import StrEnum
|
||||||
|
|
||||||
|
|
||||||
|
class RelationConfigChangeAction(StrEnum):
|
||||||
|
alter = "alter"
|
||||||
|
create = "create"
|
||||||
|
drop = "drop"
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True, eq=True, unsafe_hash=True)
|
||||||
|
class RelationConfigChange(RelationConfigBase, ABC):
|
||||||
|
action: RelationConfigChangeAction
|
||||||
|
context: Hashable # this is usually a RelationConfig, e.g. IndexConfig, but shouldn't be limited
|
||||||
|
|
||||||
|
@property
|
||||||
|
@abstractmethod
|
||||||
|
def requires_full_refresh(self) -> bool:
|
||||||
|
raise self._not_implemented_error()
|
||||||
57
core/dbt/adapters/relation_configs/config_validation.py
Normal file
57
core/dbt/adapters/relation_configs/config_validation.py
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Set, Optional
|
||||||
|
|
||||||
|
from dbt.exceptions import DbtRuntimeError
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True, eq=True, unsafe_hash=True)
|
||||||
|
class RelationConfigValidationRule:
|
||||||
|
validation_check: bool
|
||||||
|
validation_error: Optional[DbtRuntimeError]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def default_error(self):
|
||||||
|
return DbtRuntimeError(
|
||||||
|
"There was a validation error in preparing this relation config."
|
||||||
|
"No additional context was provided by this adapter."
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
|
||||||
|
class RelationConfigValidationMixin:
|
||||||
|
def __post_init__(self):
|
||||||
|
self.run_validation_rules()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def validation_rules(self) -> Set[RelationConfigValidationRule]:
|
||||||
|
"""
|
||||||
|
A set of validation rules to run against the object upon creation.
|
||||||
|
|
||||||
|
A validation rule is a combination of a validation check (bool) and an optional error message.
|
||||||
|
|
||||||
|
This defaults to no validation rules if not implemented. It's recommended to override this with values,
|
||||||
|
but that may not always be necessary.
|
||||||
|
|
||||||
|
Returns: a set of validation rules
|
||||||
|
"""
|
||||||
|
return set()
|
||||||
|
|
||||||
|
def run_validation_rules(self):
|
||||||
|
for validation_rule in self.validation_rules:
|
||||||
|
try:
|
||||||
|
assert validation_rule.validation_check
|
||||||
|
except AssertionError:
|
||||||
|
if validation_rule.validation_error:
|
||||||
|
raise validation_rule.validation_error
|
||||||
|
else:
|
||||||
|
raise validation_rule.default_error
|
||||||
|
self.run_child_validation_rules()
|
||||||
|
|
||||||
|
def run_child_validation_rules(self):
|
||||||
|
for attr_value in vars(self).values():
|
||||||
|
if hasattr(attr_value, "validation_rules"):
|
||||||
|
attr_value.run_validation_rules()
|
||||||
|
if isinstance(attr_value, set):
|
||||||
|
for member in attr_value:
|
||||||
|
if hasattr(member, "validation_rules"):
|
||||||
|
member.run_validation_rules()
|
||||||
@@ -10,6 +10,8 @@ from dbt.adapters.base import BaseConnectionManager
|
|||||||
from dbt.contracts.connection import Connection, ConnectionState, AdapterResponse
|
from dbt.contracts.connection import Connection, ConnectionState, AdapterResponse
|
||||||
from dbt.events.functions import fire_event
|
from dbt.events.functions import fire_event
|
||||||
from dbt.events.types import ConnectionUsed, SQLQuery, SQLCommit, SQLQueryStatus
|
from dbt.events.types import ConnectionUsed, SQLQuery, SQLCommit, SQLQueryStatus
|
||||||
|
from dbt.events.contextvars import get_node_info
|
||||||
|
from dbt.utils import cast_to_str
|
||||||
|
|
||||||
|
|
||||||
class SQLConnectionManager(BaseConnectionManager):
|
class SQLConnectionManager(BaseConnectionManager):
|
||||||
@@ -25,9 +27,7 @@ class SQLConnectionManager(BaseConnectionManager):
|
|||||||
@abc.abstractmethod
|
@abc.abstractmethod
|
||||||
def cancel(self, connection: Connection):
|
def cancel(self, connection: Connection):
|
||||||
"""Cancel the given connection."""
|
"""Cancel the given connection."""
|
||||||
raise dbt.exceptions.NotImplementedException(
|
raise dbt.exceptions.NotImplementedError("`cancel` is not implemented for this adapter!")
|
||||||
"`cancel` is not implemented for this adapter!"
|
|
||||||
)
|
|
||||||
|
|
||||||
def cancel_open(self) -> List[str]:
|
def cancel_open(self) -> List[str]:
|
||||||
names = []
|
names = []
|
||||||
@@ -55,7 +55,13 @@ class SQLConnectionManager(BaseConnectionManager):
|
|||||||
connection = self.get_thread_connection()
|
connection = self.get_thread_connection()
|
||||||
if auto_begin and connection.transaction_open is False:
|
if auto_begin and connection.transaction_open is False:
|
||||||
self.begin()
|
self.begin()
|
||||||
fire_event(ConnectionUsed(conn_type=self.TYPE, conn_name=connection.name))
|
fire_event(
|
||||||
|
ConnectionUsed(
|
||||||
|
conn_type=self.TYPE,
|
||||||
|
conn_name=cast_to_str(connection.name),
|
||||||
|
node_info=get_node_info(),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
with self.exception_handler(sql):
|
with self.exception_handler(sql):
|
||||||
if abridge_sql_log:
|
if abridge_sql_log:
|
||||||
@@ -63,7 +69,11 @@ class SQLConnectionManager(BaseConnectionManager):
|
|||||||
else:
|
else:
|
||||||
log_sql = sql
|
log_sql = sql
|
||||||
|
|
||||||
fire_event(SQLQuery(conn_name=connection.name, sql=log_sql))
|
fire_event(
|
||||||
|
SQLQuery(
|
||||||
|
conn_name=cast_to_str(connection.name), sql=log_sql, node_info=get_node_info()
|
||||||
|
)
|
||||||
|
)
|
||||||
pre = time.time()
|
pre = time.time()
|
||||||
|
|
||||||
cursor = connection.handle.cursor()
|
cursor = connection.handle.cursor()
|
||||||
@@ -71,16 +81,19 @@ class SQLConnectionManager(BaseConnectionManager):
|
|||||||
|
|
||||||
fire_event(
|
fire_event(
|
||||||
SQLQueryStatus(
|
SQLQueryStatus(
|
||||||
status=str(self.get_response(cursor)), elapsed=round((time.time() - pre), 2)
|
status=str(self.get_response(cursor)),
|
||||||
|
elapsed=round((time.time() - pre)),
|
||||||
|
node_info=get_node_info(),
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
return connection, cursor
|
return connection, cursor
|
||||||
|
|
||||||
@abc.abstractclassmethod
|
@classmethod
|
||||||
def get_response(cls, cursor: Any) -> Union[AdapterResponse, str]:
|
@abc.abstractmethod
|
||||||
|
def get_response(cls, cursor: Any) -> AdapterResponse:
|
||||||
"""Get the status of the cursor."""
|
"""Get the status of the cursor."""
|
||||||
raise dbt.exceptions.NotImplementedException(
|
raise dbt.exceptions.NotImplementedError(
|
||||||
"`get_response` is not implemented for this adapter!"
|
"`get_response` is not implemented for this adapter!"
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -104,25 +117,36 @@ class SQLConnectionManager(BaseConnectionManager):
|
|||||||
return [dict(zip(column_names, row)) for row in rows]
|
return [dict(zip(column_names, row)) for row in rows]
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def get_result_from_cursor(cls, cursor: Any) -> agate.Table:
|
def get_result_from_cursor(cls, cursor: Any, limit: Optional[int]) -> agate.Table:
|
||||||
data: List[Any] = []
|
data: List[Any] = []
|
||||||
column_names: List[str] = []
|
column_names: List[str] = []
|
||||||
|
|
||||||
if cursor.description is not None:
|
if cursor.description is not None:
|
||||||
column_names = [col[0] for col in cursor.description]
|
column_names = [col[0] for col in cursor.description]
|
||||||
|
if limit:
|
||||||
|
rows = cursor.fetchmany(limit)
|
||||||
|
else:
|
||||||
rows = cursor.fetchall()
|
rows = cursor.fetchall()
|
||||||
data = cls.process_results(column_names, rows)
|
data = cls.process_results(column_names, rows)
|
||||||
|
|
||||||
return dbt.clients.agate_helper.table_from_data_flat(data, column_names)
|
return dbt.clients.agate_helper.table_from_data_flat(data, column_names)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def data_type_code_to_name(cls, type_code: Union[int, str]) -> str:
|
||||||
|
"""Get the string representation of the data type from the type_code."""
|
||||||
|
# https://peps.python.org/pep-0249/#type-objects
|
||||||
|
raise dbt.exceptions.NotImplementedError(
|
||||||
|
"`data_type_code_to_name` is not implemented for this adapter!"
|
||||||
|
)
|
||||||
|
|
||||||
def execute(
|
def execute(
|
||||||
self, sql: str, auto_begin: bool = False, fetch: bool = False
|
self, sql: str, auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None
|
||||||
) -> Tuple[Union[AdapterResponse, str], agate.Table]:
|
) -> Tuple[AdapterResponse, agate.Table]:
|
||||||
sql = self._add_query_comment(sql)
|
sql = self._add_query_comment(sql)
|
||||||
_, cursor = self.add_query(sql, auto_begin)
|
_, cursor = self.add_query(sql, auto_begin)
|
||||||
response = self.get_response(cursor)
|
response = self.get_response(cursor)
|
||||||
if fetch:
|
if fetch:
|
||||||
table = self.get_result_from_cursor(cursor)
|
table = self.get_result_from_cursor(cursor, limit)
|
||||||
else:
|
else:
|
||||||
table = dbt.clients.agate_helper.empty_table()
|
table = dbt.clients.agate_helper.empty_table()
|
||||||
return response, table
|
return response, table
|
||||||
@@ -133,10 +157,14 @@ class SQLConnectionManager(BaseConnectionManager):
|
|||||||
def add_commit_query(self):
|
def add_commit_query(self):
|
||||||
return self.add_query("COMMIT", auto_begin=False)
|
return self.add_query("COMMIT", auto_begin=False)
|
||||||
|
|
||||||
|
def add_select_query(self, sql: str) -> Tuple[Connection, Any]:
|
||||||
|
sql = self._add_query_comment(sql)
|
||||||
|
return self.add_query(sql, auto_begin=False)
|
||||||
|
|
||||||
def begin(self):
|
def begin(self):
|
||||||
connection = self.get_thread_connection()
|
connection = self.get_thread_connection()
|
||||||
if connection.transaction_open is True:
|
if connection.transaction_open is True:
|
||||||
raise dbt.exceptions.InternalException(
|
raise dbt.exceptions.DbtInternalError(
|
||||||
'Tried to begin a new transaction on connection "{}", but '
|
'Tried to begin a new transaction on connection "{}", but '
|
||||||
"it already had one open!".format(connection.name)
|
"it already had one open!".format(connection.name)
|
||||||
)
|
)
|
||||||
@@ -149,12 +177,12 @@ class SQLConnectionManager(BaseConnectionManager):
|
|||||||
def commit(self):
|
def commit(self):
|
||||||
connection = self.get_thread_connection()
|
connection = self.get_thread_connection()
|
||||||
if connection.transaction_open is False:
|
if connection.transaction_open is False:
|
||||||
raise dbt.exceptions.InternalException(
|
raise dbt.exceptions.DbtInternalError(
|
||||||
'Tried to commit transaction on connection "{}", but '
|
'Tried to commit transaction on connection "{}", but '
|
||||||
"it does not have one open!".format(connection.name)
|
"it does not have one open!".format(connection.name)
|
||||||
)
|
)
|
||||||
|
|
||||||
fire_event(SQLCommit(conn_name=connection.name))
|
fire_event(SQLCommit(conn_name=connection.name, node_info=get_node_info()))
|
||||||
self.add_commit_query()
|
self.add_commit_query()
|
||||||
|
|
||||||
connection.transaction_open = False
|
connection.transaction_open = False
|
||||||
|
|||||||
@@ -1,11 +1,10 @@
|
|||||||
import agate
|
import agate
|
||||||
from typing import Any, Optional, Tuple, Type, List
|
from typing import Any, Optional, Tuple, Type, List
|
||||||
|
|
||||||
import dbt.clients.agate_helper
|
from dbt.contracts.connection import Connection, AdapterResponse
|
||||||
from dbt.contracts.connection import Connection
|
from dbt.exceptions import RelationTypeNullError
|
||||||
import dbt.exceptions
|
|
||||||
from dbt.adapters.base import BaseAdapter, available
|
from dbt.adapters.base import BaseAdapter, available
|
||||||
from dbt.adapters.cache import _make_key
|
from dbt.adapters.cache import _make_ref_key_dict
|
||||||
from dbt.adapters.sql import SQLConnectionManager
|
from dbt.adapters.sql import SQLConnectionManager
|
||||||
from dbt.events.functions import fire_event
|
from dbt.events.functions import fire_event
|
||||||
from dbt.events.types import ColTypeChange, SchemaCreation, SchemaDrop
|
from dbt.events.types import ColTypeChange, SchemaCreation, SchemaDrop
|
||||||
@@ -23,6 +22,7 @@ RENAME_RELATION_MACRO_NAME = "rename_relation"
|
|||||||
TRUNCATE_RELATION_MACRO_NAME = "truncate_relation"
|
TRUNCATE_RELATION_MACRO_NAME = "truncate_relation"
|
||||||
DROP_RELATION_MACRO_NAME = "drop_relation"
|
DROP_RELATION_MACRO_NAME = "drop_relation"
|
||||||
ALTER_COLUMN_TYPE_MACRO_NAME = "alter_column_type"
|
ALTER_COLUMN_TYPE_MACRO_NAME = "alter_column_type"
|
||||||
|
VALIDATE_SQL_MACRO_NAME = "validate_sql"
|
||||||
|
|
||||||
|
|
||||||
class SQLAdapter(BaseAdapter):
|
class SQLAdapter(BaseAdapter):
|
||||||
@@ -110,7 +110,7 @@ class SQLAdapter(BaseAdapter):
|
|||||||
ColTypeChange(
|
ColTypeChange(
|
||||||
orig_type=target_column.data_type,
|
orig_type=target_column.data_type,
|
||||||
new_type=new_type,
|
new_type=new_type,
|
||||||
table=_make_key(current),
|
table=_make_ref_key_dict(current),
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -132,9 +132,7 @@ class SQLAdapter(BaseAdapter):
|
|||||||
|
|
||||||
def drop_relation(self, relation):
|
def drop_relation(self, relation):
|
||||||
if relation.type is None:
|
if relation.type is None:
|
||||||
dbt.exceptions.raise_compiler_error(
|
raise RelationTypeNullError(relation)
|
||||||
"Tried to drop relation {}, but its type is null.".format(relation)
|
|
||||||
)
|
|
||||||
|
|
||||||
self.cache_dropped(relation)
|
self.cache_dropped(relation)
|
||||||
self.execute_macro(DROP_RELATION_MACRO_NAME, kwargs={"relation": relation})
|
self.execute_macro(DROP_RELATION_MACRO_NAME, kwargs={"relation": relation})
|
||||||
@@ -155,7 +153,7 @@ class SQLAdapter(BaseAdapter):
|
|||||||
|
|
||||||
def create_schema(self, relation: BaseRelation) -> None:
|
def create_schema(self, relation: BaseRelation) -> None:
|
||||||
relation = relation.without_identifier()
|
relation = relation.without_identifier()
|
||||||
fire_event(SchemaCreation(relation=_make_key(relation)))
|
fire_event(SchemaCreation(relation=_make_ref_key_dict(relation)))
|
||||||
kwargs = {
|
kwargs = {
|
||||||
"relation": relation,
|
"relation": relation,
|
||||||
}
|
}
|
||||||
@@ -166,11 +164,12 @@ class SQLAdapter(BaseAdapter):
|
|||||||
|
|
||||||
def drop_schema(self, relation: BaseRelation) -> None:
|
def drop_schema(self, relation: BaseRelation) -> None:
|
||||||
relation = relation.without_identifier()
|
relation = relation.without_identifier()
|
||||||
fire_event(SchemaDrop(relation=_make_key(relation)))
|
fire_event(SchemaDrop(relation=_make_ref_key_dict(relation)))
|
||||||
kwargs = {
|
kwargs = {
|
||||||
"relation": relation,
|
"relation": relation,
|
||||||
}
|
}
|
||||||
self.execute_macro(DROP_SCHEMA_MACRO_NAME, kwargs=kwargs)
|
self.execute_macro(DROP_SCHEMA_MACRO_NAME, kwargs=kwargs)
|
||||||
|
self.commit_if_has_connection()
|
||||||
# we can update the cache here
|
# we can update the cache here
|
||||||
self.cache.drop_schema(relation.database, relation.schema)
|
self.cache.drop_schema(relation.database, relation.schema)
|
||||||
|
|
||||||
@@ -199,6 +198,7 @@ class SQLAdapter(BaseAdapter):
|
|||||||
)
|
)
|
||||||
return relations
|
return relations
|
||||||
|
|
||||||
|
@classmethod
|
||||||
def quote(self, identifier):
|
def quote(self, identifier):
|
||||||
return '"{}"'.format(identifier)
|
return '"{}"'.format(identifier)
|
||||||
|
|
||||||
@@ -218,3 +218,53 @@ class SQLAdapter(BaseAdapter):
|
|||||||
kwargs = {"information_schema": information_schema, "schema": schema}
|
kwargs = {"information_schema": information_schema, "schema": schema}
|
||||||
results = self.execute_macro(CHECK_SCHEMA_EXISTS_MACRO_NAME, kwargs=kwargs)
|
results = self.execute_macro(CHECK_SCHEMA_EXISTS_MACRO_NAME, kwargs=kwargs)
|
||||||
return results[0][0] > 0
|
return results[0][0] > 0
|
||||||
|
|
||||||
|
def validate_sql(self, sql: str) -> AdapterResponse:
|
||||||
|
"""Submit the given SQL to the engine for validation, but not execution.
|
||||||
|
|
||||||
|
By default we simply prefix the query with the explain keyword and allow the
|
||||||
|
exceptions thrown by the underlying engine on invalid SQL inputs to bubble up
|
||||||
|
to the exception handler. For adjustments to the explain statement - such as
|
||||||
|
for adapters that have different mechanisms for hinting at query validation
|
||||||
|
or dry-run - callers may be able to override the validate_sql_query macro with
|
||||||
|
the addition of an <adapter>__validate_sql implementation.
|
||||||
|
|
||||||
|
:param sql str: The sql to validate
|
||||||
|
"""
|
||||||
|
kwargs = {
|
||||||
|
"sql": sql,
|
||||||
|
}
|
||||||
|
result = self.execute_macro(VALIDATE_SQL_MACRO_NAME, kwargs=kwargs)
|
||||||
|
# The statement macro always returns an AdapterResponse in the output AttrDict's
|
||||||
|
# `response` property, and we preserve the full payload in case we want to
|
||||||
|
# return fetched output for engines where explain plans are emitted as columnar
|
||||||
|
# results. Any macro override that deviates from this behavior may encounter an
|
||||||
|
# assertion error in the runtime.
|
||||||
|
adapter_response = result.response # type: ignore[attr-defined]
|
||||||
|
assert isinstance(adapter_response, AdapterResponse), (
|
||||||
|
f"Expected AdapterResponse from validate_sql macro execution, "
|
||||||
|
f"got {type(adapter_response)}."
|
||||||
|
)
|
||||||
|
return adapter_response
|
||||||
|
|
||||||
|
# This is for use in the test suite
|
||||||
|
def run_sql_for_tests(self, sql, fetch, conn):
|
||||||
|
cursor = conn.handle.cursor()
|
||||||
|
try:
|
||||||
|
cursor.execute(sql)
|
||||||
|
if hasattr(conn.handle, "commit"):
|
||||||
|
conn.handle.commit()
|
||||||
|
if fetch == "one":
|
||||||
|
return cursor.fetchone()
|
||||||
|
elif fetch == "all":
|
||||||
|
return cursor.fetchall()
|
||||||
|
else:
|
||||||
|
return
|
||||||
|
except BaseException as e:
|
||||||
|
if conn.handle and not getattr(conn.handle, "closed", True):
|
||||||
|
conn.handle.rollback()
|
||||||
|
print(sql)
|
||||||
|
print(e)
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
conn.transaction_open = False
|
||||||
|
|||||||
71
core/dbt/cli/README.md
Normal file
71
core/dbt/cli/README.md
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|
# Adding a new command
|
||||||
|
|
||||||
|
## `main.py`
|
||||||
|
Add the new command with all necessary decorators. Every command will need at minimum:
|
||||||
|
- a decorator for the click group it belongs to which also names the command
|
||||||
|
- the postflight decorator (must come before other decorators from the `requires` module for error handling)
|
||||||
|
- the preflight decorator
|
||||||
|
```py
|
||||||
|
@cli.command("my-new-command")
|
||||||
|
@requires.postflight
|
||||||
|
@requires.preflight
|
||||||
|
def my_new_command(ctx, **kwargs):
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
## `types.py`
|
||||||
|
Add an entry to the `Command` enum with your new command. Commands that are sub-commands should have entries
|
||||||
|
that represent their full command path (e.g. `source freshness -> SOURCE_FRESHNESS`, `docs serve -> DOCS_SERVE`).
|
||||||
|
|
||||||
|
## `flags.py`
|
||||||
|
Add the new command to the dictionary within the `command_args` function.
|
||||||
|
|
||||||
|
# Exception Handling
|
||||||
|
|
||||||
|
## `requires.py`
|
||||||
|
|
||||||
|
### `postflight`
|
||||||
|
In the postflight decorator, the click command is invoked (i.e. `func(*args, **kwargs)`) and wrapped in a `try/except` block to handle any exceptions thrown.
|
||||||
|
Any exceptions thrown from `postflight` are wrapped by custom exceptions from the `dbt.cli.exceptions` module (i.e. `ResultExit`, `ExceptionExit`) to instruct click to complete execution with a particular exit code.
|
||||||
|
|
||||||
|
Some `dbt-core` handled exceptions have an attribute named `results` which contains results from running nodes (e.g. `FailFastError`). These are wrapped in the `ResultExit` exception to represent runs that have failed in a way that `dbt-core` expects.
|
||||||
|
If the invocation of the command does not throw any exceptions but does not succeed, `postflight` will still raise the `ResultExit` exception to make use of the exit code.
|
||||||
|
These exceptions produce an exit code of `1`.
|
||||||
|
|
||||||
|
Exceptions wrapped with `ExceptionExit` may be thrown by `dbt-core` intentionally (i.e. an exception that inherits from `dbt.exceptions.Exception`) or unintentionally (i.e. exceptions thrown by the python runtime). In either case these are considered errors that `dbt-core` did not expect and are treated as genuine exceptions.
|
||||||
|
These exceptions produce an exit code of `2`.
|
||||||
|
|
||||||
|
If no exceptions are thrown from invoking the command and the command succeeds, `postflight` will not raise any exceptions.
|
||||||
|
When no exceptions are raised an exit code of `0` is produced.
|
||||||
|
|
||||||
|
## `main.py`
|
||||||
|
|
||||||
|
### `dbtRunner`
|
||||||
|
`dbtRunner` provides a programmatic interface for our click CLI and wraps the invocation of the click commands to handle any exceptions thrown.
|
||||||
|
|
||||||
|
`dbtRunner.invoke` should ideally only ever return an instantiated `dbtRunnerResult` which contains the following fields:
|
||||||
|
- `success`: A boolean representing whether the command invocation was successful
|
||||||
|
- `result`: The optional result of the command invoked. This attribute can have many types, please see the definition of `dbtRunnerResult` for more information
|
||||||
|
- `exception`: If an exception was thrown during command invocation it will be saved here, otherwise it will be `None`. Please note that the exceptions held in this attribute are not the exceptions thrown by `preflight` but instead the exceptions that `ResultExit` and `ExceptionExit` wrap
|
||||||
|
|
||||||
|
Programmatic exception handling might look like the following:
|
||||||
|
```python
|
||||||
|
res = dbtRunner().invoke(["run"])
|
||||||
|
if not res.success:
|
||||||
|
...
|
||||||
|
if type(res.exception) == SomeExceptionType:
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
## `dbt/tests/util.py`
|
||||||
|
|
||||||
|
### `run_dbt`
|
||||||
|
In many of our functional and integration tests, we want to be sure that an invocation of `dbt` raises a certain exception.
|
||||||
|
A common pattern for these assertions:
|
||||||
|
```python
|
||||||
|
class TestSomething:
|
||||||
|
def test_something(self, project):
|
||||||
|
with pytest.raises(SomeException):
|
||||||
|
run_dbt(["run"])
|
||||||
|
```
|
||||||
|
To allow these tests to assert that exceptions have been thrown, the `run_dbt` function will raise any exceptions it receives from the invocation of a `dbt` command.
|
||||||
1
core/dbt/cli/__init__.py
Normal file
1
core/dbt/cli/__init__.py
Normal file
@@ -0,0 +1 @@
|
|||||||
|
from .main import cli as dbt_cli # noqa
|
||||||
16
core/dbt/cli/context.py
Normal file
16
core/dbt/cli/context.py
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
import click
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from dbt.cli.main import cli as dbt
|
||||||
|
|
||||||
|
|
||||||
|
def make_context(args, command=dbt) -> Optional[click.Context]:
    """Build a click Context for `command` from raw CLI args.

    Returns None when click signals an early exit while parsing (e.g. --help),
    otherwise a context with `invoked_subcommand` and an empty `obj` dict set.
    """
    try:
        context = command.make_context(command.name, args)
    except click.exceptions.Exit:
        # click raised an early exit during parsing; there is no context to return.
        return None

    if context.protected_args:
        context.invoked_subcommand = context.protected_args[0]
    else:
        context.invoked_subcommand = None
    context.obj = {}

    return context
|
||||||
43
core/dbt/cli/exceptions.py
Normal file
43
core/dbt/cli/exceptions.py
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
from typing import Optional, IO
|
||||||
|
|
||||||
|
from click.exceptions import ClickException
|
||||||
|
from dbt.utils import ExitCodes
|
||||||
|
|
||||||
|
|
||||||
|
class DbtUsageException(Exception):
    """Raised when the CLI is invoked with invalid or conflicting arguments
    (e.g. a flag supplied both before and after a subcommand, or a deprecated
    parameter set other than via its environment variable)."""

    pass
|
||||||
|
|
||||||
|
|
||||||
|
class DbtInternalException(Exception):
    """Raised for states that indicate a bug in dbt's CLI layer itself
    (e.g. an exit code this wrapper does not know how to handle)."""

    pass
|
||||||
|
|
||||||
|
|
||||||
|
class CliException(ClickException):
    """The base exception class for our implementation of the click CLI.
    The exit_code attribute is used by click to determine which exit code to produce
    after an invocation."""

    def __init__(self, exit_code: ExitCodes) -> None:
        # NOTE(review): ClickException.__init__ is intentionally not called, so
        # self.message is never populated — only the numeric exit code is carried.
        self.exit_code = exit_code.value

    # the typing of _file is to satisfy the signature of ClickException.show
    # overriding this method prevents click from printing any exceptions to stdout
    def show(self, _file: Optional[IO] = None) -> None:
        pass
|
||||||
|
|
||||||
|
|
||||||
|
class ResultExit(CliException):
    """Carries the results of a dbt invocation that did not succeed.

    Wraps any exception that contains results, or the results of an invocation
    that failed without raising, and maps them to the ModelError exit code.
    """

    def __init__(self, result) -> None:
        self.result = result
        super().__init__(ExitCodes.ModelError)
|
||||||
|
|
||||||
|
|
||||||
|
class ExceptionExit(CliException):
    """Wraps an exception (one carrying no results) thrown while invoking dbt,
    mapping it to the UnhandledError exit code."""

    def __init__(self, exception: Exception) -> None:
        self.exception = exception
        super().__init__(ExitCodes.UnhandledError)
|
||||||
404
core/dbt/cli/flags.py
Normal file
404
core/dbt/cli/flags.py
Normal file
@@ -0,0 +1,404 @@
|
|||||||
|
import os
|
||||||
|
import sys
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from importlib import import_module
|
||||||
|
from multiprocessing import get_context
|
||||||
|
from pprint import pformat as pf
|
||||||
|
from typing import Any, Callable, Dict, List, Optional, Set, Union
|
||||||
|
|
||||||
|
from click import Context, get_current_context, Parameter
|
||||||
|
from click.core import Command as ClickCommand, Group, ParameterSource
|
||||||
|
from dbt.cli.exceptions import DbtUsageException
|
||||||
|
from dbt.cli.resolvers import default_log_path, default_project_dir
|
||||||
|
from dbt.cli.types import Command as CliCommand
|
||||||
|
from dbt.config.profile import read_user_config
|
||||||
|
from dbt.contracts.project import UserConfig
|
||||||
|
from dbt.exceptions import DbtInternalError
|
||||||
|
from dbt.deprecations import renamed_env_var
|
||||||
|
from dbt.helper_types import WarnErrorOptions
|
||||||
|
|
||||||
|
if os.name != "nt":
|
||||||
|
# https://bugs.python.org/issue41567
|
||||||
|
import multiprocessing.popen_spawn_posix # type: ignore # noqa: F401
|
||||||
|
|
||||||
|
# Defaults applied first during Flags.__init__; CLI params, env vars, and
# user config may overwrite them afterwards.
FLAGS_DEFAULTS = {
    "INDIRECT_SELECTION": "eager",
    "TARGET_PATH": None,
    # Cli args without user_config or env var option.
    "FULL_REFRESH": False,
    "STRICT_MODE": False,
    "STORE_FAILURES": False,
    "INTROSPECT": True,
}

# Maps each deprecated click param name to the current param it aliases.
# Deprecated params may only be supplied via environment variables.
DEPRECATED_PARAMS = {
    "deprecated_defer": "defer",
    "deprecated_favor_state": "favor_state",
    "deprecated_print": "print",
    "deprecated_state": "state",
}


# Key in an args dict naming the command being run (see Flags.from_dict).
WHICH_KEY = "which"
|
||||||
|
|
||||||
|
|
||||||
|
def convert_config(config_name, config_value):
    """Convert the values from config and original set_from_args to the correct type.

    Currently only ``warn_error_options`` needs conversion: a raw dict read
    from the user config becomes a WarnErrorOptions instance. Every other
    value passes through unchanged.

    :param config_name: The flag/config key (compared case-insensitively).
    :param config_value: The raw value read from the user config.
    :return: The converted value, or ``config_value`` unchanged.
    """
    # isinstance (rather than `type(...) == dict`) also accepts dict
    # subclasses, e.g. the ordered/attr dicts some config loaders produce.
    if config_name.lower() == "warn_error_options" and isinstance(config_value, dict):
        return WarnErrorOptions(
            include=config_value.get("include", []),
            exclude=config_value.get("exclude", []),
        )
    return config_value
|
||||||
|
|
||||||
|
|
||||||
|
def args_to_context(args: List[str]) -> Context:
    """Convert a list of args to a click context with proper hierarchy for dbt commands"""
    # Imported locally to avoid a circular import with dbt.cli.main.
    from dbt.cli.main import cli

    cli_ctx = cli.make_context(cli.name, args)
    # Split args if they're a comma separated string.
    # NOTE(review): the split happens after the root context is built above, so
    # the top-level context parsed the unsplit string — confirm this is intended.
    if len(args) == 1 and "," in args[0]:
        args = args[0].split(",")
    sub_command_name, sub_command, args = cli.resolve_command(cli_ctx, args)

    # Handle source and docs group.
    # Groups nest one level deeper; resolve again to reach the leaf command.
    if isinstance(sub_command, Group):
        sub_command_name, sub_command, args = sub_command.resolve_command(cli_ctx, args)

    assert isinstance(sub_command, ClickCommand)
    sub_command_ctx = sub_command.make_context(sub_command_name, args)
    # Link contexts so parameter lookups can walk up to the root `cli` context.
    sub_command_ctx.parent = cli_ctx
    return sub_command_ctx
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
class Flags:
    """Primary configuration artifact for running dbt"""

    def __init__(
        self, ctx: Optional[Context] = None, user_config: Optional[UserConfig] = None
    ) -> None:
        """Assemble flags from (in increasing precedence for non-default values):
        FLAGS_DEFAULTS, the click context chain, deprecated env vars, and the
        user config. Attributes are set with object.__setattr__ because the
        dataclass is frozen.
        """

        # Set the default flags.
        for key, value in FLAGS_DEFAULTS.items():
            object.__setattr__(self, key, value)

        # Fall back to the active click context when none is supplied.
        if ctx is None:
            ctx = get_current_context()

        def _get_params_by_source(ctx: Context, source_type: ParameterSource):
            """Generates all params of a given source type."""
            yield from [
                name for name, source in ctx._parameter_source.items() if source is source_type
            ]
            # Walk up the context chain so parent-level params are included.
            if ctx.parent:
                yield from _get_params_by_source(ctx.parent, source_type)

        # Ensure that any params sourced from the commandline are not present more than once.
        # Click handles this exclusivity, but only at a per-subcommand level.
        seen_params = []
        for param in _get_params_by_source(ctx, ParameterSource.COMMANDLINE):
            if param in seen_params:
                raise DbtUsageException(
                    f"{param.lower()} was provided both before and after the subcommand, it can only be set either before or after.",
                )
            seen_params.append(param)

        def _assign_params(
            ctx: Context,
            params_assigned_from_default: set,
            deprecated_env_vars: Dict[str, Callable],
        ):
            """Recursively adds all click params to flag object"""
            for param_name, param_value in ctx.params.items():
                # N.B. You have to use the base MRO method (object.__setattr__) to set attributes
                # when using frozen dataclasses.
                # https://docs.python.org/3/library/dataclasses.html#frozen-instances

                # Handle deprecated env vars while still respecting old values
                # e.g. DBT_NO_PRINT -> DBT_PRINT if DBT_NO_PRINT is set, it is
                # respected over DBT_PRINT or --print.
                new_name: Union[str, None] = None
                if param_name in DEPRECATED_PARAMS:

                    # Deprecated env vars can only be set via env var.
                    # We use the deprecated option in click to serialize the value
                    # from the env var string.
                    param_source = ctx.get_parameter_source(param_name)
                    if param_source == ParameterSource.DEFAULT:
                        continue
                    elif param_source != ParameterSource.ENVIRONMENT:
                        raise DbtUsageException(
                            "Deprecated parameters can only be set via environment variables",
                        )

                    # Rename for clarity.
                    dep_name = param_name
                    new_name = DEPRECATED_PARAMS.get(dep_name)
                    try:
                        assert isinstance(new_name, str)
                    except AssertionError:
                        raise Exception(
                            f"No deprecated param name match in DEPRECATED_PARAMS from {dep_name} to {new_name}"
                        )

                    # Find param objects for their envvar name.
                    try:
                        dep_param = [x for x in ctx.command.params if x.name == dep_name][0]
                        new_param = [x for x in ctx.command.params if x.name == new_name][0]
                    except IndexError:
                        raise Exception(
                            f"No deprecated param name match in context from {dep_name} to {new_name}"
                        )

                    # Remove param from defaulted set since the deprecated
                    # value is not set from default, but from an env var.
                    if new_name in params_assigned_from_default:
                        params_assigned_from_default.remove(new_name)

                    # Add the deprecation warning function to the set.
                    assert isinstance(dep_param.envvar, str)
                    assert isinstance(new_param.envvar, str)
                    deprecated_env_vars[new_name] = renamed_env_var(
                        old_name=dep_param.envvar,
                        new_name=new_param.envvar,
                    )

                # Set the flag value.
                # A non-default value never gets clobbered by a duplicate
                # (e.g. parent-level) occurrence of the same param.
                is_duplicate = hasattr(self, param_name.upper())
                is_default = ctx.get_parameter_source(param_name) == ParameterSource.DEFAULT
                flag_name = (new_name or param_name).upper()

                if (is_duplicate and not is_default) or not is_duplicate:
                    object.__setattr__(self, flag_name, param_value)

                # Track default assigned params.
                if is_default:
                    params_assigned_from_default.add(param_name)

            if ctx.parent:
                _assign_params(ctx.parent, params_assigned_from_default, deprecated_env_vars)

        params_assigned_from_default = set()  # type: Set[str]
        deprecated_env_vars: Dict[str, Callable] = {}
        _assign_params(ctx, params_assigned_from_default, deprecated_env_vars)

        # Set deprecated_env_var_warnings to be fired later after events have been init.
        object.__setattr__(
            self, "deprecated_env_var_warnings", [x for x in deprecated_env_vars.values()]
        )

        # Get the invoked command flags.
        invoked_subcommand_name = (
            ctx.invoked_subcommand if hasattr(ctx, "invoked_subcommand") else None
        )
        if invoked_subcommand_name is not None:
            # Build a context for the subcommand from sys.argv and fold its
            # params in too; unknown/extra args are tolerated here on purpose.
            invoked_subcommand = getattr(import_module("dbt.cli.main"), invoked_subcommand_name)
            invoked_subcommand.allow_extra_args = True
            invoked_subcommand.ignore_unknown_options = True
            invoked_subcommand_ctx = invoked_subcommand.make_context(None, sys.argv)
            _assign_params(
                invoked_subcommand_ctx, params_assigned_from_default, deprecated_env_vars
            )

        if not user_config:
            profiles_dir = getattr(self, "PROFILES_DIR", None)
            user_config = read_user_config(profiles_dir) if profiles_dir else None

        # Add entire invocation command to flags
        object.__setattr__(self, "INVOCATION_COMMAND", "dbt " + " ".join(sys.argv[1:]))

        # Overwrite default assignments with user config if available.
        if user_config:
            param_assigned_from_default_copy = params_assigned_from_default.copy()
            for param_assigned_from_default in params_assigned_from_default:
                user_config_param_value = getattr(user_config, param_assigned_from_default, None)
                if user_config_param_value is not None:
                    object.__setattr__(
                        self,
                        param_assigned_from_default.upper(),
                        convert_config(param_assigned_from_default, user_config_param_value),
                    )
                    param_assigned_from_default_copy.remove(param_assigned_from_default)
            params_assigned_from_default = param_assigned_from_default_copy

        # Set hard coded flags.
        object.__setattr__(self, "WHICH", invoked_subcommand_name or ctx.info_name)
        object.__setattr__(self, "MP_CONTEXT", get_context("spawn"))

        # Apply the lead/follow relationship between some parameters.
        self._override_if_set("USE_COLORS", "USE_COLORS_FILE", params_assigned_from_default)
        self._override_if_set("LOG_LEVEL", "LOG_LEVEL_FILE", params_assigned_from_default)
        self._override_if_set("LOG_FORMAT", "LOG_FORMAT_FILE", params_assigned_from_default)

        # Set default LOG_PATH from PROJECT_DIR, if available.
        # Starting in v1.5, if `log-path` is set in `dbt_project.yml`, it will raise a deprecation warning,
        # with the possibility of removing it in a future release.
        if getattr(self, "LOG_PATH", None) is None:
            project_dir = getattr(self, "PROJECT_DIR", default_project_dir())
            version_check = getattr(self, "VERSION_CHECK", True)
            object.__setattr__(self, "LOG_PATH", default_log_path(project_dir, version_check))

        # Support console DO NOT TRACK initiative.
        if os.getenv("DO_NOT_TRACK", "").lower() in ("1", "t", "true", "y", "yes"):
            object.__setattr__(self, "SEND_ANONYMOUS_USAGE_STATS", False)

        # Check mutual exclusivity once all flags are set.
        self._assert_mutually_exclusive(
            params_assigned_from_default, ["WARN_ERROR", "WARN_ERROR_OPTIONS"]
        )

        # Support lower cased access for legacy code.
        params = set(
            x for x in dir(self) if not callable(getattr(self, x)) and not x.startswith("__")
        )
        for param in params:
            object.__setattr__(self, param.lower(), getattr(self, param))

    def __str__(self) -> str:
        """Pretty-printed dump of every flag for debugging."""
        return str(pf(self.__dict__))

    def _override_if_set(self, lead: str, follow: str, defaulted: Set[str]) -> None:
        """If the value of the lead parameter was set explicitly, apply the value to follow, unless follow was also set explicitly."""
        if lead.lower() not in defaulted and follow.lower() in defaulted:
            object.__setattr__(self, follow.upper(), getattr(self, lead.upper(), None))

    def _assert_mutually_exclusive(
        self, params_assigned_from_default: Set[str], group: List[str]
    ) -> None:
        """
        Ensure no elements from group are simultaneously provided by a user, as inferred from params_assigned_from_default.
        Raises click.UsageError if any two elements from group are simultaneously provided by a user.
        """
        set_flag = None
        for flag in group:
            flag_set_by_user = flag.lower() not in params_assigned_from_default
            if flag_set_by_user and set_flag:
                raise DbtUsageException(
                    f"{flag.lower()}: not allowed with argument {set_flag.lower()}"
                )
            elif flag_set_by_user:
                set_flag = flag

    def fire_deprecations(self):
        """Fires events for deprecated env_var usage."""
        [dep_fn() for dep_fn in self.deprecated_env_var_warnings]
        # It is necessary to remove this attr from the class so it does
        # not get pickled when written to disk as json.
        object.__delattr__(self, "deprecated_env_var_warnings")

    @classmethod
    def from_dict(cls, command: CliCommand, args_dict: Dict[str, Any]) -> "Flags":
        """Build a Flags instance from a command and a plain dict of args,
        firing any deprecation warnings immediately."""
        command_arg_list = command_params(command, args_dict)
        ctx = args_to_context(command_arg_list)
        flags = cls(ctx=ctx)
        flags.fire_deprecations()
        return flags
|
||||||
|
|
||||||
|
|
||||||
|
CommandParams = List[str]
|
||||||
|
|
||||||
|
|
||||||
|
def command_params(command: CliCommand, args_dict: Dict[str, Any]) -> CommandParams:
    """Translate a command plus an args dict into a list of CLI tokens.

    The order of the returned list is consistent with which flags are expected
    at the parent level vs the command level, e.g.
    fn("run", {"defer": True, "print": False}) -> ["--no-print", "run", "--defer"]

    The result of this function can be passed in to the args_to_context
    function to produce a click context to instantiate Flags with.
    """
    cmd_args = set(command_args(command))
    prnt_args = set(parent_args())
    default_args = set(name.lower() for name in FLAGS_DEFAULTS.keys())

    res = command.to_list()

    for key, value in args_dict.items():
        key = key.lower()

        # A "which" entry is metadata, not a flag, but must agree with `command`.
        if key == WHICH_KEY:
            if value != command.value:
                raise DbtInternalError(
                    f"Command '{command.value}' does not match value of which: '{value}'"
                )
            continue

        # Params assigned purely from defaults are not emitted.
        if key not in (cmd_args | prnt_args) - default_args:
            continue

        spinal_cased = key.replace("_", "-")

        # Render the CLI token for this key/value pair.
        if key == "macro" and command == CliCommand.RUN_OPERATION:
            token = value
        elif value in (None, False):
            token = f"--no-{spinal_cased}"
        elif value is True:
            token = f"--{spinal_cased}"
        else:
            token = f"--{spinal_cased}={value}"

        # Parent-level flags go before the command name, command-level after,
        # e.g. ["--print", "run"] vs ["run", "--print"].
        if key in prnt_args:
            res.insert(0, token)
        else:
            res.append(token)

    return res
|
||||||
|
|
||||||
|
|
||||||
|
ArgsList = List[str]
|
||||||
|
|
||||||
|
|
||||||
|
def parent_args() -> ArgsList:
    """Return the param names accepted by the base `dbt` click command."""
    # Local import to avoid a circular dependency with dbt.cli.main.
    from dbt.cli.main import cli

    base_params = cli.params
    return format_params(base_params)
|
||||||
|
|
||||||
|
|
||||||
|
def command_args(command: CliCommand) -> ArgsList:
    """Return the param names a given command accepts.

    Only params attached to the command itself are included, not those of its
    parent command, e.g. fn("run") -> ["defer", "favor_state", "exclude", ...]
    """
    # Local import to avoid a circular dependency with dbt.cli.main.
    import dbt.cli.main as cli

    # Lookup table from the command enum to its click command object.
    CMD_DICT: Dict[CliCommand, ClickCommand] = {
        CliCommand.BUILD: cli.build,
        CliCommand.CLEAN: cli.clean,
        CliCommand.CLONE: cli.clone,
        CliCommand.COMPILE: cli.compile,
        CliCommand.DOCS_GENERATE: cli.docs_generate,
        CliCommand.DOCS_SERVE: cli.docs_serve,
        CliCommand.DEBUG: cli.debug,
        CliCommand.DEPS: cli.deps,
        CliCommand.INIT: cli.init,
        CliCommand.LIST: cli.list,
        CliCommand.PARSE: cli.parse,
        CliCommand.RUN: cli.run,
        CliCommand.RUN_OPERATION: cli.run_operation,
        CliCommand.SEED: cli.seed,
        CliCommand.SHOW: cli.show,
        CliCommand.SNAPSHOT: cli.snapshot,
        CliCommand.SOURCE_FRESHNESS: cli.freshness,
        CliCommand.TEST: cli.test,
        CliCommand.RETRY: cli.retry,
    }
    try:
        click_cmd: ClickCommand = CMD_DICT[command]
    except KeyError:
        raise DbtInternalError(f"No command found for name '{command.name}'")
    return format_params(click_cmd.params)
|
||||||
|
|
||||||
|
|
||||||
|
def format_params(params: List[Parameter]) -> ArgsList:
    """Return the names of *params*, dropping deprecated placeholder params."""
    names = (str(param.name) for param in params)
    return [name for name in names if not name.lower().startswith("deprecated_")]
|
||||||
851
core/dbt/cli/main.py
Normal file
851
core/dbt/cli/main.py
Normal file
@@ -0,0 +1,851 @@
|
|||||||
|
from copy import copy
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Callable, List, Optional, Union
|
||||||
|
|
||||||
|
import click
|
||||||
|
from click.exceptions import (
|
||||||
|
Exit as ClickExit,
|
||||||
|
BadOptionUsage,
|
||||||
|
NoSuchOption,
|
||||||
|
UsageError,
|
||||||
|
)
|
||||||
|
|
||||||
|
from dbt.cli import requires, params as p
|
||||||
|
from dbt.cli.exceptions import (
|
||||||
|
DbtInternalException,
|
||||||
|
DbtUsageException,
|
||||||
|
)
|
||||||
|
from dbt.contracts.graph.manifest import Manifest
|
||||||
|
from dbt.contracts.results import (
|
||||||
|
CatalogArtifact,
|
||||||
|
RunExecutionResult,
|
||||||
|
)
|
||||||
|
from dbt.events.base_types import EventMsg
|
||||||
|
from dbt.task.build import BuildTask
|
||||||
|
from dbt.task.clean import CleanTask
|
||||||
|
from dbt.task.clone import CloneTask
|
||||||
|
from dbt.task.compile import CompileTask
|
||||||
|
from dbt.task.debug import DebugTask
|
||||||
|
from dbt.task.deps import DepsTask
|
||||||
|
from dbt.task.freshness import FreshnessTask
|
||||||
|
from dbt.task.generate import GenerateTask
|
||||||
|
from dbt.task.init import InitTask
|
||||||
|
from dbt.task.list import ListTask
|
||||||
|
from dbt.task.retry import RetryTask
|
||||||
|
from dbt.task.run import RunTask
|
||||||
|
from dbt.task.run_operation import RunOperationTask
|
||||||
|
from dbt.task.seed import SeedTask
|
||||||
|
from dbt.task.serve import ServeTask
|
||||||
|
from dbt.task.show import ShowTask
|
||||||
|
from dbt.task.snapshot import SnapshotTask
|
||||||
|
from dbt.task.test import TestTask
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class dbtRunnerResult:
    """Contains the result of an invocation of the dbtRunner"""

    # True when the invoked command completed without errors.
    success: bool

    # The underlying exception when the invocation raised; None otherwise.
    exception: Optional[BaseException] = None
    # Command-specific payload; the inline comments list which command
    # produces which type.
    result: Union[
        bool,  # debug
        CatalogArtifact,  # docs generate
        List[str],  # list/ls
        Manifest,  # parse
        None,  # clean, deps, init, source
        RunExecutionResult,  # build, compile, run, seed, snapshot, test, run-operation
    ] = None
|
||||||
|
|
||||||
|
|
||||||
|
# Programmatic invocation
class dbtRunner:
    """Programmatic interface to the click CLI: wraps command invocation so
    callers always get a dbtRunnerResult back instead of raw exceptions or
    process exit codes."""

    def __init__(
        self,
        manifest: Optional[Manifest] = None,
        callbacks: Optional[List[Callable[[EventMsg], None]]] = None,
    ):
        # Optional pre-loaded manifest passed through to commands via ctx.obj.
        self.manifest = manifest

        if callbacks is None:
            callbacks = []
        # Event callbacks, each invoked with an EventMsg; passed via ctx.obj.
        self.callbacks = callbacks

    def invoke(self, args: List[str], **kwargs) -> dbtRunnerResult:
        """Invoke a dbt command.

        :param args: CLI tokens, e.g. ["run", "--select", "my_model"].
        :param kwargs: Extra params injected directly into the click context.
        :return: A dbtRunnerResult; exceptions are captured, not re-raised.
        """
        try:
            dbt_ctx = cli.make_context(cli.name, args)
            dbt_ctx.obj = {
                "manifest": self.manifest,
                "callbacks": self.callbacks,
            }

            for key, value in kwargs.items():
                dbt_ctx.params[key] = value
                # Hack to set parameter source to custom string
                dbt_ctx.set_parameter_source(key, "kwargs")  # type: ignore

            result, success = cli.invoke(dbt_ctx)
            return dbtRunnerResult(
                result=result,
                success=success,
            )
        # The command failed but produced results (e.g. model errors).
        except requires.ResultExit as e:
            return dbtRunnerResult(
                result=e.result,
                success=False,
            )
        # The command raised; surface the wrapped original exception.
        except requires.ExceptionExit as e:
            return dbtRunnerResult(
                exception=e.exception,
                success=False,
            )
        # Bad CLI usage (unknown flag, etc.) normalized to DbtUsageException.
        except (BadOptionUsage, NoSuchOption, UsageError) as e:
            return dbtRunnerResult(
                exception=DbtUsageException(e.message),
                success=False,
            )
        # click's Exit(0) — e.g. --help / --version — counts as success.
        except ClickExit as e:
            if e.exit_code == 0:
                return dbtRunnerResult(success=True)
            return dbtRunnerResult(
                exception=DbtInternalException(f"unhandled exit code {e.exit_code}"),
                success=False,
            )
        # Last resort: programmatic callers never see a raw exception escape.
        except BaseException as e:
            return dbtRunnerResult(
                exception=e,
                success=False,
            )
|
||||||
|
|
||||||
|
|
||||||
|
# dbt
@click.group(
    context_settings={"help_option_names": ["-h", "--help"]},
    invoke_without_command=True,
    no_args_is_help=True,
    epilog="Specify one of these sub-commands and you can find more help from there.",
)
@click.pass_context
# One @p.<name> decorator per global option shared across dbt sub-commands.
@p.cache_selected_only
@p.debug
@p.deprecated_print
@p.enable_legacy_logger
@p.fail_fast
@p.log_cache_events
@p.log_file_max_bytes
@p.log_format
@p.log_format_file
@p.log_level
@p.log_level_file
@p.log_path
@p.macro_debugging
@p.partial_parse
@p.partial_parse_file_path
@p.populate_cache
@p.print
@p.printer_width
@p.quiet
@p.record_timing_info
@p.send_anonymous_usage_stats
@p.single_threaded
@p.static_parser
@p.use_colors
@p.use_colors_file
@p.use_experimental_parser
@p.version
@p.version_check
@p.warn_error
@p.warn_error_options
@p.write_json
def cli(ctx, **kwargs):
    """An ELT tool for managing your SQL transformations and data models.
    For more documentation on these commands, visit: docs.getdbt.com
    """
    # Intentionally empty: the group only collects global options; all work
    # happens in the sub-commands.
|
||||||
|
|
||||||
|
|
||||||
|
# dbt build
@cli.command("build")
@click.pass_context
@p.defer
@p.deprecated_defer
@p.exclude
@p.fail_fast
@p.favor_state
@p.deprecated_favor_state
@p.full_refresh
@p.indirect_selection
@p.profile
@p.profiles_dir
@p.project_dir
@p.resource_type
@p.select
@p.selector
@p.show
@p.state
@p.defer_state
@p.deprecated_state
@p.store_failures
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
# NOTE(review): the @requires stack appears to populate the ctx.obj entries
# ("flags", "runtime_config", "manifest") read below; ordering is significant
# — confirm against dbt.cli.requires.
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def build(ctx, **kwargs):
    """Run all seeds, models, snapshots, and tests in DAG order"""
    task = BuildTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )

    results = task.run()
    success = task.interpret_results(results)
    # (results, success) is consumed by the requires wrappers / cli.invoke.
    return results, success
|
||||||
|
|
||||||
|
|
||||||
|
# dbt clean
@cli.command("clean")
@click.pass_context
@p.profile
@p.profiles_dir
@p.project_dir
@p.target
@p.target_path
@p.vars
# clean runs without a resolved profile (requires.unset_profile).
@requires.postflight
@requires.preflight
@requires.unset_profile
@requires.project
def clean(ctx, **kwargs):
    """Delete all folders in the clean-targets list (usually the dbt_packages and target directories.)"""
    task = CleanTask(ctx.obj["flags"], ctx.obj["project"])

    results = task.run()
    success = task.interpret_results(results)
    return results, success
|
||||||
|
|
||||||
|
|
||||||
|
# dbt docs
@cli.group()
@click.pass_context
def docs(ctx, **kwargs):
    """Generate or serve the documentation website for your project"""
    # Intentionally empty: a grouping node for `docs generate` / `docs serve`.
|
||||||
|
|
||||||
|
|
||||||
|
# dbt docs generate
@docs.command("generate")
@click.pass_context
@p.compile_docs
@p.defer
@p.deprecated_defer
@p.exclude
@p.favor_state
@p.deprecated_favor_state
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.empty_catalog
@p.state
@p.defer_state
@p.deprecated_state
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
# write=False: the manifest is loaded but not re-written for docs generation.
@requires.manifest(write=False)
def docs_generate(ctx, **kwargs):
    """Generate the documentation website for your project"""
    task = GenerateTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success
|
||||||
|
|
||||||
|
|
||||||
|
# dbt docs serve
@docs.command("serve")
@click.pass_context
@p.browser
@p.port
@p.profile
@p.profiles_dir
@p.project_dir
@p.target
@p.target_path
@p.vars
# No @requires.manifest: serving only needs the runtime config.
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
def docs_serve(ctx, **kwargs):
    """Serve the documentation website for your project"""
    task = ServeTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success
|
||||||
|
|
||||||
|
|
||||||
|
# dbt compile
@cli.command("compile")
@click.pass_context
@p.defer
@p.deprecated_defer
@p.exclude
@p.favor_state
@p.deprecated_favor_state
@p.full_refresh
@p.show_output_format
@p.indirect_selection
@p.introspect
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.inline
@p.state
@p.defer_state
@p.deprecated_state
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def compile(ctx, **kwargs):
    """Generates executable SQL from source, model, test, and analysis files. Compiled SQL files are written to the
    target/ directory."""
    task = CompileTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success
|
||||||
|
|
||||||
|
|
||||||
|
# dbt show
@cli.command("show")
@click.pass_context
@p.defer
@p.deprecated_defer
@p.exclude
@p.favor_state
@p.deprecated_favor_state
@p.full_refresh
@p.show_output_format
@p.show_limit
@p.indirect_selection
@p.introspect
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.inline
@p.state
@p.defer_state
@p.deprecated_state
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def show(ctx, **kwargs):
    """Generates executable SQL for a named resource or inline query, runs that SQL, and returns a preview of the
    results. Does not materialize anything to the warehouse."""
    # Standard command pattern: construct, execute, interpret.
    show_task = ShowTask(
        ctx.obj["flags"], ctx.obj["runtime_config"], ctx.obj["manifest"]
    )
    run_results = show_task.run()
    return run_results, show_task.interpret_results(run_results)
|
||||||
|
|
||||||
|
|
||||||
|
# dbt debug
@cli.command("debug")
@click.pass_context
@p.debug_connection
@p.config_dir
@p.profile
@p.profiles_dir_exists_false
@p.project_dir
@p.target
@p.vars
@p.version_check
@requires.postflight
@requires.preflight
def debug(ctx, **kwargs):
    """Show information on the current dbt environment and check dependencies, then test the database connection. Not to be confused with the --debug option which increases verbosity."""
    # No @requires.runtime_config here: debug inspects the environment itself,
    # so the second constructor argument is intentionally None.
    debug_task = DebugTask(ctx.obj["flags"], None)
    run_results = debug_task.run()
    return run_results, debug_task.interpret_results(run_results)
|
||||||
|
|
||||||
|
|
||||||
|
# dbt deps
@cli.command("deps")
@click.pass_context
@p.profile
@p.profiles_dir_exists_false
@p.project_dir
@p.target
@p.vars
@requires.postflight
@requires.preflight
@requires.unset_profile
@requires.project
def deps(ctx, **kwargs):
    """Pull the most recent version of the dependencies listed in packages.yml"""
    # deps runs against the project only (no profile/connection is loaded,
    # per @requires.unset_profile above).
    deps_task = DepsTask(ctx.obj["flags"], ctx.obj["project"])
    run_results = deps_task.run()
    return run_results, deps_task.interpret_results(run_results)
|
||||||
|
|
||||||
|
|
||||||
|
# dbt init
@cli.command("init")
@click.pass_context
# for backwards compatibility, accept 'project_name' as an optional positional argument
@click.argument("project_name", required=False)
@p.profile
@p.profiles_dir_exists_false
@p.project_dir
@p.skip_profile_setup
@p.target
@p.vars
@requires.postflight
@requires.preflight
def init(ctx, **kwargs):
    """Initialize a new dbt project."""
    # init runs before a project/config exists, hence the explicit None.
    init_task = InitTask(ctx.obj["flags"], None)
    run_results = init_task.run()
    return run_results, init_task.interpret_results(run_results)
|
||||||
|
|
||||||
|
|
||||||
|
# dbt list
@cli.command("list")
@click.pass_context
@p.exclude
@p.indirect_selection
@p.models
@p.output
@p.output_keys
@p.profile
@p.profiles_dir
@p.project_dir
@p.resource_type
@p.raw_select
@p.selector
@p.state
@p.defer_state
@p.deprecated_state
@p.target
@p.target_path
@p.vars
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def list(ctx, **kwargs):
    """List the resources in your project"""
    # NOTE: the function name shadows the builtin `list`, but it is the public
    # click command name and cannot be renamed without breaking the CLI.
    list_task = ListTask(
        ctx.obj["flags"], ctx.obj["runtime_config"], ctx.obj["manifest"]
    )
    run_results = list_task.run()
    return run_results, list_task.interpret_results(run_results)
|
||||||
|
|
||||||
|
|
||||||
|
# Alias "list" to "ls"
# A shallow copy of the registered "list" command is re-registered under the
# short name "ls"; hidden=True keeps the alias out of `dbt --help` output.
ls = copy(cli.commands["list"])
ls.hidden = True
cli.add_command(ls, "ls")
|
||||||
|
|
||||||
|
|
||||||
|
# dbt parse
@cli.command("parse")
@click.pass_context
@p.profile
@p.profiles_dir
@p.project_dir
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest(write_perf_info=True)
def parse(ctx, **kwargs):
    """Parses the project and provides information on performance"""
    # Manifest generation (and the performance-info file, via
    # write_perf_info=True) happens inside @requires.manifest; this command
    # body only surfaces the parsed manifest, always reporting success.

    return ctx.obj["manifest"], True
|
||||||
|
|
||||||
|
|
||||||
|
# dbt run
@cli.command("run")
@click.pass_context
@p.defer
@p.deprecated_defer
@p.favor_state
@p.deprecated_favor_state
@p.exclude
@p.fail_fast
@p.full_refresh
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.state
@p.defer_state
@p.deprecated_state
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def run(ctx, **kwargs):
    """Compile SQL and execute against the current target database."""
    # Standard command pattern: construct, execute, interpret.
    run_task = RunTask(
        ctx.obj["flags"], ctx.obj["runtime_config"], ctx.obj["manifest"]
    )
    run_results = run_task.run()
    return run_results, run_task.interpret_results(run_results)
|
||||||
|
|
||||||
|
|
||||||
|
# dbt retry
@cli.command("retry")
@click.pass_context
@p.project_dir
@p.profiles_dir
@p.vars
@p.profile
@p.target
@p.state
@p.threads
@p.fail_fast
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def retry(ctx, **kwargs):
    """Retry the nodes that failed in the previous run."""
    # Standard command pattern: construct, execute, interpret.
    retry_task = RetryTask(
        ctx.obj["flags"], ctx.obj["runtime_config"], ctx.obj["manifest"]
    )
    run_results = retry_task.run()
    return run_results, retry_task.interpret_results(run_results)
|
||||||
|
|
||||||
|
|
||||||
|
# dbt clone
@cli.command("clone")
@click.pass_context
@p.defer_state
@p.exclude
@p.full_refresh
@p.profile
@p.profiles_dir
@p.project_dir
@p.resource_type
@p.select
@p.selector
@p.state  # required
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
# @requires.postflight must be the first (outermost) requires decorator, as it
# is on every other command in this file; previously it was last here, so it
# wrapped nothing and ran in a different position than on sibling commands.
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def clone(ctx, **kwargs):
    """Create clones of selected nodes based on their location in the manifest provided to --state."""
    # Standard command pattern: flags/config/manifest come from ctx.obj,
    # populated by the @requires decorators above.
    task = CloneTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success
|
||||||
|
|
||||||
|
|
||||||
|
# dbt run operation
@cli.command("run-operation")
@click.pass_context
@click.argument("macro")
@p.args
@p.profile
@p.profiles_dir
@p.project_dir
@p.target
@p.target_path
@p.threads
@p.vars
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def run_operation(ctx, **kwargs):
    """Run the named macro with any supplied arguments."""
    # Standard command pattern: construct, execute, interpret.
    op_task = RunOperationTask(
        ctx.obj["flags"], ctx.obj["runtime_config"], ctx.obj["manifest"]
    )
    run_results = op_task.run()
    return run_results, op_task.interpret_results(run_results)
|
||||||
|
|
||||||
|
|
||||||
|
# dbt seed
@cli.command("seed")
@click.pass_context
@p.exclude
@p.full_refresh
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.show
@p.state
@p.defer_state
@p.deprecated_state
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def seed(ctx, **kwargs):
    """Load data from csv files into your data warehouse."""
    # Standard command pattern: construct, execute, interpret.
    seed_task = SeedTask(
        ctx.obj["flags"], ctx.obj["runtime_config"], ctx.obj["manifest"]
    )
    run_results = seed_task.run()
    return run_results, seed_task.interpret_results(run_results)
|
||||||
|
|
||||||
|
|
||||||
|
# dbt snapshot
@cli.command("snapshot")
@click.pass_context
@p.defer
@p.deprecated_defer
@p.exclude
@p.favor_state
@p.deprecated_favor_state
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.state
@p.defer_state
@p.deprecated_state
@p.target
@p.target_path
@p.threads
@p.vars
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def snapshot(ctx, **kwargs):
    """Execute snapshots defined in your project"""
    # Standard command pattern: construct, execute, interpret.
    snapshot_task = SnapshotTask(
        ctx.obj["flags"], ctx.obj["runtime_config"], ctx.obj["manifest"]
    )
    run_results = snapshot_task.run()
    return run_results, snapshot_task.interpret_results(run_results)
|
||||||
|
|
||||||
|
|
||||||
|
# dbt source
# Command group: subcommands (e.g. `freshness` below) are registered on it
# via @source.command(...). The group body itself does nothing.
@cli.group()
@click.pass_context
def source(ctx, **kwargs):
    """Manage your project's sources"""
|
||||||
|
|
||||||
|
|
||||||
|
# dbt source freshness
@source.command("freshness")
@click.pass_context
@p.exclude
@p.output_path  # TODO: Is this ok to re-use? We have three different output params, how much can we consolidate?
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.state
@p.defer_state
@p.deprecated_state
@p.target
@p.target_path
@p.threads
@p.vars
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def freshness(ctx, **kwargs):
    """check the current freshness of the project's sources"""
    # Standard command pattern: construct, execute, interpret.
    freshness_task = FreshnessTask(
        ctx.obj["flags"], ctx.obj["runtime_config"], ctx.obj["manifest"]
    )
    run_results = freshness_task.run()
    return run_results, freshness_task.interpret_results(run_results)
|
||||||
|
|
||||||
|
|
||||||
|
# Alias "source freshness" to "snapshot-freshness"
# Registers a hidden copy of the `source freshness` subcommand under the
# legacy name `snapshot-freshness` for backwards compatibility.
snapshot_freshness = copy(cli.commands["source"].commands["freshness"])  # type: ignore
snapshot_freshness.hidden = True
cli.commands["source"].add_command(snapshot_freshness, "snapshot-freshness")  # type: ignore
|
||||||
|
|
||||||
|
|
||||||
|
# dbt test
@cli.command("test")
@click.pass_context
@p.defer
@p.deprecated_defer
@p.exclude
@p.fail_fast
@p.favor_state
@p.deprecated_favor_state
@p.indirect_selection
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.state
@p.defer_state
@p.deprecated_state
@p.store_failures
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def test(ctx, **kwargs):
    """Runs tests on data in deployed models. Run this after `dbt run`"""
    # Standard command pattern: construct, execute, interpret.
    test_task = TestTask(
        ctx.obj["flags"], ctx.obj["runtime_config"], ctx.obj["manifest"]
    )
    run_results = test_task.run()
    return run_results, test_task.interpret_results(run_results)
|
||||||
|
|
||||||
|
|
||||||
|
# Support running as a module
# e.g. `python -m dbt.cli.main ...` dispatches straight into the click group.
if __name__ == "__main__":
    cli()
|
||||||
62
core/dbt/cli/option_types.py
Normal file
62
core/dbt/cli/option_types.py
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
from click import ParamType, Choice
|
||||||
|
|
||||||
|
from dbt.config.utils import parse_cli_yaml_string
|
||||||
|
from dbt.exceptions import ValidationError, DbtValidationError, OptionNotYamlDictError
|
||||||
|
|
||||||
|
from dbt.helper_types import WarnErrorOptions
|
||||||
|
|
||||||
|
|
||||||
|
class YAML(ParamType):
    """The Click YAML type. Converts YAML strings into objects."""

    name = "YAML"

    def convert(self, value, param, ctx):
        # Only string input can be parsed as YAML; anything else is an error.
        if not isinstance(value, str):
            self.fail(f"Cannot load YAML from type {type(value)}", param, ctx)
        # Use the option's primary spelling (sans leading dashes) for error context.
        option_name = param.opts[0] if param.opts else param.name
        try:
            return parse_cli_yaml_string(value, option_name.strip("-"))
        except (ValidationError, DbtValidationError, OptionNotYamlDictError):
            self.fail(f"String '{value}' is not valid YAML", param, ctx)
|
||||||
|
|
||||||
|
|
||||||
|
class WarnErrorOptionsType(YAML):
    """The Click WarnErrorOptions type. Converts YAML strings into objects."""

    name = "WarnErrorOptionsType"

    def convert(self, value, param, ctx):
        # Parse the raw string via the YAML base type, then split the mapping
        # into its include/exclude halves (each defaulting to an empty list).
        parsed = super().convert(value, param, ctx)
        include = parsed.get("include", [])
        exclude = parsed.get("exclude", [])
        return WarnErrorOptions(include=include, exclude=exclude)
|
||||||
|
|
||||||
|
|
||||||
|
class Truthy(ParamType):
    """The Click Truthy type. Converts strings into a "truthy" type"""

    name = "TRUTHY"

    def convert(self, value, param, ctx):
        # assume non-string / non-None values are a problem
        # NOTE: the tuple must contain types; the previous `(str, None)` made
        # isinstance raise TypeError for any value that was not a str —
        # including None, which the branch below explicitly intends to accept.
        if not isinstance(value, (str, type(None))):
            self.fail(f"Cannot load TRUTHY from type {type(value)}", param, ctx)

        # Falsy spellings (and absence) normalize to None; everything else
        # passes through unchanged.
        if value is None or value.lower() in ("0", "false", "f"):
            return None
        else:
            return value
|
||||||
|
|
||||||
|
|
||||||
|
class ChoiceTuple(Choice):
    # A Choice variant that validates each element of a tuple value against
    # the declared choices (used with MultiOption, which collects tuples).
    name = "CHOICE_TUPLE"

    def convert(self, value, param, ctx):
        # Validate every member individually; the tuple itself is returned
        # unmodified once all members pass.
        for member in value:
            super().convert(member, param, ctx)
        return value
|
||||||
75
core/dbt/cli/options.py
Normal file
75
core/dbt/cli/options.py
Normal file
@@ -0,0 +1,75 @@
|
|||||||
|
import click
|
||||||
|
import inspect
|
||||||
|
import typing as t
|
||||||
|
from click import Context
|
||||||
|
from dbt.cli.option_types import ChoiceTuple
|
||||||
|
|
||||||
|
|
||||||
|
# Implementation from: https://stackoverflow.com/a/48394004
# Note MultiOption options must be specified with type=tuple or type=ChoiceTuple (https://github.com/pallets/click/issues/2012)
class MultiOption(click.Option):
    """A click.Option that greedily consumes following bare arguments.

    Lets an option accept several space-separated values (e.g.
    ``--select a b c``) by hooking into the parser's ``process`` callback
    and pulling values off the remaining-args list until the next option
    prefix is seen.
    """

    def __init__(self, *args, **kwargs):
        # If True, stop consuming at the next recognized option prefix;
        # otherwise swallow everything remaining on the command line.
        self.save_other_options = kwargs.pop("save_other_options", True)
        nargs = kwargs.pop("nargs", -1)
        assert nargs == -1, "nargs, if set, must be -1 not {}".format(nargs)
        super(MultiOption, self).__init__(*args, **kwargs)
        # Filled in by add_to_parser: the parser hook we wrap, and its
        # original process callback.
        self._previous_parser_process = None
        self._eat_all_parser = None

        # validate that multiple=True
        # (popped after super().__init__ — super has already consumed it;
        # this only reads the value for validation)
        multiple = kwargs.pop("multiple", None)
        msg = f"MultiOption named `{self.name}` must have multiple=True (rather than {multiple})"
        assert multiple, msg

        # validate that type=tuple or type=ChoiceTuple
        option_type = kwargs.pop("type", None)
        msg = f"MultiOption named `{self.name}` must be tuple or ChoiceTuple (rather than {option_type})"
        if inspect.isclass(option_type):
            assert issubclass(option_type, tuple), msg
        else:
            assert isinstance(option_type, ChoiceTuple), msg

    def add_to_parser(self, parser, ctx):
        def parser_process(value, state):
            # method to hook to the parser.process
            done = False
            value = [value]
            if self.save_other_options:
                # grab everything up to the next option
                while state.rargs and not done:
                    for prefix in self._eat_all_parser.prefixes:
                        if state.rargs[0].startswith(prefix):
                            done = True
                    if not done:
                        value.append(state.rargs.pop(0))
            else:
                # grab everything remaining
                value += state.rargs
                state.rargs[:] = []
            value = tuple(value)
            # call the actual process
            self._previous_parser_process(value, state)

        retval = super(MultiOption, self).add_to_parser(parser, ctx)
        # Find this option's parser entry (long or short spelling) and splice
        # our greedy parser_process in front of the original callback.
        for name in self.opts:
            our_parser = parser._long_opt.get(name) or parser._short_opt.get(name)
            if our_parser:
                self._eat_all_parser = our_parser
                self._previous_parser_process = our_parser.process
                our_parser.process = parser_process
                break
        return retval

    def type_cast_value(self, ctx: Context, value: t.Any) -> t.Any:
        def flatten(data):
            if isinstance(data, tuple):
                for x in data:
                    yield from flatten(x)
            else:
                yield data

        # there will be nested tuples to flatten when multiple=True
        value = super(MultiOption, self).type_cast_value(ctx, value)
        if value:
            value = tuple(flatten(value))
        return value
|
||||||
592
core/dbt/cli/params.py
Normal file
592
core/dbt/cli/params.py
Normal file
@@ -0,0 +1,592 @@
|
|||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import click
|
||||||
|
from dbt.cli.options import MultiOption
|
||||||
|
from dbt.cli.option_types import YAML, ChoiceTuple, WarnErrorOptionsType
|
||||||
|
from dbt.cli.resolvers import default_project_dir, default_profiles_dir
|
||||||
|
from dbt.version import get_version_information
|
||||||
|
|
||||||
|
# --args: YAML dict of keyword arguments forwarded to the macro run by
# `dbt run-operation`.
args = click.option(
    "--args",
    envvar=None,
    help="Supply arguments to the macro. This dictionary will be mapped to the keyword arguments defined in the selected macro. This argument should be a YAML string, eg. '{my_variable: my_value}'",
    type=YAML(),
)
|
||||||
|
|
||||||
|
# --browser/--no-browser: whether `dbt docs serve` opens the docs site in a
# local browser after the server starts.
browser = click.option(
    "--browser/--no-browser",
    envvar=None,
    # Fixed typo in user-facing help text: "Wether" -> "Whether".
    help="Whether or not to open a local web browser after starting the server",
    default=True,
)
|
||||||
|
|
||||||
|
# Boolean flag: restrict relational-cache population to selected nodes' schemas.
cache_selected_only = click.option(
    "--cache-selected-only/--no-cache-selected-only",
    envvar="DBT_CACHE_SELECTED_ONLY",
    help="At start of run, populate relational cache only for schemas containing selected nodes, or for all schemas of interest.",
)

# Boolean flag (default on): allow introspective queries during compilation.
introspect = click.option(
    "--introspect/--no-introspect",
    envvar="DBT_INTROSPECT",
    help="Whether to scaffold introspective queries as part of compilation",
    default=True,
)

# Boolean flag (default on): run `dbt compile` as part of docs generation.
compile_docs = click.option(
    "--compile/--no-compile",
    envvar=None,
    help="Whether or not to run 'dbt compile' as part of docs generation",
    default=True,
)

# Flag for `dbt debug`: print the profiles.yml search directory and exit.
config_dir = click.option(
    "--config-dir",
    envvar=None,
    help="Print a system-specific command to access the directory that the current dbt project is searching for a profiles.yml. Then, exit. This flag renders other debug step flags no-ops.",
    is_flag=True,
)
|
||||||
|
|
||||||
|
# -d / --debug: verbose debug logging. NOTE: distinct from the `dbt debug`
# command. (The name shadows the enclosing module-level `debug` command only
# within params.py's namespace.)
debug = click.option(
    "--debug/--no-debug",
    "-d/ ",
    envvar="DBT_DEBUG",
    help="Display debug logging during dbt execution. Useful for debugging and making bug reports.",
)

# flag was previously named DEFER_MODE
defer = click.option(
    "--defer/--no-defer",
    envvar="DBT_DEFER",
    help="If set, resolve unselected nodes by deferring to the manifest within the --state directory.",
)

# Hidden shim carrying the deprecated DBT_DEFER_TO_STATE env var.
deprecated_defer = click.option(
    "--deprecated-defer",
    envvar="DBT_DEFER_TO_STATE",
    help="Internal flag for deprecating old env var.",
    default=False,
    hidden=True,
)

# Hidden escape hatch to the pre-structured-logging logger.
enable_legacy_logger = click.option(
    "--enable-legacy-logger/--no-enable-legacy-logger",
    envvar="DBT_ENABLE_LEGACY_LOGGER",
    hidden=True,
)

# --exclude: node selector; MultiOption consumes multiple space-separated values.
exclude = click.option(
    "--exclude",
    envvar=None,
    type=tuple,
    cls=MultiOption,
    multiple=True,
    help="Specify the nodes to exclude.",
)

# -x / --fail-fast: abort the run on the first node failure.
fail_fast = click.option(
    "--fail-fast/--no-fail-fast",
    "-x/ ",
    envvar="DBT_FAIL_FAST",
    help="Stop execution on first failure.",
)

favor_state = click.option(
    "--favor-state/--no-favor-state",
    envvar="DBT_FAVOR_STATE",
    help="If set, defer to the argument provided to the state flag for resolving unselected nodes, even if the node(s) exist as a database object in the current environment.",
)

# Hidden shim carrying the deprecated DBT_FAVOR_STATE_MODE env var.
deprecated_favor_state = click.option(
    "--deprecated-favor-state",
    envvar="DBT_FAVOR_STATE_MODE",
    help="Internal flag for deprecating old env var.",
)

# -f / --full-refresh: rebuild incremental models from scratch.
full_refresh = click.option(
    "--full-refresh",
    "-f",
    envvar="DBT_FULL_REFRESH",
    help="If specified, dbt will drop incremental models and fully-recalculate the incremental table from the model definition.",
    is_flag=True,
)
|
||||||
|
|
||||||
|
# Strategy for including tests adjacent to selected resources.
indirect_selection = click.option(
    "--indirect-selection",
    envvar="DBT_INDIRECT_SELECTION",
    help="Choose which tests to select that are adjacent to selected resources. Eager is most inclusive, cautious is most exclusive, and buildable is in between. Empty includes no tests at all.",
    type=click.Choice(["eager", "cautious", "buildable", "empty"], case_sensitive=False),
    default="eager",
)

log_cache_events = click.option(
    "--log-cache-events/--no-log-cache-events",
    help="Enable verbose logging for relational cache events to help when debugging.",
    envvar="DBT_LOG_CACHE_EVENTS",
)

# Console + file log format; --log-format-file overrides for the file only.
log_format = click.option(
    "--log-format",
    envvar="DBT_LOG_FORMAT",
    help="Specify the format of logging to the console and the log file. Use --log-format-file to configure the format for the log file differently than the console.",
    type=click.Choice(["text", "debug", "json", "default"], case_sensitive=False),
    default="default",
)

log_format_file = click.option(
    "--log-format-file",
    envvar="DBT_LOG_FORMAT_FILE",
    help="Specify the format of logging to the log file by overriding the default value and the general --log-format setting.",
    type=click.Choice(["text", "debug", "json", "default"], case_sensitive=False),
    default="debug",
)

# Console + file log level; --log-level-file overrides for the file only.
log_level = click.option(
    "--log-level",
    envvar="DBT_LOG_LEVEL",
    help="Specify the minimum severity of events that are logged to the console and the log file. Use --log-level-file to configure the severity for the log file differently than the console.",
    type=click.Choice(["debug", "info", "warn", "error", "none"], case_sensitive=False),
    default="info",
)

log_level_file = click.option(
    "--log-level-file",
    envvar="DBT_LOG_LEVEL_FILE",
    help="Specify the minimum severity of events that are logged to the log file by overriding the default value and the general --log-level setting.",
    type=click.Choice(["debug", "info", "warn", "error", "none"], case_sensitive=False),
    default="debug",
)
|
||||||
|
|
||||||
|
# Colorized output for console + file; --use-colors-file overrides file only.
use_colors = click.option(
    "--use-colors/--no-use-colors",
    envvar="DBT_USE_COLORS",
    help="Specify whether log output is colorized in the console and the log file. Use --use-colors-file/--no-use-colors-file to colorize the log file differently than the console.",
    default=True,
)

use_colors_file = click.option(
    "--use-colors-file/--no-use-colors-file",
    envvar="DBT_USE_COLORS_FILE",
    help="Specify whether log file output is colorized by overriding the default value and the general --use-colors/--no-use-colors setting.",
    default=True,
)

# Hidden: rollover size for dbt.log (0 disables rotation).
log_file_max_bytes = click.option(
    "--log-file-max-bytes",
    envvar="DBT_LOG_FILE_MAX_BYTES",
    help="Configure the max file size in bytes for a single dbt.log file, before rolling over. 0 means no limit.",
    default=10 * 1024 * 1024,  # 10mb
    type=click.INT,
    hidden=True,
)

log_path = click.option(
    "--log-path",
    envvar="DBT_LOG_PATH",
    help="Configure the 'log-path'. Only applies this setting for the current run. Overrides the 'DBT_LOG_PATH' if it is set.",
    default=None,
    type=click.Path(resolve_path=True, path_type=Path),
)

# Hidden internal toggle for Jinja macro debugging support.
macro_debugging = click.option(
    "--macro-debugging/--no-macro-debugging",
    envvar="DBT_MACRO_DEBUGGING",
    hidden=True,
)
|
||||||
|
|
||||||
|
# This less standard usage of --output where output_path below is more standard
# (here --output selects a *format* for `dbt list`; output_path selects a file).
output = click.option(
    "--output",
    envvar=None,
    help="Specify the output format: either JSON or a newline-delimited list of selectors, paths, or names",
    type=click.Choice(["json", "name", "path", "selector"], case_sensitive=False),
    default="selector",
)

# --output for `dbt compile` / `dbt show`: text (default) or json preview.
show_output_format = click.option(
    "--output",
    envvar=None,
    help="Output format for dbt compile and dbt show",
    type=click.Choice(["json", "text"], case_sensitive=False),
    default="text",
)

# --limit for `dbt show`: cap on previewed rows.
show_limit = click.option(
    "--limit",
    envvar=None,
    help="Limit the number of results returned by dbt show",
    type=click.INT,
    default=5,
)

# Extra node properties to emit with `--output json`; MultiOption collects
# multiple space-separated keys.
output_keys = click.option(
    "--output-keys",
    envvar=None,
    help=(
        "Space-delimited listing of node properties to include as custom keys for JSON output "
        "(e.g. `--output json --output-keys name resource_type description`)"
    ),
    type=tuple,
    cls=MultiOption,
    multiple=True,
    default=[],
)

# -o / --output as a *file path* (used by `dbt source freshness`).
output_path = click.option(
    "--output",
    "-o",
    envvar=None,
    help="Specify the output path for the JSON report. By default, outputs to 'target/sources.json'",
    type=click.Path(file_okay=True, dir_okay=False, writable=True),
    default=None,
)
|
||||||
|
|
||||||
|
# Boolean flag (default on): reuse the pickled partial-parse state in target/.
partial_parse = click.option(
    "--partial-parse/--no-partial-parse",
    envvar="DBT_PARTIAL_PARSE",
    help="Allow for partial parsing by looking for and writing to a pickle file in the target directory. This overrides the user configuration file.",
    default=True,
)

# Hidden internal override for the partial_parse.manifest location.
partial_parse_file_path = click.option(
    "--partial-parse-file-path",
    envvar="DBT_PARTIAL_PARSE_FILE_PATH",
    help="Internal flag for path to partial_parse.manifest file.",
    default=None,
    hidden=True,
    type=click.Path(exists=True, dir_okay=False, resolve_path=True),
)

populate_cache = click.option(
    "--populate-cache/--no-populate-cache",
    envvar="DBT_POPULATE_CACHE",
    help="At start of run, use `show` or `information_schema` queries to populate a relational cache, which can speed up subsequent materializations.",
    default=True,
)

# Port for the `dbt docs serve` web server.
port = click.option(
    "--port",
    envvar=None,
    help="Specify the port number for the docs server",
    default=8080,
    type=click.INT,
)
|
||||||
|
|
||||||
|
print = click.option(
|
||||||
|
"--print/--no-print",
|
||||||
|
envvar="DBT_PRINT",
|
||||||
|
help="Output all {{ print() }} macro calls.",
|
||||||
|
default=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
deprecated_print = click.option(
|
||||||
|
"--deprecated-print/--deprecated-no-print",
|
||||||
|
envvar="DBT_NO_PRINT",
|
||||||
|
help="Internal flag for deprecating old env var.",
|
||||||
|
default=True,
|
||||||
|
hidden=True,
|
||||||
|
callback=lambda ctx, param, value: not value,
|
||||||
|
)
|
||||||
|
|
||||||
|
printer_width = click.option(
|
||||||
|
"--printer-width",
|
||||||
|
envvar="DBT_PRINTER_WIDTH",
|
||||||
|
help="Sets the width of terminal output",
|
||||||
|
type=click.INT,
|
||||||
|
default=80,
|
||||||
|
)
|
||||||
|
|
||||||
|
profile = click.option(
    "--profile",
    envvar=None,
    help="Which profile to load. Overrides setting in dbt_project.yml.",
)

profiles_dir = click.option(
    "--profiles-dir",
    envvar="DBT_PROFILES_DIR",
    help="Which directory to look in for the profiles.yml file. If not set, dbt will look in the current working directory first, then HOME/.dbt/",
    default=default_profiles_dir,
    type=click.Path(exists=True),
)

# `dbt debug` uses this because it implements custom behaviour for non-existent profiles.yml directories
# `dbt deps` does not load a profile at all
# `dbt init` will write profiles.yml if it doesn't yet exist
profiles_dir_exists_false = click.option(
    "--profiles-dir",
    envvar="DBT_PROFILES_DIR",
    help="Which directory to look in for the profiles.yml file. If not set, dbt will look in the current working directory first, then HOME/.dbt/",
    default=default_profiles_dir,
    type=click.Path(exists=False),  # sole difference from `profiles_dir`: the path need not exist
)

project_dir = click.option(
    "--project-dir",
    envvar="DBT_PROJECT_DIR",
    help="Which directory to look in for the dbt_project.yml file. Default is the current working directory and its parents.",
    default=default_project_dir,
    type=click.Path(exists=True),
)

quiet = click.option(
    "--quiet/--no-quiet",
    "-q",
    envvar="DBT_QUIET",
    help="Suppress all non-error logging to stdout. Does not affect {{ print() }} macro calls.",
)

record_timing_info = click.option(
    "--record-timing-info",
    "-r",
    envvar=None,
    help="When this option is passed, dbt will output low-level timing stats to the specified file. Example: `--record-timing-info output.profile`",
    type=click.Path(exists=False),
)

# Multi-value, case-insensitive choice of resource types; accepts both the
# plural and singular flag spellings.
resource_type = click.option(
    "--resource-types",
    "--resource-type",
    envvar=None,
    help="Restricts the types of resources that dbt will include",
    type=ChoiceTuple(
        [
            "metric",
            "source",
            "analysis",
            "model",
            "test",
            "exposure",
            "snapshot",
            "seed",
            "default",
            "all",
        ],
        case_sensitive=False,
    ),
    cls=MultiOption,
    multiple=True,
    default=(),
)
|
||||||
|
|
||||||
|
# Shared flag declarations/attributes so `--models` and `--select` options
# behave identically wherever they appear.
model_decls = ("-m", "--models", "--model")
select_decls = ("-s", "--select")
select_attrs = {
    "envvar": None,
    "help": "Specify the nodes to include.",
    "cls": MultiOption,
    "multiple": True,
    "type": tuple,
}

inline = click.option(
    "--inline",
    envvar=None,
    help="Pass SQL inline to dbt compile and show",
)

# `--select` and `--models` are analogous for most commands except `dbt list` for legacy reasons.
# Most CLI arguments should use the combined `select` option that aliases `--models` to `--select`.
# However, if you need to split out these separators (like `dbt ls`), use the `models` and `raw_select` options instead.
# See https://github.com/dbt-labs/dbt-core/pull/6774#issuecomment-1408476095 for more info.
models = click.option(*model_decls, **select_attrs)
raw_select = click.option(*select_decls, **select_attrs)
select = click.option(*select_decls, *model_decls, **select_attrs)

selector = click.option(
    "--selector",
    envvar=None,
    help="The selector name to use, as defined in selectors.yml",
)

send_anonymous_usage_stats = click.option(
    "--send-anonymous-usage-stats/--no-send-anonymous-usage-stats",
    envvar="DBT_SEND_ANONYMOUS_USAGE_STATS",
    help="Send anonymous usage stats to dbt Labs.",
    default=True,
)

show = click.option(
    "--show",
    envvar=None,
    help="Show a sample of the loaded data in the terminal",
    is_flag=True,
)

# TODO: The env var is a correction!
# The original env var was `DBT_TEST_SINGLE_THREADED`.
# This broke the existing naming convention.
# This will need to be communicated as a change to the community!
#
# N.B. This flag is only used for testing, hence it's hidden from help text.
single_threaded = click.option(
    "--single-threaded/--no-single-threaded",
    envvar="DBT_SINGLE_THREADED",
    default=False,
    hidden=True,
)

skip_profile_setup = click.option(
    "--skip-profile-setup",
    "-s",
    envvar=None,
    help="Skip interactive profile setup.",
    is_flag=True,
)

empty_catalog = click.option(
    "--empty-catalog",
    help="If specified, generate empty catalog.json file during the `dbt docs generate` command.",
    default=False,
    is_flag=True,
)
|
||||||
|
|
||||||
|
state = click.option(
    "--state",
    envvar="DBT_STATE",
    help="Unless overridden, use this state directory for both state comparison and deferral.",
    type=click.Path(
        dir_okay=True,
        file_okay=False,
        readable=True,
        resolve_path=False,
        path_type=Path,
    ),
)

defer_state = click.option(
    "--defer-state",
    envvar="DBT_DEFER_STATE",
    help="Override the state directory for deferral only.",
    type=click.Path(
        dir_okay=True,
        file_okay=False,
        readable=True,
        resolve_path=False,
        path_type=Path,
    ),
)

# Bridges the deprecated DBT_ARTIFACT_STATE_PATH env var.
deprecated_state = click.option(
    "--deprecated-state",
    envvar="DBT_ARTIFACT_STATE_PATH",
    help="Internal flag for deprecating old env var.",
    hidden=True,
    type=click.Path(
        dir_okay=True,
        file_okay=False,
        readable=True,
        resolve_path=True,  # unlike `state`/`defer_state`, resolves to an absolute path
        path_type=Path,
    ),
)

static_parser = click.option(
    "--static-parser/--no-static-parser",
    envvar="DBT_STATIC_PARSER",
    help="Use the static parser.",
    default=True,
)

store_failures = click.option(
    "--store-failures",
    envvar="DBT_STORE_FAILURES",
    help="Store test results (failing rows) in the database",
    is_flag=True,
)

target = click.option(
    "--target",
    "-t",
    envvar=None,
    help="Which target to load for the given profile",
)

target_path = click.option(
    "--target-path",
    envvar="DBT_TARGET_PATH",
    help="Configure the 'target-path'. Only applies this setting for the current run. Overrides the 'DBT_TARGET_PATH' if it is set.",
    type=click.Path(),
)

# Used by `dbt debug` to test connectivity without the dependency checks.
debug_connection = click.option(
    "--connection",
    envvar=None,
    help="Test the connection to the target database independent of dependency checks.",
    is_flag=True,
)

threads = click.option(
    "--threads",
    envvar=None,
    help="Specify number of threads to use while executing models. Overrides settings in profiles.yml.",
    default=None,
    type=click.INT,
)

use_experimental_parser = click.option(
    "--use-experimental-parser/--no-use-experimental-parser",
    envvar="DBT_USE_EXPERIMENTAL_PARSER",
    help="Enable experimental parsing features.",
)

# NOTE: this assignment shadows the `vars` builtin at module scope; the name
# is kept for symmetry with the CLI flag it defines.
vars = click.option(
    "--vars",
    envvar=None,
    help="Supply variables to the project. This argument overrides variables defined in your dbt_project.yml file. This argument should be a YAML string, eg. '{my_variable: my_value}'",
    type=YAML(),
    default="{}",
)
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: when legacy flags are deprecated use
# click.version_option instead of a callback
def _version_callback(ctx, _param, value):
    """Eager click callback: print version information and exit the CLI.

    Does nothing when the flag is absent or click is doing resilient
    (completion-time) parsing.
    """
    wants_version = bool(value) and not ctx.resilient_parsing
    if wants_version:
        click.echo(get_version_information())
        ctx.exit()
|
||||||
|
|
||||||
|
|
||||||
|
version = click.option(
    "--version",
    "-V",
    "-v",
    callback=_version_callback,
    envvar=None,
    expose_value=False,  # handled entirely by the callback; commands never receive it
    help="Show version information and exit",
    is_eager=True,  # processed before other options so `--version` short-circuits
    is_flag=True,
)

version_check = click.option(
    "--version-check/--no-version-check",
    envvar="DBT_VERSION_CHECK",
    help="If set, ensure the installed dbt version matches the require-dbt-version specified in the dbt_project.yml file (if any). Otherwise, allow them to differ.",
    default=True,
)

warn_error = click.option(
    "--warn-error",
    envvar="DBT_WARN_ERROR",
    help="If dbt would normally warn, instead raise an exception. Examples include --select that selects nothing, deprecations, configurations with no associated models, invalid test configurations, and missing sources/refs in tests.",
    default=None,
    is_flag=True,
)

# Fine-grained alternative to `warn_error`: a YAML include/exclude mapping.
warn_error_options = click.option(
    "--warn-error-options",
    envvar="DBT_WARN_ERROR_OPTIONS",
    default="{}",
    help="""If dbt would normally warn, instead raise an exception based on include/exclude configuration. Examples include --select that selects nothing, deprecations, configurations with no associated models, invalid test configurations,
    and missing sources/refs in tests. This argument should be a YAML string, with keys 'include' or 'exclude'. eg. '{"include": "all", "exclude": ["NoNodesForSelectionCriteria"]}'""",
    type=WarnErrorOptionsType(),
)

write_json = click.option(
    "--write-json/--no-write-json",
    envvar="DBT_WRITE_JSON",
    help="Whether or not to write the manifest.json and run_results.json files to the target directory",
    default=True,
)
|
||||||
267
core/dbt/cli/requires.py
Normal file
267
core/dbt/cli/requires.py
Normal file
@@ -0,0 +1,267 @@
|
|||||||
|
import dbt.tracking
from dbt.version import installed as installed_version
from dbt.adapters.factory import adapter_management, register_adapter
from dbt.flags import set_flags, get_flag_dict
from dbt.cli.exceptions import (
    ExceptionExit,
    ResultExit,
)
from dbt.cli.flags import Flags
from dbt.config import RuntimeConfig
from dbt.config.runtime import load_project, load_profile, UnsetProfile
from dbt.events.functions import fire_event, LOG_VERSION, set_invocation_id, setup_event_logger
from dbt.events.types import (
    CommandCompleted,
    MainReportVersion,
    MainReportArgs,
    MainTrackingUserState,
)
from dbt.events.helpers import get_json_string_utcnow
from dbt.events.types import MainEncounteredError, MainStackTrace
# Fix: `Exception` is not a name defined in dbt.exceptions, so
# `from dbt.exceptions import Exception as DbtException` raises ImportError
# at module load; the intended name is the package's DbtException base class.
from dbt.exceptions import DbtException, DbtProjectError, FailFastError
from dbt.parser.manifest import ManifestLoader, write_manifest
from dbt.profiler import profiler
from dbt.tracking import active_user, initialize_from_flags, track_run
from dbt.utils import cast_dict_to_dict_of_strings
from dbt.plugins import set_up_plugin_manager, get_plugin_manager

from click import Context
from functools import update_wrapper
import time
import traceback
|
||||||
|
|
||||||
|
|
||||||
|
def preflight(func):
    """Decorator for click commands that performs all per-invocation setup.

    Ordering matters: flags must exist before logging is configured, logging
    before any events are fired, and tracking before the run begins.
    """

    def wrapper(*args, **kwargs):
        ctx = args[0]
        assert isinstance(ctx, Context)
        ctx.obj = ctx.obj or {}

        # Flags: parsed once, stashed on the context and set globally.
        flags = Flags(ctx)
        ctx.obj["flags"] = flags
        set_flags(flags)

        # Logging: set the invocation id before configuring the logger so
        # every subsequent event carries it.
        callbacks = ctx.obj.get("callbacks", [])
        set_invocation_id()
        setup_event_logger(flags=flags, callbacks=callbacks)

        # Tracking: `track_run` is a context manager whose lifetime is tied
        # to the click context via with_resource.
        initialize_from_flags(flags.SEND_ANONYMOUS_USAGE_STATS, flags.PROFILES_DIR)
        ctx.with_resource(track_run(run_command=flags.WHICH))

        # Now that we have our logger, fire away!
        fire_event(MainReportVersion(version=str(installed_version), log_version=LOG_VERSION))
        flags_dict_str = cast_dict_to_dict_of_strings(get_flag_dict())
        fire_event(MainReportArgs(args=flags_dict_str))

        # Deprecation warnings
        flags.fire_deprecations()

        if active_user is not None:  # mypy appeasement, always true
            fire_event(MainTrackingUserState(user_state=active_user.state()))

        # Profiling: optionally record low-level timing stats to a file.
        if flags.RECORD_TIMING_INFO:
            ctx.with_resource(profiler(enable=True, outfile=flags.RECORD_TIMING_INFO))

        # Adapter management: cleaned up when the click context closes.
        ctx.with_resource(adapter_management())

        return func(*args, **kwargs)

    return update_wrapper(wrapper, func)
|
||||||
|
|
||||||
|
|
||||||
|
def postflight(func):
    """The decorator that handles all exception handling for the click commands.
    This decorator must be used before any other decorators that may throw an exception."""

    def wrapper(*args, **kwargs):
        ctx = args[0]
        start_func = time.perf_counter()
        # Assume failure until the wrapped command reports otherwise, so the
        # `finally` block reports correctly even if `func` raises.
        success = False

        try:
            result, success = func(*args, **kwargs)
        except FailFastError as e:
            fire_event(MainEncounteredError(exc=str(e)))
            # FailFast carries a partial result worth surfacing to the caller.
            raise ResultExit(e.result)
        except DbtException as e:
            # Known dbt errors: reported without a stack trace.
            fire_event(MainEncounteredError(exc=str(e)))
            raise ExceptionExit(e)
        except BaseException as e:
            # Anything else (including SystemExit/KeyboardInterrupt): also
            # emit the stack trace for debugging.
            fire_event(MainEncounteredError(exc=str(e)))
            fire_event(MainStackTrace(stack_trace=traceback.format_exc()))
            raise ExceptionExit(e)
        finally:
            # CommandCompleted fires on every path, success or failure.
            fire_event(
                CommandCompleted(
                    command=ctx.command_path,
                    success=success,
                    completed_at=get_json_string_utcnow(),
                    elapsed=time.perf_counter() - start_func,
                )
            )

        # A command may return success=False without raising; convert that
        # into a ResultExit so the process exits non-zero.
        if not success:
            raise ResultExit(result)

        return (result, success)

    return update_wrapper(wrapper, func)
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: UnsetProfile is necessary for deps and clean to load a project.
# This decorator and its usage can be removed once https://github.com/dbt-labs/dbt-core/issues/6257 is closed.
def unset_profile(func):
    """Stash an UnsetProfile on the click context so profile-less commands can still load a project."""

    def wrapper(*args, **kwargs):
        click_ctx = args[0]
        assert isinstance(click_ctx, Context)

        click_ctx.obj["profile"] = UnsetProfile()

        return func(*args, **kwargs)

    return update_wrapper(wrapper, func)
|
||||||
|
|
||||||
|
|
||||||
|
def profile(func):
    """Load the dbt profile selected by the current flags and stash it on the click context."""

    def wrapper(*args, **kwargs):
        click_ctx = args[0]
        assert isinstance(click_ctx, Context)

        flags = click_ctx.obj["flags"]
        # TODO: Generalize safe access to flags.THREADS:
        # https://github.com/dbt-labs/dbt-core/issues/6259
        thread_count = getattr(flags, "THREADS", None)
        click_ctx.obj["profile"] = load_profile(
            flags.PROJECT_DIR, flags.VARS, flags.PROFILE, flags.TARGET, thread_count
        )

        return func(*args, **kwargs)

    return update_wrapper(wrapper, func)
|
||||||
|
|
||||||
|
|
||||||
|
def project(func):
    """Decorator: load the dbt project and stash it on the click context.

    Requires a profile to already be on the context (placed there by the
    `profile` or `unset_profile` decorator).
    """

    def wrapper(*args, **kwargs):
        ctx = args[0]
        assert isinstance(ctx, Context)

        # TODO: Decouple target from profile, and remove the need for profile here:
        # https://github.com/dbt-labs/dbt-core/issues/6257
        if not ctx.obj.get("profile"):
            raise DbtProjectError("profile required for project")

        flags = ctx.obj["flags"]
        project = load_project(
            flags.PROJECT_DIR, flags.VERSION_CHECK, ctx.obj["profile"], flags.VARS
        )
        ctx.obj["project"] = project

        # Plugins are keyed by project name, so they can only be set up once
        # the project has been loaded.
        set_up_plugin_manager(project_name=project.project_name)

        if dbt.tracking.active_user is not None:
            # Track a hashed project identifier (never the raw name).
            project_id = None if project is None else project.hashed_name()

            dbt.tracking.track_project_id({"project_id": project_id})

        return func(*args, **kwargs)

    return update_wrapper(wrapper, func)
|
||||||
|
|
||||||
|
|
||||||
|
def runtime_config(func):
    """A decorator used by click command functions for generating a runtime
    config given a profile and project.
    """

    def wrapper(*args, **kwargs):
        ctx = args[0]
        assert isinstance(ctx, Context)

        # Both must have been placed on the context by earlier decorators.
        req_strs = ["profile", "project"]
        reqs = [ctx.obj.get(req_str) for req_str in req_strs]

        if None in reqs:
            raise DbtProjectError("profile and project required for runtime_config")

        config = RuntimeConfig.from_parts(
            ctx.obj["project"],
            ctx.obj["profile"],
            ctx.obj["flags"],
        )

        ctx.obj["runtime_config"] = config

        if dbt.tracking.active_user is not None:
            # `credentials` may be absent from the config; guard each access
            # before reading adapter details for tracking.
            adapter_type = (
                getattr(config.credentials, "type", None)
                if hasattr(config, "credentials")
                else None
            )
            adapter_unique_id = (
                config.credentials.hashed_unique_field()
                if hasattr(config, "credentials")
                else None
            )

            dbt.tracking.track_adapter_info(
                {
                    "adapter_type": adapter_type,
                    "adapter_unique_id": adapter_unique_id,
                }
            )

        return func(*args, **kwargs)

    return update_wrapper(wrapper, func)
|
||||||
|
|
||||||
|
|
||||||
|
def manifest(*args0, write=True, write_perf_info=False):
    """A decorator used by click command functions for generating a manifest
    given a profile, project, and runtime config. This also registers the adapter
    from the runtime config and conditionally writes the manifest to disk.

    Usable both bare (`@manifest`) and parameterized (`@manifest(write=False)`);
    see the dispatch at the bottom.
    """

    def outer_wrapper(func):
        def wrapper(*args, **kwargs):
            ctx = args[0]
            assert isinstance(ctx, Context)

            # All three must have been placed on the context by earlier decorators.
            req_strs = ["profile", "project", "runtime_config"]
            reqs = [ctx.obj.get(dep) for dep in req_strs]

            if None in reqs:
                raise DbtProjectError("profile, project, and runtime_config required for manifest")

            runtime_config = ctx.obj["runtime_config"]
            register_adapter(runtime_config)

            # a manifest has already been set on the context, so don't overwrite it
            if ctx.obj.get("manifest") is None:
                manifest = ManifestLoader.get_full_manifest(
                    runtime_config,
                    write_perf_info=write_perf_info,
                )

                ctx.obj["manifest"] = manifest
                # Write artifacts only when both the decorator argument and
                # the --write-json flag allow it.
                if write and ctx.obj["flags"].write_json:
                    write_manifest(manifest, runtime_config.project_target_path)
                    # Give plugins a chance to emit their own manifest artifacts.
                    pm = get_plugin_manager(runtime_config.project_name)
                    plugin_artifacts = pm.get_manifest_artifacts(manifest)
                    for path, plugin_artifact in plugin_artifacts.items():
                        plugin_artifact.write(path)

            return func(*args, **kwargs)

        return update_wrapper(wrapper, func)

    # if there are no args, the decorator was used without params @decorator
    # otherwise, the decorator was called with params @decorator(arg)
    if len(args0) == 0:
        return outer_wrapper
    return outer_wrapper(args0[0])
|
||||||
31
core/dbt/cli/resolvers.py
Normal file
31
core/dbt/cli/resolvers.py
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
from pathlib import Path
|
||||||
|
from dbt.config.project import PartialProject
|
||||||
|
from dbt.exceptions import DbtProjectError
|
||||||
|
|
||||||
|
|
||||||
|
def default_project_dir() -> Path:
    """Return the nearest directory (cwd first, then each ancestor) that
    contains a dbt_project.yml, falling back to the cwd when none does."""
    cwd = Path.cwd()
    for candidate in (cwd, *cwd.parents):
        if (candidate / "dbt_project.yml").exists():
            return candidate
    return cwd
|
||||||
|
|
||||||
|
|
||||||
|
def default_profiles_dir() -> Path:
    """Prefer a profiles.yml in the cwd; otherwise fall back to ~/.dbt."""
    cwd = Path.cwd()
    if (cwd / "profiles.yml").exists():
        return cwd
    return Path.home() / ".dbt"
|
||||||
|
|
||||||
|
|
||||||
|
def default_log_path(project_dir: Path, verify_version: bool = False) -> Path:
    """If available, derive a default log path from dbt_project.yml. Otherwise, default to "logs".
    Known limitations:
    1. Using PartialProject here, so no jinja rendering of log-path.
    2. Programmatic invocations of the cli via dbtRunner may pass a Project object directly,
    which is not being taken into consideration here to extract a log-path.
    """
    default_log_path = Path("logs")
    try:
        partial = PartialProject.from_project_root(str(project_dir), verify_version=verify_version)
        partial_log_path = partial.project_dict.get("log-path") or default_log_path
        # Anchor the configured (or default) log path to the project directory,
        # not the current working directory.
        default_log_path = Path(project_dir) / partial_log_path
    except DbtProjectError:
        # No usable dbt_project.yml here; fall back to the bare "logs" default.
        pass

    return default_log_path
|
||||||
40
core/dbt/cli/types.py
Normal file
40
core/dbt/cli/types.py
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
from enum import Enum
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
from dbt.exceptions import DbtInternalError
|
||||||
|
|
||||||
|
|
||||||
|
class Command(Enum):
    """Canonical name for every dbt CLI command, keyed by its CLI token."""

    BUILD = "build"
    CLEAN = "clean"
    COMPILE = "compile"
    CLONE = "clone"
    DOCS_GENERATE = "generate"
    DOCS_SERVE = "serve"
    DEBUG = "debug"
    DEPS = "deps"
    INIT = "init"
    LIST = "list"
    PARSE = "parse"
    RUN = "run"
    RUN_OPERATION = "run-operation"
    SEED = "seed"
    SHOW = "show"
    SNAPSHOT = "snapshot"
    SOURCE_FRESHNESS = "freshness"
    TEST = "test"
    RETRY = "retry"

    @classmethod
    def from_str(cls, s: str) -> "Command":
        """Look up a Command by its string value; raise DbtInternalError if unknown."""
        matched = next((member for member in cls if member.value == s), None)
        if matched is None:
            raise DbtInternalError(f"No value '{s}' exists in Command enum")
        return matched

    def to_list(self) -> List[str]:
        """Return the CLI argv tokens for this command (two tokens for subcommands)."""
        multiword = {
            Command.DOCS_GENERATE: ["docs", "generate"],
            Command.DOCS_SERVE: ["docs", "serve"],
            Command.SOURCE_FRESHNESS: ["source", "freshness"],
        }
        if self in multiword:
            return multiword[self]
        return [self.value]
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user