Mirror of https://github.com/dbt-labs/dbt-core, synced 2025-12-20 11:11:27 +00:00

Compare commits: testing-pr ... test-docs- (988 commits)
.bumpversion.cfg (modified)
@@ -1,43 +1,39 @@
 [bumpversion]
-current_version = 0.19.0
+current_version = 1.3.0a1
 parse = (?P<major>\d+)
     \.(?P<minor>\d+)
     \.(?P<patch>\d+)
-    ((?P<prerelease>[a-z]+)(?P<num>\d+))?
+    ((?P<prekind>a|b|rc)
+    (?P<pre>\d+)  # pre-release version num
+    )?
 serialize =
-    {major}.{minor}.{patch}{prerelease}{num}
+    {major}.{minor}.{patch}{prekind}{pre}
     {major}.{minor}.{patch}
 commit = False
 tag = False

-[bumpversion:part:prerelease]
+[bumpversion:part:prekind]
 first_value = a
+optional_value = final
 values =
     a
     b
     rc
+    final

-[bumpversion:part:num]
+[bumpversion:part:pre]
 first_value = 1

-[bumpversion:file:setup.py]
-
 [bumpversion:file:core/setup.py]

 [bumpversion:file:core/dbt/version.py]

 [bumpversion:file:plugins/postgres/setup.py]

-[bumpversion:file:plugins/redshift/setup.py]
-
-[bumpversion:file:plugins/snowflake/setup.py]
-
-[bumpversion:file:plugins/bigquery/setup.py]
-
 [bumpversion:file:plugins/postgres/dbt/adapters/postgres/__version__.py]

-[bumpversion:file:plugins/redshift/dbt/adapters/redshift/__version__.py]
+[bumpversion:file:docker/Dockerfile]

-[bumpversion:file:plugins/snowflake/dbt/adapters/snowflake/__version__.py]
+[bumpversion:file:tests/adapter/setup.py]

-[bumpversion:file:plugins/bigquery/dbt/adapters/bigquery/__version__.py]
+[bumpversion:file:tests/adapter/dbt/tests/adapter/__version__.py]
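The new parse/serialize pair replaces the catch-all `prerelease`/`num` groups with an explicit `prekind` (one of `a`, `b`, `rc`) and a `pre` number. A minimal Python sketch (illustrative only, not part of this diff) of how those two settings round-trip the new `current_version`:

```python
import re

# The new parse regex from .bumpversion.cfg, joined into one pattern.
PARSE = re.compile(
    r"(?P<major>\d+)"
    r"\.(?P<minor>\d+)"
    r"\.(?P<patch>\d+)"
    r"((?P<prekind>a|b|rc)"
    r"(?P<pre>\d+)"  # pre-release version num
    r")?"
)

parts = PARSE.match("1.3.0a1").groupdict()
# -> {'major': '1', 'minor': '3', 'patch': '0', 'prekind': 'a', 'pre': '1'}

# First serialize format: {major}.{minor}.{patch}{prekind}{pre}
print("{major}.{minor}.{patch}{prekind}{pre}".format(**parts))  # 1.3.0a1
# A final release matches with prekind/pre unset, so bumpversion falls back
# to the shorter {major}.{minor}.{patch} serialize format.
```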
.changes/0.0.0.md (new file, 19 lines)
@@ -0,0 +1,19 @@
## Previous Releases

For information on prior major and minor releases, see their changelogs:

* [1.2](https://github.com/dbt-labs/dbt-core/blob/1.2.latest/CHANGELOG.md)
* [1.1](https://github.com/dbt-labs/dbt-core/blob/1.1.latest/CHANGELOG.md)
* [1.0](https://github.com/dbt-labs/dbt-core/blob/1.0.latest/CHANGELOG.md)
* [0.21](https://github.com/dbt-labs/dbt-core/blob/0.21.latest/CHANGELOG.md)
* [0.20](https://github.com/dbt-labs/dbt-core/blob/0.20.latest/CHANGELOG.md)
* [0.19](https://github.com/dbt-labs/dbt-core/blob/0.19.latest/CHANGELOG.md)
* [0.18](https://github.com/dbt-labs/dbt-core/blob/0.18.latest/CHANGELOG.md)
* [0.17](https://github.com/dbt-labs/dbt-core/blob/0.17.latest/CHANGELOG.md)
* [0.16](https://github.com/dbt-labs/dbt-core/blob/0.16.latest/CHANGELOG.md)
* [0.15](https://github.com/dbt-labs/dbt-core/blob/0.15.latest/CHANGELOG.md)
* [0.14](https://github.com/dbt-labs/dbt-core/blob/0.14.latest/CHANGELOG.md)
* [0.13](https://github.com/dbt-labs/dbt-core/blob/0.13.latest/CHANGELOG.md)
* [0.12](https://github.com/dbt-labs/dbt-core/blob/0.12.latest/CHANGELOG.md)
* [0.11 and earlier](https://github.com/dbt-labs/dbt-core/blob/0.11.latest/CHANGELOG.md)
.changes/README.md (new file, 53 lines)
@@ -0,0 +1,53 @@
# CHANGELOG Automation

We use [changie](https://changie.dev/) to automate `CHANGELOG` generation. For installation and format/command specifics, see the documentation.

### Quick Tour

- All new change entries get generated under `/.changes/unreleased` as a yaml file
- `header.tpl.md` contains the contents of the header for the entire CHANGELOG file
- `0.0.0.md` contains the contents of the footer for the entire CHANGELOG file. changie looks to be in the process of supporting a footer file the same as it supports a header file. Switch to that when available. For now, the 0.0.0 in the file name forces it to the bottom of the changelog no matter what version we are releasing.
- `.changie.yaml` contains the fields in a change, the format of a single change, as well as the format of the Contributors section for each version.

### Workflow

#### Daily workflow
Almost every code change we make associated with an issue will require a `CHANGELOG` entry. After you have created the PR in GitHub, run `changie new` and follow the command prompts to generate a yaml file with your change details. This only needs to be done once per PR.

The `changie new` command will ensure correct file format and file name. There is a one-to-one mapping of issues to changes. Multiple issues cannot be lumped into a single entry. If you make a mistake, the yaml file may be directly modified and saved as long as the format is preserved.

Note: If your PR has been cleared by the Core Team as not needing a changelog entry, the `Skip Changelog` label may be put on the PR to bypass the GitHub action that blocks PRs from being merged when they are missing a `CHANGELOG` entry.

#### Prerelease Workflow
These commands batch up changes in `/.changes/unreleased` to be included in this prerelease and move those files to a directory named for the release version. The `--move-dir` will be created if it does not exist and is created in `/.changes`.

```
changie batch <version> --move-dir '<version>' --prerelease 'rc1'
changie merge
```

Example
```
changie batch 1.0.5 --move-dir '1.0.5' --prerelease 'rc1'
changie merge
```

#### Final Release Workflow
These commands batch up changes in `/.changes/unreleased` as well as `/.changes/<version>` to be included in this final release and delete all prereleases. This rolls all prereleases up into a single final release. All `yaml` files in `/unreleased` and `<version>` will be deleted at this point.

```
changie batch <version> --include '<version>' --remove-prereleases
changie merge
```

Example
```
changie batch 1.0.5 --include '1.0.5' --remove-prereleases
changie merge
```

### A Note on Manual Edits & Gotchas
- Changie generates markdown files in the `.changes` directory that are parsed together with the `changie merge` command. Every time `changie merge` is run, it regenerates the entire file. For this reason, any changes made directly to `CHANGELOG.md` will be overwritten on the next run of `changie merge`.
- If changes need to be made to the `CHANGELOG.md`, make the changes to the relevant `<version>.md` file located in the `/.changes` directory. You will then run `changie merge` to regenerate the `CHANGELOG.md`.
- Do not run `changie batch` again on released versions. Our final release workflow deletes all of the yaml files associated with individual changes. If for some reason modifications to the `CHANGELOG.md` are required after we've generated the final release `CHANGELOG.md`, the modifications need to be done manually to the `<version>.md` file in the `/.changes` directory.
- changie can modify, create, and delete files depending on the command you run. This is expected. Be sure to commit everything that has been modified and deleted.
.changes/header.tpl.md (new executable file, 6 lines)
@@ -0,0 +1,6 @@
# dbt Core Changelog

- This file provides a full account of all changes to `dbt-core` and `dbt-postgres`
- Changes are listed under the (pre)release in which they first appear. Subsequent releases include changes from previous releases.
- "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
- Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)
test/integration/004_simple_snapshot_test/models/.gitkeep → .changes/unreleased/.gitkeep (renamed; Normal file → Executable file)
.changes/unreleased/Features-20220715-035555.yaml (new file, 8 lines)
@@ -0,0 +1,8 @@
kind: Features
body: Add reusable function for retrying adapter connections. Utilize said function
  to add retries for Postgres (and Redshift).
time: 2022-07-15T03:55:55.270637265+02:00
custom:
  Author: tomasfarias
  Issue: "5022"
  PR: "5432"
.changes/unreleased/Fixes-20220715-231148.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: Fixes
body: Rename try to strict for more intuitiveness
time: 2022-07-15T23:11:48.327928+12:00
custom:
  Author: jeremyyeo
  Issue: "5475"
  PR: "5477"
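Each of these yaml entries becomes one bullet in the generated changelog. A small sketch (hypothetical, for illustration only) of what applying the `changeFormat` template from `.changie.yaml` (shown next) to the `Fixes` entry above produces:

```python
entry = {
    "body": "Rename try to strict for more intuitiveness",
    "custom": {"Author": "jeremyyeo", "Issue": "5475", "PR": "5477"},
}

# changeFormat: '- {{.Body}} ([#Issue](.../issues/Issue), [#PR](.../pull/PR))'
line = (
    "- {body} ([#{issue}](https://github.com/dbt-labs/dbt-core/issues/{issue}), "
    "[#{pr}](https://github.com/dbt-labs/dbt-core/pull/{pr}))"
).format(body=entry["body"], issue=entry["custom"]["Issue"], pr=entry["custom"]["PR"])

print(line)
# - Rename try to strict for more intuitiveness ([#5475](https://github.com/dbt-labs/dbt-core/issues/5475), [#5477](https://github.com/dbt-labs/dbt-core/pull/5477))
```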
.changie.yaml (new executable file, 61 lines)
@@ -0,0 +1,61 @@
changesDir: .changes
unreleasedDir: unreleased
headerPath: header.tpl.md
versionHeaderPath: ""
changelogPath: CHANGELOG.md
versionExt: md
versionFormat: '## dbt-core {{.Version}} - {{.Time.Format "January 02, 2006"}}'
kindFormat: '### {{.Kind}}'
changeFormat: '- {{.Body}} ([#{{.Custom.Issue}}](https://github.com/dbt-labs/dbt-core/issues/{{.Custom.Issue}}), [#{{.Custom.PR}}](https://github.com/dbt-labs/dbt-core/pull/{{.Custom.PR}}))'
kinds:
  - label: Breaking Changes
  - label: Features
  - label: Fixes
  - label: Docs
  - label: Under the Hood
  - label: Dependencies
  - label: Security
custom:
  - key: Author
    label: GitHub Username(s) (separated by a single space if multiple)
    type: string
    minLength: 3
  - key: Issue
    label: GitHub Issue Number
    type: int
    minLength: 4
  - key: PR
    label: GitHub Pull Request Number
    type: int
    minLength: 4
footerFormat: |
  {{- $contributorDict := dict }}
  {{- /* any names added to this list should be all lowercase for later matching purposes */}}
  {{- $core_team := list "emmyoop" "nathaniel-may" "gshank" "leahwicz" "chenyulinx" "stu-k" "iknox-fa" "versusfacit" "mcknight-42" "jtcohen6" "dependabot" }}
  {{- range $change := .Changes }}
  {{- $authorList := splitList " " $change.Custom.Author }}
  {{- /* loop through all authors for a PR */}}
  {{- range $author := $authorList }}
  {{- $authorLower := lower $author }}
  {{- /* we only want to include non-core team contributors */}}
  {{- if not (has $authorLower $core_team)}}
  {{- $pr := $change.Custom.PR }}
  {{- /* check if this contributor has other PRs associated with them already */}}
  {{- if hasKey $contributorDict $author }}
  {{- $prList := get $contributorDict $author }}
  {{- $prList = append $prList $pr }}
  {{- $contributorDict := set $contributorDict $author $prList }}
  {{- else }}
  {{- $prList := list $change.Custom.PR }}
  {{- $contributorDict := set $contributorDict $author $prList }}
  {{- end }}
  {{- end}}
  {{- end}}
  {{- end }}
  {{- /* no indentation here for formatting so the final markdown doesn't have unneeded indentations */}}
  {{- if $contributorDict}}
  ### Contributors
  {{- range $k,$v := $contributorDict }}
  - [@{{$k}}](https://github.com/{{$k}}) ({{ range $index, $element := $v }}{{if $index}}, {{end}}[#{{$element}}](https://github.com/dbt-labs/dbt-core/pull/{{$element}}){{end}})
  {{- end }}
  {{- end }}
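The `footerFormat` template above does a fair amount of work in Go template syntax: it collects PR numbers per non-core-team author (matching names case-insensitively) and renders a Contributors section. The same logic in plain Python, as a hedged reading of the template (the core team list is abbreviated, and the changes are the two unreleased entries shown earlier):

```python
core_team = {"emmyoop", "nathaniel-may", "gshank", "leahwicz", "jtcohen6", "dependabot"}

changes = [
    {"Author": "tomasfarias", "PR": "5432"},
    {"Author": "jeremyyeo", "PR": "5477"},
]

contributors = {}
for change in changes:
    # a single entry may credit several space-separated authors
    for author in change["Author"].split(" "):
        # only non-core-team contributors are listed; match on lowercase
        if author.lower() not in core_team:
            contributors.setdefault(author, []).append(change["PR"])

if contributors:
    print("### Contributors")
    for author, prs in contributors.items():
        links = ", ".join(
            f"[#{pr}](https://github.com/dbt-labs/dbt-core/pull/{pr})" for pr in prs
        )
        print(f"- [@{author}](https://github.com/{author}) ({links})")
```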
@@ -1,218 +0,0 @@
|
||||
version: 2.1
|
||||
jobs:
|
||||
unit:
|
||||
docker: &test_only
|
||||
- image: fishtownanalytics/test-container:9
|
||||
environment:
|
||||
DBT_INVOCATION_ENV: circle
|
||||
steps:
|
||||
- checkout
|
||||
- run: tox -e flake8,mypy,unit-py36,unit-py38
|
||||
build-wheels:
|
||||
docker: *test_only
|
||||
steps:
|
||||
- checkout
|
||||
- run:
|
||||
name: Build wheels
|
||||
command: |
|
||||
python3.8 -m venv "${PYTHON_ENV}"
|
||||
export PYTHON_BIN="${PYTHON_ENV}/bin/python"
|
||||
$PYTHON_BIN -m pip install -U pip setuptools
|
||||
$PYTHON_BIN -m pip install -r requirements.txt
|
||||
$PYTHON_BIN -m pip install -r dev_requirements.txt
|
||||
/bin/bash ./scripts/build-wheels.sh
|
||||
$PYTHON_BIN ./scripts/collect-dbt-contexts.py > ./dist/context_metadata.json
|
||||
$PYTHON_BIN ./scripts/collect-artifact-schema.py > ./dist/artifact_schemas.json
|
||||
environment:
|
||||
PYTHON_ENV: /home/tox/build_venv/
|
||||
- store_artifacts:
|
||||
path: ./dist
|
||||
destination: dist
|
||||
integration-postgres-py36:
|
||||
docker: &test_and_postgres
|
||||
- image: fishtownanalytics/test-container:9
|
||||
environment:
|
||||
DBT_INVOCATION_ENV: circle
|
||||
- image: postgres
|
||||
name: database
|
||||
environment: &pgenv
|
||||
POSTGRES_USER: "root"
|
||||
POSTGRES_PASSWORD: "password"
|
||||
POSTGRES_DB: "dbt"
|
||||
steps:
|
||||
- checkout
|
||||
- run: &setupdb
|
||||
name: Setup postgres
|
||||
command: bash test/setup_db.sh
|
||||
environment:
|
||||
PGHOST: database
|
||||
PGUSER: root
|
||||
PGPASSWORD: password
|
||||
PGDATABASE: postgres
|
||||
- run:
|
||||
name: Run tests
|
||||
command: tox -e integration-postgres-py36
|
||||
- store_artifacts:
|
||||
path: ./logs
|
||||
integration-snowflake-py36:
|
||||
docker: *test_only
|
||||
steps:
|
||||
- checkout
|
||||
- run:
|
||||
name: Run tests
|
||||
command: tox -e integration-snowflake-py36
|
||||
no_output_timeout: 1h
|
||||
- store_artifacts:
|
||||
path: ./logs
|
||||
integration-redshift-py36:
|
||||
docker: *test_only
|
||||
steps:
|
||||
- checkout
|
||||
- run:
|
||||
name: Run tests
|
||||
command: tox -e integration-redshift-py36
|
||||
- store_artifacts:
|
||||
path: ./logs
|
||||
integration-bigquery-py36:
|
||||
docker: *test_only
|
||||
steps:
|
||||
- checkout
|
||||
- run:
|
||||
name: Run tests
|
||||
command: tox -e integration-bigquery-py36
|
||||
- store_artifacts:
|
||||
path: ./logs
|
||||
integration-postgres-py38:
|
||||
docker: *test_and_postgres
|
||||
steps:
|
||||
- checkout
|
||||
- run: *setupdb
|
||||
- run:
|
||||
name: Run tests
|
||||
command: tox -e integration-postgres-py38
|
||||
- store_artifacts:
|
||||
path: ./logs
|
||||
integration-snowflake-py38:
|
||||
docker: *test_only
|
||||
steps:
|
||||
- checkout
|
||||
- run:
|
||||
name: Run tests
|
||||
command: tox -e integration-snowflake-py38
|
||||
no_output_timeout: 1h
|
||||
- store_artifacts:
|
||||
path: ./logs
|
||||
integration-redshift-py38:
|
||||
docker: *test_only
|
||||
steps:
|
||||
- checkout
|
||||
- run:
|
||||
name: Run tests
|
||||
command: tox -e integration-redshift-py38
|
||||
- store_artifacts:
|
||||
path: ./logs
|
||||
integration-bigquery-py38:
|
||||
docker: *test_only
|
||||
steps:
|
||||
- checkout
|
||||
- run:
|
||||
name: Run tests
|
||||
command: tox -e integration-bigquery-py38
|
||||
- store_artifacts:
|
||||
path: ./logs
|
||||
|
||||
integration-postgres-py39:
|
||||
docker: *test_and_postgres
|
||||
steps:
|
||||
- checkout
|
||||
- run: *setupdb
|
||||
- run:
|
||||
name: Run tests
|
||||
command: tox -e integration-postgres-py39
|
||||
- store_artifacts:
|
||||
path: ./logs
|
||||
integration-snowflake-py39:
|
||||
docker: *test_only
|
||||
steps:
|
||||
- checkout
|
||||
- run:
|
||||
name: Run tests
|
||||
command: tox -e integration-snowflake-py39
|
||||
no_output_timeout: 1h
|
||||
- store_artifacts:
|
||||
path: ./logs
|
||||
integration-redshift-py39:
|
||||
docker: *test_only
|
||||
steps:
|
||||
- checkout
|
||||
- run:
|
||||
name: Run tests
|
||||
command: tox -e integration-redshift-py39
|
||||
- store_artifacts:
|
||||
path: ./logs
|
||||
integration-bigquery-py39:
|
||||
docker: *test_only
|
||||
steps:
|
||||
- checkout
|
||||
- run:
|
||||
name: Run tests
|
||||
command: tox -e integration-bigquery-py39
|
||||
- store_artifacts:
|
||||
path: ./logs
|
||||
|
||||
workflows:
|
||||
version: 2
|
||||
test-everything:
|
||||
jobs:
|
||||
- unit
|
||||
- integration-postgres-py36:
|
||||
requires:
|
||||
- unit
|
||||
- integration-redshift-py36:
|
||||
requires:
|
||||
- integration-postgres-py36
|
||||
- integration-bigquery-py36:
|
||||
requires:
|
||||
- integration-postgres-py36
|
||||
- integration-snowflake-py36:
|
||||
requires:
|
||||
- integration-postgres-py36
|
||||
- integration-postgres-py38:
|
||||
requires:
|
||||
- unit
|
||||
- integration-redshift-py38:
|
||||
requires:
|
||||
- integration-postgres-py38
|
||||
- integration-bigquery-py38:
|
||||
requires:
|
||||
- integration-postgres-py38
|
||||
- integration-snowflake-py38:
|
||||
requires:
|
||||
- integration-postgres-py38
|
||||
- integration-postgres-py39:
|
||||
requires:
|
||||
- unit
|
||||
- integration-redshift-py39:
|
||||
requires:
|
||||
- integration-postgres-py39
|
||||
- integration-bigquery-py39:
|
||||
requires:
|
||||
- integration-postgres-py39
|
||||
# - integration-snowflake-py39:
|
||||
# requires:
|
||||
# - integration-postgres-py39
|
||||
- build-wheels:
|
||||
requires:
|
||||
- unit
|
||||
- integration-postgres-py36
|
||||
- integration-redshift-py36
|
||||
- integration-bigquery-py36
|
||||
- integration-snowflake-py36
|
||||
- integration-postgres-py38
|
||||
- integration-redshift-py38
|
||||
- integration-bigquery-py38
|
||||
- integration-snowflake-py38
|
||||
- integration-postgres-py39
|
||||
- integration-redshift-py39
|
||||
- integration-bigquery-py39
|
||||
# - integration-snowflake-py39
|
||||
.flake8 (new file, 12 lines)
@@ -0,0 +1,12 @@
[flake8]
select =
    E
    W
    F
ignore =
    W503 # makes Flake8 work like black
    W504
    E203 # makes Flake8 work like black
    E741
    E501 # long line checking is done in black
exclude = test
.git-blame-ignore-revs (new file, 2 lines)
@@ -0,0 +1,2 @@
# Reformatting dbt-core via black, flake8, mypy, and assorted pre-commit hooks.
43e3fc22c4eae4d3d901faba05e33c40f1f1dc5a
.github/CODEOWNERS (new file, 43 lines)
@@ -0,0 +1,43 @@
# This file contains the code owners for the dbt-core repo.
# PRs will be automatically assigned for review to the associated
# team(s) or person(s) that touches any files that are mapped to them.
#
# A statement takes precedence over the statements above it so more general
# assignments are found at the top with specific assignments being lower in
# the ordering (i.e. catch all assignment should be the first item)
#
# Consult GitHub documentation for formatting guidelines:
# https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners#example-of-a-codeowners-file

# As a default for areas with no assignment,
# the core team as a whole will be assigned
* @dbt-labs/core

# Changes to GitHub configurations including Actions
/.github/ @leahwicz

# Language core modules
/core/dbt/config/ @dbt-labs/core-language
/core/dbt/context/ @dbt-labs/core-language
/core/dbt/contracts/ @dbt-labs/core-language
/core/dbt/deps/ @dbt-labs/core-language
/core/dbt/parser/ @dbt-labs/core-language

# Execution core modules
/core/dbt/events/ @dbt-labs/core-execution @dbt-labs/core-language # eventually remove language but they have knowledge here now
/core/dbt/graph/ @dbt-labs/core-execution
/core/dbt/task/ @dbt-labs/core-execution

# Adapter interface, scaffold, Postgres plugin
/core/dbt/adapters @dbt-labs/core-adapters
/core/scripts/create_adapter_plugin.py @dbt-labs/core-adapters
/plugins/ @dbt-labs/core-adapters

# Global project: default macros, including generic tests + materializations
/core/dbt/include/global_project @dbt-labs/core-execution @dbt-labs/core-adapters

# Perf regression testing framework
# This excludes the test project files itself since those aren't specific
# framework changes (excluded by not setting an owner next to it - no owner)
/performance @nathaniel-may
/performance/projects
.github/ISSUE_TEMPLATE/bug-report.yml (new file, 85 lines)
@@ -0,0 +1,85 @@
name: 🐞 Bug
description: Report a bug or an issue you've found with dbt
title: "[Bug] <title>"
labels: ["bug", "triage"]
body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to fill out this bug report!
  - type: checkboxes
    attributes:
      label: Is there an existing issue for this?
      description: Please search to see if an issue already exists for the bug you encountered.
      options:
        - label: I have searched the existing issues
          required: true
  - type: textarea
    attributes:
      label: Current Behavior
      description: A concise description of what you're experiencing.
    validations:
      required: false
  - type: textarea
    attributes:
      label: Expected Behavior
      description: A concise description of what you expected to happen.
    validations:
      required: false
  - type: textarea
    attributes:
      label: Steps To Reproduce
      description: Steps to reproduce the behavior.
      placeholder: |
        1. In this environment...
        2. With this config...
        3. Run '...'
        4. See error...
    validations:
      required: false
  - type: textarea
    id: logs
    attributes:
      label: Relevant log output
      description: |
        If applicable, log output to help explain your problem.
      render: shell
    validations:
      required: false
  - type: textarea
    attributes:
      label: Environment
      description: |
        examples:
          - **OS**: Ubuntu 20.04
          - **Python**: 3.7.2 (`python --version`)
          - **dbt**: 0.21.0 (`dbt --version`)
      value: |
        - OS:
        - Python:
        - dbt:
      render: markdown
    validations:
      required: false
  - type: dropdown
    id: database
    attributes:
      label: What database are you using dbt with?
      multiple: true
      options:
        - postgres
        - redshift
        - snowflake
        - bigquery
        - other (mention it in "Additional Context")
    validations:
      required: false
  - type: textarea
    attributes:
      label: Additional Context
      description: |
        Links? References? Anything that will give us more context about the issue you are encountering!

        Tip: You can attach images or log files by clicking this area to highlight it and then dragging files in.
    validations:
      required: false
.github/ISSUE_TEMPLATE/bug_report.md (deleted, 41 lines)
@@ -1,41 +0,0 @@
---
name: Bug report
about: Report a bug or an issue you've found with dbt
title: ''
labels: bug, triage
assignees: ''

---

### Describe the bug
A clear and concise description of what the bug is. What command did you run? What happened?

### Steps To Reproduce
In as much detail as possible, please provide steps to reproduce the issue. Sample data that triggers the issue, example model code, etc. is all very helpful here.

### Expected behavior
A clear and concise description of what you expected to happen.

### Screenshots and log output
If applicable, add screenshots or log output to help explain your problem.

### System information
**Which database are you using dbt with?**
- [ ] postgres
- [ ] redshift
- [ ] bigquery
- [ ] snowflake
- [ ] other (specify: ____________)


**The output of `dbt --version`:**
```
<output goes here>
```

**The operating system you're using:**

**The output of `python --version`:**

### Additional context
Add any other context about the problem here.
.github/ISSUE_TEMPLATE/config.yml (new file, 16 lines)
@@ -0,0 +1,16 @@
contact_links:
  - name: Create an issue for dbt-redshift
    url: https://github.com/dbt-labs/dbt-redshift/issues/new/choose
    about: Report a bug or request a feature for dbt-redshift
  - name: Create an issue for dbt-bigquery
    url: https://github.com/dbt-labs/dbt-bigquery/issues/new/choose
    about: Report a bug or request a feature for dbt-bigquery
  - name: Create an issue for dbt-snowflake
    url: https://github.com/dbt-labs/dbt-snowflake/issues/new/choose
    about: Report a bug or request a feature for dbt-snowflake
  - name: Ask a question or get support
    url: https://docs.getdbt.com/docs/guides/getting-help
    about: Ask a question or request support
  - name: Questions on Stack Overflow
    url: https://stackoverflow.com/questions/tagged/dbt
    about: Look at questions/answers at Stack Overflow
.github/ISSUE_TEMPLATE/feature-request.yml (new file, 53 lines)
@@ -0,0 +1,53 @@
name: ✨ Feature
description: Suggest an idea for dbt
title: "[Feature] <title>"
labels: ["enhancement", "triage"]
body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to fill out this feature request!
  - type: checkboxes
    attributes:
      label: Is there an existing feature request for this?
      description: Please search to see if an issue already exists for the feature you would like.
      options:
        - label: I have searched the existing issues
          required: true
  - type: checkboxes
    attributes:
      label: Is this your first time opening an issue?
      options:
        - label: I have read the [expectations for open source contributors](https://docs.getdbt.com/docs/contributing/oss-expectations)
          required: true
  - type: textarea
    attributes:
      label: Describe the Feature
      description: A clear and concise description of what you want to happen.
    validations:
      required: true
  - type: textarea
    attributes:
      label: Describe alternatives you've considered
      description: |
        A clear and concise description of any alternative solutions or features you've considered.
    validations:
      required: false
  - type: textarea
    attributes:
      label: Who will this benefit?
      description: |
        What kind of use case will this feature be useful for? Please be specific and provide examples, this will help us prioritize properly.
    validations:
      required: false
  - type: input
    attributes:
      label: Are you interested in contributing this feature?
      description: Let us know if you want to write some code, and how we can help.
    validations:
      required: false
  - type: textarea
    attributes:
      label: Anything else?
      description: |
        Links? References? Anything that will give us more context about the feature you are suggesting!
    validations:
      required: false
.github/ISSUE_TEMPLATE/feature_request.md (deleted, 23 lines)
@@ -1,23 +0,0 @@
---
name: Feature request
about: Suggest an idea for dbt
title: ''
labels: enhancement, triage
assignees: ''

---

### Describe the feature
A clear and concise description of what you want to happen.

### Describe alternatives you've considered
A clear and concise description of any alternative solutions or features you've considered.

### Additional context
Is this feature database-specific? Which database(s) is/are relevant? Please include any other relevant context here.

### Who will this benefit?
What kind of use case will this feature be useful for? Please be specific and provide examples, this will help us prioritize properly.

### Are you interested in contributing this feature?
Let us know if you want to write some code, and how we can help.
.github/actions/latest-wrangler/Dockerfile (new file, 14 lines)
@@ -0,0 +1,14 @@
FROM python:3-slim AS builder
ADD . /app
WORKDIR /app

# We are installing a dependency here directly into our app source dir
RUN pip install --target=/app requests packaging

# A distroless container image with Python and some basics like SSL certificates
# https://github.com/GoogleContainerTools/distroless
FROM gcr.io/distroless/python3-debian10
COPY --from=builder /app /app
WORKDIR /app
ENV PYTHONPATH /app
CMD ["/app/main.py"]
.github/actions/latest-wrangler/README.md (new file, 50 lines)
@@ -0,0 +1,50 @@
# Github package 'latest' tag wrangler for containers
## Usage

Plug in the necessary inputs to determine if the container being built should be tagged 'latest' at the package level, for example `dbt-redshift:latest`.

## Inputs
| Input | Description |
| - | - |
| `package` | Name of the GH package to check against |
| `new_version` | Semver of new container |
| `gh_token` | GH token with package read scope |
| `halt_on_missing` | Return non-zero exit code if requested package does not exist (defaults to false) |

## Outputs
| Output | Description |
| - | - |
| `latest` | Whether or not the new container should be tagged 'latest' |
| `minor_latest` | Whether or not the new container should be tagged major.minor.latest |

## Example workflow
```yaml
name: Ship it!
on:
  workflow_dispatch:
    inputs:
      package:
        description: The package to publish
        required: true
      version_number:
        description: The version number
        required: true

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v1
      - name: Wrangle latest tag
        id: is_latest
        uses: ./.github/actions/latest-wrangler
        with:
          package: ${{ github.event.inputs.package }}
          new_version: ${{ github.event.inputs.new_version }}
          gh_token: ${{ secrets.GITHUB_TOKEN }}
      - name: Print the results
        run: |
          echo "Is it latest? Survey says: ${{ steps.is_latest.outputs.latest }} !"
          echo "Is it minor.latest? Survey says: ${{ steps.is_latest.outputs.minor_latest }} !"
```
.github/actions/latest-wrangler/action.yml (new file, 20 lines)
@@ -0,0 +1,20 @@
name: "Github package 'latest' tag wrangler for containers"
description: "Determines whether or not a given dbt container should be given a bare 'latest' tag (i.e. dbt-core:latest)"
inputs:
  package_name:
    description: "Package to check (i.e. dbt-core, dbt-redshift, etc.)"
    required: true
  new_version:
    description: "Semver of the container being built (i.e. 1.0.4)"
    required: true
  gh_token:
    description: "Auth token for github (must have view packages scope)"
    required: true
outputs:
  latest:
    description: "Whether or not built container should be tagged latest (bool)"
  minor_latest:
    description: "Whether or not built container should be tagged minor.latest (bool)"
runs:
  using: "docker"
  image: "Dockerfile"
.github/actions/latest-wrangler/examples/example_workflow.yml
vendored
Normal file
26
.github/actions/latest-wrangler/examples/example_workflow.yml
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
name: Ship it!
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
package:
|
||||
description: The package to publish
|
||||
required: true
|
||||
version_number:
|
||||
description: The version number
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v1
|
||||
- name: Wrangle latest tag
|
||||
id: is_latest
|
||||
uses: ./.github/actions/latest-wrangler
|
||||
with:
|
||||
package: ${{ github.event.inputs.package }}
|
||||
new_version: ${{ github.event.inputs.new_version }}
|
||||
gh_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Print the results
|
||||
run: |
|
||||
echo "Is it latest? Survey says: ${{ steps.is_latest.outputs.latest }} !"
|
||||
.github/actions/latest-wrangler/examples/example_workflow_dispatch.json (new file, 6 lines)
@@ -0,0 +1,6 @@
{
  "inputs": {
    "version_number": "1.0.1",
    "package": "dbt-redshift"
  }
}
95 .github/actions/latest-wrangler/main.py vendored Normal file
@@ -0,0 +1,95 @@
import os
import sys
import requests
from distutils.util import strtobool
from typing import Union
from packaging.version import parse, Version


if __name__ == "__main__":

    # get inputs
    package = os.environ["INPUT_PACKAGE"]
    new_version = parse(os.environ["INPUT_NEW_VERSION"])
    gh_token = os.environ["INPUT_GH_TOKEN"]
    halt_on_missing = strtobool(os.environ.get("INPUT_HALT_ON_MISSING", "False"))

    # get package metadata from GitHub
    package_request = requests.get(
        f"https://api.github.com/orgs/dbt-labs/packages/container/{package}/versions",
        auth=("", gh_token),
    )
    package_meta = package_request.json()

    # log info if we don't get a 200
    if package_request.status_code != 200:
        print(f"Call to GH API failed: {package_request.status_code} {package_meta['message']}")

    # make an early exit if there is no matching package in GitHub
    if package_request.status_code == 404:
        if halt_on_missing:
            sys.exit(1)
        else:
            # everything is the latest if the package doesn't exist
            print(f"::set-output name=latest::{True}")
            print(f"::set-output name=minor_latest::{True}")
            sys.exit(0)

    # TODO: verify package meta is "correct"
    # https://github.com/dbt-labs/dbt-core/issues/4640

    # map versions and tags
    version_tag_map = {
        version["id"]: version["metadata"]["container"]["tags"] for version in package_meta
    }

    # is pre-release
    pre_rel = any(x in str(new_version) for x in ["a", "b", "rc"])

    # semver of current latest
    # (initialized to False and set with a break so a later, non-matching
    # version can't overwrite a match that was already found)
    current_latest = False
    for version, tags in version_tag_map.items():
        if "latest" in tags:
            # N.B. This seems counterintuitive, but we expect any version tagged
            # 'latest' to have exactly three associated tags:
            # latest, major.minor.latest, and major.minor.patch.
            # Subtracting everything that contains the string 'latest' gets us
            # the major.minor.patch, which is what's needed for comparison.
            current_latest = parse([tag for tag in tags if "latest" not in tag][0])
            break

    # semver of current_minor_latest
    current_minor_latest = False
    for version, tags in version_tag_map.items():
        if f"{new_version.major}.{new_version.minor}.latest" in tags:
            # Similar to above, only now we expect exactly two tags:
            # major.minor.patch and major.minor.latest
            current_minor_latest = parse([tag for tag in tags if "latest" not in tag][0])
            break

    def is_latest(
        pre_rel: bool, new_version: Version, remote_latest: Union[bool, Version]
    ) -> bool:
        """Determine if a given container should be tagged 'latest' based on:
        - its pre-release status
        - its version
        - the version of a previously identified container tagged 'latest'

        :param pre_rel: Whether or not the version of the new container is a pre-release
        :param new_version: The version of the new container
        :param remote_latest: The version of the previously identified container that's
            already tagged latest, or False
        """
        # a pre-release is never latest
        if pre_rel:
            return False
        # no latest tag found = the new version is latest
        if not remote_latest:
            return True
        # otherwise the new version is latest iff the remote version doesn't exceed it
        return remote_latest <= new_version

    latest = is_latest(pre_rel, new_version, current_latest)
    minor_latest = is_latest(pre_rel, new_version, current_minor_latest)

    print(f"::set-output name=latest::{latest}")
    print(f"::set-output name=minor_latest::{minor_latest}")
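For intuition, a minimal sketch of how the `is_latest` decision above plays out (the version strings are illustrative, not real package state):

```python
from packaging.version import parse

def is_latest(pre_rel, new_version, remote_latest):
    # condensed restatement of the function in main.py above
    if pre_rel:
        return False
    if not remote_latest:
        return True
    return remote_latest <= new_version

# a stable release newer than the current 'latest' wins the tag
assert is_latest(False, parse("1.0.1"), parse("1.0.0")) is True
# a pre-release never gets tagged 'latest'
assert is_latest(True, parse("1.1.0rc1"), parse("1.0.0")) is False
# with no existing 'latest' tag, anything stable is latest
assert is_latest(False, parse("1.0.1"), False) is True
```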
10 .github/actions/setup-postgres-linux/action.yml vendored Normal file
@@ -0,0 +1,10 @@
name: "Set up postgres (linux)"
description: "Set up postgres service on linux vm for dbt integration tests"
runs:
  using: "composite"
  steps:
    - shell: bash
      run: |
        sudo systemctl start postgresql.service
        pg_isready
        sudo -u postgres bash ${{ github.action_path }}/setup_db.sh
1 .github/actions/setup-postgres-linux/setup_db.sh vendored Symbolic link
@@ -0,0 +1 @@
../../../test/setup_db.sh
24 .github/actions/setup-postgres-macos/action.yml vendored Normal file
@@ -0,0 +1,24 @@
name: "Set up postgres (macos)"
description: "Set up postgres service on macos vm for dbt integration tests"
runs:
  using: "composite"
  steps:
    - shell: bash
      run: |
        brew services start postgresql
        echo "Check PostgreSQL service is running"
        i=10
        COMMAND='pg_isready'
        while [ $i -gt -1 ]; do
          if [ $i == 0 ]; then
            echo "PostgreSQL service not ready, all attempts exhausted"
            exit 1
          fi
          echo "Check PostgreSQL service status"
          eval $COMMAND && break
          echo "PostgreSQL service not ready, wait 10 more sec, attempts left: $i"
          sleep 10
          ((i--))
        done
        createuser -s postgres
        bash ${{ github.action_path }}/setup_db.sh
1 .github/actions/setup-postgres-macos/setup_db.sh vendored Symbolic link
@@ -0,0 +1 @@
../../../test/setup_db.sh
12 .github/actions/setup-postgres-windows/action.yml vendored Normal file
@@ -0,0 +1,12 @@
name: "Set up postgres (windows)"
description: "Set up postgres service on windows vm for dbt integration tests"
runs:
  using: "composite"
  steps:
    - shell: pwsh
      run: |
        $pgService = Get-Service -Name postgresql*
        Set-Service -InputObject $pgService -Status running -StartupType automatic
        Start-Process -FilePath "$env:PGBIN\pg_isready" -Wait -PassThru
        $env:Path += ";$env:PGBIN"
        bash ${{ github.action_path }}/setup_db.sh
1 .github/actions/setup-postgres-windows/setup_db.sh vendored Symbolic link
@@ -0,0 +1 @@
../../../test/setup_db.sh
15 .github/dependabot.yml vendored
@@ -11,26 +11,11 @@ updates:
    schedule:
      interval: "daily"
    rebase-strategy: "disabled"
  - package-ecosystem: "pip"
    directory: "/plugins/bigquery"
    schedule:
      interval: "daily"
    rebase-strategy: "disabled"
  - package-ecosystem: "pip"
    directory: "/plugins/postgres"
    schedule:
      interval: "daily"
    rebase-strategy: "disabled"
  - package-ecosystem: "pip"
    directory: "/plugins/redshift"
    schedule:
      interval: "daily"
    rebase-strategy: "disabled"
  - package-ecosystem: "pip"
    directory: "/plugins/snowflake"
    schedule:
      interval: "daily"
    rebase-strategy: "disabled"

  # docker dependencies
  - package-ecosystem: "docker"
21 .github/pull_request_template.md vendored
@@ -4,19 +4,20 @@ resolves #
Include the number of the issue addressed by this PR above if applicable.
PRs for code changes without an associated issue *will not be merged*.
See CONTRIBUTING.md for more information.

Example:
resolves #1234
-->


### Description

<!--- Describe the Pull Request here -->

<!---
Describe the Pull Request here. Add any references and info to help reviewers
understand your changes. Include any tradeoffs you considered.
-->

### Checklist
- [ ] I have signed the [CLA](https://docs.getdbt.com/docs/contributor-license-agreements)
- [ ] I have run this code in development and it appears to resolve the stated issue
- [ ] This PR includes tests, or tests are not required/relevant for this PR
- [ ] I have updated the `CHANGELOG.md` and added information about my change to the "dbt next" section.

- [ ] I have read [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md) and understand what's expected of me
- [ ] I have signed the [CLA](https://docs.getdbt.com/docs/contributor-license-agreements)
- [ ] I have run this code in development and it appears to resolve the stated issue
- [ ] This PR includes tests, or tests are not required/relevant for this PR
- [ ] I have [opened an issue to add/update docs](https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose), or docs changes are not required/relevant for this PR
- [ ] I have run `changie new` to [create a changelog entry](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#Adding-CHANGELOG-Entry)
40 .github/workflows/backport.yml vendored Normal file
@@ -0,0 +1,40 @@
# **what?**
# When a PR is merged, if it has the backport label, it will create
# a new PR to backport those changes to the given branch. If it can't
# cleanly do a backport, it will comment on the merged PR of the failure.
#
# Label naming convention: "backport <branch name to backport to>"
# Example: backport 1.0.latest
#
# You MUST "Squash and merge" the original PR or this won't work.

# **why?**
# Changes sometimes need to be backported to release branches.
# This automates the backporting process.

# **when?**
# Once a PR is "Squash and merge"'d, adding a backport label triggers this workflow.

name: Backport
on:
  pull_request:
    types:
      - labeled

permissions:
  contents: write
  pull-requests: write

jobs:
  backport:
    name: Backport
    runs-on: ubuntu-latest
    # Only react to merged PRs for security reasons.
    # See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target.
    if: >
      github.event.pull_request.merged
      && contains(github.event.label.name, 'backport')
    steps:
      - uses: tibdex/backport@v2.0.2
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
78 .github/workflows/changelog-check.yml vendored Normal file
@@ -0,0 +1,78 @@
# **what?**
# Checks that a file has been committed under the /.changes directory
# as a new CHANGELOG entry. Cannot check for a specific filename as
# it is dynamically generated by change type and timestamp.
# This workflow should not require any secrets since it runs for PRs
# from forked repos.
# By default, secrets are not passed to workflows running from
# a forked repo.

# **why?**
# Ensure the code change gets reflected in the CHANGELOG.

# **when?**
# This will run for all PRs going into main and *.latest. It will
# run when they are opened, reopened, when any label is added or removed,
# and when new code is pushed to the branch. The action will then get
# skipped if the 'Skip Changelog' label is present in any of the labels.

name: Check Changelog Entry

on:
  pull_request:
    types: [opened, reopened, labeled, unlabeled, synchronize]
  workflow_dispatch:

defaults:
  run:
    shell: bash

permissions:
  contents: read
  pull-requests: write

env:
  changelog_comment: 'Thank you for your pull request! We could not find a changelog entry for this change. For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry).'

jobs:
  changelog:
    name: changelog
    if: "!contains(github.event.pull_request.labels.*.name, 'Skip Changelog')"

    runs-on: ubuntu-latest

    steps:
      - name: Check if changelog file was added
        # https://github.com/marketplace/actions/paths-changes-filter
        # For each filter, it sets an output variable named after the filter to the text:
        # 'true' - if any of the changed files matches any of the filter rules
        # 'false' - if none of the changed files matches any of the filter rules
        # It also returns:
        # `changes` - JSON array with names of all filters matching any of the changed files
        uses: dorny/paths-filter@v2
        id: filter
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          filters: |
            changelog:
              - added: '.changes/unreleased/**.yaml'
      - name: Check if comment already exists
        uses: peter-evans/find-comment@v1
        id: changelog_comment
        with:
          issue-number: ${{ github.event.pull_request.number }}
          comment-author: 'github-actions[bot]'
          body-includes: ${{ env.changelog_comment }}
      - name: Create PR comment if changelog entry is missing, required, and does not exist
        if: |
          steps.filter.outputs.changelog == 'false' &&
          steps.changelog_comment.outputs.comment-body == ''
        uses: peter-evans/create-or-update-comment@v1
        with:
          issue-number: ${{ github.event.pull_request.number }}
          body: ${{ env.changelog_comment }}
      - name: Fail job if changelog entry is missing and required
        if: steps.filter.outputs.changelog == 'false'
        uses: actions/github-script@v6
        with:
          script: core.setFailed('Changelog entry required to merge.')
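For intuition, a rough Python equivalent of the `changelog` filter rule above (a sketch, not the action's implementation; `added_files` is a hypothetical list of files added by the PR):

```python
from fnmatch import fnmatch

def changelog_added(added_files):
    # mirrors the rule `added: '.changes/unreleased/**.yaml'`;
    # fnmatch's '*' crosses '/' and so stands in for the glob '**'
    return any(fnmatch(path, ".changes/unreleased/*.yaml") for path in added_files)

assert changelog_added([".changes/unreleased/Features-20220415-093000.yaml"])
assert not changelog_added(["core/dbt/main.py"])
```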
114 .github/workflows/dependency-changelog.yml vendored Normal file
@@ -0,0 +1,114 @@
# **what?**
# When dependabot creates a PR, it always adds the `dependencies` label. This
# action will add a corresponding changie yaml file to that PR when that label is added.
# The file is created off a template:
#
# kind: Dependencies
# body: <PR title>
# time: <current timestamp>
# custom:
#   Author: dependabot
#   Issue: 4904
#   PR: <PR number>
#
# **why?**
# Automate changelog generation for more visibility with automated dependency updates via dependabot.

# **when?**
# Once a PR is created and has been correctly labeled with `dependencies`. The intended use
# is for the PRs created by dependabot. You can also manually trigger this by adding the
# `dependencies` label at any time.

name: Dependency Changelog

on:
  pull_request:
    # catch when the PR is opened with the label or when the label is added
    types: [opened, labeled]

permissions:
  contents: write
  pull-requests: read

jobs:
  dependency_changelog:
    if: "contains(github.event.pull_request.labels.*.name, 'dependencies')"
    runs-on: ubuntu-latest

    steps:
      # The timestamp changes the order the changelog entries are listed in the final CHANGELOG.md file. Precision is not
      # important here.
      # The timestamp on the filename and the timestamp in the contents of the file have different expected formats.
      - name: Get File Name Timestamp
        id: filename_time
        uses: nanzm/get-time-action@v1.1
        with:
          format: 'YYYYMMDD-HHmmss'

      - name: Get File Content Timestamp
        id: file_content_time
        uses: nanzm/get-time-action@v1.1
        with:
          format: 'YYYY-MM-DDTHH:mm:ss.000000-05:00'

      # changie expects files to be named in a specific pattern.
      - name: Generate Filepath
        id: fp
        run: |
          FILEPATH=.changes/unreleased/Dependencies-${{ steps.filename_time.outputs.time }}.yaml
          echo "::set-output name=FILEPATH::$FILEPATH"

      - name: Check if changelog file exists already
        # if there's already a changelog entry, don't add another one!
        # https://github.com/marketplace/actions/paths-changes-filter
        # For each filter, it sets an output variable named after the filter to the text:
        # 'true' - if any of the changed files matches any of the filter rules
        # 'false' - if none of the changed files matches any of the filter rules
        # It also returns:
        # `changes` - JSON array with names of all filters matching any of the changed files
        uses: dorny/paths-filter@v2
        id: changelog_check
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          filters: |
            exists:
              - added: '.changes/unreleased/**.yaml'

      - name: Checkout Branch
        if: steps.changelog_check.outputs.exists == 'false'
        uses: actions/checkout@v2
        with:
          # specifying the ref avoids checking out the repository in a detached state
          ref: ${{ github.event.pull_request.head.ref }}
          # If this is not set to false, Git push is performed with github.token and not the token
          # configured using the env: GITHUB_TOKEN in the commit step
          persist-credentials: false

      - name: Create file from template
        if: steps.changelog_check.outputs.exists == 'false'
        run: |
          echo kind: Dependencies > "${{ steps.fp.outputs.FILEPATH }}"
          echo 'body: "${{ github.event.pull_request.title }}"' >> "${{ steps.fp.outputs.FILEPATH }}"
          echo time: "${{ steps.file_content_time.outputs.time }}" >> "${{ steps.fp.outputs.FILEPATH }}"
          echo custom: >> "${{ steps.fp.outputs.FILEPATH }}"
          echo '  Author: ${{ github.event.pull_request.user.login }}' >> "${{ steps.fp.outputs.FILEPATH }}"
          echo '  Issue: "4904"' >> "${{ steps.fp.outputs.FILEPATH }}" # github.event.pull_request.issue for auto id?
          echo '  PR: "${{ github.event.pull_request.number }}"' >> "${{ steps.fp.outputs.FILEPATH }}"

      - name: Commit Changelog File
        if: steps.changelog_check.outputs.exists == 'false'
        uses: gr2m/create-or-update-pull-request-action@v1
        env:
          # When using the GITHUB_TOKEN, the resulting commit will not trigger another GitHub Actions
          # Workflow run. This is due to limitations set by GitHub.
          # See: https://docs.github.com/en/actions/security-guides/automatic-token-authentication#using-the-github_token-in-a-workflow
          # When you use the repository's GITHUB_TOKEN to perform tasks on behalf of the GitHub Actions
          # app, events triggered by the GITHUB_TOKEN will not create a new workflow run. This prevents
          # you from accidentally creating recursive workflow runs. To get around this, use a Personal
          # Access Token to commit changes.
          GITHUB_TOKEN: ${{ secrets.FISHTOWN_BOT_PAT }}
        with:
          branch: ${{ github.event.pull_request.head.ref }}
          # author expected in the format "Lorem J. Ipsum <lorem@example.com>"
          author: "Github Build Bot <buildbot@fishtownanalytics.com>"
          commit-message: "Add automated changelog yaml from template"
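For intuition, a minimal sketch of the file this workflow writes, rendered in Python (the title, PR number, and local timestamps are placeholders standing in for the action outputs above):

```python
from datetime import datetime

# hypothetical stand-ins for the two get-time-action outputs
filename_time = datetime.now().strftime("%Y%m%d-%H%M%S")   # e.g. 20220415-093000
content_time = datetime.now().astimezone().isoformat()     # close to the content format

filepath = f".changes/unreleased/Dependencies-{filename_time}.yaml"
entry = "\n".join([
    "kind: Dependencies",
    'body: "Bump some-package from 1.0 to 1.1"',  # placeholder PR title
    f"time: {content_time}",
    "custom:",
    "  Author: dependabot",
    '  Issue: "4904"',
    '  PR: "1234"',  # placeholder PR number
])
print(filepath)
print(entry)
```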
26 .github/workflows/jira-creation.yml vendored Normal file
@@ -0,0 +1,26 @@
# **what?**
# Mirrors issues into Jira. Includes the information: title,
# GitHub Issue ID and URL

# **why?**
# Jira is our tool for tracking and we need to see these issues in there

# **when?**
# On issue creation or when an issue is labeled `Jira`

name: Jira Issue Creation

on:
  issues:
    types: [opened, labeled]

permissions:
  issues: write

jobs:
  call-label-action:
    uses: dbt-labs/jira-actions/.github/workflows/jira-creation.yml@main
    secrets:
      JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
      JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
      JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }}
26 .github/workflows/jira-label.yml vendored Normal file
@@ -0,0 +1,26 @@
# **what?**
# Calls the Jira label mirroring Action. Covers adding a new label
# to an existing issue as well as removing one

# **why?**
# Jira is our tool for tracking and we need to see these labels in there

# **when?**
# On labels being added or removed from issues

name: Jira Label Mirroring

on:
  issues:
    types: [labeled, unlabeled]

permissions:
  issues: read

jobs:
  call-label-action:
    uses: dbt-labs/jira-actions/.github/workflows/jira-label.yml@main
    secrets:
      JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
      JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
      JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }}
24 .github/workflows/jira-transition.yml vendored Normal file
@@ -0,0 +1,24 @@
# **what?**
# Transition a Jira issue to a new state
# Only supports these GitHub Issue transitions:
# closed, deleted, reopened

# **why?**
# Jira needs to be kept up-to-date

# **when?**
# On issue close, deletion, or reopen

name: Jira Issue Transition

on:
  issues:
    types: [closed, deleted, reopened]

jobs:
  call-label-action:
    uses: dbt-labs/jira-actions/.github/workflows/jira-transition.yml@main
    secrets:
      JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
      JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
      JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }}
232 .github/workflows/main.yml vendored Normal file
@@ -0,0 +1,232 @@
# **what?**
# Runs code quality checks, unit tests, integration tests and
# verifies the python build on all code committed to the repository. This workflow
# should not require any secrets since it runs for PRs from forked repos. By
# default, secrets are not passed to workflows running from forked repos.

# **why?**
# Ensure code for dbt meets a certain quality standard.

# **when?**
# This will run for all PRs, when code is pushed to a release
# branch, and when manually triggered.

name: Tests and Code Checks

on:
  push:
    branches:
      - "main"
      - "*.latest"
      - "releases/*"
  pull_request:
  workflow_dispatch:

permissions: read-all

# will cancel previous workflows triggered by the same event and for the same ref for PRs or same SHA otherwise
concurrency:
  group: ${{ github.workflow }}-${{ github.event_name }}-${{ contains(github.event_name, 'pull_request') && github.event.pull_request.head.ref || github.sha }}
  cancel-in-progress: true

defaults:
  run:
    shell: bash

jobs:
  code-quality:
    name: code-quality

    runs-on: ubuntu-latest
    timeout-minutes: 10

    steps:
      - name: Check out the repository
        uses: actions/checkout@v2

      - name: Set up Python
        uses: actions/setup-python@v2

      - name: Install python dependencies
        run: |
          python -m pip install --user --upgrade pip
          python -m pip --version
          python -m pip install pre-commit
          pre-commit --version
          python -m pip install mypy==0.942
          mypy --version
          python -m pip install -r requirements.txt
          python -m pip install -r dev-requirements.txt
          dbt --version

      - name: Run pre-commit hooks
        run: pre-commit run --all-files --show-diff-on-failure

  unit:
    name: unit test / python ${{ matrix.python-version }}

    runs-on: ubuntu-latest
    timeout-minutes: 10

    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.7", "3.8", "3.9", "3.10"]

    env:
      TOXENV: "unit"
      PYTEST_ADDOPTS: "-v --color=yes --csv unit_results.csv"

    steps:
      - name: Check out the repository
        uses: actions/checkout@v2

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}

      - name: Install python dependencies
        run: |
          python -m pip install --user --upgrade pip
          python -m pip --version
          python -m pip install tox
          tox --version

      - name: Run tox
        run: tox

      - name: Get current date
        if: always()
        id: date
        run: echo "::set-output name=date::$(date +'%Y-%m-%dT%H_%M_%S')" # no colons allowed for artifacts

      - uses: actions/upload-artifact@v2
        if: always()
        with:
          name: unit_results_${{ matrix.python-version }}-${{ steps.date.outputs.date }}.csv
          path: unit_results.csv

  integration:
    name: integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}

    runs-on: ${{ matrix.os }}
    timeout-minutes: 45

    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.7", "3.8", "3.9", "3.10"]
        os: [ubuntu-latest]
        include:
          - python-version: 3.8
            os: windows-latest
          - python-version: 3.8
            os: macos-latest

    env:
      TOXENV: integration
      PYTEST_ADDOPTS: "-v --color=yes -n4 --csv integration_results.csv"
      DBT_INVOCATION_ENV: github-actions
      DBT_TEST_USER_1: dbt_test_user_1
      DBT_TEST_USER_2: dbt_test_user_2
      DBT_TEST_USER_3: dbt_test_user_3

    steps:
      - name: Check out the repository
        uses: actions/checkout@v2

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}

      - name: Set up postgres (linux)
        if: runner.os == 'Linux'
        uses: ./.github/actions/setup-postgres-linux

      - name: Set up postgres (macos)
        if: runner.os == 'macOS'
        uses: ./.github/actions/setup-postgres-macos

      - name: Set up postgres (windows)
        if: runner.os == 'Windows'
        uses: ./.github/actions/setup-postgres-windows

      - name: Install python tools
        run: |
          python -m pip install --user --upgrade pip
          python -m pip --version
          python -m pip install tox
          tox --version

      - name: Run tests
        run: tox

      - name: Get current date
        if: always()
        id: date
        run: echo "::set-output name=date::$(date +'%Y_%m_%dT%H_%M_%S')" # no colons allowed for artifacts

      - uses: actions/upload-artifact@v2
        if: always()
        with:
          name: logs_${{ matrix.python-version }}_${{ matrix.os }}_${{ steps.date.outputs.date }}
          path: ./logs

      - uses: actions/upload-artifact@v2
        if: always()
        with:
          name: integration_results_${{ matrix.python-version }}_${{ matrix.os }}_${{ steps.date.outputs.date }}.csv
          path: integration_results.csv

  build:
    name: build packages

    runs-on: ubuntu-latest

    steps:
      - name: Check out the repository
        uses: actions/checkout@v2

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8

      - name: Install python dependencies
        run: |
          python -m pip install --user --upgrade pip
          python -m pip install --upgrade setuptools wheel twine check-wheel-contents
          python -m pip --version

      - name: Build distributions
        run: ./scripts/build-dist.sh

      - name: Show distributions
        run: ls -lh dist/

      - name: Check distribution descriptions
        run: |
          twine check dist/*

      - name: Check wheel contents
        run: |
          check-wheel-contents dist/*.whl --ignore W007,W008

      - name: Install wheel distributions
        run: |
          find ./dist/*.whl -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/

      - name: Check wheel distributions
        run: |
          dbt --version

      - name: Install source distributions
        # ignore dbt-1.0.0, which intentionally raises an error when installed from source
        run: |
          find ./dist/dbt-[a-z]*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/

      - name: Check source distributions
        run: |
          dbt --version
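For intuition, the `&& ... || ...` in the concurrency group above acts like a ternary; a rough Python restatement (the names and refs are illustrative):

```python
def concurrency_group(workflow, event_name, pr_head_ref, sha):
    # `contains(event_name, 'pull_request') && pr.head.ref || sha`
    # reads as: use the PR head ref for pull requests, the SHA otherwise
    ref = pr_head_ref if "pull_request" in event_name else sha
    return f"{workflow}-{event_name}-{ref}"

# one in-flight run per PR branch; one per commit for pushes
assert concurrency_group("main", "pull_request", "feature/x", "abc123").endswith("feature/x")
assert concurrency_group("main", "push", None, "abc123").endswith("abc123")
```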
62 .github/workflows/release-branch-tests.yml vendored Normal file
@@ -0,0 +1,62 @@
# **what?**
# The purpose of this workflow is to trigger CI to run for each
# release branch and main branch on a regular cadence. If the CI workflow
# fails for a branch, it will post to dev-core-alerts to raise awareness.
# The 'aurelien-baudet/workflow-dispatch' Action triggers the existing
# CI workflow file on the given branch to run so that even if we change the
# CI workflow file in the future, the one that is tailored for the given
# release branch will be used.

# **why?**
# Ensures release branches and main are always shippable and not broken.
# Also, can catch any dependencies shifting beneath us that might
# introduce breaking changes (could also impact Cloud).

# **when?**
# Mainly on a schedule of 9:00, 13:00, 18:00 UTC every day.
# A manual trigger can also test on demand.

name: Release branch scheduled testing

on:
  schedule:
    - cron: '0 9,13,18 * * *' # 9:00, 13:00, 18:00 UTC

  workflow_dispatch: # for manual triggering

# no special access is needed
permissions: read-all

jobs:
  kick-off-ci:
    name: Kick-off CI
    runs-on: ubuntu-latest

    strategy:
      # must run CI 1 branch at a time b/c the workflow-dispatch Action polls for the
      # latest run for results and it gets confused when we kick off multiple runs
      # at once. There is a race condition so we will just run in sequential order.
      max-parallel: 1
      fail-fast: false
      matrix:
        branch: [1.0.latest, 1.1.latest, main]

    steps:
      - name: Call CI workflow for ${{ matrix.branch }} branch
        id: trigger-step
        uses: aurelien-baudet/workflow-dispatch@v2.1.1
        with:
          workflow: main.yml
          ref: ${{ matrix.branch }}
          token: ${{ secrets.FISHTOWN_BOT_PAT }}

      - name: Post failure to Slack
        uses: ravsamhq/notify-slack-action@v1
        if: ${{ always() && !contains(steps.trigger-step.outputs.workflow-conclusion, 'success') }}
        with:
          status: ${{ job.status }}
          notification_title: 'dbt-core scheduled run of "${{ matrix.branch }}" branch not successful'
          message_format: ':x: CI on branch "${{ matrix.branch }}" ${{ steps.trigger-step.outputs.workflow-conclusion }}'
          footer: 'Linked failed CI run ${{ steps.trigger-step.outputs.workflow-url }}'
        env:
          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_DEV_CORE_ALERTS }}
116 .github/workflows/release-docker.yml vendored Normal file
@@ -0,0 +1,116 @@
# **what?**
# This workflow will generate a series of docker images for dbt and push them to the github container registry

# **why?**
# Docker images for dbt are used in a number of important places throughout the dbt ecosystem. This is how we keep those images up-to-date.

# **when?**
# This is triggered manually

# **next steps**
# - build this into the release workflow (or conversely, break out the different release methods into their own workflow files)

name: Docker release

permissions:
  packages: write

on:
  workflow_dispatch:
    inputs:
      package:
        description: The package to release. _One_ of [dbt-core, dbt-redshift, dbt-bigquery, dbt-snowflake, dbt-spark, dbt-postgres]
        required: true
      version_number:
        description: The release version number (i.e. 1.0.0b1). Do not include `latest` tags or a leading `v`!
        required: true

jobs:
  get_version_meta:
    name: Get version meta
    runs-on: ubuntu-latest
    outputs:
      major: ${{ steps.version.outputs.major }}
      minor: ${{ steps.version.outputs.minor }}
      patch: ${{ steps.version.outputs.patch }}
      latest: ${{ steps.latest.outputs.latest }}
      minor_latest: ${{ steps.latest.outputs.minor_latest }}
    steps:
      - uses: actions/checkout@v1
      - name: Split version
        id: version
        run: |
          IFS="." read -r MAJOR MINOR PATCH <<< ${{ github.event.inputs.version_number }}
          echo "::set-output name=major::$MAJOR"
          echo "::set-output name=minor::$MINOR"
          echo "::set-output name=patch::$PATCH"

      - name: Is pkg 'latest'
        id: latest
        uses: ./.github/actions/latest-wrangler
        with:
          package: ${{ github.event.inputs.package }}
          new_version: ${{ github.event.inputs.version_number }}
          gh_token: ${{ secrets.GITHUB_TOKEN }}
          halt_on_missing: False

  setup_image_builder:
    name: Set up docker image builder
    runs-on: ubuntu-latest
    needs: [get_version_meta]
    steps:
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1

  build_and_push:
    name: Build images and push to GHCR
    runs-on: ubuntu-latest
    needs: [setup_image_builder, get_version_meta]
    steps:
      - name: Get docker build arg
        id: build_arg
        run: |
          echo "::set-output name=build_arg_name::"$(echo ${{ github.event.inputs.package }} | sed 's/\-/_/g')
          echo "::set-output name=build_arg_value::"$(echo ${{ github.event.inputs.package }} | sed 's/postgres/core/g')

      - name: Log in to the GHCR
        uses: docker/login-action@v1
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build and push MAJOR.MINOR.PATCH tag
        uses: docker/build-push-action@v2
        with:
          file: docker/Dockerfile
          push: True
          target: ${{ github.event.inputs.package }}
          build-args: |
            ${{ steps.build_arg.outputs.build_arg_name }}_ref=${{ steps.build_arg.outputs.build_arg_value }}@v${{ github.event.inputs.version_number }}
          tags: |
            ghcr.io/dbt-labs/${{ github.event.inputs.package }}:${{ github.event.inputs.version_number }}

      - name: Build and push MINOR.latest tag
        uses: docker/build-push-action@v2
        if: ${{ needs.get_version_meta.outputs.minor_latest == 'True' }}
        with:
          file: docker/Dockerfile
          push: True
          target: ${{ github.event.inputs.package }}
          build-args: |
            ${{ steps.build_arg.outputs.build_arg_name }}_ref=${{ steps.build_arg.outputs.build_arg_value }}@v${{ github.event.inputs.version_number }}
          tags: |
            ghcr.io/dbt-labs/${{ github.event.inputs.package }}:${{ needs.get_version_meta.outputs.major }}.${{ needs.get_version_meta.outputs.minor }}.latest

      - name: Build and push latest tag
        uses: docker/build-push-action@v2
        if: ${{ needs.get_version_meta.outputs.latest == 'True' }}
        with:
          file: docker/Dockerfile
          push: True
          target: ${{ github.event.inputs.package }}
          build-args: |
            ${{ steps.build_arg.outputs.build_arg_name }}_ref=${{ steps.build_arg.outputs.build_arg_value }}@v${{ github.event.inputs.version_number }}
          tags: |
            ghcr.io/dbt-labs/${{ github.event.inputs.package }}:latest
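A rough Python mirror of the 'Get docker build arg' transformations above (a sketch, assuming the input is one of the listed package names): the build-arg *name* swaps `-` for `_`, while the *value* points dbt-postgres back at dbt-core, since the postgres adapter lives in the core repo.

```python
def build_args(package):
    # mirrors: sed 's/\-/_/g' and sed 's/postgres/core/g'
    build_arg_name = package.replace("-", "_")
    build_arg_value = package.replace("postgres", "core")
    return build_arg_name, build_arg_value

assert build_args("dbt-postgres") == ("dbt_postgres", "dbt-core")
assert build_args("dbt-redshift") == ("dbt_redshift", "dbt-redshift")
```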
199 .github/workflows/release.yml vendored Normal file
@@ -0,0 +1,199 @@
# **what?**
# Take the given commit, run unit tests specifically on that sha, build and
# package it, and then release to GitHub and PyPi with that specific build

# **why?**
# Ensure an automated and tested release process

# **when?**
# This will only run manually with a given sha and version

name: Release to GitHub and PyPi

on:
  workflow_dispatch:
    inputs:
      sha:
        description: 'The last commit sha in the release'
        required: true
      version_number:
        description: 'The release version number (i.e. 1.0.0b1)'
        required: true

defaults:
  run:
    shell: bash

jobs:
  unit:
    name: Unit test

    runs-on: ubuntu-latest

    env:
      TOXENV: "unit"

    steps:
      - name: Check out the repository
        uses: actions/checkout@v2
        with:
          persist-credentials: false
          ref: ${{ github.event.inputs.sha }}

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8

      - name: Install python dependencies
        run: |
          pip install --user --upgrade pip
          pip install tox
          pip --version
          tox --version

      - name: Run tox
        run: tox

  build:
    name: build packages

    runs-on: ubuntu-latest

    steps:
      - name: Check out the repository
        uses: actions/checkout@v2
        with:
          persist-credentials: false
          ref: ${{ github.event.inputs.sha }}

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8

      - name: Install python dependencies
        run: |
          pip install --user --upgrade pip
          pip install --upgrade setuptools wheel twine check-wheel-contents
          pip --version

      - name: Build distributions
        run: ./scripts/build-dist.sh

      - name: Show distributions
        run: ls -lh dist/

      - name: Check distribution descriptions
        run: |
          twine check dist/*

      - name: Check wheel contents
        run: |
          check-wheel-contents dist/*.whl --ignore W007,W008

      - uses: actions/upload-artifact@v2
        with:
          name: dist
          path: |
            dist/
            !dist/dbt-${{github.event.inputs.version_number}}.tar.gz

  test-build:
    name: verify packages

    needs: [build, unit]

    runs-on: ubuntu-latest

    steps:
      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8

      - name: Install python dependencies
        run: |
          pip install --user --upgrade pip
          pip install --upgrade wheel
          pip --version

      - uses: actions/download-artifact@v2
        with:
          name: dist
          path: dist/

      - name: Show distributions
        run: ls -lh dist/

      - name: Install wheel distributions
        run: |
          find ./dist/*.whl -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/

      - name: Check wheel distributions
        run: |
          dbt --version

      - name: Install source distributions
        run: |
          find ./dist/*.gz -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/

      - name: Check source distributions
        run: |
          dbt --version

  github-release:
    name: GitHub Release

    needs: test-build

    runs-on: ubuntu-latest

    steps:
      - uses: actions/download-artifact@v2
        with:
          name: dist
          path: '.'

      # Need to set an output variable because env variables can't be taken as input
      # This is needed for the next step with releasing to GitHub
      - name: Find release type
        id: release_type
        env:
          IS_PRERELEASE: ${{ contains(github.event.inputs.version_number, 'rc') || contains(github.event.inputs.version_number, 'b') }}
        run: |
          echo ::set-output name=isPrerelease::$IS_PRERELEASE

      - name: Creating GitHub Release
        uses: softprops/action-gh-release@v1
        with:
          name: dbt-core v${{github.event.inputs.version_number}}
          tag_name: v${{github.event.inputs.version_number}}
          prerelease: ${{ steps.release_type.outputs.isPrerelease }}
          target_commitish: ${{github.event.inputs.sha}}
          body: |
            [Release notes](https://github.com/dbt-labs/dbt-core/blob/main/CHANGELOG.md)
          files: |
            dbt_postgres-${{github.event.inputs.version_number}}-py3-none-any.whl
            dbt_core-${{github.event.inputs.version_number}}-py3-none-any.whl
            dbt-postgres-${{github.event.inputs.version_number}}.tar.gz
            dbt-core-${{github.event.inputs.version_number}}.tar.gz

  pypi-release:
    name: Pypi release

    runs-on: ubuntu-latest

    needs: github-release

    environment: PypiProd
    steps:
      - uses: actions/download-artifact@v2
        with:
          name: dist
          path: 'dist'

      - name: Publish distribution to PyPI
        uses: pypa/gh-action-pypi-publish@v1.4.2
        with:
          password: ${{ secrets.PYPI_API_TOKEN }}
87 .github/workflows/schema-check.yml vendored Normal file
@@ -0,0 +1,87 @@
# **what?**
# Compares the schema of the dbt version of the given ref vs
# the latest official schema releases found in schemas.getdbt.com.
# If there are differences, the workflow will fail and upload the
# diff as an artifact. The metadata team should be alerted to the change.
#
# **why?**
# Reaction work may need to be done if artifact schema changes
# occur, so we want to proactively alert on it.
#
# **when?**
# On pushes to `develop` and release branches. Manual runs are also enabled.
name: Artifact Schema Check

on:
  workflow_dispatch:
  pull_request: # TODO: remove before merging
  push:
    branches:
      - "develop"
      - "*.latest"
      - "releases/*"

env:
  LATEST_SCHEMA_PATH: ${{ github.workspace }}/new_schemas
  SCHEMA_DIFF_ARTIFACT: ${{ github.workspace }}/schema_changes.txt
  DBT_REPO_DIRECTORY: ${{ github.workspace }}/dbt
  SCHEMA_REPO_DIRECTORY: ${{ github.workspace }}/schemas.getdbt.com

jobs:
  checking-schemas:
    name: "Checking schemas"
    runs-on: ubuntu-latest

    steps:
      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8

      - name: Checkout dbt repo
        uses: actions/checkout@v2.3.4
        with:
          path: ${{ env.DBT_REPO_DIRECTORY }}

      - name: Checkout schemas.getdbt.com repo
        uses: actions/checkout@v2.3.4
        with:
          repository: dbt-labs/schemas.getdbt.com
          ref: 'main'
          ssh-key: ${{ secrets.SCHEMA_SSH_PRIVATE_KEY }}
          path: ${{ env.SCHEMA_REPO_DIRECTORY }}

      - name: Generate current schema
        run: |
          cd ${{ env.DBT_REPO_DIRECTORY }}
          python3 -m venv env
          source env/bin/activate
          pip install --upgrade pip
          pip install -r dev-requirements.txt -r editable-requirements.txt
          python scripts/collect-artifact-schema.py --path ${{ env.LATEST_SCHEMA_PATH }}

      # Copy generated schema files into the schemas.getdbt.com repo
      # Do a git diff to find any changes
      # Ignore any date or version changes though
      - name: Compare schemas
        run: |
          cp -r ${{ env.LATEST_SCHEMA_PATH }}/dbt ${{ env.SCHEMA_REPO_DIRECTORY }}
          cd ${{ env.SCHEMA_REPO_DIRECTORY }}
          diff_results=$(git diff -I='*[0-9]{4}-(0[1-9]|1[0-2])-(0[1-9]|[1-2][0-9]|3[0-1])T' \
            -I='*[0-9]{1}.[0-9]{2}.[0-9]{1}(rc[0-9]|b[0-9]| )' --compact-summary)
          if [[ -n "$diff_results" ]]; then
            echo "$diff_results"
            echo "Schema changes detected!"
            git diff -I='*[0-9]{4}-(0[1-9]|1[0-2])-(0[1-9]|[1-2][0-9]|3[0-1])T' \
              -I='*[0-9]{1}.[0-9]{2}.[0-9]{1}(rc[0-9]|b[0-9]| )' > ${{ env.SCHEMA_DIFF_ARTIFACT }}
            exit 1
          else
            echo "No schema changes detected"
          fi

      - name: Upload schema diff
        uses: actions/upload-artifact@v2.2.4
        if: ${{ failure() }}
        with:
          name: 'schema_changes.txt'
          path: '${{ env.SCHEMA_DIFF_ARTIFACT }}'
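For intuition, a simplified Python mirror of the two `-I` ignore patterns above (a sketch only: `git diff -I` drops hunks whose changed lines all match the regex, which this line-by-line simplification does not fully reproduce):

```python
import re

# lines whose only change is a generated timestamp or a dbt version string are noise
DATE_RE = re.compile(r"[0-9]{4}-(0[1-9]|1[0-2])-(0[1-9]|[1-2][0-9]|3[0-1])T")
VERSION_RE = re.compile(r"[0-9]\.[0-9]{2}\.[0-9](rc[0-9]|b[0-9]| )")

def is_noise(line):
    return bool(DATE_RE.search(line) or VERSION_RE.search(line))

assert is_noise('"generated_at": "2022-04-15T09:30:00Z"')
assert not is_noise('"new_field": "added to the manifest"')
```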
17 .github/workflows/stale.yml vendored Normal file
@@ -0,0 +1,17 @@
name: "Close stale issues and PRs"
on:
  schedule:
    - cron: "30 1 * * *"

jobs:
  stale:
    runs-on: ubuntu-latest
    steps:
      # pinned at v4 (https://github.com/actions/stale/releases/tag/v4.0.0)
      - uses: actions/stale@cdf15f641adb27a71842045a94023bef6945e3aa
        with:
          stale-issue-message: "This issue has been marked as Stale because it has been open for 180 days with no activity. If you would like the issue to remain open, please remove the stale label or comment on the issue, or it will be closed in 7 days."
          stale-pr-message: "This PR has been marked as Stale because it has been open for 180 days with no activity. If you would like the PR to remain open, please remove the stale label or comment on the PR, or it will be closed in 7 days."
          close-issue-message: "Although we are closing this issue as stale, it's not gone forever. Issues can be reopened if there is renewed community interest; add a comment to notify the maintainers."
          # mark issues/PRs stale when they haven't seen activity in 180 days
          days-before-stale: 180
78 .github/workflows/structured-logging-schema-check.yml vendored Normal file
@@ -0,0 +1,78 @@
# This Action makes a dbt run to sample json structured logs
# and checks that they conform to the currently documented schema.
#
# If this action fails it either means we have unintentionally deviated
# from our documented structured logging schema, or we need to bump the
# version of our structured logging and add new documentation to
# communicate these changes.

name: Structured Logging Schema Check
on:
  push:
    branches:
      - "main"
      - "*.latest"
      - "releases/*"
  pull_request:
  workflow_dispatch:

permissions: read-all

jobs:
  # run the log schema check on the current or default branch
  test-schema:
    name: Test Log Schema
    runs-on: ubuntu-latest
    env:
      # turns warnings into errors
      RUSTFLAGS: "-D warnings"
      # points tests to the log file
      LOG_DIR: "/home/runner/work/dbt-core/dbt-core/logs"
      # tells integration tests to output into json format
      DBT_LOG_FORMAT: "json"
      # Additional test users
      DBT_TEST_USER_1: dbt_test_user_1
      DBT_TEST_USER_2: dbt_test_user_2
      DBT_TEST_USER_3: dbt_test_user_3

    steps:
      - name: checkout dev
        uses: actions/checkout@v2
        with:
          persist-credentials: false

      - name: Setup Python
        uses: actions/setup-python@v2.2.2
        with:
          python-version: "3.8"

      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: stable
          override: true

      - name: Install python dependencies
        run: |
          pip install --user --upgrade pip
          pip --version
          pip install tox
          tox --version

      - name: Set up postgres
        uses: ./.github/actions/setup-postgres-linux

      - name: ls
        run: ls

      # integration tests generate a ton of logs in different files. the next step will find them all.
      # we actually care if these pass, because the normal test run doesn't usually include many json log outputs
      - name: Run integration tests
        run: tox -e integration -- -nauto

      # apply our schema tests to every log event from the previous step
      # skips any output that isn't valid json
      - uses: actions-rs/cargo@v1
        with:
          command: run
          args: --manifest-path test/interop/log_parsing/Cargo.toml
1 .github/workflows/test/.actrc vendored Normal file
@@ -0,0 +1 @@
-P ubuntu-latest=ghcr.io/catthehacker/ubuntu:act-latest
1 .github/workflows/test/.gitignore vendored Normal file
@@ -0,0 +1 @@
.secrets
1 .github/workflows/test/.secrets.EXAMPLE vendored Normal file
@@ -0,0 +1 @@
GITHUB_TOKEN=GH_PERSONAL_ACCESS_TOKEN_GOES_HERE
6 .github/workflows/test/inputs/release_docker.json vendored Normal file
@@ -0,0 +1,6 @@
{
  "inputs": {
    "version_number": "1.0.1",
    "package": "dbt-postgres"
  }
}
33 .github/workflows/triage-labels.yml vendored Normal file
@@ -0,0 +1,33 @@
# **what?**
# When the core team triages, we sometimes need more information from the issue creator. In
# those cases we remove the `triage` label and add the `awaiting_response` label. Once we
# receive a response in the form of a comment, we want the `awaiting_response` label removed
# in favor of the `triage` label so we are aware that the issue needs action.

# **why?**
# To help with our team's triage issue tracking

# **when?**
# This will run when a comment is added to an issue and that issue has the `awaiting_response` label.

name: Update Triage Label

on: issue_comment

defaults:
  run:
    shell: bash

permissions:
  issues: write

jobs:
  triage_label:
    if: contains(github.event.issue.labels.*.name, 'awaiting_response')
    runs-on: ubuntu-latest
    steps:
      - name: initial labeling
        uses: andymckay/labeler@master
        with:
          add-labels: "triage"
          remove-labels: "awaiting_response"
111 .github/workflows/version-bump.yml vendored Normal file
@@ -0,0 +1,111 @@
# **what?**
# This workflow will take a version number and a dry run flag. With that
# it will run versionbump to update the version number everywhere in the
# code base and then generate an updated Docker requirements file. If this
# is a dry run, a draft PR will open with the changes. If this isn't a dry
# run, the changes will be committed to the branch this is run on.

# **why?**
# This is to aid in releasing dbt and making sure we have updated
# the versions and Docker requirements in all places.

# **when?**
# This is triggered either manually OR
# from the repository_dispatch event "version-bump" which is sent from
# the dbt-release repo Action

name: Version Bump

on:
  workflow_dispatch:
    inputs:
      version_number:
        description: 'The version number to bump to'
        required: true
      is_dry_run:
        description: 'Creates a draft PR to allow testing instead of committing to a branch'
        required: true
        default: 'true'
  repository_dispatch:
    types: [version-bump]

jobs:
  bump:
    runs-on: ubuntu-latest
    steps:
      - name: Check out the repository
        uses: actions/checkout@v2

      - name: Set version and dry run values
        id: variables
        env:
          VERSION_NUMBER: "${{ github.event.client_payload.version_number == '' && github.event.inputs.version_number || github.event.client_payload.version_number }}"
          IS_DRY_RUN: "${{ github.event.client_payload.is_dry_run == '' && github.event.inputs.is_dry_run || github.event.client_payload.is_dry_run }}"
        run: |
          echo Repository dispatch event version: ${{ github.event.client_payload.version_number }}
          echo Repository dispatch event dry run: ${{ github.event.client_payload.is_dry_run }}
          echo Workflow dispatch event version: ${{ github.event.inputs.version_number }}
          echo Workflow dispatch event dry run: ${{ github.event.inputs.is_dry_run }}
          echo ::set-output name=VERSION_NUMBER::$VERSION_NUMBER
          echo ::set-output name=IS_DRY_RUN::$IS_DRY_RUN

      - uses: actions/setup-python@v2
        with:
          python-version: "3.8"

      - name: Install python dependencies
        run: |
          python3 -m venv env
          source env/bin/activate
          pip install --upgrade pip

      - name: Create PR branch
        if: ${{ steps.variables.outputs.IS_DRY_RUN == 'true' }}
        run: |
          git checkout -b bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_$GITHUB_RUN_ID
          git push origin bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_$GITHUB_RUN_ID
          git branch --set-upstream-to=origin/bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_$GITHUB_RUN_ID bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_$GITHUB_RUN_ID

      # - name: Generate Docker requirements
      #   run: |
      #     source env/bin/activate
      #     pip install -r requirements.txt
      #     pip freeze -l > docker/requirements/requirements.txt
      #     git status

      - name: Bump version
        run: |
          source env/bin/activate
          pip install -r dev-requirements.txt
          env/bin/bumpversion --allow-dirty --new-version ${{steps.variables.outputs.VERSION_NUMBER}} major
          git status

      - name: Commit version bump directly
        uses: EndBug/add-and-commit@v7
        if: ${{ steps.variables.outputs.IS_DRY_RUN == 'false' }}
        with:
          author_name: 'Github Build Bot'
          author_email: 'buildbot@fishtownanalytics.com'
          message: 'Bumping version to ${{steps.variables.outputs.VERSION_NUMBER}}'

      - name: Commit version bump to branch
        uses: EndBug/add-and-commit@v7
        if: ${{ steps.variables.outputs.IS_DRY_RUN == 'true' }}
        with:
          author_name: 'Github Build Bot'
          author_email: 'buildbot@fishtownanalytics.com'
          message: 'Bumping version to ${{steps.variables.outputs.VERSION_NUMBER}}'
          branch: 'bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_${{GITHUB.RUN_ID}}'
          push: 'origin origin/bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_${{GITHUB.RUN_ID}}'

      - name: Create Pull Request
        uses: peter-evans/create-pull-request@v3
        if: ${{ steps.variables.outputs.IS_DRY_RUN == 'true' }}
        with:
          author: 'Github Build Bot <buildbot@fishtownanalytics.com>'
          draft: true
          base: ${{github.ref}}
          title: 'Bumping version to ${{steps.variables.outputs.VERSION_NUMBER}}'
          branch: 'bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_${{GITHUB.RUN_ID}}'
          labels: |
            Skip Changelog
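For intuition, the `VERSION_NUMBER` / `IS_DRY_RUN` expressions above use the same `&& ... || ...` ternary idiom; a rough Python restatement (values are illustrative):

```python
def resolve(client_payload_value, workflow_input_value):
    # `payload == '' && input || payload` reads as: if the repository_dispatch
    # payload is empty, fall back to the manual workflow input
    return workflow_input_value if client_payload_value == "" else client_payload_value

assert resolve("", "1.2.0") == "1.2.0"    # manual workflow_dispatch run
assert resolve("1.2.0", "") == "1.2.0"    # repository_dispatch from dbt-release
```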
10 .gitignore vendored
@@ -49,9 +49,8 @@ coverage.xml
*,cover
.hypothesis/
test.env
*.pytest_cache/

# Mypy
.mypy_cache/

# Translations
*.mo
@@ -66,10 +65,10 @@ docs/_build/
# PyBuilder
target/

#Ipython Notebook
# Ipython Notebook
.ipynb_checkpoints

#Emacs
# Emacs
*~

# Sublime Text
@@ -78,6 +77,7 @@ target/
# Vim
*.sw*

# Pyenv
.python-version

# Vim
@@ -85,10 +85,12 @@ target/

# pycharm
.idea/
venv/

# AWS credentials
.aws/

# MacOS
.DS_Store

# vscode
@@ -1,20 +1,68 @@
# Configuration for pre-commit hooks (see https://pre-commit.com/).
# Eventually the hooks described here will be run as tests before merging each PR.

# TODO: remove global exclusion of tests when testing overhaul is complete
exclude: ^test/

# Force all unspecified python hooks to run python 3.8
default_language_version:
  python: python3.8

repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v2.3.0
    hooks:
      - id: check-yaml
      - id: end-of-file-fixer
      - id: trailing-whitespace
  - repo: https://github.com/psf/black
    rev: 20.8b1
    hooks:
      - id: black
  - repo: https://gitlab.com/PyCQA/flake8
    rev: 3.9.0
    hooks:
      - id: flake8
  - repo: https://github.com/pre-commit/mirrors-mypy
    rev: v0.812
    hooks:
      - id: mypy
        files: ^core/dbt/
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v3.2.0
    hooks:
      - id: check-yaml
        args: [--unsafe]
      - id: check-json
      - id: end-of-file-fixer
      - id: trailing-whitespace
        exclude_types:
          - "markdown"
      - id: check-case-conflict
  - repo: https://github.com/psf/black
    rev: 22.3.0
    hooks:
      - id: black
        args:
          - "--line-length=99"
          - "--target-version=py38"
      - id: black
        alias: black-check
        stages: [manual]
        args:
          - "--line-length=99"
          - "--target-version=py38"
          - "--check"
          - "--diff"
  - repo: https://gitlab.com/pycqa/flake8
    rev: 4.0.1
    hooks:
      - id: flake8
      - id: flake8
        alias: flake8-check
        stages: [manual]
  - repo: https://github.com/pre-commit/mirrors-mypy
    rev: v0.942
    hooks:
      - id: mypy
        # N.B.: Mypy is... a bit fragile.
        #
        # By using `language: system` we run this hook in the local
        # environment instead of a pre-commit isolated one. This is needed
        # to ensure mypy correctly parses the project.

        # It may cause trouble
        # in that it adds environmental variables out of our control to the
        # mix. Unfortunately, there's nothing we can do about it, per pre-commit's
        # author.
        # See https://github.com/pre-commit/pre-commit/issues/730 for details.
        args: [--show-error-codes]
        files: ^core/dbt/
        language: system
      - id: mypy
        alias: mypy-check
        stages: [manual]
        args: [--show-error-codes, --pretty]
        files: ^core/dbt/
        language: system
@@ -2,31 +2,36 @@ The core function of dbt is SQL compilation and execution. Users create projects
|
||||
|
||||
## dbt-core
|
||||
|
||||
Most of the python code in the repository is within the `core/dbt` directory. Currently the main subdirectories are:
|
||||
- [`adapters`](core/dbt/adapters): Define base classes for behavior that is likely to differ across databases
|
||||
- [`clients`](core/dbt/clients): Interface with dependencies (agate, jinja) or across operating systems
|
||||
- [`config`](core/dbt/config): Reconcile user-supplied configuration from connection profiles, project files, and Jinja macros
|
||||
- [`context`](core/dbt/context): Build and expose dbt-specific Jinja functionality
|
||||
- [`contracts`](core/dbt/contracts): Define Python objects (dataclasses) that dbt expects to create and validate
|
||||
- [`deps`](core/dbt/deps): Package installation and dependency resolution
|
||||
- [`graph`](core/dbt/graph): Produce a `networkx` DAG of project resources, and selecting those resources given user-supplied criteria
|
||||
- [`include`](core/dbt/include): The dbt "global project," which defines default implementations of Jinja2 macros
|
||||
- [`parser`](core/dbt/parser): Read project files, validate, construct python objects
|
||||
- [`rpc`](core/dbt/rpc): Provide remote procedure call server for invoking dbt, following JSON-RPC 2.0 spec
|
||||
- [`task`](core/dbt/task): Set forth the actions that dbt can perform when invoked
|
||||
Most of the python code in the repository is within the `core/dbt` directory.
|
||||
- [`single python files`](core/dbt/README.md): A number of individual files, such as 'compilation.py' and 'exceptions.py'
|
||||
|
||||
The main subdirectories of core/dbt:
|
||||
- [`adapters`](core/dbt/adapters/README.md): Define base classes for behavior that is likely to differ across databases
|
||||
- [`clients`](core/dbt/clients/README.md): Interface with dependencies (agate, jinja) or across operating systems
|
||||
- [`config`](core/dbt/config/README.md): Reconcile user-supplied configuration from connection profiles, project files, and Jinja macros
|
||||
- [`context`](core/dbt/context/README.md): Build and expose dbt-specific Jinja functionality
|
||||
- [`contracts`](core/dbt/contracts/README.md): Define Python objects (dataclasses) that dbt expects to create and validate
|
||||
- [`deps`](core/dbt/deps/README.md): Package installation and dependency resolution
|
||||
- [`events`](core/dbt/events/README.md): Logging events
|
||||
- [`graph`](core/dbt/graph/README.md): Produce a `networkx` DAG of project resources, and select those resources given user-supplied criteria
|
||||
- [`include`](core/dbt/include/README.md): The dbt "global project," which defines default implementations of Jinja2 macros
|
||||
- [`parser`](core/dbt/parser/README.md): Read project files, validate, construct python objects
|
||||
- [`task`](core/dbt/task/README.md): Set forth the actions that dbt can perform when invoked
|
||||
|
||||
Legacy tests are found in the 'test' directory:
|
||||
- [`unit tests`](core/dbt/test/unit/README.md): Unit tests
|
||||
- [`integration tests`](core/dbt/test/integration/README.md): Integration tests
|
||||
|
||||
### Invoking dbt
|
||||
|
||||
There are two supported ways of invoking dbt: from the command line and using an RPC server.
|
||||
|
||||
The "tasks" map to top-level dbt commands. So `dbt run` => task.run.RunTask, etc. Some are more like abstract base classes (GraphRunnableTask, for example) but all the concrete types outside of task/rpc should map to tasks. Currently one executes at a time. The tasks kick off their “Runners” and those do execute in parallel. The parallelism is managed via a thread pool, in GraphRunnableTask.
|
||||
The "tasks" map to top-level dbt commands. So `dbt run` => task.run.RunTask, etc. Some are more like abstract base classes (GraphRunnableTask, for example) but all the concrete types outside of task should map to tasks. Currently one executes at a time. The tasks kick off their “Runners” and those do execute in parallel. The parallelism is managed via a thread pool, in GraphRunnableTask.
|
||||
|
||||
core/dbt/include/index.html
|
||||
This is the docs website code. It comes from the dbt-docs repository, and is generated when a release is packaged.
|
||||
|
||||
## Adapters
|
||||
|
||||
dbt uses an adapter-plugin pattern to extend support to different databases, warehouses, query engines, etc. The four core adapters that are in the main repository, contained within the [`plugins`](plugins) subdirectory, are: Postgres, Redshift, Snowflake, and BigQuery. Other warehouses use adapter plugins defined in separate repositories (e.g. [dbt-spark](https://github.com/fishtown-analytics/dbt-spark), [dbt-presto](https://github.com/fishtown-analytics/dbt-presto)).
|
||||
dbt uses an adapter-plugin pattern to extend support to different databases, warehouses, query engines, etc. For testing and development purposes, the dbt-postgres plugin lives alongside the dbt-core codebase, in the [`plugins`](plugins) subdirectory. Like other adapter plugins, it is a self-contained codebase and package that builds on top of dbt-core.
|
||||
|
||||
Each adapter is a mix of Python, Jinja2, and SQL. The adapter code also makes heavy use of Jinja2 to wrap modular chunks of SQL functionality, define default implementations, and allow plugins to override them.
|
||||
|
||||
@@ -46,4 +51,4 @@ The [`test/`](test/) subdirectory includes unit and integration tests that run a
|
||||
|
||||
- [docker](docker/): All dbt versions are published as Docker images on DockerHub. This subfolder contains the `Dockerfile` (constant) and `requirements.txt` (one for each version).
|
||||
- [etc](etc/): Images for README
|
||||
- [scripts](scripts/): Helper scripts for testing, releasing, and producing JSON schemas. These are not included in distributions of dbt, not are they rigorously tested—they're just handy tools for the dbt maintainers :)
|
||||
- [scripts](scripts/): Helper scripts for testing, releasing, and producing JSON schemas. These are not included in distributions of dbt, nor are they rigorously tested—they're just handy tools for the dbt maintainers :)
|
||||
|
||||
2715
CHANGELOG.md
Normal file → Executable file
2715
CHANGELOG.md
Normal file → Executable file
File diff suppressed because it is too large
Load Diff
258
CONTRIBUTING.md
258
CONTRIBUTING.md
@@ -1,227 +1,193 @@
|
||||
# Contributing to dbt
|
||||
# Contributing to `dbt-core`
|
||||
|
||||
`dbt-core` is open source software. It is what it is today because community members have opened issues, provided feedback, and [contributed to the knowledge loop](https://www.getdbt.com/dbt-labs/values/). Whether you are a seasoned open source contributor or a first-time committer, we welcome and encourage you to contribute code, documentation, ideas, or problem statements to this project.
|
||||
|
||||
1. [About this document](#about-this-document)
|
||||
2. [Proposing a change](#proposing-a-change)
|
||||
3. [Getting the code](#getting-the-code)
|
||||
4. [Setting up an environment](#setting-up-an-environment)
|
||||
5. [Running dbt in development](#running-dbt-in-development)
|
||||
6. [Testing](#testing)
|
||||
7. [Submitting a Pull Request](#submitting-a-pull-request)
|
||||
2. [Getting the code](#getting-the-code)
|
||||
3. [Setting up an environment](#setting-up-an-environment)
|
||||
4. [Running `dbt-core` in development](#running-dbt-core-in-development)
|
||||
5. [Testing dbt-core](#testing)
|
||||
6. [Submitting a Pull Request](#submitting-a-pull-request)
|
||||
|
||||
## About this document
|
||||
|
||||
This document is a guide intended for folks interested in contributing to dbt. Below, we document the process by which members of the community should create issues and submit pull requests (PRs) in this repository. It is not intended as a guide for using dbt, and it assumes a certain level of familiarity with Python concepts such as virtualenvs, `pip`, python modules, filesystems, and so on. This guide assumes you are using macOS or Linux and are comfortable with the command line.
|
||||
There are many ways to contribute to the ongoing development of `dbt-core`, such as by participating in discussions and issues. We encourage you to first read our higher-level document: ["Expectations for Open Source Contributors"](https://docs.getdbt.com/docs/contributing/oss-expectations).
|
||||
|
||||
If you're new to python development or contributing to open-source software, we encourage you to read this document from start to finish. If you get stuck, drop us a line in the #development channel on [slack](community.getdbt.com).
|
||||
The rest of this document serves as a more granular guide for contributing code changes to `dbt-core` (this repository). It is not intended as a guide for using `dbt-core`, and some pieces assume a level of familiarity with Python development (virtualenvs, `pip`, etc). Specific code snippets in this guide assume you are using macOS or Linux and are comfortable with the command line.
|
||||
|
||||
### Signing the CLA
|
||||
If you get stuck, we're happy to help! Drop us a line in the `#dbt-core-development` channel in the [dbt Community Slack](https://community.getdbt.com).
|
||||
|
||||
Please note that all contributors to dbt must sign the [Contributor License Agreement](https://docs.getdbt.com/docs/contributor-license-agreements) to have their Pull Request merged into the dbt codebase. If you are unable to sign the CLA, then the dbt maintainers will unfortunately be unable to merge your Pull Request. You are, however, welcome to open issues and comment on existing ones.
|
||||
### Notes
|
||||
|
||||
## Proposing a change
|
||||
|
||||
dbt is Apache 2.0-licensed open source software. dbt is what it is today because community members like you have opened issues, provided feedback, and contributed to the knowledge loop for the entire community. Whether you are a seasoned open source contributor or a first-time committer, we welcome and encourage you to contribute code, documentation, ideas, or problem statements to this project.
|
||||
|
||||
### Defining the problem
|
||||
|
||||
If you have an idea for a new feature or if you've discovered a bug in dbt, the first step is to open an issue. Please check the list of [open issues](https://github.com/fishtown-analytics/dbt/issues) before creating a new one. If you find a relevant issue, please add a comment to the open issue instead of creating a new one. There are hundreds of open issues in this repository and it can be hard to know where to look for a relevant open issue. **The dbt maintainers are always happy to point contributors in the right direction**, so please err on the side of documenting your idea in a new issue if you are unsure where a problem statement belongs.
|
||||
|
||||
**Note:** All community-contributed Pull Requests _must_ be associated with an open issue. If you submit a Pull Request that does not pertain to an open issue, you will be asked to create an issue describing the problem before the Pull Request can be reviewed.
|
||||
|
||||
### Discussing the idea
|
||||
|
||||
After you open an issue, a dbt maintainer will follow up by commenting on your issue (usually within 1-3 days) to explore your idea further and advise on how to implement the suggested changes. In many cases, community members will chime in with their own thoughts on the problem statement. If you as the issue creator are interested in submitting a Pull Request to address the issue, you should indicate this in the body of the issue. The dbt maintainers are _always_ happy to help contributors with the implementation of fixes and features, so please also indicate if there's anything you're unsure about or could use guidance around in the issue.
|
||||
|
||||
### Submitting a change
|
||||
|
||||
If an issue is appropriately well scoped and describes a beneficial change to the dbt codebase, then anyone may submit a Pull Request to implement the functionality described in the issue. See the sections below on how to do this.
|
||||
|
||||
The dbt maintainers will add a `good first issue` label if an issue is suitable for a first-time contributor. This label often means that the required code change is small, limited to one database adapter, or a net-new addition that does not impact existing functionality. You can see the list of currently open issues on the [Contribute](https://github.com/fishtown-analytics/dbt/contribute) page.
|
||||
|
||||
Here's a good workflow:
|
||||
- Comment on the open issue, expressing your interest in contributing the required code change
|
||||
- Outline your planned implementation. If you want help getting started, ask!
|
||||
- Follow the steps outlined below to develop locally. Once you have opened a PR, one of the dbt maintainers will work with you to review your code.
|
||||
- Add a test! Tests are crucial for both fixes and new features alike. We want to make sure that code works as intended, and that it avoids any bugs previously encountered. Currently, the best resource for understanding dbt's [unit](test/unit) and [integration](test/integration) tests is the tests themselves. One of the maintainers can help by pointing out relevant examples.
|
||||
|
||||
In some cases, the right resolution to an open issue might be tangential to the dbt codebase. The right path forward might be a documentation update or a change that can be made in user-space. In other cases, the issue might describe functionality that the dbt maintainers are unwilling or unable to incorporate into the dbt codebase. When it is determined that an open issue describes functionality that will not translate to a code change in the dbt repository, the issue will be tagged with the `wontfix` label (see below) and closed.
|
||||
|
||||
### Using issue labels
|
||||
|
||||
The dbt maintainers use labels to categorize open issues. Some labels indicate the databases impacted by the issue, while others describe the domain in the dbt codebase germane to the discussion. While most of these labels are self-explanatory (eg. `snowflake` or `bigquery`), there are others that are worth describing.
|
||||
|
||||
| tag | description |
|
||||
| --- | ----------- |
|
||||
| [triage](https://github.com/fishtown-analytics/dbt/labels/triage) | This is a new issue which has not yet been reviewed by a dbt maintainer. This label is removed when a maintainer reviews and responds to the issue. |
|
||||
| [bug](https://github.com/fishtown-analytics/dbt/labels/bug) | This issue represents a defect or regression in dbt |
|
||||
| [enhancement](https://github.com/fishtown-analytics/dbt/labels/enhancement) | This issue represents net-new functionality in dbt |
|
||||
| [good first issue](https://github.com/fishtown-analytics/dbt/labels/good%20first%20issue) | This issue does not require deep knowledge of the dbt codebase to implement. This issue is appropriate for a first-time contributor. |
|
||||
| [help wanted](https://github.com/fishtown-analytics/dbt/labels/help%20wanted) / [discussion](https://github.com/fishtown-analytics/dbt/labels/discussion) | Conversation around this issue is ongoing, and there isn't yet a clear path forward. Input from community members is most welcome. |
|
||||
| [duplicate](https://github.com/fishtown-analytics/dbt/issues/duplicate) | This issue is functionally identical to another open issue. The dbt maintainers will close this issue and encourage community members to focus conversation on the other one. |
|
||||
| [snoozed](https://github.com/fishtown-analytics/dbt/labels/snoozed) | This issue describes a good idea, but one which will probably not be addressed in a six-month time horizon. The dbt maintainers will revisit these issues periodically and re-prioritize them accordingly. |
|
||||
| [stale](https://github.com/fishtown-analytics/dbt/labels/stale) | This is an old issue which has not recently been updated. Stale issues will periodically be closed by dbt maintainers, but they can be re-opened if the discussion is restarted. |
|
||||
| [wontfix](https://github.com/fishtown-analytics/dbt/labels/wontfix) | This issue does not require a code change in the dbt repository, or the maintainers are unwilling/unable to merge a Pull Request which implements the behavior described in the issue. |
|
||||
|
||||
#### Branching Strategy
|
||||
|
||||
dbt has three types of branches:
|
||||
|
||||
- **Trunks** are where active development of the next release takes place. There is one trunk, named `develop` at the time of writing, and it is the default branch of the repository.
|
||||
- **Release Branches** track a specific, not yet complete release of dbt. Each minor version release has a corresponding release branch. For example, the `0.11.x` series of releases has a branch called `0.11.latest`. This allows us to release new patch versions under `0.11` without necessarily needing to pull them into the latest version of dbt.
|
||||
- **Feature Branches** track individual features and fixes. On completion they should be merged into the trunk branch or a specific release branch.
|
||||
- **Adapters:** Is your issue or proposed code change related to a specific [database adapter](https://docs.getdbt.com/docs/available-adapters)? If so, please open issues, PRs, and discussions in that adapter's repository instead. The sole exception is Postgres; the `dbt-postgres` plugin lives in this repository (`dbt-core`).
|
||||
- **CLA:** Please note that anyone contributing code to `dbt-core` must sign the [Contributor License Agreement](https://docs.getdbt.com/docs/contributor-license-agreements). If you are unable to sign the CLA, the `dbt-core` maintainers will unfortunately be unable to merge any of your Pull Requests. We welcome you to participate in discussions, open issues, and comment on existing ones.
|
||||
- **Branches:** All pull requests from community contributors should target the `main` branch (default). If the change is needed as a patch for a minor version of dbt that has already been released (or is already a release candidate), a maintainer will backport the changes in your PR to the relevant "latest" release branch (`1.0.latest`, `1.1.latest`, ...)
|
||||
|
||||
## Getting the code
|
||||
|
||||
### Installing git
|
||||
|
||||
You will need `git` in order to download and modify the dbt source code. On macOS, the best way to download git is to just install [Xcode](https://developer.apple.com/support/xcode/).
|
||||
You will need `git` in order to download and modify the `dbt-core` source code. On macOS, the best way to download git is to just install [Xcode](https://developer.apple.com/support/xcode/).
|
||||
|
||||
### External contributors
|
||||
|
||||
If you are not a member of the `fishtown-analytics` GitHub organization, you can contribute to dbt by forking the dbt repository. For a detailed overview on forking, check out the [GitHub docs on forking](https://help.github.com/en/articles/fork-a-repo). In short, you will need to:
|
||||
If you are not a member of the `dbt-labs` GitHub organization, you can contribute to `dbt-core` by forking the `dbt-core` repository. For a detailed overview on forking, check out the [GitHub docs on forking](https://help.github.com/en/articles/fork-a-repo). In short, you will need to:
|
||||
|
||||
1. fork the dbt repository
|
||||
2. clone your fork locally
|
||||
3. check out a new branch for your proposed changes
|
||||
4. push changes to your fork
|
||||
5. open a pull request against `fishtown-analytics/dbt` from your forked repository
|
||||
1. Fork the `dbt-core` repository
|
||||
2. Clone your fork locally
|
||||
3. Check out a new branch for your proposed changes
|
||||
4. Push changes to your fork
|
||||
5. Open a pull request against `dbt-labs/dbt-core` from your forked repository
|
||||
|
||||
### Core contributors
|
||||
### dbt Labs contributors
|
||||
|
||||
If you are a member of the `fishtown-analytics` GitHub organization, you will have push access to the dbt repo. Rather than
|
||||
forking dbt to make your changes, just clone the repository, check out a new branch, and push directly to that branch.
|
||||
If you are a member of the `dbt-labs` GitHub organization, you will have push access to the `dbt-core` repo. Rather than forking `dbt-core` to make your changes, just clone the repository, check out a new branch, and push directly to that branch.
|
||||
|
||||
## Setting up an environment
|
||||
|
||||
There are some tools that will be helpful to you in developing locally. While this is the list relevant for dbt development, many of these tools are used commonly across open-source python projects.
|
||||
There are some tools that will be helpful to you in developing locally. While this is the list relevant for `dbt-core` development, many of these tools are used commonly across open-source python projects.
|
||||
|
||||
### Tools
|
||||
|
||||
A short list of tools used in dbt testing that will be helpful to your understanding:
|
||||
These are the tools used in `dbt-core` development and testing:
|
||||
|
||||
- [virtualenv](https://virtualenv.pypa.io/en/stable/) to manage dependencies
|
||||
- [tox](https://tox.readthedocs.io/en/latest/) to manage virtualenvs across python versions
|
||||
- [pytest](https://docs.pytest.org/en/latest/) to discover/run tests
|
||||
- [make](https://users.cs.duke.edu/~ola/courses/programming/Makefiles/Makefiles.html) - but don't worry too much, nobody _really_ understands how make works and our Makefile is super simple
|
||||
- [flake8](https://gitlab.com/pycqa/flake8) for code linting
|
||||
- [CircleCI](https://circleci.com/product/) and [Azure Pipelines](https://azure.microsoft.com/en-us/services/devops/pipelines/)
|
||||
- [`tox`](https://tox.readthedocs.io/en/latest/) to manage virtualenvs across python versions. We currently target the latest patch releases for Python 3.7, 3.8, 3.9, and 3.10
|
||||
- [`pytest`](https://docs.pytest.org/en/latest/) to define, discover, and run tests
|
||||
- [`flake8`](https://flake8.pycqa.org/en/latest/) for code linting
|
||||
- [`black`](https://github.com/psf/black) for code formatting
|
||||
- [`mypy`](https://mypy.readthedocs.io/en/stable/) for static type checking
|
||||
- [`pre-commit`](https://pre-commit.com) to easily run those checks
|
||||
- [`changie`](https://changie.dev/) to create changelog entries, without merge conflicts
|
||||
- [`make`](https://users.cs.duke.edu/~ola/courses/programming/Makefiles/Makefiles.html) to run multiple setup or test steps in combination. Don't worry too much, nobody _really_ understands how `make` works, and our Makefile aims to be super simple.
|
||||
- [GitHub Actions](https://github.com/features/actions) for automating tests and checks, once a PR is pushed to the `dbt-core` repository
|
||||
|
||||
A deep understanding of these tools is not required to effectively contribute to dbt, but we recommend checking out the attached documentation if you're interested in learning more about them.
|
||||
A deep understanding of these tools is not required to effectively contribute to `dbt-core`, but we recommend checking out the attached documentation if you're interested in learning more about each one.
|
||||
|
||||
#### virtual environments
|
||||
#### Virtual environments
|
||||
|
||||
We strongly recommend using virtual environments when developing code in dbt. We recommend creating this virtualenv
|
||||
in the root of the dbt repository. To create a new virtualenv, run:
|
||||
```
|
||||
We strongly recommend using virtual environments when developing code in `dbt-core`. We recommend creating this virtualenv
|
||||
in the root of the `dbt-core` repository. To create a new virtualenv, run:
|
||||
```sh
|
||||
python3 -m venv env
|
||||
source env/bin/activate
|
||||
```
|
||||
|
||||
This will create and activate a new Python virtual environment.
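
If you come back to this work in a new shell session, re-activate the environment first; `deactivate` exits it. For example:

```sh
# Leave the virtual environment when you're done
deactivate
# Re-activate it in any new shell session (run from the repository root)
source env/bin/activate
```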
|
||||
|
||||
#### docker and docker-compose
|
||||
#### Docker and `docker-compose`
|
||||
|
||||
Docker and docker-compose are both used in testing. Specific instructions for your OS can be found [here](https://docs.docker.com/get-docker/).
|
||||
Docker and `docker-compose` are both used in testing. Specific instructions for your OS can be found [here](https://docs.docker.com/get-docker/).
|
||||
|
||||
|
||||
#### postgres (optional)
|
||||
#### Postgres (optional)
|
||||
|
||||
For testing, and later in the examples in this document, you may want to have `psql` available so you can poke around in the database and see what happened. We recommend that you use [homebrew](https://brew.sh/) for that on macOS, and your package manager on Linux. You can install any version of the postgres client that you'd like. On macOS, with homebrew setup, you can run:
|
||||
|
||||
```
|
||||
```sh
|
||||
brew install postgresql
|
||||
```
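
Once the dockerized test database is running (see [Testing](#testing) below), you can use `psql` to poke around in it. This is only a sketch: the host, user, password, and database name below follow the `docker-compose` / `test/setup_db.sh` defaults referenced later in this guide, so adjust them if your setup differs.

```sh
# Connect to the local test database started via docker-compose.
# Credentials mirror the defaults used by test/setup_db.sh; adjust if yours differ.
PGPASSWORD=password psql -h localhost -U root -d dbt
```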
|
||||
|
||||
## Running dbt in development
|
||||
## Running `dbt-core` in development
|
||||
|
||||
### Installation
|
||||
|
||||
First make sure that you set up your `virtualenv` as described in section _Setting up an environment_. Next, install dbt (and its dependencies) with:
|
||||
First make sure that you set up your `virtualenv` as described in [Setting up an environment](#setting-up-an-environment). Also ensure you have the latest version of pip installed with `pip install --upgrade pip`. Next, install `dbt-core` (and its dependencies) with:
|
||||
|
||||
```
|
||||
pip install -r requirements-editable.txt
|
||||
```sh
|
||||
make dev
|
||||
# or
|
||||
pip install -r dev-requirements.txt -r editable-requirements.txt
|
||||
```
|
||||
|
||||
When dbt is installed from source in this way, any changes you make to the dbt source code will be reflected immediately in your next `dbt` run.
|
||||
When installed in this way, any changes you make to your local copy of the source code will be reflected immediately in your next `dbt` run.
|
||||
|
||||
### Running dbt
|
||||
### Running `dbt-core`
|
||||
|
||||
With your virtualenv activated, the `dbt` script should point back to the source code you've cloned on your machine. You can verify this by running `which dbt`. This command should show you a path to an executable in your virtualenv.
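
For example (the exact path will vary based on where you cloned the repository and what you named your virtualenv):

```sh
# Confirm that `dbt` resolves to the executable inside your virtualenv
which dbt
# e.g. /path/to/dbt-core/env/bin/dbt
# Then confirm the installed version reflects your local copy
dbt --version
```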
|
||||
|
||||
Configure your [profile](https://docs.getdbt.com/docs/configure-your-profile) as necessary to connect to your target databases. It may be a good idea to add a new profile pointing to a local postgres instance, or a specific test sandbox within your data warehouse if appropriate.
|
||||
Configure your [profile](https://docs.getdbt.com/docs/configure-your-profile) as necessary to connect to your target databases. It may be a good idea to add a new profile pointing to a local Postgres instance, or a specific test sandbox within your data warehouse if appropriate.
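
As a sketch, a profile for a local Postgres instance might look like the following. The profile name, schema, and credentials are placeholders (they happen to match the docker-compose test database described under [Testing](#testing)); adapt them to your own setup.

```sh
# Append a placeholder local-Postgres profile to ~/.dbt/profiles.yml
mkdir -p ~/.dbt
cat >> ~/.dbt/profiles.yml <<'EOF'
dbt_core_dev:
  target: dev
  outputs:
    dev:
      type: postgres
      host: localhost
      port: 5432
      user: root
      password: password
      dbname: dbt
      schema: dbt_dev
      threads: 4
EOF
```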
|
||||
|
||||
## Testing
|
||||
|
||||
Getting the dbt integration tests set up in your local environment will be very helpful as you start to make changes to your local version of dbt. The section that follows outlines some helpful tips for setting up the test environment.
|
||||
Once you're able to manually test that your code change is working as expected, it's important to run existing automated tests, as well as adding some new ones. These tests will ensure that:
|
||||
- Your code changes do not unexpectedly break other established functionality
|
||||
- Your code changes can handle all known edge cases
|
||||
- The functionality you're adding will _keep_ working in the future
|
||||
|
||||
### Running tests via Docker
|
||||
Although `dbt-core` works with a number of different databases, you won't need to supply credentials for every one of these databases in your test environment. Instead, you can test most `dbt-core` code changes with Python and Postgres.
|
||||
|
||||
dbt's unit and integration tests run in Docker. Because dbt works with a number of different databases, you will need to supply credentials for one or more of these databases in your test environment. Most organizations don't have access to each of a BigQuery, Redshift, Snowflake, and Postgres database, so it's likely that you will be unable to run every integration test locally. Fortunately, Fishtown Analytics provides a CI environment with access to sandboxed Redshift, Snowflake, BigQuery, and Postgres databases. See the section on [_Submitting a Pull Request_](#submitting-a-pull-request) below for more information on this CI setup.
|
||||
### Initial setup
|
||||
|
||||
Postgres offers the easiest way to test most `dbt-core` functionality today. These tests are the fastest to run, and the easiest to set up. To run the Postgres integration tests, you'll have to do one extra step of setting up the test database:
|
||||
|
||||
### Specifying your test credentials
|
||||
|
||||
dbt uses test credentials specified in a `test.env` file in the root of the repository. This `test.env` file is git-ignored, but please be _extra_ careful to never check in credentials or other sensitive information when developing against dbt. To create your `test.env` file, copy the provided sample file, then supply your relevant credentials:
|
||||
|
||||
```
|
||||
cp test.env.sample test.env
|
||||
```
|
||||
|
||||
We recommend starting with dbt's Postgres tests. These tests cover most of the functionality in dbt, are the fastest to run, and are the easiest to set up. dbt's test suite runs Postgres in a Docker container, so no setup should be required to run these tests.
|
||||
|
||||
If you additionally want to test Snowflake, BigQuery, or Redshift locally, you'll need to get credentials and add them to the `test.env` file. In general, it's most important to have successful unit and Postgres tests. Once you open a PR, dbt will automatically run integration tests for the other three core database adapters. Of course, if you are a BigQuery user contributing a BigQuery-only feature, it's important to run BigQuery tests as well.
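
For illustration only, entries in `test.env` look like the following. `test.env.sample` is the authoritative list of variable names (the names below mirror those used in this repository's CI configuration), and the values shown are placeholders.

```sh
# Placeholder credentials for test.env; never commit this file.
SNOWFLAKE_TEST_ACCOUNT=your_account
SNOWFLAKE_TEST_USER=your_user
SNOWFLAKE_TEST_PASSWORD=your_password
SNOWFLAKE_TEST_WAREHOUSE=your_warehouse
REDSHIFT_TEST_HOST=your-cluster.example.region.redshift.amazonaws.com
REDSHIFT_TEST_USER=your_user
REDSHIFT_TEST_PASS=your_password
REDSHIFT_TEST_DBNAME=your_database
REDSHIFT_TEST_PORT=5439
BIGQUERY_SERVICE_ACCOUNT_JSON='<contents of your service account keyfile>'
```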
|
||||
|
||||
### Test commands
|
||||
|
||||
dbt's unit tests and Python linter can be run with:
|
||||
|
||||
```
|
||||
make test-unit
|
||||
```
|
||||
|
||||
To run the Postgres + Python 3.6 integration tests, you'll have to do one extra step of setting up the test database:
|
||||
|
||||
```sh
|
||||
make setup-db
|
||||
```
|
||||
or, alternatively:
|
||||
```sh
|
||||
docker-compose up -d database
|
||||
PGHOST=localhost PGUSER=root PGPASSWORD=password PGDATABASE=postgres bash test/setup_db.sh
|
||||
```
|
||||
|
||||
To run a quick test for Python3 integration tests on Postgres, you can run:
|
||||
### Test commands
|
||||
|
||||
There are a few methods for running tests locally.
|
||||
|
||||
#### Makefile
|
||||
|
||||
There are multiple targets in the Makefile to run common test suites and code
|
||||
checks, most notably:
|
||||
|
||||
```sh
|
||||
# Runs unit tests with py38 and code checks in parallel.
|
||||
make test
|
||||
# Runs postgres integration tests with py38 in "fail fast" mode.
|
||||
make integration
|
||||
```
|
||||
make test-quick
|
||||
> These make targets assume you have a local installation of a recent version of [`tox`](https://tox.readthedocs.io/en/latest/) for unit/integration testing and pre-commit for code quality checks,
|
||||
> unless you choose to use a Docker container to run tests. Run `make help` for more info.
|
||||
|
||||
Check out the other targets in the Makefile to see other commonly used test
|
||||
suites.
|
||||
|
||||
#### `pre-commit`
|
||||
[`pre-commit`](https://pre-commit.com) takes care of running all code checks for formatting and linting. Run `make dev` to install `pre-commit` in your local environment. Once this is done, you can use any of the linter-based make targets, as well as a git pre-commit hook that will ensure proper formatting and linting.
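
For example:

```sh
# Install the git hook scripts so the checks run automatically on every `git commit`
pre-commit install
# Or run every configured check against all files on demand
pre-commit run --all-files
```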
|
||||
|
||||
#### `tox`
|
||||
|
||||
[`tox`](https://tox.readthedocs.io/en/latest/) takes care of managing virtualenvs and installing dependencies in order to run tests. You can also run tests in parallel; for example, you can run unit tests for Python 3.7, Python 3.8, Python 3.9, and Python 3.10 in parallel with `tox -p`. Also, you can run unit tests for specific Python versions with `tox -e py37`. The configuration for these tests is located in `tox.ini`.
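
For example:

```sh
# Run the configured unit-test environments in parallel
tox -p
# Run unit tests against a single Python version
tox -e py38
# List the environments defined in tox.ini
tox -l
```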
|
||||
|
||||
#### `pytest`
|
||||
|
||||
Finally, you can also run a specific test or group of tests using [`pytest`](https://docs.pytest.org/en/latest/) directly. With a virtualenv active and dev dependencies installed you can do things like:
|
||||
|
||||
```sh
|
||||
# run all unit tests in a file
|
||||
python3 -m pytest test/unit/test_graph.py
|
||||
# run a specific unit test
|
||||
python3 -m pytest test/unit/test_graph.py::GraphTest::test__dependency_list
|
||||
# run specific Postgres integration tests (old way)
|
||||
python3 -m pytest -m profile_postgres test/integration/074_postgres_unlogged_table_tests
|
||||
# run specific Postgres integration tests (new way)
|
||||
python3 -m pytest tests/functional/sources
|
||||
```
|
||||
|
||||
To run tests for a specific database, invoke `tox` directly with the required flags:
|
||||
```
|
||||
# Run Postgres py36 tests
|
||||
docker-compose run test tox -e integration-postgres-py36 -- -x
|
||||
> See [pytest usage docs](https://docs.pytest.org/en/6.2.x/usage.html) for an overview of useful command-line options.
|
||||
|
||||
# Run Snowflake py36 tests
|
||||
docker-compose run test tox -e integration-snowflake-py36 -- -x
|
||||
## Adding CHANGELOG Entry
|
||||
|
||||
# Run BigQuery py36 tests
|
||||
docker-compose run test tox -e integration-bigquery-py36 -- -x
|
||||
We use [changie](https://changie.dev) to generate `CHANGELOG` entries. **Note:** Do not edit the `CHANGELOG.md` directly. Your modifications will be lost.
|
||||
|
||||
# Run Redshift py36 tests
|
||||
docker-compose run test tox -e integration-redshift-py36 -- -x
|
||||
```
|
||||
Follow the steps to [install `changie`](https://changie.dev/guide/installation/) for your system.
|
||||
|
||||
To run a specific test by itself:
|
||||
```
|
||||
docker-compose run test tox -e explicit-py36 -- -s -x -m profile_{adapter} {path_to_test_file_or_folder}
|
||||
```
|
||||
E.g.
|
||||
```
|
||||
docker-compose run test tox -e explicit-py36 -- -s -x -m profile_snowflake test/integration/001_simple_copy_test
|
||||
```
|
||||
Once changie is installed and your PR is created, simply run `changie new` and changie will walk you through the process of creating a changelog entry. Commit the file that's created and your changelog entry is complete!
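
A typical flow looks like the sketch below. Note that the exact output location of the generated entry is configured in the repository's changie config, so check that file if the path shown here differs.

```sh
# Create a changelog entry; changie prompts for the kind of change and a short description
changie new
# Commit the generated entry alongside your code changes
# (changie writes it under the .changes/ directory by default; check the repo's .changie.yaml)
git add .changes/
git commit -m "Add changelog entry"
```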
|
||||
|
||||
See the `Makefile` contents for some other examples of ways to run `tox`.
|
||||
You don't need to worry about which `dbt-core` version your change will go into. Just create the changelog entry with `changie`, and open your PR against the `main` branch. All merged changes will be included in the next minor version of `dbt-core`. The Core maintainers _may_ choose to "backport" specific changes in order to patch older minor versions. In that case, a maintainer will take care of that backport after merging your PR, before releasing the new version of `dbt-core`.
|
||||
|
||||
## Submitting a Pull Request
|
||||
|
||||
Fishtown Analytics provides a sandboxed Redshift, Snowflake, and BigQuery database for use in a CI environment. When pull requests are submitted to the `fishtown-analytics/dbt` repo, GitHub will trigger automated tests in CircleCI and Azure Pipelines.
|
||||
A `dbt-core` maintainer will review your PR. They may suggest code revision for style or clarity, or request that you add unit or integration test(s). These are good things! We believe that, with a little bit of help, anyone can contribute high-quality code.
|
||||
|
||||
A dbt maintainer will review your PR. They may suggest code revision for style or clarity, or request that you add unit or integration test(s). These are good things! We believe that, with a little bit of help, anyone can contribute high-quality code.
|
||||
Automated tests run via GitHub Actions. If you're a first-time contributor, all tests (including code checks and unit tests) will require a maintainer to approve. Changes in the `dbt-core` repository trigger integration tests against Postgres. dbt Labs also provides CI environments in which to test changes to other adapters, triggered by PRs in those adapters' repositories, as well as periodic maintenance checks of each adapter in concert with the latest `dbt-core` code changes.
|
||||
|
||||
Once all tests are passing and your PR has been approved, a dbt maintainer will merge your changes into the active development branch. And that's it! Happy developing :tada:
|
||||
Once all tests are passing and your PR has been approved, a `dbt-core` maintainer will merge your changes into the active development branch. And that's it! Happy developing :tada:
|
||||
|
||||
@@ -1,8 +1,16 @@
|
||||
FROM ubuntu:18.04
|
||||
##
|
||||
# This dockerfile is used for local development and adapter testing only.
|
||||
# See `/docker` for a generic and production-ready docker file
|
||||
##
|
||||
|
||||
FROM ubuntu:22.04
|
||||
|
||||
ENV DEBIAN_FRONTEND noninteractive
|
||||
|
||||
RUN apt-get update \
|
||||
&& apt-get install -y --no-install-recommends \
|
||||
software-properties-common \
|
||||
&& add-apt-repository ppa:git-core/ppa -y \
|
||||
&& apt-get dist-upgrade -y \
|
||||
&& apt-get install -y --no-install-recommends \
|
||||
netcat \
|
||||
@@ -24,7 +32,7 @@ RUN apt-get update \
|
||||
&& apt-get install -y \
|
||||
python \
|
||||
python-dev \
|
||||
python-pip \
|
||||
python3-pip \
|
||||
python3.6 \
|
||||
python3.6-dev \
|
||||
python3-pip \
|
||||
@@ -38,6 +46,9 @@ RUN apt-get update \
|
||||
python3.9 \
|
||||
python3.9-dev \
|
||||
python3.9-venv \
|
||||
python3.10 \
|
||||
python3.10-dev \
|
||||
python3.10-venv \
|
||||
&& apt-get clean \
|
||||
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
|
||||
|
||||
@@ -46,9 +57,7 @@ RUN curl -LO https://github.com/jwilder/dockerize/releases/download/$DOCKERIZE_V
|
||||
&& tar -C /usr/local/bin -xzvf dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
|
||||
&& rm dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz
|
||||
|
||||
RUN pip3 install -U "tox==3.14.4" wheel "six>=1.14.0,<1.15.0" "virtualenv==20.0.3" setuptools
|
||||
# tox fails if the 'python' interpreter (python2) doesn't have `tox` installed
|
||||
RUN pip install -U "tox==3.14.4" "six>=1.14.0,<1.15.0" "virtualenv==20.0.3" setuptools
|
||||
RUN pip3 install -U tox wheel six setuptools
|
||||
|
||||
# These args are passed in via docker-compose, which reads them from the .env file.
|
||||
# On Linux, run `make .env` to create the .env file for the current user.
|
||||
|
||||
@@ -186,7 +186,7 @@
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright {yyyy} {name of copyright owner}
|
||||
Copyright 2021 dbt Labs, Inc.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
|
||||
Makefile (116 changed lines)
@@ -1,24 +1,69 @@
|
||||
.PHONY: install test test-unit test-integration
|
||||
.DEFAULT_GOAL:=help
|
||||
|
||||
test: .env
|
||||
@echo "Full test run starting..."
|
||||
@time docker-compose run --rm test tox
|
||||
# Optional flag to run target in a docker container.
|
||||
# (example `make test USE_DOCKER=true`)
|
||||
ifeq ($(USE_DOCKER),true)
|
||||
DOCKER_CMD := docker-compose run --rm test
|
||||
endif
|
||||
|
||||
test-unit: .env
|
||||
@echo "Unit test run starting..."
|
||||
@time docker-compose run --rm test tox -e unit-py36,flake8
|
||||
.PHONY: dev
|
||||
dev: ## Installs dbt-* packages in develop mode along with development dependencies.
|
||||
@\
|
||||
pip install -r dev-requirements.txt -r editable-requirements.txt
|
||||
|
||||
test-integration: .env
|
||||
@echo "Integration test run starting..."
|
||||
@time docker-compose run --rm test tox -e integration-postgres-py36,integration-redshift-py36,integration-snowflake-py36,integration-bigquery-py36
|
||||
.PHONY: mypy
|
||||
mypy: .env ## Runs mypy against staged changes for static type checking.
|
||||
@\
|
||||
$(DOCKER_CMD) pre-commit run --hook-stage manual mypy-check | grep -v "INFO"
|
||||
|
||||
test-quick: .env
|
||||
@echo "Integration test run starting, will exit on first failure..."
|
||||
@time docker-compose run --rm test tox -e integration-postgres-py36 -- -x
|
||||
.PHONY: flake8
|
||||
flake8: .env ## Runs flake8 against staged changes to enforce style guide.
|
||||
@\
|
||||
$(DOCKER_CMD) pre-commit run --hook-stage manual flake8-check | grep -v "INFO"
|
||||
|
||||
.PHONY: black
|
||||
black: .env ## Runs black against staged changes to enforce style guide.
|
||||
@\
|
||||
$(DOCKER_CMD) pre-commit run --hook-stage manual black-check -v | grep -v "INFO"
|
||||
|
||||
.PHONY: lint
|
||||
lint: .env ## Runs flake8 and mypy code checks against staged changes.
|
||||
@\
|
||||
$(DOCKER_CMD) pre-commit run flake8-check --hook-stage manual | grep -v "INFO"; \
|
||||
$(DOCKER_CMD) pre-commit run mypy-check --hook-stage manual | grep -v "INFO"
|
||||
|
||||
.PHONY: unit
|
||||
unit: .env ## Runs unit tests with py
|
||||
@\
|
||||
$(DOCKER_CMD) tox -e py
|
||||
|
||||
.PHONY: test
|
||||
test: .env ## Runs unit tests with py and code checks against staged changes.
|
||||
@\
|
||||
$(DOCKER_CMD) tox -e py; \
|
||||
$(DOCKER_CMD) pre-commit run black-check --hook-stage manual | grep -v "INFO"; \
|
||||
$(DOCKER_CMD) pre-commit run flake8-check --hook-stage manual | grep -v "INFO"; \
|
||||
$(DOCKER_CMD) pre-commit run mypy-check --hook-stage manual | grep -v "INFO"
|
||||
|
||||
.PHONY: integration
|
||||
integration: .env ## Runs postgres integration tests with py-integration
|
||||
@\
|
||||
$(DOCKER_CMD) tox -e py-integration -- -nauto
|
||||
|
||||
.PHONY: integration-fail-fast
|
||||
integration-fail-fast: .env ## Runs postgres integration tests with py-integration in "fail fast" mode.
|
||||
@\
|
||||
$(DOCKER_CMD) tox -e py-integration -- -x -nauto
|
||||
|
||||
.PHONY: setup-db
|
||||
setup-db: ## Setup Postgres database with docker-compose for system testing.
|
||||
@\
|
||||
docker-compose up -d database && \
|
||||
PGHOST=localhost PGUSER=root PGPASSWORD=password PGDATABASE=postgres bash test/setup_db.sh
|
||||
|
||||
# This rule creates a file named .env that is used by docker-compose for passing
|
||||
# the USER_ID and GROUP_ID arguments to the Docker image.
|
||||
.env:
|
||||
.env: ## Setup step for using using docker-compose with make target.
|
||||
@touch .env
|
||||
ifneq ($(OS),Windows_NT)
|
||||
ifneq ($(shell uname -s), Darwin)
|
||||
@@ -26,19 +71,32 @@ ifneq ($(shell uname -s), Darwin)
|
||||
@echo GROUP_ID=$(shell id -g) >> .env
|
||||
endif
|
||||
endif
|
||||
@time docker-compose build
|
||||
|
||||
clean:
|
||||
rm -f .coverage
|
||||
rm -rf .eggs/
|
||||
rm -f .env
|
||||
rm -rf .tox/
|
||||
rm -rf build/
|
||||
rm -rf dbt.egg-info/
|
||||
rm -f dbt_project.yml
|
||||
rm -rf dist/
|
||||
rm -f htmlcov/*.{css,html,js,json,png}
|
||||
rm -rf logs/
|
||||
rm -rf target/
|
||||
find . -type f -name '*.pyc' -delete
|
||||
find . -type d -name '__pycache__' -depth -delete
|
||||
.PHONY: clean
|
||||
clean: ## Resets development environment.
|
||||
@echo 'cleaning repo...'
|
||||
@rm -f .coverage
|
||||
@rm -rf .eggs/
|
||||
@rm -f .env
|
||||
@rm -rf .tox/
|
||||
@rm -rf build/
|
||||
@rm -rf dbt.egg-info/
|
||||
@rm -f dbt_project.yml
|
||||
@rm -rf dist/
|
||||
@rm -f htmlcov/*.{css,html,js,json,png}
|
||||
@rm -rf logs/
|
||||
@rm -rf target/
|
||||
@find . -type f -name '*.pyc' -delete
|
||||
@find . -type d -name '__pycache__' -depth -delete
|
||||
@echo 'done.'
|
||||
|
||||
|
||||
.PHONY: help
|
||||
help: ## Show this help message.
|
||||
@echo 'usage: make [target] [USE_DOCKER=true]'
|
||||
@echo
|
||||
@echo 'targets:'
|
||||
@grep -E '^[8+a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
|
||||
@echo
|
||||
@echo 'options:'
|
||||
@echo 'use USE_DOCKER=true to run target in a docker container'
|
||||
|
||||
README.md (45 changed lines)
@@ -1,28 +1,15 @@
|
||||
<p align="center">
|
||||
<img src="https://raw.githubusercontent.com/fishtown-analytics/dbt/6c6649f9129d5d108aa3b0526f634cd8f3a9d1ed/etc/dbt-logo-full.svg" alt="dbt logo" width="500"/>
|
||||
<img src="https://raw.githubusercontent.com/dbt-labs/dbt-core/fa1ea14ddfb1d5ae319d5141844910dd53ab2834/etc/dbt-core.svg" alt="dbt logo" width="750"/>
|
||||
</p>
|
||||
<p align="center">
|
||||
<a href="https://codeclimate.com/github/fishtown-analytics/dbt">
|
||||
<img src="https://codeclimate.com/github/fishtown-analytics/dbt/badges/gpa.svg" alt="Code Climate"/>
|
||||
</a>
|
||||
<a href="https://circleci.com/gh/fishtown-analytics/dbt/tree/master">
|
||||
<img src="https://circleci.com/gh/fishtown-analytics/dbt/tree/master.svg?style=svg" alt="CircleCI" />
|
||||
</a>
|
||||
<a href="https://ci.appveyor.com/project/DrewBanin/dbt/branch/development">
|
||||
<img src="https://ci.appveyor.com/api/projects/status/v01rwd3q91jnwp9m/branch/development?svg=true" alt="AppVeyor" />
|
||||
</a>
|
||||
<a href="https://community.getdbt.com">
|
||||
<img src="https://community.getdbt.com/badge.svg" alt="Slack" />
|
||||
<a href="https://github.com/dbt-labs/dbt-core/actions/workflows/main.yml">
|
||||
<img src="https://github.com/dbt-labs/dbt-core/actions/workflows/main.yml/badge.svg?event=push" alt="CI Badge"/>
|
||||
</a>
|
||||
</p>
|
||||
|
||||
**[dbt](https://www.getdbt.com/)** (data build tool) enables data analysts and engineers to transform their data using the same practices that software engineers use to build applications.
|
||||
**[dbt](https://www.getdbt.com/)** enables data analysts and engineers to transform their data using the same practices that software engineers use to build applications.
|
||||
|
||||
dbt is the T in ELT. Organize, cleanse, denormalize, filter, rename, and pre-aggregate the raw data in your warehouse so that it's ready for analysis.
|
||||
|
||||

|
||||
|
||||
dbt can be used to [aggregate pageviews into sessions](https://github.com/fishtown-analytics/snowplow), calculate [ad spend ROI](https://github.com/fishtown-analytics/facebook-ads), or report on [email campaign performance](https://github.com/fishtown-analytics/mailchimp).
|
||||

|
||||
|
||||
## Understanding dbt
|
||||
|
||||
@@ -30,28 +17,22 @@ Analysts using dbt can transform their data by simply writing select statements,
|
||||
|
||||
These select statements, or "models", form a dbt project. Models frequently build on top of one another – dbt makes it easy to [manage relationships](https://docs.getdbt.com/docs/ref) between models, and [visualize these relationships](https://docs.getdbt.com/docs/documentation), as well as assure the quality of your transformations through [testing](https://docs.getdbt.com/docs/testing).
|
||||
|
||||

|
||||

|
||||
|
||||
## Getting started
|
||||
|
||||
- [Install dbt](https://docs.getdbt.com/docs/installation)
|
||||
- Read the [documentation](https://docs.getdbt.com/).
|
||||
- Productionize your dbt project with [dbt Cloud](https://www.getdbt.com)
|
||||
- [Install dbt](https://docs.getdbt.com/docs/installation)
|
||||
- Read the [introduction](https://docs.getdbt.com/docs/introduction/) and [viewpoint](https://docs.getdbt.com/docs/about/viewpoint/)
|
||||
|
||||
## Find out more
|
||||
## Join the dbt Community
|
||||
|
||||
- Check out the [Introduction to dbt](https://docs.getdbt.com/docs/introduction/).
|
||||
- Read the [dbt Viewpoint](https://docs.getdbt.com/docs/about/viewpoint/).
|
||||
|
||||
## Join thousands of analysts in the dbt community
|
||||
|
||||
- Join the [chat](http://community.getdbt.com/) on Slack.
|
||||
- Find community posts on [dbt Discourse](https://discourse.getdbt.com).
|
||||
- Be part of the conversation in the [dbt Community Slack](http://community.getdbt.com/)
|
||||
- Read more on the [dbt Community Discourse](https://discourse.getdbt.com)
|
||||
|
||||
## Reporting bugs and contributing code
|
||||
|
||||
- Want to report a bug or request a feature? Let us know on [Slack](http://community.getdbt.com/), or open [an issue](https://github.com/fishtown-analytics/dbt/issues/new).
|
||||
- Want to help us build dbt? Check out the [Contributing Getting Started Guide](https://github.com/fishtown-analytics/dbt/blob/HEAD/CONTRIBUTING.md)
|
||||
- Want to report a bug or request a feature? Let us know on [Slack](http://community.getdbt.com/), or open [an issue](https://github.com/dbt-labs/dbt-core/issues/new)
|
||||
- Want to help us build dbt? Check out the [Contributing Guide](https://github.com/dbt-labs/dbt-core/blob/HEAD/CONTRIBUTING.md)
|
||||
|
||||
## Code of Conduct
|
||||
|
||||
|
||||
@@ -1,154 +0,0 @@
|
||||
# Python package
|
||||
# Create and test a Python package on multiple Python versions.
|
||||
# Add steps that analyze code, save the dist with the build record, publish to a PyPI-compatible index, and more:
|
||||
# https://docs.microsoft.com/azure/devops/pipelines/languages/python
|
||||
|
||||
trigger:
|
||||
branches:
|
||||
include:
|
||||
- master
|
||||
- dev/*
|
||||
- pr/*
|
||||
|
||||
jobs:
|
||||
- job: UnitTest
|
||||
pool:
|
||||
vmImage: 'vs2017-win2016'
|
||||
steps:
|
||||
- task: UsePythonVersion@0
|
||||
inputs:
|
||||
versionSpec: '3.7'
|
||||
architecture: 'x64'
|
||||
|
||||
- script: python -m pip install --upgrade pip && pip install tox
|
||||
displayName: 'Install dependencies'
|
||||
|
||||
- script: python -m tox -e pywin-unit
|
||||
displayName: Run unit tests
|
||||
|
||||
- job: PostgresIntegrationTest
|
||||
pool:
|
||||
vmImage: 'vs2017-win2016'
|
||||
dependsOn: UnitTest
|
||||
|
||||
steps:
|
||||
- pwsh: |
|
||||
$serviceName = Get-Service -Name postgresql*
|
||||
Set-Service -InputObject $serviceName -StartupType Automatic
|
||||
Start-Service -InputObject $serviceName
|
||||
|
||||
& $env:PGBIN\createdb.exe -U postgres dbt
|
||||
& $env:PGBIN\psql.exe -U postgres -c "CREATE ROLE root WITH PASSWORD 'password';"
|
||||
& $env:PGBIN\psql.exe -U postgres -c "ALTER ROLE root WITH LOGIN;"
|
||||
& $env:PGBIN\psql.exe -U postgres -c "GRANT CREATE, CONNECT ON DATABASE dbt TO root WITH GRANT OPTION;"
|
||||
& $env:PGBIN\psql.exe -U postgres -c "CREATE ROLE noaccess WITH PASSWORD 'password' NOSUPERUSER;"
|
||||
& $env:PGBIN\psql.exe -U postgres -c "ALTER ROLE noaccess WITH LOGIN;"
|
||||
& $env:PGBIN\psql.exe -U postgres -c "GRANT CONNECT ON DATABASE dbt TO noaccess;"
|
||||
displayName: Install postgresql and set up database
|
||||
|
||||
- task: UsePythonVersion@0
|
||||
inputs:
|
||||
versionSpec: '3.7'
|
||||
architecture: 'x64'
|
||||
|
||||
- script: python -m pip install --upgrade pip && pip install tox
|
||||
displayName: 'Install dependencies'
|
||||
|
||||
- script: python -m tox -e pywin-postgres
|
||||
displayName: Run integration tests
|
||||
|
||||
# These three are all similar except secure environment variables, which MUST be passed along to their tasks,
|
||||
# but there's probably a better way to do this!
|
||||
- job: SnowflakeIntegrationTest
|
||||
pool:
|
||||
vmImage: 'vs2017-win2016'
|
||||
dependsOn: PostgresIntegrationTest
|
||||
condition: succeeded()
|
||||
steps:
|
||||
- task: UsePythonVersion@0
|
||||
inputs:
|
||||
versionSpec: '3.7'
|
||||
architecture: 'x64'
|
||||
|
||||
- script: python -m pip install --upgrade pip && pip install tox
|
||||
displayName: 'Install dependencies'
|
||||
|
||||
- script: python -m tox -e pywin-snowflake
|
||||
env:
|
||||
SNOWFLAKE_TEST_ACCOUNT: $(SNOWFLAKE_TEST_ACCOUNT)
|
||||
SNOWFLAKE_TEST_PASSWORD: $(SNOWFLAKE_TEST_PASSWORD)
|
||||
SNOWFLAKE_TEST_USER: $(SNOWFLAKE_TEST_USER)
|
||||
SNOWFLAKE_TEST_WAREHOUSE: $(SNOWFLAKE_TEST_WAREHOUSE)
|
||||
SNOWFLAKE_TEST_OAUTH_REFRESH_TOKEN: $(SNOWFLAKE_TEST_OAUTH_REFRESH_TOKEN)
|
||||
SNOWFLAKE_TEST_OAUTH_CLIENT_ID: $(SNOWFLAKE_TEST_OAUTH_CLIENT_ID)
|
||||
SNOWFLAKE_TEST_OAUTH_CLIENT_SECRET: $(SNOWFLAKE_TEST_OAUTH_CLIENT_SECRET)
|
||||
displayName: Run integration tests
|
||||
|
||||
- job: BigQueryIntegrationTest
|
||||
pool:
|
||||
vmImage: 'vs2017-win2016'
|
||||
dependsOn: PostgresIntegrationTest
|
||||
condition: succeeded()
|
||||
steps:
|
||||
- task: UsePythonVersion@0
|
||||
inputs:
|
||||
versionSpec: '3.7'
|
||||
architecture: 'x64'
|
||||
- script: python -m pip install --upgrade pip && pip install tox
|
||||
displayName: 'Install dependencies'
|
||||
- script: python -m tox -e pywin-bigquery
|
||||
env:
|
||||
BIGQUERY_SERVICE_ACCOUNT_JSON: $(BIGQUERY_SERVICE_ACCOUNT_JSON)
|
||||
displayName: Run integration tests
|
||||
|
||||
- job: RedshiftIntegrationTest
|
||||
pool:
|
||||
vmImage: 'vs2017-win2016'
|
||||
dependsOn: PostgresIntegrationTest
|
||||
condition: succeeded()
|
||||
steps:
|
||||
- task: UsePythonVersion@0
|
||||
inputs:
|
||||
versionSpec: '3.7'
|
||||
architecture: 'x64'
|
||||
|
||||
- script: python -m pip install --upgrade pip && pip install tox
|
||||
displayName: 'Install dependencies'
|
||||
|
||||
- script: python -m tox -e pywin-redshift
|
||||
env:
|
||||
REDSHIFT_TEST_DBNAME: $(REDSHIFT_TEST_DBNAME)
|
||||
REDSHIFT_TEST_PASS: $(REDSHIFT_TEST_PASS)
|
||||
REDSHIFT_TEST_USER: $(REDSHIFT_TEST_USER)
|
||||
REDSHIFT_TEST_PORT: $(REDSHIFT_TEST_PORT)
|
||||
REDSHIFT_TEST_HOST: $(REDSHIFT_TEST_HOST)
|
||||
displayName: Run integration tests
|
||||
|
||||
- job: BuildWheel
|
||||
pool:
|
||||
vmImage: 'vs2017-win2016'
|
||||
dependsOn:
|
||||
- UnitTest
|
||||
- PostgresIntegrationTest
|
||||
- RedshiftIntegrationTest
|
||||
- SnowflakeIntegrationTest
|
||||
- BigQueryIntegrationTest
|
||||
condition: succeeded()
|
||||
steps:
|
||||
- task: UsePythonVersion@0
|
||||
inputs:
|
||||
versionSpec: '3.7'
|
||||
architecture: 'x64'
|
||||
- script: python -m pip install --upgrade pip setuptools && python -m pip install -r requirements.txt && python -m pip install -r requirements-dev.txt
|
||||
displayName: Install dependencies
|
||||
- task: ShellScript@2
|
||||
inputs:
|
||||
scriptPath: scripts/build-wheels.sh
|
||||
- task: CopyFiles@2
|
||||
inputs:
|
||||
contents: 'dist\?(*.whl|*.tar.gz)'
|
||||
TargetFolder: '$(Build.ArtifactStagingDirectory)'
|
||||
- task: PublishBuildArtifacts@1
|
||||
inputs:
|
||||
pathtoPublish: '$(Build.ArtifactStagingDirectory)'
|
||||
artifactName: dists
|
||||
converter.py (deleted, 75 lines)
@@ -1,75 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
import json
|
||||
import yaml
|
||||
import sys
|
||||
import argparse
|
||||
from datetime import datetime, timezone
|
||||
import dbt.clients.registry as registry
|
||||
|
||||
|
||||
def yaml_type(fname):
|
||||
with open(fname) as f:
|
||||
return yaml.load(f)
|
||||
|
||||
|
||||
def parse_args():
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("--project", type=yaml_type, default="dbt_project.yml")
|
||||
parser.add_argument("--namespace", required=True)
|
||||
return parser.parse_args()
|
||||
|
||||
|
||||
def get_full_name(args):
|
||||
return "{}/{}".format(args.namespace, args.project["name"])
|
||||
|
||||
|
||||
def init_project_in_packages(args, packages):
|
||||
full_name = get_full_name(args)
|
||||
if full_name not in packages:
|
||||
packages[full_name] = {
|
||||
"name": args.project["name"],
|
||||
"namespace": args.namespace,
|
||||
"latest": args.project["version"],
|
||||
"assets": {},
|
||||
"versions": {},
|
||||
}
|
||||
return packages[full_name]
|
||||
|
||||
|
||||
def add_version_to_package(args, project_json):
|
||||
project_json["versions"][args.project["version"]] = {
|
||||
"id": "{}/{}".format(get_full_name(args), args.project["version"]),
|
||||
"name": args.project["name"],
|
||||
"version": args.project["version"],
|
||||
"description": "",
|
||||
"published_at": datetime.now(timezone.utc).astimezone().isoformat(),
|
||||
"packages": args.project.get("packages") or [],
|
||||
"works_with": [],
|
||||
"_source": {
|
||||
"type": "github",
|
||||
"url": "",
|
||||
"readme": "",
|
||||
},
|
||||
"downloads": {
|
||||
"tarball": "",
|
||||
"format": "tgz",
|
||||
"sha1": "",
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
def main():
|
||||
args = parse_args()
|
||||
packages = registry.packages()
|
||||
project_json = init_project_in_packages(args, packages)
|
||||
if args.project["version"] in project_json["versions"]:
|
||||
raise Exception(
|
||||
"Version {} already in packages JSON".format(args.project["version"]),
|
||||
file=sys.stderr,
|
||||
)
|
||||
add_version_to_package(args, project_json)
|
||||
print(json.dumps(packages, indent=2))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -1 +1 @@
recursive-include dbt/include *.py *.sql *.yml *.html *.md
recursive-include dbt/include *.py *.sql *.yml *.html *.md .gitkeep .gitignore
39 core/README.md Normal file
@@ -0,0 +1,39 @@
<p align="center">
  <img src="https://raw.githubusercontent.com/dbt-labs/dbt-core/fa1ea14ddfb1d5ae319d5141844910dd53ab2834/etc/dbt-core.svg" alt="dbt logo" width="750"/>
</p>
<p align="center">
  <a href="https://github.com/dbt-labs/dbt-core/actions/workflows/main.yml">
    <img src="https://github.com/dbt-labs/dbt-core/actions/workflows/main.yml/badge.svg?event=push" alt="CI Badge"/>
  </a>
</p>

**[dbt](https://www.getdbt.com/)** enables data analysts and engineers to transform their data using the same practices that software engineers use to build applications.



## Understanding dbt

Analysts using dbt can transform their data by simply writing select statements, while dbt handles turning these statements into tables and views in a data warehouse.

These select statements, or "models", form a dbt project. Models frequently build on top of one another – dbt makes it easy to [manage relationships](https://docs.getdbt.com/docs/ref) between models, and [visualize these relationships](https://docs.getdbt.com/docs/documentation), as well as assure the quality of your transformations through [testing](https://docs.getdbt.com/docs/testing).



## Getting started

- [Install dbt](https://docs.getdbt.com/docs/installation)
- Read the [introduction](https://docs.getdbt.com/docs/introduction/) and [viewpoint](https://docs.getdbt.com/docs/about/viewpoint/)

## Join the dbt Community

- Be part of the conversation in the [dbt Community Slack](http://community.getdbt.com/)
- Read more on the [dbt Community Discourse](https://discourse.getdbt.com)

## Reporting bugs and contributing code

- Want to report a bug or request a feature? Let us know on [Slack](http://community.getdbt.com/), or open [an issue](https://github.com/dbt-labs/dbt-core/issues/new)
- Want to help us build dbt? Check out the [Contributing Guide](https://github.com/dbt-labs/dbt-core/blob/HEAD/CONTRIBUTING.md)

## Code of Conduct

Everyone interacting in the dbt project's codebases, issue trackers, chat rooms, and mailing lists is expected to follow the [dbt Code of Conduct](https://community.getdbt.com/code-of-conduct).
51 core/dbt/README.md Normal file
@@ -0,0 +1,51 @@
# core/dbt directory README

## The following are individual files in this directory.

### deprecations.py

### flags.py

### main.py

### tracking.py

### version.py

### lib.py

### node_types.py

### helper_types.py

### links.py

### semver.py

### ui.py

### compilation.py

### dataclass_schema.py

### exceptions.py

### hooks.py

### logger.py

### profiler.py

### utils.py


## The subdirectories will be documented in a README in the subdirectory
* config
* include
* adapters
* context
* deps
* graph
* task
* clients
* events
7 core/dbt/__init__.py Normal file
@@ -0,0 +1,7 @@
# N.B.
# This will add to the package’s __path__ all subdirectories of directories on sys.path named after the package which effectively combines both modules into a single namespace (dbt.adapters)
# The matching statement is in plugins/postgres/dbt/__init__.py

from pkgutil import extend_path

__path__ = extend_path(__path__, __name__)
30 core/dbt/adapters/README.md Normal file
@@ -0,0 +1,30 @@
# Adapters README

The adapters module is responsible for defining database connection methods, caching information from databases, defining how relations are represented, and providing the two major connection types we have: base and sql.

# Directories

## `base`

Defines the base implementation adapters can use to build out full functionality.

## `sql`

Defines a SQL implementation for adapters that inherits the base implementation above and comes with premade methods and macros that can be overridden as needed per adapter (the most common type of adapter).

# Files

## `cache.py`

Caches information from the database.

## `factory.py`

Defines how we generate adapter objects.

## `protocol.py`

Defines interfaces for the various adapter objects. Helps mypy resolve methods correctly.

## `reference_keys.py`

Configures the naming scheme for cache elements so that it is universal.
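To make the split between `base` and `sql` concrete, here is a rough sketch of what a minimal adapter built on the `sql` layer might look like. It is an illustration only: the names `MyAdapter`, `MyConnectionManager`, and the `"mydb"` type are hypothetical, and the exact set of required overrides depends on the dbt-core version.

```python
from dbt.adapters.sql import SQLAdapter, SQLConnectionManager


class MyConnectionManager(SQLConnectionManager):
    TYPE = "mydb"  # hypothetical adapter type name

    @classmethod
    def open(cls, connection):
        # A real adapter builds a DB-API handle from connection.credentials
        # here (often via retry_connection) and marks the connection open.
        raise NotImplementedError("sketch only")


class MyAdapter(SQLAdapter):
    # The sql layer supplies default type conversions and macros; a minimal
    # adapter mostly wires up its connection manager and a few constants.
    ConnectionManager = MyConnectionManager

    @classmethod
    def date_function(cls) -> str:
        # SQL expression this database uses for the current timestamp.
        return "now()"
```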
7 core/dbt/adapters/__init__.py Normal file
@@ -0,0 +1,7 @@
# N.B.
# This will add to the package’s __path__ all subdirectories of directories on sys.path named after the package which effectively combines both modules into a single namespace (dbt.adapters)
# The matching statement is in plugins/postgres/dbt/adapters/__init__.py

from pkgutil import extend_path

__path__ = extend_path(__path__, __name__)
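As a quick check of what the `extend_path` call above achieves: once a plugin that ships its own `dbt/adapters/__init__.py` (for example dbt-postgres) is installed alongside dbt-core, the namespace spans both installations. A minimal sketch, assuming such a plugin is present:

```python
import dbt.adapters

# With only dbt-core installed this lists a single directory; with a plugin
# installed as well, extend_path appends the plugin's dbt/adapters directory,
# so submodules from both distributions import as dbt.adapters.<name>.
print(list(dbt.adapters.__path__))
```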
@@ -39,21 +39,14 @@ class Column:
    @property
    def data_type(self) -> str:
        if self.is_string():
            return Column.string_type(self.string_size())
            return self.string_type(self.string_size())
        elif self.is_numeric():
            return Column.numeric_type(
                self.dtype, self.numeric_precision, self.numeric_scale
            )
            return self.numeric_type(self.dtype, self.numeric_precision, self.numeric_scale)
        else:
            return self.dtype

    def is_string(self) -> bool:
        return self.dtype.lower() in [
            "text",
            "character varying",
            "character",
            "varchar",
        ]
        return self.dtype.lower() in ["text", "character varying", "character", "varchar"]

    def is_number(self):
        return any([self.is_integer(), self.is_numeric(), self.is_float()])
@@ -1,10 +1,24 @@
|
||||
import abc
|
||||
import os
|
||||
from time import sleep
|
||||
import sys
|
||||
|
||||
# multiprocessing.RLock is a function returning this type
|
||||
from multiprocessing.synchronize import RLock
|
||||
from threading import get_ident
|
||||
from typing import Dict, Tuple, Hashable, Optional, ContextManager, List, Union
|
||||
from typing import (
|
||||
Any,
|
||||
Dict,
|
||||
Tuple,
|
||||
Hashable,
|
||||
Optional,
|
||||
ContextManager,
|
||||
List,
|
||||
Type,
|
||||
Union,
|
||||
Iterable,
|
||||
Callable,
|
||||
)
|
||||
|
||||
import agate
|
||||
|
||||
@@ -21,9 +35,23 @@ from dbt.contracts.graph.manifest import Manifest
|
||||
from dbt.adapters.base.query_headers import (
|
||||
MacroQueryStringSetter,
|
||||
)
|
||||
from dbt.logger import GLOBAL_LOGGER as logger
|
||||
from dbt.events import AdapterLogger
|
||||
from dbt.events.functions import fire_event
|
||||
from dbt.events.types import (
|
||||
NewConnection,
|
||||
ConnectionReused,
|
||||
ConnectionLeftOpen,
|
||||
ConnectionLeftOpen2,
|
||||
ConnectionClosed,
|
||||
ConnectionClosed2,
|
||||
Rollback,
|
||||
RollbackFailed,
|
||||
)
|
||||
from dbt import flags
|
||||
|
||||
SleepTime = Union[int, float] # As taken by time.sleep.
|
||||
AdapterHandle = Any # Adapter connection handle objects can be any class.
|
||||
|
||||
|
||||
class BaseConnectionManager(metaclass=abc.ABCMeta):
|
||||
"""Methods to implement:
|
||||
@@ -60,9 +88,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
||||
key = self.get_thread_identifier()
|
||||
with self.lock:
|
||||
if key not in self.thread_connections:
|
||||
raise dbt.exceptions.InvalidConnectionException(
|
||||
key, list(self.thread_connections)
|
||||
)
|
||||
raise dbt.exceptions.InvalidConnectionException(key, list(self.thread_connections))
|
||||
return self.thread_connections[key]
|
||||
|
||||
def set_thread_connection(self, conn: Connection) -> None:
|
||||
@@ -141,20 +167,104 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
||||
if conn.name == conn_name and conn.state == "open":
|
||||
return conn
|
||||
|
||||
logger.debug('Acquiring new {} connection "{}".'.format(self.TYPE, conn_name))
|
||||
fire_event(NewConnection(conn_name=conn_name, conn_type=self.TYPE))
|
||||
|
||||
if conn.state == "open":
|
||||
logger.debug(
|
||||
"Re-using an available connection from the pool (formerly {}).".format(
|
||||
conn.name
|
||||
)
|
||||
)
|
||||
fire_event(ConnectionReused(conn_name=conn_name))
|
||||
else:
|
||||
conn.handle = LazyHandle(self.open)
|
||||
|
||||
conn.name = conn_name
|
||||
return conn
|
||||
|
||||
@classmethod
|
||||
def retry_connection(
|
||||
cls,
|
||||
connection: Connection,
|
||||
connect: Callable[[], AdapterHandle],
|
||||
logger: AdapterLogger,
|
||||
retryable_exceptions: Iterable[Type[Exception]],
|
||||
retry_limit: int = 1,
|
||||
retry_timeout: Union[Callable[[int], SleepTime], SleepTime] = 1,
|
||||
_attempts: int = 0,
|
||||
) -> Connection:
|
||||
"""Given a Connection, set its handle by calling connect.
|
||||
|
||||
The calls to connect will be retried up to retry_limit times to deal with transient
|
||||
connection errors. By default, one retry will be attempted if retryable_exceptions is set.
|
||||
|
||||
:param Connection connection: An instance of a Connection that needs a handle to be set,
|
||||
usually when attempting to open it.
|
||||
:param connect: A callable that returns the appropiate connection handle for a
|
||||
given adapter. This callable will be retried retry_limit times if a subclass of any
|
||||
Exception in retryable_exceptions is raised by connect.
|
||||
:type connect: Callable[[], AdapterHandle]
|
||||
:param AdapterLogger logger: A logger to emit messages on retry attempts or errors. When
|
||||
handling expected errors, we call debug, and call warning on unexpected errors or when
|
||||
all retry attempts have been exhausted.
|
||||
:param retryable_exceptions: An iterable of exception classes that if raised by
|
||||
connect should trigger a retry.
|
||||
:type retryable_exceptions: Iterable[Type[Exception]]
|
||||
:param int retry_limit: How many times to retry the call to connect. If this limit
|
||||
is exceeded before a successful call, a FailedToConnectException will be raised.
|
||||
Must be non-negative.
|
||||
:param retry_timeout: Time to wait between attempts to connect. Can also take a
|
||||
Callable that takes the number of attempts so far, beginning at 0, and returns an int
|
||||
or float to be passed to time.sleep.
|
||||
:type retry_timeout: Union[Callable[[int], SleepTime], SleepTime] = 1
|
||||
:param int _attempts: Parameter used to keep track of the number of attempts in calling the
|
||||
connect function across recursive calls. Passed as an argument to retry_timeout if it
|
||||
is a Callable. This parameter should not be set by the initial caller.
|
||||
:raises dbt.exceptions.FailedToConnectException: Upon exhausting all retry attempts without
|
||||
successfully acquiring a handle.
|
||||
:return: The given connection with its appropriate state and handle attributes set
|
||||
depending on whether we successfully acquired a handle or not.
|
||||
"""
|
||||
timeout = retry_timeout(_attempts) if callable(retry_timeout) else retry_timeout
|
||||
if timeout < 0:
|
||||
raise dbt.exceptions.FailedToConnectException(
|
||||
"retry_timeout cannot be negative or return a negative time."
|
||||
)
|
||||
|
||||
if retry_limit < 0 or retry_limit > sys.getrecursionlimit():
|
||||
# This guard is not perfect others may add to the recursion limit (e.g. built-ins).
|
||||
connection.handle = None
|
||||
connection.state = ConnectionState.FAIL
|
||||
raise dbt.exceptions.FailedToConnectException("retry_limit cannot be negative")
|
||||
|
||||
try:
|
||||
connection.handle = connect()
|
||||
connection.state = ConnectionState.OPEN
|
||||
return connection
|
||||
|
||||
except tuple(retryable_exceptions) as e:
|
||||
if retry_limit <= 0:
|
||||
connection.handle = None
|
||||
connection.state = ConnectionState.FAIL
|
||||
raise dbt.exceptions.FailedToConnectException(str(e))
|
||||
|
||||
logger.debug(
|
||||
f"Got a retryable error when attempting to open a {cls.TYPE} connection.\n"
|
||||
f"{retry_limit} attempts remaining. Retrying in {timeout} seconds.\n"
|
||||
f"Error:\n{e}"
|
||||
)
|
||||
|
||||
sleep(timeout)
|
||||
return cls.retry_connection(
|
||||
connection=connection,
|
||||
connect=connect,
|
||||
logger=logger,
|
||||
retry_limit=retry_limit - 1,
|
||||
retry_timeout=retry_timeout,
|
||||
retryable_exceptions=retryable_exceptions,
|
||||
_attempts=_attempts + 1,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
connection.handle = None
|
||||
connection.state = ConnectionState.FAIL
|
||||
raise dbt.exceptions.FailedToConnectException(str(e))
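A short usage sketch of `retry_connection` may help; it is not part of the diff. The connection manager, the sqlite3 stand-in for a warehouse driver, and the backoff choice are all assumptions for illustration; only the signature documented above is taken from the source.

```python
import sqlite3

from dbt.adapters.sql import SQLConnectionManager
from dbt.events import AdapterLogger

logger = AdapterLogger("MyAdapter")  # hypothetical adapter name


class MyConnectionManager(SQLConnectionManager):
    TYPE = "mydb"  # hypothetical

    @classmethod
    def open(cls, connection):
        if connection.state == "open":
            return connection

        def connect():
            # Stand-in for the real warehouse driver; an actual adapter would
            # build the handle from connection.credentials here.
            return sqlite3.connect(":memory:")

        # Retry transient failures up to 3 times with a linear 1s/2s/3s backoff.
        return cls.retry_connection(
            connection,
            connect=connect,
            logger=logger,
            retry_limit=3,
            retry_timeout=lambda attempt: attempt + 1,
            retryable_exceptions=(sqlite3.OperationalError,),
        )
```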
|
||||
|
||||
@abc.abstractmethod
|
||||
def cancel_open(self) -> Optional[List[str]]:
|
||||
"""Cancel all open connections on the adapter. (passable)"""
|
||||
@@ -162,7 +272,8 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
||||
"`cancel_open` is not implemented for this adapter!"
|
||||
)
|
||||
|
||||
@abc.abstractclassmethod
|
||||
@classmethod
|
||||
@abc.abstractmethod
|
||||
def open(cls, connection: Connection) -> Connection:
|
||||
"""Open the given connection on the adapter and return it.
|
||||
|
||||
@@ -172,9 +283,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
||||
This should be thread-safe, or hold the lock if necessary. The given
|
||||
connection should not be in either in_use or available.
|
||||
"""
|
||||
raise dbt.exceptions.NotImplementedException(
|
||||
"`open` is not implemented for this adapter!"
|
||||
)
|
||||
raise dbt.exceptions.NotImplementedException("`open` is not implemented for this adapter!")
|
||||
|
||||
def release(self) -> None:
|
||||
with self.lock:
|
||||
@@ -195,13 +304,9 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
||||
with self.lock:
|
||||
for connection in self.thread_connections.values():
|
||||
if connection.state not in {"closed", "init"}:
|
||||
logger.debug(
|
||||
"Connection '{}' was left open.".format(connection.name)
|
||||
)
|
||||
fire_event(ConnectionLeftOpen(conn_name=connection.name))
|
||||
else:
|
||||
logger.debug(
|
||||
"Connection '{}' was properly closed.".format(connection.name)
|
||||
)
|
||||
fire_event(ConnectionClosed(conn_name=connection.name))
|
||||
self.close(connection)
|
||||
|
||||
# garbage collect these connections
|
||||
@@ -227,52 +332,40 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
||||
try:
|
||||
connection.handle.rollback()
|
||||
except Exception:
|
||||
logger.debug("Failed to rollback {}".format(connection.name), exc_info=True)
|
||||
fire_event(RollbackFailed(conn_name=connection.name))
|
||||
|
||||
@classmethod
|
||||
def _close_handle(cls, connection: Connection) -> None:
|
||||
"""Perform the actual close operation."""
|
||||
# On windows, sometimes connection handles don't have a close() attr.
|
||||
if hasattr(connection.handle, "close"):
|
||||
logger.debug(f"On {connection.name}: Close")
|
||||
fire_event(ConnectionClosed2(conn_name=connection.name))
|
||||
connection.handle.close()
|
||||
else:
|
||||
logger.debug(f"On {connection.name}: No close available on handle")
|
||||
fire_event(ConnectionLeftOpen2(conn_name=connection.name))
|
||||
|
||||
@classmethod
|
||||
def _rollback(cls, connection: Connection) -> None:
|
||||
"""Roll back the given connection."""
|
||||
if flags.STRICT_MODE:
|
||||
if not isinstance(connection, Connection):
|
||||
raise dbt.exceptions.CompilerException(
|
||||
f"In _rollback, got {connection} - not a Connection!"
|
||||
)
|
||||
|
||||
if connection.transaction_open is False:
|
||||
raise dbt.exceptions.InternalException(
|
||||
f"Tried to rollback transaction on connection "
|
||||
f'"{connection.name}", but it does not have one open!'
|
||||
)
|
||||
|
||||
logger.debug(f"On {connection.name}: ROLLBACK")
|
||||
fire_event(Rollback(conn_name=connection.name))
|
||||
cls._rollback_handle(connection)
|
||||
|
||||
connection.transaction_open = False
|
||||
|
||||
@classmethod
|
||||
def close(cls, connection: Connection) -> Connection:
|
||||
if flags.STRICT_MODE:
|
||||
if not isinstance(connection, Connection):
|
||||
raise dbt.exceptions.CompilerException(
|
||||
f"In close, got {connection} - not a Connection!"
|
||||
)
|
||||
|
||||
# if the connection is in closed or init, there's nothing to do
|
||||
if connection.state in {ConnectionState.CLOSED, ConnectionState.INIT}:
|
||||
return connection
|
||||
|
||||
if connection.transaction_open and connection.handle:
|
||||
logger.debug("On {}: ROLLBACK".format(connection.name))
|
||||
fire_event(Rollback(conn_name=connection.name))
|
||||
cls._rollback_handle(connection)
|
||||
connection.transaction_open = False
|
||||
|
||||
@@ -295,15 +388,15 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
||||
@abc.abstractmethod
|
||||
def execute(
|
||||
self, sql: str, auto_begin: bool = False, fetch: bool = False
|
||||
) -> Tuple[Union[str, AdapterResponse], agate.Table]:
|
||||
) -> Tuple[AdapterResponse, agate.Table]:
|
||||
"""Execute the given SQL.
|
||||
|
||||
:param str sql: The sql to execute.
|
||||
:param bool auto_begin: If set, and dbt is not currently inside a
|
||||
transaction, automatically begin one.
|
||||
:param bool fetch: If set, fetch results.
|
||||
:return: A tuple of the status and the results (empty if fetch=False).
|
||||
:rtype: Tuple[Union[str, AdapterResponse], agate.Table]
|
||||
:return: A tuple of the query status and results (empty if fetch=False).
|
||||
:rtype: Tuple[AdapterResponse, agate.Table]
|
||||
"""
|
||||
raise dbt.exceptions.NotImplementedException(
|
||||
"`execute` is not implemented for this adapter!"
|
||||
|
||||
@@ -30,9 +30,7 @@ from dbt.exceptions import (
|
||||
NotImplementedException,
|
||||
RuntimeException,
|
||||
)
|
||||
from dbt import flags
|
||||
|
||||
from dbt import deprecations
|
||||
from dbt.adapters.protocol import (
|
||||
AdapterConfig,
|
||||
ConnectionManagerProtocol,
|
||||
@@ -43,8 +41,8 @@ from dbt.contracts.graph.compiled import CompileResultNode, CompiledSeedNode
|
||||
from dbt.contracts.graph.manifest import Manifest, MacroManifest
|
||||
from dbt.contracts.graph.parsed import ParsedSeedNode
|
||||
from dbt.exceptions import warn_or_error
|
||||
from dbt.node_types import NodeType
|
||||
from dbt.logger import GLOBAL_LOGGER as logger
|
||||
from dbt.events.functions import fire_event
|
||||
from dbt.events.types import CacheMiss, ListRelations
|
||||
from dbt.utils import filter_null_values, executor
|
||||
|
||||
from dbt.adapters.base.connections import Connection, AdapterResponse
|
||||
@@ -56,7 +54,7 @@ from dbt.adapters.base.relation import (
|
||||
SchemaSearchMap,
|
||||
)
|
||||
from dbt.adapters.base import Column as BaseColumn
|
||||
from dbt.adapters.cache import RelationsCache
|
||||
from dbt.adapters.cache import RelationsCache, _make_key
|
||||
|
||||
|
||||
SeedModel = Union[ParsedSeedNode, CompiledSeedNode]
|
||||
@@ -132,9 +130,15 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
methods are marked with a (passable) in their docstrings. Check docstrings
|
||||
for type information, etc.
|
||||
|
||||
To implement a macro, implement "${adapter_type}__${macro_name}". in the
|
||||
To implement a macro, implement "${adapter_type}__${macro_name}" in the
|
||||
adapter's internal project.
|
||||
|
||||
To invoke a method in an adapter macro, call it on the 'adapter' Jinja
|
||||
object using dot syntax.
|
||||
|
||||
To invoke a method in model code, add the @available decorator atop a method
|
||||
declaration. Methods are invoked as macros.
|
||||
|
||||
Methods:
|
||||
- exception_handler
|
||||
- date_function
|
||||
@@ -155,6 +159,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
- convert_datetime_type
|
||||
- convert_date_type
|
||||
- convert_time_type
|
||||
- standardize_grants_dict
|
||||
|
||||
Macros:
|
||||
- get_catalog
|
||||
@@ -223,7 +228,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
@available.parse(lambda *a, **k: ("", empty_table()))
|
||||
def execute(
|
||||
self, sql: str, auto_begin: bool = False, fetch: bool = False
|
||||
) -> Tuple[Union[str, AdapterResponse], agate.Table]:
|
||||
) -> Tuple[AdapterResponse, agate.Table]:
|
||||
"""Execute the given SQL. This is a thin wrapper around
|
||||
ConnectionManager.execute.
|
||||
|
||||
@@ -231,8 +236,8 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
:param bool auto_begin: If set, and dbt is not currently inside a
|
||||
transaction, automatically begin one.
|
||||
:param bool fetch: If set, fetch results.
|
||||
:return: A tuple of the status and the results (empty if fetch=False).
|
||||
:rtype: Tuple[Union[str, AdapterResponse], agate.Table]
|
||||
:return: A tuple of the query status and results (empty if fetch=False).
|
||||
:rtype: Tuple[AdapterResponse, agate.Table]
|
||||
"""
|
||||
return self.connections.execute(sql=sql, auto_begin=auto_begin, fetch=fetch)
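With the narrowed return type, callers can count on an `AdapterResponse` rather than a bare status string. A minimal, hedged sketch (how `adapter` was obtained is elided, and the exact fields populated on the response vary by adapter):

```python
# Assumes `adapter` is an already-configured adapter instance with an open
# connection acquired for the current thread.
response, table = adapter.execute("select 1 as id", fetch=True)
print(response.code, response.rows_affected)
for row in table.rows:
    print(row["id"])
```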
|
||||
|
||||
@@ -272,16 +277,19 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
"""
|
||||
return self._macro_manifest_lazy
|
||||
|
||||
def load_macro_manifest(self) -> MacroManifest:
|
||||
def load_macro_manifest(self, base_macros_only=False) -> MacroManifest:
|
||||
# base_macros_only is for the test framework
|
||||
if self._macro_manifest_lazy is None:
|
||||
# avoid a circular import
|
||||
from dbt.parser.manifest import load_macro_manifest
|
||||
from dbt.parser.manifest import ManifestLoader
|
||||
|
||||
manifest = load_macro_manifest(
|
||||
self.config, self.connections.set_query_header
|
||||
manifest = ManifestLoader.load_macros(
|
||||
self.config, self.connections.set_query_header, base_macros_only=base_macros_only
|
||||
)
|
||||
self._macro_manifest_lazy = manifest
|
||||
return self._macro_manifest_lazy
|
||||
# TODO CT-211
|
||||
self._macro_manifest_lazy = manifest # type: ignore[assignment]
|
||||
# TODO CT-211
|
||||
return self._macro_manifest_lazy # type: ignore[return-value]
|
||||
|
||||
def clear_macro_manifest(self):
|
||||
if self._macro_manifest_lazy is not None:
|
||||
@@ -293,13 +301,9 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
def _schema_is_cached(self, database: Optional[str], schema: str) -> bool:
|
||||
"""Check if the schema is cached, and by default logs if it is not."""
|
||||
|
||||
if flags.USE_CACHE is False:
|
||||
return False
|
||||
elif (database, schema) not in self.cache:
|
||||
logger.debug(
|
||||
'On "{}": cache miss for schema "{}.{}", this is inefficient'.format(
|
||||
self.nice_connection_name(), database, schema
|
||||
)
|
||||
if (database, schema) not in self.cache:
|
||||
fire_event(
|
||||
CacheMiss(conn_name=self.nice_connection_name(), database=database, schema=schema)
|
||||
)
|
||||
return False
|
||||
else:
|
||||
@@ -313,10 +317,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
return {
|
||||
self.Relation.create_from(self.config, node).without_identifier()
|
||||
for node in manifest.nodes.values()
|
||||
if (
|
||||
node.resource_type in NodeType.executable()
|
||||
and not node.is_ephemeral_model
|
||||
)
|
||||
if (node.is_relational and not node.is_ephemeral_model)
|
||||
}
|
||||
|
||||
def _get_catalog_schemas(self, manifest: Manifest) -> SchemaSearchMap:
|
||||
@@ -330,7 +331,11 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
"""
|
||||
info_schema_name_map = SchemaSearchMap()
|
||||
nodes: Iterator[CompileResultNode] = chain(
|
||||
manifest.nodes.values(),
|
||||
[
|
||||
node
|
||||
for node in manifest.nodes.values()
|
||||
if (node.is_relational and not node.is_ephemeral_model)
|
||||
],
|
||||
manifest.sources.values(),
|
||||
)
|
||||
for node in nodes:
|
||||
@@ -342,14 +347,14 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
# databases
|
||||
return info_schema_name_map
|
||||
|
||||
def _relations_cache_for_schemas(self, manifest: Manifest) -> None:
|
||||
def _relations_cache_for_schemas(
|
||||
self, manifest: Manifest, cache_schemas: Set[BaseRelation] = None
|
||||
) -> None:
|
||||
"""Populate the relations cache for the given schemas. Returns an
|
||||
iterable of the schemas populated, as strings.
|
||||
"""
|
||||
if not flags.USE_CACHE:
|
||||
return
|
||||
|
||||
cache_schemas = self._get_cache_schemas(manifest)
|
||||
if not cache_schemas:
|
||||
cache_schemas = self._get_cache_schemas(manifest)
|
||||
with executor(self.config) as tpe:
|
||||
futures: List[Future[List[BaseRelation]]] = []
|
||||
for cache_schema in cache_schemas:
|
||||
@@ -375,28 +380,24 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
cache_update.add((relation.database, relation.schema))
|
||||
self.cache.update_schemas(cache_update)
|
||||
|
||||
def set_relations_cache(self, manifest: Manifest, clear: bool = False) -> None:
|
||||
def set_relations_cache(
|
||||
self, manifest: Manifest, clear: bool = False, required_schemas: Set[BaseRelation] = None
|
||||
) -> None:
|
||||
"""Run a query that gets a populated cache of the relations in the
|
||||
database and set the cache on this adapter.
|
||||
"""
|
||||
if not flags.USE_CACHE:
|
||||
return
|
||||
|
||||
with self.cache.lock:
|
||||
if clear:
|
||||
self.cache.clear()
|
||||
self._relations_cache_for_schemas(manifest)
|
||||
self._relations_cache_for_schemas(manifest, required_schemas)
|
||||
|
||||
@available
|
||||
def cache_added(self, relation: Optional[BaseRelation]) -> str:
|
||||
"""Cache a new relation in dbt. It will show up in `list relations`."""
|
||||
if relation is None:
|
||||
name = self.nice_connection_name()
|
||||
raise_compiler_error(
|
||||
"Attempted to cache a null relation for {}".format(name)
|
||||
)
|
||||
if flags.USE_CACHE:
|
||||
self.cache.add(relation)
|
||||
raise_compiler_error("Attempted to cache a null relation for {}".format(name))
|
||||
self.cache.add(relation)
|
||||
# so jinja doesn't render things
|
||||
return ""
|
||||
|
||||
@@ -407,11 +408,8 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
"""
|
||||
if relation is None:
|
||||
name = self.nice_connection_name()
|
||||
raise_compiler_error(
|
||||
"Attempted to drop a null relation for {}".format(name)
|
||||
)
|
||||
if flags.USE_CACHE:
|
||||
self.cache.drop(relation)
|
||||
raise_compiler_error("Attempted to drop a null relation for {}".format(name))
|
||||
self.cache.drop(relation)
|
||||
return ""
|
||||
|
||||
@available
|
||||
@@ -431,25 +429,22 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
"Attempted to rename {} to {} for {}".format(src_name, dst_name, name)
|
||||
)
|
||||
|
||||
if flags.USE_CACHE:
|
||||
self.cache.rename(from_relation, to_relation)
|
||||
self.cache.rename(from_relation, to_relation)
|
||||
return ""
|
||||
|
||||
###
|
||||
# Abstract methods for database-specific values, attributes, and types
|
||||
###
|
||||
@abc.abstractclassmethod
|
||||
@classmethod
|
||||
@abc.abstractmethod
|
||||
def date_function(cls) -> str:
|
||||
"""Get the date function used by this adapter's database."""
|
||||
raise NotImplementedException(
|
||||
"`date_function` is not implemented for this adapter!"
|
||||
)
|
||||
raise NotImplementedException("`date_function` is not implemented for this adapter!")
|
||||
|
||||
@abc.abstractclassmethod
|
||||
@classmethod
|
||||
@abc.abstractmethod
|
||||
def is_cancelable(cls) -> bool:
|
||||
raise NotImplementedException(
|
||||
"`is_cancelable` is not implemented for this adapter!"
|
||||
)
|
||||
raise NotImplementedException("`is_cancelable` is not implemented for this adapter!")
|
||||
|
||||
###
|
||||
# Abstract methods about schemas
|
||||
@@ -457,9 +452,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
@abc.abstractmethod
|
||||
def list_schemas(self, database: str) -> List[str]:
|
||||
"""Get a list of existing schemas in database"""
|
||||
raise NotImplementedException(
|
||||
"`list_schemas` is not implemented for this adapter!"
|
||||
)
|
||||
raise NotImplementedException("`list_schemas` is not implemented for this adapter!")
|
||||
|
||||
@available.parse(lambda *a, **k: False)
|
||||
def check_schema_exists(self, database: str, schema: str) -> bool:
|
||||
@@ -482,30 +475,22 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
|
||||
*Implementors must call self.cache.drop() to preserve cache state!*
|
||||
"""
|
||||
raise NotImplementedException(
|
||||
"`drop_relation` is not implemented for this adapter!"
|
||||
)
|
||||
raise NotImplementedException("`drop_relation` is not implemented for this adapter!")
|
||||
|
||||
@abc.abstractmethod
|
||||
@available.parse_none
|
||||
def truncate_relation(self, relation: BaseRelation) -> None:
|
||||
"""Truncate the given relation."""
|
||||
raise NotImplementedException(
|
||||
"`truncate_relation` is not implemented for this adapter!"
|
||||
)
|
||||
raise NotImplementedException("`truncate_relation` is not implemented for this adapter!")
|
||||
|
||||
@abc.abstractmethod
|
||||
@available.parse_none
|
||||
def rename_relation(
|
||||
self, from_relation: BaseRelation, to_relation: BaseRelation
|
||||
) -> None:
|
||||
def rename_relation(self, from_relation: BaseRelation, to_relation: BaseRelation) -> None:
|
||||
"""Rename the relation from from_relation to to_relation.
|
||||
|
||||
Implementors must call self.cache.rename() to preserve cache state.
|
||||
"""
|
||||
raise NotImplementedException(
|
||||
"`rename_relation` is not implemented for this adapter!"
|
||||
)
|
||||
raise NotImplementedException("`rename_relation` is not implemented for this adapter!")
|
||||
|
||||
@abc.abstractmethod
|
||||
@available.parse_list
|
||||
@@ -540,9 +525,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
)
|
||||
|
||||
@abc.abstractmethod
|
||||
def list_relations_without_caching(
|
||||
self, schema_relation: BaseRelation
|
||||
) -> List[BaseRelation]:
|
||||
def list_relations_without_caching(self, schema_relation: BaseRelation) -> List[BaseRelation]:
|
||||
"""List relations in the given schema, bypassing the cache.
|
||||
|
||||
This is used as the underlying behavior to fill the cache.
|
||||
@@ -556,6 +539,33 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
"`list_relations_without_caching` is not implemented for this " "adapter!"
|
||||
)
|
||||
|
||||
###
|
||||
# Methods about grants
|
||||
###
|
||||
@available
|
||||
def standardize_grants_dict(self, grants_table: agate.Table) -> dict:
|
||||
"""Translate the result of `show grants` (or equivalent) to match the
|
||||
grants which a user would configure in their project.
|
||||
|
||||
Ideally, the SQL to show grants should also be filtering:
|
||||
filter OUT any grants TO the current user/role (e.g. OWNERSHIP).
|
||||
If that's not possible in SQL, it can be done in this method instead.
|
||||
|
||||
:param grants_table: An agate table containing the query result of
|
||||
the SQL returned by get_show_grant_sql
|
||||
:return: A standardized dictionary matching the `grants` config
|
||||
:rtype: dict
|
||||
"""
|
||||
grants_dict: Dict[str, List[str]] = {}
|
||||
for row in grants_table:
|
||||
grantee = row["grantee"]
|
||||
privilege = row["privilege_type"]
|
||||
if privilege in grants_dict.keys():
|
||||
grants_dict[privilege].append(grantee)
|
||||
else:
|
||||
grants_dict.update({privilege: [grantee]})
|
||||
return grants_dict
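To make the shape of that translation concrete, here is the same transformation restated in plain Python on a couple of invented rows; the `grantee` / `privilege_type` keys mirror the columns the method reads above:

```python
# Invented rows shaped like the result of a `show grants` query.
rows = [
    {"grantee": "reporter", "privilege_type": "SELECT"},
    {"grantee": "analyst", "privilege_type": "SELECT"},
    {"grantee": "etl_role", "privilege_type": "INSERT"},
]

grants_dict = {}
for row in rows:
    # Equivalent to the if/else above: group grantees under each privilege.
    grants_dict.setdefault(row["privilege_type"], []).append(row["grantee"])

# grants_dict == {"SELECT": ["reporter", "analyst"], "INSERT": ["etl_role"]},
# the shape dbt compares against a model's `grants` config.
```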
|
||||
|
||||
###
|
||||
# Provided methods about relations
|
||||
###
|
||||
@@ -582,21 +592,13 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
expected_type=self.Relation,
|
||||
)
|
||||
|
||||
from_columns = {
|
||||
col.name: col for col in self.get_columns_in_relation(from_relation)
|
||||
}
|
||||
from_columns = {col.name: col for col in self.get_columns_in_relation(from_relation)}
|
||||
|
||||
to_columns = {
|
||||
col.name: col for col in self.get_columns_in_relation(to_relation)
|
||||
}
|
||||
to_columns = {col.name: col for col in self.get_columns_in_relation(to_relation)}
|
||||
|
||||
missing_columns = set(from_columns.keys()) - set(to_columns.keys())
|
||||
|
||||
return [
|
||||
col
|
||||
for (col_name, col) in from_columns.items()
|
||||
if col_name in missing_columns
|
||||
]
|
||||
return [col for (col_name, col) in from_columns.items() if col_name in missing_columns]
|
||||
|
||||
@available.parse_none
|
||||
def valid_snapshot_target(self, relation: BaseRelation) -> None:
|
||||
@@ -663,33 +665,26 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
|
||||
self.expand_column_types(from_relation, to_relation)
|
||||
|
||||
def list_relations(
|
||||
self, database: Optional[str], schema: str
|
||||
) -> List[BaseRelation]:
|
||||
def list_relations(self, database: Optional[str], schema: str) -> List[BaseRelation]:
|
||||
if self._schema_is_cached(database, schema):
|
||||
return self.cache.get_relations(database, schema)
|
||||
|
||||
schema_relation = self.Relation.create(
|
||||
database=database,
|
||||
schema=schema,
|
||||
identifier="",
|
||||
quote_policy=self.config.quoting,
|
||||
database=database, schema=schema, identifier="", quote_policy=self.config.quoting
|
||||
).without_identifier()
|
||||
|
||||
# we can't build the relations cache because we don't have a
|
||||
# manifest so we can't run any operations.
|
||||
relations = self.list_relations_without_caching(schema_relation)
|
||||
|
||||
logger.debug(
|
||||
"with database={}, schema={}, relations={}".format(
|
||||
database, schema, relations
|
||||
fire_event(
|
||||
ListRelations(
|
||||
database=database, schema=schema, relations=[_make_key(x) for x in relations]
|
||||
)
|
||||
)
|
||||
|
||||
return relations
|
||||
|
||||
def _make_match_kwargs(
|
||||
self, database: str, schema: str, identifier: str
|
||||
) -> Dict[str, str]:
|
||||
def _make_match_kwargs(self, database: str, schema: str, identifier: str) -> Dict[str, str]:
|
||||
quoting = self.config.quoting
|
||||
if identifier is not None and quoting["identifier"] is False:
|
||||
identifier = identifier.lower()
|
||||
@@ -727,9 +722,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
return matches
|
||||
|
||||
@available.parse_none
|
||||
def get_relation(
|
||||
self, database: str, schema: str, identifier: str
|
||||
) -> Optional[BaseRelation]:
|
||||
def get_relation(self, database: str, schema: str, identifier: str) -> Optional[BaseRelation]:
|
||||
relations_list = self.list_relations(database, schema)
|
||||
|
||||
matches = self._make_match(relations_list, database, schema, identifier)
|
||||
@@ -762,20 +755,17 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
@available.parse_none
|
||||
def create_schema(self, relation: BaseRelation):
|
||||
"""Create the given schema if it does not exist."""
|
||||
raise NotImplementedException(
|
||||
"`create_schema` is not implemented for this adapter!"
|
||||
)
|
||||
raise NotImplementedException("`create_schema` is not implemented for this adapter!")
|
||||
|
||||
@abc.abstractmethod
|
||||
@available.parse_none
|
||||
def drop_schema(self, relation: BaseRelation):
|
||||
"""Drop the given schema (and everything in it) if it exists."""
|
||||
raise NotImplementedException(
|
||||
"`drop_schema` is not implemented for this adapter!"
|
||||
)
|
||||
raise NotImplementedException("`drop_schema` is not implemented for this adapter!")
|
||||
|
||||
@available
|
||||
@abc.abstractclassmethod
|
||||
@classmethod
|
||||
@abc.abstractmethod
|
||||
def quote(cls, identifier: str) -> str:
|
||||
"""Quote the given identifier, as appropriate for the database."""
|
||||
raise NotImplementedException("`quote` is not implemented for this adapter!")
|
||||
@@ -801,12 +791,11 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
|
||||
@available
|
||||
def quote_seed_column(self, column: str, quote_config: Optional[bool]) -> str:
|
||||
# this is the default for now
|
||||
quote_columns: bool = False
|
||||
quote_columns: bool = True
|
||||
if isinstance(quote_config, bool):
|
||||
quote_columns = quote_config
|
||||
elif quote_config is None:
|
||||
deprecations.warn("column-quoting-unset")
|
||||
pass
|
||||
else:
|
||||
raise_compiler_error(
|
||||
f'The seed configuration value of "quote_columns" has an '
|
||||
@@ -822,7 +811,8 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
# Conversions: These must be implemented by concrete implementations, for
|
||||
# converting agate types into their sql equivalents.
|
||||
###
|
||||
@abc.abstractclassmethod
|
||||
@classmethod
|
||||
@abc.abstractmethod
|
||||
def convert_text_type(cls, agate_table: agate.Table, col_idx: int) -> str:
|
||||
"""Return the type in the database that best maps to the agate.Text
|
||||
type for the given agate table and column index.
|
||||
@@ -831,11 +821,10 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
:param col_idx: The index into the agate table for the column.
|
||||
:return: The name of the type in the database
|
||||
"""
|
||||
raise NotImplementedException(
|
||||
"`convert_text_type` is not implemented for this adapter!"
|
||||
)
|
||||
raise NotImplementedException("`convert_text_type` is not implemented for this adapter!")
|
||||
|
||||
@abc.abstractclassmethod
|
||||
@classmethod
|
||||
@abc.abstractmethod
|
||||
def convert_number_type(cls, agate_table: agate.Table, col_idx: int) -> str:
|
||||
"""Return the type in the database that best maps to the agate.Number
|
||||
type for the given agate table and column index.
|
||||
@@ -844,11 +833,10 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
:param col_idx: The index into the agate table for the column.
|
||||
:return: The name of the type in the database
|
||||
"""
|
||||
raise NotImplementedException(
|
||||
"`convert_number_type` is not implemented for this adapter!"
|
||||
)
|
||||
raise NotImplementedException("`convert_number_type` is not implemented for this adapter!")
|
||||
|
||||
@abc.abstractclassmethod
|
||||
@classmethod
|
||||
@abc.abstractmethod
|
||||
def convert_boolean_type(cls, agate_table: agate.Table, col_idx: int) -> str:
|
||||
"""Return the type in the database that best maps to the agate.Boolean
|
||||
type for the given agate table and column index.
|
||||
@@ -861,7 +849,8 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
"`convert_boolean_type` is not implemented for this adapter!"
|
||||
)
|
||||
|
||||
@abc.abstractclassmethod
|
||||
@classmethod
|
||||
@abc.abstractmethod
|
||||
def convert_datetime_type(cls, agate_table: agate.Table, col_idx: int) -> str:
|
||||
"""Return the type in the database that best maps to the agate.DateTime
|
||||
type for the given agate table and column index.
|
||||
@@ -874,7 +863,8 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
"`convert_datetime_type` is not implemented for this adapter!"
|
||||
)
|
||||
|
||||
@abc.abstractclassmethod
|
||||
@classmethod
|
||||
@abc.abstractmethod
|
||||
def convert_date_type(cls, agate_table: agate.Table, col_idx: int) -> str:
|
||||
"""Return the type in the database that best maps to the agate.Date
|
||||
type for the given agate table and column index.
|
||||
@@ -883,11 +873,10 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
:param col_idx: The index into the agate table for the column.
|
||||
:return: The name of the type in the database
|
||||
"""
|
||||
raise NotImplementedException(
|
||||
"`convert_date_type` is not implemented for this adapter!"
|
||||
)
|
||||
raise NotImplementedException("`convert_date_type` is not implemented for this adapter!")
|
||||
|
||||
@abc.abstractclassmethod
|
||||
@classmethod
|
||||
@abc.abstractmethod
|
||||
def convert_time_type(cls, agate_table: agate.Table, col_idx: int) -> str:
|
||||
"""Return the type in the database that best maps to the
|
||||
agate.TimeDelta type for the given agate table and column index.
|
||||
@@ -896,9 +885,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
:param col_idx: The index into the agate table for the column.
|
||||
:return: The name of the type in the database
|
||||
"""
|
||||
raise NotImplementedException(
|
||||
"`convert_time_type` is not implemented for this adapter!"
|
||||
)
|
||||
raise NotImplementedException("`convert_time_type` is not implemented for this adapter!")
|
||||
|
||||
@available
|
||||
@classmethod
|
||||
@@ -906,9 +893,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
return cls.convert_agate_type(agate_table, col_idx)
|
||||
|
||||
@classmethod
|
||||
def convert_agate_type(
|
||||
cls, agate_table: agate.Table, col_idx: int
|
||||
) -> Optional[str]:
|
||||
def convert_agate_type(cls, agate_table: agate.Table, col_idx: int) -> Optional[str]:
|
||||
agate_type: Type = agate_table.column_types[col_idx]
|
||||
conversions: List[Tuple[Type, Callable[..., str]]] = [
|
||||
(agate.Text, cls.convert_text_type),
|
||||
@@ -934,7 +919,6 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
project: Optional[str] = None,
|
||||
context_override: Optional[Dict[str, Any]] = None,
|
||||
kwargs: Dict[str, Any] = None,
|
||||
release: bool = False,
|
||||
text_only_columns: Optional[Iterable[str]] = None,
|
||||
) -> agate.Table:
|
||||
"""Look macro_name up in the manifest and execute its results.
|
||||
@@ -948,19 +932,18 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
execution context.
|
||||
:param kwargs: An optional dict of keyword args used to pass to the
|
||||
macro.
|
||||
:param release: Ignored.
|
||||
"""
|
||||
if release is not False:
|
||||
deprecations.warn("execute-macro-release")
|
||||
|
||||
if kwargs is None:
|
||||
kwargs = {}
|
||||
if context_override is None:
|
||||
context_override = {}
|
||||
|
||||
if manifest is None:
|
||||
manifest = self._macro_manifest
|
||||
|
||||
macro = manifest.find_macro_by_name(
|
||||
# TODO CT-211
|
||||
manifest = self._macro_manifest # type: ignore[assignment]
|
||||
# TODO CT-211
|
||||
macro = manifest.find_macro_by_name( # type: ignore[union-attr]
|
||||
macro_name, self.config.project_name, project
|
||||
)
|
||||
if macro is None:
|
||||
@@ -974,12 +957,16 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
macro_name, package_name
|
||||
)
|
||||
)
|
||||
# This causes a reference cycle, as generate_runtime_macro()
|
||||
# This causes a reference cycle, as generate_runtime_macro_context()
|
||||
# ends up calling get_adapter, so the import has to be here.
|
||||
from dbt.context.providers import generate_runtime_macro
|
||||
from dbt.context.providers import generate_runtime_macro_context
|
||||
|
||||
macro_context = generate_runtime_macro(
|
||||
macro=macro, config=self.config, manifest=manifest, package_name=project
|
||||
macro_context = generate_runtime_macro_context(
|
||||
# TODO CT-211
|
||||
macro=macro,
|
||||
config=self.config,
|
||||
manifest=manifest, # type: ignore[arg-type]
|
||||
package_name=project,
|
||||
)
|
||||
macro_context.update(context_override)
|
||||
|
||||
@@ -990,9 +977,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
return result
|
||||
|
||||
@classmethod
|
||||
def _catalog_filter_table(
|
||||
cls, table: agate.Table, manifest: Manifest
|
||||
) -> agate.Table:
|
||||
def _catalog_filter_table(cls, table: agate.Table, manifest: Manifest) -> agate.Table:
|
||||
"""Filter the table as appropriate for catalog entries. Subclasses can
|
||||
override this to change filtering rules on a per-adapter basis.
|
||||
"""
|
||||
@@ -1061,9 +1046,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
}
|
||||
|
||||
# run the macro
|
||||
table = self.execute_macro(
|
||||
FRESHNESS_MACRO_NAME, kwargs=kwargs, manifest=manifest
|
||||
)
|
||||
table = self.execute_macro(FRESHNESS_MACRO_NAME, kwargs=kwargs, manifest=manifest)
|
||||
# now we have a 1-row table of the maximum `loaded_at_field` value and
|
||||
# the current time according to the db.
|
||||
if len(table) != 1 or len(table[0]) != 2:
|
||||
@@ -1130,9 +1113,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
clause += f" where {where_clause}"
|
||||
return clause
|
||||
|
||||
def timestamp_add_sql(
|
||||
self, add_to: str, number: int = 1, interval: str = "hour"
|
||||
) -> str:
|
||||
def timestamp_add_sql(self, add_to: str, number: int = 1, interval: str = "hour") -> str:
|
||||
# for backwards compatibility, we're compelled to set some sort of
|
||||
# default. A lot of searching has lead me to believe that the
|
||||
# '+ interval' syntax used in postgres/redshift is relatively common
|
||||
|
||||
@@ -99,9 +99,7 @@ class AdapterMeta(abc.ABCMeta):
|
||||
# I'm not sure there is any benefit to it after poking around a bit,
|
||||
# but having it doesn't hurt on the python side (and omitting it could
|
||||
# hurt for obscure metaclass reasons, for all I know)
|
||||
cls = abc.ABCMeta.__new__( # type: ignore
|
||||
mcls, name, bases, namespace, **kwargs
|
||||
)
|
||||
cls = abc.ABCMeta.__new__(mcls, name, bases, namespace, **kwargs) # type: ignore
|
||||
|
||||
# this is very much inspired by ABCMeta's own implementation
|
||||
|
||||
|
||||
@@ -48,9 +48,7 @@ class _QueryComment(local):
|
||||
if isinstance(comment, str) and "*/" in comment:
|
||||
# tell the user "no" so they don't hurt themselves by writing
|
||||
# garbage
|
||||
raise RuntimeException(
|
||||
f'query comment contains illegal value "*/": {comment}'
|
||||
)
|
||||
raise RuntimeException(f'query comment contains illegal value "*/": {comment}')
|
||||
self.query_comment = comment
|
||||
self.append = append
|
||||
|
||||
|
||||
@@ -92,14 +92,13 @@ class BaseRelation(FakeAPIObject, Hashable):
|
||||
for k, v in search.items():
|
||||
if not self._is_exactish_match(k, v):
|
||||
exact_match = False
|
||||
|
||||
if self.path.get_lowered_part(k) != v.lower():
|
||||
approximate_match = False
|
||||
if str(self.path.get_lowered_part(k)).strip(self.quote_character) != v.lower().strip(
|
||||
self.quote_character
|
||||
):
|
||||
approximate_match = False # type: ignore[union-attr]
|
||||
|
||||
if approximate_match and not exact_match:
|
||||
target = self.create(
|
||||
database=database, schema=schema, identifier=identifier
|
||||
)
|
||||
target = self.create(database=database, schema=schema, identifier=identifier)
|
||||
dbt.exceptions.approximate_relation_match(target, self)
|
||||
|
||||
return exact_match
|
||||
@@ -164,9 +163,7 @@ class BaseRelation(FakeAPIObject, Hashable):
|
||||
"""
|
||||
return self.include(identifier=False).replace_path(identifier=None)
|
||||
|
||||
def _render_iterator(
|
||||
self,
|
||||
) -> Iterator[Tuple[Optional[ComponentName], Optional[str]]]:
|
||||
def _render_iterator(self) -> Iterator[Tuple[Optional[ComponentName], Optional[str]]]:
|
||||
|
||||
for key in ComponentName:
|
||||
path_part: Optional[str] = None
|
||||
@@ -187,9 +184,7 @@ class BaseRelation(FakeAPIObject, Hashable):
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def create_from_source(
|
||||
cls: Type[Self], source: ParsedSourceDefinition, **kwargs: Any
|
||||
) -> Self:
|
||||
def create_from_source(cls: Type[Self], source: ParsedSourceDefinition, **kwargs: Any) -> Self:
|
||||
source_quoting = source.quoting.to_dict(omit_none=True)
|
||||
source_quoting.pop("column", None)
|
||||
quote_policy = deep_merge(
|
||||
@@ -254,9 +249,7 @@ class BaseRelation(FakeAPIObject, Hashable):
|
||||
if node.resource_type == NodeType.Source:
|
||||
if not isinstance(node, ParsedSourceDefinition):
|
||||
raise InternalException(
|
||||
"type mismatch, expected ParsedSourceDefinition but got {}".format(
|
||||
type(node)
|
||||
)
|
||||
"type mismatch, expected ParsedSourceDefinition but got {}".format(type(node))
|
||||
)
|
||||
return cls.create_from_source(node, **kwargs)
|
||||
else:
|
||||
@@ -365,9 +358,7 @@ class InformationSchema(BaseRelation):
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def get_path(
|
||||
cls, relation: BaseRelation, information_schema_view: Optional[str]
|
||||
) -> Path:
|
||||
def get_path(cls, relation: BaseRelation, information_schema_view: Optional[str]) -> Path:
|
||||
return Path(
|
||||
database=relation.database,
|
||||
schema=relation.schema,
|
||||
@@ -439,13 +430,14 @@ class SchemaSearchMap(Dict[InformationSchema, Set[Optional[str]]]):
|
||||
for schema in schemas:
|
||||
yield information_schema_name, schema
|
||||
|
||||
def flatten(self):
|
||||
def flatten(self, allow_multiple_databases: bool = False):
|
||||
new = self.__class__()
|
||||
|
||||
# make sure we don't have duplicates
|
||||
seen = {r.database.lower() for r in self if r.database}
|
||||
if len(seen) > 1:
|
||||
dbt.exceptions.raise_compiler_error(str(seen))
|
||||
# make sure we don't have multiple databases if allow_multiple_databases is set to False
|
||||
if not allow_multiple_databases:
|
||||
seen = {r.database.lower() for r in self if r.database}
|
||||
if len(seen) > 1:
|
||||
dbt.exceptions.raise_compiler_error(str(seen))
|
||||
|
||||
for information_schema_name, schema in self.search():
|
||||
path = {"database": information_schema_name.database, "schema": schema}
|
||||
|
||||
@@ -1,25 +1,27 @@
|
||||
from collections import namedtuple
|
||||
from copy import deepcopy
|
||||
from typing import List, Iterable, Optional, Dict, Set, Tuple, Any
|
||||
import threading
|
||||
from copy import deepcopy
|
||||
from typing import Any, Dict, Iterable, List, Optional, Set, Tuple
|
||||
|
||||
from dbt.logger import CACHE_LOGGER as logger
|
||||
from dbt.utils import lowercase
|
||||
from dbt.adapters.reference_keys import _make_key, _ReferenceKey
|
||||
import dbt.exceptions
|
||||
|
||||
_ReferenceKey = namedtuple("_ReferenceKey", "database schema identifier")
|
||||
|
||||
|
||||
def _make_key(relation) -> _ReferenceKey:
|
||||
"""Make _ReferenceKeys with lowercase values for the cache so we don't have
|
||||
to keep track of quoting
|
||||
"""
|
||||
# databases and schemas can both be None
|
||||
return _ReferenceKey(
|
||||
lowercase(relation.database),
|
||||
lowercase(relation.schema),
|
||||
lowercase(relation.identifier),
|
||||
)
|
||||
from dbt.events.functions import fire_event
|
||||
from dbt.events.types import (
|
||||
AddLink,
|
||||
AddRelation,
|
||||
DropCascade,
|
||||
DropMissingRelation,
|
||||
DropRelation,
|
||||
DumpAfterAddGraph,
|
||||
DumpAfterRenameSchema,
|
||||
DumpBeforeAddGraph,
|
||||
DumpBeforeRenameSchema,
|
||||
RenameSchema,
|
||||
TemporaryRelation,
|
||||
UncachedRelation,
|
||||
UpdateReference,
|
||||
)
|
||||
from dbt.utils import lowercase
|
||||
from dbt.helper_types import Lazy
|
||||
|
||||
|
||||
def dot_separated(key: _ReferenceKey) -> str:
|
||||
@@ -45,9 +47,9 @@ class _CachedRelation:
|
||||
self.inner = inner
|
||||
|
||||
def __str__(self) -> str:
|
||||
return (
|
||||
"_CachedRelation(database={}, schema={}, identifier={}, inner={})"
|
||||
).format(self.database, self.schema, self.identifier, self.inner)
|
||||
return ("_CachedRelation(database={}, schema={}, identifier={}, inner={})").format(
|
||||
self.database, self.schema, self.identifier, self.inner
|
||||
)
|
||||
|
||||
@property
|
||||
def database(self) -> Optional[str]:
|
||||
@@ -161,12 +163,6 @@ class _CachedRelation:
|
||||
return [dot_separated(r) for r in self.referenced_by]
|
||||
|
||||
|
||||
def lazy_log(msg, func):
|
||||
if logger.disabled:
|
||||
return
|
||||
logger.debug(msg.format(func()))
|
||||
|
||||
|
||||
class RelationsCache:
|
||||
"""A cache of the relations known to dbt. Keeps track of relationships
|
||||
declared between tables and handles renames/drops as a real database would.
|
||||
@@ -241,10 +237,7 @@ class RelationsCache:
|
||||
# self.relations or any cache entry's referenced_by during iteration
|
||||
# it's a runtime error!
|
||||
with self.lock:
|
||||
return {
|
||||
dot_separated(k): v.dump_graph_entry()
|
||||
for k, v in self.relations.items()
|
||||
}
|
||||
return {dot_separated(k): v.dump_graph_entry() for k, v in self.relations.items()}
|
||||
|
||||
def _setdefault(self, relation: _CachedRelation):
|
||||
"""Add a relation to the cache, or return it if it already exists.
|
||||
@@ -272,9 +265,7 @@ class RelationsCache:
|
||||
return
|
||||
if referenced is None:
|
||||
dbt.exceptions.raise_cache_inconsistent(
|
||||
"in add_link, referenced link key {} not in cache!".format(
|
||||
referenced_key
|
||||
)
|
||||
"in add_link, referenced link key {} not in cache!".format(referenced_key)
|
||||
)
|
||||
|
||||
dependent = self.relations.get(dependent_key)
|
||||
@@ -287,6 +278,7 @@ class RelationsCache:
|
||||
|
||||
referenced.add_reference(dependent)
|
||||
|
||||
# TODO: Is this dead code? I can't seem to find it grepping the codebase.
|
||||
def add_link(self, referenced, dependent):
|
||||
"""Add a link between two relations to the database. If either relation
|
||||
does not exist, it will be added as an "external" relation.
|
||||
@@ -302,28 +294,22 @@ class RelationsCache:
|
||||
:raises InternalError: If either entry does not exist.
|
||||
"""
|
||||
ref_key = _make_key(referenced)
|
||||
dep_key = _make_key(dependent)
|
||||
if (ref_key.database, ref_key.schema) not in self:
|
||||
# if we have not cached the referenced schema at all, we must be
|
||||
# referring to a table outside our control. There's no need to make
|
||||
# a link - we will never drop the referenced relation during a run.
|
||||
logger.debug(
|
||||
"{dep!s} references {ref!s} but {ref.database}.{ref.schema} "
|
||||
"is not in the cache, skipping assumed external relation".format(
|
||||
dep=dependent, ref=ref_key
|
||||
)
|
||||
)
|
||||
fire_event(UncachedRelation(dep_key=dep_key, ref_key=ref_key))
|
||||
return
|
||||
if ref_key not in self.relations:
|
||||
# Insert a dummy "external" relation.
|
||||
referenced = referenced.replace(type=referenced.External)
|
||||
self.add(referenced)
|
||||
|
||||
dep_key = _make_key(dependent)
|
||||
if dep_key not in self.relations:
|
||||
# Insert a dummy "external" relation.
|
||||
dependent = dependent.replace(type=referenced.External)
|
||||
self.add(dependent)
|
||||
logger.debug("adding link, {!s} references {!s}".format(dep_key, ref_key))
|
||||
fire_event(AddLink(dep_key=dep_key, ref_key=ref_key))
|
||||
with self.lock:
|
||||
self._add_link(ref_key, dep_key)
|
||||
|
||||
@@ -334,14 +320,12 @@ class RelationsCache:
|
||||
:param BaseRelation relation: The underlying relation.
|
||||
"""
|
||||
cached = _CachedRelation(relation)
|
||||
logger.debug("Adding relation: {!s}".format(cached))
|
||||
|
||||
lazy_log("before adding: {!s}", self.dump_graph)
|
||||
fire_event(AddRelation(relation=_make_key(cached)))
|
||||
fire_event(DumpBeforeAddGraph(dump=Lazy.defer(lambda: self.dump_graph())))
|
||||
|
||||
with self.lock:
|
||||
self._setdefault(cached)
|
||||
|
||||
lazy_log("after adding: {!s}", self.dump_graph)
|
||||
fire_event(DumpAfterAddGraph(dump=Lazy.defer(lambda: self.dump_graph())))
|
||||
|
||||
def _remove_refs(self, keys):
"""Removes all references to all entries in keys. This does not
@@ -356,17 +340,17 @@ class RelationsCache:
for cached in self.relations.values():
cached.release_references(keys)

def _drop_cascade_relation(self, dropped):
def _drop_cascade_relation(self, dropped_key):
"""Drop the given relation and cascade it appropriately to all
dependent relations.

:param _CachedRelation dropped: An existing _CachedRelation to drop.
"""
if dropped not in self.relations:
logger.debug("dropped a nonexistent relationship: {!s}".format(dropped))
if dropped_key not in self.relations:
fire_event(DropMissingRelation(relation=dropped_key))
return
consequences = self.relations[dropped].collect_consequences()
logger.debug("drop {} is cascading to {}".format(dropped, consequences))
consequences = self.relations[dropped_key].collect_consequences()
fire_event(DropCascade(dropped=dropped_key, consequences=consequences))
self._remove_refs(consequences)

def drop(self, relation):
@@ -380,10 +364,10 @@ class RelationsCache:
:param str schema: The schema of the relation to drop.
:param str identifier: The identifier of the relation to drop.
"""
dropped = _make_key(relation)
logger.debug("Dropping relation: {!s}".format(dropped))
dropped_key = _make_key(relation)
fire_event(DropRelation(dropped=dropped_key))
with self.lock:
self._drop_cascade_relation(dropped)
self._drop_cascade_relation(dropped_key)

def _rename_relation(self, old_key, new_relation):
"""Rename a relation named old_key to new_key, updating references.
@@ -404,10 +388,8 @@ class RelationsCache:
# update all the relations that refer to it
for cached in self.relations.values():
if cached.is_referenced_by(old_key):
logger.debug(
"updated reference from {0} -> {2} to {1} -> {2}".format(
old_key, new_key, cached.key()
)
fire_event(
UpdateReference(old_key=old_key, new_key=new_key, cached_key=cached.key())
)
cached.rename_key(old_key, new_key)

@@ -438,11 +420,7 @@ class RelationsCache:
)

if old_key not in self.relations:
logger.debug(
"old key {} not found in self.relations, assuming temporary".format(
old_key
)
)
fire_event(TemporaryRelation(key=old_key))
return False
return True

@@ -460,9 +438,9 @@ class RelationsCache:
"""
old_key = _make_key(old)
new_key = _make_key(new)
logger.debug("Renaming relation {!s} to {!s}".format(old_key, new_key))
fire_event(RenameSchema(old_key=old_key, new_key=new_key))

lazy_log("before rename: {!s}", self.dump_graph)
fire_event(DumpBeforeRenameSchema(dump=Lazy.defer(lambda: self.dump_graph())))

with self.lock:
if self._check_rename_constraints(old_key, new_key):
@@ -470,11 +448,9 @@ class RelationsCache:
else:
self._setdefault(_CachedRelation(new))

lazy_log("after rename: {!s}", self.dump_graph)
fire_event(DumpAfterRenameSchema(dump=Lazy.defer(lambda: self.dump_graph())))

def get_relations(
self, database: Optional[str], schema: Optional[str]
) -> List[Any]:
def get_relations(self, database: Optional[str], schema: Optional[str]) -> List[Any]:
"""Case-insensitively yield all relations matching the given schema.

:param str schema: The case-insensitive schema name to list from.

@@ -8,10 +8,9 @@ from dbt.include.global_project import (
PACKAGE_PATH as GLOBAL_PROJECT_PATH,
PROJECT_NAME as GLOBAL_PROJECT_NAME,
)
from dbt.logger import GLOBAL_LOGGER as logger
from dbt.events.functions import fire_event
from dbt.events.types import AdapterImportError, PluginLoadError
from dbt.contracts.connection import Credentials, AdapterRequiredConfig


from dbt.adapters.protocol import (
AdapterProtocol,
AdapterConfig,
@@ -65,11 +64,12 @@ class AdapterContainer:
# if we failed to import the target module in particular, inform
# the user about it via a runtime error
if exc.name == "dbt.adapters." + name:
fire_event(AdapterImportError(exc=exc))
raise RuntimeException(f"Could not find adapter type {name}!")
logger.info(f"Error importing adapter: {exc}")
# otherwise, the error had to have come from some underlying
# library. Log the stack trace.
logger.debug("", exc_info=True)

fire_event(PluginLoadError())
raise
plugin: AdapterPlugin = mod.Plugin
plugin_type = plugin.adapter.type()
@@ -140,17 +140,13 @@ class AdapterContainer:
raise InternalException(f"No plugin found for {plugin_name}") from None
plugins.append(plugin)
seen.add(plugin_name)
if plugin.dependencies is None:
continue
for dep in plugin.dependencies:
if dep not in seen:
plugin_names.append(dep)
return plugins

def get_adapter_package_names(self, name: Optional[str]) -> List[str]:
package_names: List[str] = [
p.project_name for p in self.get_adapter_plugins(name)
]
package_names: List[str] = [p.project_name for p in self.get_adapter_plugins(name)]
package_names.append(GLOBAL_PROJECT_NAME)
return package_names

@@ -160,9 +156,7 @@ class AdapterContainer:
try:
path = self.packages[package_name]
except KeyError:
raise InternalException(
f"No internal package listing found for {package_name}"
)
raise InternalException(f"No internal package listing found for {package_name}")
paths.append(path)
return paths

@@ -181,6 +175,10 @@ def get_adapter(config: AdapterRequiredConfig):
return FACTORY.lookup_adapter(config.credentials.type)


def get_adapter_by_type(adapter_type):
return FACTORY.lookup_adapter(adapter_type)


def reset_adapters():
"""Clear the adapters. This is useful for tests, which change configs."""
FACTORY.reset_adapters()

@@ -7,7 +7,6 @@ from typing import (
List,
Generic,
TypeVar,
ClassVar,
Tuple,
Union,
Dict,
@@ -18,11 +17,7 @@ from typing_extensions import Protocol
import agate

from dbt.contracts.connection import Connection, AdapterRequiredConfig, AdapterResponse
from dbt.contracts.graph.compiled import (
CompiledNode,
ManifestNode,
NonSourceCompiledNode,
)
from dbt.contracts.graph.compiled import CompiledNode, ManifestNode, NonSourceCompiledNode
from dbt.contracts.graph.parsed import ParsedNode, ParsedSourceDefinition
from dbt.contracts.graph.model_config import BaseConfig
from dbt.contracts.graph.manifest import Manifest
@@ -81,7 +76,8 @@ Column_T = TypeVar("Column_T", bound=ColumnProtocol)
Compiler_T = TypeVar("Compiler_T", bound=CompilerProtocol)


class AdapterProtocol(
# TODO CT-211
class AdapterProtocol( # type: ignore[misc]
Protocol,
Generic[
AdapterConfig_T,
@@ -91,10 +87,13 @@ class AdapterProtocol(
Compiler_T,
],
):
AdapterSpecificConfigs: ClassVar[Type[AdapterConfig_T]]
Column: ClassVar[Type[Column_T]]
Relation: ClassVar[Type[Relation_T]]
ConnectionManager: ClassVar[Type[ConnectionManager_T]]
# N.B. Technically these are ClassVars, but mypy doesn't support putting type vars in a
# ClassVar due to the restrictiveness of PEP-526
# See: https://github.com/python/mypy/issues/5144
AdapterSpecificConfigs: Type[AdapterConfig_T]
Column: Type[Column_T]
Relation: Type[Relation_T]
ConnectionManager: Type[ConnectionManager_T]
connections: ConnectionManager_T

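The comment above points at a real mypy restriction: PEP 526 forbids type variables inside `ClassVar`, so the protocol falls back to plain annotations for attributes that are conceptually class-level. A small illustration of the limitation (class names here are illustrative only):

```python
from typing import ClassVar, Generic, Type, TypeVar

T = TypeVar("T")


class Holder(Generic[T]):
    # mypy rejects the ClassVar form with an error along the lines of
    # "ClassVar cannot contain type variables":
    #     factory: ClassVar[Type[T]]
    # so the annotation is written without ClassVar instead:
    factory: Type[T]
```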
def __init__(self, config: AdapterRequiredConfig):
@@ -158,7 +157,7 @@ class AdapterProtocol(

def execute(
self, sql: str, auto_begin: bool = False, fetch: bool = False
) -> Tuple[Union[str, AdapterResponse], agate.Table]:
) -> Tuple[AdapterResponse, agate.Table]:
...

def get_compiler(self) -> Compiler_T:

core/dbt/adapters/reference_keys.py (new file, 24 lines)
@@ -0,0 +1,24 @@
# this module exists to resolve circular imports with the events module

from collections import namedtuple
from typing import Any, Optional


_ReferenceKey = namedtuple("_ReferenceKey", "database schema identifier")


def lowercase(value: Optional[str]) -> Optional[str]:
if value is None:
return None
else:
return value.lower()


def _make_key(relation: Any) -> _ReferenceKey:
"""Make _ReferenceKeys with lowercase values for the cache so we don't have
to keep track of quoting
"""
# databases and schemas can both be None
return _ReferenceKey(
lowercase(relation.database), lowercase(relation.schema), lowercase(relation.identifier)
)
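Because `_make_key` lowercases every component, two relations that differ only in case (or quoting) collapse to the same cache key. A quick sketch of that behavior; the `SimpleNamespace` objects stand in for real `BaseRelation` instances:

```python
from collections import namedtuple
from types import SimpleNamespace
from typing import Any, Optional

_ReferenceKey = namedtuple("_ReferenceKey", "database schema identifier")


def lowercase(value: Optional[str]) -> Optional[str]:
    return None if value is None else value.lower()


def _make_key(relation: Any) -> _ReferenceKey:
    # databases and schemas can both be None
    return _ReferenceKey(
        lowercase(relation.database), lowercase(relation.schema), lowercase(relation.identifier)
    )


a = SimpleNamespace(database="Analytics", schema="PUBLIC", identifier="Orders")
b = SimpleNamespace(database="analytics", schema="public", identifier="orders")
assert _make_key(a) == _make_key(b)  # case differences collapse to one key
```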
@@ -1,6 +1,6 @@
import abc
import time
from typing import List, Optional, Tuple, Any, Iterable, Dict, Union
from typing import List, Optional, Tuple, Any, Iterable, Dict

import agate

@@ -8,8 +8,8 @@ import dbt.clients.agate_helper
import dbt.exceptions
from dbt.adapters.base import BaseConnectionManager
from dbt.contracts.connection import Connection, ConnectionState, AdapterResponse
from dbt.logger import GLOBAL_LOGGER as logger
from dbt import flags
from dbt.events.functions import fire_event
from dbt.events.types import ConnectionUsed, SQLQuery, SQLCommit, SQLQueryStatus


class SQLConnectionManager(BaseConnectionManager):
@@ -39,10 +39,7 @@ class SQLConnectionManager(BaseConnectionManager):

# if the connection failed, the handle will be None so we have
# nothing to cancel.
if (
connection.handle is not None
and connection.state == ConnectionState.OPEN
):
if connection.handle is not None and connection.state == ConnectionState.OPEN:
self.cancel(connection)
if connection.name is not None:
names.append(connection.name)
@@ -58,8 +55,7 @@ class SQLConnectionManager(BaseConnectionManager):
connection = self.get_thread_connection()
if auto_begin and connection.transaction_open is False:
self.begin()

logger.debug('Using {} connection "{}".'.format(self.TYPE, connection.name))
fire_event(ConnectionUsed(conn_type=self.TYPE, conn_name=connection.name))

with self.exception_handler(sql):
if abridge_sql_log:
@@ -67,25 +63,23 @@ class SQLConnectionManager(BaseConnectionManager):
else:
log_sql = sql

logger.debug(
"On {connection_name}: {sql}",
connection_name=connection.name,
sql=log_sql,
)
fire_event(SQLQuery(conn_name=connection.name, sql=log_sql))
pre = time.time()

cursor = connection.handle.cursor()
cursor.execute(sql, bindings)
logger.debug(
"SQL status: {status} in {elapsed:0.2f} seconds",
status=self.get_response(cursor),
elapsed=(time.time() - pre),

fire_event(
SQLQueryStatus(
status=str(self.get_response(cursor)), elapsed=round((time.time() - pre), 2)
)
)

return connection, cursor

@abc.abstractclassmethod
def get_response(cls, cursor: Any) -> Union[AdapterResponse, str]:
@classmethod
@abc.abstractmethod
def get_response(cls, cursor: Any) -> AdapterResponse:
"""Get the status of the cursor."""
raise dbt.exceptions.NotImplementedException(
"`get_response` is not implemented for this adapter!"
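The swap from `@abc.abstractclassmethod` to stacked `@classmethod`/`@abc.abstractmethod` follows the standard library's own guidance: `abc.abstractclassmethod` has been deprecated since Python 3.3 in favor of the two stacked decorators. A minimal sketch of the preferred form, with an illustrative subclass:

```python
import abc


class ConnectionManagerBase(abc.ABC):
    @classmethod
    @abc.abstractmethod
    def get_response(cls, cursor) -> str:
        """Subclasses must report the cursor's status."""
        raise NotImplementedError


class ExampleManager(ConnectionManagerBase):
    # concrete override satisfies the abstract classmethod
    @classmethod
    def get_response(cls, cursor) -> str:
        return "OK"


print(ExampleManager.get_response(None))  # "OK"
# ConnectionManagerBase() itself raises TypeError: abstract method unimplemented
```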
@@ -95,7 +89,19 @@ class SQLConnectionManager(BaseConnectionManager):
def process_results(
cls, column_names: Iterable[str], rows: Iterable[Any]
) -> List[Dict[str, Any]]:

# TODO CT-211
unique_col_names = dict() # type: ignore[var-annotated]
# TODO CT-211
for idx in range(len(column_names)): # type: ignore[arg-type]
# TODO CT-211
col_name = column_names[idx] # type: ignore[index]
if col_name in unique_col_names:
unique_col_names[col_name] += 1
# TODO CT-211
column_names[idx] = f"{col_name}_{unique_col_names[col_name]}" # type: ignore[index] # noqa
else:
# TODO CT-211
unique_col_names[column_names[idx]] = 1 # type: ignore[index]
return [dict(zip(column_names, row)) for row in rows]
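The new `process_results` body renames duplicate column names by suffixing a counter, so `dict(zip(...))` does not silently drop columns that share a name. The same idea in isolation (a sketch, not the exact dbt helper):

```python
from typing import Any, Dict, Iterable, List


def process_results(column_names: List[str], rows: Iterable[Any]) -> List[Dict[str, Any]]:
    seen: Dict[str, int] = {}
    for idx, col_name in enumerate(column_names):
        if col_name in seen:
            seen[col_name] += 1
            # the second "id" becomes "id_2", the third "id_3", and so on
            column_names[idx] = f"{col_name}_{seen[col_name]}"
        else:
            seen[col_name] = 1
    return [dict(zip(column_names, row)) for row in rows]


print(process_results(["id", "id"], [(1, 2)]))  # [{'id': 1, 'id_2': 2}]
```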

@classmethod
@@ -112,7 +118,7 @@ class SQLConnectionManager(BaseConnectionManager):

def execute(
self, sql: str, auto_begin: bool = False, fetch: bool = False
) -> Tuple[Union[AdapterResponse, str], agate.Table]:
) -> Tuple[AdapterResponse, agate.Table]:
sql = self._add_query_comment(sql)
_, cursor = self.add_query(sql, auto_begin)
response = self.get_response(cursor)
@@ -130,13 +136,6 @@ class SQLConnectionManager(BaseConnectionManager):

def begin(self):
connection = self.get_thread_connection()

if flags.STRICT_MODE:
if not isinstance(connection, Connection):
raise dbt.exceptions.CompilerException(
f"In begin, got {connection} - not a Connection!"
)

if connection.transaction_open is True:
raise dbt.exceptions.InternalException(
'Tried to begin a new transaction on connection "{}", but '
@@ -150,19 +149,13 @@ class SQLConnectionManager(BaseConnectionManager):

def commit(self):
connection = self.get_thread_connection()
if flags.STRICT_MODE:
if not isinstance(connection, Connection):
raise dbt.exceptions.CompilerException(
f"In commit, got {connection} - not a Connection!"
)

if connection.transaction_open is False:
raise dbt.exceptions.InternalException(
'Tried to commit transaction on connection "{}", but '
"it does not have one open!".format(connection.name)
)

logger.debug("On {}: COMMIT".format(connection.name))
fire_event(SQLCommit(conn_name=connection.name))
self.add_commit_query()

connection.transaction_open = False

@@ -5,8 +5,11 @@ import dbt.clients.agate_helper
from dbt.contracts.connection import Connection
import dbt.exceptions
from dbt.adapters.base import BaseAdapter, available
from dbt.adapters.cache import _make_key
from dbt.adapters.sql import SQLConnectionManager
from dbt.logger import GLOBAL_LOGGER as logger
from dbt.events.functions import fire_event
from dbt.events.types import ColTypeChange, SchemaCreation, SchemaDrop


from dbt.adapters.base.relation import BaseRelation

@@ -24,7 +27,7 @@ ALTER_COLUMN_TYPE_MACRO_NAME = "alter_column_type"

class SQLAdapter(BaseAdapter):
"""The default adapter with the common agate conversions and some SQL
methods implemented. This adapter has a different much shorter list of
methods was implemented. This adapter has a different much shorter list of
methods to implement, but some more macros that must be implemented.

To implement a macro, implement "${adapter_type}__${macro_name}". in the
@@ -68,7 +71,8 @@ class SQLAdapter(BaseAdapter):

@classmethod
def convert_number_type(cls, agate_table: agate.Table, col_idx: int) -> str:
decimals = agate_table.aggregate(agate.MaxPrecision(col_idx)) # type: ignore
# TODO CT-211
decimals = agate_table.aggregate(agate.MaxPrecision(col_idx)) # type: ignore[attr-defined]
return "float8" if decimals else "integer"

@classmethod
@@ -99,16 +103,15 @@ class SQLAdapter(BaseAdapter):
for column_name, reference_column in reference_columns.items():
target_column = target_columns.get(column_name)

if target_column is not None and target_column.can_expand_to(
reference_column
):
if target_column is not None and target_column.can_expand_to(reference_column):
col_string_size = reference_column.string_size()
new_type = self.Column.string_type(col_string_size)
logger.debug(
"Changing col type from {} to {} in table {}",
target_column.data_type,
new_type,
current,
fire_event(
ColTypeChange(
orig_type=target_column.data_type,
new_type=new_type,
table=_make_key(current),
)
)

self.alter_column_type(current, column_name, new_type)
@@ -152,7 +155,7 @@ class SQLAdapter(BaseAdapter):

def create_schema(self, relation: BaseRelation) -> None:
relation = relation.without_identifier()
logger.debug('Creating schema "{}"', relation)
fire_event(SchemaCreation(relation=_make_key(relation)))
kwargs = {
"relation": relation,
}
@@ -163,11 +166,12 @@ class SQLAdapter(BaseAdapter):

def drop_schema(self, relation: BaseRelation) -> None:
relation = relation.without_identifier()
logger.debug('Dropping schema "{}".', relation)
fire_event(SchemaDrop(relation=_make_key(relation)))
kwargs = {
"relation": relation,
}
self.execute_macro(DROP_SCHEMA_MACRO_NAME, kwargs=kwargs)
self.commit_if_has_connection()
# we can update the cache here
self.cache.drop_schema(relation.database, relation.schema)

@@ -200,9 +204,7 @@ class SQLAdapter(BaseAdapter):
return '"{}"'.format(identifier)

def list_schemas(self, database: str) -> List[str]:
results = self.execute_macro(
LIST_SCHEMAS_MACRO_NAME, kwargs={"database": database}
)
results = self.execute_macro(LIST_SCHEMAS_MACRO_NAME, kwargs={"database": database})

return [row[0] for row in results]

@@ -217,3 +219,25 @@ class SQLAdapter(BaseAdapter):
kwargs = {"information_schema": information_schema, "schema": schema}
results = self.execute_macro(CHECK_SCHEMA_EXISTS_MACRO_NAME, kwargs=kwargs)
return results[0][0] > 0

# This is for use in the test suite
def run_sql_for_tests(self, sql, fetch, conn):
cursor = conn.handle.cursor()
try:
cursor.execute(sql)
if hasattr(conn.handle, "commit"):
conn.handle.commit()
if fetch == "one":
return cursor.fetchone()
elif fetch == "all":
return cursor.fetchall()
else:
return
except BaseException as e:
if conn.handle and not getattr(conn.handle, "closed", True):
conn.handle.rollback()
print(sql)
print(e)
raise
finally:
conn.transaction_open = False

core/dbt/clients/README.md (new file, 1 line)
@@ -0,0 +1 @@
# Clients README
@@ -18,9 +18,7 @@ class BlockData:


class BlockTag:
def __init__(
self, block_type_name, block_name, contents=None, full_block=None, **kw
):
def __init__(self, block_type_name, block_name, contents=None, full_block=None, **kw):
self.block_type_name = block_type_name
self.block_name = block_name
self.contents = contents
@@ -56,9 +54,7 @@ _NAME_PATTERN = r"[A-Za-z_][A-Za-z_0-9]*"

COMMENT_START_PATTERN = regex(r"(?:(?P<comment_start>(\s*\{\#)))")
COMMENT_END_PATTERN = regex(r"(.*?)(\s*\#\})")
RAW_START_PATTERN = regex(
r"(?:\s*\{\%\-|\{\%)\s*(?P<raw_start>(raw))\s*(?:\-\%\}\s*|\%\})"
)
RAW_START_PATTERN = regex(r"(?:\s*\{\%\-|\{\%)\s*(?P<raw_start>(raw))\s*(?:\-\%\}\s*|\%\})")
EXPR_START_PATTERN = regex(r"(?P<expr_start>(\{\{\s*))")
EXPR_END_PATTERN = regex(r"(?P<expr_end>(\s*\}\}))")

@@ -88,9 +84,7 @@ TAG_CLOSE_PATTERN = regex(r"(?:(?P<tag_close>(\-\%\}\s*|\%\})))")

# stolen from jinja's lexer. Note that we've consumed all prefix whitespace by
# the time we want to use this.
STRING_PATTERN = regex(
r"(?P<string>('([^'\\]*(?:\\.[^'\\]*)*)'|" r'"([^"\\]*(?:\\.[^"\\]*)*)"))'
)
STRING_PATTERN = regex(r"(?P<string>('([^'\\]*(?:\\.[^'\\]*)*)'|" r'"([^"\\]*(?:\\.[^"\\]*)*)"))')

QUOTE_START_PATTERN = regex(r"""(?P<quote>(['"]))""")

@@ -236,10 +230,7 @@ class TagIterator:
self.advance(match.end())
self._expect_block_close()
return Tag(
block_type_name=block_type_name,
block_name=block_name,
start=start_pos,
end=self.pos,
block_type_name=block_type_name, block_name=block_name, start=start_pos, end=self.pos
)

def find_tags(self):
@@ -332,11 +323,7 @@ class BlockIterator:
(
"Got an unexpected control flow end tag, got {} but "
"never saw a preceeding {} (@ {})"
).format(
tag.block_type_name,
expected,
self.tag_parser.linepos(tag.start),
)
).format(tag.block_type_name, expected, self.tag_parser.linepos(tag.start))
)
expected = _CONTROL_FLOW_TAGS[found]
if expected != tag.block_type_name:
@@ -344,11 +331,7 @@ class BlockIterator:
(
"Got an unexpected control flow end tag, got {} but "
"expected {} next (@ {})"
).format(
tag.block_type_name,
expected,
self.tag_parser.linepos(tag.start),
)
).format(tag.block_type_name, expected, self.tag_parser.linepos(tag.start))
)

if tag.block_type_name in allowed_blocks:
@@ -385,8 +368,7 @@ class BlockIterator:
linecount = self.data[: self.current.end].count("\n") + 1
dbt.exceptions.raise_compiler_error(
(
"Reached EOF without finding a close tag for "
"{} (searched from line {})"
"Reached EOF without finding a close tag for " "{} (searched from line {})"
).format(self.current.block_type_name, linecount)
)

@@ -397,7 +379,5 @@ class BlockIterator:

def lex_for_blocks(self, allowed_blocks=None, collect_raw_data=True):
return list(
self.find_blocks(
allowed_blocks=allowed_blocks, collect_raw_data=collect_raw_data
)
self.find_blocks(allowed_blocks=allowed_blocks, collect_raw_data=collect_raw_data)
)

@@ -13,6 +13,16 @@ from dbt.exceptions import RuntimeException
BOM = BOM_UTF8.decode("utf-8") # '\ufeff'


class Number(agate.data_types.Number):
# undo the change in https://github.com/wireservice/agate/pull/733
# i.e. do not cast True and False to numeric 1 and 0
def cast(self, d):
if type(d) == bool:
raise agate.exceptions.CastError("Do not cast True to 1 or False to 0.")
else:
return super().cast(d)


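The `Number` subclass exists because agate, after wireservice/agate#733, casts Python booleans to 1 and 0, which would silently re-type columns meant to stay boolean. Assuming agate is installed, the difference looks like this:

```python
import agate


class Number(agate.data_types.Number):
    # refuse booleans instead of coercing them to 1/0
    def cast(self, d):
        if type(d) == bool:
            raise agate.exceptions.CastError("Do not cast True to 1 or False to 0.")
        return super().cast(d)


print(agate.data_types.Number().cast(True))  # Decimal('1') - upstream behavior after #733
try:
    Number().cast(True)
except agate.exceptions.CastError as e:
    print(e)  # the subclass rejects it, so the Boolean type gets a chance instead
```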
class ISODateTime(agate.data_types.DateTime):
def cast(self, d):
# this is agate.data_types.DateTime.cast with the "clever" bits removed
@@ -33,20 +43,21 @@ class ISODateTime(agate.data_types.DateTime):
raise agate.exceptions.CastError('Can not parse value "%s" as datetime.' % d)


def build_type_tester(text_columns: Iterable[str]) -> agate.TypeTester:
def build_type_tester(
text_columns: Iterable[str], string_null_values: Optional[Iterable[str]] = ("null", "")
) -> agate.TypeTester:

types = [
agate.data_types.Number(null_values=("null", "")),
Number(null_values=("null", "")),
agate.data_types.Date(null_values=("null", ""), date_format="%Y-%m-%d"),
agate.data_types.DateTime(
null_values=("null", ""), datetime_format="%Y-%m-%d %H:%M:%S"
),
agate.data_types.DateTime(null_values=("null", ""), datetime_format="%Y-%m-%d %H:%M:%S"),
ISODateTime(null_values=("null", "")),
agate.data_types.Boolean(
true_values=("true",), false_values=("false",), null_values=("null", "")
),
agate.data_types.Text(null_values=("null", "")),
agate.data_types.Text(null_values=string_null_values),
]
force = {k: agate.data_types.Text(null_values=("null", "")) for k in text_columns}
force = {k: agate.data_types.Text(null_values=string_null_values) for k in text_columns}
return agate.TypeTester(force=force, types=types)


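A `TypeTester` built this way tries each candidate type in order and falls back to `Text`; any column listed in `text_columns` is forced to `Text` outright. Assuming the definitions above are importable, usage might look like:

```python
import agate

# example seed-style rows; the "code" column must keep its leading zeros
rows = [("005", "1", "true"), ("007", "2", "false")]
tester = build_type_tester(text_columns=["code"])  # as defined above

table = agate.Table(rows, ["code", "n", "flag"], column_types=tester)
print([type(c).__name__ for c in table.column_types])
# e.g. ['Text', 'Number', 'Boolean'] - "code" stays text, the others are inferred
```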
@@ -61,12 +72,15 @@ def table_from_rows(
if text_only_columns is None:
column_types = DEFAULT_TYPE_TESTER
else:
column_types = build_type_tester(text_only_columns)
# If text_only_columns are present, prevent coercing empty string or
# literal 'null' strings to a None representation.
column_types = build_type_tester(text_only_columns, string_null_values=())

return agate.Table(rows, column_names, column_types=column_types)


def table_from_data(data, column_names: Iterable[str]) -> agate.Table:
"Convert list of dictionaries into an Agate table"
"Convert a list of dictionaries into an Agate table"

# The agate table is generated from a list of dicts, so the column order
# from `data` is not preserved. We can use `select` to reorder the columns
@@ -81,19 +95,32 @@ def table_from_data(data, column_names: Iterable[str]) -> agate.Table:


def table_from_data_flat(data, column_names: Iterable[str]) -> agate.Table:
"Convert list of dictionaries into an Agate table"
"""
Convert a list of dictionaries into an Agate table. This method does not
coerce string values into more specific types (eg. '005' will not be
coerced to '5'). Additionally, this method does not coerce values to
None (eg. '' or 'null' will retain their string literal representations).
"""

rows = []
text_only_columns = set()
for _row in data:
row = []
for value in list(_row.values()):
for col_name in column_names:
value = _row[col_name]
if isinstance(value, (dict, list, tuple)):
row.append(json.dumps(value, cls=dbt.utils.JSONEncoder))
else:
row.append(value)
# Represent container types as json strings
value = json.dumps(value, cls=dbt.utils.JSONEncoder)
text_only_columns.add(col_name)
elif isinstance(value, str):
text_only_columns.add(col_name)
row.append(value)

rows.append(row)

return table_from_rows(rows=rows, column_names=column_names)
return table_from_rows(
rows=rows, column_names=column_names, text_only_columns=text_only_columns
)


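The rewritten `table_from_data_flat` walks columns in the caller's order, JSON-encodes container values, and marks string columns as text-only so values like `'005'` and `'null'` survive untouched. A condensed, self-contained sketch of that behavior:

```python
import json

data = [{"id": 1, "tags": ["a", "b"], "code": "005"}]
column_names = ["id", "tags", "code"]

rows = []
text_only_columns = set()
for _row in data:
    row = []
    for col_name in column_names:
        value = _row[col_name]
        if isinstance(value, (dict, list, tuple)):
            value = json.dumps(value)          # containers become JSON strings
            text_only_columns.add(col_name)
        elif isinstance(value, str):
            text_only_columns.add(col_name)    # raw strings are never re-typed
        row.append(value)
    rows.append(row)

print(rows)               # [[1, '["a", "b"]', '005']]
print(text_only_columns)  # {'tags', 'code'}
```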
def empty_table():
@@ -157,9 +184,7 @@ class ColumnTypeBuilder(Dict[str, NullableAgateType]):
return result


def _merged_column_types(
tables: List[agate.Table],
) -> Dict[str, agate.data_types.DataType]:
def _merged_column_types(tables: List[agate.Table]) -> Dict[str, agate.data_types.DataType]:
# this is a lot like agate.Table.merge, but with handling for all-null
# rows being "any type".
new_columns: ColumnTypeBuilder = ColumnTypeBuilder()

@@ -1,26 +0,0 @@
from dbt.logger import GLOBAL_LOGGER as logger
import dbt.exceptions
from dbt.clients.system import run_cmd

NOT_INSTALLED_MSG = """
dbt requires the gcloud SDK to be installed to authenticate with BigQuery.
Please download and install the SDK, or use a Service Account instead.

https://cloud.google.com/sdk/
"""


def gcloud_installed():
try:
run_cmd(".", ["gcloud", "--version"])
return True
except OSError as e:
logger.debug(e)
return False


def setup_default_credentials():
if gcloud_installed():
run_cmd(".", ["gcloud", "auth", "application-default", "login"])
else:
raise dbt.exceptions.RuntimeException(NOT_INSTALLED_MSG)
@@ -2,22 +2,77 @@ import re
import os.path

from dbt.clients.system import run_cmd, rmdir
from dbt.logger import GLOBAL_LOGGER as logger
import dbt.exceptions
from dbt.events.functions import fire_event
from dbt.events.types import (
GitSparseCheckoutSubdirectory,
GitProgressCheckoutRevision,
GitProgressUpdatingExistingDependency,
GitProgressPullingNewDependency,
GitNothingToDo,
GitProgressUpdatedCheckoutRange,
GitProgressCheckedOutAt,
)
from dbt.exceptions import (
CommandResultError,
RuntimeException,
bad_package_spec,
raise_git_cloning_error,
raise_git_cloning_problem,
)
from packaging import version


def clone(repo, cwd, dirname=None, remove_git_dir=False, branch=None):
def _is_commit(revision: str) -> bool:
# match SHA-1 git commit
return bool(re.match(r"\b[0-9a-f]{40}\b", revision))


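`_is_commit` drives the branching in the new `clone()` and `_checkout()`: a full 40-character SHA-1 is fetched directly, while anything else is treated as a branch or tag name. For example:

```python
import re


def _is_commit(revision: str) -> bool:
    # a full 40-hex-digit SHA-1, nothing shorter
    return bool(re.match(r"\b[0-9a-f]{40}\b", revision))


assert _is_commit("0123456789abcdef0123456789abcdef01234567")
assert not _is_commit("main")       # branch name
assert not _is_commit("0.19.0")     # tag name
assert not _is_commit("abc1234")    # an abbreviated SHA is not enough
```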
def _raise_git_cloning_error(repo, revision, error):
stderr = error.stderr.strip()
if "usage: git" in stderr:
stderr = stderr.split("\nusage: git")[0]
if re.match("fatal: destination path '(.+)' already exists", stderr):
raise_git_cloning_error(error)

bad_package_spec(repo, revision, stderr)


def clone(repo, cwd, dirname=None, remove_git_dir=False, revision=None, subdirectory=None):
has_revision = revision is not None
is_commit = _is_commit(revision or "")

clone_cmd = ["git", "clone", "--depth", "1"]
if subdirectory:
fire_event(GitSparseCheckoutSubdirectory(subdir=subdirectory))
out, _ = run_cmd(cwd, ["git", "--version"], env={"LC_ALL": "C"})
git_version = version.parse(re.search(r"\d+\.\d+\.\d+", out.decode("utf-8")).group(0))
if not git_version >= version.parse("2.25.0"):
# 2.25.0 introduces --sparse
raise RuntimeError(
"Please update your git version to pull a dbt package "
"from a subdirectory: your version is {}, >= 2.25.0 needed".format(git_version)
)
clone_cmd.extend(["--filter=blob:none", "--sparse"])

if branch is not None:
clone_cmd.extend(["--branch", branch])
if has_revision and not is_commit:
clone_cmd.extend(["--branch", revision])

clone_cmd.append(repo)

if dirname is not None:
clone_cmd.append(dirname)
try:
result = run_cmd(cwd, clone_cmd, env={"LC_ALL": "C"})
except CommandResultError as exc:
_raise_git_cloning_error(repo, revision, exc)

result = run_cmd(cwd, clone_cmd, env={"LC_ALL": "C"})
if subdirectory:
cwd_subdir = os.path.join(cwd, dirname or "")
clone_cmd_subdir = ["git", "sparse-checkout", "set", subdirectory]
try:
run_cmd(cwd_subdir, clone_cmd_subdir)
except CommandResultError as exc:
_raise_git_cloning_error(repo, revision, exc)

if remove_git_dir:
rmdir(os.path.join(dirname, ".git"))
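Taken together, the subdirectory path issues roughly this git sequence (sketched with `subprocess` rather than dbt's `run_cmd`; the repo URL and subdirectory are illustrative, and `--sparse` requires git >= 2.25.0):

```python
import subprocess

REPO = "https://github.com/dbt-labs/dbt-utils.git"  # example repo
SUBDIR = "macros"                                   # hypothetical subdirectory to pull

# 1) shallow, blob-less, sparse clone
subprocess.run(
    ["git", "clone", "--depth", "1", "--filter=blob:none", "--sparse", REPO, "pkg"],
    check=True,
)
# 2) restrict the working tree to the requested subdirectory
subprocess.run(["git", "sparse-checkout", "set", SUBDIR], cwd="pkg", check=True)
```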
@@ -31,32 +86,37 @@ def list_tags(cwd):
return tags


def _checkout(cwd, repo, branch):
logger.debug("  Checking out branch {}.".format(branch))
def _checkout(cwd, repo, revision):
fire_event(GitProgressCheckoutRevision(revision=revision))

run_cmd(cwd, ["git", "remote", "set-branches", "origin", branch])
run_cmd(cwd, ["git", "fetch", "--tags", "--depth", "1", "origin", branch])
fetch_cmd = ["git", "fetch", "origin", "--depth", "1"]

tags = list_tags(cwd)

# Prefer tags to branches if one exists
if branch in tags:
spec = "tags/{}".format(branch)
if _is_commit(revision):
run_cmd(cwd, fetch_cmd + [revision])
else:
spec = "origin/{}".format(branch)
run_cmd(cwd, ["git", "remote", "set-branches", "origin", revision])
run_cmd(cwd, fetch_cmd + ["--tags", revision])

if _is_commit(revision):
spec = revision
# Prefer tags to branches if one exists
elif revision in list_tags(cwd):
spec = "tags/{}".format(revision)
else:
spec = "origin/{}".format(revision)

out, err = run_cmd(cwd, ["git", "reset", "--hard", spec], env={"LC_ALL": "C"})
return out, err


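The new `_checkout` therefore resolves a revision in three steps: exact commits are used as-is, tags win over branches, and anything else is assumed to live on `origin`. The resolution logic as a standalone sketch:

```python
import re


def _is_commit(revision: str) -> bool:
    return bool(re.match(r"\b[0-9a-f]{40}\b", revision))


def resolve_spec(revision: str, tags) -> str:
    # mirrors the branching in _checkout above
    if _is_commit(revision):
        return revision                     # reset --hard to the exact SHA
    if revision in tags:
        return "tags/{}".format(revision)   # prefer tags over branches
    return "origin/{}".format(revision)     # otherwise assume a remote branch


assert resolve_spec("v1.0.0", ["v1.0.0"]) == "tags/v1.0.0"
assert resolve_spec("main", ["v1.0.0"]) == "origin/main"
```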
def checkout(cwd, repo, branch=None):
if branch is None:
branch = "HEAD"
def checkout(cwd, repo, revision=None):
if revision is None:
revision = "HEAD"
try:
return _checkout(cwd, repo, branch)
except dbt.exceptions.CommandResultError as exc:
stderr = exc.stderr.decode("utf-8").strip()
dbt.exceptions.bad_package_spec(repo, branch, stderr)
return _checkout(cwd, repo, revision)
except CommandResultError as exc:
stderr = exc.stderr.strip()
bad_package_spec(repo, revision, stderr)


def get_current_sha(cwd):
@@ -69,40 +129,46 @@ def remove_remote(cwd):
return run_cmd(cwd, ["git", "remote", "rm", "origin"], env={"LC_ALL": "C"})


def clone_and_checkout(repo, cwd, dirname=None, remove_git_dir=False, branch=None):
def clone_and_checkout(
repo, cwd, dirname=None, remove_git_dir=False, revision=None, subdirectory=None
):
exists = None
try:
_, err = clone(repo, cwd, dirname=dirname, remove_git_dir=remove_git_dir)
except dbt.exceptions.CommandResultError as exc:
err = exc.stderr.decode("utf-8")
_, err = clone(
repo,
cwd,
dirname=dirname,
remove_git_dir=remove_git_dir,
subdirectory=subdirectory,
)
except CommandResultError as exc:
err = exc.stderr
exists = re.match("fatal: destination path '(.+)' already exists", err)
if not exists:  # something else is wrong, raise it
raise
if not exists:
raise_git_cloning_problem(repo)

directory = None
start_sha = None
if exists:
directory = exists.group(1)
logger.debug("Updating existing dependency {}.", directory)
fire_event(GitProgressUpdatingExistingDependency(dir=directory))
else:
matches = re.match("Cloning into '(.+)'", err.decode("utf-8"))
if matches is None:
raise dbt.exceptions.RuntimeException(
f'Error cloning {repo} - never saw "Cloning into ..." from git'
)
raise RuntimeException(f'Error cloning {repo} - never saw "Cloning into ..." from git')
directory = matches.group(1)
logger.debug("Pulling new dependency {}.", directory)
fire_event(GitProgressPullingNewDependency(dir=directory))
full_path = os.path.join(cwd, directory)
start_sha = get_current_sha(full_path)
checkout(full_path, repo, branch)
checkout(full_path, repo, revision)
end_sha = get_current_sha(full_path)
if exists:
if start_sha == end_sha:
logger.debug("  Already at {}, nothing to do.", start_sha[:7])
fire_event(GitNothingToDo(sha=start_sha[:7]))
else:
logger.debug(
"  Updated checkout from {} to {}.", start_sha[:7], end_sha[:7]
fire_event(
GitProgressUpdatedCheckoutRange(start_sha=start_sha[:7], end_sha=end_sha[:7])
)
else:
logger.debug("  Checked out at {}.", end_sha[:7])
return directory
fire_event(GitProgressCheckedOutAt(end_sha=end_sha[:7]))
return os.path.join(directory, subdirectory or "")

@@ -7,19 +7,7 @@ import threading
from ast import literal_eval
from contextlib import contextmanager
from itertools import chain, islice
from typing import (
List,
Union,
Set,
Optional,
Dict,
Any,
Iterator,
Type,
NoReturn,
Tuple,
Callable,
)
from typing import List, Union, Set, Optional, Dict, Any, Iterator, Type, NoReturn, Tuple, Callable

import jinja2
import jinja2.ext
@@ -32,12 +20,13 @@ from dbt.utils import (
get_dbt_macro_name,
get_docs_macro_name,
get_materialization_macro_name,
deep_map,
get_test_macro_name,
deep_map_render,
)

from dbt.clients._jinja_blocks import BlockIterator, BlockData, BlockTag
from dbt.contracts.graph.compiled import CompiledSchemaTestNode
from dbt.contracts.graph.parsed import ParsedSchemaTestNode
from dbt.contracts.graph.compiled import CompiledGenericTestNode
from dbt.contracts.graph.parsed import ParsedGenericTestNode
from dbt.exceptions import (
InternalException,
raise_compiler_error,
@@ -45,9 +34,9 @@ from dbt.exceptions import (
invalid_materialization_argument,
MacroReturn,
JinjaRenderingException,
UndefinedMacroException,
)
from dbt import flags
from dbt.logger import GLOBAL_LOGGER as logger  # noqa


def _linecache_inject(source, write):
@@ -70,12 +59,7 @@ def _linecache_inject(source, write):
filename = rnd.decode("ascii")

# put ourselves in the cache
cache_entry = (
len(source),
None,
[line + "\n" for line in source.splitlines()],
filename,
)
cache_entry = (len(source), None, [line + "\n" for line in source.splitlines()], filename)
# linecache does in fact have an attribute `cache`, thanks
linecache.cache[filename] = cache_entry  # type: ignore
return filename
@@ -119,7 +103,7 @@ class NativeSandboxEnvironment(MacroFuzzEnvironment):


class TextMarker(str):
"""A special native-env marker that indicates that a value is text and is
"""A special native-env marker that indicates a value is text and is
not to be evaluated. Use this to prevent your numbery-strings from becoming
numbers!
"""
@@ -169,13 +153,9 @@ def quoted_native_concat(nodes):
except (ValueError, SyntaxError, MemoryError):
result = raw
if isinstance(raw, BoolMarker) and not isinstance(result, bool):
raise JinjaRenderingException(
f"Could not convert value '{raw!s}' into type 'bool'"
)
raise JinjaRenderingException(f"Could not convert value '{raw!s}' into type 'bool'")
if isinstance(raw, NumberMarker) and not _is_number(result):
raise JinjaRenderingException(
f"Could not convert value '{raw!s}' into type 'number'"
)
raise JinjaRenderingException(f"Could not convert value '{raw!s}' into type 'number'")

return result

@@ -389,9 +369,7 @@ class MaterializationExtension(jinja2.ext.Extension):

node.name = get_materialization_macro_name(materialization_name, adapter_name)

node.body = parser.parse_statements(
("name:endmaterialization",), drop_needle=True
)
node.body = parser.parse_statements(("name:endmaterialization",), drop_needle=True)

return node

@@ -410,6 +388,19 @@ class DocumentationExtension(jinja2.ext.Extension):
return node


class TestExtension(jinja2.ext.Extension):
tags = ["test"]

def parse(self, parser):
node = jinja2.nodes.Macro(lineno=next(parser.stream).lineno)
test_name = parser.parse_assign_target(name_only=True).name

parser.parse_signature(node)
node.name = get_test_macro_name(test_name)
node.body = parser.parse_statements(("name:endtest",), drop_needle=True)
return node


def _is_dunder_name(name):
return name.startswith("__") and name.endswith("__")

@@ -434,9 +425,7 @@ def create_undefined(node=None):
def __getattr__(self, name):
if name == "name" or _is_dunder_name(name):
raise AttributeError(
"'{}' object has no attribute '{}'".format(
type(self).__name__, name
)
"'{}' object has no attribute '{}'".format(type(self).__name__, name)
)

self.name = name
@@ -482,6 +471,7 @@ def get_environment(

args["extensions"].append(MaterializationExtension)
args["extensions"].append(DocumentationExtension)
args["extensions"].append(TestExtension)

env_cls: Type[jinja2.Environment]
text_filter: Type
@@ -506,7 +496,7 @@ def catch_jinja(node=None) -> Iterator[None]:
e.translated = False
raise CompilationException(str(e), node) from e
except jinja2.exceptions.UndefinedError as e:
raise CompilationException(str(e), node) from e
raise UndefinedMacroException(str(e), node) from e
except CompilationException as exc:
exc.add_node(node)
raise
@@ -569,11 +559,7 @@ def get_rendered(
# If this is desirable in the native env as well, we could handle the
# native=True case by passing the input string to ast.literal_eval, like
# the native renderer does.
if (
not native
and isinstance(string, str)
and _HAS_RENDER_CHARS_PAT.search(string) is None
):
if not native and isinstance(string, str) and _HAS_RENDER_CHARS_PAT.search(string) is None:
return string
template = get_template(
string,
@@ -594,7 +580,7 @@ def extract_toplevel_blocks(
allowed_blocks: Optional[Set[str]] = None,
collect_raw_data: bool = True,
) -> List[Union[BlockData, BlockTag]]:
"""Extract the top level blocks with matching block types from a jinja
"""Extract the top-level blocks with matching block types from a jinja
file, with some special handling for block nesting.

:param data: The data to extract blocks from.
@@ -613,12 +599,12 @@ def extract_toplevel_blocks(
)


SCHEMA_TEST_KWARGS_NAME = "_dbt_schema_test_kwargs"
GENERIC_TEST_KWARGS_NAME = "_dbt_generic_test_kwargs"


def add_rendered_test_kwargs(
context: Dict[str, Any],
node: Union[ParsedSchemaTestNode, CompiledSchemaTestNode],
node: Union[ParsedGenericTestNode, CompiledGenericTestNode],
capture_macros: bool = False,
) -> None:
"""Render each of the test kwargs in the given context using the native
@@ -638,11 +624,11 @@ def add_rendered_test_kwargs(
# curly braces to make rendering happy
value = f"{{{{ {value} }}}}"

value = get_rendered(
value, context, node, capture_macros=capture_macros, native=True
)
value = get_rendered(value, context, node, capture_macros=capture_macros, native=True)

return value

kwargs = deep_map(_convert_function, node.test_metadata.kwargs)
context[SCHEMA_TEST_KWARGS_NAME] = kwargs
# The test_metadata.kwargs come from the test builder, and were set
# when the test node was created in _parse_generic_test.
kwargs = deep_map_render(_convert_function, node.test_metadata.kwargs)
context[GENERIC_TEST_KWARGS_NAME] = kwargs

core/dbt/clients/jinja_static.py (new file, 158 lines)
@@ -0,0 +1,158 @@
import jinja2
from dbt.clients.jinja import get_environment
from dbt.exceptions import raise_compiler_error


def statically_extract_macro_calls(string, ctx, db_wrapper=None):
# set 'capture_macros' to capture undefined
env = get_environment(None, capture_macros=True)
parsed = env.parse(string)

standard_calls = ["source", "ref", "config"]
possible_macro_calls = []
for func_call in parsed.find_all(jinja2.nodes.Call):
func_name = None
if hasattr(func_call, "node") and hasattr(func_call.node, "name"):
func_name = func_call.node.name
else:
# func_call for dbt_utils.current_timestamp macro
# Call(
#    node=Getattr(
#        node=Name(
#            name='dbt_utils',
#            ctx='load'
#        ),
#        attr='current_timestamp',
#        ctx='load
#    ),
#    args=[],
#    kwargs=[],
#    dyn_args=None,
#    dyn_kwargs=None
# )
if (
hasattr(func_call, "node")
and hasattr(func_call.node, "node")
and type(func_call.node.node).__name__ == "Name"
and hasattr(func_call.node, "attr")
):
package_name = func_call.node.node.name
macro_name = func_call.node.attr
if package_name == "adapter":
if macro_name == "dispatch":
ad_macro_calls = statically_parse_adapter_dispatch(
func_call, ctx, db_wrapper
)
possible_macro_calls.extend(ad_macro_calls)
else:
# This skips calls such as adapter.parse_index
continue
else:
func_name = f"{package_name}.{macro_name}"
else:
continue
if not func_name:
continue
if func_name in standard_calls:
continue
elif ctx.get(func_name):
continue
else:
if func_name not in possible_macro_calls:
possible_macro_calls.append(func_name)

return possible_macro_calls


# Call(
#     node=Getattr(
#         node=Name(
#             name='adapter',
#             ctx='load'
#         ),
#         attr='dispatch',
#         ctx='load'
#     ),
#     args=[
#         Const(value='test_pkg_and_dispatch')
#     ],
#     kwargs=[
#         Keyword(
#             key='packages',
#             value=Call(node=Getattr(node=Name(name='local_utils', ctx='load'),
#                 attr='_get_utils_namespaces', ctx='load'), args=[], kwargs=[],
#                 dyn_args=None, dyn_kwargs=None)
#         )
#     ],
#     dyn_args=None,
#     dyn_kwargs=None
# )
def statically_parse_adapter_dispatch(func_call, ctx, db_wrapper):
possible_macro_calls = []
# This captures an adapter.dispatch('<macro_name>') call.

func_name = None
# macro_name positional argument
if len(func_call.args) > 0:
func_name = func_call.args[0].value
if func_name:
possible_macro_calls.append(func_name)

# packages positional argument
macro_namespace = None
packages_arg = None
packages_arg_type = None

if len(func_call.args) > 1:
packages_arg = func_call.args[1]
# This can be a List or a Call
packages_arg_type = type(func_call.args[1]).__name__

# keyword arguments
if func_call.kwargs:
for kwarg in func_call.kwargs:
if kwarg.key == "macro_name":
# This will remain to enable static resolution
if type(kwarg.value).__name__ == "Const":
func_name = kwarg.value.value
possible_macro_calls.append(func_name)
else:
raise_compiler_error(
f"The macro_name parameter ({kwarg.value.value}) "
"to adapter.dispatch was not a string"
)
elif kwarg.key == "macro_namespace":
# This will remain to enable static resolution
kwarg_type = type(kwarg.value).__name__
if kwarg_type == "Const":
macro_namespace = kwarg.value.value
else:
raise_compiler_error(
"The macro_namespace parameter to adapter.dispatch "
f"is a {kwarg_type}, not a string"
)

# positional arguments
if packages_arg:
if packages_arg_type == "List":
# This will remain to enable static resolution
packages = []
for item in packages_arg.items:
packages.append(item.value)
elif packages_arg_type == "Const":
# This will remain to enable static resolution
macro_namespace = packages_arg.value

if db_wrapper:
macro = db_wrapper.dispatch(func_name, macro_namespace=macro_namespace).macro
func_name = f"{macro.package_name}.{macro.name}"
possible_macro_calls.append(func_name)
else:  # this is only for test/unit/test_macro_calls.py
if macro_namespace:
packages = [macro_namespace]
else:
packages = []
for package_name in packages:
possible_macro_calls.append(f"{package_name}.{func_name}")

return possible_macro_calls
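Static extraction never renders the template: it parses the source into Jinja's AST and walks the `Call` nodes. The core mechanism, reduced to plain jinja2 without dbt's context handling:

```python
import jinja2

env = jinja2.Environment()
source = "select * from {{ ref('orders') }} where {{ dbt_utils.star(ref('x')) }}"
ast = env.parse(source)  # parsing only; nothing is evaluated

calls = []
for call in ast.find_all(jinja2.nodes.Call):
    node = call.node
    if isinstance(node, jinja2.nodes.Name):
        calls.append(node.name)  # bare call: ref(...)
    elif isinstance(node, jinja2.nodes.Getattr) and isinstance(node.node, jinja2.nodes.Name):
        calls.append(f"{node.node.name}.{node.attr}")  # namespaced: dbt_utils.star(...)

print(calls)  # e.g. ['ref', 'dbt_utils.star', 'ref']
```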
@@ -1,10 +1,20 @@
from functools import wraps
import functools
from typing import Any, Dict, List
import requests
from dbt.exceptions import RegistryException
from dbt.utils import memoized
from dbt.logger import GLOBAL_LOGGER as logger
from dbt.events.functions import fire_event
from dbt.events.types import (
RegistryProgressMakingGETRequest,
RegistryProgressGETResponse,
RegistryIndexProgressMakingGETRequest,
RegistryIndexProgressGETResponse,
RegistryResponseUnexpectedType,
RegistryResponseMissingTopKeys,
RegistryResponseMissingNestedKeys,
RegistryResponseExtraNestedKeys,
)
from dbt.utils import memoized, _connection_exception_retry as connection_exception_retry
from dbt import deprecations
import os
import time

if os.getenv("DBT_PACKAGE_HUB_URL"):
DEFAULT_REGISTRY_BASE_URL = os.getenv("DBT_PACKAGE_HUB_URL")
@@ -12,61 +22,142 @@ else:
DEFAULT_REGISTRY_BASE_URL = "https://hub.getdbt.com/"


def _get_url(url, registry_base_url=None):
def _get_url(name, registry_base_url=None):
if registry_base_url is None:
registry_base_url = DEFAULT_REGISTRY_BASE_URL
url = "api/v1/{}.json".format(name)

return "{}{}".format(registry_base_url, url)


def _wrap_exceptions(fn):
@wraps(fn)
def wrapper(*args, **kwargs):
max_attempts = 5
attempt = 0
while True:
attempt += 1
try:
return fn(*args, **kwargs)
except requests.exceptions.ConnectionError as exc:
if attempt < max_attempts:
time.sleep(1)
continue

raise RegistryException("Unable to connect to registry hub") from exc

return wrapper
def _get_with_retries(package_name, registry_base_url=None):
get_fn = functools.partial(_get, package_name, registry_base_url)
return connection_exception_retry(get_fn, 5)

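The old decorator-based retry loop is replaced by the shared `connection_exception_retry` helper; any function can be retried by freezing its arguments into a zero-argument callable with `functools.partial`. A sketch of the pattern, with a minimal stand-in for dbt's actual helper (retry parameters here are illustrative):

```python
import functools
import time

import requests


def connection_exception_retry(fn, max_attempts: int, wait: float = 1.0):
    # minimal stand-in for dbt.utils._connection_exception_retry
    for attempt in range(1, max_attempts + 1):
        try:
            return fn()
        except requests.exceptions.ConnectionError:
            if attempt == max_attempts:
                raise
            time.sleep(wait)


def _get(package_name, registry_base_url=None):
    base = registry_base_url or "https://hub.getdbt.com/"
    resp = requests.get(f"{base}api/v1/{package_name}.json", timeout=30)
    resp.raise_for_status()
    return resp.json()


# freeze the arguments, then hand the thunk to the retry helper
get_fn = functools.partial(_get, "dbt-labs/dbt_utils")
metadata = connection_exception_retry(get_fn, 5)
```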
@_wrap_exceptions
def _get(path, registry_base_url=None):
url = _get_url(path, registry_base_url)
logger.debug("Making package registry request: GET {}".format(url))
resp = requests.get(url)
logger.debug("Response from registry: GET {} {}".format(url, resp.status_code))
def _get(package_name, registry_base_url=None):
url = _get_url(package_name, registry_base_url)
fire_event(RegistryProgressMakingGETRequest(url=url))
# all exceptions from requests get caught in the retry logic so no need to wrap this here
resp = requests.get(url, timeout=30)
fire_event(RegistryProgressGETResponse(url=url, resp_code=resp.status_code))
resp.raise_for_status()
return resp.json()

# The response should always be a dictionary. Anything else is unexpected, raise error.
# Raising this error will cause this function to retry (if called within _get_with_retries)
# and hopefully get a valid response. This seems to happen when there's an issue with the Hub.
# Since we control what we expect the HUB to return, this is safe.
# See https://github.com/dbt-labs/dbt-core/issues/4577
# and https://github.com/dbt-labs/dbt-core/issues/4849
response = resp.json()

if not isinstance(response, dict):  # This will also catch Nonetype
error_msg = (
f"Request error: Expected a response type of <dict> but got {type(response)} instead"
)
fire_event(RegistryResponseUnexpectedType(response=response))
raise requests.exceptions.ContentDecodingError(error_msg, response=resp)

# check for expected top level keys
expected_keys = {"name", "versions"}
if not expected_keys.issubset(response):
error_msg = (
f"Request error: Expected the response to contain keys {expected_keys} "
f"but is missing {expected_keys.difference(set(response))}"
)
fire_event(RegistryResponseMissingTopKeys(response=response))
raise requests.exceptions.ContentDecodingError(error_msg, response=resp)

# check for the keys we need nested under each version
expected_version_keys = {"name", "packages", "downloads"}
all_keys = set().union(*(response["versions"][d] for d in response["versions"]))
if not expected_version_keys.issubset(all_keys):
error_msg = (
"Request error: Expected the response for the version to contain keys "
f"{expected_version_keys} but is missing {expected_version_keys.difference(all_keys)}"
)
fire_event(RegistryResponseMissingNestedKeys(response=response))
raise requests.exceptions.ContentDecodingError(error_msg, response=resp)

# all version responses should contain identical keys.
has_extra_keys = set().difference(*(response["versions"][d] for d in response["versions"]))
if has_extra_keys:
error_msg = (
"Request error: Keys for all versions do not match. Found extra key(s) "
f"of {has_extra_keys}."
)
fire_event(RegistryResponseExtraNestedKeys(response=response))
raise requests.exceptions.ContentDecodingError(error_msg, response=resp)

return response

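A note on the set arithmetic above: `set().union(*version_dicts)` gathers every key seen under any version, but `set().difference(*version_dicts)` starts from an empty set and therefore always evaluates to an empty set, so as written the "extra keys" branch appears unable to fire. A sketch contrasting the two, plus a check that would actually detect mismatched key sets:

```python
versions = {
    "1.0.0": {"name": "x", "packages": [], "downloads": {}},
    "1.1.0": {"name": "x", "packages": [], "downloads": {}, "deprecated": True},
}

all_keys = set().union(*(versions[v] for v in versions))
print(all_keys)  # {'name', 'packages', 'downloads', 'deprecated'}

# empty set minus anything is still empty:
print(set().difference(*(versions[v] for v in versions)))  # set()

# one way to detect keys that only some versions carry:
key_sets = [set(versions[v]) for v in versions]
extra = set.union(*key_sets) - set.intersection(*key_sets)
print(extra)  # {'deprecated'}
```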
def index(registry_base_url=None):
|
||||
return _get("api/v1/index.json", registry_base_url)
|
||||
_get_cached = memoized(_get_with_retries)
|
||||
|
||||
|
||||
def package(package_name, registry_base_url=None) -> Dict[str, Any]:
|
||||
# returns a dictionary of metadata for all versions of a package
|
||||
response = _get_cached(package_name, registry_base_url)
|
||||
# Either redirectnamespace or redirectname in the JSON response indicate a redirect
|
||||
# redirectnamespace redirects based on package ownership
|
||||
# redirectname redirects based on package name
|
||||
# Both can be present at the same time, or neither. Fails gracefully to old name
|
||||
if ("redirectnamespace" in response) or ("redirectname" in response):
|
||||
|
||||
if ("redirectnamespace" in response) and response["redirectnamespace"] is not None:
|
||||
use_namespace = response["redirectnamespace"]
|
||||
else:
|
||||
use_namespace = response["namespace"]
|
||||
|
||||
if ("redirectname" in response) and response["redirectname"] is not None:
|
||||
use_name = response["redirectname"]
|
||||
else:
|
||||
use_name = response["name"]
|
||||
|
||||
new_nwo = use_namespace + "/" + use_name
|
||||
deprecations.warn("package-redirect", old_name=package_name, new_name=new_nwo)
|
||||
return response["versions"]
|
||||
|
||||
|
||||
def package_version(package_name, version, registry_base_url=None) -> Dict[str, Any]:
|
||||
# returns the metadata of a specific version of a package
|
||||
response = package(package_name, registry_base_url)
|
||||
return response[version]
|
||||
|
||||
|
||||
def get_available_versions(package_name) -> List["str"]:
|
||||
# returns a list of all available versions of a package
|
||||
response = package(package_name)
|
||||
return list(response)
|
||||
|
||||
|
||||
def _get_index(registry_base_url=None):
|
||||
|
||||
url = _get_url("index", registry_base_url)
|
||||
fire_event(RegistryIndexProgressMakingGETRequest(url=url))
|
||||
# all exceptions from requests get caught in the retry logic so no need to wrap this here
|
||||
resp = requests.get(url, timeout=30)
|
||||
fire_event(RegistryIndexProgressGETResponse(url=url, resp_code=resp.status_code))
|
||||
resp.raise_for_status()
|
||||
|
||||
# The response should be a list. Anything else is unexpected, raise an error.
|
||||
# Raising this error will cause this function to retry and hopefully get a valid response.
|
||||
|
||||
response = resp.json()
|
||||
|
||||
if not isinstance(response, list): # This will also catch Nonetype
|
||||
error_msg = (
|
||||
f"Request error: The response type of {type(response)} is not valid: {resp.text}"
|
||||
)
|
||||
raise requests.exceptions.ContentDecodingError(error_msg, response=resp)
|
||||
|
||||
return response
|
||||
|
||||
|
||||
def index(registry_base_url=None) -> List[str]:
|
||||
# this returns a list of all packages on the Hub
|
||||
get_index_fn = functools.partial(_get_index, registry_base_url)
|
||||
return connection_exception_retry(get_index_fn, 5)
|
||||
|
||||
|
||||
index_cached = memoized(index)
|
||||
|
||||
|
||||
def packages(registry_base_url=None):
|
||||
return _get("api/v1/packages.json", registry_base_url)
|
||||
|
||||
|
||||
def package(name, registry_base_url=None):
|
||||
return _get("api/v1/{}.json".format(name), registry_base_url)
|
||||
|
||||
|
||||
def package_version(name, version, registry_base_url=None):
|
||||
return _get("api/v1/{}/{}.json".format(name, version), registry_base_url)
|
||||
|
||||
|
||||
def get_available_versions(name):
|
||||
response = package(name)
|
||||
return list(response["versions"])
|
||||
|
||||
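The redirect handling above is easy to miss inside the caching and deprecation machinery. Below is a minimal, self-contained sketch of just the resolution rule (the sample response dict and the print() stand-in for deprecations.warn are illustrative, not dbt's API):

import json  # noqa: F401  (only to emphasize the response is plain parsed JSON)


def resolve_redirect(response: dict, old_name: str) -> str:
    # a redirect is signalled by either key being present and non-null
    redirected = (
        response.get("redirectnamespace") is not None
        or response.get("redirectname") is not None
    )
    namespace = response.get("redirectnamespace") or response["namespace"]
    name = response.get("redirectname") or response["name"]
    new_nwo = namespace + "/" + name
    if redirected:
        print(f"package-redirect: {old_name} -> {new_nwo}")  # stand-in for deprecations.warn
    return new_nwo


resolve_redirect(
    {"namespace": "fishtown-analytics", "name": "dbt_utils", "redirectnamespace": "dbt-labs"},
    "fishtown-analytics/dbt_utils",
)  # -> "dbt-labs/dbt_utils", with a redirect warning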
core/dbt/clients/system.py

@@ -1,4 +1,5 @@
import errno
import functools
import fnmatch
import json
import os
@@ -12,10 +13,17 @@ import requests
import stat
from typing import Type, NoReturn, List, Optional, Dict, Any, Tuple, Callable, Union

from dbt.events.functions import fire_event
from dbt.events.types import (
    SystemErrorRetrievingModTime,
    SystemCouldNotWrite,
    SystemExecutingCmd,
    SystemStdOutMsg,
    SystemStdErrMsg,
    SystemReportReturnCode,
)
import dbt.exceptions
import dbt.utils

from dbt.logger import GLOBAL_LOGGER as logger
from dbt.utils import _connection_exception_retry as connection_exception_retry

if sys.platform == "win32":
    from ctypes import WinDLL, c_bool
@@ -28,7 +36,7 @@ def find_matching(
    root_path: str,
    relative_paths_to_search: List[str],
    file_pattern: str,
) -> List[Dict[str, str]]:
) -> List[Dict[str, Any]]:
    """
    Given an absolute `root_path`, a list of relative paths to that
    absolute root path (`relative_paths_to_search`), and a `file_pattern`
@@ -56,12 +64,18 @@ def find_matching(
        for local_file in local_files:
            absolute_path = os.path.join(current_path, local_file)
            relative_path = os.path.relpath(absolute_path, absolute_path_to_search)
            modification_time = 0.0
            try:
                modification_time = os.path.getmtime(absolute_path)
            except OSError:
                fire_event(SystemErrorRetrievingModTime(path=absolute_path))
            if reobj.match(local_file):
                matching.append(
                    {
                        "searched_path": relative_path_to_search,
                        "absolute_path": absolute_path,
                        "relative_path": relative_path,
                        "modification_time": modification_time,
                    }
                )

@@ -150,10 +164,7 @@ def write_file(path: str, contents: str = "") -> bool:
                reason = "Path was possibly too long"
            # all our hard work and the path was still too long. Log and
            # continue.
            logger.debug(
                f"Could not write to path {path}({len(path)} characters): "
                f"{reason}\nexception: {exc}"
            )
            fire_event(SystemCouldNotWrite(path=path, reason=reason, exc=exc))
        else:
            raise
    return True
@@ -167,9 +178,7 @@ def write_json(path: str, data: Dict[str, Any]) -> bool:
    return write_file(path, json.dumps(data, cls=dbt.utils.JSONEncoder))


def _windows_rmdir_readonly(
    func: Callable[[str], Any], path: str, exc: Tuple[Any, OSError, Any]
):
def _windows_rmdir_readonly(func: Callable[[str], Any], path: str, exc: Tuple[Any, OSError, Any]):
    exception_val = exc[1]
    if exception_val.errno == errno.EACCES:
        os.chmod(path, stat.S_IWUSR)
@@ -237,16 +246,17 @@ def _supports_long_paths() -> bool:
    # https://stackoverflow.com/a/35097999/11262881
    # I don't know exactly what he means, but I am inclined to believe him as
    # he's pretty active on Python windows bugs!
    try:
        dll = WinDLL("ntdll")
    except OSError:  # I don't think this happens? you need ntdll to run python
        return False
    # not all windows versions have it at all
    if not hasattr(dll, "RtlAreLongPathsEnabled"):
        return False
    # tell windows we want to get back a single unsigned byte (a bool).
    dll.RtlAreLongPathsEnabled.restype = c_bool
    return dll.RtlAreLongPathsEnabled()
    else:
        try:
            dll = WinDLL("ntdll")
        except OSError:  # I don't think this happens? you need ntdll to run python
            return False
        # not all windows versions have it at all
        if not hasattr(dll, "RtlAreLongPathsEnabled"):
            return False
        # tell windows we want to get back a single unsigned byte (a bool).
        dll.RtlAreLongPathsEnabled.restype = c_bool
        return dll.RtlAreLongPathsEnabled()


def convert_path(path: str) -> str:
@@ -326,7 +336,7 @@ def _handle_posix_cmd_error(exc: OSError, cwd: str, cmd: List[str]) -> NoReturn:


def _handle_posix_error(exc: OSError, cwd: str, cmd: List[str]) -> NoReturn:
    """OSError handling for posix systems.
    """OSError handling for POSIX systems.

    Some things that could happen to trigger an OSError:
      - cwd could not exist
@@ -377,7 +387,7 @@ def _handle_windows_error(exc: OSError, cwd: str, cmd: List[str]) -> NoReturn:


def _interpret_oserror(exc: OSError, cwd: str, cmd: List[str]) -> NoReturn:
    """Interpret an OSError exc and raise the appropriate dbt exception."""
    """Interpret an OSError exception and raise the appropriate dbt exception."""
    if len(cmd) == 0:
        raise dbt.exceptions.CommandError(cwd, cmd)

@@ -393,10 +403,8 @@ def _interpret_oserror(exc: OSError, cwd: str, cmd: List[str]) -> NoReturn:
    )


def run_cmd(
    cwd: str, cmd: List[str], env: Optional[Dict[str, Any]] = None
) -> Tuple[bytes, bytes]:
    logger.debug('Executing "{}"'.format(" ".join(cmd)))
def run_cmd(cwd: str, cmd: List[str], env: Optional[Dict[str, Any]] = None) -> Tuple[bytes, bytes]:
    fire_event(SystemExecutingCmd(cmd=cmd))
    if len(cmd) == 0:
        raise dbt.exceptions.CommandError(cwd, cmd)

@@ -408,6 +416,9 @@ def run_cmd(
        full_env.update(env)

    try:
        exe_pth = shutil.which(cmd[0])
        if exe_pth:
            cmd = [os.path.abspath(exe_pth)] + list(cmd[1:])
        proc = subprocess.Popen(
            cmd, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=full_env
        )
@@ -416,18 +427,27 @@ def run_cmd(
    except OSError as exc:
        _interpret_oserror(exc, cwd, cmd)

    logger.debug('STDOUT: "{!s}"'.format(out))
    logger.debug('STDERR: "{!s}"'.format(err))
    fire_event(SystemStdOutMsg(bmsg=out))
    fire_event(SystemStdErrMsg(bmsg=err))

    if proc.returncode != 0:
        logger.debug("command return code={}".format(proc.returncode))
        fire_event(SystemReportReturnCode(returncode=proc.returncode))
        raise dbt.exceptions.CommandResultError(cwd, cmd, proc.returncode, out, err)

    return out, err
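The run_cmd flow above resolves the executable with shutil.which, runs it with subprocess, mirrors stdout/stderr into events, and raises on a nonzero exit. A minimal sketch of the same flow, with plain exceptions standing in for dbt.exceptions and no event machinery:

import os
import shutil
import subprocess
import sys
from typing import Dict, List, Optional, Tuple


def run_cmd_sketch(cwd: str, cmd: List[str], env: Optional[Dict[str, str]] = None) -> Tuple[bytes, bytes]:
    if not cmd:
        raise ValueError("empty command")
    full_env = dict(os.environ, **(env or {}))
    # resolve the executable to an absolute path so Popen does not depend on PATH
    exe_pth = shutil.which(cmd[0])
    if exe_pth:
        cmd = [os.path.abspath(exe_pth)] + list(cmd[1:])
    proc = subprocess.Popen(
        cmd, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=full_env
    )
    out, err = proc.communicate()
    if proc.returncode != 0:
        raise RuntimeError(f"command failed ({proc.returncode}): {err!r}")
    return out, err


out, _ = run_cmd_sketch(".", [sys.executable, "--version"])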
def download(
def download_with_retries(
    url: str, path: str, timeout: Optional[Union[float, tuple]] = None
) -> None:
    download_fn = functools.partial(download, url, path, timeout)
    connection_exception_retry(download_fn, 5)


def download(
    url: str,
    path: str,
    timeout: Optional[Union[float, Tuple[float, float], Tuple[float, None]]] = None,
) -> None:
    path = convert_path(path)
    connection_timeout = timeout or float(os.getenv("DBT_HTTP_TIMEOUT", 10))
@@ -451,12 +471,10 @@ def rename(from_path: str, to_path: str, force: bool = False) -> None:
    shutil.move(from_path, to_path)


def untar_package(
    tar_path: str, dest_dir: str, rename_to: Optional[str] = None
) -> None:
def untar_package(tar_path: str, dest_dir: str, rename_to: Optional[str] = None) -> None:
    tar_path = convert_path(tar_path)
    tar_dir_name = None
    with tarfile.open(tar_path, "r") as tarball:
    with tarfile.open(tar_path, "r:gz") as tarball:
        tarball.extractall(dest_dir)
        tar_dir_name = os.path.commonprefix(tarball.getnames())
    if rename_to:
@@ -488,7 +506,7 @@ def move(src, dst):
    directory on windows when it has read-only files in it and the move is
    between two drives.

    This is almost identical to the real shutil.move, except it uses our rmtree
    This is almost identical to the real shutil.move, except it, uses our rmtree
    and skips handling non-windows OSes since the existing one works ok there.
    """
    src = convert_path(src)
@@ -523,7 +541,7 @@ def move(src, dst):


def rmtree(path):
    """Recursively remove path. On permissions errors on windows, try to remove
    """Recursively remove the path. On permissions errors on windows, try to remove
    the read-only flag and try again.
    """
    path = convert_path(path)
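The new download_with_retries wrapper shows the retry pattern used throughout this changeset: bind the arguments with functools.partial, then hand the resulting zero-argument callable to a retry helper with an attempt budget. A hedged sketch of the pattern follows; retry_on_connection_error is a simplified stand-in for dbt's _connection_exception_retry, not its real implementation:

import functools
import time
from typing import Callable

import requests


def retry_on_connection_error(fn: Callable, max_attempts: int, attempt: int = 0):
    """Simplified stand-in for dbt's _connection_exception_retry."""
    try:
        return fn()
    except (requests.exceptions.ConnectionError, requests.exceptions.Timeout):
        if attempt + 1 >= max_attempts:
            raise
        time.sleep(1)  # back off briefly between attempts
        return retry_on_connection_error(fn, max_attempts, attempt + 1)


def download(url: str, path: str, timeout: float = 10.0) -> None:
    resp = requests.get(url, timeout=timeout)
    resp.raise_for_status()
    with open(path, "wb") as fh:
        fh.write(resp.content)


def download_with_retries(url: str, path: str, timeout: float = 10.0) -> None:
    # bind the arguments now; the retry helper calls the no-arg partial
    download_fn = functools.partial(download, url, path, timeout)
    retry_on_connection_error(download_fn, 5)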
core/dbt/clients/yaml_helper.py

@@ -1,7 +1,6 @@
import dbt.exceptions

from typing import Any, Dict, Optional
import yaml
import yaml.scanner

# the C version is faster, but it doesn't always exist
try:
@@ -32,9 +31,7 @@ def prefix_with_line_numbers(string, no_start, no_end):
    numbers = range(no_start, no_end)
    relevant_lines = line_list[no_start:no_end]

    return "\n".join(
        [line_no(i + 1, line) for (i, line) in zip(numbers, relevant_lines)]
    )
    return "\n".join([line_no(i + 1, line) for (i, line) in zip(numbers, relevant_lines)])


def contextualized_yaml_error(raw_contents, error):
@@ -50,11 +47,11 @@ def contextualized_yaml_error(raw_contents, error):
    )


def safe_load(contents):
def safe_load(contents) -> Optional[Dict[str, Any]]:
    return yaml.load(contents, Loader=SafeLoader)


def load_yaml_text(contents):
def load_yaml_text(contents, path=None):
    try:
        return safe_load(contents)
    except (yaml.scanner.ScannerError, yaml.YAMLError) as e:
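contextualized_yaml_error surfaces parse failures with a few line-numbered lines of context around the bad spot. A minimal sketch of the same idea, using PyYAML's problem_mark directly (the helper names here are illustrative, not dbt's exact functions):

import yaml


def lines_with_numbers(text: str, start: int, end: int) -> str:
    lines = text.split("\n")[start:end]
    return "\n".join(f"{start + i + 1}: {line}" for i, line in enumerate(lines))


def load_with_context(contents: str):
    try:
        return yaml.safe_load(contents)
    except yaml.YAMLError as exc:
        mark = getattr(exc, "problem_mark", None)
        if mark is not None:
            context = lines_with_numbers(contents, max(mark.line - 2, 0), mark.line + 3)
            raise ValueError(f"YAML error near line {mark.line + 1}:\n{context}") from exc
        raise


try:
    load_with_context("a: 1\nb: [1, 2\nc: 3\n")
except ValueError as e:
    print(e)  # prints numbered context around the unclosed flow sequence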
core/dbt/compilation.py

@@ -3,18 +3,18 @@ from collections import defaultdict
from typing import List, Dict, Any, Tuple, cast, Optional

import networkx as nx  # type: ignore
import pickle
import sqlparse

from dbt import flags
from dbt.adapters.factory import get_adapter
from dbt.clients import jinja
from dbt.clients.system import make_directory
from dbt.context.providers import generate_runtime_model
from dbt.contracts.graph.manifest import Manifest
from dbt.context.providers import generate_runtime_model_context
from dbt.contracts.graph.manifest import Manifest, UniqueID
from dbt.contracts.graph.compiled import (
    CompiledDataTestNode,
    CompiledSchemaTestNode,
    COMPILED_TYPES,
    CompiledGenericTestNode,
    GraphMemberNode,
    InjectedCTE,
    ManifestNode,
@@ -27,9 +27,10 @@ from dbt.exceptions import (
    RuntimeException,
)
from dbt.graph import Graph
from dbt.logger import GLOBAL_LOGGER as logger
from dbt.events.functions import fire_event
from dbt.events.types import FoundStats, CompilingNode, WritingInjectedSQLForNode
from dbt.node_types import NodeType
from dbt.utils import pluralize
from dbt.events.format import pluralize
import dbt.tracking

graph_file_name = "graph.gpickle"
@@ -54,6 +55,7 @@ def print_compile_stats(stats):
        NodeType.Seed: "seed file",
        NodeType.Source: "source",
        NodeType.Exposure: "exposure",
        NodeType.Metric: "metric",
    }

    results = {k: 0 for k in names.keys()}
@@ -64,11 +66,9 @@ def print_compile_stats(stats):
    resource_counts = {k.pluralize(): v for k, v in results.items()}
    dbt.tracking.track_resource_counts(resource_counts)

    stat_line = ", ".join(
        [pluralize(ct, names.get(t)) for t, ct in results.items() if t in names]
    )
    stat_line = ", ".join([pluralize(ct, names.get(t)) for t, ct in results.items() if t in names])

    logger.info("Found {}".format(stat_line))
    fire_event(FoundStats(stat_line=stat_line))


def _node_enabled(node: ManifestNode):
@@ -89,6 +89,8 @@ def _generate_stats(manifest: Manifest):
        stats[source.resource_type] += 1
    for exposure in manifest.exposures.values():
        stats[exposure.resource_type] += 1
    for metric in manifest.metrics.values():
        stats[metric.resource_type] += 1
    for macro in manifest.macros.values():
        stats[macro.resource_type] += 1
    return stats
@@ -107,6 +109,19 @@ def _extend_prepended_ctes(prepended_ctes, new_prepended_ctes):
        _add_prepended_cte(prepended_ctes, new_cte)


def _get_tests_for_node(manifest: Manifest, unique_id: UniqueID) -> List[UniqueID]:
    """Get a list of tests that depend on the node with the
    provided unique id"""

    tests = []
    if unique_id in manifest.child_map:
        for child_unique_id in manifest.child_map[unique_id]:
            if child_unique_id.startswith("test."):
                tests.append(child_unique_id)

    return tests
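_get_tests_for_node is a plain lookup into the manifest's child_map, filtering children by their unique-id prefix. A toy sketch with a hypothetical map shows exactly what it returns:

from typing import Dict, List

# hypothetical child_map: node unique_id -> unique_ids of everything that depends on it
child_map: Dict[str, List[str]] = {
    "model.jaffle_shop.customers": [
        "test.jaffle_shop.unique_customers_customer_id",
        "model.jaffle_shop.orders",
    ],
}


def get_tests_for_node(child_map: Dict[str, List[str]], unique_id: str) -> List[str]:
    # only children whose unique_id marks them as tests
    return [child for child in child_map.get(unique_id, []) if child.startswith("test.")]


assert get_tests_for_node(child_map, "model.jaffle_shop.customers") == [
    "test.jaffle_shop.unique_customers_customer_id"
]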
class Linker:
    def __init__(self, data=None):
        if data is None:
@@ -142,10 +157,11 @@ class Linker:
        include all nodes in their corresponding graph entries.
        """
        out_graph = self.graph.copy()
        for node_id in self.graph.nodes():
        for node_id in self.graph:
            data = manifest.expect(node_id).to_dict(omit_none=True)
            out_graph.add_node(node_id, **data)
        nx.write_gpickle(out_graph, outfile)
        with open(outfile, "wb") as outfh:
            pickle.dump(out_graph, outfh, protocol=pickle.HIGHEST_PROTOCOL)


class Compiler:
@@ -154,7 +170,7 @@ class Compiler:

    def initialize(self):
        make_directory(self.config.target_path)
        make_directory(self.config.modules_path)
        make_directory(self.config.packages_install_path)

    # creates a ModelContext which is converted to
    # a dict for jinja rendering of SQL
@@ -165,9 +181,9 @@ class Compiler:
        extra_context: Dict[str, Any],
    ) -> Dict[str, Any]:

        context = generate_runtime_model(node, self.config, manifest)
        context = generate_runtime_model_context(node, self.config, manifest)
        context.update(extra_context)
        if isinstance(node, CompiledSchemaTestNode):
        if isinstance(node, CompiledGenericTestNode):
            # for test nodes, add a special keyword args value to the context
            jinja.add_rendered_test_kwargs(context, node)

@@ -180,7 +196,7 @@ class Compiler:

    def _get_relation_name(self, node: ParsedNode):
        relation_name = None
        if node.resource_type in NodeType.refable() and not node.is_ephemeral_model:
        if node.is_relational and not node.is_ephemeral_model:
            adapter = get_adapter(self.config)
            relation_cls = adapter.Relation
            relation_name = str(relation_cls.create_from(self.config, node))
@@ -237,29 +253,24 @@ class Compiler:
            trailing_comma = sqlparse.sql.Token(sqlparse.tokens.Punctuation, ",")
            parsed.insert_after(with_stmt, trailing_comma)

        token = sqlparse.sql.Token(
            sqlparse.tokens.Keyword, ", ".join(c.sql for c in ctes)
        )
        token = sqlparse.sql.Token(sqlparse.tokens.Keyword, ", ".join(c.sql for c in ctes))
        parsed.insert_after(with_stmt, token)

        return str(parsed)

    def _get_dbt_test_name(self) -> str:
        return "dbt__cte__internal_test"

    # This method is called by the 'compile_node' method. Starting
    # from the node that it is passed in, it will recursively call
    # itself using the 'extra_ctes'. The 'ephemeral' models do
    # not produce SQL that is executed directly, instead they
    # are rolled up into the models that refer to them by
    # inserting CTEs into the SQL.
    def _recursively_prepend_ctes(
        self,
        model: NonSourceCompiledNode,
        manifest: Manifest,
        extra_context: Optional[Dict[str, Any]],
    ) -> Tuple[NonSourceCompiledNode, List[InjectedCTE]]:

        """This method is called by the 'compile_node' method. Starting
        from the node that it is passed in, it will recursively call
        itself using the 'extra_ctes'. The 'ephemeral' models do
        not produce SQL that is executed directly, instead they
        are rolled up into the models that refer to them by
        inserting CTEs into the SQL.
        """
        if model.compiled_sql is None:
            raise RuntimeException("Cannot inject ctes into an unparsed node", model)
        if model.extra_ctes_injected:
@@ -275,62 +286,55 @@ class Compiler:
        # gathered and then "injected" into the model.
        prepended_ctes: List[InjectedCTE] = []

        dbt_test_name = self._get_dbt_test_name()

        # extra_ctes are added to the model by
        # RuntimeRefResolver.create_relation, which adds an
        # extra_cte for every model relation which is an
        # ephemeral model.
        for cte in model.extra_ctes:
            if cte.id == dbt_test_name:
                sql = cte.sql
            if cte.id not in manifest.nodes:
                raise InternalException(
                    f"During compilation, found a cte reference that "
                    f"could not be resolved: {cte.id}"
                )
            cte_model = manifest.nodes[cte.id]

            if not cte_model.is_ephemeral_model:
                raise InternalException(f"{cte.id} is not ephemeral")

            # This model has already been compiled, so it's been
            # through here before
            if getattr(cte_model, "compiled", False):
                assert isinstance(cte_model, tuple(COMPILED_TYPES.values()))
                cte_model = cast(NonSourceCompiledNode, cte_model)
                new_prepended_ctes = cte_model.extra_ctes

            # if the cte_model isn't compiled, i.e. first time here
            else:
                if cte.id not in manifest.nodes:
                    raise InternalException(
                        f"During compilation, found a cte reference that "
                        f"could not be resolved: {cte.id}"
                    )
                cte_model = manifest.nodes[cte.id]
                # This is an ephemeral parsed model that we can compile.
                # Compile and update the node
                cte_model = self._compile_node(cte_model, manifest, extra_context)
                # recursively call this method
                cte_model, new_prepended_ctes = self._recursively_prepend_ctes(
                    cte_model, manifest, extra_context
                )
                # Save compiled SQL file and sync manifest
                self._write_node(cte_model)
                manifest.sync_update_node(cte_model)

                if not cte_model.is_ephemeral_model:
                    raise InternalException(f"{cte.id} is not ephemeral")
            _extend_prepended_ctes(prepended_ctes, new_prepended_ctes)

                # This model has already been compiled, so it's been
                # through here before
                if getattr(cte_model, "compiled", False):
                    assert isinstance(cte_model, tuple(COMPILED_TYPES.values()))
                    cte_model = cast(NonSourceCompiledNode, cte_model)
                    new_prepended_ctes = cte_model.extra_ctes

                # if the cte_model isn't compiled, i.e. first time here
                else:
                    # This is an ephemeral parsed model that we can compile.
                    # Compile and update the node
                    cte_model = self._compile_node(cte_model, manifest, extra_context)
                    # recursively call this method
                    cte_model, new_prepended_ctes = self._recursively_prepend_ctes(
                        cte_model, manifest, extra_context
                    )
                    # Save compiled SQL file and sync manifest
                    self._write_node(cte_model)
                    manifest.sync_update_node(cte_model)

                _extend_prepended_ctes(prepended_ctes, new_prepended_ctes)

                new_cte_name = self.add_ephemeral_prefix(cte_model.name)
                sql = f" {new_cte_name} as (\n{cte_model.compiled_sql}\n)"
            new_cte_name = self.add_ephemeral_prefix(cte_model.name)
            rendered_sql = cte_model._pre_injected_sql or cte_model.compiled_sql
            sql = f" {new_cte_name} as (\n{rendered_sql}\n)"

            _add_prepended_cte(prepended_ctes, InjectedCTE(id=cte.id, sql=sql))

        # We don't save injected_sql into compiled sql for ephemeral models
        # because it will cause problems with processing of subsequent models.
        # Ephemeral models do not produce executable SQL of their own.
        if not model.is_ephemeral_model:
            injected_sql = self._inject_ctes_into_sql(
                model.compiled_sql,
                prepended_ctes,
            )
            model.compiled_sql = injected_sql
        injected_sql = self._inject_ctes_into_sql(
            model.compiled_sql,
            prepended_ctes,
        )
        model._pre_injected_sql = model.compiled_sql
        model.compiled_sql = injected_sql
        model.extra_ctes_injected = True
        model.extra_ctes = prepended_ctes
        model.validate(model.to_dict(omit_none=True))
@@ -339,33 +343,6 @@ class Compiler:

        return model, prepended_ctes
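The docstring above describes the core idea: ephemeral models never run on their own; their compiled SQL is prepended as CTEs to the models that ref them. A string-level sketch of that injection follows. It is deliberately simplified: real dbt parses the SQL with sqlparse so it can splice into an existing with clause, and the exact CTE name prefix here is illustrative.

from typing import Dict, List

# hypothetical compiled SQL for an ephemeral model
ephemeral_sql: Dict[str, str] = {
    "stg_orders": "select * from raw.orders where status is not null",
}


def inject_ctes(model_sql: str, cte_names: List[str]) -> str:
    # prepend each ephemeral dependency as a CTE; dbt prefixes the name
    # (add_ephemeral_prefix) so it cannot collide with a real relation
    ctes = ",\n".join(
        f"__dbt__cte__{name} as (\n{ephemeral_sql[name]}\n)" for name in cte_names
    )
    return f"with {ctes}\n{model_sql}" if ctes else model_sql


print(inject_ctes("select count(*) from __dbt__cte__stg_orders", ["stg_orders"]))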
    def _add_ctes(
        self,
        compiled_node: NonSourceCompiledNode,
        manifest: Manifest,
        extra_context: Dict[str, Any],
    ) -> NonSourceCompiledNode:
        """Wrap the data test SQL in a CTE."""

        # for data tests, we need to insert a special CTE at the end of the
        # list containing the test query, and then have the "real" query be a
        # select count(*) from that model.
        # the benefit of doing it this way is that _add_ctes() can be
        # rewritten for different adapters to handle databases that don't
        # support CTEs, or at least don't have full support.
        if isinstance(compiled_node, CompiledDataTestNode):
            # the last prepend (so last in order) should be the data test body.
            # then we can add our select count(*) from _that_ cte as the "real"
            # compiled_sql, and do the regular prepend logic from CTEs.
            name = self._get_dbt_test_name()
            cte = InjectedCTE(
                id=name, sql=f" {name} as (\n{compiled_node.compiled_sql}\n)"
            )
            compiled_node.extra_ctes.append(cte)
            compiled_node.compiled_sql = f"\nselect count(*) from {name}"

        return compiled_node

    # creates a compiled_node from the ManifestNode passed in,
    # creates a "context" dictionary for jinja rendering,
    # and then renders the "compiled_sql" using the node, the
@@ -379,7 +356,7 @@ class Compiler:
        if extra_context is None:
            extra_context = {}

        logger.debug("Compiling {}".format(node.unique_id))
        fire_event(CompilingNode(unique_id=node.unique_id))

        data = node.to_dict(omit_none=True)
        data.update(
@@ -404,10 +381,6 @@ class Compiler:

        compiled_node.compiled = True

        # add ctes for specific test nodes, and also for
        # possible future use in adapters
        compiled_node = self._add_ctes(compiled_node, manifest, extra_context)

        return compiled_node

    def write_graph_file(self, linker: Linker, manifest: Manifest):
@@ -421,35 +394,90 @@ class Compiler:

        for dependency in node.depends_on_nodes:
            if dependency in manifest.nodes:
                linker.dependency(
                    node.unique_id, (manifest.nodes[dependency].unique_id)
                )
                linker.dependency(node.unique_id, (manifest.nodes[dependency].unique_id))
            elif dependency in manifest.sources:
                linker.dependency(
                    node.unique_id, (manifest.sources[dependency].unique_id)
                )
                linker.dependency(node.unique_id, (manifest.sources[dependency].unique_id))
            elif dependency in manifest.metrics:
                linker.dependency(node.unique_id, (manifest.metrics[dependency].unique_id))
            else:
                dependency_not_found(node, dependency)

    def link_graph(self, linker: Linker, manifest: Manifest):
    def link_graph(self, linker: Linker, manifest: Manifest, add_test_edges: bool = False):
        for source in manifest.sources.values():
            linker.add_node(source.unique_id)
        for node in manifest.nodes.values():
            self.link_node(linker, node, manifest)
        for exposure in manifest.exposures.values():
            self.link_node(linker, exposure, manifest)
            # linker.add_node(exposure.unique_id)
        for metric in manifest.metrics.values():
            self.link_node(linker, metric, manifest)

        cycle = linker.find_cycles()

        if cycle:
            raise RuntimeError("Found a cycle: {}".format(cycle))

    def compile(self, manifest: Manifest, write=True) -> Graph:
        if add_test_edges:
            manifest.build_parent_and_child_maps()
            self.add_test_edges(linker, manifest)

    def add_test_edges(self, linker: Linker, manifest: Manifest) -> None:
        """This method adds additional edges to the DAG. For a given non-test
        executable node, add an edge from an upstream test to the given node if
        the set of nodes the test depends on is a subset of the upstream nodes
        for the given node."""

        # Given a graph:
        # model1 --> model2 --> model3
        #   |          |
        #   |          \/
        #   \/       test2
        # test1
        #
        # Produce the following graph:
        # model1 --> model2 --> model3
        #   |  /\      |  /\      /\
        #   |   |      \/  |       |
        #   \/  |    test2 --------|
        # test1 -------------------|

        for node_id in linker.graph:
            # If node is executable (in manifest.nodes) and does _not_
            # represent a test, continue.
            if (
                node_id in manifest.nodes
                and manifest.nodes[node_id].resource_type != NodeType.Test
            ):
                # Get *everything* upstream of the node
                all_upstream_nodes = nx.traversal.bfs_tree(linker.graph, node_id, reverse=True)
                # Get the set of upstream nodes not including the current node.
                upstream_nodes = set([n for n in all_upstream_nodes if n != node_id])

                # Get all tests that depend on any upstream nodes.
                upstream_tests = []
                for upstream_node in upstream_nodes:
                    upstream_tests += _get_tests_for_node(manifest, upstream_node)

                for upstream_test in upstream_tests:
                    # Get the set of all nodes that the test depends on
                    # including the upstream_node itself. This is necessary
                    # because tests can depend on multiple nodes (ex:
                    # relationship tests). Test nodes do not distinguish
                    # between what node the test is "testing" and what
                    # node(s) it depends on.
                    test_depends_on = set(manifest.nodes[upstream_test].depends_on_nodes)

                    # If the set of nodes that an upstream test depends on
                    # is a subset of all upstream nodes of the current node,
                    # add an edge from the upstream test to the current node.
                    if test_depends_on.issubset(upstream_nodes):
                        linker.graph.add_edge(upstream_test, node_id)

    def compile(self, manifest: Manifest, write=True, add_test_edges=False) -> Graph:
        self.initialize()
        linker = Linker()

        self.link_graph(linker, manifest)
        self.link_graph(linker, manifest, add_test_edges)

        stats = _generate_stats(manifest)

@@ -463,19 +491,14 @@ class Compiler:
    def _write_node(self, node: NonSourceCompiledNode) -> ManifestNode:
        if not node.extra_ctes_injected or node.resource_type == NodeType.Snapshot:
            return node
        logger.debug(f'Writing injected SQL for node "{node.unique_id}"')
        fire_event(WritingInjectedSQLForNode(unique_id=node.unique_id))

        if node.compiled_sql:
            node.build_path = node.write_node(
            node.compiled_path = node.write_node(
                self.config.target_path, "compiled", node.compiled_sql
            )
        return node

    # This is the main entry point into this code. It's called by
    # CompileRunner.compile, GenericRPCRunner.compile, and
    # RunTask.get_hook_sql. It calls '_compile_node' to convert
    # the node into a compiled node, and then calls the
    # recursive method to "prepend" the ctes.
    def compile_node(
        self,
        node: ManifestNode,
@@ -483,6 +506,12 @@ class Compiler:
        extra_context: Optional[Dict[str, Any]] = None,
        write: bool = True,
    ) -> NonSourceCompiledNode:
        """This is the main entry point into this code. It's called by
        CompileRunner.compile, GenericRPCRunner.compile, and
        RunTask.get_hook_sql. It calls '_compile_node' to convert
        the node into a compiled node, and then calls the
        recursive method to "prepend" the ctes.
        """
        node = self._compile_node(node, manifest, extra_context)

        node, _ = self._recursively_prepend_ctes(node, manifest, extra_context)
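The add_test_edges rule can be exercised directly on the diagram's example graph. A hedged networkx sketch, with a hand-built test_depends_on map standing in for the manifest lookup (the assertions reproduce the "Produce the following graph" picture above):

import networkx as nx

# the "Given" graph from the comment above
g = nx.DiGraph()
g.add_edges_from([
    ("model1", "model2"), ("model2", "model3"),
    ("model1", "test1"), ("model2", "test2"),
])
test_depends_on = {"test1": {"model1"}, "test2": {"model2"}}

for node_id in list(g):
    if node_id in test_depends_on:
        continue  # only non-test nodes gain incoming test edges
    upstream = set(nx.traversal.bfs_tree(g, node_id, reverse=True)) - {node_id}
    for test, deps in test_depends_on.items():
        # the test's full dependency set must already be upstream of the node
        if deps and deps.issubset(upstream):
            g.add_edge(test, node_id)

assert g.has_edge("test1", "model2") and g.has_edge("test2", "model3")
assert g.has_edge("test1", "model3")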
core/dbt/config/README.md (new file)
@@ -0,0 +1 @@
# Config README

core/dbt/config/__init__.py
@@ -1,4 +1,4 @@
# all these are just exports, they need "noqa" so flake8 will not complain.
from .profile import Profile, PROFILES_DIR, read_user_config  # noqa
from .profile import Profile, read_user_config  # noqa
from .project import Project, IsFQNResource  # noqa
from .runtime import RuntimeConfig, UnsetProfileConfig  # noqa
core/dbt/config/profile.py

@@ -4,6 +4,7 @@ import os

from dbt.dataclass_schema import ValidationError

from dbt import flags
from dbt.clients.system import load_file_contents
from dbt.clients.yaml_helper import load_yaml_text
from dbt.contracts.connection import Credentials, HasCredentials
@@ -14,14 +15,15 @@ from dbt.exceptions import DbtProjectError
from dbt.exceptions import ValidationException
from dbt.exceptions import RuntimeException
from dbt.exceptions import validator_error_message
from dbt.logger import GLOBAL_LOGGER as logger
from dbt.events.types import MissingProfileTarget
from dbt.events.functions import fire_event
from dbt.utils import coerce_dict_str

from .renderer import ProfileRenderer

DEFAULT_THREADS = 1

DEFAULT_PROFILES_DIR = os.path.join(os.path.expanduser("~"), ".dbt")
PROFILES_DIR = os.path.expanduser(os.getenv("DBT_PROFILES_DIR", DEFAULT_PROFILES_DIR))

INVALID_PROFILE_MESSAGE = """
dbt encountered an error while trying to read your profiles.yml file.
@@ -42,7 +44,7 @@ defined in your profiles.yml file. You can find profiles.yml here:

{profiles_file}/profiles.yml
""".format(
    profiles_file=PROFILES_DIR
    profiles_file=DEFAULT_PROFILES_DIR
)


@@ -69,10 +71,10 @@ def read_user_config(directory: str) -> UserConfig:
    try:
        profile = read_profile(directory)
        if profile:
            user_cfg = coerce_dict_str(profile.get("config", {}))
            if user_cfg is not None:
                UserConfig.validate(user_cfg)
                return UserConfig.from_dict(user_cfg)
            user_config = coerce_dict_str(profile.get("config", {}))
            if user_config is not None:
                UserConfig.validate(user_config)
                return UserConfig.from_dict(user_config)
    except (RuntimeException, ValidationError):
        pass
    return UserConfig()
@@ -80,13 +82,33 @@ def read_user_config(directory: str) -> UserConfig:

# The Profile class is included in RuntimeConfig, so any attribute
# additions must also be set where the RuntimeConfig class is created
@dataclass
# `init=False` is a workaround for https://bugs.python.org/issue45081
@dataclass(init=False)
class Profile(HasCredentials):
    profile_name: str
    target_name: str
    config: UserConfig
    user_config: UserConfig
    threads: int
    credentials: Credentials
    profile_env_vars: Dict[str, Any]

    def __init__(
        self,
        profile_name: str,
        target_name: str,
        user_config: UserConfig,
        threads: int,
        credentials: Credentials,
    ):
        """Explicitly defining `__init__` to work around bug in Python 3.9.7
        https://bugs.python.org/issue45081
        """
        self.profile_name = profile_name
        self.target_name = target_name
        self.user_config = user_config
        self.threads = threads
        self.credentials = credentials
        self.profile_env_vars = {}  # never available on init
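The `@dataclass(init=False)` plus hand-written `__init__` is the workaround for bpo-45081, where Python 3.9.7 generated a broken `__init__` for certain dataclasses with inherited fields. A minimal sketch of the pattern (class and fields here are made up for illustration):

from dataclasses import dataclass


@dataclass(init=False)
class Settings:
    name: str
    threads: int

    def __init__(self, name: str, threads: int = 1):
        # explicit __init__; the dataclass decorator still generates
        # __repr__ and __eq__ from the field declarations above
        self.name = name
        self.threads = threads


s = Settings("dev", threads=4)
assert s == Settings("dev", 4)  # field-based __eq__ still works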
    def to_profile_info(self, serialize_credentials: bool = False) -> Dict[str, Any]:
        """Unlike to_project_config, this dict is not a mirror of any existing
@@ -100,12 +122,12 @@ class Profile(HasCredentials):
        result = {
            "profile_name": self.profile_name,
            "target_name": self.target_name,
            "config": self.config,
            "user_config": self.user_config,
            "threads": self.threads,
            "credentials": self.credentials,
        }
        if serialize_credentials:
            result["config"] = self.config.to_dict(omit_none=True)
            result["user_config"] = self.user_config.to_dict(omit_none=True)
            result["credentials"] = self.credentials.to_dict(omit_none=True)
        return result

@@ -118,15 +140,13 @@ class Profile(HasCredentials):
                "name": self.target_name,
                "target_name": self.target_name,
                "profile_name": self.profile_name,
                "config": self.config.to_dict(omit_none=True),
                "config": self.user_config.to_dict(omit_none=True),
            }
        )
        return target

    def __eq__(self, other: object) -> bool:
        if not (
            isinstance(other, self.__class__) and isinstance(self, other.__class__)
        ):
        if not (isinstance(other, self.__class__) and isinstance(self, other.__class__)):
            return NotImplemented
        return self.to_profile_info() == other.to_profile_info()

@@ -189,9 +209,7 @@ class Profile(HasCredentials):
        profile: Dict[str, Any], profile_name: str, target_name: str
    ) -> Dict[str, Any]:
        if "outputs" not in profile:
            raise DbtProfileError(
                "outputs not specified in profile '{}'".format(profile_name)
            )
            raise DbtProfileError("outputs not specified in profile '{}'".format(profile_name))
        outputs = profile["outputs"]

        if target_name not in outputs:
@@ -221,7 +239,7 @@ class Profile(HasCredentials):
        threads: int,
        profile_name: str,
        target_name: str,
        user_cfg: Optional[Dict[str, Any]] = None,
        user_config: Optional[Dict[str, Any]] = None,
    ) -> "Profile":
        """Create a profile from an existing set of Credentials and the
        remaining information.
@@ -230,20 +248,20 @@ class Profile(HasCredentials):
        :param threads: The number of threads to use for connections.
        :param profile_name: The profile name used for this profile.
        :param target_name: The target name used for this profile.
        :param user_cfg: The user-level config block from the
        :param user_config: The user-level config block from the
            raw profiles, if specified.
        :raises DbtProfileError: If the profile is invalid.
        :returns: The new Profile object.
        """
        if user_cfg is None:
            user_cfg = {}
        UserConfig.validate(user_cfg)
        config = UserConfig.from_dict(user_cfg)
        if user_config is None:
            user_config = {}
        UserConfig.validate(user_config)
        user_config_obj: UserConfig = UserConfig.from_dict(user_config)

        profile = cls(
            profile_name=profile_name,
            target_name=target_name,
            config=config,
            user_config=user_config_obj,
            threads=threads,
            credentials=credentials,
        )
@@ -275,11 +293,7 @@ class Profile(HasCredentials):
            target_name = renderer.render_value(raw_profile["target"])
        else:
            target_name = "default"
            logger.debug(
                "target not specified in profile '{}', using '{}'".format(
                    profile_name, target_name
                )
            )
            fire_event(MissingProfileTarget(profile_name=profile_name, target_name=target_name))

        raw_profile_data = cls._get_profile_data(raw_profile, profile_name, target_name)

@@ -295,7 +309,7 @@ class Profile(HasCredentials):
        raw_profile: Dict[str, Any],
        profile_name: str,
        renderer: ProfileRenderer,
        user_cfg: Optional[Dict[str, Any]] = None,
        user_config: Optional[Dict[str, Any]] = None,
        target_override: Optional[str] = None,
        threads_override: Optional[int] = None,
    ) -> "Profile":
@@ -307,7 +321,7 @@ class Profile(HasCredentials):
            disk as yaml and its values rendered with jinja.
        :param profile_name: The profile name used.
        :param renderer: The config renderer.
        :param user_cfg: The global config for the user, if it
        :param user_config: The global config for the user, if it
            was present.
        :param target_override: The target to use, if provided on
            the command line.
@@ -317,9 +331,9 @@ class Profile(HasCredentials):
            target could not be found
        :returns: The new Profile object.
        """
        # user_cfg is not rendered.
        if user_cfg is None:
            user_cfg = raw_profile.get("config")
        # user_config is not rendered.
        if user_config is None:
            user_config = raw_profile.get("config")
        # TODO: should it be, and the values coerced to bool?
        target_name, profile_data = cls.render_profile(
            raw_profile, profile_name, target_override, renderer
@@ -340,7 +354,7 @@ class Profile(HasCredentials):
            profile_name=profile_name,
            target_name=target_name,
            threads=threads,
            user_cfg=user_cfg,
            user_config=user_config,
        )

    @classmethod
@@ -367,9 +381,7 @@ class Profile(HasCredentials):
        :returns: The new Profile object.
        """
        if profile_name not in raw_profiles:
            raise DbtProjectError(
                "Could not find profile named '{}'".format(profile_name)
            )
            raise DbtProjectError("Could not find profile named '{}'".format(profile_name))

        # First, we've already got our final decision on profile name, and we
        # don't render keys, so we can pluck that out
@@ -377,13 +389,13 @@ class Profile(HasCredentials):
        if not raw_profile:
            msg = f"Profile {profile_name} in profiles.yml is empty"
            raise DbtProfileError(INVALID_PROFILE_MESSAGE.format(error_string=msg))
        user_cfg = raw_profiles.get("config")
        user_config = raw_profiles.get("config")

        return cls.from_raw_profile_info(
            raw_profile=raw_profile,
            profile_name=profile_name,
            renderer=renderer,
            user_cfg=user_cfg,
            user_config=user_config,
            target_override=target_override,
            threads_override=threads_override,
        )
@@ -411,10 +423,8 @@ class Profile(HasCredentials):
        """
        threads_override = getattr(args, "threads", None)
        target_override = getattr(args, "target", None)
        raw_profiles = read_profile(args.profiles_dir)
        profile_name = cls.pick_profile_name(
            getattr(args, "profile", None), project_profile_name
        )
        raw_profiles = read_profile(flags.PROFILES_DIR)
        profile_name = cls.pick_profile_name(getattr(args, "profile", None), project_profile_name)
        return cls.from_raw_profiles(
            raw_profiles=raw_profiles,
            profile_name=profile_name,
|
||||
import hashlib
|
||||
import os
|
||||
|
||||
from dbt import flags, deprecations
|
||||
from dbt.clients.system import resolve_path_from_base
|
||||
from dbt.clients.system import path_exists
|
||||
from dbt.clients.system import load_file_contents
|
||||
@@ -50,7 +51,7 @@ INVALID_VERSION_ERROR = """\
|
||||
This version of dbt is not supported with the '{package}' package.
|
||||
Installed version of dbt: {installed}
|
||||
Required version of dbt for '{package}': {version_spec}
|
||||
Check the requirements for the '{package}' package, or run dbt again with \
|
||||
Check for a different version of the '{package}' package, or run dbt again with \
|
||||
--no-version-check
|
||||
"""
|
||||
|
||||
@@ -59,7 +60,7 @@ IMPOSSIBLE_VERSION_ERROR = """\
|
||||
The package version requirement can never be satisfied for the '{package}
|
||||
package.
|
||||
Required versions of dbt for '{package}': {version_spec}
|
||||
Check the requirements for the '{package}' package, or run dbt again with \
|
||||
Check for a different version of the '{package}' package, or run dbt again with \
|
||||
--no-version-check
|
||||
"""
|
||||
|
||||
@@ -105,9 +106,7 @@ def package_config_from_data(packages_data: Dict[str, Any]):
|
||||
PackageConfig.validate(packages_data)
|
||||
packages = PackageConfig.from_dict(packages_data)
|
||||
except ValidationError as e:
|
||||
raise DbtProjectError(
|
||||
MALFORMED_PACKAGE_ERROR.format(error=str(e.message))
|
||||
) from e
|
||||
raise DbtProjectError(MALFORMED_PACKAGE_ERROR.format(error=str(e.message))) from e
|
||||
return packages
|
||||
|
||||
|
||||
@@ -127,20 +126,29 @@ def _parse_versions(versions: Union[List[str], str]) -> List[VersionSpecifier]:
|
||||
|
||||
|
||||
def _all_source_paths(
|
||||
source_paths: List[str],
|
||||
data_paths: List[str],
|
||||
model_paths: List[str],
|
||||
seed_paths: List[str],
|
||||
snapshot_paths: List[str],
|
||||
analysis_paths: List[str],
|
||||
macro_paths: List[str],
|
||||
) -> List[str]:
|
||||
# We need to turn a list of lists into just a list, then convert to a set to
|
||||
# get only unique elements, then back to a list
|
||||
return list(
|
||||
chain(source_paths, data_paths, snapshot_paths, analysis_paths, macro_paths)
|
||||
set(list(chain(model_paths, seed_paths, snapshot_paths, analysis_paths, macro_paths)))
|
||||
)
|
||||
|
||||
|
||||
T = TypeVar("T")
|
||||
|
||||
|
||||
def flag_or(flag: Optional[T], value: Optional[T], default: T) -> T:
|
||||
if flag is None:
|
||||
return value_or(value, default)
|
||||
else:
|
||||
return flag
|
||||
|
||||
|
||||
def value_or(value: Optional[T], default: T) -> T:
|
||||
if value is None:
|
||||
return default
|
||||
@@ -187,8 +195,7 @@ def validate_version(dbt_version: List[VersionSpecifier], project_name: str):
|
||||
installed = get_installed_version()
|
||||
if not versions_compatible(*dbt_version):
|
||||
msg = IMPOSSIBLE_VERSION_ERROR.format(
|
||||
package=project_name,
|
||||
version_spec=[x.to_version_string() for x in dbt_version],
|
||||
package=project_name, version_spec=[x.to_version_string() for x in dbt_version]
|
||||
)
|
||||
raise DbtProjectError(msg)
|
||||
|
||||
@@ -228,15 +235,9 @@ def _get_required_version(
|
||||
|
||||
@dataclass
|
||||
class RenderComponents:
|
||||
project_dict: Dict[str, Any] = field(
|
||||
metadata=dict(description="The project dictionary")
|
||||
)
|
||||
packages_dict: Dict[str, Any] = field(
|
||||
metadata=dict(description="The packages dictionary")
|
||||
)
|
||||
selectors_dict: Dict[str, Any] = field(
|
||||
metadata=dict(description="The selectors dictionary")
|
||||
)
|
||||
project_dict: Dict[str, Any] = field(metadata=dict(description="The project dictionary"))
|
||||
packages_dict: Dict[str, Any] = field(metadata=dict(description="The packages dictionary"))
|
||||
selectors_dict: Dict[str, Any] = field(metadata=dict(description="The selectors dictionary"))
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -247,8 +248,7 @@ class PartialProject(RenderComponents):
|
||||
project_name: Optional[str] = field(
|
||||
metadata=dict(
|
||||
description=(
|
||||
"The name of the project. This should always be set and will not "
|
||||
"be rendered"
|
||||
"The name of the project. This should always be set and will not " "be rendered"
|
||||
)
|
||||
)
|
||||
)
|
||||
@@ -256,9 +256,7 @@ class PartialProject(RenderComponents):
|
||||
metadata=dict(description="The root directory of the project"),
|
||||
)
|
||||
verify_version: bool = field(
|
||||
metadata=dict(
|
||||
description=("If True, verify the dbt version matches the required version")
|
||||
)
|
||||
metadata=dict(description=("If True, verify the dbt version matches the required version"))
|
||||
)
|
||||
|
||||
def render_profile_name(self, renderer) -> Optional[str]:
|
||||
@@ -281,6 +279,7 @@ class PartialProject(RenderComponents):
|
||||
selectors_dict=rendered_selectors,
|
||||
)
|
||||
|
||||
# Called by 'collect_parts' in RuntimeConfig
|
||||
def render(self, renderer: DbtProjectYamlRenderer) -> "Project":
|
||||
try:
|
||||
rendered = self.get_rendered(renderer)
|
||||
@@ -290,6 +289,24 @@ class PartialProject(RenderComponents):
|
||||
exc.path = os.path.join(self.project_root, "dbt_project.yml")
|
||||
raise
|
||||
|
||||
def check_config_path(self, project_dict, deprecated_path, exp_path):
|
||||
if deprecated_path in project_dict:
|
||||
if exp_path in project_dict:
|
||||
msg = (
|
||||
"{deprecated_path} and {exp_path} cannot both be defined. The "
|
||||
"`{deprecated_path}` config has been deprecated in favor of `{exp_path}`. "
|
||||
"Please update your `dbt_project.yml` configuration to reflect this "
|
||||
"change."
|
||||
)
|
||||
raise DbtProjectError(
|
||||
msg.format(deprecated_path=deprecated_path, exp_path=exp_path)
|
||||
)
|
||||
deprecations.warn(
|
||||
f"project-config-{deprecated_path}",
|
||||
deprecated_path=deprecated_path,
|
||||
exp_path=exp_path,
|
||||
)
|
||||
|
||||
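check_config_path encodes a simple migration rule: error when the deprecated key and its replacement are both present, warn when only the deprecated one is. A minimal standalone sketch of the same rule, with stdlib warnings standing in for dbt's deprecations module:

import warnings
from typing import Any, Dict


def check_config_path(project_dict: Dict[str, Any], deprecated_path: str, exp_path: str) -> None:
    if deprecated_path not in project_dict:
        return
    if exp_path in project_dict:
        # both keys set: ambiguous, refuse to guess which one wins
        raise ValueError(f"{deprecated_path} and {exp_path} cannot both be defined")
    warnings.warn(f"`{deprecated_path}` is deprecated; use `{exp_path}`", DeprecationWarning)


check_config_path({"source-paths": ["models"]}, "source-paths", "model-paths")  # warns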
    def create_project(self, rendered: RenderComponents) -> "Project":
        unrendered = RenderComponents(
            project_dict=self.project_dict,
@@ -301,6 +318,9 @@ class PartialProject(RenderComponents):
            verify_version=self.verify_version,
        )

        self.check_config_path(rendered.project_dict, "source-paths", "model-paths")
        self.check_config_path(rendered.project_dict, "data-paths", "seed-paths")

        try:
            ProjectContract.validate(rendered.project_dict)
            cfg = ProjectContract.from_dict(rendered.project_dict)
@@ -320,23 +340,33 @@ class PartialProject(RenderComponents):
        # to have been a cli argument.
        profile_name = cfg.profile
        # these are all the defaults
        source_paths: List[str] = value_or(cfg.source_paths, ["models"])

        # `source_paths` is deprecated but still allowed. Copy it into
        # `model_paths` to simplify logic throughout the rest of the system.
        model_paths: List[str] = value_or(
            cfg.model_paths if "model-paths" in rendered.project_dict else cfg.source_paths,
            ["models"],
        )
        macro_paths: List[str] = value_or(cfg.macro_paths, ["macros"])
        data_paths: List[str] = value_or(cfg.data_paths, ["data"])
        test_paths: List[str] = value_or(cfg.test_paths, ["test"])
        analysis_paths: List[str] = value_or(cfg.analysis_paths, [])
        # `data_paths` is deprecated but still allowed. Copy it into
        # `seed_paths` to simplify logic throughout the rest of the system.
        seed_paths: List[str] = value_or(
            cfg.seed_paths if "seed-paths" in rendered.project_dict else cfg.data_paths, ["seeds"]
        )
        test_paths: List[str] = value_or(cfg.test_paths, ["tests"])
        analysis_paths: List[str] = value_or(cfg.analysis_paths, ["analyses"])
        snapshot_paths: List[str] = value_or(cfg.snapshot_paths, ["snapshots"])

        all_source_paths: List[str] = _all_source_paths(
            source_paths, data_paths, snapshot_paths, analysis_paths, macro_paths
            model_paths, seed_paths, snapshot_paths, analysis_paths, macro_paths
        )

        docs_paths: List[str] = value_or(cfg.docs_paths, all_source_paths)
        asset_paths: List[str] = value_or(cfg.asset_paths, [])
        target_path: str = value_or(cfg.target_path, "target")
        target_path: str = flag_or(flags.TARGET_PATH, cfg.target_path, "target")
        clean_targets: List[str] = value_or(cfg.clean_targets, [target_path])
        log_path: str = value_or(cfg.log_path, "logs")
        modules_path: str = value_or(cfg.modules_path, "dbt_modules")
        log_path: str = flag_or(flags.LOG_PATH, cfg.log_path, "logs")
        packages_install_path: str = value_or(cfg.packages_install_path, "dbt_packages")
        # in the default case we'll populate this once we know the adapter type
        # It would be nice to just pass along a Quoting here, but that would
        # break many things
@@ -344,22 +374,28 @@ class PartialProject(RenderComponents):
        if cfg.quoting is not None:
            quoting = cfg.quoting.to_dict(omit_none=True)

        dispatch: List[Dict[str, Any]]
        models: Dict[str, Any]
        seeds: Dict[str, Any]
        snapshots: Dict[str, Any]
        sources: Dict[str, Any]
        tests: Dict[str, Any]
        vars_value: VarProvider

        dispatch = cfg.dispatch
        models = cfg.models
        seeds = cfg.seeds
        snapshots = cfg.snapshots
        sources = cfg.sources
        tests = cfg.tests
        if cfg.vars is None:
            vars_dict: Dict[str, Any] = {}
        else:
            vars_dict = cfg.vars

        vars_value = VarProvider(vars_dict)
        # There will never be any project_env_vars when it's first created
        project_env_vars: Dict[str, Any] = {}
        on_run_start: List[str] = value_or(cfg.on_run_start, [])
        on_run_end: List[str] = value_or(cfg.on_run_end, [])

@@ -374,15 +410,14 @@ class PartialProject(RenderComponents):
        manifest_selectors = SelectorDict.parse_from_selectors_list(
            rendered.selectors_dict["selectors"]
        )

        project = Project(
            project_name=name,
            version=version,
            project_root=project_root,
            profile_name=profile_name,
            source_paths=source_paths,
            model_paths=model_paths,
            macro_paths=macro_paths,
            data_paths=data_paths,
            seed_paths=seed_paths,
            test_paths=test_paths,
            analysis_paths=analysis_paths,
            docs_paths=docs_paths,
@@ -391,11 +426,12 @@ class PartialProject(RenderComponents):
            snapshot_paths=snapshot_paths,
            clean_targets=clean_targets,
            log_path=log_path,
            modules_path=modules_path,
            packages_install_path=packages_install_path,
            quoting=quoting,
            models=models,
            on_run_start=on_run_start,
            on_run_end=on_run_end,
            dispatch=dispatch,
            seeds=seeds,
            snapshots=snapshots,
            dbt_version=dbt_version,
@@ -404,9 +440,11 @@ class PartialProject(RenderComponents):
            selectors=selectors,
            query_comment=query_comment,
            sources=sources,
            tests=tests,
            vars=vars_value,
            config_version=cfg.config_version,
            unrendered=unrendered,
            project_env_vars=project_env_vars,
        )
        # sanity check - this means an internal issue
        project.validate()
@@ -484,9 +522,9 @@ class Project:
    version: Union[SemverString, float]
    project_root: str
    profile_name: Optional[str]
    source_paths: List[str]
    model_paths: List[str]
    macro_paths: List[str]
    data_paths: List[str]
    seed_paths: List[str]
    test_paths: List[str]
    analysis_paths: List[str]
    docs_paths: List[str]
@@ -495,14 +533,16 @@ class Project:
    snapshot_paths: List[str]
    clean_targets: List[str]
    log_path: str
    modules_path: str
    packages_install_path: str
    quoting: Dict[str, Any]
    models: Dict[str, Any]
    on_run_start: List[str]
    on_run_end: List[str]
    dispatch: List[Dict[str, Any]]
    seeds: Dict[str, Any]
    snapshots: Dict[str, Any]
    sources: Dict[str, Any]
    tests: Dict[str, Any]
    vars: VarProvider
    dbt_version: List[VersionSpecifier]
    packages: Dict[str, Any]
@@ -511,25 +551,31 @@ class Project:
    query_comment: QueryComment
    config_version: int
    unrendered: RenderComponents
    project_env_vars: Dict[str, Any]

    @property
    def all_source_paths(self) -> List[str]:
        return _all_source_paths(
            self.source_paths,
            self.data_paths,
            self.model_paths,
            self.seed_paths,
            self.snapshot_paths,
            self.analysis_paths,
            self.macro_paths,
        )

    @property
    def generic_test_paths(self):
        generic_test_paths = []
        for test_path in self.test_paths:
            generic_test_paths.append(os.path.join(test_path, "generic"))
        return generic_test_paths

    def __str__(self):
        cfg = self.to_project_config(with_packages=True)
        return str(cfg)

    def __eq__(self, other):
        if not (
            isinstance(other, self.__class__) and isinstance(self, other.__class__)
        ):
        if not (isinstance(other, self.__class__) and isinstance(self, other.__class__)):
            return False
        return self.to_project_config(with_packages=True) == other.to_project_config(
            with_packages=True
@@ -549,9 +595,9 @@ class Project:
                "version": self.version,
                "project-root": self.project_root,
                "profile": self.profile_name,
                "source-paths": self.source_paths,
                "model-paths": self.model_paths,
                "macro-paths": self.macro_paths,
                "data-paths": self.data_paths,
                "seed-paths": self.seed_paths,
                "test-paths": self.test_paths,
                "analysis-paths": self.analysis_paths,
                "docs-paths": self.docs_paths,
@@ -564,13 +610,13 @@ class Project:
                "models": self.models,
                "on-run-start": self.on_run_start,
                "on-run-end": self.on_run_end,
                "dispatch": self.dispatch,
                "seeds": self.seeds,
                "snapshots": self.snapshots,
                "sources": self.sources,
                "tests": self.tests,
                "vars": self.vars.to_dict(),
                "require-dbt-version": [
                    v.to_version_string() for v in self.dbt_version
                ],
                "require-dbt-version": [v.to_version_string() for v in self.dbt_version],
                "config-version": self.config_version,
            }
        )
@@ -589,34 +635,12 @@ class Project:
            raise DbtProjectError(validator_error_message(e)) from e

    @classmethod
    def partial_load(
        cls, project_root: str, *, verify_version: bool = False
    ) -> PartialProject:
    def partial_load(cls, project_root: str, *, verify_version: bool = False) -> PartialProject:
        return PartialProject.from_project_root(
            project_root,
            verify_version=verify_version,
        )

    @classmethod
    def render_from_dict(
        cls,
        project_root: str,
        project_dict: Dict[str, Any],
        packages_dict: Dict[str, Any],
        selectors_dict: Dict[str, Any],
        renderer: DbtProjectYamlRenderer,
        *,
        verify_version: bool = False,
    ) -> "Project":
        partial = PartialProject.from_dicts(
            project_root=project_root,
            project_dict=project_dict,
            packages_dict=packages_dict,
            selectors_dict=selectors_dict,
            verify_version=verify_version,
        )
        return partial.render(renderer)

    @classmethod
    def from_project_root(
        cls,
@@ -631,10 +655,26 @@ class Project:
    def hashed_name(self):
        return hashlib.md5(self.project_name.encode("utf-8")).hexdigest()

    def get_selector(self, name: str) -> SelectionSpec:
    def get_selector(self, name: str) -> Union[SelectionSpec, bool]:
        if name not in self.selectors:
            raise RuntimeException(
                f"Could not find selector named {name}, expected one of "
                f"{list(self.selectors)}"
                f"Could not find selector named {name}, expected one of " f"{list(self.selectors)}"
            )
        return self.selectors[name]
        return self.selectors[name]["definition"]

    def get_default_selector_name(self) -> Union[str, None]:
        """This function fetches the default selector to use on `dbt run` (if any)
        :return: either a selector if default is set or None
        :rtype: Union[SelectionSpec, None]
        """
        for selector_name, selector in self.selectors.items():
            if selector["default"] is True:
                return selector_name

        return None

    def get_macro_search_order(self, macro_namespace: str):
        for dispatch_entry in self.dispatch:
            if dispatch_entry["macro_namespace"] == macro_namespace:
                return dispatch_entry["search_order"]
        return None
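The new flag_or helper layers three configuration sources: a CLI/env flag wins over the dbt_project.yml value, which wins over the hard-coded default. A tiny sketch of the precedence (the example values are illustrative):

from typing import Optional, TypeVar

T = TypeVar("T")


def value_or(value: Optional[T], default: T) -> T:
    return default if value is None else value


def flag_or(flag: Optional[T], value: Optional[T], default: T) -> T:
    # flag (e.g. a --log-path override) beats the project file, which beats the default
    return value_or(value, default) if flag is None else flag


assert flag_or(None, None, "logs") == "logs"              # nothing set: default
assert flag_or(None, "my_logs", "logs") == "my_logs"      # project value
assert flag_or("cli_logs", "my_logs", "logs") == "cli_logs"  # flag wins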
@@ -1,10 +1,15 @@
from typing import Dict, Any, Tuple, Optional, Union, Callable
import re
import os

from dbt.clients.jinja import get_rendered, catch_jinja

from dbt.context.target import TargetContext
from dbt.context.secret import SecretContext, SECRET_PLACEHOLDER
from dbt.context.base import BaseContext
from dbt.contracts.connection import HasCredentials
from dbt.exceptions import DbtProjectError, CompilationException, RecursionException
from dbt.node_types import NodeType
from dbt.utils import deep_map
from dbt.utils import deep_map_render
from dbt.logger import SECRET_ENV_PREFIX


Keypath = Tuple[Union[str, int], ...]
@@ -41,11 +46,10 @@ class BaseRenderer:

    def render_data(self, data: Dict[str, Any]) -> Dict[str, Any]:
        try:
            return deep_map(self.render_entry, data)
            return deep_map_render(self.render_entry, data)
        except RecursionException:
            raise DbtProjectError(
                f"Cycle detected: {self.name} input has a reference to itself",
                project=data,
                f"Cycle detected: {self.name} input has a reference to itself", project=data
            )

@@ -93,15 +97,29 @@ class ProjectPostprocessor(Dict[Keypath, Callable[[Any], Any]]):
class DbtProjectYamlRenderer(BaseRenderer):
    _KEYPATH_HANDLERS = ProjectPostprocessor()

    def __init__(
        self, profile: Optional[HasCredentials] = None, cli_vars: Optional[Dict[str, Any]] = None
    ) -> None:
        # Generate contexts here because we want to save the context
        # object in order to retrieve the env_vars. This is almost always
        # a TargetContext, but in the debug task we want a project
        # even when we don't have a profile.
        if cli_vars is None:
            cli_vars = {}
        if profile:
            self.ctx_obj = TargetContext(profile, cli_vars)
        else:
            self.ctx_obj = BaseContext(cli_vars)  # type:ignore
        context = self.ctx_obj.to_dict()
        super().__init__(context)

    @property
    def name(self):
        "Project config"

    # Uses SecretRenderer
    def get_package_renderer(self) -> BaseRenderer:
        return PackageRenderer(self.context)

    def get_selector_renderer(self) -> BaseRenderer:
        return SelectorRenderer(self.context)
        return PackageRenderer(self.ctx_obj.cli_vars)

    def render_project(
        self,
@@ -119,8 +137,7 @@ class DbtProjectYamlRenderer(BaseRenderer):
        return package_renderer.render_data(packages)

    def render_selectors(self, selectors: Dict[str, Any]):
        selector_renderer = self.get_selector_renderer()
        return selector_renderer.render_data(selectors)
        return self.render_data(selectors)

    def render_entry(self, value: Any, keypath: Keypath) -> Any:
        result = super().render_entry(value, keypath)
@@ -139,10 +156,8 @@ class DbtProjectYamlRenderer(BaseRenderer):
        if first == "vars":
            return False

        if first in {"seeds", "models", "snapshots", "seeds"}:
            keypath_parts = {
                (k.lstrip("+") if isinstance(k, str) else k) for k in keypath
            }
        if first in {"seeds", "models", "snapshots", "tests"}:
            keypath_parts = {(k.lstrip("+ ") if isinstance(k, str) else k) for k in keypath}
            # model-level hooks
            if "pre-hook" in keypath_parts or "post-hook" in keypath_parts:
                return False
@@ -150,80 +165,48 @@ class DbtProjectYamlRenderer(BaseRenderer):
        return True


class ProfileRenderer(BaseRenderer):
    @property
    def name(self):
        "Profile"


class SchemaYamlRenderer(BaseRenderer):
    DOCUMENTABLE_NODES = frozenset(n.pluralize() for n in NodeType.documentable())
class SecretRenderer(BaseRenderer):
    def __init__(self, cli_vars: Dict[str, Any] = {}) -> None:
        # Generate contexts here because we want to save the context
        # object in order to retrieve the env_vars.
        self.ctx_obj = SecretContext(cli_vars)
        context = self.ctx_obj.to_dict()
        super().__init__(context)

    @property
    def name(self):
        return "Rendering yaml"
        return "Secret"

    def _is_norender_key(self, keypath: Keypath) -> bool:
        """
        models:
            - name: blah
            - description: blah
              tests: ...
            - columns:
                - name:
                - description: blah
                  tests: ...

        Return True if it's tests or description - those aren't rendered
        """
        if len(keypath) >= 2 and keypath[1] in ("tests", "description"):
            return True

        if (
            len(keypath) >= 4
            and keypath[1] == "columns"
            and keypath[3] in ("tests", "description")
        ):
            return True

        return False

    # don't render descriptions or test keyword arguments
    def should_render_keypath(self, keypath: Keypath) -> bool:
        if len(keypath) < 2:
            return True

        if keypath[0] not in self.DOCUMENTABLE_NODES:
            return True

        if len(keypath) < 3:
            return True

        if keypath[0] == NodeType.Source.pluralize():
            if keypath[2] == "description":
                return False
            if keypath[2] == "tables":
                if self._is_norender_key(keypath[3:]):
                    return False
        elif keypath[0] == NodeType.Macro.pluralize():
            if keypath[2] == "arguments":
                if self._is_norender_key(keypath[3:]):
                    return False
            elif self._is_norender_key(keypath[1:]):
                return False
        else:  # keypath[0] in self.DOCUMENTABLE_NODES:
            if self._is_norender_key(keypath[1:]):
                return False
        return True

    def render_value(self, value: Any, keypath: Optional[Keypath] = None) -> Any:
        # First, standard Jinja rendering, with special handling for 'secret' environment variables
        # "{{ env_var('DBT_SECRET_ENV_VAR') }}" -> "$$$DBT_SECRET_START$$$DBT_SECRET_ENV_{VARIABLE_NAME}$$$DBT_SECRET_END$$$"
        # This prevents Jinja manipulation of secrets via macros/filters that might leak partial/modified values in logs
        rendered = super().render_value(value, keypath)
        # Now, detect instances of the placeholder value ($$$DBT_SECRET_START...DBT_SECRET_END$$$)
        # and replace them with the actual secret value
        if SECRET_ENV_PREFIX in str(rendered):
            search_group = f"({SECRET_ENV_PREFIX}(.*))"
            pattern = SECRET_PLACEHOLDER.format(search_group).replace("$", r"\$")
            m = re.search(
                pattern,
                rendered,
            )
            if m:
                found = m.group(1)
                value = os.environ[found]
                replace_this = SECRET_PLACEHOLDER.format(found)
                return rendered.replace(replace_this, value)
        else:
            return rendered
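
(Editor's aside, not part of the commit: a minimal standalone sketch of this placeholder round-trip. The `SECRET_PLACEHOLDER` format and the `DBT_ENV_SECRET_` prefix are assumptions taken from the comments above and the context README later in this diff.)
```
import os
import re

SECRET_ENV_PREFIX = "DBT_ENV_SECRET_"  # assumed value of dbt.logger.SECRET_ENV_PREFIX
SECRET_PLACEHOLDER = "$$$DBT_SECRET_START$$${}$$$DBT_SECRET_END$$$"  # assumed format

os.environ["DBT_ENV_SECRET_GIT_TOKEN"] = "abc123"

# Step 1: Jinja rendering emits an opaque placeholder instead of the secret,
# so macros/filters never see (and cannot leak) the real value.
rendered = "https://user:" + SECRET_PLACEHOLDER.format("DBT_ENV_SECRET_GIT_TOKEN") + "@github.com"

# Step 2: render_value swaps the placeholder back for the real value at the end.
pattern = SECRET_PLACEHOLDER.format(f"({SECRET_ENV_PREFIX}(.*))").replace("$", r"\$")
m = re.search(pattern, rendered)
if m:
    found = m.group(1)
    rendered = rendered.replace(SECRET_PLACEHOLDER.format(found), os.environ[found])

print(rendered)  # https://user:abc123@github.com
```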


class PackageRenderer(BaseRenderer):
class ProfileRenderer(SecretRenderer):
    @property
    def name(self):
        return "Profile"


class PackageRenderer(SecretRenderer):
    @property
    def name(self):
        return "Packages config"


class SelectorRenderer(BaseRenderer):
    @property
    def name(self):
        return "Selector config"

@@ -1,40 +1,26 @@
import itertools
import os
from copy import deepcopy
from dataclasses import dataclass, fields
from dataclasses import dataclass, field
from pathlib import Path
from typing import (
    Dict,
    Any,
    Optional,
    Mapping,
    Iterator,
    Iterable,
    Tuple,
    List,
    MutableSet,
    Type,
)
from typing import Dict, Any, Optional, Mapping, Iterator, Iterable, Tuple, List, MutableSet, Type

from .profile import Profile
from .project import Project
from .renderer import DbtProjectYamlRenderer, ProfileRenderer
from .utils import parse_cli_vars
from dbt import tracking
from dbt import flags
from dbt.adapters.factory import get_relation_class_by_name, get_include_paths
from dbt.helper_types import FQNPath, PathSet
from dbt.context import generate_base_context
from dbt.context.target import generate_target_context
from dbt.helper_types import FQNPath, PathSet, DictDefaultEmptyStr
from dbt.config.profile import read_user_config
from dbt.contracts.connection import AdapterRequiredConfig, Credentials
from dbt.contracts.graph.manifest import ManifestMetadata
from dbt.contracts.relation import ComponentName
from dbt.logger import GLOBAL_LOGGER as logger
from dbt.ui import warning_tag

from dbt.contracts.project import Configuration, UserConfig
from dbt.exceptions import (
    RuntimeException,
    DbtProfileError,
    DbtProjectError,
    validator_error_message,
    warn_or_error,
@@ -65,6 +51,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
    def __post_init__(self):
        self.validate()

    # Called by 'new_project' and 'from_args'
    @classmethod
    def from_parts(
        cls,
@@ -92,9 +79,9 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
            project_name=project.project_name,
            version=project.version,
            project_root=project.project_root,
            source_paths=project.source_paths,
            model_paths=project.model_paths,
            macro_paths=project.macro_paths,
            data_paths=project.data_paths,
            seed_paths=project.seed_paths,
            test_paths=project.test_paths,
            analysis_paths=project.analysis_paths,
            docs_paths=project.docs_paths,
@@ -103,11 +90,12 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
            snapshot_paths=project.snapshot_paths,
            clean_targets=project.clean_targets,
            log_path=project.log_path,
            modules_path=project.modules_path,
            packages_install_path=project.packages_install_path,
            quoting=quoting,
            models=project.models,
            on_run_start=project.on_run_start,
            on_run_end=project.on_run_end,
            dispatch=project.dispatch,
            seeds=project.seeds,
            snapshots=project.snapshots,
            dbt_version=project.dbt_version,
@@ -116,12 +104,15 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
            selectors=project.selectors,
            query_comment=project.query_comment,
            sources=project.sources,
            tests=project.tests,
            vars=project.vars,
            config_version=project.config_version,
            unrendered=project.unrendered,
            project_env_vars=project.project_env_vars,
            profile_env_vars=profile.profile_env_vars,
            profile_name=profile.profile_name,
            target_name=profile.target_name,
            config=profile.config,
            user_config=profile.user_config,
            threads=profile.threads,
            credentials=profile.credentials,
            args=args,
@@ -129,6 +120,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
            dependencies=dependencies,
        )

    # Called by 'load_projects' in this class
    def new_project(self, project_root: str) -> "RuntimeConfig":
        """Given a new project root, read in its project dictionary, supply the
        existing project's profile info, and create a new project file.
@@ -143,22 +135,22 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
        profile.validate()

        # load the new project and its packages. Don't pass cli variables.
        renderer = DbtProjectYamlRenderer(generate_target_context(profile, {}))
        renderer = DbtProjectYamlRenderer(profile)

        project = Project.from_project_root(
            project_root,
            renderer,
            verify_version=getattr(self.args, "version_check", False),
            verify_version=bool(flags.VERSION_CHECK),
        )

        cfg = self.from_parts(
        runtime_config = self.from_parts(
            project=project,
            profile=profile,
            args=deepcopy(self.args),
        )
        # force our quoting back onto the new project.
        cfg.quoting = deepcopy(self.quoting)
        return cfg
        runtime_config.quoting = deepcopy(self.quoting)
        return runtime_config

    def serialize(self) -> Dict[str, Any]:
        """Serialize the full configuration to a single dictionary. For any
@@ -191,29 +183,35 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
        profile_renderer: ProfileRenderer,
        profile_name: Optional[str],
    ) -> Profile:

        return Profile.render_from_args(args, profile_renderer, profile_name)

    @classmethod
    def collect_parts(cls: Type["RuntimeConfig"], args: Any) -> Tuple[Project, Profile]:
        # profile_name from the project
        project_root = args.project_dir if args.project_dir else os.getcwd()
        version_check = getattr(args, "version_check", False)
        version_check = bool(flags.VERSION_CHECK)
        partial = Project.partial_load(project_root, verify_version=version_check)

        # build the profile using the base renderer and the one fact we know
        # Note: only the named profile section is rendered. The rest of the
        # profile is ignored.
        cli_vars: Dict[str, Any] = parse_cli_vars(getattr(args, "vars", "{}"))
        profile_renderer = ProfileRenderer(generate_base_context(cli_vars))
        profile_renderer = ProfileRenderer(cli_vars)
        profile_name = partial.render_profile_name(profile_renderer)

        profile = cls._get_rendered_profile(args, profile_renderer, profile_name)
        # Save env_vars encountered in rendering for partial parsing
        profile.profile_env_vars = profile_renderer.ctx_obj.env_vars

        # get a new renderer using our target information and render the
        # project
        ctx = generate_target_context(profile, cli_vars)
        project_renderer = DbtProjectYamlRenderer(ctx)
        project_renderer = DbtProjectYamlRenderer(profile, cli_vars)
        project = partial.render(project_renderer)
        # Save env_vars encountered in rendering for partial parsing
        project.project_env_vars = project_renderer.ctx_obj.env_vars
        return (project, profile)

    # Called in main.py, lib.py, task/base.py
    @classmethod
    def from_args(cls, args: Any) -> "RuntimeConfig":
        """Given arguments, read in dbt_project.yml from the current directory,
@@ -234,9 +232,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
        )

    def get_metadata(self) -> ManifestMetadata:
        return ManifestMetadata(
            project_id=self.hashed_name(), adapter_type=self.credentials.type
        )
        return ManifestMetadata(project_id=self.hashed_name(), adapter_type=self.credentials.type)

    def _get_v2_config_paths(
        self,
@@ -246,7 +242,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
    ) -> PathSet:
        for key, value in config.items():
            if isinstance(value, dict) and not key.startswith("+"):
                self._get_v2_config_paths(value, path + (key,), paths)
                self._get_config_paths(value, path + (key,), paths)
            else:
                paths.add(path)
        return frozenset(paths)
@@ -268,7 +264,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
        return frozenset(paths)

    def get_resource_config_paths(self) -> Dict[str, PathSet]:
        """Return a dictionary with 'seeds' and 'models' keys whose values are
        """Return a dictionary with resource type keys whose values are
        lists of lists of strings, where each inner list of strings represents
        a configured path in the resource.
        """
@@ -277,6 +273,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
            "seeds": self._get_config_paths(self.seeds),
            "snapshots": self._get_config_paths(self.snapshots),
            "sources": self._get_config_paths(self.sources),
            "tests": self._get_config_paths(self.tests),
        }

    def get_unused_resource_config_paths(
@@ -315,13 +312,26 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):

        warn_or_error(msg, log_fmt=warning_tag("{}"))

    def load_dependencies(self) -> Mapping[str, "RuntimeConfig"]:
    def load_dependencies(self, base_only=False) -> Mapping[str, "RuntimeConfig"]:
        if self.dependencies is None:
            all_projects = {self.project_name: self}
            internal_packages = get_include_paths(self.credentials.type)
            project_paths = itertools.chain(
                internal_packages, self._get_project_directories()
            )
            if base_only:
                # Test setup -- we want to load macros without dependencies
                project_paths = itertools.chain(internal_packages)
            else:
                # raise exception if fewer installed packages than in packages.yml
                count_packages_specified = len(self.packages.packages)  # type: ignore
                count_packages_installed = len(tuple(self._get_project_directories()))
                if count_packages_specified > count_packages_installed:
                    raise_compiler_error(
                        f"dbt found {count_packages_specified} package(s) "
                        f"specified in packages.yml, but only "
                        f"{count_packages_installed} package(s) installed "
                        f'in {self.packages_install_path}. Run "dbt deps" to '
                        f"install package dependencies."
                    )
                project_paths = itertools.chain(internal_packages, self._get_project_directories())
            for project_name, project in self.load_projects(project_paths):
                if project_name in all_projects:
                    raise_compiler_error(
@@ -337,9 +347,8 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
    def clear_dependencies(self):
        self.dependencies = None

    def load_projects(
        self, paths: Iterable[Path]
    ) -> Iterator[Tuple[str, "RuntimeConfig"]]:
    # Called by 'load_dependencies' in this class
    def load_projects(self, paths: Iterable[Path]) -> Iterator[Tuple[str, "RuntimeConfig"]]:
        for path in paths:
            try:
                project = self.new_project(str(path))
@@ -353,7 +362,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
                yield project.project_name, project

    def _get_project_directories(self) -> Iterator[Path]:
        root = Path(self.project_root) / self.modules_path
        root = Path(self.project_root) / self.packages_install_path

        if root.exists():
            for path in root.iterdir():
@@ -369,6 +378,10 @@ class UnsetCredentials(Credentials):
    def type(self):
        return None

    @property
    def unique_field(self):
        return None

    def connection_info(self, *args, **kwargs):
        return {}

@@ -376,41 +389,37 @@ class UnsetCredentials(Credentials):
        return ()


class UnsetConfig(UserConfig):
    def __getattribute__(self, name):
        if name in {f.name for f in fields(UserConfig)}:
            raise AttributeError(f"'UnsetConfig' object has no attribute {name}")

    def __post_serialize__(self, dct):
        return {}


# This is used by UnsetProfileConfig, for commands which do
# not require a profile, i.e. dbt deps and clean
class UnsetProfile(Profile):
    def __init__(self):
        self.credentials = UnsetCredentials()
        self.config = UnsetConfig()
        self.user_config = UserConfig()  # This will be read in _get_rendered_profile
        self.profile_name = ""
        self.target_name = ""
        self.threads = -1

    def to_target_dict(self):
        return {}
        return DictDefaultEmptyStr({})

    def __getattribute__(self, name):
        if name in {"profile_name", "target_name", "threads"}:
            raise RuntimeException(
                f'Error: disallowed attribute "{name}" - no profile!'
            )
            raise RuntimeException(f'Error: disallowed attribute "{name}" - no profile!')

        return Profile.__getattribute__(self, name)


# This class is used by the dbt deps and clean commands, because they don't
# require a functioning profile.
@dataclass
class UnsetProfileConfig(RuntimeConfig):
    """This class acts a lot _like_ a RuntimeConfig, except if your profile is
    missing, any access to profile members results in an exception.
    """

    profile_name: str = field(repr=False)
    target_name: str = field(repr=False)

    def __post_init__(self):
        # instead of futzing with InitVar overrides or rewriting __init__, just
        # `del` the attrs we don't want users touching.
@@ -422,16 +431,64 @@ class UnsetProfileConfig(RuntimeConfig):
    def __getattribute__(self, name):
        # Override __getattribute__ to check that the attribute isn't 'banned'.
        if name in {"profile_name", "target_name"}:
            raise RuntimeException(
                f'Error: disallowed attribute "{name}" - no profile!'
            )
            raise RuntimeException(f'Error: disallowed attribute "{name}" - no profile!')

        # avoid every attribute access triggering infinite recursion
        return RuntimeConfig.__getattribute__(self, name)

    def to_target_dict(self):
        # re-override the poisoned profile behavior
        return {}
        return DictDefaultEmptyStr({})

    def to_project_config(self, with_packages=False):
        """Return a dict representation of the config that could be written to
        disk with `yaml.safe_dump` to get this configuration.

        Overrides dbt.config.Project.to_project_config to omit undefined profile
        attributes.

        :param with_packages bool: If True, include the serialized packages
            file in the root.
        :returns dict: The serialized profile.
        """
        result = deepcopy(
            {
                "name": self.project_name,
                "version": self.version,
                "project-root": self.project_root,
                "profile": "",
                "model-paths": self.model_paths,
                "macro-paths": self.macro_paths,
                "seed-paths": self.seed_paths,
                "test-paths": self.test_paths,
                "analysis-paths": self.analysis_paths,
                "docs-paths": self.docs_paths,
                "asset-paths": self.asset_paths,
                "target-path": self.target_path,
                "snapshot-paths": self.snapshot_paths,
                "clean-targets": self.clean_targets,
                "log-path": self.log_path,
                "quoting": self.quoting,
                "models": self.models,
                "on-run-start": self.on_run_start,
                "on-run-end": self.on_run_end,
                "dispatch": self.dispatch,
                "seeds": self.seeds,
                "snapshots": self.snapshots,
                "sources": self.sources,
                "tests": self.tests,
                "vars": self.vars.to_dict(),
                "require-dbt-version": [v.to_version_string() for v in self.dbt_version],
                "config-version": self.config_version,
            }
        )
        if self.query_comment:
            result["query-comment"] = self.query_comment.to_dict(omit_none=True)

        if with_packages:
            result.update(self.packages.to_dict(omit_none=True))

        return result

    @classmethod
    def from_parts(
@@ -454,9 +511,9 @@ class UnsetProfileConfig(RuntimeConfig):
            project_name=project.project_name,
            version=project.version,
            project_root=project.project_root,
            source_paths=project.source_paths,
            model_paths=project.model_paths,
            macro_paths=project.macro_paths,
            data_paths=project.data_paths,
            seed_paths=project.seed_paths,
            test_paths=project.test_paths,
            analysis_paths=project.analysis_paths,
            docs_paths=project.docs_paths,
@@ -465,11 +522,12 @@ class UnsetProfileConfig(RuntimeConfig):
            snapshot_paths=project.snapshot_paths,
            clean_targets=project.clean_targets,
            log_path=project.log_path,
            modules_path=project.modules_path,
            packages_install_path=project.packages_install_path,
            quoting=project.quoting,  # we never use this anyway.
            models=project.models,
            on_run_start=project.on_run_start,
            on_run_end=project.on_run_end,
            dispatch=project.dispatch,
            seeds=project.seeds,
            snapshots=project.snapshots,
            dbt_version=project.dbt_version,
@@ -478,12 +536,15 @@ class UnsetProfileConfig(RuntimeConfig):
            selectors=project.selectors,
            query_comment=project.query_comment,
            sources=project.sources,
            tests=project.tests,
            vars=project.vars,
            config_version=project.config_version,
            unrendered=project.unrendered,
            project_env_vars=project.project_env_vars,
            profile_env_vars=profile.profile_env_vars,
            profile_name="",
            target_name="",
            config=UnsetConfig(),
            user_config=UserConfig(),
            threads=getattr(args, "threads", 1),
            credentials=UnsetCredentials(),
            args=args,
@@ -498,17 +559,12 @@ class UnsetProfileConfig(RuntimeConfig):
        profile_renderer: ProfileRenderer,
        profile_name: Optional[str],
    ) -> Profile:
        try:
            profile = Profile.render_from_args(args, profile_renderer, profile_name)
        except (DbtProjectError, DbtProfileError) as exc:
            logger.debug("Profile not loaded due to error: {}", exc, exc_info=True)
            logger.info(
                'No profile "{}" found, continuing with no target', profile_name
            )
            # return the poisoned form
            profile = UnsetProfile()
            # disable anonymous usage statistics
            tracking.disable_tracking()

        profile = UnsetProfile()
        # The profile (for warehouse connection) is not needed, but we want
        # to get the UserConfig, which is also in profiles.yml
        user_config = read_user_config(flags.PROFILES_DIR)
        profile.user_config = user_config
        return profile

    @classmethod
@@ -523,9 +579,6 @@ class UnsetProfileConfig(RuntimeConfig):
        :raises ValidationException: If the cli variables are invalid.
        """
        project, profile = cls.collect_parts(args)
        if not isinstance(profile, UnsetProfile):
            # if it's a real profile, return a real config
            cls = RuntimeConfig

        return cls.from_parts(project=project, profile=profile, args=args)

@@ -1,9 +1,10 @@
from pathlib import Path
from typing import Dict, Any
from copy import deepcopy
from typing import Dict, Any, Union
from dbt.clients.yaml_helper import yaml, Loader, Dumper, load_yaml_text  # noqa: F401
from dbt.dataclass_schema import ValidationError

from .renderer import SelectorRenderer
from .renderer import BaseRenderer

from dbt.clients.system import (
    load_file_contents,
@@ -27,12 +28,13 @@ Validator Error:
"""


class SelectorConfig(Dict[str, SelectionSpec]):
class SelectorConfig(Dict[str, Dict[str, Union[SelectionSpec, bool]]]):
    @classmethod
    def selectors_from_dict(cls, data: Dict[str, Any]) -> "SelectorConfig":
        try:
            SelectorFile.validate(data)
            selector_file = SelectorFile.from_dict(data)
            validate_selector_default(selector_file)
            selectors = parse_from_selectors_definition(selector_file)
        except ValidationError as exc:
            yaml_sel_cfg = yaml.dump(exc.instance)
@@ -56,7 +58,7 @@ class SelectorConfig(Dict[str, SelectionSpec]):
    def render_from_dict(
        cls,
        data: Dict[str, Any],
        renderer: SelectorRenderer,
        renderer: BaseRenderer,
    ) -> "SelectorConfig":
        try:
            rendered = renderer.render_data(data)
@@ -71,7 +73,7 @@ class SelectorConfig(Dict[str, SelectionSpec]):
    def from_path(
        cls,
        path: Path,
        renderer: SelectorRenderer,
        renderer: BaseRenderer,
    ) -> "SelectorConfig":
        try:
            data = load_yaml_text(load_file_contents(str(path)))
@@ -113,6 +115,24 @@ def selector_config_from_data(selectors_data: Dict[str, Any]) -> SelectorConfig:
    return selectors


def validate_selector_default(selector_file: SelectorFile) -> None:
    """Check if a selectors.yml file has more than one default key set to true."""
    default_set: bool = False
    default_selector_name: Union[str, None] = None

    for selector in selector_file.selectors:
        if selector.default is True and default_set is False:
            default_set = True
            default_selector_name = selector.name
            continue
        if selector.default is True and default_set is True:
            raise DbtSelectorsError(
                "Error when parsing the selector file. "
                "Found multiple selectors with `default: true`: "
                f"{default_selector_name} and {selector.name}"
            )
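
(Editor's aside: the rule this function enforces, sketched with plain (name, default) tuples standing in for parsed selectors — hypothetical data, not dbt's API.)
```
selectors = [("nightly", True), ("hourly", False), ("weekly", True)]

default_name = None
for name, is_default in selectors:
    if is_default and default_name is not None:
        # Two selectors claim to be the default: reject the file.
        raise ValueError(
            f"Found multiple selectors with `default: true`: {default_name} and {name}"
        )
    if is_default:
        default_name = name
```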


# These are utilities to clean up the dictionary created from
# selectors.yml by turning the cli-string format entries into
# normalized dictionary entries. It parallels the flow in
@@ -121,28 +141,33 @@ def selector_config_from_data(selectors_data: Dict[str, Any]) -> SelectorConfig:
# good to combine the two flows into one at some point.
class SelectorDict:
    @classmethod
    def parse_dict_definition(cls, definition):
    def parse_dict_definition(cls, definition, selector_dict={}):
        key = list(definition)[0]
        value = definition[key]
        if isinstance(value, list):
            new_values = []
            for sel_def in value:
                new_value = cls.parse_from_definition(sel_def)
                new_value = cls.parse_from_definition(sel_def, selector_dict=selector_dict)
                new_values.append(new_value)
            value = new_values
        if key == "exclude":
            definition = {key: value}
        elif len(definition) == 1:
            definition = {"method": key, "value": value}
        elif key == "method" and value == "selector":
            sel_def = definition.get("value")
            if sel_def not in selector_dict:
                raise DbtSelectorsError(f"Existing selector definition for {sel_def} not found.")
            return selector_dict[definition["value"]]["definition"]
        return definition

    @classmethod
    def parse_a_definition(cls, def_type, definition):
    def parse_a_definition(cls, def_type, definition, selector_dict={}):
        # this definition must be a list
        new_dict = {def_type: []}
        for sel_def in definition[def_type]:
            if isinstance(sel_def, dict):
                sel_def = cls.parse_from_definition(sel_def)
                sel_def = cls.parse_from_definition(sel_def, selector_dict=selector_dict)
                new_dict[def_type].append(sel_def)
            elif isinstance(sel_def, str):
                sel_def = SelectionCriteria.dict_from_single_spec(sel_def)
@@ -152,15 +177,17 @@ class SelectorDict:
        return new_dict

    @classmethod
    def parse_from_definition(cls, definition):
    def parse_from_definition(cls, definition, selector_dict={}):
        if isinstance(definition, str):
            definition = SelectionCriteria.dict_from_single_spec(definition)
        elif "union" in definition:
            definition = cls.parse_a_definition("union", definition)
            definition = cls.parse_a_definition("union", definition, selector_dict=selector_dict)
        elif "intersection" in definition:
            definition = cls.parse_a_definition("intersection", definition)
            definition = cls.parse_a_definition(
                "intersection", definition, selector_dict=selector_dict
            )
        elif isinstance(definition, dict):
            definition = cls.parse_dict_definition(definition)
            definition = cls.parse_dict_definition(definition, selector_dict=selector_dict)
        return definition

    # This is the normal entrypoint of this code. Give it the
@@ -171,6 +198,8 @@ class SelectorDict:
        for selector in selectors:
            sel_name = selector["name"]
            selector_dict[sel_name] = selector
            definition = cls.parse_from_definition(selector["definition"])
            definition = cls.parse_from_definition(
                selector["definition"], selector_dict=deepcopy(selector_dict)
            )
            selector_dict[sel_name]["definition"] = definition
        return selector_dict

@@ -1,8 +1,15 @@
from typing import Dict, Any
from argparse import Namespace
from typing import Any, Dict, Optional, Union
from xmlrpc.client import Boolean
from dbt.contracts.project import UserConfig

import dbt.flags as flags
from dbt.clients import yaml_helper
from dbt.exceptions import raise_compiler_error, ValidationException
from dbt.logger import GLOBAL_LOGGER as logger
from dbt.config import Profile, Project, read_user_config
from dbt.config.renderer import DbtProjectYamlRenderer, ProfileRenderer
from dbt.events.functions import fire_event
from dbt.events.types import InvalidVarsYAML
from dbt.exceptions import ValidationException, raise_compiler_error


def parse_cli_vars(var_string: str) -> Dict[str, Any]:
@@ -18,5 +25,51 @@ def parse_cli_vars(var_string: str) -> Dict[str, Any]:
            "of type '{}'".format(type_name)
        )
    except ValidationException:
        logger.error("The YAML provided in the --vars argument is not valid.\n")
        fire_event(InvalidVarsYAML())
        raise


def get_project_config(
    project_path: str,
    profile_name: str,
    args: Namespace = Namespace(),
    cli_vars: Optional[Dict[str, Any]] = None,
    profile: Optional[Profile] = None,
    user_config: Optional[UserConfig] = None,
    return_dict: Boolean = True,
) -> Union[Project, Dict]:
    """Returns a project config (dict or object) from a given project path and profile name.

    Args:
        project_path: Path to the project
        profile_name: Name of the profile
        args: An argparse.Namespace that represents what would have been passed in on the
            command line (optional)
        cli_vars: A dict of any vars that would have been passed in on the command line (optional)
            (see parse_cli_vars above for formatting details)
        profile: A dbt.config.profile.Profile object (optional)
        user_config: A dbt.contracts.project.UserConfig object (optional)
        return_dict: Return a dict if True, return the full dbt.config.project.Project object if False

    Returns:
        A full project config

    """
    # Generate a profile if not provided
    if profile is None:
        # Generate user_config if not provided
        if user_config is None:
            user_config = read_user_config(flags.PROFILES_DIR)
        # Update flags
        flags.set_from_args(args, user_config)
        if cli_vars is None:
            cli_vars = {}
        profile = Profile.render_from_args(args, ProfileRenderer(cli_vars), profile_name)
    # Generate a project
    project = Project.from_project_root(
        project_path,
        DbtProjectYamlRenderer(profile),
        verify_version=bool(flags.VERSION_CHECK),
    )
    # Return
    return project.to_project_config() if return_dict else project
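
(Editor's aside: a usage sketch for this helper. The project path and profile name are hypothetical; the signature is the one defined above.)
```
from dbt.config.utils import get_project_config

config = get_project_config(
    project_path="/path/to/my_project",  # directory containing dbt_project.yml
    profile_name="my_profile",           # a profile defined in profiles.yml
)
print(config["name"], config["config-version"])
```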

core/dbt/context/README.md | 51 (new file)
@@ -0,0 +1,51 @@
# Contexts and Jinja rendering

Contexts are used for Jinja rendering. They include context methods, executable macros, and various settings that are available in Jinja.

The most common entrypoint to Jinja rendering in dbt is a method named `get_rendered`, which takes two arguments: templated code (string), and a context used to render it (dictionary).

The context is the bundle of information that is in "scope" when rendering Jinja-templated code. For instance, imagine a simple Jinja template:
```
{% set new_value = some_macro(some_variable) %}
```
Both `some_macro()` and `some_variable` must be defined in that context. Otherwise, rendering will raise an error.
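As a rough illustration (an editor's addition, with made-up context values), a plain Python callable placed in the context dictionary is usable from the template:
```
from dbt.clients.jinja import get_rendered

context = {"some_variable": "events", "some_macro": lambda v: v.upper()}
result = get_rendered(
    "{% set new_value = some_macro(some_variable) %}{{ new_value }}", context
)
# result == "EVENTS"
```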

Different contexts are used in different places because we allow access to different methods and data in different places. Executable SQL, for example, includes all available macros and the model being run. The variables and macros in scope for Jinja defined in yaml files are much more limited.

### Implementation

The context that is passed to Jinja is always in a dictionary format, not an actual class, so a `to_dict()` is executed on a context class before it is used for rendering.

Each context has a `generate_<name>_context` function to create the context. `ProviderContext` subclasses have different generate functions for parsing and for execution, so that certain functions (notably `ref`, `source`, and `config`) can return different results.
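A minimal sketch of that flow (editor's addition), using only names that appear elsewhere in this diff (`generate_base_context` from `core/dbt/context/base.py`):
```
from dbt.context.base import generate_base_context
from dbt.clients.jinja import get_rendered

ctx = generate_base_context({"event_type": "activation"})  # BaseContext(...).to_dict()
print(get_rendered("{{ var('event_type') }}", ctx))  # activation
```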

### Hierarchy

All contexts inherit from the `BaseContext`, which includes "pure" methods (e.g. `tojson`), `env_var()`, and `var()` (but only CLI values, passed via `--vars`).

Methods available in parent contexts are also available in child contexts.

```
BaseContext -- core/dbt/context/base.py
    SecretContext -- core/dbt/context/secret.py
    TargetContext -- core/dbt/context/target.py
        ConfiguredContext -- core/dbt/context/configured.py
            SchemaYamlContext -- core/dbt/context/configured.py
                DocsRuntimeContext -- core/dbt/context/configured.py
            MacroResolvingContext -- core/dbt/context/configured.py
            ManifestContext -- core/dbt/context/manifest.py
                QueryHeaderContext -- core/dbt/context/manifest.py
                ProviderContext -- core/dbt/context/provider.py
                    MacroContext -- core/dbt/context/provider.py
                    ModelContext -- core/dbt/context/provider.py
                    TestContext -- core/dbt/context/provider.py
```

### Contexts for configuration

Contexts for rendering "special" `.yml` (configuration) files:
- `SecretContext`: Supports "secret" env vars, which are prefixed with `DBT_ENV_SECRET_`. Used for rendering in `profiles.yml` and `packages.yml` ONLY. Secrets defined elsewhere will raise explicit errors.
- `TargetContext`: The same as `Base`, plus `target` (connection profile). Used most notably in `dbt_project.yml` and `selectors.yml`.

Contexts for other `.yml` files in the project:
- `SchemaYamlContext`: Supports `vars` declared on the CLI and in `dbt_project.yml`. Does not support custom macros, beyond `var()` + `env_var()` methods. Used for all `.yml` files, to define properties and configuration.
- `DocsRuntimeContext`: Standard `.yml` file context, plus `doc()` method (with all `docs` blocks in scope). Used to resolve `description` properties.
@@ -1,531 +0,0 @@
import json
import os
from typing import Any, Dict, NoReturn, Optional, Mapping

from dbt import flags
from dbt import tracking
from dbt.clients.jinja import undefined_error, get_rendered
from dbt.clients.yaml_helper import (  # noqa: F401
    yaml,
    safe_load,
    SafeLoader,
    Loader,
    Dumper,
)
from dbt.contracts.graph.compiled import CompiledResource
from dbt.exceptions import raise_compiler_error, MacroReturn
from dbt.logger import GLOBAL_LOGGER as logger
from dbt.version import __version__ as dbt_version

# These modules are added to the context. Consider alternative
# approaches which will extend well to potentially many modules
import pytz
import datetime
import re


def get_pytz_module_context() -> Dict[str, Any]:
    context_exports = pytz.__all__  # type: ignore

    return {name: getattr(pytz, name) for name in context_exports}


def get_datetime_module_context() -> Dict[str, Any]:
    context_exports = ["date", "datetime", "time", "timedelta", "tzinfo"]

    return {name: getattr(datetime, name) for name in context_exports}


def get_re_module_context() -> Dict[str, Any]:
    context_exports = re.__all__

    return {name: getattr(re, name) for name in context_exports}


def get_context_modules() -> Dict[str, Dict[str, Any]]:
    return {
        "pytz": get_pytz_module_context(),
        "datetime": get_datetime_module_context(),
        "re": get_re_module_context(),
    }


class ContextMember:
    def __init__(self, value, name=None):
        self.name = name
        self.inner = value

    def key(self, default):
        if self.name is None:
            return default
        return self.name


def contextmember(value):
    if isinstance(value, str):
        return lambda v: ContextMember(v, name=value)
    return ContextMember(value)


def contextproperty(value):
    if isinstance(value, str):
        return lambda v: ContextMember(property(v), name=value)
    return ContextMember(property(value))


class ContextMeta(type):
    def __new__(mcls, name, bases, dct):
        context_members = {}
        context_attrs = {}
        new_dct = {}

        for base in bases:
            context_members.update(getattr(base, "_context_members_", {}))
            context_attrs.update(getattr(base, "_context_attrs_", {}))

        for key, value in dct.items():
            if isinstance(value, ContextMember):
                context_key = value.key(key)
                context_members[context_key] = value.inner
                context_attrs[context_key] = key
                value = value.inner
            new_dct[key] = value
        new_dct["_context_members_"] = context_members
        new_dct["_context_attrs_"] = context_attrs
        return type.__new__(mcls, name, bases, new_dct)


class Var:
    UndefinedVarError = (
        "Required var '{}' not found in config:\nVars " "supplied to {} = {}"
    )
    _VAR_NOTSET = object()

    def __init__(
        self,
        context: Mapping[str, Any],
        cli_vars: Mapping[str, Any],
        node: Optional[CompiledResource] = None,
    ) -> None:
        self._context: Mapping[str, Any] = context
        self._cli_vars: Mapping[str, Any] = cli_vars
        self._node: Optional[CompiledResource] = node
        self._merged: Mapping[str, Any] = self._generate_merged()

    def _generate_merged(self) -> Mapping[str, Any]:
        return self._cli_vars

    @property
    def node_name(self):
        if self._node is not None:
            return self._node.name
        else:
            return "<Configuration>"

    def get_missing_var(self, var_name):
        dct = {k: self._merged[k] for k in self._merged}
        pretty_vars = json.dumps(dct, sort_keys=True, indent=4)
        msg = self.UndefinedVarError.format(var_name, self.node_name, pretty_vars)
        raise_compiler_error(msg, self._node)

    def has_var(self, var_name: str):
        return var_name in self._merged

    def get_rendered_var(self, var_name):
        raw = self._merged[var_name]
        # if bool/int/float/etc are passed in, don't compile anything
        if not isinstance(raw, str):
            return raw

        return get_rendered(raw, self._context)

    def __call__(self, var_name, default=_VAR_NOTSET):
        if self.has_var(var_name):
            return self.get_rendered_var(var_name)
        elif default is not self._VAR_NOTSET:
            return default
        else:
            return self.get_missing_var(var_name)


class BaseContext(metaclass=ContextMeta):
    def __init__(self, cli_vars):
        self._ctx = {}
        self.cli_vars = cli_vars

    def generate_builtins(self):
        builtins: Dict[str, Any] = {}
        for key, value in self._context_members_.items():
            if hasattr(value, "__get__"):
                # handle properties, bound methods, etc
                value = value.__get__(self)
            builtins[key] = value
        return builtins

    # no dbtClassMixin so this is not an actual override
    def to_dict(self):
        self._ctx["context"] = self._ctx
        builtins = self.generate_builtins()
        self._ctx["builtins"] = builtins
        self._ctx.update(builtins)
        return self._ctx

    @contextproperty
    def dbt_version(self) -> str:
        """The `dbt_version` variable returns the installed version of dbt that
        is currently running. It can be used for debugging or auditing
        purposes.

        > macros/get_version.sql

            {% macro get_version() %}
              {% set msg = "The installed version of dbt is: " ~ dbt_version %}
              {% do log(msg, info=true) %}
            {% endmacro %}

        Example output:

            $ dbt run-operation get_version
            The installed version of dbt is 0.16.0
        """
        return dbt_version

    @contextproperty
    def var(self) -> Var:
        """Variables can be passed from your `dbt_project.yml` file into models
        during compilation. These variables are useful for configuring packages
        for deployment in multiple environments, or defining values that should
        be used across multiple models within a package.

        To add a variable to a model, use the `var()` function:

        > my_model.sql:

            select * from events where event_type = '{{ var("event_type") }}'

        If you try to run this model without supplying an `event_type`
        variable, you'll receive a compilation error that looks like this:

            Encountered an error:
            ! Compilation error while compiling model package_name.my_model:
            ! Required var 'event_type' not found in config:
            Vars supplied to package_name.my_model = {
            }

        To supply a variable to a given model, add one or more `vars`
        dictionaries to the `models` config in your `dbt_project.yml` file.
        These `vars` are in-scope for all models at or below where they are
        defined, so place them where they make the most sense. Below are three
        different placements of the `vars` dict, all of which will make the
        `my_model` model compile.

        > dbt_project.yml:

            # 1) scoped at the model level
            models:
              package_name:
                my_model:
                  materialized: view
                  vars:
                    event_type: activation
            # 2) scoped at the package level
            models:
              package_name:
                vars:
                  event_type: activation
                my_model:
                  materialized: view
            # 3) scoped globally
            models:
              vars:
                event_type: activation
              package_name:
                my_model:
                  materialized: view

        ## Variable default values

        The `var()` function takes an optional second argument, `default`. If
        this argument is provided, then it will be the default value for the
        variable if one is not explicitly defined.

        > my_model.sql:

            -- Use 'activation' as the event_type if the variable is not
            -- defined.
            select *
            from events
            where event_type = '{{ var("event_type", "activation") }}'
        """
        return Var(self._ctx, self.cli_vars)

    @contextmember
    @staticmethod
    def env_var(var: str, default: Optional[str] = None) -> str:
        """The env_var() function. Return the environment variable named 'var'.
        If there is no such environment variable set, return the default.

        If the default is None, raise an exception for an undefined variable.
        """
        if var in os.environ:
            return os.environ[var]
        elif default is not None:
            return default
        else:
            msg = f"Env var required but not provided: '{var}'"
            undefined_error(msg)

    if os.environ.get("DBT_MACRO_DEBUGGING"):

        @contextmember
        @staticmethod
        def debug():
            """Enter a debugger at this line in the compiled jinja code."""
            import sys
            import ipdb  # type: ignore

            frame = sys._getframe(3)
            ipdb.set_trace(frame)
            return ""

    @contextmember("return")
    @staticmethod
    def _return(data: Any) -> NoReturn:
        """The `return` function can be used in macros to return data to the
        caller. The type of the data (`dict`, `list`, `int`, etc) will be
        preserved through the return call.

        :param data: The data to return to the caller


        > macros/example.sql:

            {% macro get_data() %}
              {{ return([1,2,3]) }}
            {% endmacro %}

        > models/my_model.sql:

            select
              -- get_data() returns a list!
              {% for i in get_data() %}
                {{ i }}
                {% if not loop.last %},{% endif %}
              {% endfor %}

        """
        raise MacroReturn(data)

    @contextmember
    @staticmethod
    def fromjson(string: str, default: Any = None) -> Any:
        """The `fromjson` context method can be used to deserialize a json
        string into a Python object primitive, e.g. a `dict` or `list`.

        :param value: The json string to deserialize
        :param default: A default value to return if the `string` argument
            cannot be deserialized (optional)

        Usage:

            {% set my_json_str = '{"abc": 123}' %}
            {% set my_dict = fromjson(my_json_str) %}
            {% do log(my_dict['abc']) %}
        """
        try:
            return json.loads(string)
        except ValueError:
            return default

    @contextmember
    @staticmethod
    def tojson(value: Any, default: Any = None, sort_keys: bool = False) -> Any:
        """The `tojson` context method can be used to serialize a Python
        object primitive, e.g. a `dict` or `list`, to a json string.

        :param value: The value to serialize to json
        :param default: A default value to return if the `value` argument
            cannot be serialized
        :param sort_keys: If True, sort the keys.


        Usage:

            {% set my_dict = {"abc": 123} %}
            {% set my_json_string = tojson(my_dict) %}
            {% do log(my_json_string) %}
        """
        try:
            return json.dumps(value, sort_keys=sort_keys)
        except ValueError:
            return default

    @contextmember
    @staticmethod
    def fromyaml(value: str, default: Any = None) -> Any:
        """The fromyaml context method can be used to deserialize a yaml string
        into a Python object primitive, e.g. a `dict` or `list`.

        :param value: The yaml string to deserialize
        :param default: A default value to return if the `string` argument
            cannot be deserialized (optional)

        Usage:

            {% set my_yml_str -%}
            dogs:
             - good
             - bad
            {%- endset %}
            {% set my_dict = fromyaml(my_yml_str) %}
            {% do log(my_dict['dogs'], info=true) %}
            -- ["good", "bad"]
            {% do my_dict['dogs'].pop() %}
            {% do log(my_dict['dogs'], info=true) %}
            -- ["good"]
        """
        try:
            return safe_load(value)
        except (AttributeError, ValueError, yaml.YAMLError):
            return default

    # safe_dump defaults to sort_keys=True, but we act like json.dumps (the
    # opposite)
    @contextmember
    @staticmethod
    def toyaml(
        value: Any, default: Optional[str] = None, sort_keys: bool = False
    ) -> Optional[str]:
        """The `toyaml` context method can be used to serialize a Python
        object primitive, e.g. a `dict` or `list`, to a yaml string.

        :param value: The value to serialize to yaml
        :param default: A default value to return if the `value` argument
            cannot be serialized
        :param sort_keys: If True, sort the keys.


        Usage:

            {% set my_dict = {"abc": 123} %}
            {% set my_yaml_string = toyaml(my_dict) %}
            {% do log(my_yaml_string) %}
        """
        try:
            return yaml.safe_dump(data=value, sort_keys=sort_keys)
        except (ValueError, yaml.YAMLError):
            return default

    @contextmember
    @staticmethod
    def log(msg: str, info: bool = False) -> str:
        """Logs a line to either the log file or stdout.

        :param msg: The message to log
        :param info: If `False`, write to the log file. If `True`, write to
            both the log file and stdout.

        > macros/my_log_macro.sql

            {% macro some_macro(arg1, arg2) %}
              {{ log("Running some_macro: " ~ arg1 ~ ", " ~ arg2) }}
            {% endmacro %}
        """
        if info:
            logger.info(msg)
        else:
            logger.debug(msg)
        return ""

    @contextproperty
    def run_started_at(self) -> Optional[datetime.datetime]:
        """`run_started_at` outputs the timestamp that this run started, e.g.
        `2017-04-21 01:23:45.678`. The `run_started_at` variable is a Python
        `datetime` object. As of 0.9.1, the timezone of this variable defaults
        to UTC.

        > run_started_at_example.sql

            select
                '{{ run_started_at.strftime("%Y-%m-%d") }}' as date_day
            from ...

        To modify the timezone of this variable, use the `pytz` module:

        > run_started_at_utc.sql

            {% set est = modules.pytz.timezone("America/New_York") %}
            select
                '{{ run_started_at.astimezone(est) }}' as run_started_est
            from ...
        """
        if tracking.active_user is not None:
            return tracking.active_user.run_started_at
        else:
            return None

    @contextproperty
    def invocation_id(self) -> Optional[str]:
        """invocation_id outputs a UUID generated for this dbt run (useful for
        auditing)
        """
        if tracking.active_user is not None:
            return tracking.active_user.invocation_id
        else:
            return None

    @contextproperty
    def modules(self) -> Dict[str, Any]:
        """The `modules` variable in the Jinja context contains useful Python
        modules for operating on data.

        # datetime

        This variable is a pointer to the Python datetime module.

        Usage:

            {% set dt = modules.datetime.datetime.now() %}

        # pytz

        This variable is a pointer to the Python pytz module.

        Usage:

            {% set dt = modules.datetime.datetime(2002, 10, 27, 6, 0, 0) %}
            {% set dt_local = modules.pytz.timezone('US/Eastern').localize(dt) %}
            {{ dt_local }}
        """  # noqa
        return get_context_modules()

    @contextproperty
    def flags(self) -> Any:
        """The `flags` variable contains true/false values for flags provided
        on the command line.

        > flags.sql:

            {% if flags.FULL_REFRESH %}
            drop table ...
            {% else %}
            -- no-op
            {% endif %}

        The list of valid flags is:

        - `flags.STRICT_MODE`: True if `--strict` (or `-S`) was provided on the
          command line
        - `flags.FULL_REFRESH`: True if `--full-refresh` was provided on the
          command line
        - `flags.NON_DESTRUCTIVE`: True if `--non-destructive` was provided on
          the command line
        """
        return flags


def generate_base_context(cli_vars: Dict[str, Any]) -> Dict[str, Any]:
    ctx = BaseContext(cli_vars)
    # This is not a Mashumaro to_dict call
    return ctx.to_dict()

Some files were not shown because too many files have changed in this diff.