Mirror of https://github.com/dbt-labs/dbt-core, synced 2025-12-18 20:01:28 +00:00.

Comparing commits: v0.8.2...experiment (3587 commits)
[Commit table omitted: the source rows carried only abbreviated SHA1 hashes; the Author and Date columns were empty in the extracted page.]
|
|
c2bc1c5361 | ||
|
|
2cb7394583 | ||
|
|
cbb256adde | ||
|
|
0b51d18b3a | ||
|
|
6454a81593 | ||
|
|
08150b09fc | ||
|
|
738304f438 | ||
|
|
5b94bc3259 | ||
|
|
ad02e8cd05 | ||
|
|
665264723d | ||
|
|
f588876461 | ||
|
|
cce9e3a100 | ||
|
|
b7134385b2 | ||
|
|
63793b74f2 | ||
|
|
0b0e9e02e7 | ||
|
|
5c60f18146 | ||
|
|
c0ce5cb3e3 | ||
|
|
8e84f53c65 | ||
|
|
ea974fde74 | ||
|
|
a5ee60c56a | ||
|
|
2a8f0b8d0b | ||
|
|
4f2f5bb700 | ||
|
|
5fc97bc7f3 | ||
|
|
c11cd92b83 | ||
|
|
9a91aa2584 | ||
|
|
15b13054d1 | ||
|
|
22d4a1d73d | ||
|
|
fb970192cd | ||
|
|
c58daa1dc9 | ||
|
|
28ef796d47 | ||
|
|
8135948e0d | ||
|
|
1c67d19b37 | ||
|
|
bb8883b7ef | ||
|
|
361eee66ca | ||
|
|
228524fa46 | ||
|
|
acbda732a8 | ||
|
|
18a5e44dbc | ||
|
|
6652eced95 | ||
|
|
b4772bc3b6 | ||
|
|
273af5368f | ||
|
|
582f9f9143 | ||
|
|
1620a17eca | ||
|
|
16fa082e47 | ||
|
|
f473eae8c3 | ||
|
|
40f009f017 | ||
|
|
97536e53a8 | ||
|
|
7cbec9ee8f | ||
|
|
18f3849678 | ||
|
|
f2d153779c | ||
|
|
d66f3a8bf4 | ||
|
|
197c05106e | ||
|
|
8e9a44ee3f | ||
|
|
5c2fa708e7 | ||
|
|
6e620589b5 | ||
|
|
97d836eb1d | ||
|
|
13ebfdf831 | ||
|
|
130cac96d3 | ||
|
|
dd8307268e | ||
|
|
99a04e9512 | ||
|
|
0cf38bcd19 | ||
|
|
19e4e0cbeb | ||
|
|
59ad09cfbd | ||
|
|
c1c9fc1ed4 | ||
|
|
df570f6889 | ||
|
|
669a29bded | ||
|
|
ff047a27c9 | ||
|
|
957115e467 | ||
|
|
be541237e8 | ||
|
|
b801518195 | ||
|
|
32615f48b6 | ||
|
|
6a62ec43e9 | ||
|
|
74b00f42df | ||
|
|
7954ff688f | ||
|
|
05777ebf38 | ||
|
|
26a1fb06a0 | ||
|
|
b370852272 | ||
|
|
f143cfe213 | ||
|
|
0d074c36e5 | ||
|
|
c358fc0c04 | ||
|
|
f316b07a2b | ||
|
|
025a87222f | ||
|
|
492305e965 | ||
|
|
ac16fefbf8 | ||
|
|
386abbee66 | ||
|
|
3a8ffa7e0f | ||
|
|
75d6413f49 | ||
|
|
6ef1ef6a3d | ||
|
|
d820d68a8c | ||
|
|
0f164c9204 | ||
|
|
7145fa5528 | ||
|
|
4f4810c327 | ||
|
|
d24cf0ad27 | ||
|
|
36cd3331a7 | ||
|
|
e0d87eee71 | ||
|
|
5d4c770b6c | ||
|
|
fe2e22f5b8 | ||
|
|
78ca9d3ab8 | ||
|
|
5c5f471731 | ||
|
|
4d27585b34 | ||
|
|
0fddcfef32 | ||
|
|
d56800f638 | ||
|
|
c38e34fe1b | ||
|
|
68047d6fa7 | ||
|
|
94c2b05a60 | ||
|
|
3046eee5dc | ||
|
|
6d6cb201ca | ||
|
|
8587bd4435 | ||
|
|
50efa65c12 | ||
|
|
aa06a8a606 | ||
|
|
606014642d | ||
|
|
dd406a8cdb | ||
|
|
9b15377cee | ||
|
|
ed4b8f0c8f | ||
|
|
d946ac2c99 | ||
|
|
383ea3542a | ||
|
|
c367d5bc75 | ||
|
|
4ab0ec96a8 | ||
|
|
9955070085 | ||
|
|
56957d4940 | ||
|
|
4bb01e5fe8 | ||
|
|
01212cb19d | ||
|
|
15ca05d7e1 | ||
|
|
1876257610 | ||
|
|
00fdf6a1c1 | ||
|
|
b99b4d5ef3 | ||
|
|
c1c88a2ca7 | ||
|
|
9214e98c78 | ||
|
|
3b12b93e09 | ||
|
|
96172da83f | ||
|
|
8240542d3e | ||
|
|
088553d308 | ||
|
|
39cab15994 | ||
|
|
045fccc8c0 | ||
|
|
0e5a8f158e | ||
|
|
72c39bcfc8 | ||
|
|
017e08747d | ||
|
|
b1e186a132 | ||
|
|
78fd05ab73 | ||
|
|
435f1b4781 | ||
|
|
287c4cf89f | ||
|
|
3601f1c9ee | ||
|
|
ca42b63bc2 | ||
|
|
06725c5a51 | ||
|
|
bc23db08fb | ||
|
|
6802237479 | ||
|
|
dbb32e99c7 | ||
|
|
9593a6f720 | ||
|
|
2d91be0329 | ||
|
|
796130066d | ||
|
|
554ecb0e33 | ||
|
|
c9b1cade48 | ||
|
|
8570c632b9 | ||
|
|
eb9e4f7133 | ||
|
|
5fe95db75e | ||
|
|
88f3430f1e | ||
|
|
0822af4e68 | ||
|
|
d4bdc50b57 | ||
|
|
c027a12654 | ||
|
|
4b5417ead3 | ||
|
|
2a6277cc19 | ||
|
|
cd0263c2e4 | ||
|
|
f7c0b6f59c | ||
|
|
090172ca66 | ||
|
|
66fc74ac18 | ||
|
|
9a395facfd | ||
|
|
1a1d45c9a0 | ||
|
|
3868f70b18 | ||
|
|
90ea0e601b | ||
|
|
7846a2ecba | ||
|
|
1edfb50000 | ||
|
|
6435c0f5f7 | ||
|
|
35333c5fe2 | ||
|
|
ebdc11b380 | ||
|
|
b15c5a7278 | ||
|
|
48810996b3 | ||
|
|
19ccbf2d47 | ||
|
|
02fc867ef4 | ||
|
|
bb5d211c94 | ||
|
|
afba7f7294 | ||
|
|
d050b3268a | ||
|
|
e7ef99bae9 | ||
|
|
f14ad85402 | ||
|
|
457df4de2c | ||
|
|
b33dbf0717 | ||
|
|
dad9970ca6 | ||
|
|
afe1489a73 | ||
|
|
7cd544d33e | ||
|
|
13cb504c38 | ||
|
|
d528d25f8c | ||
|
|
5c062d6700 | ||
|
|
e2e26141d9 | ||
|
|
0e580ca6a6 | ||
|
|
70299041b0 | ||
|
|
fb25258a62 | ||
|
|
0a21938fe3 | ||
|
|
768abdeea3 | ||
|
|
62228291ab | ||
|
|
f4cbf85e2e | ||
|
|
81e31e7be2 | ||
|
|
7baf983574 | ||
|
|
8122c1b692 | ||
|
|
e752345cc5 | ||
|
|
e57c497f7d | ||
|
|
cbc675e584 | ||
|
|
a698486c2d | ||
|
|
5a4bdd44e4 | ||
|
|
1d8b2370d3 | ||
|
|
b71ff3799c | ||
|
|
dd4f4c4e8c | ||
|
|
cb285f55d1 | ||
|
|
8428740098 | ||
|
|
7b70efe4cd | ||
|
|
af59fd8514 | ||
|
|
d1afb27fe9 | ||
|
|
545cf0b0c5 | ||
|
|
a57a487513 | ||
|
|
4b2332ae39 | ||
|
|
f739bd3927 | ||
|
|
825df517db | ||
|
|
afb46586ab | ||
|
|
9bcd4e3061 | ||
|
|
ee1f385ed2 | ||
|
|
1f2d9ca2ea | ||
|
|
eaea480060 | ||
|
|
9cda84f855 | ||
|
|
6e82e31c77 | ||
|
|
f58751b356 | ||
|
|
34c113ad98 | ||
|
|
679e57cfa9 | ||
|
|
49373e54d1 | ||
|
|
c4262a7734 | ||
|
|
e07a017984 | ||
|
|
6d5802c788 | ||
|
|
02799e5297 | ||
|
|
b89018eb84 | ||
|
|
b6f5283dd1 | ||
|
|
4459c0d04c | ||
|
|
5ccaf5b7e2 | ||
|
|
92566fdbb1 | ||
|
|
9c9baf98a3 | ||
|
|
4b43b6d2b3 | ||
|
|
b801f9d762 | ||
|
|
57eaa0cfa4 | ||
|
|
77eb04a5c3 | ||
|
|
c815004860 | ||
|
|
3cb27fbd50 | ||
|
|
8256706f40 | ||
|
|
ceb51dfdcf | ||
|
|
e79904fbd7 | ||
|
|
ba6dca6e3b | ||
|
|
ccd16a2b8a | ||
|
|
903612dc56 | ||
|
|
7c286fc8b9 | ||
|
|
30f03692ef | ||
|
|
bd79dd1aec | ||
|
|
b4480cb88f | ||
|
|
10ceaa256f | ||
|
|
74b33f483b | ||
|
|
5fef7529c2 | ||
|
|
1209212f45 | ||
|
|
d9001f8765 | ||
|
|
1df26d5ac7 | ||
|
|
b8aeb40ca5 | ||
|
|
2a11069380 | ||
|
|
cad3f9a5ac | ||
|
|
b5c2ce3521 | ||
|
|
6d969817d0 | ||
|
|
2690b50986 | ||
|
|
5957195aaf | ||
|
|
76f526d167 | ||
|
|
4136e96ce3 | ||
|
|
1c2f6b6284 | ||
|
|
37cd3e10ed | ||
|
|
4a9e3ee937 | ||
|
|
1e98c5467d | ||
|
|
9767d11162 | ||
|
|
c60187f78a | ||
|
|
73febed2dc | ||
|
|
832f4286bb | ||
|
|
befe9c2e52 | ||
|
|
c8d329ebf4 | ||
|
|
777510edec | ||
|
|
4a15f5e1f5 | ||
|
|
4e57b17c0b | ||
|
|
f44a5121f4 | ||
|
|
56b7aacb8a | ||
|
|
1123f7e16f | ||
|
|
a4b6048fea | ||
|
|
c67924f0e2 | ||
|
|
94046075c1 | ||
|
|
ec83c0256f | ||
|
|
3b3a486966 | ||
|
|
2e1aaac1ed | ||
|
|
89001e15b8 | ||
|
|
5d0624becc | ||
|
|
fd3f9efdd0 | ||
|
|
44e06eecee | ||
|
|
12d5c58e3c | ||
|
|
37c4279629 | ||
|
|
4cc8de920c | ||
|
|
9ead2663c2 | ||
|
|
af42a20f4f | ||
|
|
e6550b464d | ||
|
|
fdbf030723 | ||
|
|
42e611af67 | ||
|
|
1e513f3f47 | ||
|
|
a2f8f48e48 | ||
|
|
ad7800695e | ||
|
|
87d04cee9e | ||
|
|
6f01836f10 | ||
|
|
975df131da | ||
|
|
2e1d7c7668 | ||
|
|
27d62b87d5 | ||
|
|
d6ee3ad160 | ||
|
|
e6b21796c1 | ||
|
|
a7ef822636 | ||
|
|
b995cbbbee | ||
|
|
ed22de1847 | ||
|
|
cea11a3165 | ||
|
|
38dc9fa23d | ||
|
|
c7c3d09355 | ||
|
|
3cb174feb2 | ||
|
|
5d049b0ede | ||
|
|
012a98949c | ||
|
|
736c5aed2f | ||
|
|
adca49cc9d | ||
|
|
2330e67499 | ||
|
|
9ad1dd10bf | ||
|
|
c5c09d077f | ||
|
|
b28b23c7df | ||
|
|
a57c0b2428 | ||
|
|
be8b9c0b0b | ||
|
|
9655869416 | ||
|
|
526a449e5d | ||
|
|
bf67477cac | ||
|
|
eed4b5c388 | ||
|
|
9afe77a0bb | ||
|
|
98295558a6 | ||
|
|
6bf9c326f9 | ||
|
|
ff64f8166a | ||
|
|
fa7f5070c4 | ||
|
|
9bcaf2b059 | ||
|
|
743fef66d8 | ||
|
|
ece36751f0 | ||
|
|
610ae5d9e4 | ||
|
|
c79c41298b | ||
|
|
ce0e31c1d9 | ||
|
|
95fa78ac80 | ||
|
|
5c56653c1f | ||
|
|
f3854a7164 | ||
|
|
26af941e5d | ||
|
|
3a6f6d4fa5 | ||
|
|
f1824469cd | ||
|
|
ccef3c4697 | ||
|
|
05f8c28ed1 | ||
|
|
03d9e0b24d | ||
|
|
5e877c055b | ||
|
|
c3c824330a | ||
|
|
5579a30392 | ||
|
|
3794336920 | ||
|
|
574d859bed | ||
|
|
e5bc9c08bc | ||
|
|
7e1f6eef66 | ||
|
|
9d8275c7d6 | ||
|
|
4b02265c1b | ||
|
|
c13f16ca6d | ||
|
|
7f9be89b8d | ||
|
|
568c82e25c | ||
|
|
111142ba4f | ||
|
|
b5a5003921 | ||
|
|
577609b392 | ||
|
|
3c767e015e | ||
|
|
c68bf7a7d8 | ||
|
|
1b947ec180 | ||
|
|
0b05355522 | ||
|
|
3effb00c80 | ||
|
|
e86bbe0816 | ||
|
|
8548b6d340 | ||
|
|
587d525d98 | ||
|
|
4d70120b1d | ||
|
|
376709c944 | ||
|
|
6bf8028b1f | ||
|
|
a2ef3741ad | ||
|
|
cdfdb24f92 | ||
|
|
3ca2d5c6b8 | ||
|
|
5f6bda072a | ||
|
|
40afdf4a76 | ||
|
|
fa4a2f9eeb | ||
|
|
c402207f74 | ||
|
|
56e30286ba | ||
|
|
19be61ac97 | ||
|
|
c49970bcf0 | ||
|
|
1889b5b7b4 | ||
|
|
9d09931903 | ||
|
|
d05f9959f3 | ||
|
|
07903368d4 | ||
|
|
684bde9039 | ||
|
|
3aab1a558f | ||
|
|
2ff8a25192 | ||
|
|
285f1da847 | ||
|
|
5b2ba18bfe | ||
|
|
8aba382350 | ||
|
|
540631f696 | ||
|
|
145a82b738 | ||
|
|
5d9b8c5995 | ||
|
|
b20fa52bcd | ||
|
|
89e45fb738 | ||
|
|
0f37c9811e | ||
|
|
e7abe27bfa | ||
|
|
bf7608550d | ||
|
|
a894ca9e65 | ||
|
|
58c184a1f4 | ||
|
|
1c4ead3572 | ||
|
|
e20ac4193b | ||
|
|
d13d85681f | ||
|
|
a382def2d0 | ||
|
|
d28407d735 | ||
|
|
457db9d09e | ||
|
|
13691adc63 | ||
|
|
fa43d9d117 | ||
|
|
5f0c645d5a | ||
|
|
a52b30aa2a | ||
|
|
c19a42625a | ||
|
|
f6fcbaffad | ||
|
|
d1d7dcb9cc | ||
|
|
5b76eb1161 | ||
|
|
f65b3d677a | ||
|
|
688fa467b2 | ||
|
|
bc432f9584 | ||
|
|
4133656bea | ||
|
|
0413b6c841 | ||
|
|
8c874176f4 | ||
|
|
3d0c026835 | ||
|
|
d4966b6bca | ||
|
|
7be0fb0d56 | ||
|
|
f5f7bea3db | ||
|
|
5344f54c3c | ||
|
|
7d7b557142 | ||
|
|
0a797af081 | ||
|
|
70464529a6 | ||
|
|
5fefbbd214 | ||
|
|
3567e205a9 | ||
|
|
e20796e828 | ||
|
|
c81417dc1a | ||
|
|
d966ec28aa | ||
|
|
64a6ec552c | ||
|
|
e680e46671 | ||
|
|
edbc7ca885 | ||
|
|
4131c06e12 | ||
|
|
848ff6a3f9 | ||
|
|
68634a2e87 | ||
|
|
2d441f8ebd | ||
|
|
79c60f68c5 | ||
|
|
5664fe4a2e | ||
|
|
69616bb3c9 | ||
|
|
32bdc15dc0 | ||
|
|
d62ce34fc3 | ||
|
|
2ff26aa41a | ||
|
|
24c50341fb | ||
|
|
fc9601e73f | ||
|
|
10a8ad29d9 | ||
|
|
09e28a7226 | ||
|
|
600c5edaed | ||
|
|
c6baa16aa8 | ||
|
|
ab5dd3e7f2 | ||
|
|
30d6068278 | ||
|
|
1d086e4145 | ||
|
|
5473639b9f | ||
|
|
862211458c | ||
|
|
a5d17a30c7 | ||
|
|
262ab34158 | ||
|
|
1b69d553c5 | ||
|
|
5fbcd12218 | ||
|
|
6783966d0d | ||
|
|
032a56372a | ||
|
|
4eb75ec5b6 | ||
|
|
0372fefae0 | ||
|
|
76098ea883 | ||
|
|
a1aa8639fe | ||
|
|
4d56644db0 | ||
|
|
d3170e606f | ||
|
|
0f899bf029 | ||
|
|
011434d314 | ||
|
|
94033d22f8 | ||
|
|
4415cb65f5 | ||
|
|
598aeeb4df | ||
|
|
788bbdb65a | ||
|
|
eba47d0af3 | ||
|
|
8ff58a2e0e | ||
|
|
323ab1e02d | ||
|
|
6dbe79bfb5 | ||
|
|
850d00ccbe | ||
|
|
33da2db729 | ||
|
|
ed60db0d3b | ||
|
|
1c7f64ecde | ||
|
|
2afc202678 | ||
|
|
f7f78c0853 | ||
|
|
f039bb632d | ||
|
|
c104fdb3fc | ||
|
|
e716cba0ab | ||
|
|
e57a770adb | ||
|
|
9c05b6eb45 | ||
|
|
c82d69f115 | ||
|
|
cc9c1855b0 | ||
|
|
fdf4a42eba | ||
|
|
a552e3a0a0 | ||
|
|
3489e90f28 | ||
|
|
c643ac96aa | ||
|
|
b648b993b1 | ||
|
|
fe477aecb5 | ||
|
|
969cc9bca0 | ||
|
|
da941a3da2 | ||
|
|
cee92f5f8c | ||
|
|
f2e8e8b163 | ||
|
|
478f1e4f82 | ||
|
|
c750ab9d09 | ||
|
|
e40ed43d7b | ||
|
|
fd17c2e404 | ||
|
|
9f365eb68d | ||
|
|
c395b47f7b | ||
|
|
bc50a251e3 | ||
|
|
c02caa0e5b | ||
|
|
6e52b528e2 | ||
|
|
9a282d25e7 | ||
|
|
478490286d | ||
|
|
230329ee64 | ||
|
|
bc15f9a025 | ||
|
|
efe4ecec89 | ||
|
|
5cc2e13c37 | ||
|
|
1e10f5bf9c | ||
|
|
335e5ff5ea | ||
|
|
6eadce1922 | ||
|
|
238808acfc | ||
|
|
aea38650d3 | ||
|
|
cda3a680ff | ||
|
|
7a5670b977 | ||
|
|
6762fc22f4 | ||
|
|
b72854daa2 | ||
|
|
130d8d2878 | ||
|
|
fa13e6178e | ||
|
|
70bd0deb28 | ||
|
|
d248955d33 | ||
|
|
af7958471d | ||
|
|
3783515dff | ||
|
|
dd31dae017 | ||
|
|
1227c4b0a8 | ||
|
|
44ebe6e823 | ||
|
|
ecf7596492 | ||
|
|
a37374d1a4 | ||
|
|
8d8a7fb6ba | ||
|
|
8fa29fe3ce | ||
|
|
82c75a5334 | ||
|
|
1df97bf577 | ||
|
|
70d7e042cb | ||
|
|
2d6a81f761 | ||
|
|
66d8673aed | ||
|
|
e0430e6d11 | ||
|
|
bda33e18ee | ||
|
|
82580dc9da | ||
|
|
852dc49546 | ||
|
|
8634f9b2ba | ||
|
|
9046d8517b | ||
|
|
21d2c7a9f7 | ||
|
|
88ca656a7c | ||
|
|
2487369abb | ||
|
|
3c6a99a6c5 | ||
|
|
d34c69d9d3 | ||
|
|
08b2d778ea | ||
|
|
8d23ac8b28 | ||
|
|
ff6a0e22d1 | ||
|
|
f458c426f5 | ||
|
|
8b8b3aa47f | ||
|
|
52adcd74e1 |
@@ -1,9 +1,44 @@
|
||||
[bumpversion]
|
||||
current_version = 0.8.2
|
||||
commit = True
|
||||
tag = True
|
||||
current_version = 0.19.0
|
||||
parse = (?P<major>\d+)
|
||||
\.(?P<minor>\d+)
|
||||
\.(?P<patch>\d+)
|
||||
((?P<prerelease>[a-z]+)(?P<num>\d+))?
|
||||
serialize =
|
||||
{major}.{minor}.{patch}{prerelease}{num}
|
||||
{major}.{minor}.{patch}
|
||||
commit = False
|
||||
tag = False
|
||||
|
||||
[bumpversion:part:prerelease]
|
||||
first_value = a
|
||||
values =
|
||||
a
|
||||
b
|
||||
rc
|
||||
|
||||
[bumpversion:part:num]
|
||||
first_value = 1
|
||||
|
||||
[bumpversion:file:setup.py]
|
||||
|
||||
[bumpversion:file:dbt/version.py]
|
||||
[bumpversion:file:core/setup.py]
|
||||
|
||||
[bumpversion:file:core/dbt/version.py]
|
||||
|
||||
[bumpversion:file:plugins/postgres/setup.py]
|
||||
|
||||
[bumpversion:file:plugins/redshift/setup.py]
|
||||
|
||||
[bumpversion:file:plugins/snowflake/setup.py]
|
||||
|
||||
[bumpversion:file:plugins/bigquery/setup.py]
|
||||
|
||||
[bumpversion:file:plugins/postgres/dbt/adapters/postgres/__version__.py]
|
||||
|
||||
[bumpversion:file:plugins/redshift/dbt/adapters/redshift/__version__.py]
|
||||
|
||||
[bumpversion:file:plugins/snowflake/dbt/adapters/snowflake/__version__.py]
|
||||
|
||||
[bumpversion:file:plugins/bigquery/dbt/adapters/bigquery/__version__.py]
|
||||
|
||||
|
||||
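Taken together, the `parse` and `serialize` settings above let a single config drive both final releases (`0.19.0`) and prereleases (`0.19.0b1`). A minimal sketch of exercising it, assuming `bumpversion` is installed and run from the repository root (the version numbers are illustrative):

```
# Dry-run a prerelease bump and print what would change, without writing files.
# With current_version = 0.19.0b1, bumping the "num" part yields 0.19.0b2.
bumpversion --dry-run --verbose num

# Jump to an explicit version; commit = False and tag = False above, so no
# commit or tag is created unless extra flags are passed.
bumpversion --new-version 0.19.0rc1 num
```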
218
.circleci/config.yml
Normal file
@@ -0,0 +1,218 @@
|
||||
version: 2.1
|
||||
jobs:
|
||||
unit:
|
||||
docker: &test_only
|
||||
- image: fishtownanalytics/test-container:9
|
||||
environment:
|
||||
DBT_INVOCATION_ENV: circle
|
||||
steps:
|
||||
- checkout
|
||||
- run: tox -e flake8,mypy,unit-py36,unit-py38
|
||||
build-wheels:
|
||||
docker: *test_only
|
||||
steps:
|
||||
- checkout
|
||||
- run:
|
||||
name: Build wheels
|
||||
command: |
|
||||
python3.8 -m venv "${PYTHON_ENV}"
|
||||
export PYTHON_BIN="${PYTHON_ENV}/bin/python"
|
||||
$PYTHON_BIN -m pip install -U pip setuptools
|
||||
$PYTHON_BIN -m pip install -r requirements.txt
|
||||
$PYTHON_BIN -m pip install -r dev_requirements.txt
|
||||
/bin/bash ./scripts/build-wheels.sh
|
||||
$PYTHON_BIN ./scripts/collect-dbt-contexts.py > ./dist/context_metadata.json
|
||||
$PYTHON_BIN ./scripts/collect-artifact-schema.py > ./dist/artifact_schemas.json
|
||||
environment:
|
||||
PYTHON_ENV: /home/tox/build_venv/
|
||||
- store_artifacts:
|
||||
path: ./dist
|
||||
destination: dist
|
||||
integration-postgres-py36:
|
||||
docker: &test_and_postgres
|
||||
- image: fishtownanalytics/test-container:9
|
||||
environment:
|
||||
DBT_INVOCATION_ENV: circle
|
||||
- image: postgres
|
||||
name: database
|
||||
environment: &pgenv
|
||||
POSTGRES_USER: "root"
|
||||
POSTGRES_PASSWORD: "password"
|
||||
POSTGRES_DB: "dbt"
|
||||
steps:
|
||||
- checkout
|
||||
- run: &setupdb
|
||||
name: Setup postgres
|
||||
command: bash test/setup_db.sh
|
||||
environment:
|
||||
PGHOST: database
|
||||
PGUSER: root
|
||||
PGPASSWORD: password
|
||||
PGDATABASE: postgres
|
||||
- run:
|
||||
name: Run tests
|
||||
command: tox -e integration-postgres-py36
|
||||
- store_artifacts:
|
||||
path: ./logs
|
||||
integration-snowflake-py36:
|
||||
docker: *test_only
|
||||
steps:
|
||||
- checkout
|
||||
- run:
|
||||
name: Run tests
|
||||
command: tox -e integration-snowflake-py36
|
||||
no_output_timeout: 1h
|
||||
- store_artifacts:
|
||||
path: ./logs
|
||||
integration-redshift-py36:
|
||||
docker: *test_only
|
||||
steps:
|
||||
- checkout
|
||||
- run:
|
||||
name: Run tests
|
||||
command: tox -e integration-redshift-py36
|
||||
- store_artifacts:
|
||||
path: ./logs
|
||||
integration-bigquery-py36:
|
||||
docker: *test_only
|
||||
steps:
|
||||
- checkout
|
||||
- run:
|
||||
name: Run tests
|
||||
command: tox -e integration-bigquery-py36
|
||||
- store_artifacts:
|
||||
path: ./logs
|
||||
integration-postgres-py38:
|
||||
docker: *test_and_postgres
|
||||
steps:
|
||||
- checkout
|
||||
- run: *setupdb
|
||||
- run:
|
||||
name: Run tests
|
||||
command: tox -e integration-postgres-py38
|
||||
- store_artifacts:
|
||||
path: ./logs
|
||||
integration-snowflake-py38:
|
||||
docker: *test_only
|
||||
steps:
|
||||
- checkout
|
||||
- run:
|
||||
name: Run tests
|
||||
command: tox -e integration-snowflake-py38
|
||||
no_output_timeout: 1h
|
||||
- store_artifacts:
|
||||
path: ./logs
|
||||
integration-redshift-py38:
|
||||
docker: *test_only
|
||||
steps:
|
||||
- checkout
|
||||
- run:
|
||||
name: Run tests
|
||||
command: tox -e integration-redshift-py38
|
||||
- store_artifacts:
|
||||
path: ./logs
|
||||
integration-bigquery-py38:
|
||||
docker: *test_only
|
||||
steps:
|
||||
- checkout
|
||||
- run:
|
||||
name: Run tests
|
||||
command: tox -e integration-bigquery-py38
|
||||
- store_artifacts:
|
||||
path: ./logs
|
||||
|
||||
integration-postgres-py39:
|
||||
docker: *test_and_postgres
|
||||
steps:
|
||||
- checkout
|
||||
- run: *setupdb
|
||||
- run:
|
||||
name: Run tests
|
||||
command: tox -e integration-postgres-py39
|
||||
- store_artifacts:
|
||||
path: ./logs
|
||||
integration-snowflake-py39:
|
||||
docker: *test_only
|
||||
steps:
|
||||
- checkout
|
||||
- run:
|
||||
name: Run tests
|
||||
command: tox -e integration-snowflake-py39
|
||||
no_output_timeout: 1h
|
||||
- store_artifacts:
|
||||
path: ./logs
|
||||
integration-redshift-py39:
|
||||
docker: *test_only
|
||||
steps:
|
||||
- checkout
|
||||
- run:
|
||||
name: Run tests
|
||||
command: tox -e integration-redshift-py39
|
||||
- store_artifacts:
|
||||
path: ./logs
|
||||
integration-bigquery-py39:
|
||||
docker: *test_only
|
||||
steps:
|
||||
- checkout
|
||||
- run:
|
||||
name: Run tests
|
||||
command: tox -e integration-bigquery-py39
|
||||
- store_artifacts:
|
||||
path: ./logs
|
||||
|
||||
workflows:
|
||||
version: 2
|
||||
test-everything:
|
||||
jobs:
|
||||
- unit
|
||||
- integration-postgres-py36:
|
||||
requires:
|
||||
- unit
|
||||
- integration-redshift-py36:
|
||||
requires:
|
||||
- integration-postgres-py36
|
||||
- integration-bigquery-py36:
|
||||
requires:
|
||||
- integration-postgres-py36
|
||||
- integration-snowflake-py36:
|
||||
requires:
|
||||
- integration-postgres-py36
|
||||
- integration-postgres-py38:
|
||||
requires:
|
||||
- unit
|
||||
- integration-redshift-py38:
|
||||
requires:
|
||||
- integration-postgres-py38
|
||||
- integration-bigquery-py38:
|
||||
requires:
|
||||
- integration-postgres-py38
|
||||
- integration-snowflake-py38:
|
||||
requires:
|
||||
- integration-postgres-py38
|
||||
- integration-postgres-py39:
|
||||
requires:
|
||||
- unit
|
||||
- integration-redshift-py39:
|
||||
requires:
|
||||
- integration-postgres-py39
|
||||
- integration-bigquery-py39:
|
||||
requires:
|
||||
- integration-postgres-py39
|
||||
# - integration-snowflake-py39:
|
||||
# requires:
|
||||
# - integration-postgres-py39
|
||||
- build-wheels:
|
||||
requires:
|
||||
- unit
|
||||
- integration-postgres-py36
|
||||
- integration-redshift-py36
|
||||
- integration-bigquery-py36
|
||||
- integration-snowflake-py36
|
||||
- integration-postgres-py38
|
||||
- integration-redshift-py38
|
||||
- integration-bigquery-py38
|
||||
- integration-snowflake-py38
|
||||
- integration-postgres-py39
|
||||
- integration-redshift-py39
|
||||
- integration-bigquery-py39
|
||||
# - integration-snowflake-py39
|
||||
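The YAML anchors (`&test_only`, `&test_and_postgres`, `&setupdb`) keep the per-adapter jobs from repeating identical docker and setup stanzas. One quick sanity check after editing a config like this, assuming the CircleCI local CLI is installed:

```
circleci config validate .circleci/config.yml
```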
@@ -1,3 +0,0 @@
|
||||
[report]
|
||||
include =
|
||||
dbt/*
|
||||
3
.dockerignore
Normal file
@@ -0,0 +1,3 @@
|
||||
*
|
||||
!docker/requirements/*.txt
|
||||
!dist
|
||||
41
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
@@ -0,0 +1,41 @@
|
||||
---
|
||||
name: Bug report
|
||||
about: Report a bug or an issue you've found with dbt
|
||||
title: ''
|
||||
labels: bug, triage
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
### Describe the bug
|
||||
A clear and concise description of what the bug is. What command did you run? What happened?
|
||||
|
||||
### Steps To Reproduce
|
||||
In as much detail as possible, please provide steps to reproduce the issue. Sample data that triggers the issue, example model code, etc. are all very helpful here.
|
||||
|
||||
### Expected behavior
|
||||
A clear and concise description of what you expected to happen.
|
||||
|
||||
### Screenshots and log output
|
||||
If applicable, add screenshots or log output to help explain your problem.
|
||||
|
||||
### System information
|
||||
**Which database are you using dbt with?**
|
||||
- [ ] postgres
|
||||
- [ ] redshift
|
||||
- [ ] bigquery
|
||||
- [ ] snowflake
|
||||
- [ ] other (specify: ____________)
|
||||
|
||||
|
||||
**The output of `dbt --version`:**
|
||||
```
|
||||
<output goes here>
|
||||
```
|
||||
|
||||
**The operating system you're using:**
|
||||
|
||||
**The output of `python --version`:**
|
||||
|
||||
### Additional context
|
||||
Add any other context about the problem here.
|
||||
23
.github/ISSUE_TEMPLATE/feature_request.md
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
---
|
||||
name: Feature request
|
||||
about: Suggest an idea for dbt
|
||||
title: ''
|
||||
labels: enhancement, triage
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
### Describe the feature
|
||||
A clear and concise description of what you want to happen.
|
||||
|
||||
### Describe alternatives you've considered
|
||||
A clear and concise description of any alternative solutions or features you've considered.
|
||||
|
||||
### Additional context
|
||||
Is this feature database-specific? Which database(s) is/are relevant? Please include any other relevant context here.
|
||||
|
||||
### Who will this benefit?
|
||||
What kind of use case will this feature be useful for? Please be specific and provide examples; this will help us prioritize properly.
|
||||
|
||||
### Are you interested in contributing this feature?
|
||||
Let us know if you want to write some code, and how we can help.
|
||||
22
.github/pull_request_template.md
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
resolves #
|
||||
|
||||
<!---
|
||||
Include the number of the issue addressed by this PR above if applicable.
|
||||
PRs for code changes without an associated issue *will not be merged*.
|
||||
See CONTRIBUTING.md for more information.
|
||||
|
||||
Example:
|
||||
resolves #1234
|
||||
-->
|
||||
|
||||
|
||||
### Description
|
||||
|
||||
<!--- Describe the Pull Request here -->
|
||||
|
||||
|
||||
### Checklist
|
||||
- [ ] I have signed the [CLA](https://docs.getdbt.com/docs/contributor-license-agreements)
|
||||
- [ ] I have run this code in development and it appears to resolve the stated issue
|
||||
- [ ] This PR includes tests, or tests are not required/relevant for this PR
|
||||
- [ ] I have updated the `CHANGELOG.md` and added information about my change to the "dbt next" section.
|
||||
30
.gitignore
vendored
@@ -8,7 +8,8 @@ __pycache__/
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
env/
|
||||
env*/
|
||||
dbt_env/
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
@@ -23,6 +24,7 @@ var/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
*.mypy_cache/
|
||||
logs/
|
||||
|
||||
# PyInstaller
|
||||
@@ -41,12 +43,16 @@ htmlcov/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
.env
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*,cover
|
||||
.hypothesis/
|
||||
test.env
|
||||
|
||||
# Mypy
|
||||
.mypy_cache/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
@@ -65,3 +71,25 @@ target/
|
||||
|
||||
#Emacs
|
||||
*~
|
||||
|
||||
# Sublime Text
|
||||
*.sublime-*
|
||||
|
||||
# Vim
|
||||
*.sw*
|
||||
|
||||
.python-version
|
||||
|
||||
# Vim
|
||||
*.sw*
|
||||
|
||||
# pycharm
|
||||
.idea/
|
||||
|
||||
# AWS credentials
|
||||
.aws/
|
||||
|
||||
.DS_Store
|
||||
|
||||
# vscode
|
||||
.vscode/
|
||||
|
||||
1637
CHANGELOG.md
File diff suppressed because it is too large
222
CONTRIBUTING.md
Normal file
@@ -0,0 +1,222 @@
|
||||
# Contributing to dbt
|
||||
|
||||
1. [About this document](#about-this-document)
|
||||
2. [Proposing a change](#proposing-a-change)
|
||||
3. [Getting the code](#getting-the-code)
|
||||
4. [Setting up an environment](#setting-up-an-environment)
|
||||
5. [Running dbt in development](#running-dbt-in-development)
|
||||
6. [Testing](#testing)
|
||||
7. [Submitting a Pull Request](#submitting-a-pull-request)
|
||||
|
||||
## About this document
|
||||
|
||||
This document is a guide intended for folks interested in contributing to dbt. Below, we document the process by which members of the community should create issues and submit pull requests (PRs) in this repository. It is not intended as a guide for using dbt, and it assumes a certain level of familiarity with Python concepts such as virtualenvs, `pip`, python modules, filesystems, and so on. This guide assumes you are using macOS or Linux and are comfortable with the command line.
|
||||
|
||||
If you're new to Python development or contributing to open-source software, we encourage you to read this document from start to finish. If you get stuck, drop us a line in the #development channel on [Slack](https://community.getdbt.com).
|
||||
|
||||
### Signing the CLA
|
||||
|
||||
Please note that all contributors to dbt must sign the [Contributor License Agreement](https://docs.getdbt.com/docs/contributor-license-agreements) to have their Pull Request merged into the dbt codebase. If you are unable to sign the CLA, then the dbt maintainers will unfortunately be unable to merge your Pull Request. You are, however, welcome to open issues and comment on existing ones.
|
||||
|
||||
## Proposing a change
|
||||
|
||||
dbt is Apache 2.0-licensed open source software. dbt is what it is today because community members like you have opened issues, provided feedback, and contributed to the knowledge loop for the entire community. Whether you are a seasoned open source contributor or a first-time committer, we welcome and encourage you to contribute code, documentation, ideas, or problem statements to this project.
|
||||
|
||||
### Defining the problem
|
||||
|
||||
If you have an idea for a new feature or if you've discovered a bug in dbt, the first step is to open an issue. Please check the list of [open issues](https://github.com/fishtown-analytics/dbt/issues) before creating a new one. If you find a relevant issue, please add a comment to the open issue instead of creating a new one. There are hundreds of open issues in this repository and it can be hard to know where to look for a relevant open issue. **The dbt maintainers are always happy to point contributors in the right direction**, so please err on the side of documenting your idea in a new issue if you are unsure where a problem statement belongs.
|
||||
|
||||
**Note:** All community-contributed Pull Requests _must_ be associated with an open issue. If you submit a Pull Request that does not pertain to an open issue, you will be asked to create an issue describing the problem before the Pull Request can be reviewed.
|
||||
|
||||
### Discussing the idea
|
||||
|
||||
After you open an issue, a dbt maintainer will follow up by commenting on your issue (usually within 1-3 days) to explore your idea further and advise on how to implement the suggested changes. In many cases, community members will chime in with their own thoughts on the problem statement. If you as the issue creator are interested in submitting a Pull Request to address the issue, you should indicate this in the body of the issue. The dbt maintainers are _always_ happy to help contributors with the implementation of fixes and features, so please also indicate if there's anything you're unsure about or could use guidance around in the issue.
|
||||
|
||||
### Submitting a change
|
||||
|
||||
If an issue is appropriately well scoped and describes a beneficial change to the dbt codebase, then anyone may submit a Pull Request to implement the functionality described in the issue. See the sections below on how to do this.
|
||||
|
||||
The dbt maintainers will add a `good first issue` label if an issue is suitable for a first-time contributor. This label often means that the required code change is small, limited to one database adapter, or a net-new addition that does not impact existing functionality. You can see the list of currently open issues on the [Contribute](https://github.com/fishtown-analytics/dbt/contribute) page.
|
||||
|
||||
Here's a good workflow:
|
||||
- Comment on the open issue, expressing your interest in contributing the required code change
|
||||
- Outline your planned implementation. If you want help getting started, ask!
|
||||
- Follow the steps outlined below to develop locally. Once you have opened a PR, one of the dbt maintainers will work with you to review your code.
|
||||
- Add a test! Tests are crucial for fixes and new features alike. We want to make sure that code works as intended, and that it avoids any bugs previously encountered. Currently, the best resource for understanding dbt's [unit](test/unit) and [integration](test/integration) tests is the tests themselves. One of the maintainers can help by pointing out relevant examples.
|
||||
|
||||
In some cases, the right resolution to an open issue might be tangential to the dbt codebase. The right path forward might be a documentation update or a change that can be made in user-space. In other cases, the issue might describe functionality that the dbt maintainers are unwilling or unable to incorporate into the dbt codebase. When it is determined that an open issue describes functionality that will not translate to a code change in the dbt repository, the issue will be tagged with the `wontfix` label (see below) and closed.
|
||||
|
||||
### Using issue labels
|
||||
|
||||
The dbt maintainers use labels to categorize open issues. Some labels indicate the databases impacted by the issue, while others describe the domain in the dbt codebase germane to the discussion. While most of these labels are self-explanatory (e.g. `snowflake` or `bigquery`), there are others that are worth describing.
|
||||
|
||||
| tag | description |
|
||||
| --- | ----------- |
|
||||
| [triage](https://github.com/fishtown-analytics/dbt/labels/triage) | This is a new issue which has not yet been reviewed by a dbt maintainer. This label is removed when a maintainer reviews and responds to the issue. |
|
||||
| [bug](https://github.com/fishtown-analytics/dbt/labels/bug) | This issue represents a defect or regression in dbt |
|
||||
| [enhancement](https://github.com/fishtown-analytics/dbt/labels/enhancement) | This issue represents net-new functionality in dbt |
|
||||
| [good first issue](https://github.com/fishtown-analytics/dbt/labels/good%20first%20issue) | This issue does not require deep knowledge of the dbt codebase to implement. This issue is appropriate for a first-time contributor. |
|
||||
| [help wanted](https://github.com/fishtown-analytics/dbt/labels/help%20wanted) / [discussion](https://github.com/fishtown-analytics/dbt/labels/discussion) | Conversation around this issue is ongoing, and there isn't yet a clear path forward. Input from community members is most welcome. |
|
||||
| [duplicate](https://github.com/fishtown-analytics/dbt/issues/duplicate) | This issue is functionally identical to another open issue. The dbt maintainers will close this issue and encourage community members to focus conversation on the other one. |
|
||||
| [snoozed](https://github.com/fishtown-analytics/dbt/labels/snoozed) | This issue describes a good idea, but one which will probably not be addressed in a six-month time horizon. The dbt maintainers will revisit these issues periodically and re-prioritize them accordingly. |
|
||||
| [stale](https://github.com/fishtown-analytics/dbt/labels/stale) | This is an old issue which has not recently been updated. Stale issues will periodically be closed by dbt maintainers, but they can be re-opened if the discussion is restarted. |
|
||||
| [wontfix](https://github.com/fishtown-analytics/dbt/labels/wontfix) | This issue does not require a code change in the dbt repository, or the maintainers are unwilling/unable to merge a Pull Request which implements the behavior described in the issue. |
|
||||
|
||||
|
||||
## Getting the code
|
||||
|
||||
### Installing git
|
||||
|
||||
You will need `git` in order to download and modify the dbt source code. On macOS, the best way to download git is to just install [Xcode](https://developer.apple.com/support/xcode/).
|
||||
|
||||
### External contributors
|
||||
|
||||
If you are not a member of the `fishtown-analytics` GitHub organization, you can contribute to dbt by forking the dbt repository. For a detailed overview on forking, check out the [GitHub docs on forking](https://help.github.com/en/articles/fork-a-repo). In short, you will need to:
|
||||
|
||||
1. fork the dbt repository
|
||||
2. clone your fork locally
|
||||
3. check out a new branch for your proposed changes
|
||||
4. push changes to your fork
|
||||
5. open a pull request against `fishtown-analytics/dbt` from your forked repository
|
||||
|
||||
### Core contributors
|
||||
|
||||
If you are a member of the `fishtown-analytics` GitHub organization, you will have push access to the dbt repo. Rather than
|
||||
forking dbt to make your changes, just clone the repository, check out a new branch, and push directly to that branch.
|
||||
|
||||
|
||||
## Setting up an environment
|
||||
|
||||
There are some tools that will be helpful to you in developing locally. While this is the list relevant for dbt development, many of these tools are used commonly across open-source python projects.
|
||||
|
||||
### Tools
|
||||
|
||||
A short list of tools used in dbt testing that will be helpful to your understanding:
|
||||
|
||||
- [virtualenv](https://virtualenv.pypa.io/en/stable/) to manage dependencies
|
||||
- [tox](https://tox.readthedocs.io/en/latest/) to manage virtualenvs across python versions
|
||||
- [pytest](https://docs.pytest.org/en/latest/) to discover/run tests
|
||||
- [make](https://users.cs.duke.edu/~ola/courses/programming/Makefiles/Makefiles.html) - but don't worry too much, nobody _really_ understands how make works and our Makefile is super simple
|
||||
- [flake8](https://gitlab.com/pycqa/flake8) for code linting
|
||||
- [CircleCI](https://circleci.com/product/) and [Azure Pipelines](https://azure.microsoft.com/en-us/services/devops/pipelines/) for continuous integration
|
||||
|
||||
A deep understanding of these tools is not required to effectively contribute to dbt, but we recommend checking out the linked documentation if you're interested in learning more about them.
|
||||
|
||||
#### virtual environments
|
||||
|
||||
We strongly recommend using virtual environments when developing code in dbt. We recommend creating this virtualenv
|
||||
in the root of the dbt repository. To create a new virtualenv, run:
|
||||
```
|
||||
python3 -m venv env
|
||||
source env/bin/activate
|
||||
```
|
||||
|
||||
This will create and activate a new Python virtual environment.
|
||||
|
||||
#### docker and docker-compose
|
||||
|
||||
Docker and docker-compose are both used in testing. For macOS, the easiest thing to do is to [download docker for mac](https://store.docker.com/editions/community/docker-ce-desktop-mac). You'll need to make an account. On Linux, you can use one of the packages [here](https://docs.docker.com/install/#server). We recommend installing from docker.com instead of from your package manager. On Linux you also have to install docker-compose separately, following [these instructions](https://docs.docker.com/compose/install/#install-compose).
|
||||
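Before running the test suite, it can be worth confirming that both tools are installed and on your `PATH` (a trivial check; output varies by platform):

```
docker --version
docker-compose --version
```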
|
||||
|
||||
#### postgres (optional)
|
||||
|
||||
For testing, and later in the examples in this document, you may want to have `psql` available so you can poke around in the database and see what happened. We recommend that you use [homebrew](https://brew.sh/) for that on macOS, and your package manager on Linux. You can install any version of the postgres client that you'd like. On macOS, with homebrew setup, you can run:
|
||||
|
||||
```
|
||||
brew install postgresql
|
||||
```
|
||||
|
||||
## Running dbt in development
|
||||
|
||||
### Installation
|
||||
|
||||
First make sure that you set up your `virtualenv` as described in section _Setting up an environment_. Next, install dbt (and its dependencies) with:
|
||||
|
||||
```
|
||||
pip install -r editable_requirements.txt
|
||||
```
|
||||
|
||||
When dbt is installed from source in this way, any changes you make to the dbt source code will be reflected immediately in your next `dbt` run.
|
||||
|
||||
### Running dbt
|
||||
|
||||
With your virtualenv activated, the `dbt` script should point back to the source code you've cloned on your machine. You can verify this by running `which dbt`. This command should show you a path to an executable in your virtualenv.
|
||||
|
||||
Configure your [profile](https://docs.getdbt.com/docs/configure-your-profile) as necessary to connect to your target databases. It may be a good idea to add a new profile pointing to a local postgres instance, or a specific test sandbox within your data warehouse if appropriate.
|
||||
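As a quick sketch of verifying the wiring, assuming you've configured a profile for your target warehouse: `which dbt` confirms you are running the editable install, and `dbt debug` validates `profiles.yml` and tests the database connection:

```
which dbt   # should print a path inside your virtualenv, e.g. .../env/bin/dbt
dbt debug   # checks profiles.yml and attempts to connect to the target database
```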
|
||||
## Testing
|
||||
|
||||
Getting the dbt integration tests set up in your local environment will be very helpful as you start to make changes to your local version of dbt. The section that follows outlines some helpful tips for setting up the test environment.
|
||||
|
||||
### Running tests via Docker
|
||||
|
||||
dbt's unit and integration tests run in Docker. Because dbt works with a number of different databases, you will need to supply credentials for one or more of these databases in your test environment. Most organizations don't have access to all of BigQuery, Redshift, Snowflake, and Postgres, so it's likely that you will be unable to run every integration test locally. Fortunately, Fishtown Analytics provides a CI environment with access to sandboxed Redshift, Snowflake, BigQuery, and Postgres databases. See the section on [_Submitting a Pull Request_](#submitting-a-pull-request) below for more information on this CI setup.
|
||||
|
||||
|
||||
### Specifying your test credentials
|
||||
|
||||
dbt uses test credentials specified in a `test.env` file in the root of the repository. This `test.env` file is git-ignored, but please be _extra_ careful to never check in credentials or other sensitive information when developing against dbt. To create your `test.env` file, copy the provided sample file, then supply your relevant credentials:
|
||||
|
||||
```
|
||||
cp test.env.sample test.env
|
||||
atom test.env # supply your credentials
|
||||
```
|
||||
|
||||
We recommend starting with dbt's Postgres tests. These tests cover most of the functionality in dbt, are the fastest to run, and are the easiest to set up. dbt's test suite runs Postgres in a Docker container, so no setup should be required to run these tests.
|
||||
|
||||
If you additionally want to test Snowflake, BigQuery, or Redshift locally, you'll need to get credentials and add them to the `test.env` file. In general, it's most important to have successful unit and Postgres tests. Once you open a PR, dbt will automatically run integration tests for the other three core database adapters. Of course, if you are a BigQuery user contributing a BigQuery-only feature, it's important to run BigQuery tests as well.
|
||||
|
||||
### Test commands
|
||||
|
||||
dbt's unit tests and Python linter can be run with:
|
||||
|
||||
```
|
||||
make test-unit
|
||||
```
|
||||
|
||||
To run the Postgres + Python 3.6 integration tests, you'll have to do one extra step of setting up the test database:
|
||||
|
||||
```
|
||||
docker-compose up -d database
|
||||
PGHOST=localhost PGUSER=root PGPASSWORD=password PGDATABASE=postgres bash test/setup_db.sh
|
||||
```
|
||||
|
||||
To run a quick test for Python3 integration tests on Postgres, you can run:
|
||||
|
||||
```
|
||||
make test-quick
|
||||
```
|
||||
|
||||
To run tests for a specific database, invoke `tox` directly with the required flags:
|
||||
```
|
||||
# Run Postgres py36 tests
|
||||
docker-compose run test tox -e integration-postgres-py36 -- -x
|
||||
|
||||
# Run Snowflake py36 tests
|
||||
docker-compose run test tox -e integration-snowflake-py36 -- -x
|
||||
|
||||
# Run BigQuery py36 tests
|
||||
docker-compose run test tox -e integration-bigquery-py36 -- -x
|
||||
|
||||
# Run Redshift py36 tests
|
||||
docker-compose run test tox -e integration-redshift-py36 -- -x
|
||||
```
|
||||
|
||||
To run a specific test by itself:
|
||||
```
|
||||
docker-compose run test tox -e explicit-py36 -- -s -x -m profile_{adapter} {path_to_test_file_or_folder}
|
||||
```
|
||||
E.g.
|
||||
```
|
||||
docker-compose run test tox -e explicit-py36 -- -s -x -m profile_snowflake test/integration/001_simple_copy_test
|
||||
```
|
||||
|
||||
See the `Makefile` contents for some other examples of ways to run `tox`.
|
||||
|
||||
## Submitting a Pull Request
|
||||
|
||||
Fishtown Analytics provides a sandboxed Redshift, Snowflake, and BigQuery database for use in a CI environment. When pull requests are submitted to the `fishtown-analytics/dbt` repo, GitHub will trigger automated tests in CircleCI and Azure Pipelines.
|
||||
|
||||
A dbt maintainer will review your PR. They may suggest code revisions for style or clarity, or request that you add unit or integration test(s). These are good things! We believe that, with a little bit of help, anyone can contribute high-quality code.
|
||||
|
||||
Once all tests are passing and your PR has been approved, a dbt maintainer will merge your changes into the active development branch. And that's it! Happy developing :tada:
|
||||
14
Dockerfile
@@ -1,14 +0,0 @@
|
||||
FROM python:3.6
|
||||
|
||||
RUN apt-get update
|
||||
|
||||
RUN apt-get install -y python-pip netcat
|
||||
RUN apt-get install -y python-dev python3-dev
|
||||
|
||||
RUN pip install pip --upgrade
|
||||
RUN pip install virtualenv
|
||||
RUN pip install virtualenvwrapper
|
||||
RUN pip install tox
|
||||
|
||||
WORKDIR /usr/src/app
|
||||
RUN cd /usr/src/app
|
||||
74
Dockerfile.test
Normal file
@@ -0,0 +1,74 @@
|
||||
FROM ubuntu:18.04
|
||||
|
||||
ENV DEBIAN_FRONTEND noninteractive
|
||||
|
||||
RUN apt-get update \
|
||||
&& apt-get dist-upgrade -y \
|
||||
&& apt-get install -y --no-install-recommends \
|
||||
netcat \
|
||||
postgresql \
|
||||
curl \
|
||||
git \
|
||||
ssh \
|
||||
software-properties-common \
|
||||
make \
|
||||
build-essential \
|
||||
ca-certificates \
|
||||
libpq-dev \
|
||||
libsasl2-dev \
|
||||
libsasl2-2 \
|
||||
libsasl2-modules-gssapi-mit \
|
||||
libyaml-dev \
|
||||
unixodbc-dev \
|
||||
&& add-apt-repository ppa:deadsnakes/ppa \
|
||||
&& apt-get install -y \
|
||||
python \
|
||||
python-dev \
|
||||
python-pip \
|
||||
python3.6 \
|
||||
python3.6-dev \
|
||||
python3-pip \
|
||||
python3.6-venv \
|
||||
python3.7 \
|
||||
python3.7-dev \
|
||||
python3.7-venv \
|
||||
python3.8 \
|
||||
python3.8-dev \
|
||||
python3.8-venv \
|
||||
python3.9 \
|
||||
python3.9-dev \
|
||||
python3.9-venv \
|
||||
&& apt-get clean \
|
||||
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
|
||||
|
||||
ARG DOCKERIZE_VERSION=v0.6.1
|
||||
RUN curl -LO https://github.com/jwilder/dockerize/releases/download/$DOCKERIZE_VERSION/dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
|
||||
&& tar -C /usr/local/bin -xzvf dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
|
||||
&& rm dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz
|
||||
|
||||
RUN pip3 install -U "tox==3.14.4" wheel "six>=1.14.0,<1.15.0" "virtualenv==20.0.3" setuptools
|
||||
# tox fails if the 'python' interpreter (python2) doesn't have `tox` installed
|
||||
RUN pip install -U "tox==3.14.4" "six>=1.14.0,<1.15.0" "virtualenv==20.0.3" setuptools
|
||||
|
||||
# These args are passed in via docker-compose, which reads them from the .env file.
|
||||
# On Linux, run `make .env` to create the .env file for the current user.
|
||||
# On MacOS and Windows, these can stay unset.
|
||||
ARG USER_ID
|
||||
ARG GROUP_ID
|
||||
|
||||
RUN if [ ${USER_ID:-0} -ne 0 ] && [ ${GROUP_ID:-0} -ne 0 ]; then \
|
||||
groupadd -g ${GROUP_ID} dbt_test_user && \
|
||||
useradd -m -l -u ${USER_ID} -g ${GROUP_ID} dbt_test_user; \
|
||||
else \
|
||||
useradd -mU -l dbt_test_user; \
|
||||
fi
|
||||
RUN mkdir /usr/app && chown dbt_test_user /usr/app
|
||||
RUN mkdir /home/tox && chown dbt_test_user /home/tox
|
||||
|
||||
WORKDIR /usr/app
|
||||
VOLUME /usr/app
|
||||
|
||||
USER dbt_test_user
|
||||
|
||||
ENV PYTHONIOENCODING=utf-8
|
||||
ENV LANG C.UTF-8
|
||||
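A sketch of building and entering this image through the `test` service used by the `Makefile` and docker-compose setup shown elsewhere in this diff; on Linux, `make .env` supplies `USER_ID`/`GROUP_ID` so files created in the mounted volume stay owned by your user:

```
make .env                         # on Linux, records USER_ID/GROUP_ID and builds the image
docker-compose run --rm test bash # open a shell as dbt_test_user inside the container
```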
@@ -1 +0,0 @@
|
||||
recursive-include dbt/include *.py *.sql *.yml
|
||||
49
Makefile
@@ -3,22 +3,47 @@
|
||||
changed_tests := `git status --porcelain | grep '^\(M\| M\|A\| A\)' | awk '{ print $$2 }' | grep '\/test_[a-zA-Z_\-\.]\+.py'`
|
||||
|
||||
install:
|
||||
pip install --upgrade .
|
||||
pip install -e .
|
||||
|
||||
test:
|
||||
test: .env
|
||||
@echo "Full test run starting..."
|
||||
@time docker-compose run test tox
|
||||
@time docker-compose run --rm test tox
|
||||
|
||||
test-unit:
|
||||
test-unit: .env
|
||||
@echo "Unit test run starting..."
|
||||
@time docker-compose run test tox -e unit-py27,unit-py35,pep8
|
||||
@time docker-compose run --rm test tox -e unit-py36,flake8
|
||||
|
||||
test-integration:
|
||||
test-integration: .env
|
||||
@echo "Integration test run starting..."
|
||||
@time docker-compose run test tox -e integration-postgres-py27,integration-postgres-py35,integration-snowflake-py27,integration-snowflake-py35
|
||||
@time docker-compose run --rm test tox -e integration-postgres-py36,integration-redshift-py36,integration-snowflake-py36,integration-bigquery-py36
|
||||
|
||||
test-new:
|
||||
@echo "Test run starting..."
|
||||
@echo "Changed test files:"
|
||||
@echo "${changed_tests}"
|
||||
@docker-compose run test /usr/src/app/test/runner.sh ${changed_tests}
|
||||
test-quick: .env
|
||||
@echo "Integration test run starting..."
|
||||
@time docker-compose run --rm test tox -e integration-postgres-py36 -- -x
|
||||
|
||||
# This rule creates a file named .env that is used by docker-compose for passing
|
||||
# the USER_ID and GROUP_ID arguments to the Docker image.
|
||||
.env:
|
||||
@touch .env
|
||||
ifneq ($(OS),Windows_NT)
|
||||
ifneq ($(shell uname -s), Darwin)
|
||||
@echo USER_ID=$(shell id -u) > .env
|
||||
@echo GROUP_ID=$(shell id -g) >> .env
|
||||
endif
|
||||
endif
|
||||
@time docker-compose build
|
||||
|
||||
clean:
|
||||
rm -f .coverage
|
||||
rm -rf .eggs/
|
||||
rm -f .env
|
||||
rm -rf .tox/
|
||||
rm -rf build/
|
||||
rm -rf dbt.egg-info/
|
||||
rm -f dbt_project.yml
|
||||
rm -rf dist/
|
||||
rm -f htmlcov/*.{css,html,js,json,png}
|
||||
rm -rf logs/
|
||||
rm -rf target/
|
||||
find . -type f -name '*.pyc' -delete
|
||||
find . -type d -name '__pycache__' -depth -delete
|
||||
|
||||
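Because the `test*` targets depend on `.env`, the file (and the Docker image) is built on demand. A sketch of what the rule produces on a typical Linux machine (the IDs are illustrative; on macOS and Windows the file is simply left empty):

```
$ make .env
$ cat .env
USER_ID=1000
GROUP_ID=1000
```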
71
README.md
@@ -1,35 +1,58 @@
|
||||
# dbt
|
||||
<p align="center">
|
||||
<img src="/etc/dbt-logo-full.svg" alt="dbt logo" width="500"/>
|
||||
</p>
|
||||
<p align="center">
|
||||
<a href="https://codeclimate.com/github/fishtown-analytics/dbt">
|
||||
<img src="https://codeclimate.com/github/fishtown-analytics/dbt/badges/gpa.svg" alt="Code Climate"/>
|
||||
</a>
|
||||
<a href="https://circleci.com/gh/fishtown-analytics/dbt/tree/master">
|
||||
<img src="https://circleci.com/gh/fishtown-analytics/dbt/tree/master.svg?style=svg" alt="CircleCI" />
|
||||
</a>
|
||||
<a href="https://ci.appveyor.com/project/DrewBanin/dbt/branch/development">
|
||||
<img src="https://ci.appveyor.com/api/projects/status/v01rwd3q91jnwp9m/branch/development?svg=true" alt="AppVeyor" />
|
||||
</a>
|
||||
<a href="https://community.getdbt.com">
|
||||
<img src="https://community.getdbt.com/badge.svg" alt="Slack" />
|
||||
</a>
|
||||
</p>
|
||||
|
||||
dbt (data build tool) helps analysts write reliable, modular code using a workflow that closely mirrors software development.
|
||||
**[dbt](https://www.getdbt.com/)** (data build tool) enables data analysts and engineers to transform their data using the same practices that software engineers use to build applications.
|
||||
|
||||
---
|
||||
dbt is the T in ELT. Organize, cleanse, denormalize, filter, rename, and pre-aggregate the raw data in your warehouse so that it's ready for analysis.
|
||||
|
||||
- [What is dbt]?
|
||||
- Read the [dbt viewpoint]
|
||||
- [Installation]
|
||||
- Join the [chat][slack-url] on Slack for live questions and support.
|
||||

|
||||
|
||||
---
|
||||
dbt can be used to [aggregate pageviews into sessions](https://github.com/fishtown-analytics/snowplow), calculate [ad spend ROI](https://github.com/fishtown-analytics/facebook-ads), or report on [email campaign performance](https://github.com/fishtown-analytics/mailchimp).
|
||||
|
||||
[](https://codeclimate.com/github/fishtown-analytics/dbt)
|
||||
## Understanding dbt
|
||||
|
||||
### Testing
|
||||
Analysts using dbt can transform their data by simply writing select statements, while dbt handles turning these statements into tables and views in a data warehouse.
|
||||
|
||||
| service | development | master |
|
||||
| --- | --- | --- |
|
||||
| CircleCI| [](https://circleci.com/gh/fishtown-analytics/dbt/tree/development) | [](https://circleci.com/gh/fishtown-analytics/dbt/tree/master) |
|
||||
| AppVeyor | [](https://ci.appveyor.com/project/DrewBanin/dbt/branch/development) | [](https://ci.appveyor.com/project/DrewBanin/dbt/branch/master) |
|
||||
These select statements, or "models", form a dbt project. Models frequently build on top of one another – dbt makes it easy to [manage relationships](https://docs.getdbt.com/docs/ref) between models, and [visualize these relationships](https://docs.getdbt.com/docs/documentation), as well as assure the quality of your transformations through [testing](https://docs.getdbt.com/docs/testing).
|
||||
|
||||
[Coverage](https://circleci.com/api/v1/project/fishtown-analytics/dbt/latest/artifacts/0/$CIRCLE_ARTIFACTS/htmlcov/index.html?branch=development)
|
||||

|
||||
|
||||
## Getting started
|
||||
|
||||
- [Install dbt](https://docs.getdbt.com/docs/installation)
|
||||
- Read the [documentation](https://docs.getdbt.com/).
|
||||
- Productionize your dbt project with [dbt Cloud](https://www.getdbt.com)
|
||||
|
||||
## Find out more
|
||||
|
||||
- Check out the [Introduction to dbt](https://docs.getdbt.com/docs/introduction/).
|
||||
- Read the [dbt Viewpoint](https://docs.getdbt.com/docs/about/viewpoint/).
|
||||
|
||||
## Join thousands of analysts in the dbt community
|
||||
|
||||
- Join the [chat](http://community.getdbt.com/) on Slack.
|
||||
- Find community posts on [dbt Discourse](https://discourse.getdbt.com).
|
||||
|
||||
## Reporting bugs and contributing code
|
||||
|
||||
- Want to report a bug or request a feature? Let us know on [Slack](http://community.getdbt.com/), or open [an issue](https://github.com/fishtown-analytics/dbt/issues/new).
|
||||
- Want to help us build dbt? Check out the [Contributing Getting Started Guide](/CONTRIBUTING.md)
|
||||
|
||||
## Code of Conduct
|
||||
|
||||
Everyone interacting in the dbt project's codebases, issue trackers, chat rooms, and mailing lists is expected to follow the [PyPA Code of Conduct].
|
||||
|
||||
|
||||
|
||||
[PyPA Code of Conduct]: https://www.pypa.io/en/latest/code-of-conduct/
|
||||
[slack-url]: http://ac-slackin.herokuapp.com/
|
||||
[Installation]: https://dbt.readme.io/docs/installation
|
||||
[What is dbt]: https://dbt.readme.io/docs/overview
|
||||
[dbt viewpoint]: https://dbt.readme.io/docs/viewpoint
|
||||
Everyone interacting in the dbt project's codebases, issue trackers, chat rooms, and mailing lists is expected to follow the [dbt Code of Conduct](https://community.getdbt.com/code-of-conduct).
|
||||
|
||||
88
RELEASE.md
@@ -1,63 +1,77 @@
|
||||
### Release Procedure :shipit:
|
||||
|
||||
1. Update changelog
|
||||
1. Bumpversion
|
||||
1. Merge to master
|
||||
- (on master) git pull origin development
|
||||
1. Deploy to pypi
|
||||
- python setup.py sdist upload -r pypi
|
||||
1. Deploy to homebrew
|
||||
- Make a pull request against homebrew-core
|
||||
1. Deploy to conda-forge
|
||||
- Make a pull request against dbt-feedstock
|
||||
1. Git release notes (points to changelog)
|
||||
1. Post to slack (point to changelog)
|
||||
#### Branching Strategy
|
||||
|
||||
dbt has three types of branches:
|
||||
|
||||
- **Trunks** track the latest release of a minor version of dbt. Historically, we used the `master` branch as the trunk. Each minor version release has a corresponding trunk. For example, the `0.11.x` series of releases has a branch called `0.11.latest`. This allows us to release new patch versions under `0.11` without necessarily needing to pull them into the latest version of dbt.
|
||||
- **Release Branches** track a specific, not yet complete release of dbt. These releases are codenamed since we don't always know what their semantic version will be. Example: `dev/lucretia-mott` became `0.11.1`.
|
||||
- **Feature Branches** track individual features and fixes. On completion they should be merged into a release branch.
|
||||
|
||||
#### Git & PyPI
|
||||
|
||||
1. Update CHANGELOG.md with the most recent changes
|
||||
2. If this is a release candidate, you want to create it off of your release branch. If it's an actual release, you must first merge to a master branch. Open a Pull Request in GitHub to merge it into the appropriate trunk (`X.X.latest`).
|
||||
3. Bump the version using `bumpversion`:
|
||||
- Dry run first by running `bumpversion --new-version <desired-version> <part>` and checking the diff. If it looks correct, clean up the chanages and move on:
|
||||
- Alpha releases: `bumpversion --commit --no-tag --new-version 0.10.2a1 num`
|
||||
- Patch releases: `bumpversion --commit --no-tag --new-version 0.10.2 patch`
|
||||
- Minor releases: `bumpversion --commit --no-tag --new-version 0.11.0 minor`
|
||||
- Major releases: `bumpversion --commit --no-tag --new-version 1.0.0 major`
|
||||
4. (If this is a not a release candidate) Merge to `x.x.latest` and (optionally) `master`.
|
||||
5. Update the default branch to the next dev release branch.
|
||||
6. Build source distributions for all packages by running `./scripts/build-sdists.sh`. Note that this will clean out your `dist/` folder, so if you have important stuff in there, don't run it!!!
|
||||
7. Deploy to pypi
|
||||
- `twine upload dist/*`
|
||||
8. Deploy to homebrew (see below)
|
||||
9. Deploy to conda-forge (see below)
|
||||
10. Git release notes (points to changelog)
|
||||
11. Post to slack (point to changelog)
|
||||
|
||||
After releasing a new version, it's important to merge the changes back into the other outstanding release branches. This avoids merge conflicts moving forward.
|
||||
|
||||
In some cases, where the branches have diverged wildly, it's ok to skip this step. But this means that the changes you just released won't be included in future releases.
|
||||
|
||||
#### Homebrew Release Process
|
||||
|
||||
1. fork homebrew and add a remote:
|
||||
1. Clone the `homebrew-dbt` repository:
|
||||
|
||||
```
|
||||
cd $(brew --repo homebrew/core)
|
||||
git remote add origin <your-github-username> <your-fork-url>
|
||||
git clone git@github.com:fishtown-analytics/homebrew-dbt.git
|
||||
```
|
||||
|
||||
2. edit the formula.
|
||||
2. For ALL releases (prereleases and version releases), copy the relevant formula. To copy from the latest version release of dbt, do:
|
||||
|
||||
```bash
|
||||
brew update
|
||||
mkvirtualenv --python="$(which python3)" brew
|
||||
pip install homebrew-pypi-poet dbt
|
||||
diff "$(brew --repo homebrew/core)"/Formula/dbt.rb <(poet -f dbt)
|
||||
cp Formula/dbt.rb Formula/dbt@{NEW-VERSION}.rb
|
||||
```
|
||||
|
||||
find any differences in resource stanzas, and incorporate them into the formula
|
||||
To copy from a different version, simply copy the corresponding file.
|
||||
|
||||
3. Open the file, and edit the following:
|
||||
- the name of the ruby class: this is important, homebrew won't function properly if the class name is wrong. Check historical versions to figure out the right name.
|
||||
- under the `bottle` section, remove all of the hashes (lines starting with `sha256`)
|
||||
|
||||
4. Create a **Python 3.7** virtualenv, activate it, and then install two packages: `homebrew-pypi-poet`, and the version of dbt you are preparing. I use:
|
||||
|
||||
```
|
||||
brew edit dbt
|
||||
...
|
||||
diff "$(brew --repo homebrew/core)"/Formula/dbt.rb <(poet -f dbt)
|
||||
pyenv virtualenv 3.7.0 homebrew-dbt-{VERSION}
|
||||
pyenv activate homebrew-dbt-{VERSION}
|
||||
pip install dbt=={VERSION} homebrew-pypi-poet
|
||||
```
|
||||
|
||||
3. reinstall, test, and audit dbt. if the test or audit fails, fix the formula with step 1.
|
||||
homebrew-pypi-poet is a program that generates a valid homebrew formula for an installed pip package. You want to use it to generate a diff against the existing formula. Then you want to apply the diff for the dependency packages only -- e.g. it will tell you that `google-api-core` has been updated and that you need to use the latest version.
|
||||
|
||||
5. reinstall, test, and audit dbt. if the test or audit fails, fix the formula with step 1.
|
||||
|
||||
```bash
|
||||
brew uninstall --force dbt
|
||||
brew install --build-from-source dbt
|
||||
brew uninstall --force Formula/{YOUR-FILE}.rb
|
||||
brew install Formula/{YOUR-FILE}.rb
|
||||
brew test dbt
|
||||
brew audit --strict dbt
|
||||
```
|
||||
|
||||
4. make a pull request for the change.
|
||||
|
||||
```bash
|
||||
cd $(brew --repo homebrew/core)
|
||||
git pull origin master
|
||||
git checkout -b dbt-<version> origin/master
|
||||
git add . -p
|
||||
git commit -m 'dbt <version>'
|
||||
git push -u <your-github-username> dbt-<version>
|
||||
```
|
||||
6. Ask Connor to bottle the change (only his laptop can do it!)
|
||||
|
||||
#### Conda Forge Release Process
|
||||
|
||||
|
||||
appveyor.yml
@@ -1,63 +0,0 @@

version: 1.0.{build}-{branch}

environment:
  # SDK v7.0 MSVC Express 2008's SetEnv.cmd script will fail if the
  # /E:ON and /V:ON options are not enabled in the batch script interpreter
  # See: http://stackoverflow.com/a/13751649/163740
  CMD_IN_ENV: "cmd /E:ON /V:ON /C .\\appveyor\\run_with_env.cmd"
  TOX_ENV: "pywin"

  matrix:
    - PYTHON: "C:\\Python35"
      PYTHON_VERSION: "3.5.2"
      PYTHON_ARCH: "32"

    #- PYTHON: "C:\\Python35"
    #  PYTHON_VERSION: "3.5.2"
    #  PYTHON_ARCH: "32"

  PGUSER: postgres
  PGPASSWORD: Password12!

services:
  - postgresql94

hosts:
  database: 127.0.0.1

init:
  - PATH=C:\Program Files\PostgreSQL\9.4\bin\;%PATH%
  - ps: Set-Content "c:\program files\postgresql\9.4\data\pg_hba.conf" "host all all ::1/128 trust"
  - ps: Add-Content "c:\program files\postgresql\9.4\data\pg_hba.conf" "host all all 127.0.0.1/32 trust"

install:
  # Download setup scripts and unzip
  - ps: "wget https://github.com/cloudify-cosmo/appveyor-utils/archive/master.zip -OutFile ./master.zip"
  - "7z e master.zip */appveyor/* -oappveyor"

  # Install Python (from the official .msi of http://python.org) and pip when
  # not already installed.
  - "powershell ./appveyor/install.ps1"

  # Prepend newly installed Python to the PATH of this build (this cannot be
  # done from inside the powershell script as it would require to restart
  # the parent CMD process).
  - "SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%"

  # Check that we have the expected version and architecture for Python
  - "python --version"
  - "python -c \"import struct; print(struct.calcsize('P') * 8)\""

build: false  # Not a C# project, build stuff at the test step instead.

before_test:
  - "%CMD_IN_ENV% pip install psycopg2==2.6.2"
  - "%CMD_IN_ENV% pip install tox"

test_script:
  - "bash test/setup_db.sh"

  # this is generally a bad idea TODO
  - git config --system http.sslverify false

  - "%CMD_IN_ENV% tox -e %TOX_ENV%"
azure-pipelines.yml
Normal file
@@ -0,0 +1,154 @@

# Python package
# Create and test a Python package on multiple Python versions.
# Add steps that analyze code, save the dist with the build record, publish to a PyPI-compatible index, and more:
# https://docs.microsoft.com/azure/devops/pipelines/languages/python

trigger:
  branches:
    include:
      - master
      - dev/*
      - pr/*

jobs:
  - job: UnitTest
    pool:
      vmImage: 'vs2017-win2016'
    steps:
      - task: UsePythonVersion@0
        inputs:
          versionSpec: '3.7'
          architecture: 'x64'

      - script: python -m pip install --upgrade pip && pip install tox
        displayName: 'Install dependencies'

      - script: python -m tox -e pywin-unit
        displayName: Run unit tests

  - job: PostgresIntegrationTest
    pool:
      vmImage: 'vs2017-win2016'
    dependsOn: UnitTest

    steps:
      - pwsh: |
          $serviceName = Get-Service -Name postgresql*
          Set-Service -InputObject $serviceName -StartupType Automatic
          Start-Service -InputObject $serviceName

          & $env:PGBIN\createdb.exe -U postgres dbt
          & $env:PGBIN\psql.exe -U postgres -c "CREATE ROLE root WITH PASSWORD 'password';"
          & $env:PGBIN\psql.exe -U postgres -c "ALTER ROLE root WITH LOGIN;"
          & $env:PGBIN\psql.exe -U postgres -c "GRANT CREATE, CONNECT ON DATABASE dbt TO root WITH GRANT OPTION;"
          & $env:PGBIN\psql.exe -U postgres -c "CREATE ROLE noaccess WITH PASSWORD 'password' NOSUPERUSER;"
          & $env:PGBIN\psql.exe -U postgres -c "ALTER ROLE noaccess WITH LOGIN;"
          & $env:PGBIN\psql.exe -U postgres -c "GRANT CONNECT ON DATABASE dbt TO noaccess;"
        displayName: Install postgresql and set up database

      - task: UsePythonVersion@0
        inputs:
          versionSpec: '3.7'
          architecture: 'x64'

      - script: python -m pip install --upgrade pip && pip install tox
        displayName: 'Install dependencies'

      - script: python -m tox -e pywin-postgres
        displayName: Run integration tests

  # These three are all similar except for the secure environment variables, which
  # MUST be passed along to their tasks, but there's probably a better way to do this!
  - job: SnowflakeIntegrationTest
    pool:
      vmImage: 'vs2017-win2016'
    dependsOn: PostgresIntegrationTest
    condition: succeeded()
    steps:
      - task: UsePythonVersion@0
        inputs:
          versionSpec: '3.7'
          architecture: 'x64'

      - script: python -m pip install --upgrade pip && pip install tox
        displayName: 'Install dependencies'

      - script: python -m tox -e pywin-snowflake
        env:
          SNOWFLAKE_TEST_ACCOUNT: $(SNOWFLAKE_TEST_ACCOUNT)
          SNOWFLAKE_TEST_PASSWORD: $(SNOWFLAKE_TEST_PASSWORD)
          SNOWFLAKE_TEST_USER: $(SNOWFLAKE_TEST_USER)
          SNOWFLAKE_TEST_WAREHOUSE: $(SNOWFLAKE_TEST_WAREHOUSE)
          SNOWFLAKE_TEST_OAUTH_REFRESH_TOKEN: $(SNOWFLAKE_TEST_OAUTH_REFRESH_TOKEN)
          SNOWFLAKE_TEST_OAUTH_CLIENT_ID: $(SNOWFLAKE_TEST_OAUTH_CLIENT_ID)
          SNOWFLAKE_TEST_OAUTH_CLIENT_SECRET: $(SNOWFLAKE_TEST_OAUTH_CLIENT_SECRET)
        displayName: Run integration tests

  - job: BigQueryIntegrationTest
    pool:
      vmImage: 'vs2017-win2016'
    dependsOn: PostgresIntegrationTest
    condition: succeeded()
    steps:
      - task: UsePythonVersion@0
        inputs:
          versionSpec: '3.7'
          architecture: 'x64'
      - script: python -m pip install --upgrade pip && pip install tox
        displayName: 'Install dependencies'
      - script: python -m tox -e pywin-bigquery
        env:
          BIGQUERY_SERVICE_ACCOUNT_JSON: $(BIGQUERY_SERVICE_ACCOUNT_JSON)
        displayName: Run integration tests

  - job: RedshiftIntegrationTest
    pool:
      vmImage: 'vs2017-win2016'
    dependsOn: PostgresIntegrationTest
    condition: succeeded()
    steps:
      - task: UsePythonVersion@0
        inputs:
          versionSpec: '3.7'
          architecture: 'x64'

      - script: python -m pip install --upgrade pip && pip install tox
        displayName: 'Install dependencies'

      - script: python -m tox -e pywin-redshift
        env:
          REDSHIFT_TEST_DBNAME: $(REDSHIFT_TEST_DBNAME)
          REDSHIFT_TEST_PASS: $(REDSHIFT_TEST_PASS)
          REDSHIFT_TEST_USER: $(REDSHIFT_TEST_USER)
          REDSHIFT_TEST_PORT: $(REDSHIFT_TEST_PORT)
          REDSHIFT_TEST_HOST: $(REDSHIFT_TEST_HOST)
        displayName: Run integration tests

  - job: BuildWheel
    pool:
      vmImage: 'vs2017-win2016'
    dependsOn:
      - UnitTest
      - PostgresIntegrationTest
      - RedshiftIntegrationTest
      - SnowflakeIntegrationTest
      - BigQueryIntegrationTest
    condition: succeeded()
    steps:
      - task: UsePythonVersion@0
        inputs:
          versionSpec: '3.7'
          architecture: 'x64'
      - script: python -m pip install --upgrade pip setuptools && python -m pip install -r requirements.txt && python -m pip install -r dev_requirements.txt
        displayName: Install dependencies
      - task: ShellScript@2
        inputs:
          scriptPath: scripts/build-wheels.sh
      - task: CopyFiles@2
        inputs:
          contents: 'dist\?(*.whl|*.tar.gz)'
          TargetFolder: '$(Build.ArtifactStagingDirectory)'
      - task: PublishBuildArtifacts@1
        inputs:
          pathtoPublish: '$(Build.ArtifactStagingDirectory)'
          artifactName: dists
circle.yml
@@ -1,25 +0,0 @@

machine:
  python:
    version: 3.6.0
  post:
    - pyenv global 2.7.12 3.6.0
  hosts:
    database: 127.0.0.1

database:
  override:
    - bash test/setup_db.sh

dependencies:
  pre:
    - pip install --upgrade pip setuptools || true
    - pip install --upgrade tox tox-pyenv
  override:
    - pyenv local 2.7.12 3.6.0

test:
  override:
    - sudo chown -R ubuntu:ubuntu /root/
    - /bin/bash -c 'cd /home/ubuntu/dbt && tox'
  post:
    - mv htmlcov $CIRCLE_ARTIFACTS/
converter.py
Executable file
@@ -0,0 +1,73 @@

#!/usr/bin/env python
import json
import yaml
import sys
import argparse
from datetime import datetime, timezone
import dbt.clients.registry as registry


def yaml_type(fname):
    with open(fname) as f:
        # safe_load avoids executing arbitrary YAML tags
        return yaml.safe_load(f)


def parse_args():
    parser = argparse.ArgumentParser()
    parser.add_argument("--project", type=yaml_type, default="dbt_project.yml")
    parser.add_argument("--namespace", required=True)
    return parser.parse_args()


def get_full_name(args):
    return "{}/{}".format(args.namespace, args.project["name"])


def init_project_in_packages(args, packages):
    full_name = get_full_name(args)
    if full_name not in packages:
        packages[full_name] = {
            "name": args.project["name"],
            "namespace": args.namespace,
            "latest": args.project["version"],
            "assets": {},
            "versions": {},
        }
    return packages[full_name]


def add_version_to_package(args, project_json):
    project_json["versions"][args.project["version"]] = {
        "id": "{}/{}".format(get_full_name(args), args.project["version"]),
        "name": args.project["name"],
        "version": args.project["version"],
        "description": "",
        "published_at": datetime.now(timezone.utc).astimezone().isoformat(),
        "packages": args.project.get("packages") or [],
        "works_with": [],
        "_source": {
            "type": "github",
            "url": "",
            "readme": "",
        },
        "downloads": {
            "tarball": "",
            "format": "tgz",
            "sha1": "",
        },
    }


def main():
    args = parse_args()
    packages = registry.packages()
    project_json = init_project_in_packages(args, packages)
    if args.project["version"] in project_json["versions"]:
        # report the duplicate version on stderr, then fail
        print("Version {} already in packages JSON"
              .format(args.project["version"]), file=sys.stderr)
        raise Exception("Version {} already in packages JSON"
                        .format(args.project["version"]))
    add_version_to_package(args, project_json)
    print(json.dumps(packages, indent=2))


if __name__ == "__main__":
    main()
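A minimal sketch of how converter.py's helpers compose, using a stand-in dict in place of the live `registry.packages()` payload; the `Namespace` values here are hypothetical and assume the functions above are importable:

```python
from argparse import Namespace

# stand-in for parse_args(): a project dict shaped like yaml_type()'s output
args = Namespace(
    namespace="fishtown-analytics",
    project={"name": "snowplow", "version": "0.1.0", "packages": []},
)

packages = {}  # stand-in for registry.packages()
entry = init_project_in_packages(args, packages)
add_version_to_package(args, entry)

assert packages["fishtown-analytics/snowplow"]["latest"] == "0.1.0"
assert "0.1.0" in packages["fishtown-analytics/snowplow"]["versions"]
```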
core/MANIFEST.in
Normal file
@@ -0,0 +1 @@

recursive-include dbt/include *.py *.sql *.yml *.html *.md
core/dbt/adapters/base/__init__.py
Normal file
@@ -0,0 +1,14 @@

# these are all just exports, #noqa them so flake8 will be happy

# TODO: Should we still include this in the `adapters` namespace?
from dbt.contracts.connection import Credentials  # noqa
from dbt.adapters.base.meta import available  # noqa
from dbt.adapters.base.connections import BaseConnectionManager  # noqa
from dbt.adapters.base.relation import (  # noqa
    BaseRelation,
    RelationType,
    SchemaSearchMap,
)
from dbt.adapters.base.column import Column  # noqa
from dbt.adapters.base.impl import AdapterConfig, BaseAdapter  # noqa
from dbt.adapters.base.plugin import AdapterPlugin  # noqa
core/dbt/adapters/base/column.py
Normal file
@@ -0,0 +1,155 @@

from dataclasses import dataclass
import re

from hologram import JsonSchemaMixin
from dbt.exceptions import RuntimeException

from typing import Dict, ClassVar, Any, Optional


@dataclass
class Column(JsonSchemaMixin):
    TYPE_LABELS: ClassVar[Dict[str, str]] = {
        'STRING': 'TEXT',
        'TIMESTAMP': 'TIMESTAMP',
        'FLOAT': 'FLOAT',
        'INTEGER': 'INT'
    }
    column: str
    dtype: str
    char_size: Optional[int] = None
    numeric_precision: Optional[Any] = None
    numeric_scale: Optional[Any] = None

    @classmethod
    def translate_type(cls, dtype: str) -> str:
        return cls.TYPE_LABELS.get(dtype.upper(), dtype)

    @classmethod
    def create(cls, name, label_or_dtype: str) -> 'Column':
        column_type = cls.translate_type(label_or_dtype)
        return cls(name, column_type)

    @property
    def name(self) -> str:
        return self.column

    @property
    def quoted(self) -> str:
        return '"{}"'.format(self.column)

    @property
    def data_type(self) -> str:
        if self.is_string():
            return Column.string_type(self.string_size())
        elif self.is_numeric():
            return Column.numeric_type(self.dtype, self.numeric_precision,
                                       self.numeric_scale)
        else:
            return self.dtype

    def is_string(self) -> bool:
        return self.dtype.lower() in ['text', 'character varying', 'character',
                                      'varchar']

    def is_number(self):
        return any([self.is_integer(), self.is_numeric(), self.is_float()])

    def is_float(self):
        return self.dtype.lower() in [
            # floats
            'real', 'float4', 'float', 'double precision', 'float8'
        ]

    def is_integer(self) -> bool:
        return self.dtype.lower() in [
            # real types
            'smallint', 'integer', 'bigint',
            'smallserial', 'serial', 'bigserial',
            # aliases
            'int2', 'int4', 'int8',
            'serial2', 'serial4', 'serial8',
        ]

    def is_numeric(self) -> bool:
        return self.dtype.lower() in ['numeric', 'decimal']

    def string_size(self) -> int:
        if not self.is_string():
            raise RuntimeException("Called string_size() on non-string field!")

        if self.dtype == 'text' or self.char_size is None:
            # char_size should never be None. Handle it reasonably just in case
            return 256
        else:
            return int(self.char_size)

    def can_expand_to(self, other_column: 'Column') -> bool:
        """returns True if this column can be expanded to the size of the
        other column"""
        if not self.is_string() or not other_column.is_string():
            return False

        return other_column.string_size() > self.string_size()

    def literal(self, value: Any) -> str:
        return "{}::{}".format(value, self.data_type)

    @classmethod
    def string_type(cls, size: int) -> str:
        return "character varying({})".format(size)

    @classmethod
    def numeric_type(cls, dtype: str, precision: Any, scale: Any) -> str:
        # This could be decimal(...), numeric(...), number(...)
        # Just use whatever was fed in here -- don't try to get too clever
        if precision is None or scale is None:
            return dtype
        else:
            return "{}({},{})".format(dtype, precision, scale)

    def __repr__(self) -> str:
        return "<Column {} ({})>".format(self.name, self.data_type)

    @classmethod
    def from_description(cls, name: str, raw_data_type: str) -> 'Column':
        match = re.match(r'([^(]+)(\([^)]+\))?', raw_data_type)
        if match is None:
            raise RuntimeException(
                f'Could not interpret data type "{raw_data_type}"'
            )
        data_type, size_info = match.groups()
        char_size = None
        numeric_precision = None
        numeric_scale = None
        if size_info is not None:
            # strip out the parentheses
            size_info = size_info[1:-1]
            parts = size_info.split(',')
            if len(parts) == 1:
                try:
                    char_size = int(parts[0])
                except ValueError:
                    raise RuntimeException(
                        f'Could not interpret data_type "{raw_data_type}": '
                        f'could not convert "{parts[0]}" to an integer'
                    )
            elif len(parts) == 2:
                try:
                    numeric_precision = int(parts[0])
                except ValueError:
                    raise RuntimeException(
                        f'Could not interpret data_type "{raw_data_type}": '
                        f'could not convert "{parts[0]}" to an integer'
                    )
                try:
                    numeric_scale = int(parts[1])
                except ValueError:
                    raise RuntimeException(
                        f'Could not interpret data_type "{raw_data_type}": '
                        f'could not convert "{parts[1]}" to an integer'
                    )

        return cls(
            name, data_type, char_size, numeric_precision, numeric_scale
        )
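A quick illustration of the parsing and type-translation behavior above (values chosen for illustration; assumes the `Column` class from this file is in scope):

```python
col = Column.from_description('amount', 'numeric(12,2)')
assert col.is_numeric() and col.data_type == 'numeric(12,2)'

name = Column.from_description('email', 'character varying(64)')
assert name.is_string() and name.string_size() == 64

# TYPE_LABELS maps generic labels onto concrete types
assert Column.translate_type('STRING') == 'TEXT'
assert Column.create('id', 'INTEGER').dtype == 'INT'
```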
core/dbt/adapters/base/connections.py
Normal file
@@ -0,0 +1,306 @@

import abc
import os
# multiprocessing.RLock is a function returning this type
from multiprocessing.synchronize import RLock
from threading import get_ident
from typing import (
    Dict, Tuple, Hashable, Optional, ContextManager, List, Union
)

import agate

import dbt.exceptions
from dbt.contracts.connection import (
    Connection, Identifier, ConnectionState,
    AdapterRequiredConfig, LazyHandle, AdapterResponse
)
from dbt.contracts.graph.manifest import Manifest
from dbt.adapters.base.query_headers import (
    MacroQueryStringSetter,
)
from dbt.logger import GLOBAL_LOGGER as logger
from dbt import flags


class BaseConnectionManager(metaclass=abc.ABCMeta):
    """Methods to implement:
        - exception_handler
        - cancel_open
        - open
        - begin
        - commit
        - clear_transaction
        - execute

    You must also set the 'TYPE' class attribute with a class-unique constant
    string.
    """
    TYPE: str = NotImplemented

    def __init__(self, profile: AdapterRequiredConfig):
        self.profile = profile
        self.thread_connections: Dict[Hashable, Connection] = {}
        self.lock: RLock = flags.MP_CONTEXT.RLock()
        self.query_header: Optional[MacroQueryStringSetter] = None

    def set_query_header(self, manifest: Manifest) -> None:
        self.query_header = MacroQueryStringSetter(self.profile, manifest)

    @staticmethod
    def get_thread_identifier() -> Hashable:
        # note that get_ident() may be re-used, but we should never experience
        # that within a single process
        return (os.getpid(), get_ident())

    def get_thread_connection(self) -> Connection:
        key = self.get_thread_identifier()
        with self.lock:
            if key not in self.thread_connections:
                raise dbt.exceptions.InvalidConnectionException(
                    key, list(self.thread_connections)
                )
            return self.thread_connections[key]

    def set_thread_connection(self, conn: Connection) -> None:
        key = self.get_thread_identifier()
        if key in self.thread_connections:
            raise dbt.exceptions.InternalException(
                'In set_thread_connection, existing connection exists for {}'
                .format(key)
            )
        self.thread_connections[key] = conn

    def get_if_exists(self) -> Optional[Connection]:
        key = self.get_thread_identifier()
        with self.lock:
            return self.thread_connections.get(key)

    def clear_thread_connection(self) -> None:
        key = self.get_thread_identifier()
        with self.lock:
            if key in self.thread_connections:
                del self.thread_connections[key]

    def clear_transaction(self) -> None:
        """Clear any existing transactions."""
        conn = self.get_thread_connection()
        if conn is not None:
            if conn.transaction_open:
                self._rollback(conn)
            self.begin()
            self.commit()

    def rollback_if_open(self) -> None:
        conn = self.get_if_exists()
        if conn is not None and conn.handle and conn.transaction_open:
            self._rollback(conn)

    @abc.abstractmethod
    def exception_handler(self, sql: str) -> ContextManager:
        """Create a context manager that handles exceptions caused by database
        interactions.

        :param str sql: The SQL string that the block inside the context
            manager is executing.
        :return: A context manager that handles exceptions raised by the
            underlying database.
        """
        raise dbt.exceptions.NotImplementedException(
            '`exception_handler` is not implemented for this adapter!')

    def set_connection_name(self, name: Optional[str] = None) -> Connection:
        conn_name: str
        if name is None:
            # if a name isn't specified, we'll re-use a single handle
            # named 'master'
            conn_name = 'master'
        else:
            if not isinstance(name, str):
                raise dbt.exceptions.CompilerException(
                    f'For connection name, got {name} - not a string!'
                )
            assert isinstance(name, str)
            conn_name = name

        conn = self.get_if_exists()
        if conn is None:
            conn = Connection(
                type=Identifier(self.TYPE),
                name=None,
                state=ConnectionState.INIT,
                transaction_open=False,
                handle=None,
                credentials=self.profile.credentials
            )
            self.set_thread_connection(conn)

        if conn.name == conn_name and conn.state == 'open':
            return conn

        logger.debug(
            'Acquiring new {} connection "{}".'.format(self.TYPE, conn_name))

        if conn.state == 'open':
            logger.debug(
                'Re-using an available connection from the pool (formerly {}).'
                .format(conn.name)
            )
        else:
            conn.handle = LazyHandle(self.open)

        conn.name = conn_name
        return conn

    @abc.abstractmethod
    def cancel_open(self) -> Optional[List[str]]:
        """Cancel all open connections on the adapter. (passable)"""
        raise dbt.exceptions.NotImplementedException(
            '`cancel_open` is not implemented for this adapter!'
        )

    @abc.abstractclassmethod
    def open(cls, connection: Connection) -> Connection:
        """Open the given connection on the adapter and return it.

        This may mutate the given connection (in particular, its state and its
        handle).

        This should be thread-safe, or hold the lock if necessary. The given
        connection should not be in either in_use or available.
        """
        raise dbt.exceptions.NotImplementedException(
            '`open` is not implemented for this adapter!'
        )

    def release(self) -> None:
        with self.lock:
            conn = self.get_if_exists()
            if conn is None:
                return

            try:
                # always close the connection. close() calls _rollback() if
                # there is an open transaction
                self.close(conn)
            except Exception:
                # if rollback or close failed, remove our busted connection
                self.clear_thread_connection()
                raise

    def cleanup_all(self) -> None:
        with self.lock:
            for connection in self.thread_connections.values():
                if connection.state not in {'closed', 'init'}:
                    logger.debug("Connection '{}' was left open."
                                 .format(connection.name))
                else:
                    logger.debug("Connection '{}' was properly closed."
                                 .format(connection.name))
                self.close(connection)

            # garbage collect these connections
            self.thread_connections.clear()

    @abc.abstractmethod
    def begin(self) -> None:
        """Begin a transaction. (passable)"""
        raise dbt.exceptions.NotImplementedException(
            '`begin` is not implemented for this adapter!'
        )

    @abc.abstractmethod
    def commit(self) -> None:
        """Commit a transaction. (passable)"""
        raise dbt.exceptions.NotImplementedException(
            '`commit` is not implemented for this adapter!'
        )

    @classmethod
    def _rollback_handle(cls, connection: Connection) -> None:
        """Perform the actual rollback operation."""
        try:
            connection.handle.rollback()
        except Exception:
            logger.debug(
                'Failed to rollback {}'.format(connection.name),
                exc_info=True
            )

    @classmethod
    def _close_handle(cls, connection: Connection) -> None:
        """Perform the actual close operation."""
        # On windows, sometimes connection handles don't have a close() attr.
        if hasattr(connection.handle, 'close'):
            logger.debug(f'On {connection.name}: Close')
            connection.handle.close()
        else:
            logger.debug(f'On {connection.name}: No close available on handle')

    @classmethod
    def _rollback(cls, connection: Connection) -> None:
        """Roll back the given connection."""
        if flags.STRICT_MODE:
            if not isinstance(connection, Connection):
                raise dbt.exceptions.CompilerException(
                    f'In _rollback, got {connection} - not a Connection!'
                )

        if connection.transaction_open is False:
            raise dbt.exceptions.InternalException(
                f'Tried to rollback transaction on connection '
                f'"{connection.name}", but it does not have one open!'
            )

        logger.debug(f'On {connection.name}: ROLLBACK')
        cls._rollback_handle(connection)

        connection.transaction_open = False

    @classmethod
    def close(cls, connection: Connection) -> Connection:
        if flags.STRICT_MODE:
            if not isinstance(connection, Connection):
                raise dbt.exceptions.CompilerException(
                    f'In close, got {connection} - not a Connection!'
                )

        # if the connection is in closed or init, there's nothing to do
        if connection.state in {ConnectionState.CLOSED, ConnectionState.INIT}:
            return connection

        if connection.transaction_open and connection.handle:
            logger.debug('On {}: ROLLBACK'.format(connection.name))
            cls._rollback_handle(connection)
        connection.transaction_open = False

        cls._close_handle(connection)
        connection.state = ConnectionState.CLOSED

        return connection

    def commit_if_has_connection(self) -> None:
        """If the named connection exists, commit the current transaction."""
        connection = self.get_if_exists()
        if connection:
            self.commit()

    def _add_query_comment(self, sql: str) -> str:
        if self.query_header is None:
            return sql
        return self.query_header.add(sql)

    @abc.abstractmethod
    def execute(
        self, sql: str, auto_begin: bool = False, fetch: bool = False
    ) -> Tuple[Union[str, AdapterResponse], agate.Table]:
        """Execute the given SQL.

        :param str sql: The sql to execute.
        :param bool auto_begin: If set, and dbt is not currently inside a
            transaction, automatically begin one.
        :param bool fetch: If set, fetch results.
        :return: A tuple of the status and the results (empty if fetch=False).
        :rtype: Tuple[Union[str, AdapterResponse], agate.Table]
        """
        raise dbt.exceptions.NotImplementedException(
            '`execute` is not implemented for this adapter!'
        )
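As a sketch of the contract above: a minimal, hypothetical manager only fills in the abstract hooks; thread-to-connection bookkeeping, rollback-on-close, and query comments come from the base class. This assumes the imports in the module above are in scope.

```python
from contextlib import contextmanager


class DummyConnectionManager(BaseConnectionManager):
    TYPE = 'dummy'  # hypothetical, class-unique adapter type name

    @contextmanager
    def exception_handler(self, sql: str):
        try:
            yield
        except Exception:
            self.release()  # drop the busted connection, then re-raise
            raise

    def cancel_open(self):
        return None  # nothing to cancel in this sketch

    @classmethod
    def open(cls, connection):
        # a real adapter would open a DBAPI handle here
        connection.handle = object()
        connection.state = ConnectionState.OPEN
        return connection

    def begin(self):
        pass  # no transactions in this sketch

    def commit(self):
        pass

    def execute(self, sql, auto_begin=False, fetch=False):
        raise NotImplementedError('no database behind this sketch')
```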
core/dbt/adapters/base/impl.py
Normal file
(file diff suppressed because it is too large: 1261 lines)
core/dbt/adapters/base/meta.py
Normal file
@@ -0,0 +1,126 @@

import abc
from functools import wraps
from typing import Callable, Optional, Any, FrozenSet, Dict, Set

from dbt.deprecations import warn, renamed_method


Decorator = Callable[[Any], Callable]


class _Available:
    def __call__(self, func: Callable) -> Callable:
        func._is_available_ = True  # type: ignore
        return func

    def parse(self, parse_replacement: Callable) -> Decorator:
        """A decorator factory to indicate that a method on the adapter will be
        exposed to the database wrapper, and will be stubbed out at parse time
        with the given function.

            @available.parse()
            def my_method(self, a, b):
                if something:
                    return None
                return big_expensive_db_query()

            @available.parse(lambda *args, **kwargs: {})
            def my_other_method(self, a, b):
                x = {}
                x.update(big_expensive_db_query())
                return x
        """
        def inner(func):
            func._parse_replacement_ = parse_replacement
            return self(func)
        return inner

    def deprecated(
        self, supported_name: str, parse_replacement: Optional[Callable] = None
    ) -> Decorator:
        """A decorator that marks a function as available, but also prints a
        deprecation warning. Use like

            @available.deprecated('my_new_method')
            def my_old_method(self, arg):
                args = compatibility_shim(arg)
                return self.my_new_method(*args)

            @available.deprecated('my_new_slow_method', lambda *a, **k: (0, ''))
            def my_old_slow_method(self, arg):
                args = compatibility_shim(arg)
                return self.my_new_slow_method(*args)

        To make `adapter.my_old_method` available but also print out a warning
        on use directing users to `my_new_method`.

        The optional parse_replacement, if provided, will provide a parse-time
        replacement for the actual method (see `available.parse`).
        """
        def wrapper(func):
            func_name = func.__name__
            renamed_method(func_name, supported_name)

            @wraps(func)
            def inner(*args, **kwargs):
                warn('adapter:{}'.format(func_name))
                return func(*args, **kwargs)

            if parse_replacement:
                available_function = self.parse(parse_replacement)
            else:
                available_function = self
            return available_function(inner)
        return wrapper

    def parse_none(self, func: Callable) -> Callable:
        wrapper = self.parse(lambda *a, **k: None)
        return wrapper(func)

    def parse_list(self, func: Callable) -> Callable:
        wrapper = self.parse(lambda *a, **k: [])
        return wrapper(func)


available = _Available()


class AdapterMeta(abc.ABCMeta):
    _available_: FrozenSet[str]
    _parse_replacements_: Dict[str, Callable]

    def __new__(mcls, name, bases, namespace, **kwargs):
        # mypy does not like the `**kwargs`. But `ABCMeta` itself takes
        # `**kwargs` in its argspec here (and passes them to `type.__new__`).
        # I'm not sure there is any benefit to it after poking around a bit,
        # but having it doesn't hurt on the python side (and omitting it could
        # hurt for obscure metaclass reasons, for all I know)
        cls = abc.ABCMeta.__new__(  # type: ignore
            mcls, name, bases, namespace, **kwargs
        )

        # this is very much inspired by ABCMeta's own implementation

        # dict mapping the method name to whether the model name should be
        # injected into the arguments. All methods in here are exposed to the
        # context.
        available: Set[str] = set()
        replacements: Dict[str, Any] = {}

        # collect base class data first
        for base in bases:
            available.update(getattr(base, '_available_', set()))
            replacements.update(getattr(base, '_parse_replacements_', {}))

        # override with local data if it exists
        for name, value in namespace.items():
            if getattr(value, '_is_available_', False):
                available.add(name)
            parse_replacement = getattr(value, '_parse_replacement_', None)
            if parse_replacement is not None:
                replacements[name] = parse_replacement

        cls._available_ = frozenset(available)
        # should this be a namedtuple so it will be immutable like _available_?
        cls._parse_replacements_ = replacements
        return cls
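To see what `AdapterMeta` collects, here is a small, hypothetical class using the decorators above (names are illustrative only):

```python
class DemoAdapter(metaclass=AdapterMeta):
    @available
    def drop_relation(self, relation):
        return relation

    @available.parse_list
    def list_schemas(self, database):
        return ['db_backed_result']


assert DemoAdapter._available_ == frozenset({'drop_relation', 'list_schemas'})
# at parse time, list_schemas would be stubbed out to return []
assert DemoAdapter._parse_replacements_['list_schemas']() == []
```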
core/dbt/adapters/base/plugin.py
Normal file
@@ -0,0 +1,42 @@

from typing import List, Optional, Type

from dbt.adapters.base import Credentials
from dbt.exceptions import CompilationException
from dbt.adapters.protocol import AdapterProtocol


def project_name_from_path(include_path: str) -> str:
    # avoid an import cycle
    from dbt.config.project import Project
    partial = Project.partial_load(include_path)
    if partial.project_name is None:
        raise CompilationException(
            f'Invalid project at {include_path}: name not set!'
        )
    return partial.project_name


class AdapterPlugin:
    """Defines the basic requirements for a dbt adapter plugin.

    :param include_path: The path to this adapter plugin's root
    :param dependencies: A list of adapter names that this adapter depends
        upon.
    """
    def __init__(
        self,
        adapter: Type[AdapterProtocol],
        credentials: Type[Credentials],
        include_path: str,
        dependencies: Optional[List[str]] = None
    ):

        self.adapter: Type[AdapterProtocol] = adapter
        self.credentials: Type[Credentials] = credentials
        self.include_path: str = include_path
        self.project_name: str = project_name_from_path(include_path)
        self.dependencies: List[str]
        if dependencies is None:
            self.dependencies = []
        else:
            self.dependencies = dependencies
core/dbt/adapters/base/query_headers.py
Normal file
@@ -0,0 +1,101 @@

from threading import local
from typing import Optional, Callable, Dict, Any

from dbt.clients.jinja import QueryStringGenerator

from dbt.context.manifest import generate_query_header_context
from dbt.contracts.connection import AdapterRequiredConfig, QueryComment
from dbt.contracts.graph.compiled import CompileResultNode
from dbt.contracts.graph.manifest import Manifest
from dbt.exceptions import RuntimeException


class NodeWrapper:
    def __init__(self, node):
        self._inner_node = node

    def __getattr__(self, name):
        return getattr(self._inner_node, name, '')


class _QueryComment(local):
    """A thread-local class storing thread-specific state information for
    connection management, namely:
        - the current thread's query comment.
        - a source_name indicating what set the current thread's query comment
    """
    def __init__(self, initial):
        self.query_comment: Optional[str] = initial
        self.append = False

    def add(self, sql: str) -> str:
        if not self.query_comment:
            return sql

        if self.append:
            # replace last ';' with '<comment>;'
            sql = sql.rstrip()
            if sql[-1] == ';':
                sql = sql[:-1]
                return '{}\n/* {} */;'.format(sql, self.query_comment.strip())

            return '{}\n/* {} */'.format(sql, self.query_comment.strip())

        return '/* {} */\n{}'.format(self.query_comment.strip(), sql)

    def set(self, comment: Optional[str], append: bool):
        if isinstance(comment, str) and '*/' in comment:
            # tell the user "no" so they don't hurt themselves by writing
            # garbage
            raise RuntimeException(
                f'query comment contains illegal value "*/": {comment}'
            )
        self.query_comment = comment
        self.append = append


QueryStringFunc = Callable[[str, Optional[NodeWrapper]], str]


class MacroQueryStringSetter:
    def __init__(self, config: AdapterRequiredConfig, manifest: Manifest):
        self.manifest = manifest
        self.config = config

        comment_macro = self._get_comment_macro()
        self.generator: QueryStringFunc = lambda name, model: ''
        # if the comment value was None or the empty string, just skip it
        if comment_macro:
            assert isinstance(comment_macro, str)
            macro = '\n'.join((
                '{%- macro query_comment_macro(connection_name, node) -%}',
                comment_macro,
                '{% endmacro %}'
            ))
            ctx = self._get_context()
            self.generator = QueryStringGenerator(macro, ctx)
        self.comment = _QueryComment(None)
        self.reset()

    def _get_comment_macro(self) -> Optional[str]:
        return self.config.query_comment.comment

    def _get_context(self) -> Dict[str, Any]:
        return generate_query_header_context(self.config, self.manifest)

    def add(self, sql: str) -> str:
        return self.comment.add(sql)

    def reset(self):
        self.set('master', None)

    def set(self, name: str, node: Optional[CompileResultNode]):
        wrapped: Optional[NodeWrapper] = None
        if node is not None:
            wrapped = NodeWrapper(node)
        comment_str = self.generator(name, wrapped)

        append = False
        if isinstance(self.config.query_comment, QueryComment):
            append = self.config.query_comment.append
        self.comment.set(comment_str, append)
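The placement logic in `_QueryComment.add` is easiest to see directly (a sketch, assuming the class above is importable; the comment text is arbitrary):

```python
qc = _QueryComment('executed via dbt')

# default: the comment is prepended to the statement
assert qc.add('select 1') == '/* executed via dbt */\nselect 1'

# append mode: the comment lands just before the trailing semicolon
qc.set('executed via dbt', append=True)
assert qc.add('select 1;') == 'select 1\n/* executed via dbt */;'
```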
core/dbt/adapters/base/relation.py
Normal file
@@ -0,0 +1,455 @@

from collections.abc import Hashable
from dataclasses import dataclass
from typing import (
    Optional, TypeVar, Any, Type, Dict, Union, Iterator, Tuple, Set
)

from dbt.contracts.graph.compiled import CompiledNode
from dbt.contracts.graph.parsed import ParsedSourceDefinition, ParsedNode
from dbt.contracts.relation import (
    RelationType, ComponentName, HasQuoting, FakeAPIObject, Policy, Path
)
from dbt.exceptions import InternalException
from dbt.node_types import NodeType
from dbt.utils import filter_null_values, deep_merge, classproperty

import dbt.exceptions


Self = TypeVar('Self', bound='BaseRelation')


@dataclass(frozen=True, eq=False, repr=False)
class BaseRelation(FakeAPIObject, Hashable):
    type: Optional[RelationType]
    path: Path
    quote_character: str = '"'
    include_policy: Policy = Policy()
    quote_policy: Policy = Policy()
    dbt_created: bool = False

    def _is_exactish_match(self, field: ComponentName, value: str) -> bool:
        if self.dbt_created and self.quote_policy.get_part(field) is False:
            return self.path.get_lowered_part(field) == value.lower()
        else:
            return self.path.get_part(field) == value

    @classmethod
    def _get_field_named(cls, field_name):
        for field, _ in cls._get_fields():
            if field.name == field_name:
                return field
        # this should be unreachable
        raise ValueError(f'BaseRelation has no {field_name} field!')

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.to_dict() == other.to_dict()

    @classmethod
    def get_default_quote_policy(cls) -> Policy:
        return cls._get_field_named('quote_policy').default

    @classmethod
    def get_default_include_policy(cls) -> Policy:
        return cls._get_field_named('include_policy').default

    def get(self, key, default=None):
        """Override `.get` to return a metadata object so we don't break
        dbt_utils.
        """
        if key == 'metadata':
            return {
                'type': self.__class__.__name__
            }
        return super().get(key, default)

    def matches(
        self,
        database: Optional[str] = None,
        schema: Optional[str] = None,
        identifier: Optional[str] = None,
    ) -> bool:
        search = filter_null_values({
            ComponentName.Database: database,
            ComponentName.Schema: schema,
            ComponentName.Identifier: identifier
        })

        if not search:
            # nothing was passed in
            raise dbt.exceptions.RuntimeException(
                "Tried to match relation, but no search path was passed!")

        exact_match = True
        approximate_match = True

        for k, v in search.items():
            if not self._is_exactish_match(k, v):
                exact_match = False

            if self.path.get_lowered_part(k) != v.lower():
                approximate_match = False

        if approximate_match and not exact_match:
            target = self.create(
                database=database, schema=schema, identifier=identifier
            )
            dbt.exceptions.approximate_relation_match(target, self)

        return exact_match

    def replace_path(self, **kwargs):
        return self.replace(path=self.path.replace(**kwargs))

    def quote(
        self: Self,
        database: Optional[bool] = None,
        schema: Optional[bool] = None,
        identifier: Optional[bool] = None,
    ) -> Self:
        policy = filter_null_values({
            ComponentName.Database: database,
            ComponentName.Schema: schema,
            ComponentName.Identifier: identifier
        })

        new_quote_policy = self.quote_policy.replace_dict(policy)
        return self.replace(quote_policy=new_quote_policy)

    def include(
        self: Self,
        database: Optional[bool] = None,
        schema: Optional[bool] = None,
        identifier: Optional[bool] = None,
    ) -> Self:
        policy = filter_null_values({
            ComponentName.Database: database,
            ComponentName.Schema: schema,
            ComponentName.Identifier: identifier
        })

        new_include_policy = self.include_policy.replace_dict(policy)
        return self.replace(include_policy=new_include_policy)

    def information_schema(self, view_name=None) -> 'InformationSchema':
        # some of our data comes from jinja, where things can be `Undefined`.
        if not isinstance(view_name, str):
            view_name = None

        # Kick the user-supplied schema out of the information schema relation
        # Instead address this as <database>.information_schema by default
        info_schema = InformationSchema.from_relation(self, view_name)
        return info_schema.incorporate(path={"schema": None})

    def information_schema_only(self) -> 'InformationSchema':
        return self.information_schema()

    def without_identifier(self) -> 'BaseRelation':
        """Return a form of this relation that only has the database and schema
        set to included. To get the appropriately-quoted form of the result
        (for use as part of a query), use `.render()`. To get the raw database
        or schema name, use `.database` or `.schema`.

        The hash of the returned object is the result of render().
        """
        return self.include(identifier=False).replace_path(identifier=None)

    def _render_iterator(
        self
    ) -> Iterator[Tuple[Optional[ComponentName], Optional[str]]]:

        for key in ComponentName:
            path_part: Optional[str] = None
            if self.include_policy.get_part(key):
                path_part = self.path.get_part(key)
            if path_part is not None and self.quote_policy.get_part(key):
                path_part = self.quoted(path_part)
            yield key, path_part

    def render(self) -> str:
        # if there is nothing set, this will return the empty string.
        return '.'.join(
            part for _, part in self._render_iterator()
            if part is not None
        )

    def quoted(self, identifier):
        return '{quote_char}{identifier}{quote_char}'.format(
            quote_char=self.quote_character,
            identifier=identifier,
        )

    @classmethod
    def create_from_source(
        cls: Type[Self], source: ParsedSourceDefinition, **kwargs: Any
    ) -> Self:
        source_quoting = source.quoting.to_dict()
        source_quoting.pop('column', None)
        quote_policy = deep_merge(
            cls.get_default_quote_policy().to_dict(),
            source_quoting,
            kwargs.get('quote_policy', {}),
        )

        return cls.create(
            database=source.database,
            schema=source.schema,
            identifier=source.identifier,
            quote_policy=quote_policy,
            **kwargs
        )

    @staticmethod
    def add_ephemeral_prefix(name: str):
        return f'__dbt__cte__{name}'

    @classmethod
    def create_ephemeral_from_node(
        cls: Type[Self],
        config: HasQuoting,
        node: Union[ParsedNode, CompiledNode],
    ) -> Self:
        # Note that ephemeral models are based on the name.
        identifier = cls.add_ephemeral_prefix(node.name)
        return cls.create(
            type=cls.CTE,
            identifier=identifier,
        ).quote(identifier=False)

    @classmethod
    def create_from_node(
        cls: Type[Self],
        config: HasQuoting,
        node: Union[ParsedNode, CompiledNode],
        quote_policy: Optional[Dict[str, bool]] = None,
        **kwargs: Any,
    ) -> Self:
        if quote_policy is None:
            quote_policy = {}

        quote_policy = dbt.utils.merge(config.quoting, quote_policy)

        return cls.create(
            database=node.database,
            schema=node.schema,
            identifier=node.alias,
            quote_policy=quote_policy,
            **kwargs)

    @classmethod
    def create_from(
        cls: Type[Self],
        config: HasQuoting,
        node: Union[CompiledNode, ParsedNode, ParsedSourceDefinition],
        **kwargs: Any,
    ) -> Self:
        if node.resource_type == NodeType.Source:
            if not isinstance(node, ParsedSourceDefinition):
                raise InternalException(
                    'type mismatch, expected ParsedSourceDefinition but got {}'
                    .format(type(node))
                )
            return cls.create_from_source(node, **kwargs)
        else:
            if not isinstance(node, (ParsedNode, CompiledNode)):
                raise InternalException(
                    'type mismatch, expected ParsedNode or CompiledNode but '
                    'got {}'.format(type(node))
                )
            return cls.create_from_node(config, node, **kwargs)

    @classmethod
    def create(
        cls: Type[Self],
        database: Optional[str] = None,
        schema: Optional[str] = None,
        identifier: Optional[str] = None,
        type: Optional[RelationType] = None,
        **kwargs,
    ) -> Self:
        kwargs.update({
            'path': {
                'database': database,
                'schema': schema,
                'identifier': identifier,
            },
            'type': type,
        })
        return cls.from_dict(kwargs)

    def __repr__(self) -> str:
        return "<{} {}>".format(self.__class__.__name__, self.render())

    def __hash__(self) -> int:
        return hash(self.render())

    def __str__(self) -> str:
        return self.render()

    @property
    def database(self) -> Optional[str]:
        return self.path.database

    @property
    def schema(self) -> Optional[str]:
        return self.path.schema

    @property
    def identifier(self) -> Optional[str]:
        return self.path.identifier

    @property
    def table(self) -> Optional[str]:
        return self.path.identifier

    # Here for compatibility with old Relation interface
    @property
    def name(self) -> Optional[str]:
        return self.identifier

    @property
    def is_table(self) -> bool:
        return self.type == RelationType.Table

    @property
    def is_cte(self) -> bool:
        return self.type == RelationType.CTE

    @property
    def is_view(self) -> bool:
        return self.type == RelationType.View

    @classproperty
    def Table(cls) -> str:
        return str(RelationType.Table)

    @classproperty
    def CTE(cls) -> str:
        return str(RelationType.CTE)

    @classproperty
    def View(cls) -> str:
        return str(RelationType.View)

    @classproperty
    def External(cls) -> str:
        return str(RelationType.External)

    @classproperty
    def get_relation_type(cls) -> Type[RelationType]:
        return RelationType


Info = TypeVar('Info', bound='InformationSchema')


@dataclass(frozen=True, eq=False, repr=False)
class InformationSchema(BaseRelation):
    information_schema_view: Optional[str] = None

    def __post_init__(self):
        if not isinstance(self.information_schema_view, (type(None), str)):
            raise dbt.exceptions.CompilationException(
                'Got an invalid name: {}'.format(self.information_schema_view)
            )

    @classmethod
    def get_path(
        cls, relation: BaseRelation, information_schema_view: Optional[str]
    ) -> Path:
        return Path(
            database=relation.database,
            schema=relation.schema,
            identifier='INFORMATION_SCHEMA',
        )

    @classmethod
    def get_include_policy(
        cls,
        relation,
        information_schema_view: Optional[str],
    ) -> Policy:
        return relation.include_policy.replace(
            database=relation.database is not None,
            schema=False,
            identifier=True,
        )

    @classmethod
    def get_quote_policy(
        cls,
        relation,
        information_schema_view: Optional[str],
    ) -> Policy:
        return relation.quote_policy.replace(
            identifier=False,
        )

    @classmethod
    def from_relation(
        cls: Type[Info],
        relation: BaseRelation,
        information_schema_view: Optional[str],
    ) -> Info:
        include_policy = cls.get_include_policy(
            relation, information_schema_view
        )
        quote_policy = cls.get_quote_policy(relation, information_schema_view)
        path = cls.get_path(relation, information_schema_view)
        return cls(
            type=RelationType.View,
            path=path,
            include_policy=include_policy,
            quote_policy=quote_policy,
            information_schema_view=information_schema_view,
        )

    def _render_iterator(self):
        for k, v in super()._render_iterator():
            yield k, v
        yield None, self.information_schema_view


class SchemaSearchMap(Dict[InformationSchema, Set[Optional[str]]]):
    """A utility class to keep track of what information_schema tables to
    search for what schemas. The schema values are all lowercased to avoid
    duplication.
    """
    def add(self, relation: BaseRelation):
        key = relation.information_schema_only()
        if key not in self:
            self[key] = set()
        schema: Optional[str] = None
        if relation.schema is not None:
            schema = relation.schema.lower()
        self[key].add(schema)

    def search(
        self
    ) -> Iterator[Tuple[InformationSchema, Optional[str]]]:
        for information_schema_name, schemas in self.items():
            for schema in schemas:
                yield information_schema_name, schema

    def flatten(self):
        new = self.__class__()

        # make sure we don't have duplicates
        seen = {r.database.lower() for r in self if r.database}
        if len(seen) > 1:
            dbt.exceptions.raise_compiler_error(str(seen))

        for information_schema_name, schema in self.search():
            path = {
                'database': information_schema_name.database,
                'schema': schema
            }
            new.add(information_schema_name.incorporate(
                path=path,
                quote_policy={'database': False},
                include_policy={'database': False},
            ))

        return new
523
core/dbt/adapters/cache.py
Normal file
523
core/dbt/adapters/cache.py
Normal file
@@ -0,0 +1,523 @@
|
||||
from collections import namedtuple
|
||||
from copy import deepcopy
|
||||
from typing import List, Iterable, Optional, Dict, Set, Tuple, Any
|
||||
import threading
|
||||
|
||||
from dbt.logger import CACHE_LOGGER as logger
|
||||
from dbt.utils import lowercase
|
||||
import dbt.exceptions
|
||||
|
||||
_ReferenceKey = namedtuple('_ReferenceKey', 'database schema identifier')
|
||||
|
||||
|
||||
def _make_key(relation) -> _ReferenceKey:
|
||||
"""Make _ReferenceKeys with lowercase values for the cache so we don't have
|
||||
to keep track of quoting
|
||||
"""
|
||||
# databases and schemas can both be None
|
||||
return _ReferenceKey(lowercase(relation.database),
|
||||
lowercase(relation.schema),
|
||||
lowercase(relation.identifier))
|
||||
|
||||
|
||||
def dot_separated(key: _ReferenceKey) -> str:
|
||||
"""Return the key in dot-separated string form.
|
||||
|
||||
:param _ReferenceKey key: The key to stringify.
|
||||
"""
|
||||
return '.'.join(map(str, key))
|
||||
|
||||
|
||||
class _CachedRelation:
|
||||
"""Nothing about _CachedRelation is guaranteed to be thread-safe!
|
||||
|
||||
:attr str schema: The schema of this relation.
|
||||
:attr str identifier: The identifier of this relation.
|
||||
:attr Dict[_ReferenceKey, _CachedRelation] referenced_by: The relations
|
||||
that refer to this relation.
|
||||
:attr BaseRelation inner: The underlying dbt relation.
|
||||
"""
|
||||
def __init__(self, inner):
|
||||
self.referenced_by = {}
|
||||
self.inner = inner
|
||||
|
||||
def __str__(self) -> str:
|
||||
return (
|
||||
'_CachedRelation(database={}, schema={}, identifier={}, inner={})'
|
||||
).format(self.database, self.schema, self.identifier, self.inner)
|
||||
|
||||
@property
|
||||
def database(self) -> Optional[str]:
|
||||
return lowercase(self.inner.database)
|
||||
|
||||
@property
|
||||
def schema(self) -> Optional[str]:
|
||||
return lowercase(self.inner.schema)
|
||||
|
||||
@property
|
||||
def identifier(self) -> Optional[str]:
|
||||
return lowercase(self.inner.identifier)
|
||||
|
||||
def __copy__(self):
|
||||
new = self.__class__(self.inner)
|
||||
new.__dict__.update(self.__dict__)
|
||||
return new
|
||||
|
||||
def __deepcopy__(self, memo):
|
||||
new = self.__class__(self.inner.incorporate())
|
||||
new.__dict__.update(self.__dict__)
|
||||
new.referenced_by = deepcopy(self.referenced_by, memo)
|
||||
|
||||
def is_referenced_by(self, key):
|
||||
return key in self.referenced_by
|
||||
|
||||
def key(self):
|
||||
"""Get the _ReferenceKey that represents this relation
|
||||
|
||||
:return _ReferenceKey: A key for this relation.
|
||||
"""
|
||||
return _make_key(self)
|
||||
|
||||
def add_reference(self, referrer: '_CachedRelation'):
|
||||
"""Add a reference from referrer to self, indicating that if this node
|
||||
were drop...cascaded, the referrer would be dropped as well.
|
||||
|
||||
:param _CachedRelation referrer: The node that refers to this node.
|
||||
"""
|
||||
self.referenced_by[referrer.key()] = referrer
|
||||
|
||||
def collect_consequences(self):
|
||||
"""Recursively collect a set of _ReferenceKeys that would
|
||||
consequentially get dropped if this were dropped via
|
||||
"drop ... cascade".
|
||||
|
||||
:return Set[_ReferenceKey]: All the relations that would be dropped
|
||||
"""
|
||||
consequences = {self.key()}
|
||||
for relation in self.referenced_by.values():
|
||||
consequences.update(relation.collect_consequences())
|
||||
return consequences
|
||||
|
||||
def release_references(self, keys):
|
||||
"""Non-recursively indicate that an iterable of _ReferenceKey no longer
|
||||
exist. Unknown keys are ignored.
|
||||
|
||||
:param Iterable[_ReferenceKey] keys: The keys to drop.
|
||||
"""
|
||||
keys = set(self.referenced_by) & set(keys)
|
||||
for key in keys:
|
||||
self.referenced_by.pop(key)
|
||||
|
||||
def rename(self, new_relation):
|
||||
"""Rename this cached relation to new_relation.
|
||||
Note that this will change the output of key(), all refs must be
|
||||
updated!
|
||||
|
||||
:param _CachedRelation new_relation: The new name to apply to the
|
||||
relation
|
||||
"""
|
||||
# Relations store this stuff inside their `path` dict. But they
|
||||
# also store a table_name, and usually use it in their .render(),
|
||||
# so we need to update that as well. It doesn't appear that
|
||||
# table_name is ever anything but the identifier (via .create())
|
||||
self.inner = self.inner.incorporate(
|
||||
path={
|
||||
'database': new_relation.inner.database,
|
||||
'schema': new_relation.inner.schema,
|
||||
'identifier': new_relation.inner.identifier
|
||||
},
|
||||
)
|
||||
|
||||
def rename_key(self, old_key, new_key):
|
||||
"""Rename a reference that may or may not exist. Only handles the
|
||||
reference itself, so this is the other half of what `rename` does.
|
||||
|
||||
If old_key is not in referenced_by, this is a no-op.
|
||||
|
||||
:param _ReferenceKey old_key: The old key to be renamed.
|
||||
:param _ReferenceKey new_key: The new key to rename to.
|
||||
:raises InternalError: If the new key already exists.
|
||||
"""
|
||||
if new_key in self.referenced_by:
|
||||
dbt.exceptions.raise_cache_inconsistent(
|
||||
'in rename of "{}" -> "{}", new name is in the cache already'
|
||||
.format(old_key, new_key)
|
||||
)
|
||||
|
||||
if old_key not in self.referenced_by:
|
||||
return
|
||||
value = self.referenced_by.pop(old_key)
|
||||
self.referenced_by[new_key] = value
|
||||
|
||||
def dump_graph_entry(self):
|
||||
"""Return a key/value pair representing this key and its referents.
|
||||
|
||||
return List[str]: The dot-separated form of all referent keys.
|
||||
"""
|
||||
return [dot_separated(r) for r in self.referenced_by]
|
||||
|
||||
|
||||
def lazy_log(msg, func):
|
||||
if logger.disabled:
|
||||
return
|
||||
logger.debug(msg.format(func()))
|
||||
|
||||
|
||||
class RelationsCache:
|
||||
"""A cache of the relations known to dbt. Keeps track of relationships
|
||||
declared between tables and handles renames/drops as a real database would.
|
||||
|
||||
:attr Dict[_ReferenceKey, _CachedRelation] relations: The known relations.
|
||||
:attr threading.RLock lock: The lock around relations, held during updates.
|
||||
The adapters also hold this lock while filling the cache.
|
||||
:attr Set[str] schemas: The set of known/cached schemas, all lowercased.
|
||||
"""
|
||||
def __init__(self) -> None:
|
||||
self.relations: Dict[_ReferenceKey, _CachedRelation] = {}
|
||||
self.lock = threading.RLock()
|
||||
self.schemas: Set[Tuple[Optional[str], Optional[str]]] = set()
|
||||
|
||||
def add_schema(
|
||||
self, database: Optional[str], schema: Optional[str],
|
||||
) -> None:
|
||||
"""Add a schema to the set of known schemas (case-insensitive)
|
||||
|
||||
:param database: The database name to add.
|
||||
:param schema: The schema name to add.
|
||||
"""
|
||||
self.schemas.add((lowercase(database), lowercase(schema)))
|
||||
|
||||
def drop_schema(
|
||||
self, database: Optional[str], schema: Optional[str],
|
||||
) -> None:
|
||||
"""Drop the given schema and remove it from the set of known schemas.
|
||||
|
||||
Then remove all its contents (and their dependents, etc) as well.
|
||||
"""
|
||||
key = (lowercase(database), lowercase(schema))
|
||||
if key not in self.schemas:
|
||||
return
|
||||
|
||||
# avoid iterating over self.relations while removing things by
|
||||
# collecting the list first.
|
||||
|
||||
with self.lock:
|
||||
to_remove = self._list_relations_in_schema(database, schema)
|
||||
self._remove_all(to_remove)
|
||||
# handle a drop_schema race by using discard() over remove()
|
||||
self.schemas.discard(key)
|
||||
|
||||
def update_schemas(self, schemas: Iterable[Tuple[Optional[str], str]]):
|
||||
"""Add multiple schemas to the set of known schemas (case-insensitive)
|
||||
|
||||
:param schemas: An iterable of the schema names to add.
|
||||
"""
|
||||
self.schemas.update((lowercase(d), s.lower()) for (d, s) in schemas)
|
||||
|
||||
def __contains__(self, schema_id: Tuple[Optional[str], str]):
|
||||
"""A schema is 'in' the relations cache if it is in the set of cached
|
||||
schemas.
|
||||
|
||||
:param schema_id: The db name and schema name to look up.
|
||||
"""
|
||||
db, schema = schema_id
|
||||
return (lowercase(db), schema.lower()) in self.schemas
|
||||
|
||||
def dump_graph(self):
|
||||
"""Dump a key-only representation of the schema to a dictionary. Every
|
||||
known relation is a key with a value of a list of keys it is referenced
|
||||
by.
|
||||
"""
|
||||
# we have to hold the lock for the entire dump, if other threads modify
|
||||
# self.relations or any cache entry's referenced_by during iteration
|
||||
# it's a runtime error!
|
||||
with self.lock:
|
||||
return {
|
||||
dot_separated(k): v.dump_graph_entry()
|
||||
for k, v in self.relations.items()
|
||||
}
|
||||
|
||||
def _setdefault(self, relation: _CachedRelation):
|
||||
"""Add a relation to the cache, or return it if it already exists.
|
||||
|
||||
:param _CachedRelation relation: The relation to set or get.
|
||||
:return _CachedRelation: The relation stored under the given relation's
|
||||
key
|
||||
"""
|
||||
self.add_schema(relation.database, relation.schema)
|
||||
key = relation.key()
|
||||
return self.relations.setdefault(key, relation)
|
||||
|
||||
def _add_link(self, referenced_key, dependent_key):
|
||||
"""Add a link between two relations to the database. Both the old and
|
||||
new entries must alraedy exist in the database.
|
||||
|
||||
:param _ReferenceKey referenced_key: The key identifying the referenced
|
||||
model (the one that if dropped will drop the dependent model).
|
||||
:param _ReferenceKey dependent_key: The key identifying the dependent
|
||||
model.
|
||||
:raises InternalError: If either entry does not exist.
|
||||
"""
|
||||
referenced = self.relations.get(referenced_key)
|
||||
if referenced is None:
|
||||
return
|
||||
if referenced is None:
|
||||
dbt.exceptions.raise_cache_inconsistent(
|
||||
'in add_link, referenced link key {} not in cache!'
|
||||
.format(referenced_key)
|
||||
)
|
||||
|
||||
dependent = self.relations.get(dependent_key)
|
||||
if dependent is None:
|
||||
dbt.exceptions.raise_cache_inconsistent(
|
||||
'in add_link, dependent link key {} not in cache!'
|
||||
.format(dependent_key)
|
||||
)
|
||||
|
||||
assert dependent is not None # we just raised!
|
||||
|
||||
referenced.add_reference(dependent)
|
||||
|
||||
def add_link(self, referenced, dependent):
|
||||
"""Add a link between two relations to the database. If either relation
|
||||
does not exist, it will be added as an "external" relation.
|
||||
|
||||
The dependent model refers _to_ the referenced model. So, given
|
||||
arguments of (jake_test, bar, jake_test, foo):
|
||||
both values are in the schema jake_test and foo is a view that refers
|
||||
to bar, so "drop bar cascade" will drop foo and all of foo's
|
||||
dependents.
|
||||
|
||||
:param BaseRelation referenced: The referenced model.
|
||||
:param BaseRelation dependent: The dependent model.
|
||||
:raises InternalError: If either entry does not exist.
|
||||
"""
|
||||
ref_key = _make_key(referenced)
|
||||
if (ref_key.database, ref_key.schema) not in self:
|
||||
# if we have not cached the referenced schema at all, we must be
|
||||
# referring to a table outside our control. There's no need to make
|
||||
# a link - we will never drop the referenced relation during a run.
|
||||
logger.debug(
|
||||
'{dep!s} references {ref!s} but {ref.database}.{ref.schema} '
|
||||
'is not in the cache, skipping assumed external relation'
|
||||
.format(dep=dependent, ref=ref_key)
|
||||
)
|
||||
return
|
||||
if ref_key not in self.relations:
|
||||
# Insert a dummy "external" relation.
|
||||
referenced = referenced.replace(
|
||||
type=referenced.External
|
||||
)
|
||||
self.add(referenced)
|
||||
|
||||
dep_key = _make_key(dependent)
|
||||
if dep_key not in self.relations:
|
||||
# Insert a dummy "external" relation.
|
||||
dependent = dependent.replace(
|
||||
type=referenced.External
|
||||
)
|
||||
self.add(dependent)
|
||||
logger.debug(
|
||||
'adding link, {!s} references {!s}'.format(dep_key, ref_key)
|
||||
)
|
||||
with self.lock:
|
||||
self._add_link(ref_key, dep_key)
|
||||
|
||||
def add(self, relation):
|
||||
"""Add the relation inner to the cache, under the schema schema and
|
||||
identifier identifier
|
||||
|
||||
:param BaseRelation relation: The underlying relation.
|
||||
"""
|
||||
cached = _CachedRelation(relation)
|
||||
logger.debug('Adding relation: {!s}'.format(cached))
|
||||
|
||||
lazy_log('before adding: {!s}', self.dump_graph)
|
||||
|
||||
with self.lock:
|
||||
self._setdefault(cached)
|
||||
|
||||
lazy_log('after adding: {!s}', self.dump_graph)
|
||||
|
||||
def _remove_refs(self, keys):
|
||||
"""Removes all references to all entries in keys. This does not
|
||||
cascade!
|
||||
|
||||
:param Iterable[_ReferenceKey] keys: The keys to remove.
|
||||
"""
|
||||
# remove direct refs
|
||||
for key in keys:
|
||||
del self.relations[key]
|
||||
# then remove all entries from each child
|
||||
for cached in self.relations.values():
|
||||
cached.release_references(keys)
|
||||
|
||||
def _drop_cascade_relation(self, dropped):
|
||||
"""Drop the given relation and cascade it appropriately to all
|
||||
dependent relations.
|
||||
|
||||
:param _CachedRelation dropped: An existing _CachedRelation to drop.
|
||||
"""
|
||||
if dropped not in self.relations:
|
||||
logger.debug('dropped a nonexistent relationship: {!s}'
|
||||
.format(dropped))
|
||||
return
|
||||
consequences = self.relations[dropped].collect_consequences()
|
||||
logger.debug(
|
||||
'drop {} is cascading to {}'.format(dropped, consequences)
|
||||
)
|
||||
self._remove_refs(consequences)
|
||||
|
||||
def drop(self, relation):
|
||||
"""Drop the named relation and cascade it appropriately to all
|
||||
dependent relations.
|
||||
|
||||
Because dbt proactively does many `drop relation if exist ... cascade`
|
||||
that are noops, nonexistent relation drops cause a debug log and no
|
||||
other actions.
|
||||
|
||||
:param str schema: The schema of the relation to drop.
|
||||
:param str identifier: The identifier of the relation to drop.
|
||||
"""
|
||||
dropped = _make_key(relation)
|
||||
logger.debug('Dropping relation: {!s}'.format(dropped))
|
||||
with self.lock:
|
||||
self._drop_cascade_relation(dropped)
|
||||
|
||||
def _rename_relation(self, old_key, new_relation):
|
||||
"""Rename a relation named old_key to new_key, updating references.
|
||||
Return whether or not there was a key to rename.
|
||||
|
||||
:param _ReferenceKey old_key: The existing key, to rename from.
|
||||
:param _CachedRelation new_key: The new relation, to rename to.
|
||||
"""
|
||||
# On the database level, a rename updates all values that were
|
||||
# previously referenced by old_name to be referenced by new_name.
|
||||
# basically, the name changes but some underlying ID moves. Kind of
|
||||
# like an object reference!
|
||||
relation = self.relations.pop(old_key)
|
||||
new_key = new_relation.key()
|
||||
|
||||
# relaton has to rename its innards, so it needs the _CachedRelation.
|
||||
relation.rename(new_relation)
|
||||
# update all the relations that refer to it
|
||||
for cached in self.relations.values():
|
||||
if cached.is_referenced_by(old_key):
|
||||
logger.debug(
|
||||
'updated reference from {0} -> {2} to {1} -> {2}'
|
||||
.format(old_key, new_key, cached.key())
|
||||
)
|
||||
cached.rename_key(old_key, new_key)
|
||||
|
||||
self.relations[new_key] = relation
|
||||
# also fixup the schemas!
|
||||
self.add_schema(new_key.database, new_key.schema)
|
||||
|
||||
return True
|
||||
|
||||
def _check_rename_constraints(self, old_key, new_key):
|
||||
"""Check the rename constraints, and return whether or not the rename
|
||||
can proceed.
|
||||
|
||||
If the new key is already present, that is an error.
|
||||
If the old key is absent, we debug log and return False, assuming it's
|
||||
a temp table being renamed.
|
||||
|
||||
:param _ReferenceKey old_key: The existing key, to rename from.
|
||||
:param _ReferenceKey new_key: The new key, to rename to.
|
||||
:return bool: If the old relation exists for renaming.
|
||||
:raises InternalError: If the new key is already present.
|
||||
"""
|
||||
if new_key in self.relations:
|
||||
dbt.exceptions.raise_cache_inconsistent(
|
||||
'in rename, new key {} already in cache: {}'
|
||||
.format(new_key, list(self.relations.keys()))
|
||||
)
|
||||
|
||||
if old_key not in self.relations:
|
||||
logger.debug(
|
||||
'old key {} not found in self.relations, assuming temporary'
|
||||
.format(old_key)
|
||||
)
|
||||
return False
|
||||
return True
|
||||
|
||||
def rename(self, old, new):
|
||||
"""Rename the old schema/identifier to the new schema/identifier and
|
||||
update references.
|
||||
|
||||
If the new schema/identifier is already present, that is an error.
|
||||
If the schema/identifier key is absent, we only debug log and return,
|
||||
assuming it's a temp table being renamed.
|
||||
|
||||
:param BaseRelation old: The existing relation name information.
|
||||
:param BaseRelation new: The new relation name information.
|
||||
:raises InternalError: If the new key is already present.
|
||||
"""
|
||||
old_key = _make_key(old)
|
||||
new_key = _make_key(new)
|
||||
logger.debug('Renaming relation {!s} to {!s}'.format(
|
||||
old_key, new_key
|
||||
))
|
||||
|
||||
lazy_log('before rename: {!s}', self.dump_graph)
|
||||
|
||||
with self.lock:
|
||||
if self._check_rename_constraints(old_key, new_key):
|
||||
self._rename_relation(old_key, _CachedRelation(new))
|
||||
else:
|
||||
self._setdefault(_CachedRelation(new))
|
||||
|
||||
lazy_log('after rename: {!s}', self.dump_graph)
|
||||
|
||||
def get_relations(
|
||||
self, database: Optional[str], schema: Optional[str]
|
||||
) -> List[Any]:
|
||||
"""Case-insensitively yield all relations matching the given schema.
|
||||
|
||||
:param str schema: The case-insensitive schema name to list from.
|
||||
:return List[BaseRelation]: The list of relations with the given
|
||||
schema
|
||||
"""
|
||||
database = lowercase(database)
|
||||
schema = lowercase(schema)
|
||||
with self.lock:
|
||||
results = [
|
||||
r.inner for r in self.relations.values()
|
||||
if (lowercase(r.schema) == schema and
|
||||
lowercase(r.database) == database)
|
||||
]
|
||||
|
||||
if None in results:
|
||||
dbt.exceptions.raise_cache_inconsistent(
|
||||
'in get_relations, a None relation was found in the cache!'
|
||||
)
|
||||
return results
|
||||
|
||||
def clear(self):
|
||||
"""Clear the cache"""
|
||||
with self.lock:
|
||||
self.relations.clear()
|
||||
self.schemas.clear()
|
||||
|
||||
def _list_relations_in_schema(
|
||||
self, database: Optional[str], schema: Optional[str]
|
||||
) -> List[_CachedRelation]:
|
||||
"""Get the relations in a schema. Callers should hold the lock."""
|
||||
key = (lowercase(database), lowercase(schema))
|
||||
|
||||
to_remove: List[_CachedRelation] = []
|
||||
for cachekey, relation in self.relations.items():
|
||||
if (cachekey.database, cachekey.schema) == key:
|
||||
to_remove.append(relation)
|
||||
return to_remove
|
||||
|
||||
def _remove_all(self, to_remove: List[_CachedRelation]):
|
||||
"""Remove all the listed relations. Ignore relations that have been
|
||||
cascaded out.
|
||||
"""
|
||||
for relation in to_remove:
|
||||
# it may have been cascaded out already
|
||||
drop_key = _make_key(relation)
|
||||
if drop_key in self.relations:
|
||||
self.drop(drop_key)
|
||||
227
core/dbt/adapters/factory.py
Normal file
227
core/dbt/adapters/factory.py
Normal file
@@ -0,0 +1,227 @@
|
||||
import threading
|
||||
from pathlib import Path
|
||||
from importlib import import_module
|
||||
from typing import Type, Dict, Any, List, Optional, Set
|
||||
|
||||
from dbt.exceptions import RuntimeException, InternalException
|
||||
from dbt.include.global_project import (
|
||||
PACKAGE_PATH as GLOBAL_PROJECT_PATH,
|
||||
PROJECT_NAME as GLOBAL_PROJECT_NAME,
|
||||
)
|
||||
from dbt.logger import GLOBAL_LOGGER as logger
|
||||
from dbt.contracts.connection import Credentials, AdapterRequiredConfig
|
||||
|
||||
|
||||
from dbt.adapters.protocol import (
|
||||
AdapterProtocol,
|
||||
AdapterConfig,
|
||||
RelationProtocol,
|
||||
)
|
||||
from dbt.adapters.base.plugin import AdapterPlugin
|
||||
|
||||
|
||||
Adapter = AdapterProtocol
|
||||
|
||||
|
||||
class AdapterContainer:
|
||||
def __init__(self):
|
||||
self.lock = threading.Lock()
|
||||
self.adapters: Dict[str, Adapter] = {}
|
||||
self.plugins: Dict[str, AdapterPlugin] = {}
|
||||
# map package names to their include paths
|
||||
self.packages: Dict[str, Path] = {
|
||||
GLOBAL_PROJECT_NAME: Path(GLOBAL_PROJECT_PATH),
|
||||
}
|
||||
|
||||
def get_plugin_by_name(self, name: str) -> AdapterPlugin:
|
||||
with self.lock:
|
||||
if name in self.plugins:
|
||||
return self.plugins[name]
|
||||
names = ", ".join(self.plugins.keys())
|
||||
|
||||
message = f"Invalid adapter type {name}! Must be one of {names}"
|
||||
raise RuntimeException(message)
|
||||
|
||||
def get_adapter_class_by_name(self, name: str) -> Type[Adapter]:
|
||||
plugin = self.get_plugin_by_name(name)
|
||||
return plugin.adapter
|
||||
|
||||
def get_relation_class_by_name(self, name: str) -> Type[RelationProtocol]:
|
||||
adapter = self.get_adapter_class_by_name(name)
|
||||
return adapter.Relation
|
||||
|
||||
def get_config_class_by_name(
|
||||
self, name: str
|
||||
) -> Type[AdapterConfig]:
|
||||
adapter = self.get_adapter_class_by_name(name)
|
||||
return adapter.AdapterSpecificConfigs
|
||||
|
||||
def load_plugin(self, name: str) -> Type[Credentials]:
|
||||
# this doesn't need a lock: in the worst case we'll overwrite packages
|
||||
# and adapter_type entries with the same value, as they're all
|
||||
# singletons
|
||||
try:
|
||||
# mypy doesn't think modules have any attributes.
|
||||
mod: Any = import_module('.' + name, 'dbt.adapters')
|
||||
except ModuleNotFoundError as exc:
|
||||
# if we failed to import the target module in particular, inform
|
||||
# the user about it via a runtime error
|
||||
if exc.name == 'dbt.adapters.' + name:
|
||||
raise RuntimeException(f'Could not find adapter type {name}!')
|
||||
logger.info(f'Error importing adapter: {exc}')
|
||||
# otherwise, the error had to have come from some underlying
|
||||
# library. Log the stack trace.
|
||||
logger.debug('', exc_info=True)
|
||||
raise
|
||||
plugin: AdapterPlugin = mod.Plugin
|
||||
plugin_type = plugin.adapter.type()
|
||||
|
||||
if plugin_type != name:
|
||||
raise RuntimeException(
|
||||
f'Expected to find adapter with type named {name}, got '
|
||||
f'adapter with type {plugin_type}'
|
||||
)
|
||||
|
||||
with self.lock:
|
||||
# things do hold the lock to iterate over it so we need it to add
|
||||
self.plugins[name] = plugin
|
||||
|
||||
self.packages[plugin.project_name] = Path(plugin.include_path)
|
||||
|
||||
for dep in plugin.dependencies:
|
||||
self.load_plugin(dep)
|
||||
|
||||
return plugin.credentials
|
||||
|
||||
def register_adapter(self, config: AdapterRequiredConfig) -> None:
|
||||
adapter_name = config.credentials.type
|
||||
adapter_type = self.get_adapter_class_by_name(adapter_name)
|
||||
|
||||
with self.lock:
|
||||
if adapter_name in self.adapters:
|
||||
# this shouldn't really happen...
|
||||
return
|
||||
|
||||
adapter: Adapter = adapter_type(config) # type: ignore
|
||||
self.adapters[adapter_name] = adapter
|
||||
|
||||
def lookup_adapter(self, adapter_name: str) -> Adapter:
|
||||
return self.adapters[adapter_name]
|
||||
|
||||
def reset_adapters(self):
|
||||
"""Clear the adapters. This is useful for tests, which change configs.
|
||||
"""
|
||||
with self.lock:
|
||||
for adapter in self.adapters.values():
|
||||
adapter.cleanup_connections()
|
||||
self.adapters.clear()
|
||||
|
||||
def cleanup_connections(self):
|
||||
"""Only clean up the adapter connections list without resetting the
|
||||
actual adapters.
|
||||
"""
|
||||
with self.lock:
|
||||
for adapter in self.adapters.values():
|
||||
adapter.cleanup_connections()
|
||||
|
||||
def get_adapter_plugins(self, name: Optional[str]) -> List[AdapterPlugin]:
|
||||
"""Iterate over the known adapter plugins. If a name is provided,
|
||||
iterate in dependency order over the named plugin and its dependencies.
|
||||
"""
|
||||
if name is None:
|
||||
return list(self.plugins.values())
|
||||
|
||||
plugins: List[AdapterPlugin] = []
|
||||
seen: Set[str] = set()
|
||||
plugin_names: List[str] = [name]
|
||||
while plugin_names:
|
||||
plugin_name = plugin_names[0]
|
||||
plugin_names = plugin_names[1:]
|
||||
try:
|
||||
plugin = self.plugins[plugin_name]
|
||||
except KeyError:
|
||||
raise InternalException(
|
||||
f'No plugin found for {plugin_name}'
|
||||
) from None
|
||||
plugins.append(plugin)
|
||||
seen.add(plugin_name)
|
||||
if plugin.dependencies is None:
|
||||
continue
|
||||
for dep in plugin.dependencies:
|
||||
if dep not in seen:
|
||||
plugin_names.append(dep)
|
||||
return plugins
|
||||
|
||||
def get_adapter_package_names(self, name: Optional[str]) -> List[str]:
|
||||
package_names: List[str] = [
|
||||
p.project_name for p in self.get_adapter_plugins(name)
|
||||
]
|
||||
package_names.append(GLOBAL_PROJECT_NAME)
|
||||
return package_names
|
||||
|
||||
def get_include_paths(self, name: Optional[str]) -> List[Path]:
|
||||
paths = []
|
||||
for package_name in self.get_adapter_package_names(name):
|
||||
try:
|
||||
path = self.packages[package_name]
|
||||
except KeyError:
|
||||
raise InternalException(
|
||||
f'No internal package listing found for {package_name}'
|
||||
)
|
||||
paths.append(path)
|
||||
return paths
|
||||
|
||||
def get_adapter_type_names(self, name: Optional[str]) -> List[str]:
|
||||
return [p.adapter.type() for p in self.get_adapter_plugins(name)]
|
||||
|
||||
|
||||
FACTORY: AdapterContainer = AdapterContainer()
|
||||
|
||||
|
||||
def register_adapter(config: AdapterRequiredConfig) -> None:
|
||||
FACTORY.register_adapter(config)
|
||||
|
||||
|
||||
def get_adapter(config: AdapterRequiredConfig):
|
||||
return FACTORY.lookup_adapter(config.credentials.type)
|
||||
|
||||
|
||||
def reset_adapters():
|
||||
"""Clear the adapters. This is useful for tests, which change configs.
|
||||
"""
|
||||
FACTORY.reset_adapters()
|
||||
|
||||
|
||||
def cleanup_connections():
|
||||
"""Only clean up the adapter connections list without resetting the actual
|
||||
adapters.
|
||||
"""
|
||||
FACTORY.cleanup_connections()
|
||||
|
||||
|
||||
def get_adapter_class_by_name(name: str) -> Type[AdapterProtocol]:
|
||||
return FACTORY.get_adapter_class_by_name(name)
|
||||
|
||||
|
||||
def get_config_class_by_name(name: str) -> Type[AdapterConfig]:
|
||||
return FACTORY.get_config_class_by_name(name)
|
||||
|
||||
|
||||
def get_relation_class_by_name(name: str) -> Type[RelationProtocol]:
|
||||
return FACTORY.get_relation_class_by_name(name)
|
||||
|
||||
|
||||
def load_plugin(name: str) -> Type[Credentials]:
|
||||
return FACTORY.load_plugin(name)
|
||||
|
||||
|
||||
def get_include_paths(name: Optional[str]) -> List[Path]:
|
||||
return FACTORY.get_include_paths(name)
|
||||
|
||||
|
||||
def get_adapter_package_names(name: Optional[str]) -> List[str]:
|
||||
return FACTORY.get_adapter_package_names(name)
|
||||
|
||||
|
||||
def get_adapter_type_names(name: Optional[str]) -> List[str]:
|
||||
return FACTORY.get_adapter_type_names(name)
|
||||
163
core/dbt/adapters/protocol.py
Normal file
163
core/dbt/adapters/protocol.py
Normal file
@@ -0,0 +1,163 @@
|
||||
from dataclasses import dataclass
|
||||
from typing import (
|
||||
Type, Hashable, Optional, ContextManager, List, Generic, TypeVar, ClassVar,
|
||||
Tuple, Union, Dict, Any
|
||||
)
|
||||
from typing_extensions import Protocol
|
||||
|
||||
import agate
|
||||
|
||||
from dbt.contracts.connection import (
|
||||
Connection, AdapterRequiredConfig, AdapterResponse
|
||||
)
|
||||
from dbt.contracts.graph.compiled import (
|
||||
CompiledNode, ManifestNode, NonSourceCompiledNode
|
||||
)
|
||||
from dbt.contracts.graph.parsed import ParsedNode, ParsedSourceDefinition
|
||||
from dbt.contracts.graph.model_config import BaseConfig
|
||||
from dbt.contracts.graph.manifest import Manifest
|
||||
from dbt.contracts.relation import Policy, HasQuoting
|
||||
|
||||
from dbt.graph import Graph
|
||||
|
||||
|
||||
@dataclass
|
||||
class AdapterConfig(BaseConfig):
|
||||
pass
|
||||
|
||||
|
||||
class ConnectionManagerProtocol(Protocol):
|
||||
TYPE: str
|
||||
|
||||
|
||||
class ColumnProtocol(Protocol):
|
||||
pass
|
||||
|
||||
|
||||
Self = TypeVar('Self', bound='RelationProtocol')
|
||||
|
||||
|
||||
class RelationProtocol(Protocol):
|
||||
@classmethod
|
||||
def get_default_quote_policy(cls) -> Policy:
|
||||
...
|
||||
|
||||
@classmethod
|
||||
def create_from(
|
||||
cls: Type[Self],
|
||||
config: HasQuoting,
|
||||
node: Union[CompiledNode, ParsedNode, ParsedSourceDefinition],
|
||||
) -> Self:
|
||||
...
|
||||
|
||||
|
||||
class CompilerProtocol(Protocol):
|
||||
def compile(self, manifest: Manifest, write=True) -> Graph:
|
||||
...
|
||||
|
||||
def compile_node(
|
||||
self,
|
||||
node: ManifestNode,
|
||||
manifest: Manifest,
|
||||
extra_context: Optional[Dict[str, Any]] = None,
|
||||
) -> NonSourceCompiledNode:
|
||||
...
|
||||
|
||||
|
||||
AdapterConfig_T = TypeVar(
|
||||
'AdapterConfig_T', bound=AdapterConfig
|
||||
)
|
||||
ConnectionManager_T = TypeVar(
|
||||
'ConnectionManager_T', bound=ConnectionManagerProtocol
|
||||
)
|
||||
Relation_T = TypeVar(
|
||||
'Relation_T', bound=RelationProtocol
|
||||
)
|
||||
Column_T = TypeVar(
|
||||
'Column_T', bound=ColumnProtocol
|
||||
)
|
||||
Compiler_T = TypeVar('Compiler_T', bound=CompilerProtocol)
|
||||
|
||||
|
||||
class AdapterProtocol(
|
||||
Protocol,
|
||||
Generic[
|
||||
AdapterConfig_T,
|
||||
ConnectionManager_T,
|
||||
Relation_T,
|
||||
Column_T,
|
||||
Compiler_T,
|
||||
]
|
||||
):
|
||||
AdapterSpecificConfigs: ClassVar[Type[AdapterConfig_T]]
|
||||
Column: ClassVar[Type[Column_T]]
|
||||
Relation: ClassVar[Type[Relation_T]]
|
||||
ConnectionManager: ClassVar[Type[ConnectionManager_T]]
|
||||
connections: ConnectionManager_T
|
||||
|
||||
def __init__(self, config: AdapterRequiredConfig):
|
||||
...
|
||||
|
||||
@classmethod
|
||||
def type(cls) -> str:
|
||||
pass
|
||||
|
||||
def set_query_header(self, manifest: Manifest) -> None:
|
||||
...
|
||||
|
||||
@staticmethod
|
||||
def get_thread_identifier() -> Hashable:
|
||||
...
|
||||
|
||||
def get_thread_connection(self) -> Connection:
|
||||
...
|
||||
|
||||
def set_thread_connection(self, conn: Connection) -> None:
|
||||
...
|
||||
|
||||
def get_if_exists(self) -> Optional[Connection]:
|
||||
...
|
||||
|
||||
def clear_thread_connection(self) -> None:
|
||||
...
|
||||
|
||||
def clear_transaction(self) -> None:
|
||||
...
|
||||
|
||||
def exception_handler(self, sql: str) -> ContextManager:
|
||||
...
|
||||
|
||||
def set_connection_name(self, name: Optional[str] = None) -> Connection:
|
||||
...
|
||||
|
||||
def cancel_open(self) -> Optional[List[str]]:
|
||||
...
|
||||
|
||||
def open(cls, connection: Connection) -> Connection:
|
||||
...
|
||||
|
||||
def release(self) -> None:
|
||||
...
|
||||
|
||||
def cleanup_all(self) -> None:
|
||||
...
|
||||
|
||||
def begin(self) -> None:
|
||||
...
|
||||
|
||||
def commit(self) -> None:
|
||||
...
|
||||
|
||||
def close(cls, connection: Connection) -> Connection:
|
||||
...
|
||||
|
||||
def commit_if_has_connection(self) -> None:
|
||||
...
|
||||
|
||||
def execute(
|
||||
self, sql: str, auto_begin: bool = False, fetch: bool = False
|
||||
) -> Tuple[Union[str, AdapterResponse], agate.Table]:
|
||||
...
|
||||
|
||||
def get_compiler(self) -> Compiler_T:
|
||||
...
|
||||
3
core/dbt/adapters/sql/__init__.py
Normal file
3
core/dbt/adapters/sql/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
# these are all just exports, #noqa them so flake8 will be happy
|
||||
from dbt.adapters.sql.connections import SQLConnectionManager # noqa
|
||||
from dbt.adapters.sql.impl import SQLAdapter # noqa
|
||||
175
core/dbt/adapters/sql/connections.py
Normal file
175
core/dbt/adapters/sql/connections.py
Normal file
@@ -0,0 +1,175 @@
|
||||
import abc
|
||||
import time
|
||||
from typing import List, Optional, Tuple, Any, Iterable, Dict, Union
|
||||
|
||||
import agate
|
||||
|
||||
import dbt.clients.agate_helper
|
||||
import dbt.exceptions
|
||||
from dbt.adapters.base import BaseConnectionManager
|
||||
from dbt.contracts.connection import (
|
||||
Connection, ConnectionState, AdapterResponse
|
||||
)
|
||||
from dbt.logger import GLOBAL_LOGGER as logger
|
||||
from dbt import flags
|
||||
|
||||
|
||||
class SQLConnectionManager(BaseConnectionManager):
|
||||
"""The default connection manager with some common SQL methods implemented.
|
||||
|
||||
Methods to implement:
|
||||
- exception_handler
|
||||
- cancel
|
||||
- get_response
|
||||
- open
|
||||
"""
|
||||
@abc.abstractmethod
|
||||
def cancel(self, connection: Connection):
|
||||
"""Cancel the given connection."""
|
||||
raise dbt.exceptions.NotImplementedException(
|
||||
'`cancel` is not implemented for this adapter!'
|
||||
)
|
||||
|
||||
def cancel_open(self) -> List[str]:
|
||||
names = []
|
||||
this_connection = self.get_if_exists()
|
||||
with self.lock:
|
||||
for connection in self.thread_connections.values():
|
||||
if connection is this_connection:
|
||||
continue
|
||||
|
||||
# if the connection failed, the handle will be None so we have
|
||||
# nothing to cancel.
|
||||
if (
|
||||
connection.handle is not None and
|
||||
connection.state == ConnectionState.OPEN
|
||||
):
|
||||
self.cancel(connection)
|
||||
if connection.name is not None:
|
||||
names.append(connection.name)
|
||||
return names
|
||||
|
||||
def add_query(
|
||||
self,
|
||||
sql: str,
|
||||
auto_begin: bool = True,
|
||||
bindings: Optional[Any] = None,
|
||||
abridge_sql_log: bool = False
|
||||
) -> Tuple[Connection, Any]:
|
||||
connection = self.get_thread_connection()
|
||||
if auto_begin and connection.transaction_open is False:
|
||||
self.begin()
|
||||
|
||||
logger.debug('Using {} connection "{}".'
|
||||
.format(self.TYPE, connection.name))
|
||||
|
||||
with self.exception_handler(sql):
|
||||
if abridge_sql_log:
|
||||
log_sql = '{}...'.format(sql[:512])
|
||||
else:
|
||||
log_sql = sql
|
||||
|
||||
logger.debug(
|
||||
'On {connection_name}: {sql}',
|
||||
connection_name=connection.name,
|
||||
sql=log_sql,
|
||||
)
|
||||
pre = time.time()
|
||||
|
||||
cursor = connection.handle.cursor()
|
||||
cursor.execute(sql, bindings)
|
||||
logger.debug(
|
||||
"SQL status: {status} in {elapsed:0.2f} seconds",
|
||||
status=self.get_response(cursor),
|
||||
elapsed=(time.time() - pre)
|
||||
)
|
||||
|
||||
return connection, cursor
|
||||
|
||||
@abc.abstractclassmethod
|
||||
def get_response(cls, cursor: Any) -> Union[AdapterResponse, str]:
|
||||
"""Get the status of the cursor."""
|
||||
raise dbt.exceptions.NotImplementedException(
|
||||
'`get_response` is not implemented for this adapter!'
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def process_results(
|
||||
cls,
|
||||
column_names: Iterable[str],
|
||||
rows: Iterable[Any]
|
||||
) -> List[Dict[str, Any]]:
|
||||
|
||||
return [dict(zip(column_names, row)) for row in rows]
|
||||
|
||||
@classmethod
|
||||
def get_result_from_cursor(cls, cursor: Any) -> agate.Table:
|
||||
data: List[Any] = []
|
||||
column_names: List[str] = []
|
||||
|
||||
if cursor.description is not None:
|
||||
column_names = [col[0] for col in cursor.description]
|
||||
rows = cursor.fetchall()
|
||||
data = cls.process_results(column_names, rows)
|
||||
|
||||
return dbt.clients.agate_helper.table_from_data_flat(
|
||||
data,
|
||||
column_names
|
||||
)
|
||||
|
||||
def execute(
|
||||
self, sql: str, auto_begin: bool = False, fetch: bool = False
|
||||
) -> Tuple[Union[AdapterResponse, str], agate.Table]:
|
||||
sql = self._add_query_comment(sql)
|
||||
_, cursor = self.add_query(sql, auto_begin)
|
||||
response = self.get_response(cursor)
|
||||
if fetch:
|
||||
table = self.get_result_from_cursor(cursor)
|
||||
else:
|
||||
table = dbt.clients.agate_helper.empty_table()
|
||||
return response, table
|
||||
|
||||
def add_begin_query(self):
|
||||
return self.add_query('BEGIN', auto_begin=False)
|
||||
|
||||
def add_commit_query(self):
|
||||
return self.add_query('COMMIT', auto_begin=False)
|
||||
|
||||
def begin(self):
|
||||
connection = self.get_thread_connection()
|
||||
|
||||
if flags.STRICT_MODE:
|
||||
if not isinstance(connection, Connection):
|
||||
raise dbt.exceptions.CompilerException(
|
||||
f'In begin, got {connection} - not a Connection!'
|
||||
)
|
||||
|
||||
if connection.transaction_open is True:
|
||||
raise dbt.exceptions.InternalException(
|
||||
'Tried to begin a new transaction on connection "{}", but '
|
||||
'it already had one open!'.format(connection.name))
|
||||
|
||||
self.add_begin_query()
|
||||
|
||||
connection.transaction_open = True
|
||||
return connection
|
||||
|
||||
def commit(self):
|
||||
connection = self.get_thread_connection()
|
||||
if flags.STRICT_MODE:
|
||||
if not isinstance(connection, Connection):
|
||||
raise dbt.exceptions.CompilerException(
|
||||
f'In commit, got {connection} - not a Connection!'
|
||||
)
|
||||
|
||||
if connection.transaction_open is False:
|
||||
raise dbt.exceptions.InternalException(
|
||||
'Tried to commit transaction on connection "{}", but '
|
||||
'it does not have one open!'.format(connection.name))
|
||||
|
||||
logger.debug('On {}: COMMIT'.format(connection.name))
|
||||
self.add_commit_query()
|
||||
|
||||
connection.transaction_open = False
|
||||
|
||||
return connection
|
||||
250
core/dbt/adapters/sql/impl.py
Normal file
250
core/dbt/adapters/sql/impl.py
Normal file
@@ -0,0 +1,250 @@
|
||||
import agate
|
||||
from typing import Any, Optional, Tuple, Type, List
|
||||
|
||||
import dbt.clients.agate_helper
|
||||
from dbt.contracts.connection import Connection
|
||||
import dbt.exceptions
|
||||
from dbt.adapters.base import BaseAdapter, available
|
||||
from dbt.adapters.sql import SQLConnectionManager
|
||||
from dbt.logger import GLOBAL_LOGGER as logger
|
||||
|
||||
from dbt.adapters.base.relation import BaseRelation
|
||||
|
||||
LIST_RELATIONS_MACRO_NAME = 'list_relations_without_caching'
|
||||
GET_COLUMNS_IN_RELATION_MACRO_NAME = 'get_columns_in_relation'
|
||||
LIST_SCHEMAS_MACRO_NAME = 'list_schemas'
|
||||
CHECK_SCHEMA_EXISTS_MACRO_NAME = 'check_schema_exists'
|
||||
CREATE_SCHEMA_MACRO_NAME = 'create_schema'
|
||||
DROP_SCHEMA_MACRO_NAME = 'drop_schema'
|
||||
RENAME_RELATION_MACRO_NAME = 'rename_relation'
|
||||
TRUNCATE_RELATION_MACRO_NAME = 'truncate_relation'
|
||||
DROP_RELATION_MACRO_NAME = 'drop_relation'
|
||||
ALTER_COLUMN_TYPE_MACRO_NAME = 'alter_column_type'
|
||||
|
||||
|
||||
class SQLAdapter(BaseAdapter):
|
||||
"""The default adapter with the common agate conversions and some SQL
|
||||
methods implemented. This adapter has a different much shorter list of
|
||||
methods to implement, but some more macros that must be implemented.
|
||||
|
||||
To implement a macro, implement "${adapter_type}__${macro_name}". in the
|
||||
adapter's internal project.
|
||||
|
||||
Methods to implement:
|
||||
- date_function
|
||||
|
||||
Macros to implement:
|
||||
- get_catalog
|
||||
- list_relations_without_caching
|
||||
- get_columns_in_relation
|
||||
"""
|
||||
|
||||
ConnectionManager: Type[SQLConnectionManager]
|
||||
connections: SQLConnectionManager
|
||||
|
||||
@available.parse(lambda *a, **k: (None, None))
|
||||
def add_query(
|
||||
self,
|
||||
sql: str,
|
||||
auto_begin: bool = True,
|
||||
bindings: Optional[Any] = None,
|
||||
abridge_sql_log: bool = False,
|
||||
) -> Tuple[Connection, Any]:
|
||||
"""Add a query to the current transaction. A thin wrapper around
|
||||
ConnectionManager.add_query.
|
||||
|
||||
:param sql: The SQL query to add
|
||||
:param auto_begin: If set and there is no transaction in progress,
|
||||
begin a new one.
|
||||
:param bindings: An optional list of bindings for the query.
|
||||
:param abridge_sql_log: If set, limit the raw sql logged to 512
|
||||
characters
|
||||
"""
|
||||
return self.connections.add_query(sql, auto_begin, bindings,
|
||||
abridge_sql_log)
|
||||
|
||||
@classmethod
|
||||
def convert_text_type(cls, agate_table: agate.Table, col_idx: int) -> str:
|
||||
return "text"
|
||||
|
||||
@classmethod
|
||||
def convert_number_type(
|
||||
cls, agate_table: agate.Table, col_idx: int
|
||||
) -> str:
|
||||
decimals = agate_table.aggregate(agate.MaxPrecision(col_idx))
|
||||
return "float8" if decimals else "integer"
|
||||
|
||||
@classmethod
|
||||
def convert_boolean_type(
|
||||
cls, agate_table: agate.Table, col_idx: int
|
||||
) -> str:
|
||||
return "boolean"
|
||||
|
||||
@classmethod
|
||||
def convert_datetime_type(
|
||||
cls, agate_table: agate.Table, col_idx: int
|
||||
) -> str:
|
||||
return "timestamp without time zone"
|
||||
|
||||
@classmethod
|
||||
def convert_date_type(cls, agate_table: agate.Table, col_idx: int) -> str:
|
||||
return "date"
|
||||
|
||||
@classmethod
|
||||
def convert_time_type(cls, agate_table: agate.Table, col_idx: int) -> str:
|
||||
return "time"
|
||||
|
||||
@classmethod
|
||||
def is_cancelable(cls) -> bool:
|
||||
return True
|
||||
|
||||
def expand_column_types(self, goal, current):
|
||||
reference_columns = {
|
||||
c.name: c for c in
|
||||
self.get_columns_in_relation(goal)
|
||||
}
|
||||
|
||||
target_columns = {
|
||||
c.name: c for c
|
||||
in self.get_columns_in_relation(current)
|
||||
}
|
||||
|
||||
for column_name, reference_column in reference_columns.items():
|
||||
target_column = target_columns.get(column_name)
|
||||
|
||||
if target_column is not None and \
|
||||
target_column.can_expand_to(reference_column):
|
||||
col_string_size = reference_column.string_size()
|
||||
new_type = self.Column.string_type(col_string_size)
|
||||
logger.debug("Changing col type from {} to {} in table {}",
|
||||
target_column.data_type, new_type, current)
|
||||
|
||||
self.alter_column_type(current, column_name, new_type)
|
||||
|
||||
def alter_column_type(
|
||||
self, relation, column_name, new_column_type
|
||||
) -> None:
|
||||
"""
|
||||
1. Create a new column (w/ temp name and correct type)
|
||||
2. Copy data over to it
|
||||
3. Drop the existing column (cascade!)
|
||||
4. Rename the new column to existing column
|
||||
"""
|
||||
kwargs = {
|
||||
'relation': relation,
|
||||
'column_name': column_name,
|
||||
'new_column_type': new_column_type,
|
||||
}
|
||||
self.execute_macro(
|
||||
ALTER_COLUMN_TYPE_MACRO_NAME,
|
||||
kwargs=kwargs
|
||||
)
|
||||
|
||||
def drop_relation(self, relation):
|
||||
if relation.type is None:
|
||||
dbt.exceptions.raise_compiler_error(
|
||||
'Tried to drop relation {}, but its type is null.'
|
||||
.format(relation))
|
||||
|
||||
self.cache_dropped(relation)
|
||||
self.execute_macro(
|
||||
DROP_RELATION_MACRO_NAME,
|
||||
kwargs={'relation': relation}
|
||||
)
|
||||
|
||||
def truncate_relation(self, relation):
|
||||
self.execute_macro(
|
||||
TRUNCATE_RELATION_MACRO_NAME,
|
||||
kwargs={'relation': relation}
|
||||
)
|
||||
|
||||
def rename_relation(self, from_relation, to_relation):
|
||||
self.cache_renamed(from_relation, to_relation)
|
||||
|
||||
kwargs = {'from_relation': from_relation, 'to_relation': to_relation}
|
||||
self.execute_macro(
|
||||
RENAME_RELATION_MACRO_NAME,
|
||||
kwargs=kwargs
|
||||
)
|
||||
|
||||
def get_columns_in_relation(self, relation):
|
||||
return self.execute_macro(
|
||||
GET_COLUMNS_IN_RELATION_MACRO_NAME,
|
||||
kwargs={'relation': relation}
|
||||
)
|
||||
|
||||
def create_schema(self, relation: BaseRelation) -> None:
|
||||
relation = relation.without_identifier()
|
||||
logger.debug('Creating schema "{}"', relation)
|
||||
kwargs = {
|
||||
'relation': relation,
|
||||
}
|
||||
self.execute_macro(CREATE_SCHEMA_MACRO_NAME, kwargs=kwargs)
|
||||
self.commit_if_has_connection()
|
||||
# we can't update the cache here, as if the schema already existed we
|
||||
# don't want to (incorrectly) say that it's empty
|
||||
|
||||
def drop_schema(self, relation: BaseRelation) -> None:
|
||||
relation = relation.without_identifier()
|
||||
logger.debug('Dropping schema "{}".', relation)
|
||||
kwargs = {
|
||||
'relation': relation,
|
||||
}
|
||||
self.execute_macro(DROP_SCHEMA_MACRO_NAME, kwargs=kwargs)
|
||||
# we can update the cache here
|
||||
self.cache.drop_schema(relation.database, relation.schema)
|
||||
|
||||
def list_relations_without_caching(
|
||||
self, schema_relation: BaseRelation,
|
||||
) -> List[BaseRelation]:
|
||||
kwargs = {'schema_relation': schema_relation}
|
||||
results = self.execute_macro(
|
||||
LIST_RELATIONS_MACRO_NAME,
|
||||
kwargs=kwargs
|
||||
)
|
||||
|
||||
relations = []
|
||||
quote_policy = {
|
||||
'database': True,
|
||||
'schema': True,
|
||||
'identifier': True
|
||||
}
|
||||
for _database, name, _schema, _type in results:
|
||||
try:
|
||||
_type = self.Relation.get_relation_type(_type)
|
||||
except ValueError:
|
||||
_type = self.Relation.External
|
||||
relations.append(self.Relation.create(
|
||||
database=_database,
|
||||
schema=_schema,
|
||||
identifier=name,
|
||||
quote_policy=quote_policy,
|
||||
type=_type
|
||||
))
|
||||
return relations
|
||||
|
||||
def quote(self, identifier):
|
||||
return '"{}"'.format(identifier)
|
||||
|
||||
def list_schemas(self, database: str) -> List[str]:
|
||||
results = self.execute_macro(
|
||||
LIST_SCHEMAS_MACRO_NAME,
|
||||
kwargs={'database': database}
|
||||
)
|
||||
|
||||
return [row[0] for row in results]
|
||||
|
||||
def check_schema_exists(self, database: str, schema: str) -> bool:
|
||||
information_schema = self.Relation.create(
|
||||
database=database,
|
||||
schema=schema,
|
||||
identifier='INFORMATION_SCHEMA',
|
||||
quote_policy=self.config.quoting
|
||||
).information_schema()
|
||||
|
||||
kwargs = {'information_schema': information_schema, 'schema': schema}
|
||||
results = self.execute_macro(
|
||||
CHECK_SCHEMA_EXISTS_MACRO_NAME,
|
||||
kwargs=kwargs
|
||||
)
|
||||
return results[0][0] > 0
|
||||
393
core/dbt/clients/_jinja_blocks.py
Normal file
393
core/dbt/clients/_jinja_blocks.py
Normal file
@@ -0,0 +1,393 @@
|
||||
import re
|
||||
from collections import namedtuple
|
||||
|
||||
import dbt.exceptions
|
||||
|
||||
|
||||
def regex(pat):
|
||||
return re.compile(pat, re.DOTALL | re.MULTILINE)
|
||||
|
||||
|
||||
class BlockData:
|
||||
"""raw plaintext data from the top level of the file."""
|
||||
def __init__(self, contents):
|
||||
self.block_type_name = '__dbt__data'
|
||||
self.contents = contents
|
||||
self.full_block = contents
|
||||
|
||||
|
||||
class BlockTag:
|
||||
def __init__(self, block_type_name, block_name, contents=None,
|
||||
full_block=None, **kw):
|
||||
self.block_type_name = block_type_name
|
||||
self.block_name = block_name
|
||||
self.contents = contents
|
||||
self.full_block = full_block
|
||||
|
||||
def __str__(self):
|
||||
return 'BlockTag({!r}, {!r})'.format(self.block_type_name,
|
||||
self.block_name)
|
||||
|
||||
def __repr__(self):
|
||||
return str(self)
|
||||
|
||||
@property
|
||||
def end_block_type_name(self):
|
||||
return 'end{}'.format(self.block_type_name)
|
||||
|
||||
def end_pat(self):
|
||||
# we don't want to use string formatting here because jinja uses most
|
||||
# of the string formatting operators in its syntax...
|
||||
pattern = ''.join((
|
||||
r'(?P<endblock>((?:\s*\{\%\-|\{\%)\s*',
|
||||
self.end_block_type_name,
|
||||
r'\s*(?:\-\%\}\s*|\%\})))',
|
||||
))
|
||||
return regex(pattern)
|
||||
|
||||
|
||||
Tag = namedtuple('Tag', 'block_type_name block_name start end')
|
||||
|
||||
|
||||
_NAME_PATTERN = r'[A-Za-z_][A-Za-z_0-9]*'
|
||||
|
||||
COMMENT_START_PATTERN = regex(r'(?:(?P<comment_start>(\s*\{\#)))')
|
||||
COMMENT_END_PATTERN = regex(r'(.*?)(\s*\#\})')
|
||||
RAW_START_PATTERN = regex(
|
||||
r'(?:\s*\{\%\-|\{\%)\s*(?P<raw_start>(raw))\s*(?:\-\%\}\s*|\%\})'
|
||||
)
|
||||
EXPR_START_PATTERN = regex(r'(?P<expr_start>(\{\{\s*))')
|
||||
EXPR_END_PATTERN = regex(r'(?P<expr_end>(\s*\}\}))')
|
||||
|
||||
BLOCK_START_PATTERN = regex(''.join((
|
||||
r'(?:\s*\{\%\-|\{\%)\s*',
|
||||
r'(?P<block_type_name>({}))'.format(_NAME_PATTERN),
|
||||
# some blocks have a 'block name'.
|
||||
r'(?:\s+(?P<block_name>({})))?'.format(_NAME_PATTERN),
|
||||
)))
|
||||
|
||||
|
||||
RAW_BLOCK_PATTERN = regex(''.join((
|
||||
r'(?:\s*\{\%\-|\{\%)\s*raw\s*(?:\-\%\}\s*|\%\})',
|
||||
r'(?:.*?)',
|
||||
r'(?:\s*\{\%\-|\{\%)\s*endraw\s*(?:\-\%\}\s*|\%\})',
|
||||
)))
|
||||
|
||||
TAG_CLOSE_PATTERN = regex(r'(?:(?P<tag_close>(\-\%\}\s*|\%\})))')
|
||||
|
||||
# stolen from jinja's lexer. Note that we've consumed all prefix whitespace by
|
||||
# the time we want to use this.
|
||||
STRING_PATTERN = regex(
|
||||
r"(?P<string>('([^'\\]*(?:\\.[^'\\]*)*)'|"
|
||||
r'"([^"\\]*(?:\\.[^"\\]*)*)"))'
|
||||
)
|
||||
|
||||
QUOTE_START_PATTERN = regex(r'''(?P<quote>(['"]))''')
|
||||
|
||||
|
||||
class TagIterator:
|
||||
def __init__(self, data):
|
||||
self.data = data
|
||||
self.blocks = []
|
||||
self._parenthesis_stack = []
|
||||
self.pos = 0
|
||||
|
||||
def linepos(self, end=None) -> str:
|
||||
"""Given an absolute position in the input data, return a pair of
|
||||
line number + relative position to the start of the line.
|
||||
"""
|
||||
end_val: int = self.pos if end is None else end
|
||||
data = self.data[:end_val]
|
||||
# if not found, rfind returns -1, and -1+1=0, which is perfect!
|
||||
last_line_start = data.rfind('\n') + 1
|
||||
# it's easy to forget this, but line numbers are 1-indexed
|
||||
line_number = data.count('\n') + 1
|
||||
return f'{line_number}:{end_val - last_line_start}'
|
||||
|
||||
def advance(self, new_position):
|
||||
self.pos = new_position
|
||||
|
||||
def rewind(self, amount=1):
|
||||
self.pos -= amount
|
||||
|
||||
def _search(self, pattern):
|
||||
return pattern.search(self.data, self.pos)
|
||||
|
||||
def _match(self, pattern):
|
||||
return pattern.match(self.data, self.pos)
|
||||
|
||||
def _first_match(self, *patterns, **kwargs):
|
||||
matches = []
|
||||
for pattern in patterns:
|
||||
# default to 'search', but sometimes we want to 'match'.
|
||||
if kwargs.get('method', 'search') == 'search':
|
||||
match = self._search(pattern)
|
||||
else:
|
||||
match = self._match(pattern)
|
||||
if match:
|
||||
matches.append(match)
|
||||
if not matches:
|
||||
return None
|
||||
# if there are multiple matches, pick the least greedy match
|
||||
# TODO: do I need to account for m.start(), or is this ok?
|
||||
return min(matches, key=lambda m: m.end())
|
||||
|
||||
def _expect_match(self, expected_name, *patterns, **kwargs):
|
||||
match = self._first_match(*patterns, **kwargs)
|
||||
if match is None:
|
||||
msg = 'unexpected EOF, expected {}, got "{}"'.format(
|
||||
expected_name, self.data[self.pos:]
|
||||
)
|
||||
dbt.exceptions.raise_compiler_error(msg)
|
||||
return match
|
||||
|
||||
def handle_expr(self, match):
|
||||
"""Handle an expression. At this point we're at a string like:
|
||||
{{ 1 + 2 }}
|
||||
^ right here
|
||||
|
||||
And the match contains "{{ "
|
||||
|
||||
We expect to find a `}}`, but we might find one in a string before
|
||||
that. Imagine the case of `{{ 2 * "}}" }}`...
|
||||
|
||||
You're not allowed to have blocks or comments inside an expr so it is
|
||||
pretty straightforward, I hope: only strings can get in the way.
|
||||
"""
|
||||
self.advance(match.end())
|
||||
while True:
|
||||
match = self._expect_match('}}',
|
||||
EXPR_END_PATTERN,
|
||||
QUOTE_START_PATTERN)
|
||||
if match.groupdict().get('expr_end') is not None:
|
||||
break
|
||||
else:
|
||||
# it's a quote. we haven't advanced for this match yet, so
|
||||
# just slurp up the whole string, no need to rewind.
|
||||
match = self._expect_match('string', STRING_PATTERN)
|
||||
self.advance(match.end())
|
||||
|
||||
self.advance(match.end())
|
||||
|
||||
def handle_comment(self, match):
|
||||
self.advance(match.end())
|
||||
match = self._expect_match('#}', COMMENT_END_PATTERN)
|
||||
self.advance(match.end())
|
||||
|
||||
def _expect_block_close(self):
|
||||
"""Search for the tag close marker.
|
||||
To the right of the type name, there are a few possiblities:
|
||||
- a name (handled by the regex's 'block_name')
|
||||
- any number of: `=`, `(`, `)`, strings, etc (arguments)
|
||||
- nothing
|
||||
|
||||
followed eventually by a %}
|
||||
|
||||
So the only characters we actually have to worry about in this context
|
||||
are quote and `%}` - nothing else can hide the %} and be valid jinja.
|
||||
"""
|
||||
while True:
|
||||
end_match = self._expect_match(
|
||||
'tag close ("%}")',
|
||||
QUOTE_START_PATTERN,
|
||||
TAG_CLOSE_PATTERN
|
||||
)
|
||||
self.advance(end_match.end())
|
||||
if end_match.groupdict().get('tag_close') is not None:
|
||||
return
|
||||
# must be a string. Rewind to its start and advance past it.
|
||||
self.rewind()
|
||||
string_match = self._expect_match('string', STRING_PATTERN)
|
||||
self.advance(string_match.end())
|
||||
|
||||
def handle_raw(self):
|
||||
# raw blocks are super special, they are a single complete regex
|
||||
match = self._expect_match('{% raw %}...{% endraw %}',
|
||||
RAW_BLOCK_PATTERN)
|
||||
self.advance(match.end())
|
||||
return match.end()
|
||||
|
||||
def handle_tag(self, match):
|
||||
"""The tag could be one of a few things:
|
||||
|
||||
{% mytag %}
|
||||
{% mytag x = y %}
|
||||
{% mytag x = "y" %}
|
||||
{% mytag x.y() %}
|
||||
{% mytag foo("a", "b", c="d") %}
|
||||
|
||||
But the key here is that it's always going to be `{% mytag`!
|
||||
"""
|
||||
groups = match.groupdict()
|
||||
# always a value
|
||||
block_type_name = groups['block_type_name']
|
||||
# might be None
|
||||
block_name = groups.get('block_name')
|
||||
start_pos = self.pos
|
||||
if block_type_name == 'raw':
|
||||
match = self._expect_match('{% raw %}...{% endraw %}',
|
||||
RAW_BLOCK_PATTERN)
|
||||
self.advance(match.end())
|
||||
else:
|
||||
self.advance(match.end())
|
||||
self._expect_block_close()
|
||||
return Tag(
|
||||
block_type_name=block_type_name,
|
||||
block_name=block_name,
|
||||
start=start_pos,
|
||||
end=self.pos
|
||||
)
|
||||
|
||||
def find_tags(self):
|
||||
while True:
|
||||
match = self._first_match(
|
||||
BLOCK_START_PATTERN,
|
||||
COMMENT_START_PATTERN,
|
||||
EXPR_START_PATTERN
|
||||
)
|
||||
if match is None:
|
||||
break
|
||||
|
||||
self.advance(match.start())
|
||||
# start = self.pos
|
||||
|
||||
groups = match.groupdict()
|
||||
comment_start = groups.get('comment_start')
|
||||
expr_start = groups.get('expr_start')
|
||||
block_type_name = groups.get('block_type_name')
|
||||
|
||||
if comment_start is not None:
|
||||
self.handle_comment(match)
|
||||
elif expr_start is not None:
|
||||
self.handle_expr(match)
|
||||
elif block_type_name is not None:
|
||||
yield self.handle_tag(match)
|
||||
else:
|
||||
raise dbt.exceptions.InternalException(
|
||||
'Invalid regex match in next_block, expected block start, '
|
||||
'expr start, or comment start'
|
||||
)
|
||||
|
||||
def __iter__(self):
|
||||
return self.find_tags()
|
||||
|
||||
|
||||
duplicate_tags = (
|
||||
'Got nested tags: {outer.block_type_name} (started at {outer.start}) did '
|
||||
'not have a matching {{% end{outer.block_type_name} %}} before a '
|
||||
'subsequent {inner.block_type_name} was found (started at {inner.start})'
|
||||
)
|
||||
|
||||
|
||||
_CONTROL_FLOW_TAGS = {
|
||||
'if': 'endif',
|
||||
'for': 'endfor',
|
||||
}
|
||||
|
||||
_CONTROL_FLOW_END_TAGS = {
|
||||
v: k
|
||||
for k, v in _CONTROL_FLOW_TAGS.items()
|
||||
}
|
||||
|
||||
|
||||
class BlockIterator:
|
||||
def __init__(self, data):
|
||||
self.tag_parser = TagIterator(data)
|
||||
self.current = None
|
||||
self.stack = []
|
||||
self.last_position = 0
|
||||
|
||||
@property
|
||||
def current_end(self):
|
||||
if self.current is None:
|
||||
return 0
|
||||
else:
|
||||
return self.current.end
|
||||
|
||||
@property
|
||||
def data(self):
|
||||
return self.tag_parser.data
|
||||
|
||||
def is_current_end(self, tag):
|
||||
return (
|
||||
tag.block_type_name.startswith('end') and
|
||||
self.current is not None and
|
||||
tag.block_type_name[3:] == self.current.block_type_name
|
||||
)
|
||||
|
||||
def find_blocks(self, allowed_blocks=None, collect_raw_data=True):
|
||||
"""Find all top-level blocks in the data."""
|
||||
if allowed_blocks is None:
|
||||
allowed_blocks = {'snapshot', 'macro', 'materialization', 'docs'}
|
||||
|
||||
for tag in self.tag_parser.find_tags():
|
||||
if tag.block_type_name in _CONTROL_FLOW_TAGS:
|
||||
self.stack.append(tag.block_type_name)
|
||||
elif tag.block_type_name in _CONTROL_FLOW_END_TAGS:
|
||||
found = None
|
||||
if self.stack:
|
||||
found = self.stack.pop()
|
||||
else:
|
||||
expected = _CONTROL_FLOW_END_TAGS[tag.block_type_name]
|
||||
dbt.exceptions.raise_compiler_error((
|
||||
'Got an unexpected control flow end tag, got {} but '
|
||||
'never saw a preceeding {} (@ {})'
|
||||
).format(
|
||||
tag.block_type_name,
|
||||
expected,
|
||||
self.tag_parser.linepos(tag.start)
|
||||
))
|
||||
expected = _CONTROL_FLOW_TAGS[found]
|
||||
if expected != tag.block_type_name:
|
||||
dbt.exceptions.raise_compiler_error((
|
||||
'Got an unexpected control flow end tag, got {} but '
|
||||
'expected {} next (@ {})'
|
||||
).format(
|
||||
tag.block_type_name,
|
||||
expected,
|
||||
self.tag_parser.linepos(tag.start)
|
||||
))
|
||||
|
||||
if tag.block_type_name in allowed_blocks:
|
||||
if self.stack:
|
||||
dbt.exceptions.raise_compiler_error((
|
||||
'Got a block definition inside control flow at {}. '
|
||||
'All dbt block definitions must be at the top level'
|
||||
).format(self.tag_parser.linepos(tag.start)))
|
||||
if self.current is not None:
|
||||
dbt.exceptions.raise_compiler_error(
|
||||
duplicate_tags.format(outer=self.current, inner=tag)
|
||||
)
|
||||
if collect_raw_data:
|
||||
raw_data = self.data[self.last_position:tag.start]
|
||||
self.last_position = tag.start
|
||||
if raw_data:
|
||||
yield BlockData(raw_data)
|
||||
self.current = tag
|
||||
|
||||
elif self.is_current_end(tag):
|
||||
self.last_position = tag.end
|
||||
assert self.current is not None
|
||||
yield BlockTag(
|
||||
block_type_name=self.current.block_type_name,
|
||||
block_name=self.current.block_name,
|
||||
contents=self.data[self.current.end:tag.start],
|
||||
full_block=self.data[self.current.start:tag.end]
|
||||
)
|
||||
self.current = None
|
||||
|
||||
if self.current:
|
||||
linecount = self.data[:self.current.end].count('\n') + 1
|
||||
dbt.exceptions.raise_compiler_error((
|
||||
'Reached EOF without finding a close tag for '
|
||||
'{} (searched from line {})'
|
||||
).format(self.current.block_type_name, linecount))
|
||||
|
||||
if collect_raw_data:
|
||||
raw_data = self.data[self.last_position:]
|
||||
if raw_data:
|
||||
yield BlockData(raw_data)
|
||||
|
||||
def lex_for_blocks(self, allowed_blocks=None, collect_raw_data=True):
|
||||
return list(self.find_blocks(allowed_blocks=allowed_blocks,
|
||||
collect_raw_data=collect_raw_data))
|
||||
203
core/dbt/clients/agate_helper.py
Normal file
203
core/dbt/clients/agate_helper.py
Normal file
@@ -0,0 +1,203 @@
|
||||
from codecs import BOM_UTF8
|
||||
|
||||
import agate
|
||||
import datetime
|
||||
import isodate
|
||||
import json
|
||||
import dbt.utils
|
||||
from typing import Iterable, List, Dict, Union, Optional, Any
|
||||
|
||||
from dbt.exceptions import RuntimeException
|
||||
|
||||
|
||||
BOM = BOM_UTF8.decode('utf-8') # '\ufeff'
|
||||
|
||||
|
||||
class ISODateTime(agate.data_types.DateTime):
|
||||
def cast(self, d):
|
||||
# this is agate.data_types.DateTime.cast with the "clever" bits removed
|
||||
# so we only handle ISO8601 stuff
|
||||
if isinstance(d, datetime.datetime) or d is None:
|
||||
return d
|
||||
elif isinstance(d, datetime.date):
|
||||
return datetime.datetime.combine(d, datetime.time(0, 0, 0))
|
||||
elif isinstance(d, str):
|
||||
d = d.strip()
|
||||
if d.lower() in self.null_values:
|
||||
return None
|
||||
try:
|
||||
return isodate.parse_datetime(d)
|
||||
except: # noqa
|
||||
pass
|
||||
|
||||
raise agate.exceptions.CastError(
|
||||
'Can not parse value "%s" as datetime.' % d
|
||||
)
|
||||
|
||||
|
||||
def build_type_tester(text_columns: Iterable[str]) -> agate.TypeTester:
|
||||
types = [
|
||||
agate.data_types.Number(null_values=('null', '')),
|
||||
agate.data_types.Date(null_values=('null', ''),
|
||||
date_format='%Y-%m-%d'),
|
||||
agate.data_types.DateTime(null_values=('null', ''),
|
||||
datetime_format='%Y-%m-%d %H:%M:%S'),
|
||||
ISODateTime(null_values=('null', '')),
|
||||
agate.data_types.Boolean(true_values=('true',),
|
||||
false_values=('false',),
|
||||
null_values=('null', '')),
|
||||
agate.data_types.Text(null_values=('null', ''))
|
||||
]
|
||||
force = {
|
||||
k: agate.data_types.Text(null_values=('null', ''))
|
||||
for k in text_columns
|
||||
}
|
||||
return agate.TypeTester(force=force, types=types)
|
||||
|
||||
|
||||
DEFAULT_TYPE_TESTER = build_type_tester(())
|
||||
|
||||
|
||||
def table_from_rows(
|
||||
rows: List[Any],
|
||||
column_names: Iterable[str],
|
||||
text_only_columns: Optional[Iterable[str]] = None,
|
||||
) -> agate.Table:
|
||||
if text_only_columns is None:
|
||||
column_types = DEFAULT_TYPE_TESTER
|
||||
else:
|
||||
column_types = build_type_tester(text_only_columns)
|
||||
return agate.Table(rows, column_names, column_types=column_types)
|
||||
|
||||
|
||||
def table_from_data(data, column_names: Iterable[str]) -> agate.Table:
|
||||
"Convert list of dictionaries into an Agate table"
|
||||
|
||||
# The agate table is generated from a list of dicts, so the column order
|
||||
# from `data` is not preserved. We can use `select` to reorder the columns
|
||||
#
|
||||
# If there is no data, create an empty table with the specified columns
|
||||
|
||||
if len(data) == 0:
|
||||
return agate.Table([], column_names=column_names)
|
||||
else:
|
||||
table = agate.Table.from_object(data, column_types=DEFAULT_TYPE_TESTER)
|
||||
return table.select(column_names)
|
||||
|
||||
|
||||
def table_from_data_flat(data, column_names: Iterable[str]) -> agate.Table:
|
||||
"Convert list of dictionaries into an Agate table"
|
||||
|
||||
rows = []
|
||||
for _row in data:
|
||||
row = []
|
||||
for value in list(_row.values()):
|
||||
if isinstance(value, (dict, list, tuple)):
|
||||
row.append(json.dumps(value, cls=dbt.utils.JSONEncoder))
|
||||
else:
|
||||
row.append(value)
|
||||
rows.append(row)
|
||||
|
||||
return table_from_rows(rows=rows, column_names=column_names)
|
||||
|
||||
|
||||
def empty_table():
|
||||
"Returns an empty Agate table. To be used in place of None"
|
||||
|
||||
return agate.Table(rows=[])
|
||||
|
||||
|
||||
def as_matrix(table):
|
||||
"Return an agate table as a matrix of data sans columns"
|
||||
|
||||
return [r.values() for r in table.rows.values()]
|
||||
|
||||
|
||||
def from_csv(abspath, text_columns):
|
||||
type_tester = build_type_tester(text_columns=text_columns)
|
||||
with open(abspath, encoding='utf-8') as fp:
|
||||
if fp.read(1) != BOM:
|
||||
fp.seek(0)
|
||||
return agate.Table.from_csv(fp, column_types=type_tester)
|
||||
|
||||
|
||||
class _NullMarker:
|
||||
pass
|
||||
|
||||
|
||||
NullableAgateType = Union[agate.data_types.DataType, _NullMarker]
|
||||
|
||||
|
||||
class ColumnTypeBuilder(Dict[str, NullableAgateType]):
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
if key not in self:
|
||||
super().__setitem__(key, value)
|
||||
return
|
||||
|
||||
existing_type = self[key]
|
||||
if isinstance(existing_type, _NullMarker):
|
||||
# overwrite
|
||||
super().__setitem__(key, value)
|
||||
elif isinstance(value, _NullMarker):
|
||||
# use the existing value
|
||||
return
|
||||
elif not isinstance(value, type(existing_type)):
|
||||
# actual type mismatch!
|
||||
raise RuntimeException(
|
||||
f'Tables contain columns with the same names ({key}), '
|
||||
f'but different types ({value} vs {existing_type})'
|
||||
)
|
||||
|
||||
def finalize(self) -> Dict[str, agate.data_types.DataType]:
|
||||
result: Dict[str, agate.data_types.DataType] = {}
|
||||
for key, value in self.items():
|
||||
if isinstance(value, _NullMarker):
|
||||
# this is what agate would do.
|
||||
result[key] = agate.data_types.Number()
|
||||
else:
|
||||
result[key] = value
|
||||
return result
|
||||
|
||||
|
||||
def _merged_column_types(
|
||||
tables: List[agate.Table]
|
||||
) -> Dict[str, agate.data_types.DataType]:
|
||||
# this is a lot like agate.Table.merge, but with handling for all-null
|
||||
# rows being "any type".
|
||||
new_columns: ColumnTypeBuilder = ColumnTypeBuilder()
|
||||
for table in tables:
|
||||
for i in range(len(table.columns)):
|
||||
column_name: str = table.column_names[i]
|
||||
column_type: NullableAgateType = table.column_types[i]
|
||||
# avoid over-sensitive type inference
|
||||
if all(x is None for x in table.columns[column_name]):
|
||||
column_type = _NullMarker()
|
||||
new_columns[column_name] = column_type
|
||||
|
||||
return new_columns.finalize()
|
||||
|
||||
|
||||
def merge_tables(tables: List[agate.Table]) -> agate.Table:
|
||||
"""This is similar to agate.Table.merge, but it handles rows of all 'null'
|
||||
values more gracefully during merges.
|
||||
"""
|
||||
new_columns = _merged_column_types(tables)
|
||||
column_names = tuple(new_columns.keys())
|
||||
column_types = tuple(new_columns.values())
|
||||
|
||||
rows: List[agate.Row] = []
|
||||
for table in tables:
|
||||
if (
|
||||
table.column_names == column_names and
|
||||
table.column_types == column_types
|
||||
):
|
||||
rows.extend(table.rows)
|
||||
else:
|
||||
for row in table.rows:
|
||||
data = [row.get(name, None) for name in column_names]
|
||||
rows.append(agate.Row(data, column_names))
|
||||
# _is_fork to tell agate that we already made things into `Row`s.
|
||||
return agate.Table(rows, column_names, column_types, _is_fork=True)
|
||||
26
core/dbt/clients/gcloud.py
Normal file
26
core/dbt/clients/gcloud.py
Normal file
@@ -0,0 +1,26 @@
|
||||
from dbt.logger import GLOBAL_LOGGER as logger
|
||||
import dbt.exceptions
|
||||
from dbt.clients.system import run_cmd
|
||||
|
||||
NOT_INSTALLED_MSG = """
|
||||
dbt requires the gcloud SDK to be installed to authenticate with BigQuery.
|
||||
Please download and install the SDK, or use a Service Account instead.
|
||||
|
||||
https://cloud.google.com/sdk/
|
||||
"""
|
||||
|
||||
|
||||
def gcloud_installed():
|
||||
try:
|
||||
run_cmd('.', ['gcloud', '--version'])
|
||||
return True
|
||||
except OSError as e:
|
||||
logger.debug(e)
|
||||
return False
|
||||
|
||||
|
||||
def setup_default_credentials():
|
||||
if gcloud_installed():
|
||||
run_cmd('.', ["gcloud", "auth", "application-default", "login"])
|
||||
else:
|
||||
raise dbt.exceptions.RuntimeException(NOT_INSTALLED_MSG)
|
||||
110
core/dbt/clients/git.py
Normal file
110
core/dbt/clients/git.py
Normal file
@@ -0,0 +1,110 @@
|
||||
import re
|
||||
import os.path
|
||||
|
||||
from dbt.clients.system import run_cmd, rmdir
|
||||
from dbt.logger import GLOBAL_LOGGER as logger
|
||||
import dbt.exceptions
|
||||
|
||||
|
||||
def clone(repo, cwd, dirname=None, remove_git_dir=False, branch=None):
|
||||
clone_cmd = ['git', 'clone', '--depth', '1']
|
||||
|
||||
if branch is not None:
|
||||
clone_cmd.extend(['--branch', branch])
|
||||
|
||||
clone_cmd.append(repo)
|
||||
|
||||
if dirname is not None:
|
||||
clone_cmd.append(dirname)
|
||||
|
||||
result = run_cmd(cwd, clone_cmd, env={'LC_ALL': 'C'})
|
||||
|
||||
if remove_git_dir:
|
||||
rmdir(os.path.join(dirname, '.git'))
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def list_tags(cwd):
|
||||
out, err = run_cmd(cwd, ['git', 'tag', '--list'], env={'LC_ALL': 'C'})
|
||||
tags = out.decode('utf-8').strip().split("\n")
|
||||
return tags
|
||||
|
||||
|
||||
def _checkout(cwd, repo, branch):
|
||||
logger.debug(' Checking out branch {}.'.format(branch))
|
||||
|
||||
run_cmd(cwd, ['git', 'remote', 'set-branches', 'origin', branch])
|
||||
run_cmd(cwd, ['git', 'fetch', '--tags', '--depth', '1', 'origin', branch])
|
||||
|
||||
tags = list_tags(cwd)
|
||||
|
||||
# Prefer tags to branches if one exists
|
||||
if branch in tags:
|
||||
spec = 'tags/{}'.format(branch)
|
||||
else:
|
||||
spec = 'origin/{}'.format(branch)
|
||||
|
||||
out, err = run_cmd(cwd, ['git', 'reset', '--hard', spec],
|
||||
env={'LC_ALL': 'C'})
|
||||
return out, err
|
||||
|
||||
|
||||
def checkout(cwd, repo, branch=None):
|
||||
if branch is None:
|
||||
branch = 'master'
|
||||
try:
|
||||
return _checkout(cwd, repo, branch)
|
||||
except dbt.exceptions.CommandResultError as exc:
|
||||
stderr = exc.stderr.decode('utf-8').strip()
|
||||
dbt.exceptions.bad_package_spec(repo, branch, stderr)
|
||||
|
||||
|
||||
def get_current_sha(cwd):
|
||||
out, err = run_cmd(cwd, ['git', 'rev-parse', 'HEAD'], env={'LC_ALL': 'C'})
|
||||
|
||||
return out.decode('utf-8')
|
||||
|
||||
|
||||
def remove_remote(cwd):
|
||||
return run_cmd(cwd, ['git', 'remote', 'rm', 'origin'], env={'LC_ALL': 'C'})
|
||||
|
||||
|
||||
def clone_and_checkout(repo, cwd, dirname=None, remove_git_dir=False,
|
||||
branch=None):
|
||||
exists = None
|
||||
try:
|
||||
_, err = clone(repo, cwd, dirname=dirname,
|
||||
remove_git_dir=remove_git_dir)
|
||||
except dbt.exceptions.CommandResultError as exc:
|
||||
err = exc.stderr.decode('utf-8')
|
||||
exists = re.match("fatal: destination path '(.+)' already exists", err)
|
||||
if not exists: # something else is wrong, raise it
|
||||
raise
|
||||
|
||||
directory = None
|
||||
start_sha = None
|
||||
if exists:
|
||||
directory = exists.group(1)
|
||||
logger.debug('Updating existing dependency {}.', directory)
|
||||
else:
|
||||
matches = re.match("Cloning into '(.+)'", err.decode('utf-8'))
|
||||
if matches is None:
|
||||
raise dbt.exceptions.RuntimeException(
|
||||
f'Error cloning {repo} - never saw "Cloning into ..." from git'
|
||||
)
|
||||
directory = matches.group(1)
|
||||
logger.debug('Pulling new dependency {}.', directory)
|
||||
full_path = os.path.join(cwd, directory)
|
||||
start_sha = get_current_sha(full_path)
|
||||
checkout(full_path, repo, branch)
|
||||
end_sha = get_current_sha(full_path)
|
||||
if exists:
|
||||
if start_sha == end_sha:
|
||||
logger.debug(' Already at {}, nothing to do.', start_sha[:7])
|
||||
else:
|
||||
logger.debug(' Updated checkout from {} to {}.',
|
||||
start_sha[:7], end_sha[:7])
|
||||
else:
|
||||
logger.debug(' Checked out at {}.', end_sha[:7])
|
||||
return directory
|
||||
644
core/dbt/clients/jinja.py
Normal file
644
core/dbt/clients/jinja.py
Normal file
@@ -0,0 +1,644 @@
|
||||
import codecs
|
||||
import linecache
|
||||
import os
|
||||
import re
|
||||
import tempfile
|
||||
import threading
|
||||
from ast import literal_eval
|
||||
from contextlib import contextmanager
|
||||
from itertools import chain, islice
|
||||
from typing import (
|
||||
List, Union, Set, Optional, Dict, Any, Iterator, Type, NoReturn, Tuple,
|
||||
Callable
|
||||
)
|
||||
|
||||
import jinja2
|
||||
import jinja2.ext
|
||||
import jinja2.nativetypes # type: ignore
|
||||
import jinja2.nodes
|
||||
import jinja2.parser
|
||||
import jinja2.sandbox
|
||||
|
||||
from dbt.utils import (
|
||||
get_dbt_macro_name, get_docs_macro_name, get_materialization_macro_name,
|
||||
deep_map
|
||||
)
|
||||
|
||||
from dbt.clients._jinja_blocks import BlockIterator, BlockData, BlockTag
|
||||
from dbt.contracts.graph.compiled import CompiledSchemaTestNode
|
||||
from dbt.contracts.graph.parsed import ParsedSchemaTestNode
|
||||
from dbt.exceptions import (
|
||||
InternalException, raise_compiler_error, CompilationException,
|
||||
invalid_materialization_argument, MacroReturn, JinjaRenderingException
|
||||
)
|
||||
from dbt import flags
|
||||
from dbt.logger import GLOBAL_LOGGER as logger # noqa
|
||||
|
||||
|
||||
def _linecache_inject(source, write):
|
||||
if write:
|
||||
# this is the only reliable way to accomplish this. Obviously, it's
|
||||
# really darn noisy and will fill your temporary directory
|
||||
tmp_file = tempfile.NamedTemporaryFile(
|
||||
prefix='dbt-macro-compiled-',
|
||||
suffix='.py',
|
||||
delete=False,
|
||||
mode='w+',
|
||||
encoding='utf-8',
|
||||
)
|
||||
tmp_file.write(source)
|
||||
filename = tmp_file.name
|
||||
else:
|
||||
# `codecs.encode` actually takes a `bytes` as the first argument if
|
||||
# the second argument is 'hex' - mypy does not know this.
|
||||
rnd = codecs.encode(os.urandom(12), 'hex') # type: ignore
|
||||
filename = rnd.decode('ascii')
|
||||
|
||||
# put ourselves in the cache
|
||||
cache_entry = (
|
||||
len(source),
|
||||
None,
|
||||
[line + '\n' for line in source.splitlines()],
|
||||
filename
|
||||
)
|
||||
# linecache does in fact have an attribute `cache`, thanks
|
||||
linecache.cache[filename] = cache_entry # type: ignore
|
||||
return filename
|
||||
|
||||
|
||||
class MacroFuzzParser(jinja2.parser.Parser):
|
||||
def parse_macro(self):
|
||||
node = jinja2.nodes.Macro(lineno=next(self.stream).lineno)
|
||||
|
||||
# modified to fuzz macros defined in the same file. this way
|
||||
# dbt can understand the stack of macros being called.
|
||||
# - @cmcarthur
|
||||
node.name = get_dbt_macro_name(
|
||||
self.parse_assign_target(name_only=True).name)
|
||||
|
||||
self.parse_signature(node)
|
||||
node.body = self.parse_statements(('name:endmacro',),
|
||||
drop_needle=True)
|
||||
return node
|
||||
|
||||
|
||||
class MacroFuzzEnvironment(jinja2.sandbox.SandboxedEnvironment):
|
||||
def _parse(self, source, name, filename):
|
||||
return MacroFuzzParser(self, source, name, filename).parse()
|
||||
|
||||
def _compile(self, source, filename):
|
||||
"""Override jinja's compilation to stash the rendered source inside
|
||||
the python linecache for debugging when the appropriate environment
|
||||
variable is set.
|
||||
|
||||
If the value is 'write', also write the files to disk.
|
||||
WARNING: This can write a ton of data if you aren't careful.
|
||||
"""
|
||||
if filename == '<template>' and flags.MACRO_DEBUGGING:
|
||||
write = flags.MACRO_DEBUGGING == 'write'
|
||||
filename = _linecache_inject(source, write)
|
||||
|
||||
return super()._compile(source, filename) # type: ignore
|
||||
|
||||
|
||||
class NativeSandboxEnvironment(MacroFuzzEnvironment):
|
||||
code_generator_class = jinja2.nativetypes.NativeCodeGenerator
|
||||
|
||||
|
||||
class TextMarker(str):
|
||||
"""A special native-env marker that indicates that a value is text and is
|
||||
not to be evaluated. Use this to prevent your numbery-strings from becoming
|
||||
numbers!
|
||||
"""
|
||||
|
||||
|
||||
class NativeMarker(str):
|
||||
"""A special native-env marker that indicates the field should be passed to
|
||||
literal_eval.
|
||||
"""
|
||||
|
||||
|
||||
class BoolMarker(NativeMarker):
|
||||
pass
|
||||
|
||||
|
||||
class NumberMarker(NativeMarker):
|
||||
pass
|
||||
|
||||
|
||||
def _is_number(value) -> bool:
|
||||
return isinstance(value, (int, float)) and not isinstance(value, bool)
|
||||
|
||||
|
||||
def quoted_native_concat(nodes):
|
||||
"""This is almost native_concat from the NativeTemplate, except in the
|
||||
special case of a single argument that is a quoted string and returns a
|
||||
string, the quotes are re-inserted.
|
||||
"""
|
||||
head = list(islice(nodes, 2))
|
||||
|
||||
if not head:
|
||||
return ''
|
||||
|
||||
if len(head) == 1:
|
||||
raw = head[0]
|
||||
if isinstance(raw, TextMarker):
|
||||
return str(raw)
|
||||
elif not isinstance(raw, NativeMarker):
|
||||
# return non-strings as-is
|
||||
return raw
|
||||
else:
|
||||
# multiple nodes become a string.
|
||||
return "".join([str(v) for v in chain(head, nodes)])
|
||||
|
||||
try:
|
||||
result = literal_eval(raw)
|
||||
except (ValueError, SyntaxError, MemoryError):
|
||||
result = raw
|
||||
if isinstance(raw, BoolMarker) and not isinstance(result, bool):
|
||||
raise JinjaRenderingException(
|
||||
f"Could not convert value '{raw!s}' into type 'bool'"
|
||||
)
|
||||
if isinstance(raw, NumberMarker) and not _is_number(result):
|
||||
raise JinjaRenderingException(
|
||||
f"Could not convert value '{raw!s}' into type 'number'"
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
class NativeSandboxTemplate(jinja2.nativetypes.NativeTemplate): # mypy: ignore
|
||||
environment_class = NativeSandboxEnvironment
|
||||
|
||||
def render(self, *args, **kwargs):
|
||||
"""Render the template to produce a native Python type. If the
|
||||
result is a single node, its value is returned. Otherwise, the
|
||||
nodes are concatenated as strings. If the result can be parsed
|
||||
with :func:`ast.literal_eval`, the parsed value is returned.
|
||||
Otherwise, the string is returned.
|
||||
"""
|
||||
vars = dict(*args, **kwargs)
|
||||
|
||||
try:
|
||||
return quoted_native_concat(
|
||||
self.root_render_func(self.new_context(vars))
|
||||
)
|
||||
except Exception:
|
||||
return self.environment.handle_exception()
|
||||
|
||||
|
||||
NativeSandboxEnvironment.template_class = NativeSandboxTemplate # type: ignore
|
||||
|
||||
|
||||
class TemplateCache:
|
||||
def __init__(self):
|
||||
self.file_cache: Dict[str, jinja2.Template] = {}
|
||||
|
||||
def get_node_template(self, node) -> jinja2.Template:
|
||||
key = node.macro_sql
|
||||
|
||||
if key in self.file_cache:
|
||||
return self.file_cache[key]
|
||||
|
||||
template = get_template(
|
||||
string=node.macro_sql,
|
||||
ctx={},
|
||||
node=node,
|
||||
)
|
||||
|
||||
self.file_cache[key] = template
|
||||
return template
|
||||
|
||||
def clear(self):
|
||||
self.file_cache.clear()
|
||||
|
||||
|
||||
template_cache = TemplateCache()
|
||||
|
||||
|
||||
class BaseMacroGenerator:
|
||||
def __init__(self, context: Optional[Dict[str, Any]] = None) -> None:
|
||||
self.context: Optional[Dict[str, Any]] = context
|
||||
|
||||
def get_template(self):
|
||||
raise NotImplementedError('get_template not implemented!')
|
||||
|
||||
def get_name(self) -> str:
|
||||
raise NotImplementedError('get_name not implemented!')
|
||||
|
||||
def get_macro(self):
|
||||
name = self.get_name()
|
||||
template = self.get_template()
|
||||
# make the module. previously we set both vars and local, but that's
|
||||
# redundant: They both end up in the same place
|
||||
module = template.make_module(vars=self.context, shared=False)
|
||||
macro = module.__dict__[get_dbt_macro_name(name)]
|
||||
module.__dict__.update(self.context)
|
||||
return macro
|
||||
|
||||
@contextmanager
|
||||
def exception_handler(self) -> Iterator[None]:
|
||||
try:
|
||||
yield
|
||||
except (TypeError, jinja2.exceptions.TemplateRuntimeError) as e:
|
||||
raise_compiler_error(str(e))
|
||||
|
||||
def call_macro(self, *args, **kwargs):
|
||||
if self.context is None:
|
||||
raise InternalException(
|
||||
'Context is still None in call_macro!'
|
||||
)
|
||||
assert self.context is not None
|
||||
|
||||
macro = self.get_macro()
|
||||
|
||||
with self.exception_handler():
|
||||
try:
|
||||
return macro(*args, **kwargs)
|
||||
except MacroReturn as e:
|
||||
return e.value
|
||||
|
||||
|
||||
class MacroStack(threading.local):
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self.call_stack = []
|
||||
|
||||
@property
|
||||
def depth(self) -> int:
|
||||
return len(self.call_stack)
|
||||
|
||||
def push(self, name):
|
||||
self.call_stack.append(name)
|
||||
|
||||
def pop(self, name):
|
||||
got = self.call_stack.pop()
|
||||
if got != name:
|
||||
raise InternalException(f'popped {got}, expected {name}')
|
||||
|
||||
|
||||
class MacroGenerator(BaseMacroGenerator):
|
||||
def __init__(
|
||||
self,
|
||||
macro,
|
||||
context: Optional[Dict[str, Any]] = None,
|
||||
node: Optional[Any] = None,
|
||||
stack: Optional[MacroStack] = None
|
||||
) -> None:
|
||||
super().__init__(context)
|
||||
self.macro = macro
|
||||
self.node = node
|
||||
self.stack = stack
|
||||
|
||||
def get_template(self):
|
||||
return template_cache.get_node_template(self.macro)
|
||||
|
||||
def get_name(self) -> str:
|
||||
return self.macro.name
|
||||
|
||||
@contextmanager
|
||||
def exception_handler(self) -> Iterator[None]:
|
||||
try:
|
||||
yield
|
||||
except (TypeError, jinja2.exceptions.TemplateRuntimeError) as e:
|
||||
raise_compiler_error(str(e), self.macro)
|
||||
except CompilationException as e:
|
||||
e.stack.append(self.macro)
|
||||
raise e
|
||||
|
||||
@contextmanager
|
||||
def track_call(self):
|
||||
if self.stack is None or self.node is None:
|
||||
yield
|
||||
else:
|
||||
unique_id = self.macro.unique_id
|
||||
depth = self.stack.depth
|
||||
# only mark depth=0 as a dependency
|
||||
if depth == 0:
|
||||
self.node.depends_on.add_macro(unique_id)
|
||||
self.stack.push(unique_id)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
self.stack.pop(unique_id)
|
||||
|
||||
def __call__(self, *args, **kwargs):
|
||||
with self.track_call():
|
||||
return self.call_macro(*args, **kwargs)
|
||||
|
||||
|
||||
class QueryStringGenerator(BaseMacroGenerator):
|
||||
def __init__(
|
||||
self, template_str: str, context: Dict[str, Any]
|
||||
) -> None:
|
||||
super().__init__(context)
|
||||
self.template_str: str = template_str
|
||||
env = get_environment()
|
||||
self.template = env.from_string(
|
||||
self.template_str,
|
||||
globals=self.context,
|
||||
)
|
||||
|
||||
def get_name(self) -> str:
|
||||
return 'query_comment_macro'
|
||||
|
||||
def get_template(self):
|
||||
"""Don't use the template cache, we don't have a node"""
|
||||
return self.template
|
||||
|
||||
def __call__(self, connection_name: str, node) -> str:
|
||||
return str(self.call_macro(connection_name, node))
|
||||
|
||||
|
||||
class MaterializationExtension(jinja2.ext.Extension):
|
||||
tags = ['materialization']
|
||||
|
||||
def parse(self, parser):
|
||||
node = jinja2.nodes.Macro(lineno=next(parser.stream).lineno)
|
||||
materialization_name = \
|
||||
parser.parse_assign_target(name_only=True).name
|
||||
|
||||
adapter_name = 'default'
|
||||
node.args = []
|
||||
node.defaults = []
|
||||
|
||||
while parser.stream.skip_if('comma'):
|
||||
target = parser.parse_assign_target(name_only=True)
|
||||
|
||||
if target.name == 'default':
|
||||
pass
|
||||
|
||||
elif target.name == 'adapter':
|
||||
parser.stream.expect('assign')
|
||||
value = parser.parse_expression()
|
||||
adapter_name = value.value
|
||||
|
||||
else:
|
||||
invalid_materialization_argument(
|
||||
materialization_name, target.name
|
||||
)
|
||||
|
||||
node.name = get_materialization_macro_name(
|
||||
materialization_name, adapter_name
|
||||
)
|
||||
|
||||
node.body = parser.parse_statements(('name:endmaterialization',),
|
||||
drop_needle=True)
|
||||
|
||||
return node
|
||||
|
||||
|
||||
class DocumentationExtension(jinja2.ext.Extension):
|
||||
tags = ['docs']
|
||||
|
||||
def parse(self, parser):
|
||||
node = jinja2.nodes.Macro(lineno=next(parser.stream).lineno)
|
||||
docs_name = parser.parse_assign_target(name_only=True).name
|
||||
|
||||
node.args = []
|
||||
node.defaults = []
|
||||
node.name = get_docs_macro_name(docs_name)
|
||||
node.body = parser.parse_statements(('name:enddocs',),
|
||||
drop_needle=True)
|
||||
return node
|
||||
|
||||
|
||||
def _is_dunder_name(name):
|
||||
return name.startswith('__') and name.endswith('__')
|
||||
|
||||
|
||||
def create_undefined(node=None):
|
||||
class Undefined(jinja2.Undefined):
|
||||
def __init__(self, hint=None, obj=None, name=None, exc=None):
|
||||
super().__init__(hint=hint, name=name)
|
||||
self.node = node
|
||||
self.name = name
|
||||
self.hint = hint
|
||||
# jinja uses these for safety, so we have to override them.
|
||||
# see https://github.com/pallets/jinja/blob/master/jinja2/sandbox.py#L332-L339 # noqa
|
||||
self.unsafe_callable = False
|
||||
self.alters_data = False
|
||||
|
||||
def __getitem__(self, name):
|
||||
# Propagate the undefined value if a caller accesses this as if it
|
||||
# were a dictionary
|
||||
return self
|
||||
|
||||
def __getattr__(self, name):
|
||||
if name == 'name' or _is_dunder_name(name):
|
||||
raise AttributeError(
|
||||
"'{}' object has no attribute '{}'"
|
||||
.format(type(self).__name__, name)
|
||||
)
|
||||
|
||||
self.name = name
|
||||
|
||||
return self.__class__(hint=self.hint, name=self.name)
|
||||
|
||||
def __call__(self, *args, **kwargs):
|
||||
return self
|
||||
|
||||
def __reduce__(self):
|
||||
raise_compiler_error(f'{self.name} is undefined', node=node)
|
||||
|
||||
return Undefined
|
||||
|
||||
|
||||
NATIVE_FILTERS: Dict[str, Callable[[Any], Any]] = {
|
||||
'as_text': TextMarker,
|
||||
'as_bool': BoolMarker,
|
||||
'as_native': NativeMarker,
|
||||
'as_number': NumberMarker,
|
||||
}
|
||||
|
||||
|
||||
TEXT_FILTERS: Dict[str, Callable[[Any], Any]] = {
|
||||
'as_text': lambda x: x,
|
||||
'as_bool': lambda x: x,
|
||||
'as_native': lambda x: x,
|
||||
'as_number': lambda x: x,
|
||||
}
|
||||
|
||||
|
||||
def get_environment(
|
||||
node=None,
|
||||
capture_macros: bool = False,
|
||||
native: bool = False,
|
||||
) -> jinja2.Environment:
|
||||
args: Dict[str, List[Union[str, Type[jinja2.ext.Extension]]]] = {
|
||||
'extensions': ['jinja2.ext.do']
|
||||
}
|
||||
|
||||
if capture_macros:
|
||||
args['undefined'] = create_undefined(node)
|
||||
|
||||
args['extensions'].append(MaterializationExtension)
|
||||
args['extensions'].append(DocumentationExtension)
|
||||
|
||||
env_cls: Type[jinja2.Environment]
|
||||
text_filter: Type
|
||||
if native:
|
||||
env_cls = NativeSandboxEnvironment
|
||||
filters = NATIVE_FILTERS
|
||||
else:
|
||||
env_cls = MacroFuzzEnvironment
|
||||
filters = TEXT_FILTERS
|
||||
|
||||
env = env_cls(**args)
|
||||
env.filters.update(filters)
|
||||
|
||||
return env
|
||||
|
||||
|
||||
@contextmanager
|
||||
def catch_jinja(node=None) -> Iterator[None]:
|
||||
try:
|
||||
yield
|
||||
except jinja2.exceptions.TemplateSyntaxError as e:
|
||||
e.translated = False
|
||||
raise CompilationException(str(e), node) from e
|
||||
except jinja2.exceptions.UndefinedError as e:
|
||||
raise CompilationException(str(e), node) from e
|
||||
except CompilationException as exc:
|
||||
exc.add_node(node)
|
||||
raise
|
||||
|
||||
|
||||
def parse(string):
|
||||
with catch_jinja():
|
||||
return get_environment().parse(str(string))
|
||||
|
||||
|
||||
def get_template(
|
||||
string: str,
|
||||
ctx: Dict[str, Any],
|
||||
node=None,
|
||||
capture_macros: bool = False,
|
||||
native: bool = False,
|
||||
):
|
||||
with catch_jinja(node):
|
||||
env = get_environment(node, capture_macros, native=native)
|
||||
|
||||
template_source = str(string)
|
||||
return env.from_string(template_source, globals=ctx)
|
||||
|
||||
|
||||
def render_template(template, ctx: Dict[str, Any], node=None) -> str:
|
||||
with catch_jinja(node):
|
||||
return template.render(ctx)
|
||||
|
||||
|
||||
def _requote_result(raw_value: str, rendered: str) -> str:
|
||||
double_quoted = raw_value.startswith('"') and raw_value.endswith('"')
|
||||
single_quoted = raw_value.startswith("'") and raw_value.endswith("'")
|
||||
if double_quoted:
|
||||
quote_char = '"'
|
||||
elif single_quoted:
|
||||
quote_char = "'"
|
||||
else:
|
||||
quote_char = ''
|
||||
return f'{quote_char}{rendered}{quote_char}'
|
||||
|
||||
|
||||
# performance note: Local benmcharking (so take it with a big grain of salt!)
|
||||
# on this indicates that it is is on average slightly slower than
|
||||
# checking two separate patterns, but the standard deviation is smaller with
|
||||
# one pattern. The time difference between the two was ~2 std deviations, which
|
||||
# is small enough that I've just chosen the more readable option.
|
||||
_HAS_RENDER_CHARS_PAT = re.compile(r'({[{%#]|[#}%]})')
|
||||
|
||||
|
||||
def get_rendered(
|
||||
string: str,
|
||||
ctx: Dict[str, Any],
|
||||
node=None,
|
||||
capture_macros: bool = False,
|
||||
native: bool = False,
|
||||
) -> str:
|
||||
# performance optimization: if there are no jinja control characters in the
|
||||
# string, we can just return the input. Fall back to jinja if the type is
|
||||
# not a string or if native rendering is enabled (so '1' -> 1, etc...)
|
||||
# If this is desirable in the native env as well, we could handle the
|
||||
# native=True case by passing the input string to ast.literal_eval, like
|
||||
# the native renderer does.
|
||||
if (
|
||||
not native and
|
||||
isinstance(string, str) and
|
||||
_HAS_RENDER_CHARS_PAT.search(string) is None
|
||||
):
|
||||
return string
|
||||
template = get_template(
|
||||
string,
|
||||
ctx,
|
||||
node,
|
||||
capture_macros=capture_macros,
|
||||
native=native,
|
||||
)
|
||||
return render_template(template, ctx, node)
|
||||
|
||||
|
||||
def undefined_error(msg) -> NoReturn:
|
||||
raise jinja2.exceptions.UndefinedError(msg)
|
||||
|
||||
|
||||
def extract_toplevel_blocks(
|
||||
data: str,
|
||||
allowed_blocks: Optional[Set[str]] = None,
|
||||
collect_raw_data: bool = True,
|
||||
) -> List[Union[BlockData, BlockTag]]:
|
||||
"""Extract the top level blocks with matching block types from a jinja
|
||||
file, with some special handling for block nesting.
|
||||
|
||||
:param data: The data to extract blocks from.
|
||||
:param allowed_blocks: The names of the blocks to extract from the file.
|
||||
They may not be nested within if/for blocks. If None, use the default
|
||||
values.
|
||||
:param collect_raw_data: If set, raw data between matched blocks will also
|
||||
be part of the results, as `BlockData` objects. They have a
|
||||
`block_type_name` field of `'__dbt_data'` and will never have a
|
||||
`block_name`.
|
||||
:return: A list of `BlockTag`s matching the allowed block types and (if
|
||||
`collect_raw_data` is `True`) `BlockData` objects.
|
||||
"""
|
||||
return BlockIterator(data).lex_for_blocks(
|
||||
allowed_blocks=allowed_blocks,
|
||||
collect_raw_data=collect_raw_data
|
||||
)
|
||||
|
||||
|
||||
SCHEMA_TEST_KWARGS_NAME = '_dbt_schema_test_kwargs'
|
||||
|
||||
|
||||
def add_rendered_test_kwargs(
|
||||
context: Dict[str, Any],
|
||||
node: Union[ParsedSchemaTestNode, CompiledSchemaTestNode],
|
||||
capture_macros: bool = False,
|
||||
) -> None:
|
||||
"""Render each of the test kwargs in the given context using the native
|
||||
renderer, then insert that value into the given context as the special test
|
||||
keyword arguments member.
|
||||
"""
|
||||
looks_like_func = r'^\s*(env_var|ref|var|source|doc)\s*\(.+\)\s*$'
|
||||
|
||||
def _convert_function(
|
||||
value: Any, keypath: Tuple[Union[str, int], ...]
|
||||
) -> Any:
|
||||
if isinstance(value, str):
|
||||
if keypath == ('column_name',):
|
||||
# special case: Don't render column names as native, make them
|
||||
# be strings
|
||||
return value
|
||||
|
||||
if re.match(looks_like_func, value) is not None:
|
||||
# curly braces to make rendering happy
|
||||
value = f'{{{{ {value} }}}}'
|
||||
|
||||
value = get_rendered(
|
||||
value, context, node, capture_macros=capture_macros,
|
||||
native=True
|
||||
)
|
||||
|
||||
return value
|
||||
|
||||
kwargs = deep_map(_convert_function, node.test_metadata.kwargs)
|
||||
context[SCHEMA_TEST_KWARGS_NAME] = kwargs
|
||||
74
core/dbt/clients/registry.py
Normal file
74
core/dbt/clients/registry.py
Normal file
@@ -0,0 +1,74 @@
|
||||
from functools import wraps
|
||||
import requests
|
||||
from dbt.exceptions import RegistryException
|
||||
from dbt.utils import memoized
|
||||
from dbt.logger import GLOBAL_LOGGER as logger
|
||||
import os
|
||||
import time
|
||||
|
||||
if os.getenv('DBT_PACKAGE_HUB_URL'):
|
||||
DEFAULT_REGISTRY_BASE_URL = os.getenv('DBT_PACKAGE_HUB_URL')
|
||||
else:
|
||||
DEFAULT_REGISTRY_BASE_URL = 'https://hub.getdbt.com/'
|
||||
|
||||
|
||||
def _get_url(url, registry_base_url=None):
|
||||
if registry_base_url is None:
|
||||
registry_base_url = DEFAULT_REGISTRY_BASE_URL
|
||||
|
||||
return '{}{}'.format(registry_base_url, url)
|
||||
|
||||
|
||||
def _wrap_exceptions(fn):
|
||||
@wraps(fn)
|
||||
def wrapper(*args, **kwargs):
|
||||
max_attempts = 5
|
||||
attempt = 0
|
||||
while True:
|
||||
attempt += 1
|
||||
try:
|
||||
return fn(*args, **kwargs)
|
||||
except requests.exceptions.ConnectionError as exc:
|
||||
if attempt < max_attempts:
|
||||
time.sleep(1)
|
||||
continue
|
||||
|
||||
raise RegistryException(
|
||||
'Unable to connect to registry hub'
|
||||
) from exc
|
||||
return wrapper
|
||||
|
||||
|
||||
@_wrap_exceptions
|
||||
def _get(path, registry_base_url=None):
|
||||
url = _get_url(path, registry_base_url)
|
||||
logger.debug('Making package registry request: GET {}'.format(url))
|
||||
resp = requests.get(url)
|
||||
logger.debug('Response from registry: GET {} {}'.format(url,
|
||||
resp.status_code))
|
||||
resp.raise_for_status()
|
||||
return resp.json()
|
||||
|
||||
|
||||
def index(registry_base_url=None):
|
||||
return _get('api/v1/index.json', registry_base_url)
|
||||
|
||||
|
||||
index_cached = memoized(index)
|
||||
|
||||
|
||||
def packages(registry_base_url=None):
|
||||
return _get('api/v1/packages.json', registry_base_url)
|
||||
|
||||
|
||||
def package(name, registry_base_url=None):
|
||||
return _get('api/v1/{}.json'.format(name), registry_base_url)
|
||||
|
||||
|
||||
def package_version(name, version, registry_base_url=None):
|
||||
return _get('api/v1/{}/{}.json'.format(name, version), registry_base_url)
|
||||
|
||||
|
||||
def get_available_versions(name):
|
||||
response = package(name)
|
||||
return list(response['versions'])
|
||||
541
core/dbt/clients/system.py
Normal file
541
core/dbt/clients/system.py
Normal file
@@ -0,0 +1,541 @@
|
||||
import errno
|
||||
import fnmatch
|
||||
import json
|
||||
import os
|
||||
import os.path
|
||||
import re
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import tarfile
|
||||
import requests
|
||||
import stat
|
||||
from typing import (
|
||||
Type, NoReturn, List, Optional, Dict, Any, Tuple, Callable, Union
|
||||
)
|
||||
|
||||
import dbt.exceptions
|
||||
import dbt.utils
|
||||
|
||||
from dbt.logger import GLOBAL_LOGGER as logger
|
||||
|
||||
if sys.platform == 'win32':
|
||||
from ctypes import WinDLL, c_bool
|
||||
else:
|
||||
WinDLL = None
|
||||
c_bool = None
|
||||
|
||||
|
||||
def find_matching(
|
||||
root_path: str,
|
||||
relative_paths_to_search: List[str],
|
||||
file_pattern: str,
|
||||
) -> List[Dict[str, str]]:
|
||||
"""
|
||||
Given an absolute `root_path`, a list of relative paths to that
|
||||
absolute root path (`relative_paths_to_search`), and a `file_pattern`
|
||||
like '*.sql', returns information about the files. For example:
|
||||
|
||||
> find_matching('/root/path', ['models'], '*.sql')
|
||||
|
||||
[ { 'absolute_path': '/root/path/models/model_one.sql',
|
||||
'relative_path': 'model_one.sql',
|
||||
'searched_path': 'models' },
|
||||
{ 'absolute_path': '/root/path/models/subdirectory/model_two.sql',
|
||||
'relative_path': 'subdirectory/model_two.sql',
|
||||
'searched_path': 'models' } ]
|
||||
"""
|
||||
matching = []
|
||||
root_path = os.path.normpath(root_path)
|
||||
regex = fnmatch.translate(file_pattern)
|
||||
reobj = re.compile(regex, re.IGNORECASE)
|
||||
|
||||
for relative_path_to_search in relative_paths_to_search:
|
||||
absolute_path_to_search = os.path.join(
|
||||
root_path, relative_path_to_search)
|
||||
walk_results = os.walk(absolute_path_to_search)
|
||||
|
||||
for current_path, subdirectories, local_files in walk_results:
|
||||
for local_file in local_files:
|
||||
absolute_path = os.path.join(current_path, local_file)
|
||||
relative_path = os.path.relpath(
|
||||
absolute_path, absolute_path_to_search
|
||||
)
|
||||
if reobj.match(local_file):
|
||||
matching.append({
|
||||
'searched_path': relative_path_to_search,
|
||||
'absolute_path': absolute_path,
|
||||
'relative_path': relative_path,
|
||||
})
|
||||
|
||||
return matching
|
||||
|
||||
|
||||
def load_file_contents(path: str, strip: bool = True) -> str:
|
||||
path = convert_path(path)
|
||||
with open(path, 'rb') as handle:
|
||||
to_return = handle.read().decode('utf-8')
|
||||
|
||||
if strip:
|
||||
to_return = to_return.strip()
|
||||
|
||||
return to_return
|
||||
|
||||
|
||||
def make_directory(path: str) -> None:
|
||||
"""
|
||||
Make a directory and any intermediate directories that don't already
|
||||
exist. This function handles the case where two threads try to create
|
||||
a directory at once.
|
||||
"""
|
||||
path = convert_path(path)
|
||||
if not os.path.exists(path):
|
||||
# concurrent writes that try to create the same dir can fail
|
||||
try:
|
||||
os.makedirs(path)
|
||||
|
||||
except OSError as e:
|
||||
if e.errno == errno.EEXIST:
|
||||
pass
|
||||
else:
|
||||
raise e
|
||||
|
||||
|
||||
def make_file(path: str, contents: str = '', overwrite: bool = False) -> bool:
|
||||
"""
|
||||
Make a file at `path` assuming that the directory it resides in already
|
||||
exists. The file is saved with contents `contents`
|
||||
"""
|
||||
if overwrite or not os.path.exists(path):
|
||||
path = convert_path(path)
|
||||
with open(path, 'w') as fh:
|
||||
fh.write(contents)
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def make_symlink(source: str, link_path: str) -> None:
|
||||
"""
|
||||
Create a symlink at `link_path` referring to `source`.
|
||||
"""
|
||||
if not supports_symlinks():
|
||||
dbt.exceptions.system_error('create a symbolic link')
|
||||
|
||||
os.symlink(source, link_path)
|
||||
|
||||
|
||||
def supports_symlinks() -> bool:
|
||||
return getattr(os, "symlink", None) is not None
|
||||
|
||||
|
||||
def write_file(path: str, contents: str = '') -> bool:
|
||||
path = convert_path(path)
|
||||
try:
|
||||
make_directory(os.path.dirname(path))
|
||||
with open(path, 'w', encoding='utf-8') as f:
|
||||
f.write(str(contents))
|
||||
except Exception as exc:
|
||||
# note that you can't just catch FileNotFound, because sometimes
|
||||
# windows apparently raises something else.
|
||||
# It's also not sufficient to look at the path length, because
|
||||
# sometimes windows fails to write paths that are less than the length
|
||||
# limit. So on windows, suppress all errors that happen from writing
|
||||
# to disk.
|
||||
if os.name == 'nt':
|
||||
# sometimes we get a winerror of 3 which means the path was
|
||||
# definitely too long, but other times we don't and it means the
|
||||
# path was just probably too long. This is probably based on the
|
||||
# windows/python version.
|
||||
if getattr(exc, 'winerror', 0) == 3:
|
||||
reason = 'Path was too long'
|
||||
else:
|
||||
reason = 'Path was possibly too long'
|
||||
# all our hard work and the path was still too long. Log and
|
||||
# continue.
|
||||
logger.debug(
|
||||
f'Could not write to path {path}({len(path)} characters): '
|
||||
f'{reason}\nexception: {exc}'
|
||||
)
|
||||
else:
|
||||
raise
|
||||
return True
|
||||
|
||||
|
||||
def read_json(path: str) -> Dict[str, Any]:
|
||||
return json.loads(load_file_contents(path))
|
||||
|
||||
|
||||
def write_json(path: str, data: Dict[str, Any]) -> bool:
|
||||
return write_file(path, json.dumps(data, cls=dbt.utils.JSONEncoder))
|
||||
|
||||
|
||||
def _windows_rmdir_readonly(
|
||||
func: Callable[[str], Any], path: str, exc: Tuple[Any, OSError, Any]
|
||||
):
|
||||
exception_val = exc[1]
|
||||
if exception_val.errno == errno.EACCES:
|
||||
os.chmod(path, stat.S_IWUSR)
|
||||
func(path)
|
||||
else:
|
||||
raise
|
||||
|
||||
|
||||
def resolve_path_from_base(path_to_resolve: str, base_path: str) -> str:
|
||||
"""
|
||||
If path-to_resolve is a relative path, create an absolute path
|
||||
with base_path as the base.
|
||||
|
||||
If path_to_resolve is an absolute path or a user path (~), just
|
||||
resolve it to an absolute path and return.
|
||||
"""
|
||||
return os.path.abspath(
|
||||
os.path.join(
|
||||
base_path,
|
||||
os.path.expanduser(path_to_resolve)))
|
||||
|
||||
|
||||
def rmdir(path: str) -> None:
|
||||
"""
|
||||
Recursively deletes a directory. Includes an error handler to retry with
|
||||
different permissions on Windows. Otherwise, removing directories (eg.
|
||||
cloned via git) can cause rmtree to throw a PermissionError exception
|
||||
"""
|
||||
path = convert_path(path)
|
||||
if sys.platform == 'win32':
|
||||
onerror = _windows_rmdir_readonly
|
||||
else:
|
||||
onerror = None
|
||||
|
||||
shutil.rmtree(path, onerror=onerror)
|
||||
|
||||
|
||||
def _win_prepare_path(path: str) -> str:
|
||||
"""Given a windows path, prepare it for use by making sure it is absolute
|
||||
and normalized.
|
||||
"""
|
||||
path = os.path.normpath(path)
|
||||
|
||||
# if a path starts with '\', splitdrive() on it will return '' for the
|
||||
# drive, but the prefix requires a drive letter. So let's add the drive
|
||||
# letter back in.
|
||||
# Unless it starts with '\\'. In that case, the path is a UNC mount point
|
||||
# and splitdrive will be fine.
|
||||
if not path.startswith('\\\\') and path.startswith('\\'):
|
||||
curdrive = os.path.splitdrive(os.getcwd())[0]
|
||||
path = curdrive + path
|
||||
|
||||
# now our path is either an absolute UNC path or relative to the current
|
||||
# directory. If it's relative, we need to make it absolute or the prefix
|
||||
# won't work. `ntpath.abspath` allegedly doesn't always play nice with long
|
||||
# paths, so do this instead.
|
||||
if not os.path.splitdrive(path)[0]:
|
||||
path = os.path.join(os.getcwd(), path)
|
||||
|
||||
return path
|
||||
|
||||
|
||||
def _supports_long_paths() -> bool:
|
||||
if sys.platform != 'win32':
|
||||
return True
|
||||
# Eryk Sun says to use `WinDLL('ntdll')` instead of `windll.ntdll` because
|
||||
# of pointer caching in a comment here:
|
||||
# https://stackoverflow.com/a/35097999/11262881
|
||||
# I don't know exaclty what he means, but I am inclined to believe him as
|
||||
# he's pretty active on Python windows bugs!
|
||||
try:
|
||||
dll = WinDLL('ntdll')
|
||||
except OSError: # I don't think this happens? you need ntdll to run python
|
||||
return False
|
||||
# not all windows versions have it at all
|
||||
if not hasattr(dll, 'RtlAreLongPathsEnabled'):
|
||||
return False
|
||||
# tell windows we want to get back a single unsigned byte (a bool).
|
||||
dll.RtlAreLongPathsEnabled.restype = c_bool
|
||||
return dll.RtlAreLongPathsEnabled()
|
||||
|
||||
|
||||
def convert_path(path: str) -> str:
|
||||
"""Convert a path that dbt has, which might be >260 characters long, to one
|
||||
that will be writable/readable on Windows.
|
||||
|
||||
On other platforms, this is a no-op.
|
||||
"""
|
||||
# some parts of python seem to append '\*.*' to strings, better safe than
|
||||
# sorry.
|
||||
if len(path) < 250:
|
||||
return path
|
||||
if _supports_long_paths():
|
||||
return path
|
||||
|
||||
prefix = '\\\\?\\'
|
||||
# Nothing to do
|
||||
if path.startswith(prefix):
|
||||
return path
|
||||
|
||||
path = _win_prepare_path(path)
|
||||
|
||||
# add the prefix. The check is just in case os.getcwd() does something
|
||||
# unexpected - I believe this if-state should always be True though!
|
||||
if not path.startswith(prefix):
|
||||
path = prefix + path
|
||||
return path
|
||||
|
||||
|
||||
def remove_file(path: str) -> None:
|
||||
path = convert_path(path)
|
||||
os.remove(path)
|
||||
|
||||
|
||||
def path_exists(path: str) -> bool:
|
||||
path = convert_path(path)
|
||||
return os.path.lexists(path)
|
||||
|
||||
|
||||
def path_is_symlink(path: str) -> bool:
|
||||
path = convert_path(path)
|
||||
return os.path.islink(path)
|
||||
|
||||
|
||||
def open_dir_cmd() -> str:
|
||||
# https://docs.python.org/2/library/sys.html#sys.platform
|
||||
if sys.platform == 'win32':
|
||||
return 'start'
|
||||
|
||||
elif sys.platform == 'darwin':
|
||||
return 'open'
|
||||
|
||||
else:
|
||||
return 'xdg-open'
|
||||
|
||||
|
||||
def _handle_posix_cwd_error(
|
||||
exc: OSError, cwd: str, cmd: List[str]
|
||||
) -> NoReturn:
|
||||
if exc.errno == errno.ENOENT:
|
||||
message = 'Directory does not exist'
|
||||
elif exc.errno == errno.EACCES:
|
||||
message = 'Current user cannot access directory, check permissions'
|
||||
elif exc.errno == errno.ENOTDIR:
|
||||
message = 'Not a directory'
|
||||
else:
|
||||
message = 'Unknown OSError: {} - cwd'.format(str(exc))
|
||||
raise dbt.exceptions.WorkingDirectoryError(cwd, cmd, message)
|
||||
|
||||
|
||||
def _handle_posix_cmd_error(
|
||||
exc: OSError, cwd: str, cmd: List[str]
|
||||
) -> NoReturn:
|
||||
if exc.errno == errno.ENOENT:
|
||||
message = "Could not find command, ensure it is in the user's PATH"
|
||||
elif exc.errno == errno.EACCES:
|
||||
message = 'User does not have permissions for this command'
|
||||
else:
|
||||
message = 'Unknown OSError: {} - cmd'.format(str(exc))
|
||||
raise dbt.exceptions.ExecutableError(cwd, cmd, message)
|
||||
|
||||
|
||||
def _handle_posix_error(exc: OSError, cwd: str, cmd: List[str]) -> NoReturn:
|
||||
"""OSError handling for posix systems.
|
||||
|
||||
Some things that could happen to trigger an OSError:
|
||||
- cwd could not exist
|
||||
- exc.errno == ENOENT
|
||||
- exc.filename == cwd
|
||||
- cwd could have permissions that prevent the current user moving to it
|
||||
- exc.errno == EACCES
|
||||
- exc.filename == cwd
|
||||
- cwd could exist but not be a directory
|
||||
- exc.errno == ENOTDIR
|
||||
- exc.filename == cwd
|
||||
- cmd[0] could not exist
|
||||
- exc.errno == ENOENT
|
||||
- exc.filename == None(?)
|
||||
- cmd[0] could exist but have permissions that prevents the current
|
||||
user from executing it (executable bit not set for the user)
|
||||
- exc.errno == EACCES
|
||||
- exc.filename == None(?)
|
||||
"""
|
||||
if getattr(exc, 'filename', None) == cwd:
|
||||
_handle_posix_cwd_error(exc, cwd, cmd)
|
||||
else:
|
||||
_handle_posix_cmd_error(exc, cwd, cmd)
|
||||
|
||||
|
||||
def _handle_windows_error(exc: OSError, cwd: str, cmd: List[str]) -> NoReturn:
|
||||
cls: Type[dbt.exceptions.Exception] = dbt.exceptions.CommandError
|
||||
if exc.errno == errno.ENOENT:
|
||||
message = ("Could not find command, ensure it is in the user's PATH "
|
||||
"and that the user has permissions to run it")
|
||||
cls = dbt.exceptions.ExecutableError
|
||||
elif exc.errno == errno.ENOEXEC:
|
||||
message = ('Command was not executable, ensure it is valid')
|
||||
cls = dbt.exceptions.ExecutableError
|
||||
elif exc.errno == errno.ENOTDIR:
|
||||
message = ('Unable to cd: path does not exist, user does not have'
|
||||
' permissions, or not a directory')
|
||||
cls = dbt.exceptions.WorkingDirectoryError
|
||||
else:
|
||||
message = 'Unknown error: {} (errno={}: "{}")'.format(
|
||||
str(exc), exc.errno, errno.errorcode.get(exc.errno, '<Unknown!>')
|
||||
)
|
||||
raise cls(cwd, cmd, message)
|
||||
|
||||
|
||||
def _interpret_oserror(exc: OSError, cwd: str, cmd: List[str]) -> NoReturn:
|
||||
"""Interpret an OSError exc and raise the appropriate dbt exception.
|
||||
|
||||
"""
|
||||
if len(cmd) == 0:
|
||||
raise dbt.exceptions.CommandError(cwd, cmd)
|
||||
|
||||
# all of these functions raise unconditionally
|
||||
if os.name == 'nt':
|
||||
_handle_windows_error(exc, cwd, cmd)
|
||||
else:
|
||||
_handle_posix_error(exc, cwd, cmd)
|
||||
|
||||
# this should not be reachable, raise _something_ at least!
|
||||
raise dbt.exceptions.InternalException(
|
||||
'Unhandled exception in _interpret_oserror: {}'.format(exc)
|
||||
)
|
||||
|
||||
|
||||
def run_cmd(
|
||||
cwd: str, cmd: List[str], env: Optional[Dict[str, Any]] = None
|
||||
) -> Tuple[bytes, bytes]:
|
||||
logger.debug('Executing "{}"'.format(' '.join(cmd)))
|
||||
if len(cmd) == 0:
|
||||
raise dbt.exceptions.CommandError(cwd, cmd)
|
||||
|
||||
# the env argument replaces the environment entirely, which has exciting
|
||||
# consequences on Windows! Do an update instead.
|
||||
full_env = env
|
||||
if env is not None:
|
||||
full_env = os.environ.copy()
|
||||
full_env.update(env)
|
||||
|
||||
try:
|
||||
proc = subprocess.Popen(
|
||||
cmd,
|
||||
cwd=cwd,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
env=full_env)
|
||||
|
||||
out, err = proc.communicate()
|
||||
except OSError as exc:
|
||||
_interpret_oserror(exc, cwd, cmd)
|
||||
|
||||
logger.debug('STDOUT: "{!s}"'.format(out))
|
||||
logger.debug('STDERR: "{!s}"'.format(err))
|
||||
|
||||
if proc.returncode != 0:
|
||||
logger.debug('command return code={}'.format(proc.returncode))
|
||||
raise dbt.exceptions.CommandResultError(cwd, cmd, proc.returncode,
|
||||
out, err)
|
||||
|
||||
return out, err
|
||||
|
||||
|
||||
def download(url: str, path: str, timeout: Union[float, tuple] = None) -> None:
|
||||
path = convert_path(path)
|
||||
connection_timeout = timeout or float(os.getenv('DBT_HTTP_TIMEOUT', 10))
|
||||
response = requests.get(url, timeout=connection_timeout)
|
||||
with open(path, 'wb') as handle:
|
||||
for block in response.iter_content(1024 * 64):
|
||||
handle.write(block)
|
||||
|
||||
|
||||
def rename(from_path: str, to_path: str, force: bool = False) -> None:
|
||||
from_path = convert_path(from_path)
|
||||
to_path = convert_path(to_path)
|
||||
is_symlink = path_is_symlink(to_path)
|
||||
|
||||
if os.path.exists(to_path) and force:
|
||||
if is_symlink:
|
||||
remove_file(to_path)
|
||||
else:
|
||||
rmdir(to_path)
|
||||
|
||||
shutil.move(from_path, to_path)
|
||||
|
||||
|
||||
def untar_package(
|
||||
tar_path: str, dest_dir: str, rename_to: Optional[str] = None
|
||||
) -> None:
|
||||
tar_path = convert_path(tar_path)
|
||||
tar_dir_name = None
|
||||
with tarfile.open(tar_path, 'r') as tarball:
|
||||
tarball.extractall(dest_dir)
|
||||
tar_dir_name = os.path.commonprefix(tarball.getnames())
|
||||
if rename_to:
|
||||
downloaded_path = os.path.join(dest_dir, tar_dir_name)
|
||||
desired_path = os.path.join(dest_dir, rename_to)
|
||||
dbt.clients.system.rename(downloaded_path, desired_path, force=True)
|
||||
|
||||
|
||||
def chmod_and_retry(func, path, exc_info):
|
||||
"""Define an error handler to pass to shutil.rmtree.
|
||||
On Windows, when a file is marked read-only as git likes to do, rmtree will
|
||||
fail. To handle that, on errors try to make the file writable.
|
||||
We want to retry most operations here, but listdir is one that we know will
|
||||
be useless.
|
||||
"""
|
||||
if func is os.listdir or os.name != 'nt':
|
||||
raise
|
||||
os.chmod(path, stat.S_IREAD | stat.S_IWRITE)
|
||||
# on error,this will raise.
|
||||
func(path)
|
||||
|
||||
|
||||
def _absnorm(path):
|
||||
return os.path.normcase(os.path.abspath(path))
|
||||
|
||||
|
||||
def move(src, dst):
|
||||
"""A re-implementation of shutil.move that properly removes the source
|
||||
directory on windows when it has read-only files in it and the move is
|
||||
between two drives.
|
||||
|
||||
This is almost identical to the real shutil.move, except it uses our rmtree
|
||||
and skips handling non-windows OSes since the existing one works ok there.
|
||||
"""
|
||||
src = convert_path(src)
|
||||
dst = convert_path(dst)
|
||||
if os.name != 'nt':
|
||||
return shutil.move(src, dst)
|
||||
|
||||
if os.path.isdir(dst):
|
||||
if _absnorm(src) == _absnorm(dst):
|
||||
os.rename(src, dst)
|
||||
return
|
||||
|
||||
dst = os.path.join(dst, os.path.basename(src.rstrip('/\\')))
|
||||
if os.path.exists(dst):
|
||||
raise EnvironmentError("Path '{}' already exists".format(dst))
|
||||
|
||||
try:
|
||||
os.rename(src, dst)
|
||||
except OSError:
|
||||
# probably different drives
|
||||
if os.path.isdir(src):
|
||||
if _absnorm(dst + '\\').startswith(_absnorm(src + '\\')):
|
||||
# dst is inside src
|
||||
raise EnvironmentError(
|
||||
"Cannot move a directory '{}' into itself '{}'"
|
||||
.format(src, dst)
|
||||
)
|
||||
shutil.copytree(src, dst, symlinks=True)
|
||||
rmtree(src)
|
||||
else:
|
||||
shutil.copy2(src, dst)
|
||||
os.unlink(src)
|
||||
|
||||
|
||||
def rmtree(path):
|
||||
"""Recursively remove path. On permissions errors on windows, try to remove
|
||||
the read-only flag and try again.
|
||||
"""
|
||||
path = convert_path(path)
|
||||
return shutil.rmtree(path, onerror=chmod_and_retry)
|
||||
@@ -1,9 +1,17 @@
|
||||
import dbt.compat
|
||||
from typing import Any
|
||||
|
||||
import dbt.exceptions
|
||||
|
||||
import yaml
|
||||
import yaml.scanner
|
||||
|
||||
# the C version is faster, but it doesn't always exist
|
||||
YamlLoader: Any
|
||||
try:
|
||||
from yaml import CSafeLoader as YamlLoader
|
||||
except ImportError:
|
||||
from yaml import SafeLoader as YamlLoader
|
||||
|
||||
|
||||
YAML_ERROR_MESSAGE = """
|
||||
Syntax error near line {line_number}
|
||||
@@ -17,7 +25,7 @@ Raw Error:
|
||||
|
||||
|
||||
def line_no(i, line, width=3):
|
||||
line_number = dbt.compat.to_string(i).ljust(width)
|
||||
line_number = str(i).ljust(width)
|
||||
return "{}| {}".format(line_number, line)
|
||||
|
||||
|
||||
@@ -45,13 +53,17 @@ def contextualized_yaml_error(raw_contents, error):
|
||||
raw_error=error)
|
||||
|
||||
|
||||
def safe_load(contents):
|
||||
return yaml.load(contents, Loader=YamlLoader)
|
||||
|
||||
|
||||
def load_yaml_text(contents):
|
||||
try:
|
||||
return yaml.safe_load(contents)
|
||||
return safe_load(contents)
|
||||
except (yaml.scanner.ScannerError, yaml.YAMLError) as e:
|
||||
if hasattr(e, 'problem_mark'):
|
||||
error = contextualized_yaml_error(contents, e)
|
||||
else:
|
||||
error = dbt.compat.to_string(e)
|
||||
error = str(e)
|
||||
|
||||
raise dbt.exceptions.ValidationException(error)
|
||||
509
core/dbt/compilation.py
Normal file
509
core/dbt/compilation.py
Normal file
@@ -0,0 +1,509 @@
|
||||
import os
|
||||
from collections import defaultdict
|
||||
from typing import List, Dict, Any, Tuple, cast, Optional
|
||||
|
||||
import networkx as nx # type: ignore
|
||||
import sqlparse
|
||||
|
||||
from dbt import flags
|
||||
from dbt.adapters.factory import get_adapter
|
||||
from dbt.clients import jinja
|
||||
from dbt.clients.system import make_directory
|
||||
from dbt.context.providers import generate_runtime_model
|
||||
from dbt.contracts.graph.manifest import Manifest
|
||||
from dbt.contracts.graph.compiled import (
|
||||
CompiledDataTestNode,
|
||||
CompiledSchemaTestNode,
|
||||
COMPILED_TYPES,
|
||||
GraphMemberNode,
|
||||
InjectedCTE,
|
||||
ManifestNode,
|
||||
NonSourceCompiledNode,
|
||||
)
|
||||
from dbt.contracts.graph.parsed import ParsedNode
|
||||
from dbt.exceptions import (
|
||||
dependency_not_found,
|
||||
InternalException,
|
||||
RuntimeException,
|
||||
)
|
||||
from dbt.graph import Graph
|
||||
from dbt.logger import GLOBAL_LOGGER as logger
|
||||
from dbt.node_types import NodeType
|
||||
from dbt.utils import pluralize
|
||||
|
||||
graph_file_name = 'graph.gpickle'
|
||||
|
||||
|
||||
def _compiled_type_for(model: ParsedNode):
|
||||
if type(model) not in COMPILED_TYPES:
|
||||
raise InternalException(
|
||||
f'Asked to compile {type(model)} node, but it has no compiled form'
|
||||
)
|
||||
return COMPILED_TYPES[type(model)]
|
||||
|
||||
|
||||
def print_compile_stats(stats):
|
||||
names = {
|
||||
NodeType.Model: 'model',
|
||||
NodeType.Test: 'test',
|
||||
NodeType.Snapshot: 'snapshot',
|
||||
NodeType.Analysis: 'analysis',
|
||||
NodeType.Macro: 'macro',
|
||||
NodeType.Operation: 'operation',
|
||||
NodeType.Seed: 'seed file',
|
||||
NodeType.Source: 'source',
|
||||
NodeType.Exposure: 'exposure',
|
||||
}
|
||||
|
||||
results = {k: 0 for k in names.keys()}
|
||||
results.update(stats)
|
||||
|
||||
stat_line = ", ".join([
|
||||
pluralize(ct, names.get(t)) for t, ct in results.items()
|
||||
if t in names
|
||||
])
|
||||
|
||||
logger.info("Found {}".format(stat_line))
|
||||
|
||||
|
||||
def _node_enabled(node: ManifestNode):
|
||||
# Disabled models are already excluded from the manifest
|
||||
if node.resource_type == NodeType.Test and not node.config.enabled:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
|
||||
def _generate_stats(manifest: Manifest):
|
||||
stats: Dict[NodeType, int] = defaultdict(int)
|
||||
for node in manifest.nodes.values():
|
||||
if _node_enabled(node):
|
||||
stats[node.resource_type] += 1
|
||||
|
||||
for source in manifest.sources.values():
|
||||
stats[source.resource_type] += 1
|
||||
for exposure in manifest.exposures.values():
|
||||
stats[exposure.resource_type] += 1
|
||||
for macro in manifest.macros.values():
|
||||
stats[macro.resource_type] += 1
|
||||
return stats
|
||||
|
||||
|
||||
def _add_prepended_cte(prepended_ctes, new_cte):
|
||||
for cte in prepended_ctes:
|
||||
if cte.id == new_cte.id:
|
||||
cte.sql = new_cte.sql
|
||||
return
|
||||
prepended_ctes.append(new_cte)
|
||||
|
||||
|
||||
def _extend_prepended_ctes(prepended_ctes, new_prepended_ctes):
|
||||
for new_cte in new_prepended_ctes:
|
||||
_add_prepended_cte(prepended_ctes, new_cte)
|
||||
|
||||
|
||||
class Linker:
|
||||
def __init__(self, data=None):
|
||||
if data is None:
|
||||
data = {}
|
||||
self.graph = nx.DiGraph(**data)
|
||||
|
||||
def edges(self):
|
||||
return self.graph.edges()
|
||||
|
||||
def nodes(self):
|
||||
return self.graph.nodes()
|
||||
|
||||
def find_cycles(self):
|
||||
try:
|
||||
cycle = nx.find_cycle(self.graph)
|
||||
except nx.NetworkXNoCycle:
|
||||
return None
|
||||
else:
|
||||
# cycles is a List[Tuple[str, ...]]
|
||||
return " --> ".join(c[0] for c in cycle)
|
||||
|
||||
def dependency(self, node1, node2):
|
||||
"indicate that node1 depends on node2"
|
||||
self.graph.add_node(node1)
|
||||
self.graph.add_node(node2)
|
||||
self.graph.add_edge(node2, node1)
|
||||
|
||||
def add_node(self, node):
|
||||
self.graph.add_node(node)
|
||||
|
||||
def write_graph(self, outfile: str, manifest: Manifest):
|
||||
"""Write the graph to a gpickle file. Before doing so, serialize and
|
||||
include all nodes in their corresponding graph entries.
|
||||
"""
|
||||
out_graph = self.graph.copy()
|
||||
for node_id in self.graph.nodes():
|
||||
data = manifest.expect(node_id).to_dict()
|
||||
out_graph.add_node(node_id, **data)
|
||||
nx.write_gpickle(out_graph, outfile)
|
||||
|
||||
|
||||
class Compiler:
|
||||
def __init__(self, config):
|
||||
self.config = config
|
||||
|
||||
def initialize(self):
|
||||
make_directory(self.config.target_path)
|
||||
make_directory(self.config.modules_path)
|
||||
|
||||
# creates a ModelContext which is converted to
|
||||
# a dict for jinja rendering of SQL
|
||||
def _create_node_context(
|
||||
self,
|
||||
node: NonSourceCompiledNode,
|
||||
manifest: Manifest,
|
||||
extra_context: Dict[str, Any],
|
||||
) -> Dict[str, Any]:
|
||||
|
||||
context = generate_runtime_model(
|
||||
node, self.config, manifest
|
||||
)
|
||||
context.update(extra_context)
|
||||
if isinstance(node, CompiledSchemaTestNode):
|
||||
# for test nodes, add a special keyword args value to the context
|
||||
jinja.add_rendered_test_kwargs(context, node)
|
||||
|
||||
return context
|
||||
|
||||
def add_ephemeral_prefix(self, name: str):
|
||||
adapter = get_adapter(self.config)
|
||||
relation_cls = adapter.Relation
|
||||
return relation_cls.add_ephemeral_prefix(name)
|
||||
|
||||
def _get_relation_name(self, node: ParsedNode):
|
||||
relation_name = None
|
||||
if (node.resource_type in NodeType.refable() and
|
||||
not node.is_ephemeral_model):
|
||||
adapter = get_adapter(self.config)
|
||||
relation_cls = adapter.Relation
|
||||
relation_name = str(relation_cls.create_from(self.config, node))
|
||||
return relation_name
|
||||
|
||||
def _inject_ctes_into_sql(self, sql: str, ctes: List[InjectedCTE]) -> str:
|
||||
"""
|
||||
`ctes` is a list of InjectedCTEs like:
|
||||
|
||||
[
|
||||
InjectedCTE(
|
||||
id="cte_id_1",
|
||||
sql="__dbt__cte__ephemeral as (select * from table)",
|
||||
),
|
||||
InjectedCTE(
|
||||
id="cte_id_2",
|
||||
sql="__dbt__cte__events as (select id, type from events)",
|
||||
),
|
||||
]
|
||||
|
||||
Given `sql` like:
|
||||
|
||||
"with internal_cte as (select * from sessions)
|
||||
select * from internal_cte"
|
||||
|
||||
This will spit out:
|
||||
|
||||
"with __dbt__cte__ephemeral as (select * from table),
|
||||
__dbt__cte__events as (select id, type from events),
|
||||
with internal_cte as (select * from sessions)
|
||||
select * from internal_cte"
|
||||
|
||||
(Whitespace enhanced for readability.)
|
||||
"""
|
||||
if len(ctes) == 0:
|
||||
return sql
|
||||
|
||||
parsed_stmts = sqlparse.parse(sql)
|
||||
parsed = parsed_stmts[0]
|
||||
|
||||
with_stmt = None
|
||||
for token in parsed.tokens:
|
||||
if token.is_keyword and token.normalized == 'WITH':
|
||||
with_stmt = token
|
||||
break
|
||||
|
||||
if with_stmt is None:
|
||||
# no with stmt, add one, and inject CTEs right at the beginning
|
||||
first_token = parsed.token_first()
|
||||
with_stmt = sqlparse.sql.Token(sqlparse.tokens.Keyword, 'with')
|
||||
parsed.insert_before(first_token, with_stmt)
|
||||
else:
|
||||
# stmt exists, add a comma (which will come after injected CTEs)
|
||||
trailing_comma = sqlparse.sql.Token(
|
||||
sqlparse.tokens.Punctuation, ','
|
||||
)
|
||||
parsed.insert_after(with_stmt, trailing_comma)
|
||||
|
||||
token = sqlparse.sql.Token(
|
||||
sqlparse.tokens.Keyword,
|
||||
", ".join(c.sql for c in ctes)
|
||||
)
|
||||
parsed.insert_after(with_stmt, token)
|
||||
|
||||
return str(parsed)
|
||||
|
||||
def _get_dbt_test_name(self) -> str:
|
||||
return 'dbt__cte__internal_test'
|
||||
|
||||
# This method is called by the 'compile_node' method. Starting
|
||||
# from the node that it is passed in, it will recursively call
|
||||
# itself using the 'extra_ctes'. The 'ephemeral' models do
|
||||
# not produce SQL that is executed directly, instead they
|
||||
# are rolled up into the models that refer to them by
|
||||
# inserting CTEs into the SQL.
|
||||
def _recursively_prepend_ctes(
|
||||
self,
|
||||
model: NonSourceCompiledNode,
|
||||
manifest: Manifest,
|
||||
extra_context: Optional[Dict[str, Any]],
|
||||
) -> Tuple[NonSourceCompiledNode, List[InjectedCTE]]:
|
||||
|
||||
if model.compiled_sql is None:
|
||||
raise RuntimeException(
|
||||
'Cannot inject ctes into an unparsed node', model
|
||||
)
|
||||
if model.extra_ctes_injected:
|
||||
return (model, model.extra_ctes)
|
||||
|
||||
# Just to make it plain that nothing is actually injected for this case
|
||||
if not model.extra_ctes:
|
||||
model.extra_ctes_injected = True
|
||||
manifest.update_node(model)
|
||||
return (model, model.extra_ctes)
|
||||
|
||||
# This stores the ctes which will all be recursively
|
||||
# gathered and then "injected" into the model.
|
||||
prepended_ctes: List[InjectedCTE] = []
|
||||
|
||||
dbt_test_name = self._get_dbt_test_name()
|
||||
|
||||
# extra_ctes are added to the model by
|
||||
# RuntimeRefResolver.create_relation, which adds an
|
||||
# extra_cte for every model relation which is an
|
||||
# ephemeral model.
|
||||
for cte in model.extra_ctes:
|
||||
if cte.id == dbt_test_name:
|
||||
sql = cte.sql
|
||||
else:
|
||||
if cte.id not in manifest.nodes:
|
||||
raise InternalException(
|
||||
f'During compilation, found a cte reference that '
|
||||
f'could not be resolved: {cte.id}'
|
||||
)
|
||||
cte_model = manifest.nodes[cte.id]
|
||||
|
||||
if not cte_model.is_ephemeral_model:
|
||||
raise InternalException(f'{cte.id} is not ephemeral')
|
||||
|
||||
# This model has already been compiled, so it's been
|
||||
# through here before
|
||||
if getattr(cte_model, 'compiled', False):
|
||||
assert isinstance(cte_model,
|
||||
tuple(COMPILED_TYPES.values()))
|
||||
cte_model = cast(NonSourceCompiledNode, cte_model)
|
||||
new_prepended_ctes = cte_model.extra_ctes
|
||||
|
||||
# if the cte_model isn't compiled, i.e. first time here
|
||||
else:
|
||||
# This is an ephemeral parsed model that we can compile.
|
||||
# Compile and update the node
|
||||
cte_model = self._compile_node(
|
||||
cte_model, manifest, extra_context)
|
||||
# recursively call this method
|
||||
cte_model, new_prepended_ctes = \
|
||||
self._recursively_prepend_ctes(
|
||||
cte_model, manifest, extra_context
|
||||
)
|
||||
# Save compiled SQL file and sync manifest
|
||||
self._write_node(cte_model)
|
||||
manifest.sync_update_node(cte_model)
|
||||
|
||||
_extend_prepended_ctes(prepended_ctes, new_prepended_ctes)
|
||||
|
||||
new_cte_name = self.add_ephemeral_prefix(cte_model.name)
|
||||
sql = f' {new_cte_name} as (\n{cte_model.compiled_sql}\n)'
|
||||
|
||||
_add_prepended_cte(prepended_ctes, InjectedCTE(id=cte.id, sql=sql))
|
||||
|
||||
# We don't save injected_sql into compiled sql for ephemeral models
|
||||
# because it will cause problems with processing of subsequent models.
|
||||
# Ephemeral models do not produce executable SQL of their own.
|
||||
if not model.is_ephemeral_model:
|
||||
injected_sql = self._inject_ctes_into_sql(
|
||||
model.compiled_sql,
|
||||
prepended_ctes,
|
||||
)
|
||||
model.compiled_sql = injected_sql
|
||||
model.extra_ctes_injected = True
|
||||
model.extra_ctes = prepended_ctes
|
||||
model.validate(model.to_dict())
|
||||
|
||||
manifest.update_node(model)
|
||||
|
||||
return model, prepended_ctes
|
||||
|
||||
def _add_ctes(
|
||||
self,
|
||||
compiled_node: NonSourceCompiledNode,
|
||||
manifest: Manifest,
|
||||
extra_context: Dict[str, Any],
|
||||
) -> NonSourceCompiledNode:
|
||||
"""Wrap the data test SQL in a CTE."""
|
||||
|
||||
# for data tests, we need to insert a special CTE at the end of the
|
||||
# list containing the test query, and then have the "real" query be a
|
||||
# select count(*) from that model.
|
||||
# the benefit of doing it this way is that _add_ctes() can be
|
||||
# rewritten for different adapters to handle databases that don't
|
||||
# support CTEs, or at least don't have full support.
|
||||
if isinstance(compiled_node, CompiledDataTestNode):
|
||||
# the last prepend (so last in order) should be the data test body.
|
||||
# then we can add our select count(*) from _that_ cte as the "real"
|
||||
# compiled_sql, and do the regular prepend logic from CTEs.
|
||||
name = self._get_dbt_test_name()
|
||||
cte = InjectedCTE(
|
||||
id=name,
|
||||
sql=f' {name} as (\n{compiled_node.compiled_sql}\n)'
|
||||
)
|
||||
compiled_node.extra_ctes.append(cte)
|
||||
compiled_node.compiled_sql = f'\nselect count(*) from {name}'
|
||||
|
||||
return compiled_node
|
||||
|
||||
# creates a compiled_node from the ManifestNode passed in,
|
||||
# creates a "context" dictionary for jinja rendering,
|
||||
# and then renders the "compiled_sql" using the node, the
|
||||
# raw_sql and the context.
|
||||
def _compile_node(
|
||||
self,
|
||||
node: ManifestNode,
|
||||
manifest: Manifest,
|
||||
extra_context: Optional[Dict[str, Any]] = None,
|
||||
) -> NonSourceCompiledNode:
|
||||
if extra_context is None:
|
||||
extra_context = {}
|
||||
|
||||
logger.debug("Compiling {}".format(node.unique_id))
|
||||
|
||||
data = node.to_dict()
|
||||
data.update({
|
||||
'compiled': False,
|
||||
'compiled_sql': None,
|
||||
'extra_ctes_injected': False,
|
||||
'extra_ctes': [],
|
||||
})
|
||||
compiled_node = _compiled_type_for(node).from_dict(data)
|
||||
|
||||
context = self._create_node_context(
|
||||
compiled_node, manifest, extra_context
|
||||
)
|
||||
|
||||
compiled_node.compiled_sql = jinja.get_rendered(
|
||||
node.raw_sql,
|
||||
context,
|
||||
node,
|
||||
)
|
||||
|
||||
compiled_node.relation_name = self._get_relation_name(node)
|
||||
|
||||
compiled_node.compiled = True
|
||||
|
||||
# add ctes for specific test nodes, and also for
|
||||
# possible future use in adapters
|
||||
compiled_node = self._add_ctes(
|
||||
compiled_node, manifest, extra_context
|
||||
)
|
||||
|
||||
return compiled_node
|
||||
|
||||
def write_graph_file(self, linker: Linker, manifest: Manifest):
|
||||
filename = graph_file_name
|
||||
graph_path = os.path.join(self.config.target_path, filename)
|
||||
if flags.WRITE_JSON:
|
||||
linker.write_graph(graph_path, manifest)
|
||||
|
||||
def link_node(
|
||||
self, linker: Linker, node: GraphMemberNode, manifest: Manifest
|
||||
):
|
||||
linker.add_node(node.unique_id)
|
||||
|
||||
for dependency in node.depends_on_nodes:
|
||||
if dependency in manifest.nodes:
|
||||
linker.dependency(
|
||||
node.unique_id,
|
||||
(manifest.nodes[dependency].unique_id)
|
||||
)
|
||||
elif dependency in manifest.sources:
|
||||
linker.dependency(
|
||||
node.unique_id,
|
||||
(manifest.sources[dependency].unique_id)
|
||||
)
|
||||
else:
|
||||
dependency_not_found(node, dependency)
|
||||
|
||||
def link_graph(self, linker: Linker, manifest: Manifest):
|
||||
for source in manifest.sources.values():
|
||||
linker.add_node(source.unique_id)
|
||||
for node in manifest.nodes.values():
|
||||
self.link_node(linker, node, manifest)
|
||||
for exposure in manifest.exposures.values():
|
||||
self.link_node(linker, exposure, manifest)
|
||||
# linker.add_node(exposure.unique_id)
|
||||
|
||||
cycle = linker.find_cycles()
|
||||
|
||||
if cycle:
|
||||
raise RuntimeError("Found a cycle: {}".format(cycle))
|
||||
|
||||
def compile(self, manifest: Manifest, write=True) -> Graph:
|
||||
self.initialize()
|
||||
linker = Linker()
|
||||
|
||||
self.link_graph(linker, manifest)
|
||||
|
||||
stats = _generate_stats(manifest)
|
||||
|
||||
if write:
|
||||
self.write_graph_file(linker, manifest)
|
||||
print_compile_stats(stats)
|
||||
|
||||
return Graph(linker.graph)
|
||||
|
||||
# writes the "compiled_sql" into the target/compiled directory
|
||||
def _write_node(self, node: NonSourceCompiledNode) -> ManifestNode:
|
||||
if (not node.extra_ctes_injected or
|
||||
node.resource_type == NodeType.Snapshot):
|
||||
return node
|
||||
logger.debug(f'Writing injected SQL for node "{node.unique_id}"')
|
||||
|
||||
if node.compiled_sql:
|
||||
node.build_path = node.write_node(
|
||||
self.config.target_path,
|
||||
'compiled',
|
||||
node.compiled_sql
|
||||
)
|
||||
return node
|
||||
|
||||
# This is the main entry point into this code. It's called by
|
||||
# CompileRunner.compile, GenericRPCRunner.compile, and
|
||||
# RunTask.get_hook_sql. It calls '_compile_node' to convert
|
||||
# the node into a compiled node, and then calls the
|
||||
# recursive method to "prepend" the ctes.
|
||||
def compile_node(
|
||||
self,
|
||||
node: ManifestNode,
|
||||
manifest: Manifest,
|
||||
extra_context: Optional[Dict[str, Any]] = None,
|
||||
write: bool = True,
|
||||
) -> NonSourceCompiledNode:
|
||||
node = self._compile_node(node, manifest, extra_context)
|
||||
|
||||
node, _ = self._recursively_prepend_ctes(
|
||||
node, manifest, extra_context
|
||||
)
|
||||
if write:
|
||||
self._write_node(node)
|
||||
return node
|
||||
4
core/dbt/config/__init__.py
Normal file
4
core/dbt/config/__init__.py
Normal file
@@ -0,0 +1,4 @@
|
||||
# all these are just exports, they need "noqa" so flake8 will not complain.
|
||||
from .profile import Profile, PROFILES_DIR, read_user_config # noqa
|
||||
from .project import Project, IsFQNResource # noqa
|
||||
from .runtime import RuntimeConfig, UnsetProfileConfig # noqa
|
||||
425
core/dbt/config/profile.py
Normal file
425
core/dbt/config/profile.py
Normal file
@@ -0,0 +1,425 @@
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, Dict, Optional, Tuple
|
||||
import os
|
||||
|
||||
from hologram import ValidationError
|
||||
|
||||
from dbt.clients.system import load_file_contents
|
||||
from dbt.clients.yaml_helper import load_yaml_text
|
||||
from dbt.contracts.connection import Credentials, HasCredentials
|
||||
from dbt.contracts.project import ProfileConfig, UserConfig
|
||||
from dbt.exceptions import CompilationException
|
||||
from dbt.exceptions import DbtProfileError
|
||||
from dbt.exceptions import DbtProjectError
|
||||
from dbt.exceptions import ValidationException
|
||||
from dbt.exceptions import RuntimeException
|
||||
from dbt.exceptions import validator_error_message
|
||||
from dbt.logger import GLOBAL_LOGGER as logger
|
||||
from dbt.utils import coerce_dict_str
|
||||
|
||||
from .renderer import ProfileRenderer
|
||||
|
||||
DEFAULT_THREADS = 1
|
||||
DEFAULT_PROFILES_DIR = os.path.join(os.path.expanduser('~'), '.dbt')
|
||||
PROFILES_DIR = os.path.expanduser(
|
||||
os.getenv('DBT_PROFILES_DIR', DEFAULT_PROFILES_DIR)
|
||||
)
|
||||
|
||||
INVALID_PROFILE_MESSAGE = """
|
||||
dbt encountered an error while trying to read your profiles.yml file.
|
||||
|
||||
{error_string}
|
||||
"""
|
||||
|
||||
|
||||
NO_SUPPLIED_PROFILE_ERROR = """\
|
||||
dbt cannot run because no profile was specified for this dbt project.
|
||||
To specify a profile for this project, add a line like the this to
|
||||
your dbt_project.yml file:
|
||||
|
||||
profile: [profile name]
|
||||
|
||||
Here, [profile name] should be replaced with a profile name
|
||||
defined in your profiles.yml file. You can find profiles.yml here:
|
||||
|
||||
{profiles_file}/profiles.yml
|
||||
""".format(profiles_file=PROFILES_DIR)
|
||||
|
||||
|
||||
def read_profile(profiles_dir: str) -> Dict[str, Any]:
|
||||
path = os.path.join(profiles_dir, 'profiles.yml')
|
||||
|
||||
contents = None
|
||||
if os.path.isfile(path):
|
||||
try:
|
||||
contents = load_file_contents(path, strip=False)
|
||||
yaml_content = load_yaml_text(contents)
|
||||
if not yaml_content:
|
||||
msg = f'The profiles.yml file at {path} is empty'
|
||||
raise DbtProfileError(
|
||||
INVALID_PROFILE_MESSAGE.format(
|
||||
error_string=msg
|
||||
)
|
||||
)
|
||||
return yaml_content
|
||||
except ValidationException as e:
|
||||
msg = INVALID_PROFILE_MESSAGE.format(error_string=e)
|
||||
raise ValidationException(msg) from e
|
||||
|
||||
return {}
|
||||
|
||||
|
||||
def read_user_config(directory: str) -> UserConfig:
|
||||
try:
|
||||
profile = read_profile(directory)
|
||||
if profile:
|
||||
user_cfg = coerce_dict_str(profile.get('config', {}))
|
||||
if user_cfg is not None:
|
||||
return UserConfig.from_dict(user_cfg)
|
||||
except (RuntimeException, ValidationError):
|
||||
pass
|
||||
return UserConfig()
|
||||
|
||||
|
||||
# The Profile class is included in RuntimeConfig, so any attribute
|
||||
# additions must also be set where the RuntimeConfig class is created
|
||||
@dataclass
|
||||
class Profile(HasCredentials):
|
||||
profile_name: str
|
||||
target_name: str
|
||||
config: UserConfig
|
||||
threads: int
|
||||
credentials: Credentials
|
||||
|
||||
def to_profile_info(
|
||||
self, serialize_credentials: bool = False
|
||||
) -> Dict[str, Any]:
|
||||
"""Unlike to_project_config, this dict is not a mirror of any existing
|
||||
on-disk data structure. It's used when creating a new profile from an
|
||||
existing one.
|
||||
|
||||
:param serialize_credentials bool: If True, serialize the credentials.
|
||||
Otherwise, the Credentials object will be copied.
|
||||
:returns dict: The serialized profile.
|
||||
"""
|
||||
result = {
|
||||
'profile_name': self.profile_name,
|
||||
'target_name': self.target_name,
|
||||
'config': self.config,
|
||||
'threads': self.threads,
|
||||
'credentials': self.credentials,
|
||||
}
|
||||
if serialize_credentials:
|
||||
result['config'] = self.config.to_dict()
|
||||
result['credentials'] = self.credentials.to_dict()
|
||||
return result
|
||||
|
||||
def to_target_dict(self) -> Dict[str, Any]:
|
||||
target = dict(
|
||||
self.credentials.connection_info(with_aliases=True)
|
||||
)
|
||||
target.update({
|
||||
'type': self.credentials.type,
|
||||
'threads': self.threads,
|
||||
'name': self.target_name,
|
||||
'target_name': self.target_name,
|
||||
'profile_name': self.profile_name,
|
||||
'config': self.config.to_dict(),
|
||||
})
|
||||
return target
|
||||
|
||||
def __eq__(self, other: object) -> bool:
|
||||
if not (isinstance(other, self.__class__) and
|
||||
isinstance(self, other.__class__)):
|
||||
return NotImplemented
|
||||
return self.to_profile_info() == other.to_profile_info()
|
||||
|
||||
def validate(self):
|
||||
try:
|
||||
if self.credentials:
|
||||
self.credentials.to_dict(validate=True)
|
||||
ProfileConfig.from_dict(
|
||||
self.to_profile_info(serialize_credentials=True)
|
||||
)
|
||||
except ValidationError as exc:
|
||||
raise DbtProfileError(validator_error_message(exc)) from exc
|
||||
|
||||
@staticmethod
|
||||
def _credentials_from_profile(
|
||||
profile: Dict[str, Any], profile_name: str, target_name: str
|
||||
) -> Credentials:
|
||||
# avoid an import cycle
|
||||
from dbt.adapters.factory import load_plugin
|
||||
# credentials carry their 'type' in their actual type, not their
|
||||
# attributes. We do want this in order to pick our Credentials class.
|
||||
if 'type' not in profile:
|
||||
raise DbtProfileError(
|
||||
'required field "type" not found in profile {} and target {}'
|
||||
.format(profile_name, target_name))
|
||||
|
||||
typename = profile.pop('type')
|
||||
try:
|
||||
cls = load_plugin(typename)
|
||||
credentials = cls.from_dict(profile)
|
||||
except (RuntimeException, ValidationError) as e:
|
||||
msg = str(e) if isinstance(e, RuntimeException) else e.message
|
||||
raise DbtProfileError(
|
||||
'Credentials in profile "{}", target "{}" invalid: {}'
|
||||
.format(profile_name, target_name, msg)
|
||||
) from e
|
||||
|
||||
return credentials
|
||||
|
||||
@staticmethod
|
||||
def pick_profile_name(
|
||||
args_profile_name: Optional[str],
|
||||
project_profile_name: Optional[str] = None,
|
||||
) -> str:
|
||||
profile_name = project_profile_name
|
||||
if args_profile_name is not None:
|
||||
profile_name = args_profile_name
|
||||
if profile_name is None:
|
||||
raise DbtProjectError(NO_SUPPLIED_PROFILE_ERROR)
|
||||
return profile_name
|
||||
|
||||
@staticmethod
|
||||
def _get_profile_data(
|
||||
profile: Dict[str, Any], profile_name: str, target_name: str
|
||||
) -> Dict[str, Any]:
|
||||
if 'outputs' not in profile:
|
||||
raise DbtProfileError(
|
||||
"outputs not specified in profile '{}'".format(profile_name)
|
||||
)
|
||||
outputs = profile['outputs']
|
||||
|
||||
if target_name not in outputs:
|
||||
outputs = '\n'.join(' - {}'.format(output)
|
||||
for output in outputs)
|
||||
msg = ("The profile '{}' does not have a target named '{}'. The "
|
||||
"valid target names for this profile are:\n{}"
|
||||
.format(profile_name, target_name, outputs))
|
||||
raise DbtProfileError(msg, result_type='invalid_target')
|
||||
profile_data = outputs[target_name]
|
||||
|
||||
if not isinstance(profile_data, dict):
|
||||
msg = (
|
||||
f"output '{target_name}' of profile '{profile_name}' is "
|
||||
f"misconfigured in profiles.yml"
|
||||
)
|
||||
raise DbtProfileError(msg, result_type='invalid_target')
|
||||
|
||||
return profile_data
|
||||
|
||||
@classmethod
|
||||
def from_credentials(
|
||||
cls,
|
||||
credentials: Credentials,
|
||||
threads: int,
|
||||
profile_name: str,
|
||||
target_name: str,
|
||||
user_cfg: Optional[Dict[str, Any]] = None
|
||||
) -> 'Profile':
|
||||
"""Create a profile from an existing set of Credentials and the
|
||||
remaining information.
|
||||
|
||||
:param credentials: The credentials dict for this profile.
|
||||
:param threads: The number of threads to use for connections.
|
||||
:param profile_name: The profile name used for this profile.
|
||||
:param target_name: The target name used for this profile.
|
||||
:param user_cfg: The user-level config block from the
|
||||
raw profiles, if specified.
|
||||
:raises DbtProfileError: If the profile is invalid.
|
||||
:returns: The new Profile object.
|
||||
"""
|
||||
if user_cfg is None:
|
||||
user_cfg = {}
|
||||
config = UserConfig.from_dict(user_cfg)
|
||||
|
||||
profile = cls(
|
||||
profile_name=profile_name,
|
||||
target_name=target_name,
|
||||
config=config,
|
||||
threads=threads,
|
||||
credentials=credentials
|
||||
)
|
||||
profile.validate()
|
||||
return profile
|
||||
|
||||
@classmethod
|
||||
def render_profile(
|
||||
cls,
|
||||
raw_profile: Dict[str, Any],
|
||||
profile_name: str,
|
||||
target_override: Optional[str],
|
||||
renderer: ProfileRenderer,
|
||||
) -> Tuple[str, Dict[str, Any]]:
|
||||
"""This is a containment zone for the hateful way we're rendering
|
||||
profiles.
|
||||
"""
|
||||
# rendering profiles is a bit complex. Two constraints cause trouble:
|
||||
# 1) users should be able to use environment/cli variables to specify
|
||||
# the target in their profile.
|
||||
# 2) Missing environment/cli variables in profiles/targets that don't
|
||||
# end up getting selected should not cause errors.
|
||||
# so first we'll just render the target name, then we use that rendered
|
||||
# name to extract a profile that we can render.
|
||||
if target_override is not None:
|
||||
target_name = target_override
|
||||
elif 'target' in raw_profile:
|
||||
# render the target if it was parsed from yaml
|
||||
target_name = renderer.render_value(raw_profile['target'])
|
||||
else:
|
||||
target_name = 'default'
|
||||
logger.debug(
|
||||
"target not specified in profile '{}', using '{}'"
|
||||
.format(profile_name, target_name)
|
||||
)
|
||||
|
||||
raw_profile_data = cls._get_profile_data(
|
||||
raw_profile, profile_name, target_name
|
||||
)
|
||||
|
||||
try:
|
||||
profile_data = renderer.render_data(raw_profile_data)
|
||||
except CompilationException as exc:
|
||||
raise DbtProfileError(str(exc)) from exc
|
||||
return target_name, profile_data
|
||||
|
||||
@classmethod
|
||||
def from_raw_profile_info(
|
||||
cls,
|
||||
raw_profile: Dict[str, Any],
|
||||
profile_name: str,
|
||||
renderer: ProfileRenderer,
|
||||
user_cfg: Optional[Dict[str, Any]] = None,
|
||||
target_override: Optional[str] = None,
|
||||
threads_override: Optional[int] = None,
|
||||
) -> 'Profile':
|
||||
"""Create a profile from its raw profile information.
|
||||
|
||||
(this is an intermediate step, mostly useful for unit testing)
|
||||
|
||||
:param raw_profile: The profile data for a single profile, from
|
||||
disk as yaml and its values rendered with jinja.
|
||||
:param profile_name: The profile name used.
|
||||
:param renderer: The config renderer.
|
||||
:param user_cfg: The global config for the user, if it
|
||||
was present.
|
||||
:param target_override: The target to use, if provided on
|
||||
the command line.
|
||||
:param threads_override: The thread count to use, if
|
||||
provided on the command line.
|
||||
:raises DbtProfileError: If the profile is invalid or missing, or the
|
||||
target could not be found
|
||||
:returns: The new Profile object.
|
||||
"""
|
||||
# user_cfg is not rendered.
|
||||
if user_cfg is None:
|
||||
user_cfg = raw_profile.get('config')
|
||||
# TODO: should it be, and the values coerced to bool?
|
||||
target_name, profile_data = cls.render_profile(
|
||||
raw_profile, profile_name, target_override, renderer
|
||||
)
|
||||
|
||||
# valid connections never include the number of threads, but it's
|
||||
# stored on a per-connection level in the raw configs
|
||||
threads = profile_data.pop('threads', DEFAULT_THREADS)
|
||||
if threads_override is not None:
|
||||
threads = threads_override
|
||||
|
||||
credentials: Credentials = cls._credentials_from_profile(
|
||||
profile_data, profile_name, target_name
|
||||
)
|
||||
|
||||
return cls.from_credentials(
|
||||
credentials=credentials,
|
||||
profile_name=profile_name,
|
||||
target_name=target_name,
|
||||
threads=threads,
|
||||
user_cfg=user_cfg
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def from_raw_profiles(
|
||||
cls,
|
||||
raw_profiles: Dict[str, Any],
|
||||
profile_name: str,
|
||||
renderer: ProfileRenderer,
|
||||
target_override: Optional[str] = None,
|
||||
threads_override: Optional[int] = None,
|
||||
) -> 'Profile':
|
||||
"""
|
||||
:param raw_profiles: The profile data, from disk as yaml.
|
||||
:param profile_name: The profile name to use.
|
||||
:param renderer: The config renderer.
|
||||
:param target_override: The target to use, if provided on the command
|
||||
line.
|
||||
:param threads_override: The thread count to use, if provided on the
|
||||
command line.
|
||||
:raises DbtProjectError: If there is no profile name specified in the
|
||||
project or the command line arguments
|
||||
:raises DbtProfileError: If the profile is invalid or missing, or the
|
||||
target could not be found
|
||||
:returns: The new Profile object.
|
||||
"""
|
||||
if profile_name not in raw_profiles:
|
||||
raise DbtProjectError(
|
||||
"Could not find profile named '{}'".format(profile_name)
|
||||
)
|
||||
|
||||
# First, we've already got our final decision on profile name, and we
|
||||
# don't render keys, so we can pluck that out
|
||||
raw_profile = raw_profiles[profile_name]
|
||||
if not raw_profile:
|
||||
msg = (
|
||||
f'Profile {profile_name} in profiles.yml is empty'
|
||||
)
|
||||
raise DbtProfileError(
|
||||
INVALID_PROFILE_MESSAGE.format(
|
||||
error_string=msg
|
||||
)
|
||||
)
|
||||
user_cfg = raw_profiles.get('config')
|
||||
|
||||
return cls.from_raw_profile_info(
|
||||
raw_profile=raw_profile,
|
||||
profile_name=profile_name,
|
||||
renderer=renderer,
|
||||
user_cfg=user_cfg,
|
||||
target_override=target_override,
|
||||
threads_override=threads_override,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def render_from_args(
|
||||
cls,
|
||||
args: Any,
|
||||
renderer: ProfileRenderer,
|
||||
project_profile_name: Optional[str],
|
||||
) -> 'Profile':
|
||||
"""Given the raw profiles as read from disk and the name of the desired
|
||||
profile if specified, return the profile component of the runtime
|
||||
config.
|
||||
|
||||
:param args argparse.Namespace: The arguments as parsed from the cli.
|
||||
:param project_profile_name Optional[str]: The profile name, if
|
||||
specified in a project.
|
||||
:raises DbtProjectError: If there is no profile name specified in the
|
||||
project or the command line arguments, or if the specified profile
|
||||
is not found
|
||||
:raises DbtProfileError: If the profile is invalid or missing, or the
|
||||
target could not be found.
|
||||
:returns Profile: The new Profile object.
|
||||
"""
|
||||
threads_override = getattr(args, 'threads', None)
|
||||
target_override = getattr(args, 'target', None)
|
||||
raw_profiles = read_profile(args.profiles_dir)
|
||||
profile_name = cls.pick_profile_name(getattr(args, 'profile', None),
|
||||
project_profile_name)
|
||||
return cls.from_raw_profiles(
|
||||
raw_profiles=raw_profiles,
|
||||
profile_name=profile_name,
|
||||
renderer=renderer,
|
||||
target_override=target_override,
|
||||
threads_override=threads_override
|
||||
)
|
||||
642
core/dbt/config/project.py
Normal file
642
core/dbt/config/project.py
Normal file
@@ -0,0 +1,642 @@
|
||||
from copy import deepcopy
|
||||
from dataclasses import dataclass, field
|
||||
from itertools import chain
|
||||
from typing import (
|
||||
List, Dict, Any, Optional, TypeVar, Union, Mapping,
|
||||
)
|
||||
from typing_extensions import Protocol, runtime_checkable
|
||||
|
||||
import hashlib
|
||||
import os
|
||||
|
||||
from dbt.clients.system import resolve_path_from_base
|
||||
from dbt.clients.system import path_exists
|
||||
from dbt.clients.system import load_file_contents
|
||||
from dbt.clients.yaml_helper import load_yaml_text
|
||||
from dbt.contracts.connection import QueryComment
|
||||
from dbt.exceptions import DbtProjectError
|
||||
from dbt.exceptions import SemverException
|
||||
from dbt.exceptions import validator_error_message
|
||||
from dbt.exceptions import RuntimeException
|
||||
from dbt.graph import SelectionSpec
|
||||
from dbt.helper_types import NoValue
|
||||
from dbt.semver import VersionSpecifier
|
||||
from dbt.semver import versions_compatible
|
||||
from dbt.version import get_installed_version
|
||||
from dbt.utils import MultiDict
|
||||
from dbt.node_types import NodeType
|
||||
from dbt.config.selectors import SelectorDict
|
||||
|
||||
from dbt.contracts.project import (
|
||||
Project as ProjectContract,
|
||||
SemverString,
|
||||
)
|
||||
from dbt.contracts.project import PackageConfig
|
||||
|
||||
from hologram import ValidationError
|
||||
|
||||
from .renderer import DbtProjectYamlRenderer
|
||||
from .selectors import (
|
||||
selector_config_from_data,
|
||||
selector_data_from_root,
|
||||
SelectorConfig,
|
||||
)
|
||||
|
||||
|
||||
INVALID_VERSION_ERROR = """\
|
||||
This version of dbt is not supported with the '{package}' package.
|
||||
Installed version of dbt: {installed}
|
||||
Required version of dbt for '{package}': {version_spec}
|
||||
Check the requirements for the '{package}' package, or run dbt again with \
|
||||
--no-version-check
|
||||
"""
|
||||
|
||||
|
||||
IMPOSSIBLE_VERSION_ERROR = """\
|
||||
The package version requirement can never be satisfied for the '{package}
|
||||
package.
|
||||
Required versions of dbt for '{package}': {version_spec}
|
||||
Check the requirements for the '{package}' package, or run dbt again with \
|
||||
--no-version-check
|
||||
"""
|
||||
|
||||
MALFORMED_PACKAGE_ERROR = """\
|
||||
The packages.yml file in this project is malformed. Please double check
|
||||
the contents of this file and fix any errors before retrying.
|
||||
|
||||
You can find more information on the syntax for this file here:
|
||||
https://docs.getdbt.com/docs/package-management
|
||||
|
||||
Validator Error:
|
||||
{error}
|
||||
"""
|
||||
|
||||
|
||||
@runtime_checkable
|
||||
class IsFQNResource(Protocol):
|
||||
fqn: List[str]
|
||||
resource_type: NodeType
|
||||
package_name: str
|
||||
|
||||
|
||||
def _load_yaml(path):
|
||||
contents = load_file_contents(path)
|
||||
return load_yaml_text(contents)
|
||||
|
||||
|
||||
def package_data_from_root(project_root):
|
||||
package_filepath = resolve_path_from_base(
|
||||
'packages.yml', project_root
|
||||
)
|
||||
|
||||
if path_exists(package_filepath):
|
||||
packages_dict = _load_yaml(package_filepath)
|
||||
else:
|
||||
packages_dict = None
|
||||
return packages_dict
|
||||
|
||||
|
||||
def package_config_from_data(packages_data: Dict[str, Any]):
|
||||
if not packages_data:
|
||||
packages_data = {'packages': []}
|
||||
|
||||
try:
|
||||
packages = PackageConfig.from_dict(packages_data)
|
||||
except ValidationError as e:
|
||||
raise DbtProjectError(
|
||||
MALFORMED_PACKAGE_ERROR.format(error=str(e.message))
|
||||
) from e
|
||||
return packages
|
||||
|
||||
|
||||
def _parse_versions(versions: Union[List[str], str]) -> List[VersionSpecifier]:
|
||||
"""Parse multiple versions as read from disk. The versions value may be any
|
||||
one of:
|
||||
- a single version string ('>0.12.1')
|
||||
- a single string specifying multiple comma-separated versions
|
||||
('>0.11.1,<=0.12.2')
|
||||
- an array of single-version strings (['>0.11.1', '<=0.12.2'])
|
||||
|
||||
Regardless, this will return a list of VersionSpecifiers
|
||||
"""
|
||||
if isinstance(versions, str):
|
||||
versions = versions.split(',')
|
||||
return [VersionSpecifier.from_version_string(v) for v in versions]
|
||||
|
||||
|
||||
def _all_source_paths(
|
||||
source_paths: List[str],
|
||||
data_paths: List[str],
|
||||
snapshot_paths: List[str],
|
||||
analysis_paths: List[str],
|
||||
macro_paths: List[str],
|
||||
) -> List[str]:
|
||||
return list(chain(source_paths, data_paths, snapshot_paths, analysis_paths,
|
||||
macro_paths))
|
||||
|
||||
|
||||
T = TypeVar('T')
|
||||
|
||||
|
||||
def value_or(value: Optional[T], default: T) -> T:
|
||||
if value is None:
|
||||
return default
|
||||
else:
|
||||
return value
|
||||
|
||||
|
||||
def _raw_project_from(project_root: str) -> Dict[str, Any]:
|
||||
|
||||
project_root = os.path.normpath(project_root)
|
||||
project_yaml_filepath = os.path.join(project_root, 'dbt_project.yml')
|
||||
|
||||
# get the project.yml contents
|
||||
if not path_exists(project_yaml_filepath):
|
||||
raise DbtProjectError(
|
||||
'no dbt_project.yml found at expected path {}'
|
||||
.format(project_yaml_filepath)
|
||||
)
|
||||
|
||||
project_dict = _load_yaml(project_yaml_filepath)
|
||||
|
||||
if not isinstance(project_dict, dict):
|
||||
raise DbtProjectError(
|
||||
'dbt_project.yml does not parse to a dictionary'
|
||||
)
|
||||
|
||||
return project_dict
|
||||
|
||||
|
||||
def _query_comment_from_cfg(
|
||||
cfg_query_comment: Union[QueryComment, NoValue, str, None]
|
||||
) -> QueryComment:
|
||||
if not cfg_query_comment:
|
||||
return QueryComment(comment='')
|
||||
|
||||
if isinstance(cfg_query_comment, str):
|
||||
return QueryComment(comment=cfg_query_comment)
|
||||
|
||||
if isinstance(cfg_query_comment, NoValue):
|
||||
return QueryComment()
|
||||
|
||||
return cfg_query_comment
|
||||
|
||||
|
||||
def validate_version(dbt_version: List[VersionSpecifier], project_name: str):
|
||||
"""Ensure this package works with the installed version of dbt."""
|
||||
installed = get_installed_version()
|
||||
if not versions_compatible(*dbt_version):
|
||||
msg = IMPOSSIBLE_VERSION_ERROR.format(
|
||||
package=project_name,
|
||||
version_spec=[
|
||||
x.to_version_string() for x in dbt_version
|
||||
]
|
||||
)
|
||||
raise DbtProjectError(msg)
|
||||
|
||||
if not versions_compatible(installed, *dbt_version):
|
||||
msg = INVALID_VERSION_ERROR.format(
|
||||
package=project_name,
|
||||
installed=installed.to_version_string(),
|
||||
version_spec=[
|
||||
x.to_version_string() for x in dbt_version
|
||||
]
|
||||
)
|
||||
raise DbtProjectError(msg)
|
||||
|
||||
|
||||
def _get_required_version(
|
||||
project_dict: Dict[str, Any],
|
||||
verify_version: bool,
|
||||
) -> List[VersionSpecifier]:
|
||||
dbt_raw_version: Union[List[str], str] = '>=0.0.0'
|
||||
required = project_dict.get('require-dbt-version')
|
||||
if required is not None:
|
||||
dbt_raw_version = required
|
||||
|
||||
try:
|
||||
dbt_version = _parse_versions(dbt_raw_version)
|
||||
except SemverException as e:
|
||||
raise DbtProjectError(str(e)) from e
|
||||
|
||||
if verify_version:
|
||||
# no name is also an error that we want to raise
|
||||
if 'name' not in project_dict:
|
||||
raise DbtProjectError(
|
||||
'Required "name" field not present in project',
|
||||
)
|
||||
validate_version(dbt_version, project_dict['name'])
|
||||
|
||||
return dbt_version
|
||||
|
||||
|
||||
@dataclass
|
||||
class RenderComponents:
|
||||
project_dict: Dict[str, Any] = field(
|
||||
metadata=dict(description='The project dictionary')
|
||||
)
|
||||
packages_dict: Dict[str, Any] = field(
|
||||
metadata=dict(description='The packages dictionary')
|
||||
)
|
||||
selectors_dict: Dict[str, Any] = field(
|
||||
metadata=dict(description='The selectors dictionary')
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class PartialProject(RenderComponents):
|
||||
profile_name: Optional[str] = field(metadata=dict(
|
||||
description='The unrendered profile name in the project, if set'
|
||||
))
|
||||
project_name: Optional[str] = field(metadata=dict(
|
||||
description=(
|
||||
'The name of the project. This should always be set and will not '
|
||||
'be rendered'
|
||||
)
|
||||
))
|
||||
project_root: str = field(
|
||||
metadata=dict(description='The root directory of the project'),
|
||||
)
|
||||
verify_version: bool = field(
|
||||
metadata=dict(description=(
|
||||
'If True, verify the dbt version matches the required version'
|
||||
))
|
||||
)
|
||||
|
||||
def render_profile_name(self, renderer) -> Optional[str]:
|
||||
if self.profile_name is None:
|
||||
return None
|
||||
return renderer.render_value(self.profile_name)
|
||||
|
||||
def get_rendered(
|
||||
self,
|
||||
renderer: DbtProjectYamlRenderer,
|
||||
) -> RenderComponents:
|
||||
|
||||
rendered_project = renderer.render_project(
|
||||
self.project_dict, self.project_root
|
||||
)
|
||||
rendered_packages = renderer.render_packages(self.packages_dict)
|
||||
rendered_selectors = renderer.render_selectors(self.selectors_dict)
|
||||
|
||||
return RenderComponents(
|
||||
project_dict=rendered_project,
|
||||
packages_dict=rendered_packages,
|
||||
selectors_dict=rendered_selectors,
|
||||
)
|
||||
|
||||
def render(self, renderer: DbtProjectYamlRenderer) -> 'Project':
|
||||
try:
|
||||
rendered = self.get_rendered(renderer)
|
||||
return self.create_project(rendered)
|
||||
except DbtProjectError as exc:
|
||||
if exc.path is None:
|
||||
exc.path = os.path.join(self.project_root, 'dbt_project.yml')
|
||||
raise
|
||||
|
||||
def create_project(self, rendered: RenderComponents) -> 'Project':
|
||||
unrendered = RenderComponents(
|
||||
project_dict=self.project_dict,
|
||||
packages_dict=self.packages_dict,
|
||||
selectors_dict=self.selectors_dict,
|
||||
)
|
||||
dbt_version = _get_required_version(
|
||||
rendered.project_dict,
|
||||
verify_version=self.verify_version,
|
||||
)
|
||||
|
||||
try:
|
||||
cfg = ProjectContract.from_dict(rendered.project_dict)
|
||||
except ValidationError as e:
|
||||
raise DbtProjectError(validator_error_message(e)) from e
|
||||
# name/version are required in the Project definition, so we can assume
|
||||
# they are present
|
||||
name = cfg.name
|
||||
version = cfg.version
|
||||
# this is added at project_dict parse time and should always be here
|
||||
# once we see it.
|
||||
if cfg.project_root is None:
|
||||
raise DbtProjectError('cfg must have a project root!')
|
||||
else:
|
||||
project_root = cfg.project_root
|
||||
# this is only optional in the sense that if it's not present, it needs
|
||||
# to have been a cli argument.
|
||||
profile_name = cfg.profile
|
||||
# these are all the defaults
|
||||
source_paths: List[str] = value_or(cfg.source_paths, ['models'])
|
||||
macro_paths: List[str] = value_or(cfg.macro_paths, ['macros'])
|
||||
data_paths: List[str] = value_or(cfg.data_paths, ['data'])
|
||||
test_paths: List[str] = value_or(cfg.test_paths, ['test'])
|
||||
analysis_paths: List[str] = value_or(cfg.analysis_paths, [])
|
||||
snapshot_paths: List[str] = value_or(cfg.snapshot_paths, ['snapshots'])
|
||||
|
||||
all_source_paths: List[str] = _all_source_paths(
|
||||
source_paths, data_paths, snapshot_paths, analysis_paths,
|
||||
macro_paths
|
||||
)
|
||||
|
||||
docs_paths: List[str] = value_or(cfg.docs_paths, all_source_paths)
|
||||
asset_paths: List[str] = value_or(cfg.asset_paths, [])
|
||||
target_path: str = value_or(cfg.target_path, 'target')
|
||||
clean_targets: List[str] = value_or(cfg.clean_targets, [target_path])
|
||||
log_path: str = value_or(cfg.log_path, 'logs')
|
||||
modules_path: str = value_or(cfg.modules_path, 'dbt_modules')
|
||||
# in the default case we'll populate this once we know the adapter type
|
||||
# It would be nice to just pass along a Quoting here, but that would
|
||||
# break many things
|
||||
quoting: Dict[str, Any] = {}
|
||||
if cfg.quoting is not None:
|
||||
quoting = cfg.quoting.to_dict()
|
||||
|
||||
models: Dict[str, Any]
|
||||
seeds: Dict[str, Any]
|
||||
snapshots: Dict[str, Any]
|
||||
sources: Dict[str, Any]
|
||||
vars_value: VarProvider
|
||||
|
||||
models = cfg.models
|
||||
seeds = cfg.seeds
|
||||
snapshots = cfg.snapshots
|
||||
sources = cfg.sources
|
||||
if cfg.vars is None:
|
||||
vars_dict: Dict[str, Any] = {}
|
||||
else:
|
||||
vars_dict = cfg.vars
|
||||
|
||||
vars_value = VarProvider(vars_dict)
|
||||
on_run_start: List[str] = value_or(cfg.on_run_start, [])
|
||||
on_run_end: List[str] = value_or(cfg.on_run_end, [])
|
||||
|
||||
query_comment = _query_comment_from_cfg(cfg.query_comment)
|
||||
|
||||
packages = package_config_from_data(rendered.packages_dict)
|
||||
selectors = selector_config_from_data(rendered.selectors_dict)
|
||||
manifest_selectors: Dict[str, Any] = {}
|
||||
if rendered.selectors_dict and rendered.selectors_dict['selectors']:
|
||||
# this is a dict with a single key 'selectors' pointing to a list
|
||||
# of dicts.
|
||||
manifest_selectors = SelectorDict.parse_from_selectors_list(
|
||||
rendered.selectors_dict['selectors'])
|
||||
|
||||
project = Project(
|
||||
project_name=name,
|
||||
version=version,
|
||||
project_root=project_root,
|
||||
profile_name=profile_name,
|
||||
source_paths=source_paths,
|
||||
macro_paths=macro_paths,
|
||||
data_paths=data_paths,
|
||||
test_paths=test_paths,
|
||||
analysis_paths=analysis_paths,
|
||||
docs_paths=docs_paths,
|
||||
asset_paths=asset_paths,
|
||||
target_path=target_path,
|
||||
snapshot_paths=snapshot_paths,
|
||||
clean_targets=clean_targets,
|
||||
log_path=log_path,
|
||||
modules_path=modules_path,
|
||||
quoting=quoting,
|
||||
models=models,
|
||||
on_run_start=on_run_start,
|
||||
on_run_end=on_run_end,
|
||||
seeds=seeds,
|
||||
snapshots=snapshots,
|
||||
dbt_version=dbt_version,
|
||||
packages=packages,
|
||||
manifest_selectors=manifest_selectors,
|
||||
selectors=selectors,
|
||||
query_comment=query_comment,
|
||||
sources=sources,
|
||||
vars=vars_value,
|
||||
config_version=cfg.config_version,
|
||||
unrendered=unrendered,
|
||||
)
|
||||
# sanity check - this means an internal issue
|
||||
project.validate()
|
||||
return project
|
||||
|
||||
@classmethod
|
||||
def from_dicts(
|
||||
cls,
|
||||
project_root: str,
|
||||
project_dict: Dict[str, Any],
|
||||
packages_dict: Dict[str, Any],
|
||||
selectors_dict: Dict[str, Any],
|
||||
*,
|
||||
verify_version: bool = False,
|
||||
):
|
||||
"""Construct a partial project from its constituent dicts.
|
||||
"""
|
||||
project_name = project_dict.get('name')
|
||||
profile_name = project_dict.get('profile')
|
||||
|
||||
return cls(
|
||||
profile_name=profile_name,
|
||||
project_name=project_name,
|
||||
project_root=project_root,
|
||||
project_dict=project_dict,
|
||||
packages_dict=packages_dict,
|
||||
selectors_dict=selectors_dict,
|
||||
verify_version=verify_version,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def from_project_root(
|
||||
cls, project_root: str, *, verify_version: bool = False
|
||||
) -> 'PartialProject':
|
||||
project_root = os.path.normpath(project_root)
|
||||
project_dict = _raw_project_from(project_root)
|
||||
config_version = project_dict.get('config-version', 1)
|
||||
if config_version != 2:
|
||||
raise DbtProjectError(
|
||||
f'Invalid config version: {config_version}, expected 2',
|
||||
path=os.path.join(project_root, 'dbt_project.yml')
|
||||
)
|
||||
|
||||
packages_dict = package_data_from_root(project_root)
|
||||
selectors_dict = selector_data_from_root(project_root)
|
||||
return cls.from_dicts(
|
||||
project_root=project_root,
|
||||
project_dict=project_dict,
|
||||
selectors_dict=selectors_dict,
|
||||
packages_dict=packages_dict,
|
||||
verify_version=verify_version,
|
||||
)
|
||||
|
||||
|
||||
class VarProvider:
|
||||
"""Var providers are tied to a particular Project."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
vars: Dict[str, Dict[str, Any]]
|
||||
) -> None:
|
||||
self.vars = vars
|
||||
|
||||
def vars_for(
|
||||
self, node: IsFQNResource, adapter_type: str
|
||||
) -> Mapping[str, Any]:
|
||||
# in v2, vars are only either project or globally scoped
|
||||
merged = MultiDict([self.vars])
|
||||
merged.add(self.vars.get(node.package_name, {}))
|
||||
return merged
|
||||
|
||||
def to_dict(self):
|
||||
return self.vars
|
||||
|
||||
|
||||
# The Project class is included in RuntimeConfig, so any attribute
|
||||
# additions must also be set where the RuntimeConfig class is created
|
||||
@dataclass
|
||||
class Project:
|
||||
project_name: str
|
||||
version: Union[SemverString, float]
|
||||
project_root: str
|
||||
profile_name: Optional[str]
|
||||
source_paths: List[str]
|
||||
macro_paths: List[str]
|
||||
data_paths: List[str]
|
||||
test_paths: List[str]
|
||||
analysis_paths: List[str]
|
||||
docs_paths: List[str]
|
||||
asset_paths: List[str]
|
||||
target_path: str
|
||||
snapshot_paths: List[str]
|
||||
clean_targets: List[str]
|
||||
log_path: str
|
||||
modules_path: str
|
||||
quoting: Dict[str, Any]
|
||||
models: Dict[str, Any]
|
||||
on_run_start: List[str]
|
||||
on_run_end: List[str]
|
||||
seeds: Dict[str, Any]
|
||||
snapshots: Dict[str, Any]
|
||||
sources: Dict[str, Any]
|
||||
vars: VarProvider
|
||||
dbt_version: List[VersionSpecifier]
|
||||
packages: Dict[str, Any]
|
||||
manifest_selectors: Dict[str, Any]
|
||||
selectors: SelectorConfig
|
||||
query_comment: QueryComment
|
||||
config_version: int
|
||||
unrendered: RenderComponents
|
||||
|
||||
@property
|
||||
def all_source_paths(self) -> List[str]:
|
||||
return _all_source_paths(
|
||||
self.source_paths, self.data_paths, self.snapshot_paths,
|
||||
self.analysis_paths, self.macro_paths
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
cfg = self.to_project_config(with_packages=True)
|
||||
return str(cfg)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not (isinstance(other, self.__class__) and
|
||||
isinstance(self, other.__class__)):
|
||||
return False
|
||||
return self.to_project_config(with_packages=True) == \
|
||||
other.to_project_config(with_packages=True)
|
||||
|
||||
def to_project_config(self, with_packages=False):
|
||||
"""Return a dict representation of the config that could be written to
|
||||
disk with `yaml.safe_dump` to get this configuration.
|
||||
|
||||
:param with_packages bool: If True, include the serialized packages
|
||||
file in the root.
|
||||
:returns dict: The serialized profile.
|
||||
"""
|
||||
result = deepcopy({
|
||||
'name': self.project_name,
|
||||
'version': self.version,
|
||||
'project-root': self.project_root,
|
||||
'profile': self.profile_name,
|
||||
'source-paths': self.source_paths,
|
||||
'macro-paths': self.macro_paths,
|
||||
'data-paths': self.data_paths,
|
||||
'test-paths': self.test_paths,
|
||||
'analysis-paths': self.analysis_paths,
|
||||
'docs-paths': self.docs_paths,
|
||||
'asset-paths': self.asset_paths,
|
||||
'target-path': self.target_path,
|
||||
'snapshot-paths': self.snapshot_paths,
|
||||
'clean-targets': self.clean_targets,
|
||||
'log-path': self.log_path,
|
||||
'quoting': self.quoting,
|
||||
'models': self.models,
|
||||
'on-run-start': self.on_run_start,
|
||||
'on-run-end': self.on_run_end,
|
||||
'seeds': self.seeds,
|
||||
'snapshots': self.snapshots,
|
||||
'sources': self.sources,
|
||||
'vars': self.vars.to_dict(),
|
||||
'require-dbt-version': [
|
||||
v.to_version_string() for v in self.dbt_version
|
||||
],
|
||||
'config-version': self.config_version,
|
||||
})
|
||||
if self.query_comment:
|
||||
result['query-comment'] = self.query_comment.to_dict()
|
||||
|
||||
if with_packages:
|
||||
result.update(self.packages.to_dict())
|
||||
|
||||
return result
|
||||
|
||||
def validate(self):
|
||||
try:
|
||||
ProjectContract.from_dict(self.to_project_config())
|
||||
except ValidationError as e:
|
||||
raise DbtProjectError(validator_error_message(e)) from e
|
||||
|
||||
@classmethod
|
||||
def partial_load(
|
||||
cls, project_root: str, *, verify_version: bool = False
|
||||
) -> PartialProject:
|
||||
return PartialProject.from_project_root(
|
||||
project_root,
|
||||
verify_version=verify_version,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def render_from_dict(
|
||||
cls,
|
||||
project_root: str,
|
||||
project_dict: Dict[str, Any],
|
||||
packages_dict: Dict[str, Any],
|
||||
selectors_dict: Dict[str, Any],
|
||||
renderer: DbtProjectYamlRenderer,
|
||||
*,
|
||||
verify_version: bool = False
|
||||
) -> 'Project':
|
||||
partial = PartialProject.from_dicts(
|
||||
project_root=project_root,
|
||||
project_dict=project_dict,
|
||||
packages_dict=packages_dict,
|
||||
selectors_dict=selectors_dict,
|
||||
verify_version=verify_version,
|
||||
)
|
||||
return partial.render(renderer)
|
||||
|
||||
@classmethod
|
||||
def from_project_root(
|
||||
cls,
|
||||
project_root: str,
|
||||
renderer: DbtProjectYamlRenderer,
|
||||
*,
|
||||
verify_version: bool = False,
|
||||
) -> 'Project':
|
||||
partial = cls.partial_load(project_root, verify_version=verify_version)
|
||||
return partial.render(renderer)
|
||||
|
||||
def hashed_name(self):
|
||||
return hashlib.md5(self.project_name.encode('utf-8')).hexdigest()
|
||||
|
||||
def get_selector(self, name: str) -> SelectionSpec:
|
||||
if name not in self.selectors:
|
||||
raise RuntimeException(
|
||||
f'Could not find selector named {name}, expected one of '
|
||||
f'{list(self.selectors)}'
|
||||
)
|
||||
return self.selectors[name]
|
||||
238
core/dbt/config/renderer.py
Normal file
238
core/dbt/config/renderer.py
Normal file
@@ -0,0 +1,238 @@
from typing import Dict, Any, Tuple, Optional, Union, Callable

from dbt.clients.jinja import get_rendered, catch_jinja

from dbt.exceptions import (
    DbtProjectError, CompilationException, RecursionException
)
from dbt.node_types import NodeType
from dbt.utils import deep_map


Keypath = Tuple[Union[str, int], ...]


class BaseRenderer:
    def __init__(self, context: Dict[str, Any]) -> None:
        self.context = context

    @property
    def name(self):
        return 'Rendering'

    def should_render_keypath(self, keypath: Keypath) -> bool:
        return True

    def render_entry(self, value: Any, keypath: Keypath) -> Any:
        if not self.should_render_keypath(keypath):
            return value

        return self.render_value(value, keypath)

    def render_value(
        self, value: Any, keypath: Optional[Keypath] = None
    ) -> Any:
        # keypath is ignored.
        # if it wasn't read as a string, ignore it
        if not isinstance(value, str):
            return value
        try:
            with catch_jinja():
                return get_rendered(value, self.context, native=True)
        except CompilationException as exc:
            msg = f'Could not render {value}: {exc.msg}'
            raise CompilationException(msg) from exc

    def render_data(
        self, data: Dict[str, Any]
    ) -> Dict[str, Any]:
        try:
            return deep_map(self.render_entry, data)
        except RecursionException:
            raise DbtProjectError(
                f'Cycle detected: {self.name} input has a reference to itself',
                project=data
            )


def _list_if_none(value):
    if value is None:
        value = []
    return value


def _dict_if_none(value):
    if value is None:
        value = {}
    return value


def _list_if_none_or_string(value):
    value = _list_if_none(value)
    if isinstance(value, str):
        return [value]
    return value


class ProjectPostprocessor(Dict[Keypath, Callable[[Any], Any]]):
    def __init__(self):
        super().__init__()

        self[('on-run-start',)] = _list_if_none_or_string
        self[('on-run-end',)] = _list_if_none_or_string

        for k in ('models', 'seeds', 'snapshots'):
            self[(k,)] = _dict_if_none
            self[(k, 'vars')] = _dict_if_none
            self[(k, 'pre-hook')] = _list_if_none_or_string
            self[(k, 'post-hook')] = _list_if_none_or_string
        self[('seeds', 'column_types')] = _dict_if_none

    def postprocess(self, value: Any, key: Keypath) -> Any:
        if key in self:
            handler = self[key]
            return handler(value)

        return value


class DbtProjectYamlRenderer(BaseRenderer):
    _KEYPATH_HANDLERS = ProjectPostprocessor()

    @property
    def name(self):
        return 'Project config'

    def get_package_renderer(self) -> BaseRenderer:
        return PackageRenderer(self.context)

    def get_selector_renderer(self) -> BaseRenderer:
        return SelectorRenderer(self.context)

    def render_project(
        self,
        project: Dict[str, Any],
        project_root: str,
    ) -> Dict[str, Any]:
        """Render the project and insert the project root after rendering."""
        rendered_project = self.render_data(project)
        rendered_project['project-root'] = project_root
        return rendered_project

    def render_packages(self, packages: Dict[str, Any]):
        """Render the given packages dict"""
        package_renderer = self.get_package_renderer()
        return package_renderer.render_data(packages)

    def render_selectors(self, selectors: Dict[str, Any]):
        selector_renderer = self.get_selector_renderer()
        return selector_renderer.render_data(selectors)

    def render_entry(self, value: Any, keypath: Keypath) -> Any:
        result = super().render_entry(value, keypath)
        return self._KEYPATH_HANDLERS.postprocess(result, keypath)

    def should_render_keypath(self, keypath: Keypath) -> bool:
        if not keypath:
            return True

        first = keypath[0]
        # run hooks are not rendered
        if first in {'on-run-start', 'on-run-end', 'query-comment'}:
            return False

        # don't render vars blocks until runtime
        if first == 'vars':
            return False

        if first in {'models', 'seeds', 'snapshots'}:
            keypath_parts = {
                (k.lstrip('+') if isinstance(k, str) else k)
                for k in keypath
            }
            # model-level hooks
            if 'pre-hook' in keypath_parts or 'post-hook' in keypath_parts:
                return False

        return True


class ProfileRenderer(BaseRenderer):
    @property
    def name(self):
        return 'Profile'


class SchemaYamlRenderer(BaseRenderer):
    DOCUMENTABLE_NODES = frozenset(
        n.pluralize() for n in NodeType.documentable()
    )

    @property
    def name(self):
        return 'Rendering yaml'

    def _is_norender_key(self, keypath: Keypath) -> bool:
        """
        models:
            - name: blah
            - description: blah
              tests: ...
            - columns:
              - name:
              - description: blah
                tests: ...

        Return True if it's tests or description - those aren't rendered
        """
        if len(keypath) >= 2 and keypath[1] in ('tests', 'description'):
            return True

        if (
            len(keypath) >= 4 and
            keypath[1] == 'columns' and
            keypath[3] in ('tests', 'description')
        ):
            return True

        return False

    # don't render descriptions or test keyword arguments
    def should_render_keypath(self, keypath: Keypath) -> bool:
        if len(keypath) < 2:
            return True

        if keypath[0] not in self.DOCUMENTABLE_NODES:
            return True

        if len(keypath) < 3:
            return True

        if keypath[0] == NodeType.Source.pluralize():
            if keypath[2] == 'description':
                return False
            if keypath[2] == 'tables':
                if self._is_norender_key(keypath[3:]):
                    return False
        elif keypath[0] == NodeType.Macro.pluralize():
            if keypath[2] == 'arguments':
                if self._is_norender_key(keypath[3:]):
                    return False
            elif self._is_norender_key(keypath[1:]):
                return False
        else:  # keypath[0] in self.DOCUMENTABLE_NODES:
            if self._is_norender_key(keypath[1:]):
                return False
        return True


class PackageRenderer(BaseRenderer):
    @property
    def name(self):
        return 'Packages config'


class SelectorRenderer(BaseRenderer):
    @property
    def name(self):
        return 'Selector config'
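The renderers above all hinge on walking a nested dict with keypaths and deciding, per leaf, whether to render. This self-contained sketch shows that walk; `deep_map_demo` is a simplified stand-in for `dbt.utils.deep_map`, and upper-casing stands in for Jinja rendering:

# illustrative sketch, not part of the diff
from typing import Any, Tuple, Union

Keypath = Tuple[Union[str, int], ...]


def deep_map_demo(func, value: Any, keypath: Keypath = ()) -> Any:
    # simplified stand-in for dbt.utils.deep_map: walk dicts and lists,
    # calling func(leaf, keypath) on every leaf value
    if isinstance(value, dict):
        return {
            k: deep_map_demo(func, v, keypath + (k,))
            for k, v in value.items()
        }
    if isinstance(value, list):
        return [
            deep_map_demo(func, v, keypath + (i,))
            for i, v in enumerate(value)
        ]
    return func(value, keypath)


def render_entry(value: Any, keypath: Keypath) -> Any:
    # mirror of should_render_keypath: leave hooks and vars untouched
    if keypath and keypath[0] in {'on-run-start', 'on-run-end', 'vars'}:
        return value
    # upper-casing stands in for Jinja rendering
    return value.upper() if isinstance(value, str) else value


data = {'vars': {'x': 'keep me'}, 'models': {'demo': {'+alias': 'shout'}}}
print(deep_map_demo(render_entry, data))
# {'vars': {'x': 'keep me'}, 'models': {'demo': {'+alias': 'SHOUT'}}}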
core/dbt/config/runtime.py (new file, 565 lines)
@@ -0,0 +1,565 @@
import itertools
import os
from copy import deepcopy
from dataclasses import dataclass, fields
from pathlib import Path
from typing import (
    Dict, Any, Optional, Mapping, Iterator, Iterable, Tuple, List, MutableSet,
    Type
)

from .profile import Profile
from .project import Project
from .renderer import DbtProjectYamlRenderer, ProfileRenderer
from .utils import parse_cli_vars
from dbt import tracking
from dbt.adapters.factory import get_relation_class_by_name, get_include_paths
from dbt.helper_types import FQNPath, PathSet
from dbt.context.base import generate_base_context
from dbt.context.target import generate_target_context
from dbt.contracts.connection import AdapterRequiredConfig, Credentials
from dbt.contracts.graph.manifest import ManifestMetadata
from dbt.contracts.relation import ComponentName
from dbt.logger import GLOBAL_LOGGER as logger
from dbt.ui import warning_tag

from dbt.contracts.project import Configuration, UserConfig
from dbt.exceptions import (
    RuntimeException,
    DbtProfileError,
    DbtProjectError,
    validator_error_message,
    warn_or_error,
    raise_compiler_error
)

from hologram import ValidationError


def _project_quoting_dict(
    proj: Project, profile: Profile
) -> Dict[ComponentName, bool]:
    src: Dict[str, Any] = profile.credentials.translate_aliases(proj.quoting)
    result: Dict[ComponentName, bool] = {}
    for key in ComponentName:
        if key in src:
            value = src[key]
            if isinstance(value, bool):
                result[key] = value
    return result


@dataclass
class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
    args: Any
    profile_name: str
    cli_vars: Dict[str, Any]
    dependencies: Optional[Mapping[str, 'RuntimeConfig']] = None

    def __post_init__(self):
        self.validate()

    @classmethod
    def from_parts(
        cls,
        project: Project,
        profile: Profile,
        args: Any,
        dependencies: Optional[Mapping[str, 'RuntimeConfig']] = None,
    ) -> 'RuntimeConfig':
        """Instantiate a RuntimeConfig from its components.

        :param profile: A parsed dbt Profile.
        :param project: A parsed dbt Project.
        :param args: The parsed command-line arguments.
        :returns RuntimeConfig: The new configuration.
        """
        quoting: Dict[str, Any] = (
            get_relation_class_by_name(profile.credentials.type)
            .get_default_quote_policy()
            .replace_dict(_project_quoting_dict(project, profile))
        ).to_dict()

        cli_vars: Dict[str, Any] = parse_cli_vars(getattr(args, 'vars', '{}'))

        return cls(
            project_name=project.project_name,
            version=project.version,
            project_root=project.project_root,
            source_paths=project.source_paths,
            macro_paths=project.macro_paths,
            data_paths=project.data_paths,
            test_paths=project.test_paths,
            analysis_paths=project.analysis_paths,
            docs_paths=project.docs_paths,
            asset_paths=project.asset_paths,
            target_path=project.target_path,
            snapshot_paths=project.snapshot_paths,
            clean_targets=project.clean_targets,
            log_path=project.log_path,
            modules_path=project.modules_path,
            quoting=quoting,
            models=project.models,
            on_run_start=project.on_run_start,
            on_run_end=project.on_run_end,
            seeds=project.seeds,
            snapshots=project.snapshots,
            dbt_version=project.dbt_version,
            packages=project.packages,
            manifest_selectors=project.manifest_selectors,
            selectors=project.selectors,
            query_comment=project.query_comment,
            sources=project.sources,
            vars=project.vars,
            config_version=project.config_version,
            unrendered=project.unrendered,
            profile_name=profile.profile_name,
            target_name=profile.target_name,
            config=profile.config,
            threads=profile.threads,
            credentials=profile.credentials,
            args=args,
            cli_vars=cli_vars,
            dependencies=dependencies,
        )

    def new_project(self, project_root: str) -> 'RuntimeConfig':
        """Given a new project root, read in its project dictionary, supply the
        existing project's profile info, and create a new project file.

        :param project_root: A filepath to a dbt project.
        :raises DbtProfileError: If the profile is invalid.
        :raises DbtProjectError: If project is missing or invalid.
        :returns: The new configuration.
        """
        # copy profile
        profile = Profile(**self.to_profile_info())
        profile.validate()

        # load the new project and its packages. Don't pass cli variables.
        renderer = DbtProjectYamlRenderer(generate_target_context(profile, {}))

        project = Project.from_project_root(
            project_root,
            renderer,
            verify_version=getattr(self.args, 'version_check', False),
        )

        cfg = self.from_parts(
            project=project,
            profile=profile,
            args=deepcopy(self.args),
        )
        # force our quoting back onto the new project.
        cfg.quoting = deepcopy(self.quoting)
        return cfg

    def serialize(self) -> Dict[str, Any]:
        """Serialize the full configuration to a single dictionary. For any
        instance that has passed validate() (which happens in __init__), it
        matches the Configuration contract.

        Note that args are not serialized.

        :returns dict: The serialized configuration.
        """
        result = self.to_project_config(with_packages=True)
        result.update(self.to_profile_info(serialize_credentials=True))
        result['cli_vars'] = deepcopy(self.cli_vars)
        return result

    def validate(self):
        """Validate the configuration against its contract.

        :raises DbtProjectError: If the configuration fails validation.
        """
        try:
            Configuration.from_dict(self.serialize())
        except ValidationError as e:
            raise DbtProjectError(validator_error_message(e)) from e

    @classmethod
    def _get_rendered_profile(
        cls,
        args: Any,
        profile_renderer: ProfileRenderer,
        profile_name: Optional[str],
    ) -> Profile:
        return Profile.render_from_args(
            args, profile_renderer, profile_name
        )

    @classmethod
    def collect_parts(
        cls: Type['RuntimeConfig'], args: Any
    ) -> Tuple[Project, Profile]:
        # profile_name from the project
        project_root = args.project_dir if args.project_dir else os.getcwd()
        version_check = getattr(args, 'version_check', False)
        partial = Project.partial_load(
            project_root,
            verify_version=version_check
        )

        # build the profile using the base renderer and the one fact we know
        cli_vars: Dict[str, Any] = parse_cli_vars(getattr(args, 'vars', '{}'))
        profile_renderer = ProfileRenderer(generate_base_context(cli_vars))
        profile_name = partial.render_profile_name(profile_renderer)

        profile = cls._get_rendered_profile(
            args, profile_renderer, profile_name
        )

        # get a new renderer using our target information and render the
        # project
        ctx = generate_target_context(profile, cli_vars)
        project_renderer = DbtProjectYamlRenderer(ctx)
        project = partial.render(project_renderer)
        return (project, profile)

    @classmethod
    def from_args(cls, args: Any) -> 'RuntimeConfig':
        """Given arguments, read in dbt_project.yml from the current directory,
        read in packages.yml if it exists, and use them to find the profile to
        load.

        :param args: The arguments as parsed from the cli.
        :raises DbtProjectError: If the project is invalid or missing.
        :raises DbtProfileError: If the profile is invalid or missing.
        :raises ValidationException: If the cli variables are invalid.
        """
        project, profile = cls.collect_parts(args)

        return cls.from_parts(
            project=project,
            profile=profile,
            args=args,
        )

    def get_metadata(self) -> ManifestMetadata:
        return ManifestMetadata(
            project_id=self.hashed_name(),
            adapter_type=self.credentials.type
        )

    def _get_v2_config_paths(
        self,
        config,
        path: FQNPath,
        paths: MutableSet[FQNPath],
    ) -> PathSet:
        for key, value in config.items():
            if isinstance(value, dict) and not key.startswith('+'):
                self._get_v2_config_paths(value, path + (key,), paths)
            else:
                paths.add(path)
        return frozenset(paths)

    def _get_config_paths(
        self,
        config: Dict[str, Any],
        path: FQNPath = (),
        paths: Optional[MutableSet[FQNPath]] = None,
    ) -> PathSet:
        if paths is None:
            paths = set()

        for key, value in config.items():
            if isinstance(value, dict) and not key.startswith('+'):
                self._get_v2_config_paths(value, path + (key,), paths)
            else:
                paths.add(path)
        return frozenset(paths)

    def get_resource_config_paths(self) -> Dict[str, PathSet]:
        """Return a dictionary with 'seeds' and 'models' keys whose values are
        lists of lists of strings, where each inner list of strings represents
        a configured path in the resource.
        """
        return {
            'models': self._get_config_paths(self.models),
            'seeds': self._get_config_paths(self.seeds),
            'snapshots': self._get_config_paths(self.snapshots),
            'sources': self._get_config_paths(self.sources),
        }

    def get_unused_resource_config_paths(
        self,
        resource_fqns: Mapping[str, PathSet],
        disabled: PathSet,
    ) -> List[FQNPath]:
        """Return a list of lists of strings, where each inner list of strings
        represents a type + FQN path of a resource configuration that is not
        used.
        """
        disabled_fqns = frozenset(tuple(fqn) for fqn in disabled)
        resource_config_paths = self.get_resource_config_paths()
        unused_resource_config_paths = []
        for resource_type, config_paths in resource_config_paths.items():
            used_fqns = resource_fqns.get(resource_type, frozenset())
            fqns = used_fqns | disabled_fqns

            for config_path in config_paths:
                if not _is_config_used(config_path, fqns):
                    unused_resource_config_paths.append(
                        (resource_type,) + config_path
                    )
        return unused_resource_config_paths

    def warn_for_unused_resource_config_paths(
        self,
        resource_fqns: Mapping[str, PathSet],
        disabled: PathSet,
    ) -> None:
        unused = self.get_unused_resource_config_paths(resource_fqns, disabled)
        if len(unused) == 0:
            return

        msg = UNUSED_RESOURCE_CONFIGURATION_PATH_MESSAGE.format(
            len(unused),
            '\n'.join('- {}'.format('.'.join(u)) for u in unused)
        )

        warn_or_error(msg, log_fmt=warning_tag('{}'))

    def load_dependencies(self) -> Mapping[str, 'RuntimeConfig']:
        if self.dependencies is None:
            all_projects = {self.project_name: self}
            internal_packages = get_include_paths(self.credentials.type)
            project_paths = itertools.chain(
                internal_packages,
                self._get_project_directories()
            )
            for project_name, project in self.load_projects(project_paths):
                if project_name in all_projects:
                    raise_compiler_error(
                        f'dbt found more than one package with the name '
                        f'"{project_name}" included in this project. Package '
                        f'names must be unique in a project. Please rename '
                        f'one of these packages.'
                    )
                all_projects[project_name] = project
            self.dependencies = all_projects
        return self.dependencies

    def clear_dependencies(self):
        self.dependencies = None

    def load_projects(
        self, paths: Iterable[Path]
    ) -> Iterator[Tuple[str, 'RuntimeConfig']]:
        for path in paths:
            try:
                project = self.new_project(str(path))
            except DbtProjectError as e:
                raise DbtProjectError(
                    f'Failed to read package: {e}',
                    result_type='invalid_project',
                    path=path,
                ) from e
            else:
                yield project.project_name, project

    def _get_project_directories(self) -> Iterator[Path]:
        root = Path(self.project_root) / self.modules_path

        if root.exists():
            for path in root.iterdir():
                if path.is_dir() and not path.name.startswith('__'):
                    yield path


class UnsetCredentials(Credentials):
    def __init__(self):
        super().__init__('', '')

    @property
    def type(self):
        return None

    def connection_info(self, *args, **kwargs):
        return {}

    def _connection_keys(self):
        return ()

class UnsetConfig(UserConfig):
    def __getattribute__(self, name):
        if name in {f.name for f in fields(UserConfig)}:
            raise AttributeError(
                f"'UnsetConfig' object has no attribute {name}"
            )
        # fall back to normal lookup so non-field attributes (methods like
        # to_dict) still resolve instead of implicitly returning None
        return super().__getattribute__(name)

    def to_dict(self):
        return {}


class UnsetProfile(Profile):
    def __init__(self):
        self.credentials = UnsetCredentials()
        self.config = UnsetConfig()
        self.profile_name = ''
        self.target_name = ''
        self.threads = -1

    def to_target_dict(self):
        return {}

    def __getattribute__(self, name):
        if name in {'profile_name', 'target_name', 'threads'}:
            raise RuntimeException(
                f'Error: disallowed attribute "{name}" - no profile!'
            )

        return Profile.__getattribute__(self, name)


@dataclass
class UnsetProfileConfig(RuntimeConfig):
    """This class acts a lot _like_ a RuntimeConfig, except if your profile is
    missing, any access to profile members results in an exception.
    """

    def __post_init__(self):
        # instead of futzing with InitVar overrides or rewriting __init__, just
        # `del` the attrs we don't want users touching.
        del self.profile_name
        del self.target_name
        # don't call super().__post_init__(), as that calls validate(), and
        # this object isn't very valid

    def __getattribute__(self, name):
        # Override __getattribute__ to check that the attribute isn't 'banned'.
        if name in {'profile_name', 'target_name'}:
            raise RuntimeException(
                f'Error: disallowed attribute "{name}" - no profile!'
            )

        # avoid every attribute access triggering infinite recursion
        return RuntimeConfig.__getattribute__(self, name)

    def to_target_dict(self):
        # re-override the poisoned profile behavior
        return {}

    @classmethod
    def from_parts(
        cls,
        project: Project,
        profile: Profile,
        args: Any,
        dependencies: Optional[Mapping[str, 'RuntimeConfig']] = None,
    ) -> 'RuntimeConfig':
        """Instantiate a RuntimeConfig from its components.

        :param profile: Ignored.
        :param project: A parsed dbt Project.
        :param args: The parsed command-line arguments.
        :returns RuntimeConfig: The new configuration.
        """
        cli_vars: Dict[str, Any] = parse_cli_vars(getattr(args, 'vars', '{}'))

        return cls(
            project_name=project.project_name,
            version=project.version,
            project_root=project.project_root,
            source_paths=project.source_paths,
            macro_paths=project.macro_paths,
            data_paths=project.data_paths,
            test_paths=project.test_paths,
            analysis_paths=project.analysis_paths,
            docs_paths=project.docs_paths,
            asset_paths=project.asset_paths,
            target_path=project.target_path,
            snapshot_paths=project.snapshot_paths,
            clean_targets=project.clean_targets,
            log_path=project.log_path,
            modules_path=project.modules_path,
            quoting=project.quoting,  # we never use this anyway.
            models=project.models,
            on_run_start=project.on_run_start,
            on_run_end=project.on_run_end,
            seeds=project.seeds,
            snapshots=project.snapshots,
            dbt_version=project.dbt_version,
            packages=project.packages,
            manifest_selectors=project.manifest_selectors,
            selectors=project.selectors,
            query_comment=project.query_comment,
            sources=project.sources,
            vars=project.vars,
            config_version=project.config_version,
            unrendered=project.unrendered,
            profile_name='',
            target_name='',
            config=UnsetConfig(),
            threads=getattr(args, 'threads', 1),
            credentials=UnsetCredentials(),
            args=args,
            cli_vars=cli_vars,
            dependencies=dependencies,
        )

    @classmethod
    def _get_rendered_profile(
        cls,
        args: Any,
        profile_renderer: ProfileRenderer,
        profile_name: Optional[str],
    ) -> Profile:
        try:
            profile = Profile.render_from_args(
                args, profile_renderer, profile_name
            )
        except (DbtProjectError, DbtProfileError) as exc:
            logger.debug(
                'Profile not loaded due to error: {}', exc, exc_info=True
            )
            logger.info(
                'No profile "{}" found, continuing with no target',
                profile_name
            )
            # return the poisoned form
            profile = UnsetProfile()
            # disable anonymous usage statistics
            tracking.disable_tracking()
        return profile

    @classmethod
    def from_args(cls: Type[RuntimeConfig], args: Any) -> 'RuntimeConfig':
        """Given arguments, read in dbt_project.yml from the current directory,
        read in packages.yml if it exists, and use them to find the profile to
        load.

        :param args: The arguments as parsed from the cli.
        :raises DbtProjectError: If the project is invalid or missing.
        :raises DbtProfileError: If the profile is invalid or missing.
        :raises ValidationException: If the cli variables are invalid.
        """
        project, profile = cls.collect_parts(args)
        if not isinstance(profile, UnsetProfile):
            # if it's a real profile, return a real config
            cls = RuntimeConfig

        return cls.from_parts(
            project=project,
            profile=profile,
            args=args
        )


UNUSED_RESOURCE_CONFIGURATION_PATH_MESSAGE = """\
Configuration paths exist in your dbt_project.yml file which do not \
apply to any resources.
There are {} unused configuration paths:
{}
"""


def _is_config_used(path, fqns):
    if fqns:
        for fqn in fqns:
            if len(path) <= len(fqn) and fqn[:len(path)] == path:
                return True
    return False
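For a concrete feel of the prefix matching in `_is_config_used` (the definition is repeated verbatim from above), a config path counts as used when it is a prefix of any known FQN:

# illustrative sketch, not part of the diff
def _is_config_used(path, fqns):
    if fqns:
        for fqn in fqns:
            if len(path) <= len(fqn) and fqn[:len(path)] == path:
                return True
    return False


fqns = {('my_project', 'staging', 'stg_orders')}
print(_is_config_used(('my_project', 'staging'), fqns))  # True: path is a prefix
print(_is_config_used(('my_project', 'marts'), fqns))    # False: unused config path
print(_is_config_used((), set()))                        # False: no fqns at all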
core/dbt/config/selectors.py (new file, 180 lines)
@@ -0,0 +1,180 @@
from pathlib import Path
from typing import Dict, Any
import yaml

from hologram import ValidationError

from .renderer import SelectorRenderer

from dbt.clients.system import (
    load_file_contents,
    path_exists,
    resolve_path_from_base,
)
from dbt.clients.yaml_helper import load_yaml_text
from dbt.contracts.selection import SelectorFile
from dbt.exceptions import DbtSelectorsError, RuntimeException
from dbt.graph import parse_from_selectors_definition, SelectionSpec
from dbt.graph.selector_spec import SelectionCriteria

MALFORMED_SELECTOR_ERROR = """\
The selectors.yml file in this project is malformed. Please double check
the contents of this file and fix any errors before retrying.

You can find more information on the syntax for this file here:
https://docs.getdbt.com/docs/package-management

Validator Error:
{error}
"""


class SelectorConfig(Dict[str, SelectionSpec]):
    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> 'SelectorConfig':
        try:
            selector_file = SelectorFile.from_dict(data)
            selectors = parse_from_selectors_definition(selector_file)
        except ValidationError as exc:
            yaml_sel_cfg = yaml.dump(exc.instance)
            raise DbtSelectorsError(
                f"Could not parse selector file data: \n{yaml_sel_cfg}\n"
                f"Valid root-level selector definitions: "
                f"union, intersection, string, dictionary. No lists. "
                f"\nhttps://docs.getdbt.com/reference/node-selection/"
                f"yaml-selectors",
                result_type='invalid_selector'
            ) from exc
        except RuntimeException as exc:
            raise DbtSelectorsError(
                f'Could not read selector file data: {exc}',
                result_type='invalid_selector',
            ) from exc

        return cls(selectors)

    @classmethod
    def render_from_dict(
        cls,
        data: Dict[str, Any],
        renderer: SelectorRenderer,
    ) -> 'SelectorConfig':
        try:
            rendered = renderer.render_data(data)
        except (ValidationError, RuntimeException) as exc:
            raise DbtSelectorsError(
                f'Could not render selector data: {exc}',
                result_type='invalid_selector',
            ) from exc
        return cls.from_dict(rendered)

    @classmethod
    def from_path(
        cls, path: Path, renderer: SelectorRenderer,
    ) -> 'SelectorConfig':
        try:
            data = load_yaml_text(load_file_contents(str(path)))
        except (ValidationError, RuntimeException) as exc:
            raise DbtSelectorsError(
                f'Could not read selector file: {exc}',
                result_type='invalid_selector',
                path=path,
            ) from exc

        try:
            return cls.render_from_dict(data, renderer)
        except DbtSelectorsError as exc:
            exc.path = path
            raise


def selector_data_from_root(project_root: str) -> Dict[str, Any]:
    selector_filepath = resolve_path_from_base(
        'selectors.yml', project_root
    )

    if path_exists(selector_filepath):
        selectors_dict = load_yaml_text(load_file_contents(selector_filepath))
    else:
        selectors_dict = None
    return selectors_dict


def selector_config_from_data(
    selectors_data: Dict[str, Any]
) -> SelectorConfig:
    if not selectors_data:
        selectors_data = {'selectors': []}

    try:
        selectors = SelectorConfig.from_dict(selectors_data)
    except ValidationError as e:
        raise DbtSelectorsError(
            MALFORMED_SELECTOR_ERROR.format(error=str(e.message)),
            result_type='invalid_selector',
        ) from e
    return selectors


# These are utilities to clean up the dictionary created from
# selectors.yml by turning the cli-string format entries into
# normalized dictionary entries. It parallels the flow in
# dbt/graph/cli.py. If changes are made there, it might
# be necessary to make changes here. Ideally it would be
# good to combine the two flows into one at some point.
class SelectorDict:

    @classmethod
    def parse_dict_definition(cls, definition):
        key = list(definition)[0]
        value = definition[key]
        if isinstance(value, list):
            new_values = []
            for sel_def in value:
                new_value = cls.parse_from_definition(sel_def)
                new_values.append(new_value)
            value = new_values
        if key == 'exclude':
            definition = {key: value}
        elif len(definition) == 1:
            definition = {'method': key, 'value': value}
        return definition

    @classmethod
    def parse_a_definition(cls, def_type, definition):
        # this definition must be a list
        new_dict = {def_type: []}
        for sel_def in definition[def_type]:
            if isinstance(sel_def, dict):
                sel_def = cls.parse_from_definition(sel_def)
                new_dict[def_type].append(sel_def)
            elif isinstance(sel_def, str):
                sel_def = SelectionCriteria.dict_from_single_spec(sel_def)
                new_dict[def_type].append(sel_def)
            else:
                new_dict[def_type].append(sel_def)
        return new_dict

    @classmethod
    def parse_from_definition(cls, definition):
        if isinstance(definition, str):
            definition = SelectionCriteria.dict_from_single_spec(definition)
        elif 'union' in definition:
            definition = cls.parse_a_definition('union', definition)
        elif 'intersection' in definition:
            definition = cls.parse_a_definition('intersection', definition)
        elif isinstance(definition, dict):
            definition = cls.parse_dict_definition(definition)
        return definition

    # This is the normal entrypoint of this code. Give it the
    # list of selectors generated from the selectors.yml file.
    @classmethod
    def parse_from_selectors_list(cls, selectors):
        selector_dict = {}
        for selector in selectors:
            sel_name = selector['name']
            selector_dict[sel_name] = selector
            definition = cls.parse_from_definition(selector['definition'])
            selector_dict[sel_name]['definition'] = definition
        return selector_dict
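A sketch of the normalization `parse_from_selectors_list` performs on the simplest (plain string) case. `dict_from_single_spec` is a toy stub here; the real parser in dbt.graph.selector_spec understands the full node-selection syntax:

# illustrative sketch, not part of the diff
def dict_from_single_spec(spec: str):
    # toy stub: the real SelectionCriteria.dict_from_single_spec parses the
    # full node-selection syntax
    if ':' in spec:
        method, value = spec.split(':', 1)
        return {'method': method, 'value': value}
    return {'method': 'fqn', 'value': spec}


selectors = [{'name': 'nightly', 'definition': 'tag:nightly'}]
# mirrors parse_from_selectors_list for the plain-string case
parsed = {
    sel['name']: dict(sel, definition=dict_from_single_spec(sel['definition']))
    for sel in selectors
}
print(parsed['nightly']['definition'])  # {'method': 'tag', 'value': 'nightly'}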
core/dbt/config/utils.py (new file, 23 lines)
@@ -0,0 +1,23 @@
from typing import Dict, Any

from dbt.clients import yaml_helper
from dbt.exceptions import raise_compiler_error, ValidationException
from dbt.logger import GLOBAL_LOGGER as logger


def parse_cli_vars(var_string: str) -> Dict[str, Any]:
    try:
        cli_vars = yaml_helper.load_yaml_text(var_string)
        var_type = type(cli_vars)
        if var_type is dict:
            return cli_vars
        else:
            type_name = var_type.__name__
            raise_compiler_error(
                "The --vars argument must be a YAML dictionary, but was "
                "of type '{}'".format(type_name))
    except ValidationException:
        logger.error(
            "The YAML provided in the --vars argument is not valid.\n"
        )
        raise
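The same validation, sketched with plain PyYAML in place of dbt's yaml_helper wrapper and exception types, so the behavior is visible without a dbt install:

# illustrative sketch, not part of the diff
import yaml  # PyYAML


def parse_cli_vars_demo(var_string: str):
    # mirrors parse_cli_vars above, minus dbt's error wrappers
    cli_vars = yaml.safe_load(var_string)
    if not isinstance(cli_vars, dict):
        raise ValueError(
            "The --vars argument must be a YAML dictionary, but was "
            f"of type '{type(cli_vars).__name__}'"
        )
    return cli_vars


print(parse_cli_vars_demo('{key: value, threads: 4}'))
# {'key': 'value', 'threads': 4}
# parse_cli_vars_demo('[not, a, dict]') raises ValueError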
core/dbt/context/base.py (new file, 539 lines)
@@ -0,0 +1,539 @@
import json
import os
from typing import (
    Any, Dict, NoReturn, Optional, Mapping
)

from dbt import flags
from dbt import tracking
from dbt.clients.jinja import undefined_error, get_rendered
from dbt.clients import yaml_helper
from dbt.contracts.graph.compiled import CompiledResource
from dbt.exceptions import raise_compiler_error, MacroReturn
from dbt.logger import GLOBAL_LOGGER as logger
from dbt.version import __version__ as dbt_version

import yaml
# These modules are added to the context. Consider alternative
# approaches which will extend well to potentially many modules
import pytz
import datetime
import re


def get_pytz_module_context() -> Dict[str, Any]:
    context_exports = pytz.__all__  # type: ignore

    return {
        name: getattr(pytz, name) for name in context_exports
    }


def get_datetime_module_context() -> Dict[str, Any]:
    context_exports = [
        'date',
        'datetime',
        'time',
        'timedelta',
        'tzinfo'
    ]

    return {
        name: getattr(datetime, name) for name in context_exports
    }


def get_re_module_context() -> Dict[str, Any]:
    context_exports = re.__all__

    return {
        name: getattr(re, name) for name in context_exports
    }


def get_context_modules() -> Dict[str, Dict[str, Any]]:
    return {
        'pytz': get_pytz_module_context(),
        'datetime': get_datetime_module_context(),
        're': get_re_module_context(),
    }


class ContextMember:
    def __init__(self, value, name=None):
        self.name = name
        self.inner = value

    def key(self, default):
        if self.name is None:
            return default
        return self.name


def contextmember(value):
    if isinstance(value, str):
        return lambda v: ContextMember(v, name=value)
    return ContextMember(value)


def contextproperty(value):
    if isinstance(value, str):
        return lambda v: ContextMember(property(v), name=value)
    return ContextMember(property(value))


class ContextMeta(type):
    def __new__(mcls, name, bases, dct):
        context_members = {}
        context_attrs = {}
        new_dct = {}

        for base in bases:
            context_members.update(getattr(base, '_context_members_', {}))
            context_attrs.update(getattr(base, '_context_attrs_', {}))

        for key, value in dct.items():
            if isinstance(value, ContextMember):
                context_key = value.key(key)
                context_members[context_key] = value.inner
                context_attrs[context_key] = key
                value = value.inner
            new_dct[key] = value
        new_dct['_context_members_'] = context_members
        new_dct['_context_attrs_'] = context_attrs
        return type.__new__(mcls, name, bases, new_dct)


class Var:
    UndefinedVarError = "Required var '{}' not found in config:\nVars "\
        "supplied to {} = {}"
    _VAR_NOTSET = object()

    def __init__(
        self,
        context: Mapping[str, Any],
        cli_vars: Mapping[str, Any],
        node: Optional[CompiledResource] = None
    ) -> None:
        self._context: Mapping[str, Any] = context
        self._cli_vars: Mapping[str, Any] = cli_vars
        self._node: Optional[CompiledResource] = node
        self._merged: Mapping[str, Any] = self._generate_merged()

    def _generate_merged(self) -> Mapping[str, Any]:
        return self._cli_vars

    @property
    def node_name(self):
        if self._node is not None:
            return self._node.name
        else:
            return '<Configuration>'

    def get_missing_var(self, var_name):
        dct = {k: self._merged[k] for k in self._merged}
        pretty_vars = json.dumps(dct, sort_keys=True, indent=4)
        msg = self.UndefinedVarError.format(
            var_name, self.node_name, pretty_vars
        )
        raise_compiler_error(msg, self._node)

    def has_var(self, var_name: str):
        return var_name in self._merged

    def get_rendered_var(self, var_name):
        raw = self._merged[var_name]
        # if bool/int/float/etc are passed in, don't compile anything
        if not isinstance(raw, str):
            return raw

        return get_rendered(raw, self._context)

    def __call__(self, var_name, default=_VAR_NOTSET):
        if self.has_var(var_name):
            return self.get_rendered_var(var_name)
        elif default is not self._VAR_NOTSET:
            return default
        else:
            return self.get_missing_var(var_name)


class BaseContext(metaclass=ContextMeta):
    def __init__(self, cli_vars):
        self._ctx = {}
        self.cli_vars = cli_vars

    def generate_builtins(self):
        builtins: Dict[str, Any] = {}
        for key, value in self._context_members_.items():
            if hasattr(value, '__get__'):
                # handle properties, bound methods, etc
                value = value.__get__(self)
            builtins[key] = value
        return builtins

    def to_dict(self):
        self._ctx['context'] = self._ctx
        builtins = self.generate_builtins()
        self._ctx['builtins'] = builtins
        self._ctx.update(builtins)
        return self._ctx

    @contextproperty
    def dbt_version(self) -> str:
        """The `dbt_version` variable returns the installed version of dbt that
        is currently running. It can be used for debugging or auditing
        purposes.

        > macros/get_version.sql

            {% macro get_version() %}
              {% set msg = "The installed version of dbt is: " ~ dbt_version %}
              {% do log(msg, info=true) %}
            {% endmacro %}

        Example output:

            $ dbt run-operation get_version
            The installed version of dbt is: 0.16.0
        """
        return dbt_version

    @contextproperty
    def var(self) -> Var:
        """Variables can be passed from your `dbt_project.yml` file into models
        during compilation. These variables are useful for configuring packages
        for deployment in multiple environments, or defining values that should
        be used across multiple models within a package.

        To add a variable to a model, use the `var()` function:

        > my_model.sql:

            select * from events where event_type = '{{ var("event_type") }}'

        If you try to run this model without supplying an `event_type`
        variable, you'll receive a compilation error that looks like this:

            Encountered an error:
            ! Compilation error while compiling model package_name.my_model:
            ! Required var 'event_type' not found in config:
            Vars supplied to package_name.my_model = {
            }

        To supply a variable to a given model, add one or more `vars`
        dictionaries to the `models` config in your `dbt_project.yml` file.
        These `vars` are in-scope for all models at or below where they are
        defined, so place them where they make the most sense. Below are three
        different placements of the `vars` dict, all of which will make the
        `my_model` model compile.

        > dbt_project.yml:

            # 1) scoped at the model level
            models:
              package_name:
                my_model:
                  materialized: view
                  vars:
                    event_type: activation
            # 2) scoped at the package level
            models:
              package_name:
                vars:
                  event_type: activation
                my_model:
                  materialized: view
            # 3) scoped globally
            models:
              vars:
                event_type: activation
              package_name:
                my_model:
                  materialized: view

        ## Variable default values

        The `var()` function takes an optional second argument, `default`. If
        this argument is provided, then it will be the default value for the
        variable if one is not explicitly defined.

        > my_model.sql:

            -- Use 'activation' as the event_type if the variable is not
            -- defined.
            select *
            from events
            where event_type = '{{ var("event_type", "activation") }}'
        """
        return Var(self._ctx, self.cli_vars)

    @contextmember
    @staticmethod
    def env_var(var: str, default: Optional[str] = None) -> str:
        """The env_var() function. Return the environment variable named 'var'.
        If there is no such environment variable set, return the default.

        If the default is None, raise an exception for an undefined variable.
        """
        if var in os.environ:
            return os.environ[var]
        elif default is not None:
            return default
        else:
            msg = f"Env var required but not provided: '{var}'"
            undefined_error(msg)

    if os.environ.get('DBT_MACRO_DEBUGGING'):
        @contextmember
        @staticmethod
        def debug():
            """Enter a debugger at this line in the compiled jinja code."""
            import sys
            import ipdb  # type: ignore
            frame = sys._getframe(3)
            ipdb.set_trace(frame)
            return ''

    @contextmember('return')
    @staticmethod
    def _return(data: Any) -> NoReturn:
        """The `return` function can be used in macros to return data to the
        caller. The type of the data (`dict`, `list`, `int`, etc) will be
        preserved through the return call.

        :param data: The data to return to the caller


        > macros/example.sql:

            {% macro get_data() %}
              {{ return([1,2,3]) }}
            {% endmacro %}

        > models/my_model.sql:

            select
              -- get_data() returns a list!
              {% for i in get_data() %}
                {{ i }}
                {% if not loop.last %},{% endif %}
              {% endfor %}

        """
        raise MacroReturn(data)

    @contextmember
    @staticmethod
    def fromjson(string: str, default: Any = None) -> Any:
        """The `fromjson` context method can be used to deserialize a json
        string into a Python object primitive, eg. a `dict` or `list`.

        :param string: The json string to deserialize
        :param default: A default value to return if the `string` argument
            cannot be deserialized (optional)

        Usage:

            {% set my_json_str = '{"abc": 123}' %}
            {% set my_dict = fromjson(my_json_str) %}
            {% do log(my_dict['abc']) %}
        """
        try:
            return json.loads(string)
        except ValueError:
            return default

    @contextmember
    @staticmethod
    def tojson(
        value: Any, default: Any = None, sort_keys: bool = False
    ) -> Any:
        """The `tojson` context method can be used to serialize a Python
        object primitive, eg. a `dict` or `list` to a json string.

        :param value: The value to serialize to json
        :param default: A default value to return if the `value` argument
            cannot be serialized
        :param sort_keys: If True, sort the keys.


        Usage:

            {% set my_dict = {"abc": 123} %}
            {% set my_json_string = tojson(my_dict) %}
            {% do log(my_json_string) %}
        """
        try:
            return json.dumps(value, sort_keys=sort_keys)
        except ValueError:
            return default

    @contextmember
    @staticmethod
    def fromyaml(value: str, default: Any = None) -> Any:
        """The fromyaml context method can be used to deserialize a yaml string
        into a Python object primitive, eg. a `dict` or `list`.

        :param value: The yaml string to deserialize
        :param default: A default value to return if the `string` argument
            cannot be deserialized (optional)

        Usage:

            {% set my_yml_str -%}
            dogs:
              - good
              - bad
            {%- endset %}
            {% set my_dict = fromyaml(my_yml_str) %}
            {% do log(my_dict['dogs'], info=true) %}
            -- ["good", "bad"]
            {% do my_dict['dogs'].pop() %}
            {% do log(my_dict['dogs'], info=true) %}
            -- ["good"]
        """
        try:
            return yaml_helper.safe_load(value)
        except (AttributeError, ValueError, yaml.YAMLError):
            return default

    # safe_dump defaults to sort_keys=True, but we act like json.dumps (the
    # opposite)
    @contextmember
    @staticmethod
    def toyaml(
        value: Any, default: Optional[str] = None, sort_keys: bool = False
    ) -> Optional[str]:
"""The `tojson` context method can be used to serialize a Python
|
||||
object primitive, eg. a `dict` or `list` to a yaml string.
|
||||
|
||||
:param value: The value serialize to yaml
        :param default: A default value to return if the `value` argument
            cannot be serialized
        :param sort_keys: If True, sort the keys.


        Usage:

            {% set my_dict = {"abc": 123} %}
            {% set my_yaml_string = toyaml(my_dict) %}
            {% do log(my_yaml_string) %}
        """
        try:
            return yaml.safe_dump(data=value, sort_keys=sort_keys)
        except (ValueError, yaml.YAMLError):
            return default

    @contextmember
    @staticmethod
    def log(msg: str, info: bool = False) -> str:
        """Logs a line to either the log file or stdout.

        :param msg: The message to log
        :param info: If `False`, write to the log file. If `True`, write to
            both the log file and stdout.

        > macros/my_log_macro.sql

            {% macro some_macro(arg1, arg2) %}
              {{ log("Running some_macro: " ~ arg1 ~ ", " ~ arg2) }}
            {% endmacro %}
        """
        if info:
            logger.info(msg)
        else:
            logger.debug(msg)
        return ''

    @contextproperty
    def run_started_at(self) -> Optional[datetime.datetime]:
        """`run_started_at` outputs the timestamp that this run started, e.g.
        `2017-04-21 01:23:45.678`. The `run_started_at` variable is a Python
        `datetime` object. As of 0.9.1, the timezone of this variable defaults
        to UTC.

        > run_started_at_example.sql

            select
                '{{ run_started_at.strftime("%Y-%m-%d") }}' as date_day
            from ...

        To modify the timezone of this variable, use the `pytz` module:

        > run_started_at_est.sql

            {% set est = modules.pytz.timezone("America/New_York") %}
            select
                '{{ run_started_at.astimezone(est) }}' as run_started_est
            from ...
        """
        if tracking.active_user is not None:
            return tracking.active_user.run_started_at
        else:
            return None

    @contextproperty
    def invocation_id(self) -> Optional[str]:
        """invocation_id outputs a UUID generated for this dbt run (useful for
        auditing)
        """
        if tracking.active_user is not None:
            return tracking.active_user.invocation_id
        else:
            return None

    @contextproperty
    def modules(self) -> Dict[str, Any]:
        """The `modules` variable in the Jinja context contains useful Python
        modules for operating on data.

        # datetime

        This variable is a pointer to the Python datetime module.

        Usage:

            {% set dt = modules.datetime.datetime.now() %}

        # pytz

        This variable is a pointer to the Python pytz module.

        Usage:

            {% set dt = modules.datetime.datetime(2002, 10, 27, 6, 0, 0) %}
            {% set dt_local = modules.pytz.timezone('US/Eastern').localize(dt) %}
            {{ dt_local }}
        """  # noqa
        return get_context_modules()

    @contextproperty
    def flags(self) -> Any:
        """The `flags` variable contains true/false values for flags provided
        on the command line.

        > flags.sql:

            {% if flags.FULL_REFRESH %}
            drop table ...
            {% else %}
            -- no-op
            {% endif %}

        The list of valid flags is:

        - `flags.STRICT_MODE`: True if `--strict` (or `-S`) was provided on the
          command line
        - `flags.FULL_REFRESH`: True if `--full-refresh` was provided on the
          command line
        - `flags.NON_DESTRUCTIVE`: True if `--non-destructive` was provided on
          the command line
        """
        return flags


def generate_base_context(cli_vars: Dict[str, Any]) -> Dict[str, Any]:
    ctx = BaseContext(cli_vars)
    return ctx.to_dict()
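Assuming the `ContextMeta`, `contextmember`, and `contextproperty` definitions above are in scope, this toy class shows how decorated members are collected into `_context_members_` under their context-facing names, including the `'return'` rename:

# illustrative sketch, not part of the diff; assumes ContextMeta,
# contextmember, and contextproperty from base.py are importable
class ToyContext(metaclass=ContextMeta):
    @contextproperty
    def greeting(self) -> str:
        return 'hello'

    @contextmember('return')
    @staticmethod
    def _return(data):
        # toy body; the real one raises MacroReturn
        raise RuntimeError(f'returned: {data!r}')


# the metaclass stripped the ContextMember wrappers and recorded the
# context-facing names, renaming _return to 'return'
print(sorted(ToyContext._context_members_))  # ['greeting', 'return']
print(ToyContext._context_attrs_['return'])  # '_return'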
core/dbt/context/configured.py (new file, 82 lines)
@@ -0,0 +1,82 @@
from typing import Any, Dict

from dbt.contracts.connection import AdapterRequiredConfig
from dbt.node_types import NodeType
from dbt.utils import MultiDict

from dbt.context.base import contextproperty, Var
from dbt.context.target import TargetContext


class ConfiguredContext(TargetContext):
    config: AdapterRequiredConfig

    def __init__(
        self, config: AdapterRequiredConfig
    ) -> None:
        super().__init__(config, config.cli_vars)

    @contextproperty
    def project_name(self) -> str:
        return self.config.project_name


class FQNLookup:
    def __init__(self, package_name: str):
        self.package_name = package_name
        self.fqn = [package_name]
        self.resource_type = NodeType.Model


class ConfiguredVar(Var):
    def __init__(
        self,
        context: Dict[str, Any],
        config: AdapterRequiredConfig,
        project_name: str,
    ):
        super().__init__(context, config.cli_vars)
        self._config = config
        self._project_name = project_name

    def __call__(self, var_name, default=Var._VAR_NOTSET):
        my_config = self._config.load_dependencies()[self._project_name]

        # cli vars > active project > local project
        if var_name in self._config.cli_vars:
            return self._config.cli_vars[var_name]

        adapter_type = self._config.credentials.type
        lookup = FQNLookup(self._project_name)
        active_vars = self._config.vars.vars_for(lookup, adapter_type)
        all_vars = MultiDict([active_vars])

        if self._config.project_name != my_config.project_name:
            all_vars.add(my_config.vars.vars_for(lookup, adapter_type))

        if var_name in all_vars:
            return all_vars[var_name]

        if default is not Var._VAR_NOTSET:
            return default

        return self.get_missing_var(var_name)


class SchemaYamlContext(ConfiguredContext):
    def __init__(self, config, project_name: str):
        super().__init__(config)
        self._project_name = project_name

    @contextproperty
    def var(self) -> ConfiguredVar:
        return ConfiguredVar(
            self._ctx, self.config, self._project_name
        )


def generate_schema_yml(
    config: AdapterRequiredConfig, project_name: str
) -> Dict[str, Any]:
    ctx = SchemaYamlContext(config, project_name)
    return ctx.to_dict()
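A minimal sketch of the precedence that `ConfiguredVar.__call__` implements (cli vars, then the active project's vars, then the local package's), with plain dicts in place of the config objects and a `_NOTSET` sentinel mirroring `Var._VAR_NOTSET`:

# illustrative sketch, not part of the diff
_NOTSET = object()


def resolve_var(var_name, cli_vars, active_vars, package_vars,
                default=_NOTSET):
    # cli vars > active project > local (package) project
    for scope in (cli_vars, active_vars, package_vars):
        if var_name in scope:
            return scope[var_name]
    if default is not _NOTSET:
        return default
    raise KeyError(f"Required var '{var_name}' not found")


print(resolve_var(
    'event_type',
    cli_vars={'event_type': 'from_cli'},
    active_vars={'event_type': 'from_root_project'},
    package_vars={},
))  # from_cli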
core/dbt/context/context_config.py (new file, 276 lines)
@@ -0,0 +1,276 @@
from abc import abstractmethod
from copy import deepcopy
from dataclasses import dataclass
from typing import List, Iterator, Dict, Any, TypeVar, Generic

from dbt.config import RuntimeConfig, Project, IsFQNResource
from dbt.contracts.graph.model_config import BaseConfig, get_config_for
from dbt.exceptions import InternalException
from dbt.node_types import NodeType
from dbt.utils import fqn_search


@dataclass
class ModelParts(IsFQNResource):
    fqn: List[str]
    resource_type: NodeType
    package_name: str


T = TypeVar('T')  # any old type
C = TypeVar('C', bound=BaseConfig)


class ConfigSource:
    def __init__(self, project):
        self.project = project

    def get_config_dict(self, resource_type: NodeType):
        ...


class UnrenderedConfig(ConfigSource):
    def __init__(self, project: Project):
        self.project = project

    def get_config_dict(self, resource_type: NodeType) -> Dict[str, Any]:
        unrendered = self.project.unrendered.project_dict
        if resource_type == NodeType.Seed:
            model_configs = unrendered.get('seeds')
        elif resource_type == NodeType.Snapshot:
            model_configs = unrendered.get('snapshots')
        elif resource_type == NodeType.Source:
            model_configs = unrendered.get('sources')
        else:
            model_configs = unrendered.get('models')

        if model_configs is None:
            return {}
        else:
            return model_configs


class RenderedConfig(ConfigSource):
    def __init__(self, project: Project):
        self.project = project

    def get_config_dict(self, resource_type: NodeType) -> Dict[str, Any]:
        if resource_type == NodeType.Seed:
            model_configs = self.project.seeds
        elif resource_type == NodeType.Snapshot:
            model_configs = self.project.snapshots
        elif resource_type == NodeType.Source:
            model_configs = self.project.sources
        else:
            model_configs = self.project.models
        return model_configs


class BaseContextConfigGenerator(Generic[T]):
    def __init__(self, active_project: RuntimeConfig):
        self._active_project = active_project

    def get_config_source(self, project: Project) -> ConfigSource:
        return RenderedConfig(project)

    def get_node_project(self, project_name: str):
        if project_name == self._active_project.project_name:
            return self._active_project
        dependencies = self._active_project.load_dependencies()
        if project_name not in dependencies:
            raise InternalException(
                f'Project name {project_name} not found in dependencies '
                f'(found {list(dependencies)})'
            )
        return dependencies[project_name]

    def _project_configs(
        self, project: Project, fqn: List[str], resource_type: NodeType
    ) -> Iterator[Dict[str, Any]]:
        src = self.get_config_source(project)
        model_configs = src.get_config_dict(resource_type)
        for level_config in fqn_search(model_configs, fqn):
            result = {}
            for key, value in level_config.items():
                if key.startswith('+'):
                    result[key[1:]] = deepcopy(value)
                elif not isinstance(value, dict):
                    result[key] = deepcopy(value)

            yield result

    def _active_project_configs(
        self, fqn: List[str], resource_type: NodeType
    ) -> Iterator[Dict[str, Any]]:
        return self._project_configs(self._active_project, fqn, resource_type)

    @abstractmethod
    def _update_from_config(
        self, result: T, partial: Dict[str, Any], validate: bool = False
    ) -> T:
        ...

    @abstractmethod
    def initial_result(self, resource_type: NodeType, base: bool) -> T:
        ...

    def calculate_node_config(
        self,
        config_calls: List[Dict[str, Any]],
        fqn: List[str],
        resource_type: NodeType,
        project_name: str,
        base: bool,
    ) -> BaseConfig:
        own_config = self.get_node_project(project_name)

        result = self.initial_result(resource_type=resource_type, base=base)

        project_configs = self._project_configs(own_config, fqn, resource_type)
        for fqn_config in project_configs:
            result = self._update_from_config(result, fqn_config)

        for config_call in config_calls:
            result = self._update_from_config(result, config_call)

        if own_config.project_name != self._active_project.project_name:
            for fqn_config in self._active_project_configs(fqn, resource_type):
                result = self._update_from_config(result, fqn_config)

        # this is mostly impactful in the snapshot config case
        return result

    @abstractmethod
    def calculate_node_config_dict(
        self,
        config_calls: List[Dict[str, Any]],
        fqn: List[str],
        resource_type: NodeType,
        project_name: str,
        base: bool,
    ) -> Dict[str, Any]:
        ...


class ContextConfigGenerator(BaseContextConfigGenerator[C]):
    def __init__(self, active_project: RuntimeConfig):
        self._active_project = active_project

    def get_config_source(self, project: Project) -> ConfigSource:
        return RenderedConfig(project)

    def initial_result(self, resource_type: NodeType, base: bool) -> C:
        # defaults, own_config, config calls, active_config (if != own_config)
        config_cls = get_config_for(resource_type, base=base)
        # Calculate the defaults. We don't want to validate the defaults,
        # because it might be invalid in the case of required config members
        # (such as on snapshots!)
        result = config_cls.from_dict({}, validate=False)
        return result

    def _update_from_config(
        self, result: C, partial: Dict[str, Any], validate: bool = False
    ) -> C:
        translated = self._active_project.credentials.translate_aliases(
            partial
        )
        return result.update_from(
            translated,
            self._active_project.credentials.type,
            validate=validate
        )

    def calculate_node_config_dict(
        self,
        config_calls: List[Dict[str, Any]],
        fqn: List[str],
        resource_type: NodeType,
        project_name: str,
        base: bool,
    ) -> Dict[str, Any]:
        config = self.calculate_node_config(
            config_calls=config_calls,
            fqn=fqn,
            resource_type=resource_type,
            project_name=project_name,
            base=base,
        )
        finalized = config.finalize_and_validate()
        return finalized.to_dict()


class UnrenderedConfigGenerator(BaseContextConfigGenerator[Dict[str, Any]]):
    def get_config_source(self, project: Project) -> ConfigSource:
        return UnrenderedConfig(project)

    def calculate_node_config_dict(
        self,
        config_calls: List[Dict[str, Any]],
        fqn: List[str],
        resource_type: NodeType,
        project_name: str,
        base: bool,
    ) -> Dict[str, Any]:
        return self.calculate_node_config(
            config_calls=config_calls,
            fqn=fqn,
            resource_type=resource_type,
            project_name=project_name,
            base=base,
        )

    def initial_result(
        self,
        resource_type: NodeType,
        base: bool
    ) -> Dict[str, Any]:
        return {}

    def _update_from_config(
        self,
        result: Dict[str, Any],
        partial: Dict[str, Any],
        validate: bool = False,
    ) -> Dict[str, Any]:
        translated = self._active_project.credentials.translate_aliases(
            partial
        )
        result.update(translated)
        return result


class ContextConfig:
    def __init__(
        self,
        active_project: RuntimeConfig,
        fqn: List[str],
resource_type: NodeType,
|
||||
project_name: str,
|
||||
) -> None:
|
||||
self._config_calls: List[Dict[str, Any]] = []
|
||||
self._active_project = active_project
|
||||
self._fqn = fqn
|
||||
self._resource_type = resource_type
|
||||
self._project_name = project_name
|
||||
|
||||
def update_in_model_config(self, opts: Dict[str, Any]) -> None:
|
||||
self._config_calls.append(opts)
|
||||
|
||||
def build_config_dict(
|
||||
self,
|
||||
base: bool = False,
|
||||
*,
|
||||
rendered: bool = True,
|
||||
) -> Dict[str, Any]:
|
||||
if rendered:
|
||||
src = ContextConfigGenerator(self._active_project)
|
||||
else:
|
||||
src = UnrenderedConfigGenerator(self._active_project)
|
||||
|
||||
return src.calculate_node_config_dict(
|
||||
config_calls=self._config_calls,
|
||||
fqn=self._fqn,
|
||||
resource_type=self._resource_type,
|
||||
project_name=self._project_name,
|
||||
base=base,
|
||||
)
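
Taken together, the precedence is: config class defaults, then dbt_project.yml configs for the node's own project, then in-model config() calls, then the active project's configs when the node lives in a dependency. A minimal usage sketch, not part of the diff (runtime_config and the fqn are hypothetical stand-ins):

    # Hypothetical sketch: collect in-model config() calls, then merge them
    # with dbt_project.yml configs. `runtime_config` stands in for a real
    # RuntimeConfig loaded from a project; names are illustrative only.
    ctx_config = ContextConfig(
        active_project=runtime_config,
        fqn=['my_project', 'staging', 'stg_orders'],
        resource_type=NodeType.Model,
        project_name='my_project',
    )
    # equivalent to {{ config(materialized='table') }} inside the model file
    ctx_config.update_in_model_config({'materialized': 'table'})

    # rendered=True finalizes and validates against the config class;
    # rendered=False returns the raw, unrendered values as a plain dict
    rendered = ctx_config.build_config_dict()
    unrendered = ctx_config.build_config_dict(rendered=False)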

core/dbt/context/docs.py (new file, 80 lines)
@@ -0,0 +1,80 @@
from typing import (
    Any, Dict, Union
)

from dbt.exceptions import (
    doc_invalid_args,
    doc_target_not_found,
)
from dbt.config.runtime import RuntimeConfig
from dbt.contracts.graph.compiled import CompileResultNode
from dbt.contracts.graph.manifest import Manifest
from dbt.contracts.graph.parsed import ParsedMacro

from dbt.context.base import contextmember
from dbt.context.configured import SchemaYamlContext


class DocsRuntimeContext(SchemaYamlContext):
    def __init__(
        self,
        config: RuntimeConfig,
        node: Union[ParsedMacro, CompileResultNode],
        manifest: Manifest,
        current_project: str,
    ) -> None:
        super().__init__(config, current_project)
        self.node = node
        self.manifest = manifest

    @contextmember
    def doc(self, *args: str) -> str:
        """The `doc` function is used to reference docs blocks in schema.yml
        files. It is analogous to the `ref` function. For more information,
        consult the Documentation guide.

        > orders.md:

            {% docs orders %}
            # docs
            - go
            - here
            {% enddocs %}

        > schema.yml:

            version: 2
            models:
              - name: orders
                description: "{{ doc('orders') }}"
        """
        # when you call doc(), this is what happens at runtime
        if len(args) == 1:
            doc_package_name = None
            doc_name = args[0]
        elif len(args) == 2:
            doc_package_name, doc_name = args
        else:
            doc_invalid_args(self.node, args)

        target_doc = self.manifest.resolve_doc(
            doc_name,
            doc_package_name,
            self._project_name,
            self.node.package_name,
        )

        if target_doc is None:
            doc_target_not_found(self.node, doc_name, doc_package_name)

        return target_doc.block_contents


def generate_runtime_docs(
    config: RuntimeConfig,
    target: Any,
    manifest: Manifest,
    current_project: str,
) -> Dict[str, Any]:
    ctx = DocsRuntimeContext(config, target, manifest, current_project)
    return ctx.to_dict()
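
A short sketch of the two call shapes doc() accepts at runtime (config, node, and manifest are assumed to exist; the docs block name is invented):

    ctx = DocsRuntimeContext(config, node, manifest, 'my_project')
    ctx.doc('orders')                  # search packages for a docs block
    ctx.doc('some_package', 'orders')  # restrict the search to one package
    # any other argument count raises via doc_invalid_args(); a missing
    # block raises via doc_target_not_found()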

core/dbt/context/macros.py (new file, 153 lines)
@@ -0,0 +1,153 @@
from typing import (
    Any, Dict, Iterable, Union, Optional, List, Iterator, Mapping, Set
)

from dbt.clients.jinja import MacroGenerator, MacroStack
from dbt.contracts.graph.parsed import ParsedMacro
from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME
from dbt.exceptions import (
    raise_duplicate_macro_name, raise_compiler_error
)


FlatNamespace = Dict[str, MacroGenerator]
NamespaceMember = Union[FlatNamespace, MacroGenerator]
FullNamespace = Dict[str, NamespaceMember]


class MacroNamespace(Mapping):
    def __init__(
        self,
        global_namespace: FlatNamespace,
        local_namespace: FlatNamespace,
        global_project_namespace: FlatNamespace,
        packages: Dict[str, FlatNamespace],
    ):
        self.global_namespace: FlatNamespace = global_namespace
        self.local_namespace: FlatNamespace = local_namespace
        self.packages: Dict[str, FlatNamespace] = packages
        self.global_project_namespace: FlatNamespace = global_project_namespace

    def _search_order(self) -> Iterable[Union[FullNamespace, FlatNamespace]]:
        yield self.local_namespace
        yield self.global_namespace
        yield self.packages
        yield {
            GLOBAL_PROJECT_NAME: self.global_project_namespace,
        }
        yield self.global_project_namespace

    def _keys(self) -> Set[str]:
        keys: Set[str] = set()
        for search in self._search_order():
            keys.update(search)
        return keys

    def __iter__(self) -> Iterator[str]:
        for key in self._keys():
            yield key

    def __len__(self):
        return len(self._keys())

    def __getitem__(self, key: str) -> NamespaceMember:
        for dct in self._search_order():
            if key in dct:
                return dct[key]
        raise KeyError(key)

    def get_from_package(
        self, package_name: Optional[str], name: str
    ) -> Optional[MacroGenerator]:
        pkg: FlatNamespace
        if package_name is None:
            return self.get(name)
        elif package_name == GLOBAL_PROJECT_NAME:
            return self.global_project_namespace.get(name)
        elif package_name in self.packages:
            return self.packages[package_name].get(name)
        else:
            raise_compiler_error(
                f"Could not find package '{package_name}'"
            )


class MacroNamespaceBuilder:
    def __init__(
        self,
        root_package: str,
        search_package: str,
        thread_ctx: MacroStack,
        internal_packages: List[str],
        node: Optional[Any] = None,
    ) -> None:
        self.root_package = root_package
        self.search_package = search_package
        self.internal_package_names = set(internal_packages)
        self.internal_package_names_order = internal_packages
        self.globals: FlatNamespace = {}
        self.locals: FlatNamespace = {}
        self.internal_packages: Dict[str, FlatNamespace] = {}
        self.packages: Dict[str, FlatNamespace] = {}
        self.thread_ctx = thread_ctx
        self.node = node

    def _add_macro_to(
        self,
        hierarchy: Dict[str, FlatNamespace],
        macro: ParsedMacro,
        macro_func: MacroGenerator,
    ):
        if macro.package_name in hierarchy:
            namespace = hierarchy[macro.package_name]
        else:
            namespace = {}
            hierarchy[macro.package_name] = namespace

        if macro.name in namespace:
            raise_duplicate_macro_name(
                macro_func.macro, macro, macro.package_name
            )
        hierarchy[macro.package_name][macro.name] = macro_func

    def add_macro(self, macro: ParsedMacro, ctx: Dict[str, Any]):
        macro_name: str = macro.name

        macro_func: MacroGenerator = MacroGenerator(
            macro, ctx, self.node, self.thread_ctx
        )

        # internal macros (from plugins) will be processed separately from
        # project macros, so store them in a different place
        if macro.package_name in self.internal_package_names:
            self._add_macro_to(self.internal_packages, macro, macro_func)
        else:
            self._add_macro_to(self.packages, macro, macro_func)

            if macro.package_name == self.search_package:
                self.locals[macro_name] = macro_func
            elif macro.package_name == self.root_package:
                self.globals[macro_name] = macro_func

    def add_macros(self, macros: Iterable[ParsedMacro], ctx: Dict[str, Any]):
        for macro in macros:
            self.add_macro(macro, ctx)

    def build_namespace(
        self, macros: Iterable[ParsedMacro], ctx: Dict[str, Any]
    ) -> MacroNamespace:
        self.add_macros(macros, ctx)

        # Iterate in reverse-order and overwrite: the packages that are first
        # in the list are the ones we want to "win".
        global_project_namespace: FlatNamespace = {}
        for pkg in reversed(self.internal_package_names_order):
            if pkg in self.internal_packages:
                global_project_namespace.update(self.internal_packages[pkg])

        return MacroNamespace(
            global_namespace=self.globals,
            local_namespace=self.locals,
            global_project_namespace=global_project_namespace,
            packages=self.packages,
        )
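
The search order above means a bare name resolves against the local package first, then the root project, then installed packages, then the global ('dbt') project. A hedged sketch of the resulting lookups (manifest and ctx are assumed to exist; macro and package names are invented):

    builder = MacroNamespaceBuilder(
        root_package='root_proj',
        search_package='my_pkg',
        thread_ctx=MacroStack(),
        internal_packages=['dbt_postgres', 'dbt'],
    )
    namespace = builder.build_namespace(manifest.macros.values(), ctx)

    namespace['my_macro']        # local, then root, then packages, then dbt
    namespace['dbt']             # the merged internal (global project) names
    namespace.get_from_package(None, 'my_macro')        # same as .get(...)
    namespace.get_from_package('some_pkg', 'my_macro')  # one package only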

core/dbt/context/manifest.py (new file, 66 lines)
@@ -0,0 +1,66 @@
from typing import List

from dbt.clients.jinja import MacroStack
from dbt.contracts.connection import AdapterRequiredConfig
from dbt.contracts.graph.manifest import Manifest


from .configured import ConfiguredContext
from .macros import MacroNamespaceBuilder


class ManifestContext(ConfiguredContext):
    """The Macro context has everything in the target context, plus the macros
    in the manifest.

    The given macros can override any previous context values, which will be
    available as if they were accessed relative to the package name.
    """
    def __init__(
        self,
        config: AdapterRequiredConfig,
        manifest: Manifest,
        search_package: str,
    ) -> None:
        super().__init__(config)
        self.manifest = manifest
        self.search_package = search_package
        self.macro_stack = MacroStack()
        builder = self._get_namespace_builder()
        self.namespace = builder.build_namespace(
            self.manifest.macros.values(),
            self._ctx,
        )

    def _get_namespace_builder(self) -> MacroNamespaceBuilder:
        # avoid an import loop
        from dbt.adapters.factory import get_adapter_package_names
        internal_packages: List[str] = get_adapter_package_names(
            self.config.credentials.type
        )
        return MacroNamespaceBuilder(
            self.config.project_name,
            self.search_package,
            self.macro_stack,
            internal_packages,
            None,
        )

    def to_dict(self):
        dct = super().to_dict()
        dct.update(self.namespace)
        return dct


class QueryHeaderContext(ManifestContext):
    def __init__(
        self, config: AdapterRequiredConfig, manifest: Manifest
    ) -> None:
        super().__init__(config, manifest, config.project_name)


def generate_query_header_context(
    config: AdapterRequiredConfig, manifest: Manifest
):
    ctx = QueryHeaderContext(config, manifest)
    return ctx.to_dict()
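
What this buys downstream: to_dict() layers macro names over the configured context, so a rendered template sees both. A brief, hedged sketch (config and manifest are assumed to be real objects; the keys shown are illustrative):

    ctx = ManifestContext(config, manifest, search_package='my_pkg').to_dict()
    ctx['target']      # e.g. inherited from the configured/target context
    ctx['my_macro']    # a MacroGenerator resolved through the namespace
    ctx['dbt']         # internal macros, addressable as dbt.some_macro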

core/dbt/context/providers.py (new file, 1389 lines)
(diff suppressed because it is too large)

core/dbt/context/target.py (new file, 84 lines)
@@ -0,0 +1,84 @@
from typing import Any, Dict

from dbt.contracts.connection import HasCredentials

from dbt.context.base import (
    BaseContext, contextproperty
)


class TargetContext(BaseContext):
    def __init__(self, config: HasCredentials, cli_vars: Dict[str, Any]):
        super().__init__(cli_vars=cli_vars)
        self.config = config

    @contextproperty
    def target(self) -> Dict[str, Any]:
        """`target` contains information about your connection to the warehouse
        (specified in profiles.yml). Some configs are shared between all
        adapters, while others are adapter-specific.

        Common:

        | Variable | Example   | Description                             |
        |----------|-----------|-----------------------------------------|
        | name     | dev       | Name of the active target               |
        | schema   | dbt_alice | Name of the dbt schema (or, dataset on  |
        |          |           | BigQuery)                               |
        | type     | postgres  | The active adapter being used.          |
        | threads  | 4         | The number of threads in use by dbt     |

        Snowflake:

        | Variable  | Example   | Description                    |
        |-----------|-----------|--------------------------------|
        | database  | RAW       | The active target's database.  |
        | warehouse | TRANSFORM | The active target's warehouse. |
        | user      | USERNAME  | The active target's user       |
        | role      | ROLENAME  | The active target's role       |
        | account   | abc123    | The active target's account    |

        Postgres/Redshift:

        | Variable | Example                                 | Description                   |
        |----------|-----------------------------------------|-------------------------------|
        | dbname   | analytics                               | The active target's database. |
        | host     | abc123.us-west-2.redshift.amazonaws.com | The active target's host.     |
        | user     | dbt_user                                | The active target's user      |
        | port     | 5439                                    | The active target's port      |

        BigQuery:

        | Variable | Example | Description                  |
        |----------|---------|------------------------------|
        | project  | abc-123 | The active target's project. |
        """
        return self.config.to_target_dict()


def generate_target_context(
    config: HasCredentials, cli_vars: Dict[str, Any]
) -> Dict[str, Any]:
    ctx = TargetContext(config, cli_vars)
    return ctx.to_dict()
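
A quick illustration of what the rendered context exposes (runtime_config is assumed; the profile values are invented):

    ctx = generate_target_context(runtime_config, cli_vars={'env': 'ci'})
    ctx['target']['name']     # e.g. 'dev'
    ctx['target']['type']     # e.g. 'postgres'
    ctx['target']['schema']   # e.g. 'dbt_alice'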

core/dbt/contracts/connection.py (new file, 228 lines)
@@ -0,0 +1,228 @@
import abc
import itertools
from dataclasses import dataclass, field
from typing import (
    Any, ClassVar, Dict, Tuple, Iterable, Optional, NewType, List, Callable,
)
from typing_extensions import Protocol

from hologram import JsonSchemaMixin
from hologram.helpers import (
    StrEnum, register_pattern, ExtensibleJsonSchemaMixin
)

from dbt.contracts.util import Replaceable
from dbt.exceptions import InternalException
from dbt.utils import translate_aliases

from dbt.logger import GLOBAL_LOGGER as logger


Identifier = NewType('Identifier', str)
register_pattern(Identifier, r'^[A-Za-z_][A-Za-z0-9_]+$')


@dataclass
class AdapterResponse(JsonSchemaMixin):
    _message: str
    code: Optional[str] = None
    rows_affected: Optional[int] = None

    def __str__(self):
        return self._message


class ConnectionState(StrEnum):
    INIT = 'init'
    OPEN = 'open'
    CLOSED = 'closed'
    FAIL = 'fail'


@dataclass(init=False)
class Connection(ExtensibleJsonSchemaMixin, Replaceable):
    type: Identifier
    name: Optional[str]
    state: ConnectionState = ConnectionState.INIT
    transaction_open: bool = False
    # prevent serialization
    _handle: Optional[Any] = None
    _credentials: JsonSchemaMixin = field(init=False)

    def __init__(
        self,
        type: Identifier,
        name: Optional[str],
        credentials: JsonSchemaMixin,
        state: ConnectionState = ConnectionState.INIT,
        transaction_open: bool = False,
        handle: Optional[Any] = None,
    ) -> None:
        self.type = type
        self.name = name
        self.state = state
        self.credentials = credentials
        self.transaction_open = transaction_open
        self.handle = handle

    @property
    def credentials(self):
        return self._credentials

    @credentials.setter
    def credentials(self, value):
        self._credentials = value

    @property
    def handle(self):
        if isinstance(self._handle, LazyHandle):
            try:
                # this will actually change 'self._handle'.
                self._handle.resolve(self)
            except RecursionError as exc:
                raise InternalException(
                    "A connection's open() method attempted to read the "
                    "handle value"
                ) from exc
        return self._handle

    @handle.setter
    def handle(self, value):
        self._handle = value


class LazyHandle:
    """Opener must be a callable that takes a Connection object and opens the
    connection, updating the handle on the Connection.
    """

    def __init__(self, opener: Callable[[Connection], Connection]):
        self.opener = opener

    def resolve(self, connection: Connection) -> Connection:
        logger.debug(
            'Opening a new connection, currently in state {}'
            .format(connection.state)
        )
        return self.opener(connection)


# see https://github.com/python/mypy/issues/4717#issuecomment-373932080
# and https://github.com/python/mypy/issues/5374
# for why we have type: ignore. Maybe someday dataclasses + abstract classes
# will work.
@dataclass  # type: ignore
class Credentials(
    ExtensibleJsonSchemaMixin,
    Replaceable,
    metaclass=abc.ABCMeta
):
    database: str
    schema: str
    _ALIASES: ClassVar[Dict[str, str]] = field(default={}, init=False)

    @abc.abstractproperty
    def type(self) -> str:
        raise NotImplementedError(
            'type not implemented for base credentials class'
        )

    def connection_info(
        self, *, with_aliases: bool = False
    ) -> Iterable[Tuple[str, Any]]:
        """Return an ordered iterator of key/value pairs for pretty-printing.
        """
        as_dict = self.to_dict(omit_none=False, with_aliases=with_aliases)
        connection_keys = set(self._connection_keys())
        aliases: List[str] = []
        if with_aliases:
            aliases = [
                k for k, v in self._ALIASES.items() if v in connection_keys
            ]
        for key in itertools.chain(self._connection_keys(), aliases):
            if key in as_dict:
                yield key, as_dict[key]

    @abc.abstractmethod
    def _connection_keys(self) -> Tuple[str, ...]:
        raise NotImplementedError

    @classmethod
    def from_dict(cls, data):
        data = cls.translate_aliases(data)
        return super().from_dict(data)

    @classmethod
    def translate_aliases(
        cls, kwargs: Dict[str, Any], recurse: bool = False
    ) -> Dict[str, Any]:
        return translate_aliases(kwargs, cls._ALIASES, recurse)

    def to_dict(self, omit_none=True, validate=False, *, with_aliases=False):
        serialized = super().to_dict(omit_none=omit_none, validate=validate)
        if with_aliases:
            serialized.update({
                new_name: serialized[canonical_name]
                for new_name, canonical_name in self._ALIASES.items()
                if canonical_name in serialized
            })
        return serialized


class UserConfigContract(Protocol):
    send_anonymous_usage_stats: bool
    use_colors: Optional[bool]
    partial_parse: Optional[bool]
    printer_width: Optional[int]

    def set_values(self, cookie_dir: str) -> None:
        ...

    def to_dict(
        self, omit_none: bool = True, validate: bool = False
    ) -> Dict[str, Any]:
        ...


class HasCredentials(Protocol):
    credentials: Credentials
    profile_name: str
    config: UserConfigContract
    target_name: str
    threads: int

    def to_target_dict(self):
        raise NotImplementedError('to_target_dict not implemented')


DEFAULT_QUERY_COMMENT = '''
{%- set comment_dict = {} -%}
{%- do comment_dict.update(
    app='dbt',
    dbt_version=dbt_version,
    profile_name=target.get('profile_name'),
    target_name=target.get('target_name'),
) -%}
{%- if node is not none -%}
  {%- do comment_dict.update(
    node_id=node.unique_id,
  ) -%}
{% else %}
  {# in the node context, the connection name is the node_id #}
  {%- do comment_dict.update(connection_name=connection_name) -%}
{%- endif -%}
{{ return(tojson(comment_dict)) }}
'''


@dataclass
class QueryComment(JsonSchemaMixin):
    comment: str = DEFAULT_QUERY_COMMENT
    append: bool = False


class AdapterRequiredConfig(HasCredentials, Protocol):
    project_name: str
    query_comment: QueryComment
    cli_vars: Dict[str, Any]
    target_path: str
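
A minimal sketch of the LazyHandle flow above: the first read of conn.handle triggers the opener, which replaces the lazy placeholder with a real handle. The credentials object and the fake handle are stand-ins, not part of the diff:

    def _open(conn: Connection) -> Connection:
        conn.handle = object()   # a real adapter opens a DBAPI connection here
        conn.state = ConnectionState.OPEN
        return conn

    conn = Connection(
        type=Identifier('postgres'),
        name='model.my_proj.stg_orders',
        credentials=my_credentials,   # assumed Credentials subclass instance
        handle=LazyHandle(_open),
    )
    conn.handle   # resolves via _open(); later reads return the opened handle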

core/dbt/contracts/files.py (new file, 167 lines)
@@ -0,0 +1,167 @@
import hashlib
import os
from dataclasses import dataclass, field
from typing import List, Optional, Union

from hologram import JsonSchemaMixin

from dbt.exceptions import InternalException

from .util import MacroKey, SourceKey


MAXIMUM_SEED_SIZE = 1 * 1024 * 1024
MAXIMUM_SEED_SIZE_NAME = '1MB'


@dataclass
class FilePath(JsonSchemaMixin):
    searched_path: str
    relative_path: str
    project_root: str

    @property
    def search_key(self) -> str:
        # TODO: should this be project name + path relative to project root?
        return self.absolute_path

    @property
    def full_path(self) -> str:
        # useful for symlink preservation
        return os.path.join(
            self.project_root, self.searched_path, self.relative_path
        )

    @property
    def absolute_path(self) -> str:
        return os.path.abspath(self.full_path)

    @property
    def original_file_path(self) -> str:
        # this is mostly used for reporting errors. It doesn't show the
        # project name, should it?
        return os.path.join(
            self.searched_path, self.relative_path
        )

    def seed_too_large(self) -> bool:
        """Return whether the file this represents is over the seed size limit
        """
        return os.stat(self.full_path).st_size > MAXIMUM_SEED_SIZE


@dataclass
class FileHash(JsonSchemaMixin):
    name: str  # the hash type name
    checksum: str  # the hashlib.hash_type().hexdigest() of the file contents

    @classmethod
    def empty(cls):
        return FileHash(name='none', checksum='')

    @classmethod
    def path(cls, path: str):
        return FileHash(name='path', checksum=path)

    def __eq__(self, other):
        if not isinstance(other, FileHash):
            return NotImplemented

        if self.name == 'none' or self.name != other.name:
            return False

        return self.checksum == other.checksum

    def compare(self, contents: str) -> bool:
        """Compare the file contents with the given hash"""
        if self.name == 'none':
            return False

        # compare FileHash objects (not the raw checksum string) so that
        # __eq__ above applies; comparing against self.checksum would always
        # be False because a FileHash never equals a str
        return self.from_contents(contents, name=self.name) == self

    @classmethod
    def from_contents(cls, contents: str, name='sha256') -> 'FileHash':
        """Create a file hash from the given file contents. The hash is always
        the utf-8 encoding of the contents given, because dbt only reads files
        as utf-8.
        """
        data = contents.encode('utf-8')
        checksum = hashlib.new(name, data).hexdigest()
        return cls(name=name, checksum=checksum)


@dataclass
class RemoteFile(JsonSchemaMixin):
    @property
    def searched_path(self) -> str:
        return 'from remote system'

    @property
    def relative_path(self) -> str:
        return 'from remote system'

    @property
    def absolute_path(self) -> str:
        return 'from remote system'

    @property
    def original_file_path(self):
        return 'from remote system'


@dataclass
class SourceFile(JsonSchemaMixin):
    """Define a source file in dbt"""
    path: Union[FilePath, RemoteFile]  # the path information
    checksum: FileHash
    # we don't want to serialize this
    _contents: Optional[str] = None
    # the unique IDs contained in this file
    nodes: List[str] = field(default_factory=list)
    docs: List[str] = field(default_factory=list)
    macros: List[str] = field(default_factory=list)
    sources: List[str] = field(default_factory=list)
    exposures: List[str] = field(default_factory=list)
    # any node patches in this file. The entries are names, not unique ids!
    patches: List[str] = field(default_factory=list)
    # any macro patches in this file. The entries are package, name pairs.
    macro_patches: List[MacroKey] = field(default_factory=list)
    # any source patches in this file. The entries are package, name pairs
    source_patches: List[SourceKey] = field(default_factory=list)

    @property
    def search_key(self) -> Optional[str]:
        if isinstance(self.path, RemoteFile):
            return None
        if self.checksum.name == 'none':
            return None
        return self.path.search_key

    @property
    def contents(self) -> str:
        if self._contents is None:
            raise InternalException('SourceFile has no contents!')
        return self._contents

    @contents.setter
    def contents(self, value):
        self._contents = value

    @classmethod
    def empty(cls, path: FilePath) -> 'SourceFile':
        self = cls(path=path, checksum=FileHash.empty())
        self.contents = ''
        return self

    @classmethod
    def big_seed(cls, path: FilePath) -> 'SourceFile':
        """Parse seeds over the size limit with just the path"""
        self = cls(path=path, checksum=FileHash.path(path.original_file_path))
        self.contents = ''
        return self

    @classmethod
    def remote(cls, contents: str) -> 'SourceFile':
        self = cls(path=RemoteFile(), checksum=FileHash.empty())
        self.contents = contents
        return self
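
The checksum round-trip is easiest to see directly (the contents are made up):

    h = FileHash.from_contents('select 1 as id')
    h.name                         # 'sha256'
    h.compare('select 1 as id')    # True: same contents, same checksum
    h.compare('select 2 as id')    # False
    FileHash.empty().compare('')   # False: 'none' checksums never match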

core/dbt/contracts/graph/compiled.py (new file, 226 lines)
@@ -0,0 +1,226 @@
from dbt.contracts.graph.parsed import (
    HasTestMetadata,
    ParsedNode,
    ParsedAnalysisNode,
    ParsedDataTestNode,
    ParsedHookNode,
    ParsedModelNode,
    ParsedExposure,
    ParsedResource,
    ParsedRPCNode,
    ParsedSchemaTestNode,
    ParsedSeedNode,
    ParsedSnapshotNode,
    ParsedSourceDefinition,
    SeedConfig,
    TestConfig,
    same_seeds,
)
from dbt.node_types import NodeType
from dbt.contracts.util import Replaceable

from hologram import JsonSchemaMixin
from dataclasses import dataclass, field
from typing import Optional, List, Union, Dict, Type


@dataclass
class InjectedCTE(JsonSchemaMixin, Replaceable):
    id: str
    sql: str


@dataclass
class CompiledNodeMixin(JsonSchemaMixin):
    # this is a special mixin class to provide a required argument. If a node
    # is missing a `compiled` flag entirely, it must not be a CompiledNode.
    compiled: bool


@dataclass
class CompiledNode(ParsedNode, CompiledNodeMixin):
    compiled_sql: Optional[str] = None
    extra_ctes_injected: bool = False
    extra_ctes: List[InjectedCTE] = field(default_factory=list)
    relation_name: Optional[str] = None

    def set_cte(self, cte_id: str, sql: str):
        """This is the equivalent of what self.extra_ctes[cte_id] = sql would
        do if extra_ctes were an OrderedDict
        """
        for cte in self.extra_ctes:
            if cte.id == cte_id:
                cte.sql = sql
                break
        else:
            self.extra_ctes.append(InjectedCTE(id=cte_id, sql=sql))


@dataclass
class CompiledAnalysisNode(CompiledNode):
    resource_type: NodeType = field(metadata={'restrict': [NodeType.Analysis]})


@dataclass
class CompiledHookNode(CompiledNode):
    resource_type: NodeType = field(
        metadata={'restrict': [NodeType.Operation]}
    )
    index: Optional[int] = None


@dataclass
class CompiledModelNode(CompiledNode):
    resource_type: NodeType = field(metadata={'restrict': [NodeType.Model]})


@dataclass
class CompiledRPCNode(CompiledNode):
    resource_type: NodeType = field(metadata={'restrict': [NodeType.RPCCall]})


@dataclass
class CompiledSeedNode(CompiledNode):
    # keep this in sync with ParsedSeedNode!
    resource_type: NodeType = field(metadata={'restrict': [NodeType.Seed]})
    config: SeedConfig = field(default_factory=SeedConfig)

    @property
    def empty(self):
        """Seeds are never empty"""
        return False

    def same_body(self, other) -> bool:
        return same_seeds(self, other)


@dataclass
class CompiledSnapshotNode(CompiledNode):
    resource_type: NodeType = field(metadata={'restrict': [NodeType.Snapshot]})


@dataclass
class CompiledDataTestNode(CompiledNode):
    resource_type: NodeType = field(metadata={'restrict': [NodeType.Test]})
    config: TestConfig = field(default_factory=TestConfig)


@dataclass
class CompiledSchemaTestNode(CompiledNode, HasTestMetadata):
    # keep this in sync with ParsedSchemaTestNode!
    resource_type: NodeType = field(metadata={'restrict': [NodeType.Test]})
    column_name: Optional[str] = None
    config: TestConfig = field(default_factory=TestConfig)

    def same_config(self, other) -> bool:
        return (
            self.unrendered_config.get('severity') ==
            other.unrendered_config.get('severity')
        )

    def same_column_name(self, other) -> bool:
        return self.column_name == other.column_name

    def same_contents(self, other) -> bool:
        if other is None:
            return False

        return (
            self.same_config(other) and
            self.same_fqn(other) and
            True
        )


CompiledTestNode = Union[CompiledDataTestNode, CompiledSchemaTestNode]


PARSED_TYPES: Dict[Type[CompiledNode], Type[ParsedResource]] = {
    CompiledAnalysisNode: ParsedAnalysisNode,
    CompiledModelNode: ParsedModelNode,
    CompiledHookNode: ParsedHookNode,
    CompiledRPCNode: ParsedRPCNode,
    CompiledSeedNode: ParsedSeedNode,
    CompiledSnapshotNode: ParsedSnapshotNode,
    CompiledDataTestNode: ParsedDataTestNode,
    CompiledSchemaTestNode: ParsedSchemaTestNode,
}


COMPILED_TYPES: Dict[Type[ParsedResource], Type[CompiledNode]] = {
    ParsedAnalysisNode: CompiledAnalysisNode,
    ParsedModelNode: CompiledModelNode,
    ParsedHookNode: CompiledHookNode,
    ParsedRPCNode: CompiledRPCNode,
    ParsedSeedNode: CompiledSeedNode,
    ParsedSnapshotNode: CompiledSnapshotNode,
    ParsedDataTestNode: CompiledDataTestNode,
    ParsedSchemaTestNode: CompiledSchemaTestNode,
}


# for some types, the compiled type is the parsed type, so make this easy
CompiledType = Union[Type[CompiledNode], Type[ParsedResource]]
CompiledResource = Union[ParsedResource, CompiledNode]


def compiled_type_for(parsed: ParsedNode) -> CompiledType:
    if type(parsed) in COMPILED_TYPES:
        return COMPILED_TYPES[type(parsed)]
    else:
        return type(parsed)


def parsed_instance_for(compiled: CompiledNode) -> ParsedResource:
    cls = PARSED_TYPES.get(type(compiled))
    if cls is None:
        # how???
        raise ValueError('invalid resource_type: {}'
                         .format(compiled.resource_type))

    # validate=False to allow extra keys from compiling
    return cls.from_dict(compiled.to_dict(), validate=False)


NonSourceCompiledNode = Union[
    CompiledAnalysisNode,
    CompiledDataTestNode,
    CompiledModelNode,
    CompiledHookNode,
    CompiledRPCNode,
    CompiledSchemaTestNode,
    CompiledSeedNode,
    CompiledSnapshotNode,
]

NonSourceParsedNode = Union[
    ParsedAnalysisNode,
    ParsedDataTestNode,
    ParsedHookNode,
    ParsedModelNode,
    ParsedRPCNode,
    ParsedSchemaTestNode,
    ParsedSeedNode,
    ParsedSnapshotNode,
]


# This is anything that can be in manifest.nodes.
ManifestNode = Union[
    NonSourceCompiledNode,
    NonSourceParsedNode,
]

# We allow either parsed or compiled nodes, or parsed sources, as some
# 'compile()' calls in the runner actually just return the original parsed
# node they were given.
CompileResultNode = Union[
    ManifestNode,
    ParsedSourceDefinition,
]

# anything that participates in the graph: sources, exposures, manifest nodes
GraphMemberNode = Union[
    CompileResultNode,
    ParsedExposure,
]
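
The for/else upsert in set_cte() is easy to misread, so here is that logic alone as a runnable snippet (upsert_cte is an illustrative helper, not part of the diff; constructing a full CompiledNode is elided because it needs many ParsedNode fields):

    from typing import List

    def upsert_cte(extra_ctes: List[InjectedCTE], cte_id: str, sql: str) -> None:
        # identical logic to CompiledNode.set_cte above
        for cte in extra_ctes:
            if cte.id == cte_id:
                cte.sql = sql
                break
        else:  # only runs if the loop never hit `break`
            extra_ctes.append(InjectedCTE(id=cte_id, sql=sql))

    ctes: List[InjectedCTE] = []
    upsert_cte(ctes, 'a', 'select 1')
    upsert_cte(ctes, 'a', 'select 2')   # updates in place; no duplicate entry
    upsert_cte(ctes, 'b', 'select 3')
    # [c.sql for c in ctes] == ['select 2', 'select 3']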

core/dbt/contracts/graph/manifest.py (new file, 991 lines)
@@ -0,0 +1,991 @@
import abc
import enum
from dataclasses import dataclass, field
from itertools import chain, islice
from multiprocessing.synchronize import Lock
from typing import (
    Dict, List, Optional, Union, Mapping, MutableMapping, Any, Set, Tuple,
    TypeVar, Callable, Iterable, Generic, cast, AbstractSet
)
from typing_extensions import Protocol
from uuid import UUID

from dbt.contracts.graph.compiled import (
    CompileResultNode, ManifestNode, NonSourceCompiledNode, GraphMemberNode
)
from dbt.contracts.graph.parsed import (
    ParsedMacro, ParsedDocumentation, ParsedNodePatch, ParsedMacroPatch,
    ParsedSourceDefinition, ParsedExposure
)
from dbt.contracts.files import SourceFile
from dbt.contracts.util import (
    BaseArtifactMetadata, MacroKey, SourceKey, ArtifactMixin, schema_version
)
from dbt.exceptions import (
    raise_duplicate_resource_name, raise_compiler_error, warn_or_error,
    raise_invalid_patch,
)
from dbt.helper_types import PathSet
from dbt.logger import GLOBAL_LOGGER as logger
from dbt.node_types import NodeType
from dbt import deprecations
from dbt import flags
from dbt import tracking
import dbt.utils

NodeEdgeMap = Dict[str, List[str]]
PackageName = str
DocName = str
RefName = str
UniqueID = str


K_T = TypeVar('K_T')
V_T = TypeVar('V_T')


class PackageAwareCache(Generic[K_T, V_T]):
    def __init__(self, manifest: 'Manifest'):
        self.storage: Dict[K_T, Dict[PackageName, UniqueID]] = {}
        self._manifest = manifest
        self.populate()

    @abc.abstractmethod
    def populate(self):
        pass

    @abc.abstractmethod
    def perform_lookup(self, unique_id: UniqueID) -> V_T:
        pass

    def find_cached_value(
        self, key: K_T, package: Optional[PackageName]
    ) -> Optional[V_T]:
        unique_id = self.find_unique_id_for_package(key, package)
        if unique_id is not None:
            return self.perform_lookup(unique_id)
        return None

    def find_unique_id_for_package(
        self, key: K_T, package: Optional[PackageName]
    ) -> Optional[UniqueID]:
        if key not in self.storage:
            return None

        pkg_dct: Mapping[PackageName, UniqueID] = self.storage[key]

        if package is None:
            if not pkg_dct:
                return None
            else:
                return next(iter(pkg_dct.values()))
        elif package in pkg_dct:
            return pkg_dct[package]
        else:
            return None


class DocCache(PackageAwareCache[DocName, ParsedDocumentation]):
    def add_doc(self, doc: ParsedDocumentation):
        if doc.name not in self.storage:
            self.storage[doc.name] = {}
        self.storage[doc.name][doc.package_name] = doc.unique_id

    def populate(self):
        for doc in self._manifest.docs.values():
            self.add_doc(doc)

    def perform_lookup(
        self, unique_id: UniqueID
    ) -> ParsedDocumentation:
        if unique_id not in self._manifest.docs:
            raise dbt.exceptions.InternalException(
                f'Doc {unique_id} found in cache but not found in manifest'
            )
        return self._manifest.docs[unique_id]


class SourceCache(PackageAwareCache[SourceKey, ParsedSourceDefinition]):
    def add_source(self, source: ParsedSourceDefinition):
        key = (source.source_name, source.name)
        if key not in self.storage:
            self.storage[key] = {}

        self.storage[key][source.package_name] = source.unique_id

    def populate(self):
        for source in self._manifest.sources.values():
            self.add_source(source)

    def perform_lookup(
        self, unique_id: UniqueID
    ) -> ParsedSourceDefinition:
        if unique_id not in self._manifest.sources:
            raise dbt.exceptions.InternalException(
                f'Source {unique_id} found in cache but not found in manifest'
            )
        return self._manifest.sources[unique_id]


class RefableCache(PackageAwareCache[RefName, ManifestNode]):
    # refables are actually unique, so the Dict[PackageName, UniqueID] will
    # only ever have exactly one value, but doing 3 dict lookups instead of 1
    # is not a big deal at all and retains consistency
    def __init__(self, manifest: 'Manifest'):
        self._cached_types = set(NodeType.refable())
        super().__init__(manifest)

    def add_node(self, node: ManifestNode):
        if node.resource_type in self._cached_types:
            if node.name not in self.storage:
                self.storage[node.name] = {}
            self.storage[node.name][node.package_name] = node.unique_id

    def populate(self):
        for node in self._manifest.nodes.values():
            self.add_node(node)

    def perform_lookup(
        self, unique_id: UniqueID
    ) -> ManifestNode:
        if unique_id not in self._manifest.nodes:
            raise dbt.exceptions.InternalException(
                f'Node {unique_id} found in cache but not found in manifest'
            )
        return self._manifest.nodes[unique_id]


def _search_packages(
    current_project: str,
    node_package: str,
    target_package: Optional[str] = None,
) -> List[Optional[str]]:
    if target_package is not None:
        return [target_package]
    elif current_project == node_package:
        return [current_project, None]
    else:
        return [current_project, node_package, None]
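
_search_packages is a pure function, so its candidate ordering can be shown directly (a trailing None means "fall back to any package"):

    _search_packages('root', 'root')                 # ['root', None]
    _search_packages('root', 'dep_pkg')              # ['root', 'dep_pkg', None]
    _search_packages('root', 'dep_pkg', 'explicit')  # ['explicit']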


@dataclass
class ManifestMetadata(BaseArtifactMetadata):
    """Metadata for the manifest."""
    dbt_schema_version: str = field(
        default_factory=lambda: str(WritableManifest.dbt_schema_version)
    )
    project_id: Optional[str] = field(
        default=None,
        metadata={
            'description': 'A unique identifier for the project',
        },
    )
    user_id: Optional[UUID] = field(
        default=None,
        metadata={
            'description': 'A unique identifier for the user',
        },
    )
    send_anonymous_usage_stats: Optional[bool] = field(
        default=None,
        metadata=dict(description=(
            'Whether dbt is configured to send anonymous usage statistics'
        )),
    )
    adapter_type: Optional[str] = field(
        default=None,
        metadata=dict(description='The type name of the adapter'),
    )

    def __post_init__(self):
        if tracking.active_user is None:
            return

        if self.user_id is None:
            self.user_id = tracking.active_user.id

        if self.send_anonymous_usage_stats is None:
            self.send_anonymous_usage_stats = (
                not tracking.active_user.do_not_track
            )

    @classmethod
    def default(cls):
        return cls(
            dbt_schema_version=str(WritableManifest.dbt_schema_version),
        )


def _sort_values(dct):
    """Given a dictionary, sort each value. This makes output deterministic,
    which helps for tests.
    """
    return {k: sorted(v) for k, v in dct.items()}


def build_edges(nodes: List[ManifestNode]):
    """Build the forward and backward edges on the given list of ParsedNodes
    and return them as two separate dictionaries, each mapping unique IDs to
    lists of edges.
    """
    backward_edges: Dict[str, List[str]] = {}
    # pre-populate the forward edge dict for simplicity
    forward_edges: Dict[str, List[str]] = {n.unique_id: [] for n in nodes}
    for node in nodes:
        backward_edges[node.unique_id] = node.depends_on_nodes[:]
        for unique_id in node.depends_on_nodes:
            if unique_id in forward_edges.keys():
                forward_edges[unique_id].append(node.unique_id)
    return _sort_values(forward_edges), _sort_values(backward_edges)
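
A runnable mini-example of build_edges with stand-in nodes (the real inputs are ManifestNodes; only unique_id and depends_on_nodes matter here):

    from dataclasses import dataclass

    @dataclass
    class FakeNode:
        unique_id: str
        depends_on_nodes: list

    a = FakeNode('model.p.a', [])
    b = FakeNode('model.p.b', ['model.p.a'])
    forward, backward = build_edges([a, b])
    # forward == {'model.p.a': ['model.p.b'], 'model.p.b': []}
    # backward == {'model.p.a': [], 'model.p.b': ['model.p.a']}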


def _deepcopy(value):
    return value.from_dict(value.to_dict())


class Locality(enum.IntEnum):
    Core = 1
    Imported = 2
    Root = 3


class Specificity(enum.IntEnum):
    Default = 1
    Adapter = 2


@dataclass
class MacroCandidate:
    locality: Locality
    macro: ParsedMacro

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, MacroCandidate):
            return NotImplemented
        return self.locality == other.locality

    def __lt__(self, other: object) -> bool:
        if not isinstance(other, MacroCandidate):
            return NotImplemented
        if self.locality < other.locality:
            return True
        if self.locality > other.locality:
            return False
        return False


@dataclass
class MaterializationCandidate(MacroCandidate):
    specificity: Specificity

    @classmethod
    def from_macro(
        cls, candidate: MacroCandidate, specificity: Specificity
    ) -> 'MaterializationCandidate':
        return cls(
            locality=candidate.locality,
            macro=candidate.macro,
            specificity=specificity,
        )

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, MaterializationCandidate):
            return NotImplemented
        equal = (
            self.specificity == other.specificity and
            self.locality == other.locality
        )
        if equal:
            raise_compiler_error(
                'Found two materializations with the name {} (packages {} and '
                '{}). dbt cannot resolve this ambiguity'
                .format(self.macro.name, self.macro.package_name,
                        other.macro.package_name)
            )

        return equal

    def __lt__(self, other: object) -> bool:
        if not isinstance(other, MaterializationCandidate):
            return NotImplemented
        if self.specificity < other.specificity:
            return True
        if self.specificity > other.specificity:
            return False
        if self.locality < other.locality:
            return True
        if self.locality > other.locality:
            return False
        return False


M = TypeVar('M', bound=MacroCandidate)


class CandidateList(List[M]):
    def last(self) -> Optional[ParsedMacro]:
        if not self:
            return None
        self.sort()
        return self[-1].macro


def _get_locality(
    macro: ParsedMacro, root_project_name: str, internal_packages: Set[str]
) -> Locality:
    if macro.package_name == root_project_name:
        return Locality.Root
    elif macro.package_name in internal_packages:
        return Locality.Core
    else:
        return Locality.Imported
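
Candidate ordering in practice: sort() uses __lt__, so CandidateList.last() returns the highest-precedence macro, with Root beating Imported beating Core. A small runnable sketch (object() stands in for real ParsedMacro instances):

    m_core, m_root = object(), object()   # stand-ins for ParsedMacro objects
    candidates = CandidateList([
        MacroCandidate(locality=Locality.Core, macro=m_core),
        MacroCandidate(locality=Locality.Root, macro=m_root),
    ])
    assert candidates.last() is m_root    # the root project's macro wins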


class Searchable(Protocol):
    resource_type: NodeType
    package_name: str

    @property
    def search_name(self) -> str:
        raise NotImplementedError('search_name not implemented')


N = TypeVar('N', bound=Searchable)


@dataclass
class NameSearcher(Generic[N]):
    name: str
    package: Optional[str]
    nodetypes: List[NodeType]

    def _matches(self, model: N) -> bool:
        """Return True if the model matches the given name, package, and type.

        If package is None, any package is allowed.
        nodetypes should be a container of NodeTypes that implements the 'in'
        operator.
        """
        if model.resource_type not in self.nodetypes:
            return False

        if self.name != model.search_name:
            return False

        return self.package is None or self.package == model.package_name

    def search(self, haystack: Iterable[N]) -> Optional[N]:
        """Find an entry in the given iterable by name."""
        for model in haystack:
            if self._matches(model):
                return model
        return None


D = TypeVar('D')


@dataclass
class Disabled(Generic[D]):
    target: D


MaybeDocumentation = Optional[ParsedDocumentation]


MaybeParsedSource = Optional[Union[
    ParsedSourceDefinition,
    Disabled[ParsedSourceDefinition],
]]


MaybeNonSource = Optional[Union[
    ManifestNode,
    Disabled[ManifestNode]
]]


T = TypeVar('T', bound=GraphMemberNode)
def _update_into(dest: MutableMapping[str, T], new_item: T):
    """Update dest to overwrite whatever is at dest[new_item.unique_id] with
    new_item. There must be an existing value to overwrite, and the two nodes
    must have the same original file path.
    """
    unique_id = new_item.unique_id
    if unique_id not in dest:
        raise dbt.exceptions.RuntimeException(
            f'got an update_{new_item.resource_type} call with an '
            f'unrecognized {new_item.resource_type}: {new_item.unique_id}'
        )
    existing = dest[unique_id]
    if new_item.original_file_path != existing.original_file_path:
        raise dbt.exceptions.RuntimeException(
            f'cannot update a {new_item.resource_type} to have a new file '
            f'path!'
        )
    dest[unique_id] = new_item


@dataclass
class Manifest:
    """The manifest for the full graph, after parsing and during compilation.
    """
    # These attributes are both positional and by keyword. If an attribute
    # is added it must all be added in the __reduce_ex__ method in the
    # args tuple in the right position.
    nodes: MutableMapping[str, ManifestNode]
    sources: MutableMapping[str, ParsedSourceDefinition]
    macros: MutableMapping[str, ParsedMacro]
    docs: MutableMapping[str, ParsedDocumentation]
    exposures: MutableMapping[str, ParsedExposure]
    selectors: MutableMapping[str, Any]
    disabled: List[CompileResultNode]
    files: MutableMapping[str, SourceFile]
    metadata: ManifestMetadata = field(default_factory=ManifestMetadata)
    flat_graph: Dict[str, Any] = field(default_factory=dict)
    _docs_cache: Optional[DocCache] = None
    _sources_cache: Optional[SourceCache] = None
    _refs_cache: Optional[RefableCache] = None
    _lock: Lock = field(default_factory=flags.MP_CONTEXT.Lock)

    @classmethod
    def from_macros(
        cls,
        macros: Optional[MutableMapping[str, ParsedMacro]] = None,
        files: Optional[MutableMapping[str, SourceFile]] = None,
    ) -> 'Manifest':
        if macros is None:
            macros = {}
        if files is None:
            files = {}
        return cls(
            nodes={},
            sources={},
            macros=macros,
            docs={},
            exposures={},
            selectors={},
            disabled=[],
            files=files,
        )

    def sync_update_node(
        self, new_node: NonSourceCompiledNode
    ) -> NonSourceCompiledNode:
        """update the node with a lock. The only time we should want to lock is
        when compiling an ephemeral ancestor of a node at runtime, because
        multiple threads could be just-in-time compiling the same ephemeral
        dependency, and we want them to have a consistent view of the manifest.

        If the existing node is not compiled, update it with the new node and
        return that. If the existing node is compiled, do not update the
        manifest and return the existing node.
        """
        with self._lock:
            existing = self.nodes[new_node.unique_id]
            if getattr(existing, 'compiled', False):
                # already compiled -> must be a NonSourceCompiledNode
                return cast(NonSourceCompiledNode, existing)
            _update_into(self.nodes, new_node)
            return new_node

    def update_exposure(self, new_exposure: ParsedExposure):
        _update_into(self.exposures, new_exposure)

    def update_node(self, new_node: ManifestNode):
        _update_into(self.nodes, new_node)

    def update_source(self, new_source: ParsedSourceDefinition):
        _update_into(self.sources, new_source)

    def build_flat_graph(self):
        """This attribute is used in context.common by each node, so we want to
        only build it once and avoid any concurrency issues around it.
        Make sure you don't call this until you're done with building your
        manifest!
        """
        self.flat_graph = {
            'nodes': {
                k: v.to_dict(omit_none=False) for k, v in self.nodes.items()
            },
            'sources': {
                k: v.to_dict(omit_none=False) for k, v in self.sources.items()
            }
        }

    def find_disabled_by_name(
        self, name: str, package: Optional[str] = None
    ) -> Optional[ManifestNode]:
        searcher: NameSearcher = NameSearcher(
            name, package, NodeType.refable()
        )
        result = searcher.search(self.disabled)
        return result

    def find_disabled_source_by_name(
        self, source_name: str, table_name: str, package: Optional[str] = None
    ) -> Optional[ParsedSourceDefinition]:
        search_name = f'{source_name}.{table_name}'
        searcher: NameSearcher = NameSearcher(
            search_name, package, [NodeType.Source]
        )
        result = searcher.search(self.disabled)
        if result is not None:
            assert isinstance(result, ParsedSourceDefinition)
        return result

    def _find_macros_by_name(
        self,
        name: str,
        root_project_name: str,
        filter: Optional[Callable[[MacroCandidate], bool]] = None
    ) -> CandidateList:
        """Find macros by their name.
        """
        # avoid an import cycle
        from dbt.adapters.factory import get_adapter_package_names
        candidates: CandidateList = CandidateList()
        packages = set(get_adapter_package_names(self.metadata.adapter_type))
        for unique_id, macro in self.macros.items():
            if macro.name != name:
                continue
            candidate = MacroCandidate(
                locality=_get_locality(macro, root_project_name, packages),
                macro=macro,
            )
            if filter is None or filter(candidate):
                candidates.append(candidate)

        return candidates

    def _materialization_candidates_for(
        self, project_name: str,
        materialization_name: str,
        adapter_type: Optional[str],
    ) -> CandidateList:

        if adapter_type is None:
            specificity = Specificity.Default
        else:
            specificity = Specificity.Adapter

        full_name = dbt.utils.get_materialization_macro_name(
            materialization_name=materialization_name,
            adapter_type=adapter_type,
            with_prefix=False,
        )
        return CandidateList(
            MaterializationCandidate.from_macro(m, specificity)
            for m in self._find_macros_by_name(full_name, project_name)
        )

    def find_macro_by_name(
        self, name: str, root_project_name: str, package: Optional[str]
    ) -> Optional[ParsedMacro]:
        """Find a macro in the graph by its name and package name, or None for
        any package. The root project name is used to determine priority:
         - locally defined macros come first
         - then imported macros
         - then macros defined in the root project
        """
        filter: Optional[Callable[[MacroCandidate], bool]] = None
        if package is not None:
            def filter(candidate: MacroCandidate) -> bool:
                return package == candidate.macro.package_name

        candidates: CandidateList = self._find_macros_by_name(
            name=name,
            root_project_name=root_project_name,
            filter=filter,
        )

        return candidates.last()

    def find_generate_macro_by_name(
        self, component: str, root_project_name: str
    ) -> Optional[ParsedMacro]:
        """
        The `generate_X_name` macros are similar to regular ones, but ignore
        imported packages.
          - if there is a `generate_{component}_name` macro in the root
            project, return it
          - return the `generate_{component}_name` macro from the 'dbt'
            internal project
        """
        def filter(candidate: MacroCandidate) -> bool:
            return candidate.locality != Locality.Imported

        candidates: CandidateList = self._find_macros_by_name(
            name=f'generate_{component}_name',
            root_project_name=root_project_name,
            # filter out imported packages
            filter=filter,
        )
        return candidates.last()

    def find_materialization_macro_by_name(
        self, project_name: str, materialization_name: str, adapter_type: str
    ) -> Optional[ParsedMacro]:
        candidates: CandidateList = CandidateList(chain.from_iterable(
            self._materialization_candidates_for(
                project_name=project_name,
                materialization_name=materialization_name,
                adapter_type=atype,
            ) for atype in (adapter_type, None)
        ))
        return candidates.last()

    def get_resource_fqns(self) -> Mapping[str, PathSet]:
        resource_fqns: Dict[str, Set[Tuple[str, ...]]] = {}
        all_resources = chain(self.nodes.values(), self.sources.values())
        for resource in all_resources:
            resource_type_plural = resource.resource_type.pluralize()
            if resource_type_plural not in resource_fqns:
                resource_fqns[resource_type_plural] = set()
            resource_fqns[resource_type_plural].add(tuple(resource.fqn))
        return resource_fqns

    def add_nodes(self, new_nodes: Mapping[str, ManifestNode]):
        """Add the given dict of new nodes to the manifest."""
for unique_id, node in new_nodes.items():
|
||||
if unique_id in self.nodes:
|
||||
raise_duplicate_resource_name(node, self.nodes[unique_id])
|
||||
self.nodes[unique_id] = node
|
||||
# fixup the cache if it exists.
|
||||
if self._refs_cache is not None:
|
||||
if node.resource_type in NodeType.refable():
|
||||
self._refs_cache.add_node(node)
|
||||
|
||||
def patch_macros(
|
||||
self, patches: MutableMapping[MacroKey, ParsedMacroPatch]
|
||||
) -> None:
|
||||
for macro in self.macros.values():
|
||||
key = (macro.package_name, macro.name)
|
||||
patch = patches.pop(key, None)
|
||||
if not patch:
|
||||
continue
|
||||
macro.patch(patch)
|
||||
|
||||
if patches:
|
||||
for patch in patches.values():
|
||||
warn_or_error(
|
||||
f'WARNING: Found documentation for macro "{patch.name}" '
|
||||
f'which was not found'
|
||||
)
|
||||
|
||||
def patch_nodes(
|
||||
self, patches: MutableMapping[str, ParsedNodePatch]
|
||||
) -> None:
|
||||
"""Patch nodes with the given dict of patches. Note that this consumes
|
||||
the input!
|
||||
This relies on the fact that all nodes have unique _name_ fields, not
|
||||
just unique unique_id fields.
|
||||
"""
|
||||
# because we don't have any mapping from node _names_ to nodes, and we
|
||||
# only have the node name in the patch, we have to iterate over all the
|
||||
# nodes looking for matching names. We could use a NameSearcher if we
|
||||
# were ok with doing an O(n*m) search (one nodes scan per patch)
|
||||
for node in self.nodes.values():
|
||||
patch = patches.pop(node.name, None)
|
||||
if not patch:
|
||||
continue
|
||||
|
||||
expected_key = node.resource_type.pluralize()
|
||||
if expected_key != patch.yaml_key:
|
||||
if patch.yaml_key == 'models':
|
||||
deprecations.warn(
|
||||
'models-key-mismatch',
|
||||
patch=patch, node=node, expected_key=expected_key
|
||||
)
|
||||
else:
|
||||
raise_invalid_patch(
|
||||
node, patch.yaml_key, patch.original_file_path
|
||||
)
|
||||
|
||||
node.patch(patch)
|
||||
|
||||
# log debug-level warning about nodes we couldn't find
|
||||
if patches:
|
||||
for patch in patches.values():
|
||||
# since patches aren't nodes, we can't use the existing
|
||||
# target_not_found warning
|
||||
logger.debug((
|
||||
'WARNING: Found documentation for resource "{}" which was '
|
||||
'not found or is disabled').format(patch.name)
|
||||
)
|
||||
|
||||
def get_used_schemas(self, resource_types=None):
|
||||
return frozenset({
|
||||
(node.database, node.schema) for node in
|
||||
chain(self.nodes.values(), self.sources.values())
|
||||
if not resource_types or node.resource_type in resource_types
|
||||
})
|
||||
|
||||
def get_used_databases(self):
|
||||
return frozenset(
|
||||
x.database for x in
|
||||
chain(self.nodes.values(), self.sources.values())
|
||||
)
|
||||
|
||||
def deepcopy(self):
|
||||
return Manifest(
|
||||
nodes={k: _deepcopy(v) for k, v in self.nodes.items()},
|
||||
sources={k: _deepcopy(v) for k, v in self.sources.items()},
|
||||
macros={k: _deepcopy(v) for k, v in self.macros.items()},
|
||||
docs={k: _deepcopy(v) for k, v in self.docs.items()},
|
||||
exposures={k: _deepcopy(v) for k, v in self.exposures.items()},
|
||||
selectors=self.root_project.manifest_selectors,
|
||||
metadata=self.metadata,
|
||||
disabled=[_deepcopy(n) for n in self.disabled],
|
||||
files={k: _deepcopy(v) for k, v in self.files.items()},
|
||||
)
|
||||
|
||||
def writable_manifest(self):
|
||||
edge_members = list(chain(
|
||||
self.nodes.values(),
|
||||
self.sources.values(),
|
||||
self.exposures.values(),
|
||||
))
|
||||
forward_edges, backward_edges = build_edges(edge_members)
|
||||
|
||||
return WritableManifest(
|
||||
nodes=self.nodes,
|
||||
sources=self.sources,
|
||||
macros=self.macros,
|
||||
docs=self.docs,
|
||||
exposures=self.exposures,
|
||||
selectors=self.selectors,
|
||||
metadata=self.metadata,
|
||||
disabled=self.disabled,
|
||||
child_map=forward_edges,
|
||||
parent_map=backward_edges,
|
||||
)
|
||||
|
||||
def to_dict(self, omit_none=True, validate=False):
|
||||
return self.writable_manifest().to_dict(
|
||||
omit_none=omit_none, validate=validate
|
||||
)
|
||||
|
||||
def write(self, path):
|
||||
self.writable_manifest().write(path)
|
||||
|
||||
def expect(self, unique_id: str) -> GraphMemberNode:
|
||||
if unique_id in self.nodes:
|
||||
return self.nodes[unique_id]
|
||||
elif unique_id in self.sources:
|
||||
return self.sources[unique_id]
|
||||
elif unique_id in self.exposures:
|
||||
return self.exposures[unique_id]
|
||||
else:
|
||||
# something terrible has happened
|
||||
raise dbt.exceptions.InternalException(
|
||||
'Expected node {} not found in manifest'.format(unique_id)
|
||||
)
|
||||
|
||||
@property
|
||||
def docs_cache(self) -> DocCache:
|
||||
if self._docs_cache is not None:
|
||||
return self._docs_cache
|
||||
cache = DocCache(self)
|
||||
self._docs_cache = cache
|
||||
return cache
|
||||
|
||||
@property
|
||||
def source_cache(self) -> SourceCache:
|
||||
if self._sources_cache is not None:
|
||||
return self._sources_cache
|
||||
cache = SourceCache(self)
|
||||
self._sources_cache = cache
|
||||
return cache
|
||||
|
||||
@property
|
||||
def refs_cache(self) -> RefableCache:
|
||||
if self._refs_cache is not None:
|
||||
return self._refs_cache
|
||||
cache = RefableCache(self)
|
||||
self._refs_cache = cache
|
||||
return cache
|
||||
|
||||
def resolve_ref(
|
||||
self,
|
||||
target_model_name: str,
|
||||
target_model_package: Optional[str],
|
||||
current_project: str,
|
||||
node_package: str,
|
||||
) -> MaybeNonSource:
|
||||
|
||||
node: Optional[ManifestNode] = None
|
||||
disabled: Optional[ManifestNode] = None
|
||||
|
||||
candidates = _search_packages(
|
||||
current_project, node_package, target_model_package
|
||||
)
|
||||
for pkg in candidates:
|
||||
node = self.refs_cache.find_cached_value(target_model_name, pkg)
|
||||
|
||||
if node is not None and node.config.enabled:
|
||||
return node
|
||||
|
||||
# it's possible that the node is disabled
|
||||
if disabled is None:
|
||||
disabled = self.find_disabled_by_name(
|
||||
target_model_name, pkg
|
||||
)
|
||||
|
||||
if disabled is not None:
|
||||
return Disabled(disabled)
|
||||
return None
|
||||
|
||||
def resolve_source(
|
||||
self,
|
||||
target_source_name: str,
|
||||
target_table_name: str,
|
||||
current_project: str,
|
||||
node_package: str
|
||||
) -> MaybeParsedSource:
|
||||
key = (target_source_name, target_table_name)
|
||||
candidates = _search_packages(current_project, node_package)
|
||||
|
||||
source: Optional[ParsedSourceDefinition] = None
|
||||
disabled: Optional[ParsedSourceDefinition] = None
|
||||
|
||||
for pkg in candidates:
|
||||
source = self.source_cache.find_cached_value(key, pkg)
|
||||
if source is not None and source.config.enabled:
|
||||
return source
|
||||
|
||||
if disabled is None:
|
||||
disabled = self.find_disabled_source_by_name(
|
||||
target_source_name, target_table_name, pkg
|
||||
)
|
||||
|
||||
if disabled is not None:
|
||||
return Disabled(disabled)
|
||||
return None
|
||||
|
||||
def resolve_doc(
|
||||
self,
|
||||
name: str,
|
||||
package: Optional[str],
|
||||
current_project: str,
|
||||
node_package: str,
|
||||
) -> Optional[ParsedDocumentation]:
|
||||
"""Resolve the given documentation. This follows the same algorithm as
|
||||
resolve_ref except the is_enabled checks are unnecessary as docs are
|
||||
always enabled.
|
||||
"""
|
||||
candidates = _search_packages(
|
||||
current_project, node_package, package
|
||||
)
|
||||
|
||||
for pkg in candidates:
|
||||
result = self.docs_cache.find_cached_value(name, pkg)
|
||||
if result is not None:
|
||||
return result
|
||||
return None
|
||||
|
||||
def merge_from_artifact(
|
||||
self,
|
||||
adapter,
|
||||
other: 'WritableManifest',
|
||||
selected: AbstractSet[UniqueID],
|
||||
) -> None:
|
||||
"""Given the selected unique IDs and a writable manifest, update this
|
||||
manifest by replacing any unselected nodes with their counterpart.
|
||||
|
||||
Only non-ephemeral refable nodes are examined.
|
||||
"""
|
||||
refables = set(NodeType.refable())
|
||||
merged = set()
|
||||
for unique_id, node in other.nodes.items():
|
||||
current = self.nodes.get(unique_id)
|
||||
if current and (
|
||||
node.resource_type in refables and
|
||||
not node.is_ephemeral and
|
||||
unique_id not in selected and
|
||||
not adapter.get_relation(
|
||||
current.database, current.schema, current.identifier
|
||||
)
|
||||
):
|
||||
merged.add(unique_id)
|
||||
self.nodes[unique_id] = node.replace(deferred=True)
|
||||
|
||||
# log up to 5 items
|
||||
sample = list(islice(merged, 5))
|
||||
logger.debug(
|
||||
f'Merged {len(merged)} items from state (sample: {sample})'
|
||||
)
|
||||
|
||||
# Provide support for copy.deepcopy() - we just need to avoid the lock!
|
||||
# pickle and deepcopy use this. It returns a callable object used to
|
||||
# create the initial version of the object and a tuple of arguments
|
||||
# for the object, i.e. the Manifest.
|
||||
# The order of the arguments must match the order of the attributes
|
||||
# in the Manifest class declaration, because they are used as
|
||||
# positional arguments to construct a Manifest.
|
||||
def __reduce_ex__(self, protocol):
|
||||
args = (
|
||||
self.nodes,
|
||||
self.sources,
|
||||
self.macros,
|
||||
self.docs,
|
||||
self.exposures,
|
||||
self.selectors,
|
||||
self.disabled,
|
||||
self.files,
|
||||
self.metadata,
|
||||
self.flat_graph,
|
||||
self._docs_cache,
|
||||
self._sources_cache,
|
||||
self._refs_cache,
|
||||
)
|
||||
return self.__class__, args
|
||||
|
||||
|
||||
@dataclass
|
||||
@schema_version('manifest', 1)
|
||||
class WritableManifest(ArtifactMixin):
|
||||
nodes: Mapping[UniqueID, ManifestNode] = field(
|
||||
metadata=dict(description=(
|
||||
'The nodes defined in the dbt project and its dependencies'
|
||||
))
|
||||
)
|
||||
sources: Mapping[UniqueID, ParsedSourceDefinition] = field(
|
||||
metadata=dict(description=(
|
||||
'The sources defined in the dbt project and its dependencies'
|
||||
))
|
||||
)
|
||||
macros: Mapping[UniqueID, ParsedMacro] = field(
|
||||
metadata=dict(description=(
|
||||
'The macros defined in the dbt project and its dependencies'
|
||||
))
|
||||
)
|
||||
docs: Mapping[UniqueID, ParsedDocumentation] = field(
|
||||
metadata=dict(description=(
|
||||
'The docs defined in the dbt project and its dependencies'
|
||||
))
|
||||
)
|
||||
exposures: Mapping[UniqueID, ParsedExposure] = field(
|
||||
metadata=dict(description=(
|
||||
'The exposures defined in the dbt project and its dependencies'
|
||||
))
|
||||
)
|
||||
selectors: Mapping[UniqueID, Any] = field(
|
||||
metadata=dict(description=(
|
||||
'The selectors defined in selectors.yml'
|
||||
))
|
||||
)
|
||||
disabled: Optional[List[CompileResultNode]] = field(metadata=dict(
|
||||
description='A list of the disabled nodes in the target'
|
||||
))
|
||||
parent_map: Optional[NodeEdgeMap] = field(metadata=dict(
|
||||
description='A mapping from child nodes to their dependencies',
|
||||
))
|
||||
child_map: Optional[NodeEdgeMap] = field(metadata=dict(
|
||||
description='A mapping from parent nodes to their dependents',
|
||||
))
|
||||
metadata: ManifestMetadata = field(metadata=dict(
|
||||
description='Metadata about the manifest',
|
||||
))
|
||||
657
core/dbt/contracts/graph/model_config.py
Normal file
657
core/dbt/contracts/graph/model_config.py
Normal file
@@ -0,0 +1,657 @@
|
||||
from dataclasses import field, Field, dataclass
|
||||
from enum import Enum
|
||||
from itertools import chain
|
||||
from typing import (
|
||||
Any, List, Optional, Dict, MutableMapping, Union, Type, NewType, Tuple,
|
||||
TypeVar, Callable, cast, Hashable
|
||||
)
|
||||
|
||||
# TODO: patch+upgrade hologram to avoid this jsonschema import
|
||||
import jsonschema # type: ignore
|
||||
|
||||
# This is protected, but we really do want to reuse this logic, and the cache!
|
||||
# It would be nice to move the custom error picking stuff into hologram!
|
||||
from hologram import _validate_schema
|
||||
from hologram import JsonSchemaMixin, ValidationError
|
||||
from hologram.helpers import StrEnum, register_pattern
|
||||
|
||||
from dbt.contracts.graph.unparsed import AdditionalPropertiesAllowed
|
||||
from dbt.exceptions import CompilationException, InternalException
|
||||
from dbt.contracts.util import Replaceable, list_str
|
||||
from dbt import hooks
|
||||
from dbt.node_types import NodeType
|
||||
|
||||
|
||||
M = TypeVar('M', bound='Metadata')
|
||||
|
||||
|
||||
def _get_meta_value(cls: Type[M], fld: Field, key: str, default: Any) -> M:
|
||||
# a metadata field might exist. If it does, it might have a matching key.
|
||||
# If it has both, make sure the value is valid and return it. If it
|
||||
# doesn't, return the default.
|
||||
if fld.metadata:
|
||||
value = fld.metadata.get(key, default)
|
||||
else:
|
||||
value = default
|
||||
|
||||
try:
|
||||
return cls(value)
|
||||
except ValueError as exc:
|
||||
raise InternalException(
|
||||
f'Invalid {cls} value: {value}'
|
||||
) from exc
|
||||
|
||||
|
||||
def _set_meta_value(
|
||||
obj: M, key: str, existing: Optional[Dict[str, Any]] = None
|
||||
) -> Dict[str, Any]:
|
||||
if existing is None:
|
||||
result = {}
|
||||
else:
|
||||
result = existing.copy()
|
||||
result.update({key: obj})
|
||||
return result
|
||||
|
||||
|
||||
class Metadata(Enum):
|
||||
@classmethod
|
||||
def from_field(cls: Type[M], fld: Field) -> M:
|
||||
default = cls.default_field()
|
||||
key = cls.metadata_key()
|
||||
|
||||
return _get_meta_value(cls, fld, key, default)
|
||||
|
||||
def meta(
|
||||
self, existing: Optional[Dict[str, Any]] = None
|
||||
) -> Dict[str, Any]:
|
||||
key = self.metadata_key()
|
||||
return _set_meta_value(self, key, existing)
|
||||
|
||||
@classmethod
|
||||
def default_field(cls) -> 'Metadata':
|
||||
raise NotImplementedError('Not implemented')
|
||||
|
||||
@classmethod
|
||||
def metadata_key(cls) -> str:
|
||||
raise NotImplementedError('Not implemented')
|
||||
|
||||
|
||||
class MergeBehavior(Metadata):
|
||||
Append = 1
|
||||
Update = 2
|
||||
Clobber = 3
|
||||
|
||||
@classmethod
|
||||
def default_field(cls) -> 'MergeBehavior':
|
||||
return cls.Clobber
|
||||
|
||||
@classmethod
|
||||
def metadata_key(cls) -> str:
|
||||
return 'merge'
|
||||
|
||||
|
||||
class ShowBehavior(Metadata):
|
||||
Show = 1
|
||||
Hide = 2
|
||||
|
||||
@classmethod
|
||||
def default_field(cls) -> 'ShowBehavior':
|
||||
return cls.Show
|
||||
|
||||
@classmethod
|
||||
def metadata_key(cls) -> str:
|
||||
return 'show_hide'
|
||||
|
||||
@classmethod
|
||||
def should_show(cls, fld: Field) -> bool:
|
||||
return cls.from_field(fld) == cls.Show
|
||||
|
||||
|
||||
class CompareBehavior(Metadata):
|
||||
Include = 1
|
||||
Exclude = 2
|
||||
|
||||
@classmethod
|
||||
def default_field(cls) -> 'CompareBehavior':
|
||||
return cls.Include
|
||||
|
||||
@classmethod
|
||||
def metadata_key(cls) -> str:
|
||||
return 'compare'
|
||||
|
||||
@classmethod
|
||||
def should_include(cls, fld: Field) -> bool:
|
||||
return cls.from_field(fld) == cls.Include
|
||||
|
||||
|
||||
def metas(*metas: Metadata) -> Dict[str, Any]:
|
||||
existing: Dict[str, Any] = {}
|
||||
for m in metas:
|
||||
existing = m.meta(existing)
|
||||
return existing
|
||||
|
||||
|
||||
def _listify(value: Any) -> List:
|
||||
if isinstance(value, list):
|
||||
return value[:]
|
||||
else:
|
||||
return [value]
|
||||
|
||||
|
||||
def _merge_field_value(
|
||||
merge_behavior: MergeBehavior,
|
||||
self_value: Any,
|
||||
other_value: Any,
|
||||
):
|
||||
if merge_behavior == MergeBehavior.Clobber:
|
||||
return other_value
|
||||
elif merge_behavior == MergeBehavior.Append:
|
||||
return _listify(self_value) + _listify(other_value)
|
||||
elif merge_behavior == MergeBehavior.Update:
|
||||
if not isinstance(self_value, dict):
|
||||
raise InternalException(f'expected dict, got {self_value}')
|
||||
if not isinstance(other_value, dict):
|
||||
raise InternalException(f'expected dict, got {other_value}')
|
||||
value = self_value.copy()
|
||||
value.update(other_value)
|
||||
return value
|
||||
else:
|
||||
raise InternalException(
|
||||
f'Got an invalid merge_behavior: {merge_behavior}'
|
||||
)
|
||||
|
||||
|
||||
def insensitive_patterns(*patterns: str):
|
||||
lowercased = []
|
||||
for pattern in patterns:
|
||||
lowercased.append(
|
||||
''.join('[{}{}]'.format(s.upper(), s.lower()) for s in pattern)
|
||||
)
|
||||
return '^({})$'.format('|'.join(lowercased))
|
||||
|
||||
|
||||
Severity = NewType('Severity', str)
|
||||
|
||||
register_pattern(Severity, insensitive_patterns('warn', 'error'))
|
||||
|
||||
|
||||
class SnapshotStrategy(StrEnum):
|
||||
Timestamp = 'timestamp'
|
||||
Check = 'check'
|
||||
|
||||
|
||||
class All(StrEnum):
|
||||
All = 'all'
|
||||
|
||||
|
||||
@dataclass
|
||||
class Hook(JsonSchemaMixin, Replaceable):
|
||||
sql: str
|
||||
transaction: bool = True
|
||||
index: Optional[int] = None
|
||||
|
||||
|
||||
T = TypeVar('T', bound='BaseConfig')
|
||||
|
||||
|
||||
@dataclass
|
||||
class BaseConfig(
|
||||
AdditionalPropertiesAllowed, Replaceable, MutableMapping[str, Any]
|
||||
):
|
||||
# Implement MutableMapping so this config will behave as some macros expect
|
||||
# during parsing (notably, syntax like `{{ node.config['schema'] }}`)
|
||||
def __getitem__(self, key):
|
||||
"""Handle parse-time use of `config` as a dictionary, making the extra
|
||||
values available during parsing.
|
||||
"""
|
||||
if hasattr(self, key):
|
||||
return getattr(self, key)
|
||||
else:
|
||||
return self._extra[key]
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
if hasattr(self, key):
|
||||
setattr(self, key, value)
|
||||
else:
|
||||
self._extra[key] = value
|
||||
|
||||
def __delitem__(self, key):
|
||||
if hasattr(self, key):
|
||||
msg = (
|
||||
'Error, tried to delete config key "{}": Cannot delete '
|
||||
'built-in keys'
|
||||
).format(key)
|
||||
raise CompilationException(msg)
|
||||
else:
|
||||
del self._extra[key]
|
||||
|
||||
def _content_iterator(self, include_condition: Callable[[Field], bool]):
|
||||
seen = set()
|
||||
for fld, _ in self._get_fields():
|
||||
seen.add(fld.name)
|
||||
if include_condition(fld):
|
||||
yield fld.name
|
||||
|
||||
for key in self._extra:
|
||||
if key not in seen:
|
||||
seen.add(key)
|
||||
yield key
|
||||
|
||||
def __iter__(self):
|
||||
yield from self._content_iterator(include_condition=lambda f: True)
|
||||
|
||||
def __len__(self):
|
||||
return len(self._get_fields()) + len(self._extra)
|
||||
|
||||
@staticmethod
|
||||
def compare_key(
|
||||
unrendered: Dict[str, Any],
|
||||
other: Dict[str, Any],
|
||||
key: str,
|
||||
) -> bool:
|
||||
if key not in unrendered and key not in other:
|
||||
return True
|
||||
elif key not in unrendered and key in other:
|
||||
return False
|
||||
elif key in unrendered and key not in other:
|
||||
return False
|
||||
else:
|
||||
return unrendered[key] == other[key]
|
||||
|
||||
@classmethod
|
||||
def same_contents(
|
||||
cls, unrendered: Dict[str, Any], other: Dict[str, Any]
|
||||
) -> bool:
|
||||
"""This is like __eq__, except it ignores some fields."""
|
||||
seen = set()
|
||||
for fld, target_name in cls._get_fields():
|
||||
key = target_name
|
||||
seen.add(key)
|
||||
if CompareBehavior.should_include(fld):
|
||||
if not cls.compare_key(unrendered, other, key):
|
||||
return False
|
||||
|
||||
for key in chain(unrendered, other):
|
||||
if key not in seen:
|
||||
seen.add(key)
|
||||
if not cls.compare_key(unrendered, other, key):
|
||||
return False
|
||||
return True
|
||||
|
||||
@classmethod
|
||||
def _extract_dict(
|
||||
cls, src: Dict[str, Any], data: Dict[str, Any]
|
||||
) -> Dict[str, Any]:
|
||||
"""Find all the items in data that match a target_field on this class,
|
||||
and merge them with the data found in `src` for target_field, using the
|
||||
field's specified merge behavior. Matching items will be removed from
|
||||
`data` (but _not_ `src`!).
|
||||
|
||||
Returns a dict with the merge results.
|
||||
|
||||
That means this method mutates its input! Any remaining values in data
|
||||
were not merged.
|
||||
"""
|
||||
result = {}
|
||||
|
||||
for fld, target_field in cls._get_fields():
|
||||
if target_field not in data:
|
||||
continue
|
||||
|
||||
data_attr = data.pop(target_field)
|
||||
if target_field not in src:
|
||||
result[target_field] = data_attr
|
||||
continue
|
||||
|
||||
merge_behavior = MergeBehavior.from_field(fld)
|
||||
self_attr = src[target_field]
|
||||
|
||||
result[target_field] = _merge_field_value(
|
||||
merge_behavior=merge_behavior,
|
||||
self_value=self_attr,
|
||||
other_value=data_attr,
|
||||
)
|
||||
return result
|
||||
|
||||
def to_dict(
|
||||
self,
|
||||
omit_none: bool = True,
|
||||
validate: bool = False,
|
||||
*,
|
||||
omit_hidden: bool = True,
|
||||
) -> Dict[str, Any]:
|
||||
result = super().to_dict(omit_none=omit_none, validate=validate)
|
||||
if omit_hidden and not omit_none:
|
||||
for fld, target_field in self._get_fields():
|
||||
if target_field not in result:
|
||||
continue
|
||||
|
||||
# if the field is not None, preserve it regardless of the
|
||||
# setting. This is in line with existing behavior, but isn't
|
||||
# an endorsement of it!
|
||||
if result[target_field] is not None:
|
||||
continue
|
||||
|
||||
if not ShowBehavior.should_show(fld):
|
||||
del result[target_field]
|
||||
return result
|
||||
|
||||
def update_from(
|
||||
self: T, data: Dict[str, Any], adapter_type: str, validate: bool = True
|
||||
) -> T:
|
||||
"""Given a dict of keys, update the current config from them, validate
|
||||
it, and return a new config with the updated values
|
||||
"""
|
||||
# sadly, this is a circular import
|
||||
from dbt.adapters.factory import get_config_class_by_name
|
||||
dct = self.to_dict(omit_none=False, validate=False, omit_hidden=False)
|
||||
|
||||
adapter_config_cls = get_config_class_by_name(adapter_type)
|
||||
|
||||
self_merged = self._extract_dict(dct, data)
|
||||
dct.update(self_merged)
|
||||
|
||||
adapter_merged = adapter_config_cls._extract_dict(dct, data)
|
||||
dct.update(adapter_merged)
|
||||
|
||||
# any remaining fields must be "clobber"
|
||||
dct.update(data)
|
||||
|
||||
# any validation failures must have come from the update
|
||||
return self.from_dict(dct, validate=validate)
|
||||
|
||||
def finalize_and_validate(self: T) -> T:
|
||||
# from_dict will validate for us
|
||||
dct = self.to_dict(omit_none=False, validate=False)
|
||||
return self.from_dict(dct)
|
||||
|
||||
def replace(self, **kwargs):
|
||||
dct = self.to_dict(validate=False)
|
||||
|
||||
mapping = self.field_mapping()
|
||||
for key, value in kwargs.items():
|
||||
new_key = mapping.get(key, key)
|
||||
dct[new_key] = value
|
||||
return self.from_dict(dct, validate=False)
|
||||
|
||||
|
||||
@dataclass
|
||||
class SourceConfig(BaseConfig):
|
||||
enabled: bool = True
|
||||
|
||||
|
||||
@dataclass
|
||||
class NodeConfig(BaseConfig):
|
||||
enabled: bool = True
|
||||
materialized: str = 'view'
|
||||
persist_docs: Dict[str, Any] = field(default_factory=dict)
|
||||
post_hook: List[Hook] = field(
|
||||
default_factory=list,
|
||||
metadata=MergeBehavior.Append.meta(),
|
||||
)
|
||||
pre_hook: List[Hook] = field(
|
||||
default_factory=list,
|
||||
metadata=MergeBehavior.Append.meta(),
|
||||
)
|
||||
# this only applies for config v1, so it doesn't participate in comparison
|
||||
vars: Dict[str, Any] = field(
|
||||
default_factory=dict,
|
||||
metadata=metas(CompareBehavior.Exclude, MergeBehavior.Update),
|
||||
)
|
||||
quoting: Dict[str, Any] = field(
|
||||
default_factory=dict,
|
||||
metadata=MergeBehavior.Update.meta(),
|
||||
)
|
||||
# This is actually only used by seeds. Should it be available to others?
|
||||
# That would be a breaking change!
|
||||
column_types: Dict[str, Any] = field(
|
||||
default_factory=dict,
|
||||
metadata=MergeBehavior.Update.meta(),
|
||||
)
|
||||
# these fields are included in serialized output, but are not part of
|
||||
# config comparison (they are part of database_representation)
|
||||
alias: Optional[str] = field(
|
||||
default=None,
|
||||
metadata=CompareBehavior.Exclude.meta(),
|
||||
)
|
||||
schema: Optional[str] = field(
|
||||
default=None,
|
||||
metadata=CompareBehavior.Exclude.meta(),
|
||||
)
|
||||
database: Optional[str] = field(
|
||||
default=None,
|
||||
metadata=CompareBehavior.Exclude.meta(),
|
||||
)
|
||||
tags: Union[List[str], str] = field(
|
||||
default_factory=list_str,
|
||||
metadata=metas(ShowBehavior.Hide,
|
||||
MergeBehavior.Append,
|
||||
CompareBehavior.Exclude),
|
||||
)
|
||||
full_refresh: Optional[bool] = None
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, data, validate=True):
|
||||
for key in hooks.ModelHookType:
|
||||
if key in data:
|
||||
data[key] = [hooks.get_hook_dict(h) for h in data[key]]
|
||||
return super().from_dict(data, validate=validate)
|
||||
|
||||
@classmethod
|
||||
def field_mapping(cls):
|
||||
return {'post_hook': 'post-hook', 'pre_hook': 'pre-hook'}
|
||||
|
||||
|
||||
@dataclass
|
||||
class SeedConfig(NodeConfig):
|
||||
materialized: str = 'seed'
|
||||
quote_columns: Optional[bool] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class TestConfig(NodeConfig):
|
||||
materialized: str = 'test'
|
||||
severity: Severity = Severity('ERROR')
|
||||
|
||||
|
||||
SnapshotVariants = Union[
|
||||
'TimestampSnapshotConfig',
|
||||
'CheckSnapshotConfig',
|
||||
'GenericSnapshotConfig',
|
||||
]
|
||||
|
||||
|
||||
def _relevance_without_strategy(error: jsonschema.ValidationError):
|
||||
# calculate the 'relevance' of an error the normal jsonschema way, except
|
||||
# if the validator is in the 'strategy' field and its conflicting with the
|
||||
# 'enum'. This suppresses `"'timestamp' is not one of ['check']` and such
|
||||
if 'strategy' in error.path and error.validator in {'enum', 'not'}:
|
||||
length = 1
|
||||
else:
|
||||
length = -len(error.path)
|
||||
validator = error.validator
|
||||
return length, validator not in {'anyOf', 'oneOf'}
|
||||
|
||||
|
||||
@dataclass
|
||||
class SnapshotWrapper(JsonSchemaMixin):
|
||||
"""This is a little wrapper to let us serialize/deserialize the
|
||||
SnapshotVariants union.
|
||||
"""
|
||||
config: SnapshotVariants # mypy: ignore
|
||||
|
||||
@classmethod
|
||||
def validate(cls, data: Any):
|
||||
config = data.get('config', {})
|
||||
|
||||
if config.get('strategy') == 'check':
|
||||
schema = _validate_schema(CheckSnapshotConfig)
|
||||
to_validate = config
|
||||
|
||||
elif config.get('strategy') == 'timestamp':
|
||||
schema = _validate_schema(TimestampSnapshotConfig)
|
||||
to_validate = config
|
||||
|
||||
else:
|
||||
h_cls = cast(Hashable, cls)
|
||||
schema = _validate_schema(h_cls)
|
||||
to_validate = data
|
||||
|
||||
validator = jsonschema.Draft7Validator(schema)
|
||||
|
||||
error = jsonschema.exceptions.best_match(
|
||||
validator.iter_errors(to_validate),
|
||||
key=_relevance_without_strategy,
|
||||
)
|
||||
|
||||
if error is not None:
|
||||
raise ValidationError.create_from(error) from error
|
||||
|
||||
|
||||
@dataclass
|
||||
class EmptySnapshotConfig(NodeConfig):
|
||||
materialized: str = 'snapshot'
|
||||
|
||||
|
||||
@dataclass(init=False)
|
||||
class SnapshotConfig(EmptySnapshotConfig):
|
||||
unique_key: str = field(init=False, metadata=dict(init_required=True))
|
||||
target_schema: str = field(init=False, metadata=dict(init_required=True))
|
||||
target_database: Optional[str] = None
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
unique_key: str,
|
||||
target_schema: str,
|
||||
target_database: Optional[str] = None,
|
||||
**kwargs
|
||||
) -> None:
|
||||
self.unique_key = unique_key
|
||||
self.target_schema = target_schema
|
||||
self.target_database = target_database
|
||||
# kwargs['materialized'] = materialized
|
||||
super().__init__(**kwargs)
|
||||
|
||||
# type hacks...
|
||||
@classmethod
|
||||
def _get_fields(cls) -> List[Tuple[Field, str]]: # type: ignore
|
||||
fields: List[Tuple[Field, str]] = []
|
||||
for old_field, name in super()._get_fields():
|
||||
new_field = old_field
|
||||
# tell hologram we're really an initvar
|
||||
if old_field.metadata and old_field.metadata.get('init_required'):
|
||||
new_field = field(init=True, metadata=old_field.metadata)
|
||||
new_field.name = old_field.name
|
||||
new_field.type = old_field.type
|
||||
new_field._field_type = old_field._field_type # type: ignore
|
||||
fields.append((new_field, name))
|
||||
return fields
|
||||
|
||||
def finalize_and_validate(self: 'SnapshotConfig') -> SnapshotVariants:
|
||||
data = self.to_dict()
|
||||
return SnapshotWrapper.from_dict({'config': data}).config
|
||||
|
||||
|
||||
@dataclass(init=False)
|
||||
class GenericSnapshotConfig(SnapshotConfig):
|
||||
strategy: str = field(init=False, metadata=dict(init_required=True))
|
||||
|
||||
def __init__(self, strategy: str, **kwargs) -> None:
|
||||
self.strategy = strategy
|
||||
super().__init__(**kwargs)
|
||||
|
||||
@classmethod
|
||||
def _collect_json_schema(
|
||||
cls, definitions: Dict[str, Any]
|
||||
) -> Dict[str, Any]:
|
||||
# this is the method you want to override in hologram if you want
|
||||
# to do clever things about the json schema and have classes that
|
||||
# contain instances of your JsonSchemaMixin respect the change.
|
||||
schema = super()._collect_json_schema(definitions)
|
||||
|
||||
# Instead of just the strategy we'd calculate normally, say
|
||||
# "this strategy except none of our specialization strategies".
|
||||
strategies = [schema['properties']['strategy']]
|
||||
for specialization in (TimestampSnapshotConfig, CheckSnapshotConfig):
|
||||
strategies.append(
|
||||
{'not': specialization.json_schema()['properties']['strategy']}
|
||||
)
|
||||
|
||||
schema['properties']['strategy'] = {
|
||||
'allOf': strategies
|
||||
}
|
||||
return schema
|
||||
|
||||
|
||||
@dataclass(init=False)
|
||||
class TimestampSnapshotConfig(SnapshotConfig):
|
||||
strategy: str = field(
|
||||
init=False,
|
||||
metadata=dict(
|
||||
restrict=[str(SnapshotStrategy.Timestamp)],
|
||||
init_required=True,
|
||||
),
|
||||
)
|
||||
updated_at: str = field(init=False, metadata=dict(init_required=True))
|
||||
|
||||
def __init__(
|
||||
self, strategy: str, updated_at: str, **kwargs
|
||||
) -> None:
|
||||
self.strategy = strategy
|
||||
self.updated_at = updated_at
|
||||
super().__init__(**kwargs)
|
||||
|
||||
|
||||
@dataclass(init=False)
|
||||
class CheckSnapshotConfig(SnapshotConfig):
|
||||
strategy: str = field(
|
||||
init=False,
|
||||
metadata=dict(
|
||||
restrict=[str(SnapshotStrategy.Check)],
|
||||
init_required=True,
|
||||
),
|
||||
)
|
||||
# TODO: is there a way to get this to accept tuples of strings? Adding
|
||||
# `Tuple[str, ...]` to the list of types results in this:
|
||||
# ['email'] is valid under each of {'type': 'array', 'items':
|
||||
# {'type': 'string'}}, {'type': 'array', 'items': {'type': 'string'}}
|
||||
# but without it, parsing gets upset about values like `('email',)`
|
||||
# maybe hologram itself should support this behavior? It's not like tuples
|
||||
# are meaningful in json
|
||||
check_cols: Union[All, List[str]] = field(
|
||||
init=False,
|
||||
metadata=dict(init_required=True),
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self, strategy: str, check_cols: Union[All, List[str]],
|
||||
**kwargs
|
||||
) -> None:
|
||||
self.strategy = strategy
|
||||
self.check_cols = check_cols
|
||||
super().__init__(**kwargs)
|
||||
|
||||
|
||||
RESOURCE_TYPES: Dict[NodeType, Type[BaseConfig]] = {
|
||||
NodeType.Source: SourceConfig,
|
||||
NodeType.Seed: SeedConfig,
|
||||
NodeType.Test: TestConfig,
|
||||
NodeType.Model: NodeConfig,
|
||||
NodeType.Snapshot: SnapshotConfig,
|
||||
}
|
||||
|
||||
|
||||
# base resource types are like resource types, except nothing has mandatory
|
||||
# configs.
|
||||
BASE_RESOURCE_TYPES: Dict[NodeType, Type[BaseConfig]] = RESOURCE_TYPES.copy()
|
||||
BASE_RESOURCE_TYPES.update({
|
||||
NodeType.Snapshot: EmptySnapshotConfig
|
||||
})
|
||||
|
||||
|
||||
def get_config_for(resource_type: NodeType, base=False) -> Type[BaseConfig]:
|
||||
if base:
|
||||
lookup = BASE_RESOURCE_TYPES
|
||||
else:
|
||||
lookup = RESOURCE_TYPES
|
||||
return lookup.get(resource_type, NodeConfig)
|
||||
718
core/dbt/contracts/graph/parsed.py
Normal file
718
core/dbt/contracts/graph/parsed.py
Normal file
@@ -0,0 +1,718 @@
|
||||
import os
|
||||
from dataclasses import dataclass, field
|
||||
from pathlib import Path
|
||||
from typing import (
|
||||
Optional,
|
||||
Union,
|
||||
List,
|
||||
Dict,
|
||||
Any,
|
||||
Sequence,
|
||||
Tuple,
|
||||
Iterator,
|
||||
TypeVar,
|
||||
)
|
||||
|
||||
from hologram import JsonSchemaMixin
|
||||
from hologram.helpers import ExtensibleJsonSchemaMixin
|
||||
|
||||
from dbt.clients.system import write_file
|
||||
from dbt.contracts.files import FileHash, MAXIMUM_SEED_SIZE_NAME
|
||||
from dbt.contracts.graph.unparsed import (
|
||||
UnparsedNode, UnparsedDocumentation, Quoting, Docs,
|
||||
UnparsedBaseNode, FreshnessThreshold, ExternalTable,
|
||||
HasYamlMetadata, MacroArgument, UnparsedSourceDefinition,
|
||||
UnparsedSourceTableDefinition, UnparsedColumn, TestDef,
|
||||
ExposureOwner, ExposureType, MaturityType
|
||||
)
|
||||
from dbt.contracts.util import Replaceable, AdditionalPropertiesMixin
|
||||
from dbt.exceptions import warn_or_error
|
||||
from dbt.logger import GLOBAL_LOGGER as logger # noqa
|
||||
from dbt import flags
|
||||
from dbt.node_types import NodeType
|
||||
|
||||
|
||||
from .model_config import (
|
||||
NodeConfig,
|
||||
SeedConfig,
|
||||
TestConfig,
|
||||
SourceConfig,
|
||||
EmptySnapshotConfig,
|
||||
SnapshotVariants,
|
||||
)
|
||||
# import these 3 so the SnapshotVariants forward ref works.
|
||||
from .model_config import ( # noqa
|
||||
TimestampSnapshotConfig,
|
||||
CheckSnapshotConfig,
|
||||
GenericSnapshotConfig,
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class ColumnInfo(
|
||||
AdditionalPropertiesMixin,
|
||||
ExtensibleJsonSchemaMixin,
|
||||
Replaceable
|
||||
):
|
||||
name: str
|
||||
description: str = ''
|
||||
meta: Dict[str, Any] = field(default_factory=dict)
|
||||
data_type: Optional[str] = None
|
||||
quote: Optional[bool] = None
|
||||
tags: List[str] = field(default_factory=list)
|
||||
_extra: Dict[str, Any] = field(default_factory=dict)
|
||||
|
||||
|
||||
@dataclass
|
||||
class HasFqn(JsonSchemaMixin, Replaceable):
|
||||
fqn: List[str]
|
||||
|
||||
def same_fqn(self, other: 'HasFqn') -> bool:
|
||||
return self.fqn == other.fqn
|
||||
|
||||
|
||||
@dataclass
|
||||
class HasUniqueID(JsonSchemaMixin, Replaceable):
|
||||
unique_id: str
|
||||
|
||||
|
||||
@dataclass
|
||||
class MacroDependsOn(JsonSchemaMixin, Replaceable):
|
||||
macros: List[str] = field(default_factory=list)
|
||||
|
||||
# 'in' on lists is O(n) so this is O(n^2) for # of macros
|
||||
def add_macro(self, value: str):
|
||||
if value not in self.macros:
|
||||
self.macros.append(value)
|
||||
|
||||
|
||||
@dataclass
|
||||
class DependsOn(MacroDependsOn):
|
||||
nodes: List[str] = field(default_factory=list)
|
||||
|
||||
def add_node(self, value: str):
|
||||
if value not in self.nodes:
|
||||
self.nodes.append(value)
|
||||
|
||||
|
||||
@dataclass
|
||||
class HasRelationMetadata(JsonSchemaMixin, Replaceable):
|
||||
database: Optional[str]
|
||||
schema: str
|
||||
|
||||
|
||||
class ParsedNodeMixins(JsonSchemaMixin):
|
||||
resource_type: NodeType
|
||||
depends_on: DependsOn
|
||||
config: NodeConfig
|
||||
|
||||
@property
|
||||
def is_refable(self):
|
||||
return self.resource_type in NodeType.refable()
|
||||
|
||||
@property
|
||||
def is_ephemeral(self):
|
||||
return self.config.materialized == 'ephemeral'
|
||||
|
||||
@property
|
||||
def is_ephemeral_model(self):
|
||||
return self.is_refable and self.is_ephemeral
|
||||
|
||||
@property
|
||||
def depends_on_nodes(self):
|
||||
return self.depends_on.nodes
|
||||
|
||||
def patch(self, patch: 'ParsedNodePatch'):
|
||||
"""Given a ParsedNodePatch, add the new information to the node."""
|
||||
# explicitly pick out the parts to update so we don't inadvertently
|
||||
# step on the model name or anything
|
||||
self.patch_path: Optional[str] = patch.original_file_path
|
||||
self.description = patch.description
|
||||
self.columns = patch.columns
|
||||
self.meta = patch.meta
|
||||
self.docs = patch.docs
|
||||
if flags.STRICT_MODE:
|
||||
assert isinstance(self, JsonSchemaMixin)
|
||||
self.to_dict(validate=True, omit_none=False)
|
||||
|
||||
def get_materialization(self):
|
||||
return self.config.materialized
|
||||
|
||||
def local_vars(self):
|
||||
return self.config.vars
|
||||
|
||||
|
||||
@dataclass
|
||||
class ParsedNodeMandatory(
|
||||
UnparsedNode,
|
||||
HasUniqueID,
|
||||
HasFqn,
|
||||
HasRelationMetadata,
|
||||
Replaceable
|
||||
):
|
||||
alias: str
|
||||
checksum: FileHash
|
||||
config: NodeConfig = field(default_factory=NodeConfig)
|
||||
|
||||
@property
|
||||
def identifier(self):
|
||||
return self.alias
|
||||
|
||||
|
||||
@dataclass
|
||||
class ParsedNodeDefaults(ParsedNodeMandatory):
|
||||
tags: List[str] = field(default_factory=list)
|
||||
refs: List[List[str]] = field(default_factory=list)
|
||||
sources: List[List[Any]] = field(default_factory=list)
|
||||
depends_on: DependsOn = field(default_factory=DependsOn)
|
||||
description: str = field(default='')
|
||||
columns: Dict[str, ColumnInfo] = field(default_factory=dict)
|
||||
meta: Dict[str, Any] = field(default_factory=dict)
|
||||
docs: Docs = field(default_factory=Docs)
|
||||
patch_path: Optional[str] = None
|
||||
build_path: Optional[str] = None
|
||||
deferred: bool = False
|
||||
unrendered_config: Dict[str, Any] = field(default_factory=dict)
|
||||
|
||||
def write_node(self, target_path: str, subdirectory: str, payload: str):
|
||||
if (os.path.basename(self.path) ==
|
||||
os.path.basename(self.original_file_path)):
|
||||
# One-to-one relationship of nodes to files.
|
||||
path = self.original_file_path
|
||||
else:
|
||||
# Many-to-one relationship of nodes to files.
|
||||
path = os.path.join(self.original_file_path, self.path)
|
||||
full_path = os.path.join(
|
||||
target_path, subdirectory, self.package_name, path
|
||||
)
|
||||
|
||||
write_file(full_path, payload)
|
||||
return full_path
|
||||
|
||||
|
||||
T = TypeVar('T', bound='ParsedNode')
|
||||
|
||||
|
||||
@dataclass
|
||||
class ParsedNode(ParsedNodeDefaults, ParsedNodeMixins):
|
||||
def _persist_column_docs(self) -> bool:
|
||||
return bool(self.config.persist_docs.get('columns'))
|
||||
|
||||
def _persist_relation_docs(self) -> bool:
|
||||
return bool(self.config.persist_docs.get('relation'))
|
||||
|
||||
def same_body(self: T, other: T) -> bool:
|
||||
return self.raw_sql == other.raw_sql
|
||||
|
||||
def same_persisted_description(self: T, other: T) -> bool:
|
||||
# the check on configs will handle the case where we have different
|
||||
# persist settings, so we only have to care about the cases where they
|
||||
# are the same..
|
||||
if self._persist_relation_docs():
|
||||
if self.description != other.description:
|
||||
return False
|
||||
|
||||
if self._persist_column_docs():
|
||||
# assert other._persist_column_docs()
|
||||
column_descriptions = {
|
||||
k: v.description for k, v in self.columns.items()
|
||||
}
|
||||
other_column_descriptions = {
|
||||
k: v.description for k, v in other.columns.items()
|
||||
}
|
||||
if column_descriptions != other_column_descriptions:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def same_database_representation(self, other: T) -> bool:
|
||||
# compare the config representation, not the node's config value. This
|
||||
# compares the configured value, rather than the ultimate value (so
|
||||
# generate_*_name and unset values derived from the target are
|
||||
# ignored)
|
||||
keys = ('database', 'schema', 'alias')
|
||||
for key in keys:
|
||||
mine = self.unrendered_config.get(key)
|
||||
others = other.unrendered_config.get(key)
|
||||
if mine != others:
|
||||
return False
|
||||
return True
|
||||
|
||||
def same_config(self, old: T) -> bool:
|
||||
return self.config.same_contents(
|
||||
self.unrendered_config,
|
||||
old.unrendered_config,
|
||||
)
|
||||
|
||||
def same_contents(self: T, old: Optional[T]) -> bool:
|
||||
if old is None:
|
||||
return False
|
||||
|
||||
return (
|
||||
self.same_body(old) and
|
||||
self.same_config(old) and
|
||||
self.same_persisted_description(old) and
|
||||
self.same_fqn(old) and
|
||||
self.same_database_representation(old) and
|
||||
True
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class ParsedAnalysisNode(ParsedNode):
|
||||
resource_type: NodeType = field(metadata={'restrict': [NodeType.Analysis]})
|
||||
|
||||
|
||||
@dataclass
|
||||
class ParsedHookNode(ParsedNode):
|
||||
resource_type: NodeType = field(
|
||||
metadata={'restrict': [NodeType.Operation]}
|
||||
)
|
||||
index: Optional[int] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class ParsedModelNode(ParsedNode):
|
||||
resource_type: NodeType = field(metadata={'restrict': [NodeType.Model]})
|
||||
|
||||
|
||||
@dataclass
|
||||
class ParsedRPCNode(ParsedNode):
|
||||
resource_type: NodeType = field(metadata={'restrict': [NodeType.RPCCall]})
|
||||
|
||||
|
||||
def same_seeds(first: ParsedNode, second: ParsedNode) -> bool:
|
||||
# for seeds, we check the hashes. If the hashes are different types,
|
||||
# no match. If the hashes are both the same 'path', log a warning and
|
||||
# assume they are the same
|
||||
# if the current checksum is a path, we want to log a warning.
|
||||
result = first.checksum == second.checksum
|
||||
|
||||
if first.checksum.name == 'path':
|
||||
msg: str
|
||||
if second.checksum.name != 'path':
|
||||
msg = (
|
||||
f'Found a seed ({first.package_name}.{first.name}) '
|
||||
f'>{MAXIMUM_SEED_SIZE_NAME} in size. The previous file was '
|
||||
f'<={MAXIMUM_SEED_SIZE_NAME}, so it has changed'
|
||||
)
|
||||
elif result:
|
||||
msg = (
|
||||
f'Found a seed ({first.package_name}.{first.name}) '
|
||||
f'>{MAXIMUM_SEED_SIZE_NAME} in size at the same path, dbt '
|
||||
f'cannot tell if it has changed: assuming they are the same'
|
||||
)
|
||||
elif not result:
|
||||
msg = (
|
||||
f'Found a seed ({first.package_name}.{first.name}) '
|
||||
f'>{MAXIMUM_SEED_SIZE_NAME} in size. The previous file was in '
|
||||
f'a different location, assuming it has changed'
|
||||
)
|
||||
else:
|
||||
msg = (
|
||||
f'Found a seed ({first.package_name}.{first.name}) '
|
||||
f'>{MAXIMUM_SEED_SIZE_NAME} in size. The previous file had a '
|
||||
f'checksum type of {second.checksum.name}, so it has changed'
|
||||
)
|
||||
warn_or_error(msg, node=first)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
@dataclass
|
||||
class ParsedSeedNode(ParsedNode):
|
||||
# keep this in sync with CompiledSeedNode!
|
||||
resource_type: NodeType = field(metadata={'restrict': [NodeType.Seed]})
|
||||
config: SeedConfig = field(default_factory=SeedConfig)
|
||||
|
||||
@property
|
||||
def empty(self):
|
||||
""" Seeds are never empty"""
|
||||
return False
|
||||
|
||||
def same_body(self: T, other: T) -> bool:
|
||||
return same_seeds(self, other)
|
||||
|
||||
|
||||
@dataclass
|
||||
class TestMetadata(JsonSchemaMixin, Replaceable):
|
||||
namespace: Optional[str]
|
||||
name: str
|
||||
kwargs: Dict[str, Any]
|
||||
|
||||
|
||||
@dataclass
|
||||
class HasTestMetadata(JsonSchemaMixin):
|
||||
test_metadata: TestMetadata
|
||||
|
||||
|
||||
@dataclass
|
||||
class ParsedDataTestNode(ParsedNode):
|
||||
resource_type: NodeType = field(metadata={'restrict': [NodeType.Test]})
|
||||
config: TestConfig = field(default_factory=TestConfig)
|
||||
|
||||
|
||||
@dataclass
|
||||
class ParsedSchemaTestNode(ParsedNode, HasTestMetadata):
|
||||
# keep this in sync with CompiledSchemaTestNode!
|
||||
resource_type: NodeType = field(metadata={'restrict': [NodeType.Test]})
|
||||
column_name: Optional[str] = None
|
||||
config: TestConfig = field(default_factory=TestConfig)
|
||||
|
||||
def same_config(self, other) -> bool:
|
||||
return (
|
||||
self.unrendered_config.get('severity') ==
|
||||
other.unrendered_config.get('severity')
|
||||
)
|
||||
|
||||
def same_column_name(self, other) -> bool:
|
||||
return self.column_name == other.column_name
|
||||
|
||||
def same_contents(self, other) -> bool:
|
||||
if other is None:
|
||||
return False
|
||||
|
||||
return (
|
||||
self.same_config(other) and
|
||||
self.same_fqn(other) and
|
||||
True
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class IntermediateSnapshotNode(ParsedNode):
|
||||
# at an intermediate stage in parsing, where we've built something better
|
||||
# than an unparsed node for rendering in parse mode, it's pretty possible
|
||||
# that we won't have critical snapshot-related information that is only
|
||||
# defined in config blocks. To fix that, we have an intermediate type that
|
||||
# uses a regular node config, which the snapshot parser will then convert
|
||||
# into a full ParsedSnapshotNode after rendering.
|
||||
resource_type: NodeType = field(metadata={'restrict': [NodeType.Snapshot]})
|
||||
config: EmptySnapshotConfig = field(default_factory=EmptySnapshotConfig)
|
||||
|
||||
|
||||
@dataclass
|
||||
class ParsedSnapshotNode(ParsedNode):
|
||||
resource_type: NodeType = field(metadata={'restrict': [NodeType.Snapshot]})
|
||||
config: SnapshotVariants
|
||||
|
||||
|
||||
@dataclass
|
||||
class ParsedPatch(HasYamlMetadata, Replaceable):
|
||||
name: str
|
||||
description: str
|
||||
meta: Dict[str, Any]
|
||||
docs: Docs
|
||||
|
||||
|
||||
# The parsed node update is only the 'patch', not the test. The test became a
|
||||
# regular parsed node. Note that description and columns must be present, but
|
||||
# may be empty.
|
||||
@dataclass
|
||||
class ParsedNodePatch(ParsedPatch):
|
||||
columns: Dict[str, ColumnInfo]
|
||||
|
||||
|
||||
@dataclass
|
||||
class ParsedMacroPatch(ParsedPatch):
|
||||
arguments: List[MacroArgument] = field(default_factory=list)
|
||||
|
||||
|
||||
@dataclass
|
||||
class ParsedMacro(UnparsedBaseNode, HasUniqueID):
|
||||
name: str
|
||||
macro_sql: str
|
||||
resource_type: NodeType = field(metadata={'restrict': [NodeType.Macro]})
|
||||
# TODO: can macros even have tags?
|
||||
tags: List[str] = field(default_factory=list)
|
||||
# TODO: is this ever populated?
|
||||
depends_on: MacroDependsOn = field(default_factory=MacroDependsOn)
|
||||
description: str = ''
|
||||
meta: Dict[str, Any] = field(default_factory=dict)
|
||||
docs: Docs = field(default_factory=Docs)
|
||||
patch_path: Optional[str] = None
|
||||
arguments: List[MacroArgument] = field(default_factory=list)
|
||||
|
||||
def local_vars(self):
|
||||
return {}
|
||||
|
||||
def patch(self, patch: ParsedMacroPatch):
|
||||
self.patch_path: Optional[str] = patch.original_file_path
|
||||
self.description = patch.description
|
||||
self.meta = patch.meta
|
||||
self.docs = patch.docs
|
||||
self.arguments = patch.arguments
|
||||
if flags.STRICT_MODE:
|
||||
assert isinstance(self, JsonSchemaMixin)
|
||||
self.to_dict(validate=True, omit_none=False)
|
||||
|
||||
def same_contents(self, other: Optional['ParsedMacro']) -> bool:
|
||||
if other is None:
|
||||
return False
|
||||
# the only thing that makes one macro different from another with the
|
||||
# same name/package is its content
|
||||
return self.macro_sql == other.macro_sql
|
||||
|
||||
|
||||
@dataclass
|
||||
class ParsedDocumentation(UnparsedDocumentation, HasUniqueID):
|
||||
name: str
|
||||
block_contents: str
|
||||
|
||||
@property
|
||||
def search_name(self):
|
||||
return self.name
|
||||
|
||||
def same_contents(self, other: Optional['ParsedDocumentation']) -> bool:
|
||||
if other is None:
|
||||
return False
|
||||
# the only thing that makes one doc different from another with the
|
||||
# same name/package is its content
|
||||
return self.block_contents == other.block_contents
|
||||
|
||||
|
||||
def normalize_test(testdef: TestDef) -> Dict[str, Any]:
|
||||
if isinstance(testdef, str):
|
||||
return {testdef: {}}
|
||||
else:
|
||||
return testdef
|
||||
|
||||
|
||||
@dataclass
|
||||
class UnpatchedSourceDefinition(UnparsedBaseNode, HasUniqueID, HasFqn):
|
||||
source: UnparsedSourceDefinition
|
||||
table: UnparsedSourceTableDefinition
|
||||
resource_type: NodeType = field(metadata={'restrict': [NodeType.Source]})
|
||||
patch_path: Optional[Path] = None
|
||||
|
||||
def get_full_source_name(self):
|
||||
return f'{self.source.name}_{self.table.name}'
|
||||
|
||||
def get_source_representation(self):
|
||||
return f'source("{self.source.name}", "{self.table.name}")'
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return self.get_full_source_name()
|
||||
|
||||
@property
|
||||
def quote_columns(self) -> Optional[bool]:
|
||||
result = None
|
||||
if self.source.quoting.column is not None:
|
||||
result = self.source.quoting.column
|
||||
if self.table.quoting.column is not None:
|
||||
result = self.table.quoting.column
|
||||
return result
|
||||
|
||||
@property
|
||||
def columns(self) -> Sequence[UnparsedColumn]:
|
||||
if self.table.columns is None:
|
||||
return []
|
||||
else:
|
||||
return self.table.columns
|
||||
|
||||
def get_tests(
|
||||
self
|
||||
) -> Iterator[Tuple[Dict[str, Any], Optional[UnparsedColumn]]]:
|
||||
for test in self.tests:
|
||||
yield normalize_test(test), None
|
||||
|
||||
for column in self.columns:
|
||||
if column.tests is not None:
|
||||
for test in column.tests:
|
||||
yield normalize_test(test), column
|
||||
|
||||
@property
|
||||
def tests(self) -> List[TestDef]:
|
||||
if self.table.tests is None:
|
||||
return []
|
||||
else:
|
||||
return self.table.tests
|
||||
|
||||
|
||||
@dataclass
|
||||
class ParsedSourceDefinition(
|
||||
UnparsedBaseNode,
|
||||
HasUniqueID,
|
||||
HasRelationMetadata,
|
||||
HasFqn
|
||||
):
|
||||
name: str
|
||||
source_name: str
|
||||
source_description: str
|
||||
loader: str
|
||||
identifier: str
|
||||
resource_type: NodeType = field(metadata={'restrict': [NodeType.Source]})
|
||||
quoting: Quoting = field(default_factory=Quoting)
|
||||
loaded_at_field: Optional[str] = None
|
||||
freshness: Optional[FreshnessThreshold] = None
|
||||
external: Optional[ExternalTable] = None
|
||||
description: str = ''
|
||||
columns: Dict[str, ColumnInfo] = field(default_factory=dict)
|
||||
meta: Dict[str, Any] = field(default_factory=dict)
|
||||
source_meta: Dict[str, Any] = field(default_factory=dict)
|
||||
tags: List[str] = field(default_factory=list)
|
||||
config: SourceConfig = field(default_factory=SourceConfig)
|
||||
patch_path: Optional[Path] = None
|
||||
unrendered_config: Dict[str, Any] = field(default_factory=dict)
|
||||
relation_name: Optional[str] = None
|
||||
|
||||
def same_database_representation(
|
||||
self, other: 'ParsedSourceDefinition'
|
||||
) -> bool:
|
||||
return (
|
||||
self.database == other.database and
|
||||
self.schema == other.schema and
|
||||
self.identifier == other.identifier and
|
||||
True
|
||||
)
|
||||
|
||||
def same_quoting(self, other: 'ParsedSourceDefinition') -> bool:
|
||||
return self.quoting == other.quoting
|
||||
|
||||
def same_freshness(self, other: 'ParsedSourceDefinition') -> bool:
|
||||
return (
|
||||
self.freshness == other.freshness and
|
||||
self.loaded_at_field == other.loaded_at_field and
|
||||
True
|
||||
)
|
||||
|
||||
def same_external(self, other: 'ParsedSourceDefinition') -> bool:
|
||||
return self.external == other.external
|
||||
|
||||
def same_config(self, old: 'ParsedSourceDefinition') -> bool:
|
||||
return self.config.same_contents(
|
||||
self.unrendered_config,
|
||||
old.unrendered_config,
|
||||
)
|
||||
|
||||
def same_contents(self, old: Optional['ParsedSourceDefinition']) -> bool:
|
||||
# existing when it didn't before is a change!
|
||||
if old is None:
|
||||
return True
|
||||
|
||||
# config changes are changes (because the only config is "enabled", and
|
||||
# enabling a source is a change!)
|
||||
# changing the database/schema/identifier is a change
|
||||
# messing around with external stuff is a change (uh, right?)
|
||||
# quoting changes are changes
|
||||
# freshness changes are changes, I guess
|
||||
# metadata/tags changes are not "changes"
|
||||
# patching/description changes are not "changes"
|
||||
return (
|
||||
self.same_database_representation(old) and
|
||||
self.same_fqn(old) and
|
||||
self.same_config(old) and
|
||||
self.same_quoting(old) and
|
||||
self.same_freshness(old) and
|
||||
self.same_external(old) and
|
||||
True
|
||||
)
|
||||
|
||||
def get_full_source_name(self):
|
||||
return f'{self.source_name}_{self.name}'
|
||||
|
||||
def get_source_representation(self):
|
||||
return f'source("{self.source.name}", "{self.table.name}")'
|
||||
|
||||
@property
|
||||
def is_refable(self):
|
||||
return False
|
||||
|
||||
@property
|
||||
def is_ephemeral(self):
|
||||
return False
|
||||
|
||||
@property
|
||||
def is_ephemeral_model(self):
|
||||
return False
|
||||
|
||||
@property
|
||||
def depends_on_nodes(self):
|
||||
return []
|
||||
|
||||
@property
|
||||
def refs(self):
|
||||
return []
|
||||
|
||||
@property
|
||||
def sources(self):
|
||||
return []
|
||||
|
||||
@property
|
||||
def has_freshness(self):
|
||||
return bool(self.freshness) and self.loaded_at_field is not None
|
||||
|
||||
@property
|
||||
def search_name(self):
|
||||
return f'{self.source_name}.{self.name}'
|
||||
|
||||
|
||||
@dataclass
|
||||
class ParsedExposure(UnparsedBaseNode, HasUniqueID, HasFqn):
|
||||
name: str
|
||||
type: ExposureType
|
||||
owner: ExposureOwner
|
||||
resource_type: NodeType = NodeType.Exposure
|
||||
description: str = ''
|
||||
maturity: Optional[MaturityType] = None
|
||||
url: Optional[str] = None
|
||||
depends_on: DependsOn = field(default_factory=DependsOn)
|
||||
refs: List[List[str]] = field(default_factory=list)
|
||||
sources: List[List[str]] = field(default_factory=list)
|
||||
|
||||
@property
|
||||
def depends_on_nodes(self):
|
||||
return self.depends_on.nodes
|
||||
|
||||
@property
|
||||
def search_name(self):
|
||||
return self.name
|
||||
|
||||
# no tags for now, but we could definitely add them
|
||||
@property
|
||||
def tags(self):
|
||||
return []
|
||||
|
||||
def same_depends_on(self, old: 'ParsedExposure') -> bool:
|
||||
return set(self.depends_on.nodes) == set(old.depends_on.nodes)
|
||||
|
||||
def same_description(self, old: 'ParsedExposure') -> bool:
|
||||
return self.description == old.description
|
||||
|
||||
def same_maturity(self, old: 'ParsedExposure') -> bool:
|
||||
return self.maturity == old.maturity
|
||||
|
||||
def same_owner(self, old: 'ParsedExposure') -> bool:
|
||||
return self.owner == old.owner
|
||||
|
||||
def same_exposure_type(self, old: 'ParsedExposure') -> bool:
|
||||
return self.type == old.type
|
||||
|
||||
def same_url(self, old: 'ParsedExposure') -> bool:
|
||||
return self.url == old.url
|
||||
|
||||
def same_contents(self, old: Optional['ParsedExposure']) -> bool:
|
||||
# existing when it didn't before is a change!
|
||||
if old is None:
|
||||
return True
|
||||
|
||||
return (
|
||||
self.same_fqn(old) and
|
||||
self.same_exposure_type(old) and
|
||||
self.same_owner(old) and
|
||||
self.same_maturity(old) and
|
||||
self.same_url(old) and
|
||||
self.same_description(old) and
|
||||
self.same_depends_on(old) and
|
||||
True
|
||||
)
|
||||
|
||||
|
||||
ParsedResource = Union[
|
||||
ParsedDocumentation,
|
||||
ParsedMacro,
|
||||
ParsedNode,
|
||||
ParsedExposure,
|
||||
ParsedSourceDefinition,
|
||||
]
|
||||
416
core/dbt/contracts/graph/unparsed.py
Normal file
416
core/dbt/contracts/graph/unparsed.py
Normal file
@@ -0,0 +1,416 @@
|
||||
from dbt.node_types import NodeType
|
||||
from dbt.contracts.util import (
|
||||
AdditionalPropertiesMixin,
|
||||
Mergeable,
|
||||
Replaceable,
|
||||
)
|
||||
# trigger the PathEncoder
|
||||
import dbt.helper_types # noqa:F401
|
||||
from dbt.exceptions import CompilationException
|
||||
|
||||
from hologram import JsonSchemaMixin
|
||||
from hologram.helpers import StrEnum, ExtensibleJsonSchemaMixin
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import timedelta
|
||||
from pathlib import Path
|
||||
from typing import Optional, List, Union, Dict, Any, Sequence
|
||||
|
||||
|
||||
@dataclass
|
||||
class UnparsedBaseNode(JsonSchemaMixin, Replaceable):
|
||||
package_name: str
|
||||
root_path: str
|
||||
path: str
|
||||
original_file_path: str
|
||||
|
||||
|
||||
@dataclass
|
||||
class HasSQL:
|
||||
raw_sql: str
|
||||
|
||||
@property
|
||||
def empty(self):
|
||||
return not self.raw_sql.strip()
|
||||
|
||||
|
||||
@dataclass
|
||||
class UnparsedMacro(UnparsedBaseNode, HasSQL):
|
||||
resource_type: NodeType = field(metadata={'restrict': [NodeType.Macro]})
|
||||
|
||||
|
||||
@dataclass
|
||||
class UnparsedNode(UnparsedBaseNode, HasSQL):
|
||||
name: str
|
||||
resource_type: NodeType = field(metadata={'restrict': [
|
||||
NodeType.Model,
|
||||
NodeType.Analysis,
|
||||
NodeType.Test,
|
||||
NodeType.Snapshot,
|
||||
NodeType.Operation,
|
||||
NodeType.Seed,
|
||||
NodeType.RPCCall,
|
||||
]})
|
||||
|
||||
@property
|
||||
def search_name(self):
|
||||
return self.name
|
||||
|
||||
|
||||
@dataclass
|
||||
class UnparsedRunHook(UnparsedNode):
|
||||
resource_type: NodeType = field(
|
||||
metadata={'restrict': [NodeType.Operation]}
|
||||
)
|
||||
index: Optional[int] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class Docs(JsonSchemaMixin, Replaceable):
|
||||
show: bool = True
|
||||
|
||||
|
||||
@dataclass
|
||||
class HasDocs(AdditionalPropertiesMixin, ExtensibleJsonSchemaMixin,
|
||||
Replaceable):
|
||||
name: str
|
||||
description: str = ''
|
||||
meta: Dict[str, Any] = field(default_factory=dict)
|
||||
data_type: Optional[str] = None
|
||||
docs: Docs = field(default_factory=Docs)
|
||||
_extra: Dict[str, Any] = field(default_factory=dict)
|
||||
|
||||
|
||||
TestDef = Union[Dict[str, Any], str]
|
||||
|
||||
|
||||
@dataclass
|
||||
class HasTests(HasDocs):
|
||||
tests: Optional[List[TestDef]] = None
|
||||
|
||||
def __post_init__(self):
|
||||
if self.tests is None:
|
||||
self.tests = []
|
||||
|
||||
|
||||
@dataclass
|
||||
class UnparsedColumn(HasTests):
|
||||
quote: Optional[bool] = None
|
||||
tags: List[str] = field(default_factory=list)
|
||||
|
||||
|
||||
@dataclass
|
||||
class HasColumnDocs(JsonSchemaMixin, Replaceable):
|
||||
columns: Sequence[HasDocs] = field(default_factory=list)
|
||||
|
||||
|
||||
@dataclass
|
||||
class HasColumnTests(HasColumnDocs):
|
||||
columns: Sequence[UnparsedColumn] = field(default_factory=list)
|
||||
|
||||
|
||||
@dataclass
|
||||
class HasYamlMetadata(JsonSchemaMixin):
|
||||
original_file_path: str
|
||||
yaml_key: str
|
||||
package_name: str
|
||||
|
||||
|
||||
@dataclass
|
||||
class UnparsedAnalysisUpdate(HasColumnDocs, HasDocs, HasYamlMetadata):
|
||||
pass
|
||||
|
||||
|
||||
@dataclass
|
||||
class UnparsedNodeUpdate(HasColumnTests, HasTests, HasYamlMetadata):
|
||||
quote_columns: Optional[bool] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class MacroArgument(JsonSchemaMixin):
|
||||
name: str
|
||||
type: Optional[str] = None
|
||||
description: str = ''
|
||||
|
||||
|
||||
@dataclass
|
||||
class UnparsedMacroUpdate(HasDocs, HasYamlMetadata):
|
||||
arguments: List[MacroArgument] = field(default_factory=list)
|
||||
|
||||
|
||||
class TimePeriod(StrEnum):
|
||||
minute = 'minute'
|
||||
hour = 'hour'
|
||||
day = 'day'
|
||||
|
||||
def plural(self) -> str:
|
||||
return str(self) + 's'
|
||||
|
||||
|
||||
@dataclass
|
||||
class Time(JsonSchemaMixin, Replaceable):
|
||||
count: int
|
||||
period: TimePeriod
|
||||
|
||||
def exceeded(self, actual_age: float) -> bool:
|
||||
kwargs = {self.period.plural(): self.count}
|
||||
difference = timedelta(**kwargs).total_seconds()
|
||||
return actual_age > difference
|
||||
|
||||
|
||||
@dataclass
|
||||
class FreshnessThreshold(JsonSchemaMixin, Mergeable):
|
||||
warn_after: Optional[Time] = None
|
||||
error_after: Optional[Time] = None
|
||||
filter: Optional[str] = None
|
||||
|
||||
def status(self, age: float) -> "dbt.contracts.results.FreshnessStatus":
|
||||
from dbt.contracts.results import FreshnessStatus
|
||||
if self.error_after and self.error_after.exceeded(age):
|
||||
return FreshnessStatus.Error
|
||||
elif self.warn_after and self.warn_after.exceeded(age):
|
||||
return FreshnessStatus.Warn
|
||||
else:
|
||||
return FreshnessStatus.Pass
|
||||
|
||||
def __bool__(self):
|
||||
return self.warn_after is not None or self.error_after is not None
|
||||
|
||||
|
||||
@dataclass
|
||||
class AdditionalPropertiesAllowed(
|
||||
AdditionalPropertiesMixin,
|
||||
ExtensibleJsonSchemaMixin
|
||||
):
|
||||
_extra: Dict[str, Any] = field(default_factory=dict)
|
||||
|
||||
|
||||
@dataclass
|
||||
class ExternalPartition(AdditionalPropertiesAllowed, Replaceable):
|
||||
name: str = ''
|
||||
description: str = ''
|
||||
data_type: str = ''
|
||||
meta: Dict[str, Any] = field(default_factory=dict)
|
||||
|
||||
def __post_init__(self):
|
||||
if self.name == '' or self.data_type == '':
|
||||
raise CompilationException(
|
||||
'External partition columns must have names and data types'
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class ExternalTable(AdditionalPropertiesAllowed, Mergeable):
|
||||
location: Optional[str] = None
|
||||
file_format: Optional[str] = None
|
||||
row_format: Optional[str] = None
|
||||
tbl_properties: Optional[str] = None
|
||||
partitions: Optional[List[ExternalPartition]] = None
|
||||
|
||||
def __bool__(self):
|
||||
return self.location is not None
|
||||
|
||||
|
||||
@dataclass
|
||||
class Quoting(JsonSchemaMixin, Mergeable):
|
||||
database: Optional[bool] = None
|
||||
schema: Optional[bool] = None
|
||||
identifier: Optional[bool] = None
|
||||
column: Optional[bool] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class UnparsedSourceTableDefinition(HasColumnTests, HasTests):
|
||||
loaded_at_field: Optional[str] = None
|
||||
identifier: Optional[str] = None
|
||||
quoting: Quoting = field(default_factory=Quoting)
|
||||
freshness: Optional[FreshnessThreshold] = field(
|
||||
default_factory=FreshnessThreshold
|
||||
)
|
||||
external: Optional[ExternalTable] = None
|
||||
tags: List[str] = field(default_factory=list)
|
||||
|
||||
def to_dict(self, omit_none=True, validate=False):
|
||||
result = super().to_dict(omit_none=omit_none, validate=validate)
|
||||
if omit_none and self.freshness is None:
|
||||
result['freshness'] = None
|
||||
return result
|
||||
|
||||
|
||||
@dataclass
|
||||
class UnparsedSourceDefinition(JsonSchemaMixin, Replaceable):
|
||||
name: str
|
||||
description: str = ''
|
||||
meta: Dict[str, Any] = field(default_factory=dict)
|
||||
database: Optional[str] = None
|
||||
schema: Optional[str] = None
|
||||
loader: str = ''
|
||||
quoting: Quoting = field(default_factory=Quoting)
|
||||
freshness: Optional[FreshnessThreshold] = field(
|
||||
default_factory=FreshnessThreshold
|
||||
)
|
||||
loaded_at_field: Optional[str] = None
|
||||
tables: List[UnparsedSourceTableDefinition] = field(default_factory=list)
|
||||
tags: List[str] = field(default_factory=list)
|
||||
|
||||
@property
|
||||
def yaml_key(self) -> 'str':
|
||||
return 'sources'
|
||||
|
||||
def to_dict(self, omit_none=True, validate=False):
|
||||
result = super().to_dict(omit_none=omit_none, validate=validate)
|
||||
if omit_none and self.freshness is None:
|
||||
result['freshness'] = None
|
||||
return result
|
||||
|
||||
|
||||
@dataclass
|
||||
class SourceTablePatch(JsonSchemaMixin):
|
||||
name: str
|
||||
description: Optional[str] = None
|
||||
meta: Optional[Dict[str, Any]] = None
|
||||
data_type: Optional[str] = None
|
||||
docs: Optional[Docs] = None
|
||||
loaded_at_field: Optional[str] = None
|
||||
identifier: Optional[str] = None
|
||||
quoting: Quoting = field(default_factory=Quoting)
|
||||
freshness: Optional[FreshnessThreshold] = field(
|
||||
default_factory=FreshnessThreshold
|
||||
)
|
||||
external: Optional[ExternalTable] = None
|
||||
tags: Optional[List[str]] = None
|
||||
tests: Optional[List[TestDef]] = None
|
||||
columns: Optional[Sequence[UnparsedColumn]] = None
|
||||
|
||||
def to_patch_dict(self) -> Dict[str, Any]:
|
||||
dct = self.to_dict(omit_none=True)
|
||||
remove_keys = ('name')
|
||||
for key in remove_keys:
|
||||
if key in dct:
|
||||
del dct[key]
|
||||
|
||||
if self.freshness is None:
|
||||
dct['freshness'] = None
|
||||
|
||||
return dct
|
||||
|
||||
|
||||
@dataclass
|
||||
class SourcePatch(JsonSchemaMixin, Replaceable):
|
||||
name: str = field(
|
||||
metadata=dict(description='The name of the source to override'),
|
||||
)
|
||||
overrides: str = field(
|
||||
metadata=dict(description='The package of the source to override'),
|
||||
)
|
||||
path: Path = field(
|
||||
metadata=dict(description='The path to the patch-defining yml file'),
|
||||
)
|
||||
description: Optional[str] = None
|
||||
meta: Optional[Dict[str, Any]] = None
|
||||
database: Optional[str] = None
|
||||
schema: Optional[str] = None
|
||||
loader: Optional[str] = None
|
||||
quoting: Optional[Quoting] = None
|
||||
freshness: Optional[Optional[FreshnessThreshold]] = field(
|
||||
default_factory=FreshnessThreshold
|
||||
)
|
||||
loaded_at_field: Optional[str] = None
|
||||
tables: Optional[List[SourceTablePatch]] = None
|
||||
tags: Optional[List[str]] = None
|
||||
|
||||
def to_patch_dict(self) -> Dict[str, Any]:
|
||||
dct = self.to_dict(omit_none=True)
|
||||
remove_keys = ('name', 'overrides', 'tables', 'path')
|
||||
for key in remove_keys:
|
||||
if key in dct:
|
||||
del dct[key]
|
||||
|
||||
if self.freshness is None:
|
||||
dct['freshness'] = None
|
||||
|
||||
return dct
|
||||
|
||||
def get_table_named(self, name: str) -> Optional[SourceTablePatch]:
|
||||
if self.tables is not None:
|
||||
for table in self.tables:
|
||||
if table.name == name:
|
||||
return table
|
||||
return None
|
||||
|
||||
|
||||
@dataclass
|
||||
class UnparsedDocumentation(JsonSchemaMixin, Replaceable):
|
||||
package_name: str
|
||||
root_path: str
|
||||
path: str
|
||||
original_file_path: str
|
||||
|
||||
@property
|
||||
def resource_type(self):
|
||||
return NodeType.Documentation
|
||||
|
||||
|
||||
@dataclass
|
||||
class UnparsedDocumentationFile(UnparsedDocumentation):
|
||||
file_contents: str
|
||||
|
||||
|
||||
# can't use total_ordering decorator here, as str provides an ordering already
|
||||
# and it's not the one we want.
|
||||
class Maturity(StrEnum):
|
||||
low = 'low'
|
||||
medium = 'medium'
|
||||
high = 'high'
|
||||
|
||||
def __lt__(self, other):
|
||||
if not isinstance(other, Maturity):
|
||||
return NotImplemented
|
||||
order = (Maturity.low, Maturity.medium, Maturity.high)
|
||||
return order.index(self) < order.index(other)
|
||||
|
||||
def __gt__(self, other):
|
||||
if not isinstance(other, Maturity):
|
||||
return NotImplemented
|
||||
return self != other and not (self < other)
|
||||
|
||||
def __ge__(self, other):
|
||||
if not isinstance(other, Maturity):
|
||||
return NotImplemented
|
||||
return self == other or not (self < other)
|
||||
|
||||
def __le__(self, other):
|
||||
if not isinstance(other, Maturity):
|
||||
return NotImplemented
|
||||
return self == other or self < other
|
||||
|
||||
|
||||
class ExposureType(StrEnum):
|
||||
Dashboard = 'dashboard'
|
||||
Notebook = 'notebook'
|
||||
Analysis = 'analysis'
|
||||
ML = 'ml'
|
||||
Application = 'application'
|
||||
|
||||
|
||||
class MaturityType(StrEnum):
|
||||
Low = 'low'
|
||||
Medium = 'medium'
|
||||
High = 'high'
|
||||
|
||||
|
||||
@dataclass
|
||||
class ExposureOwner(JsonSchemaMixin, Replaceable):
|
||||
email: str
|
||||
name: Optional[str] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class UnparsedExposure(JsonSchemaMixin, Replaceable):
|
||||
name: str
|
||||
type: ExposureType
|
||||
owner: ExposureOwner
|
||||
description: str = ''
|
||||
maturity: Optional[MaturityType] = None
|
||||
url: Optional[str] = None
|
||||
depends_on: List[str] = field(default_factory=list)
|
||||
253
core/dbt/contracts/project.py
Normal file
253
core/dbt/contracts/project.py
Normal file
@@ -0,0 +1,253 @@
|
||||
from dbt.contracts.util import Replaceable, Mergeable, list_str
|
||||
from dbt.contracts.connection import UserConfigContract, QueryComment
|
||||
from dbt.helper_types import NoValue
|
||||
from dbt.logger import GLOBAL_LOGGER as logger # noqa
|
||||
from dbt import tracking
|
||||
from dbt import ui
|
||||
|
||||
from hologram import JsonSchemaMixin, ValidationError
|
||||
from hologram.helpers import HyphenatedJsonSchemaMixin, register_pattern, \
|
||||
ExtensibleJsonSchemaMixin
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Optional, List, Dict, Union, Any, NewType
|
||||
|
||||
PIN_PACKAGE_URL = 'https://docs.getdbt.com/docs/package-management#section-specifying-package-versions' # noqa
|
||||
DEFAULT_SEND_ANONYMOUS_USAGE_STATS = True
|
||||
|
||||
|
||||
Name = NewType('Name', str)
|
||||
register_pattern(Name, r'^[^\d\W]\w*$')
|
||||
|
||||
# this does not support the full semver (does not allow a trailing -fooXYZ) and
|
||||
# is not restrictive enough for full semver, (allows '1.0'). But it's like
|
||||
# 'semver lite'.
|
||||
SemverString = NewType('SemverString', str)
|
||||
register_pattern(
|
||||
SemverString,
|
||||
r'^(?:0|[1-9]\d*)\.(?:0|[1-9]\d*)(\.(?:0|[1-9]\d*))?$',
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class Quoting(JsonSchemaMixin, Mergeable):
|
||||
identifier: Optional[bool]
|
||||
schema: Optional[bool]
|
||||
database: Optional[bool]
|
||||
project: Optional[bool]
|
||||
|
||||
|
||||
@dataclass
|
||||
class Package(Replaceable, HyphenatedJsonSchemaMixin):
|
||||
pass
|
||||
|
||||
|
||||
@dataclass
|
||||
class LocalPackage(Package):
|
||||
local: str
|
||||
|
||||
|
||||
# `float` also allows `int`, according to PEP484 (and jsonschema!)
|
||||
RawVersion = Union[str, float]
|
||||
|
||||
|
||||
@dataclass
|
||||
class GitPackage(Package):
|
||||
git: str
|
||||
revision: Optional[RawVersion]
|
||||
warn_unpinned: Optional[bool] = None
|
||||
|
||||
def get_revisions(self) -> List[str]:
|
||||
if self.revision is None:
|
||||
return []
|
||||
else:
|
||||
return [str(self.revision)]
|
||||
|
||||
|
||||
@dataclass
|
||||
class RegistryPackage(Package):
|
||||
package: str
|
||||
version: Union[RawVersion, List[RawVersion]]
|
||||
|
||||
def get_versions(self) -> List[str]:
|
||||
if isinstance(self.version, list):
|
||||
return [str(v) for v in self.version]
|
||||
else:
|
||||
return [str(self.version)]
|
||||
|
||||
|
||||
PackageSpec = Union[LocalPackage, GitPackage, RegistryPackage]
|
||||
|
||||
|
||||
@dataclass
|
||||
class PackageConfig(JsonSchemaMixin, Replaceable):
|
||||
packages: List[PackageSpec]
|
||||
|
||||
|
||||
@dataclass
|
||||
class ProjectPackageMetadata:
|
||||
name: str
|
||||
packages: List[PackageSpec]
|
||||
|
||||
@classmethod
|
||||
def from_project(cls, project):
|
||||
return cls(name=project.project_name,
|
||||
packages=project.packages.packages)
|
||||
|
||||
|
||||
@dataclass
|
||||
class Downloads(ExtensibleJsonSchemaMixin, Replaceable):
|
||||
tarball: str
|
||||
|
||||
|
||||
@dataclass
|
||||
class RegistryPackageMetadata(
|
||||
ExtensibleJsonSchemaMixin,
|
||||
ProjectPackageMetadata,
|
||||
):
|
||||
downloads: Downloads
|
||||
|
||||
|
||||
# A list of all the reserved words that packages may not have as names.
|
||||
BANNED_PROJECT_NAMES = {
|
||||
'_sql_results',
|
||||
'adapter',
|
||||
'api',
|
||||
'column',
|
||||
'config',
|
||||
'context',
|
||||
'database',
|
||||
'env',
|
||||
'env_var',
|
||||
'exceptions',
|
||||
'execute',
|
||||
'flags',
|
||||
'fromjson',
|
||||
'fromyaml',
|
||||
'graph',
|
||||
'invocation_id',
|
||||
'load_agate_table',
|
||||
'load_result',
|
||||
'log',
|
||||
'model',
|
||||
'modules',
|
||||
'post_hooks',
|
||||
'pre_hooks',
|
||||
'ref',
|
||||
'render',
|
||||
'return',
|
||||
'run_started_at',
|
||||
'schema',
|
||||
'source',
|
||||
'sql',
|
||||
'sql_now',
|
||||
'store_result',
|
||||
'store_raw_result',
|
||||
'target',
|
||||
'this',
|
||||
'tojson',
|
||||
'toyaml',
|
||||
'try_or_compiler_error',
|
||||
'var',
|
||||
'write',
|
||||
}
|
||||
|
||||
|
||||
@dataclass
|
||||
class Project(HyphenatedJsonSchemaMixin, Replaceable):
|
||||
name: Name
|
||||
version: Union[SemverString, float]
|
||||
config_version: int
|
||||
project_root: Optional[str] = None
|
||||
source_paths: Optional[List[str]] = None
|
||||
macro_paths: Optional[List[str]] = None
|
||||
data_paths: Optional[List[str]] = None
|
||||
test_paths: Optional[List[str]] = None
|
||||
analysis_paths: Optional[List[str]] = None
|
||||
docs_paths: Optional[List[str]] = None
|
||||
asset_paths: Optional[List[str]] = None
|
||||
target_path: Optional[str] = None
|
||||
snapshot_paths: Optional[List[str]] = None
|
||||
clean_targets: Optional[List[str]] = None
|
||||
profile: Optional[str] = None
|
||||
log_path: Optional[str] = None
|
||||
modules_path: Optional[str] = None
|
||||
quoting: Optional[Quoting] = None
|
||||
on_run_start: Optional[List[str]] = field(default_factory=list_str)
|
||||
on_run_end: Optional[List[str]] = field(default_factory=list_str)
|
||||
require_dbt_version: Optional[Union[List[str], str]] = None
|
||||
models: Dict[str, Any] = field(default_factory=dict)
|
||||
seeds: Dict[str, Any] = field(default_factory=dict)
|
||||
snapshots: Dict[str, Any] = field(default_factory=dict)
|
||||
analyses: Dict[str, Any] = field(default_factory=dict)
|
||||
sources: Dict[str, Any] = field(default_factory=dict)
|
||||
vars: Optional[Dict[str, Any]] = field(
|
||||
default=None,
|
||||
metadata=dict(
|
||||
description='map project names to their vars override dicts',
|
||||
),
|
||||
)
|
||||
packages: List[PackageSpec] = field(default_factory=list)
|
||||
query_comment: Optional[Union[QueryComment, NoValue, str]] = NoValue()
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, data, validate=True) -> 'Project':
|
||||
result = super().from_dict(data, validate=validate)
|
||||
if result.name in BANNED_PROJECT_NAMES:
|
||||
raise ValidationError(
|
||||
f'Invalid project name: {result.name} is a reserved word'
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
@dataclass
|
||||
class UserConfig(ExtensibleJsonSchemaMixin, Replaceable, UserConfigContract):
|
||||
send_anonymous_usage_stats: bool = DEFAULT_SEND_ANONYMOUS_USAGE_STATS
|
||||
use_colors: Optional[bool] = None
|
||||
partial_parse: Optional[bool] = None
|
||||
printer_width: Optional[int] = None
|
||||
|
||||
def set_values(self, cookie_dir):
|
||||
if self.send_anonymous_usage_stats:
|
||||
tracking.initialize_tracking(cookie_dir)
|
||||
else:
|
||||
tracking.do_not_track()
|
||||
|
||||
if self.use_colors is not None:
|
||||
ui.use_colors(self.use_colors)
|
||||
|
||||
if self.printer_width:
|
||||
ui.printer_width(self.printer_width)
|
||||
|
||||
|
||||
@dataclass
|
||||
class ProfileConfig(HyphenatedJsonSchemaMixin, Replaceable):
|
||||
profile_name: str = field(metadata={'preserve_underscore': True})
|
||||
target_name: str = field(metadata={'preserve_underscore': True})
|
||||
config: UserConfig
|
||||
threads: int
|
||||
# TODO: make this a dynamic union of some kind?
|
||||
credentials: Optional[Dict[str, Any]]
|
||||
|
||||
|
||||
@dataclass
|
||||
class ConfiguredQuoting(Quoting, Replaceable):
|
||||
identifier: bool
|
||||
schema: bool
|
||||
database: Optional[bool]
|
||||
project: Optional[bool]
|
||||
|
||||
|
||||
@dataclass
|
||||
class Configuration(Project, ProfileConfig):
|
||||
cli_vars: Dict[str, Any] = field(
|
||||
default_factory=dict,
|
||||
metadata={'preserve_underscore': True},
|
||||
)
|
||||
quoting: Optional[ConfiguredQuoting] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class ProjectList(JsonSchemaMixin):
|
||||
projects: Dict[str, Project]
|
||||
122
core/dbt/contracts/relation.py
Normal file
122
core/dbt/contracts/relation.py
Normal file
@@ -0,0 +1,122 @@
|
||||
from collections.abc import Mapping
|
||||
from dataclasses import dataclass, fields
|
||||
from typing import (
|
||||
Optional, TypeVar, Generic, Dict,
|
||||
)
|
||||
from typing_extensions import Protocol
|
||||
|
||||
from hologram import JsonSchemaMixin
|
||||
from hologram.helpers import StrEnum
|
||||
|
||||
from dbt import deprecations
|
||||
from dbt.contracts.util import Replaceable
|
||||
from dbt.exceptions import CompilationException
|
||||
from dbt.utils import deep_merge
|
||||
|
||||
|
||||
class RelationType(StrEnum):
|
||||
Table = 'table'
|
||||
View = 'view'
|
||||
CTE = 'cte'
|
||||
MaterializedView = 'materializedview'
|
||||
External = 'external'
|
||||
|
||||
|
||||
class ComponentName(StrEnum):
|
||||
Database = 'database'
|
||||
Schema = 'schema'
|
||||
Identifier = 'identifier'
|
||||
|
||||
|
||||
class HasQuoting(Protocol):
|
||||
quoting: Dict[str, bool]
|
||||
|
||||
|
||||
class FakeAPIObject(JsonSchemaMixin, Replaceable, Mapping):
|
||||
# override the mapping truthiness, len is always >1
|
||||
def __bool__(self):
|
||||
return True
|
||||
|
||||
def __getitem__(self, key):
|
||||
try:
|
||||
return getattr(self, key)
|
||||
except AttributeError:
|
||||
raise KeyError(key) from None
|
||||
|
||||
def __iter__(self):
|
||||
deprecations.warn('not-a-dictionary', obj=self)
|
||||
for _, name in self._get_fields():
|
||||
yield name
|
||||
|
||||
def __len__(self):
|
||||
deprecations.warn('not-a-dictionary', obj=self)
|
||||
return len(fields(self.__class__))
|
||||
|
||||
def incorporate(self, **kwargs):
|
||||
value = self.to_dict()
|
||||
value = deep_merge(value, kwargs)
|
||||
return self.from_dict(value)
|
||||
|
||||
|
||||
T = TypeVar('T')
|
||||
|
||||
|
||||
@dataclass
|
||||
class _ComponentObject(FakeAPIObject, Generic[T]):
|
||||
database: T
|
||||
schema: T
|
||||
identifier: T
|
||||
|
||||
def get_part(self, key: ComponentName) -> T:
|
||||
if key == ComponentName.Database:
|
||||
return self.database
|
||||
elif key == ComponentName.Schema:
|
||||
return self.schema
|
||||
elif key == ComponentName.Identifier:
|
||||
return self.identifier
|
||||
else:
|
||||
raise ValueError(
|
||||
'Got a key of {}, expected one of {}'
|
||||
.format(key, list(ComponentName))
|
||||
)
|
||||
|
||||
def replace_dict(self, dct: Dict[ComponentName, T]):
|
||||
kwargs: Dict[str, T] = {}
|
||||
for k, v in dct.items():
|
||||
kwargs[str(k)] = v
|
||||
return self.replace(**kwargs)
|
||||
|
||||
|
||||
@dataclass
|
||||
class Policy(_ComponentObject[bool]):
|
||||
database: bool = True
|
||||
schema: bool = True
|
||||
identifier: bool = True
|
||||
|
||||
|
||||
@dataclass
|
||||
class Path(_ComponentObject[Optional[str]]):
|
||||
database: Optional[str]
|
||||
schema: Optional[str]
|
||||
identifier: Optional[str]
|
||||
|
||||
def __post_init__(self):
|
||||
# handle pesky jinja2.Undefined sneaking in here and messing up rende
|
||||
if not isinstance(self.database, (type(None), str)):
|
||||
raise CompilationException(
|
||||
'Got an invalid path database: {}'.format(self.database)
|
||||
)
|
||||
if not isinstance(self.schema, (type(None), str)):
|
||||
raise CompilationException(
|
||||
'Got an invalid path schema: {}'.format(self.schema)
|
||||
)
|
||||
if not isinstance(self.identifier, (type(None), str)):
|
||||
raise CompilationException(
|
||||
'Got an invalid path identifier: {}'.format(self.identifier)
|
||||
)
|
||||
|
||||
def get_lowered_part(self, key: ComponentName) -> Optional[str]:
|
||||
part = self.get_part(key)
|
||||
if part is not None:
|
||||
part = part.lower()
|
||||
return part
|
||||
461
core/dbt/contracts/results.py
Normal file
461
core/dbt/contracts/results.py
Normal file
@@ -0,0 +1,461 @@
|
||||
from dbt.contracts.graph.manifest import CompileResultNode
|
||||
from dbt.contracts.graph.unparsed import (
|
||||
FreshnessThreshold
|
||||
)
|
||||
from dbt.contracts.graph.parsed import ParsedSourceDefinition
|
||||
from dbt.contracts.util import (
|
||||
BaseArtifactMetadata,
|
||||
ArtifactMixin,
|
||||
VersionedSchema,
|
||||
Replaceable,
|
||||
schema_version,
|
||||
)
|
||||
from dbt.exceptions import InternalException
|
||||
from dbt.logger import (
|
||||
TimingProcessor,
|
||||
JsonOnly,
|
||||
GLOBAL_LOGGER as logger,
|
||||
)
|
||||
from dbt.utils import lowercase
|
||||
from hologram.helpers import StrEnum
|
||||
from hologram import JsonSchemaMixin
|
||||
|
||||
import agate
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
from typing import Union, Dict, List, Optional, Any, NamedTuple, Sequence
|
||||
|
||||
from dbt.clients.system import write_json
|
||||
|
||||
|
||||
@dataclass
|
||||
class TimingInfo(JsonSchemaMixin):
|
||||
name: str
|
||||
started_at: Optional[datetime] = None
|
||||
completed_at: Optional[datetime] = None
|
||||
|
||||
def begin(self):
|
||||
self.started_at = datetime.utcnow()
|
||||
|
||||
def end(self):
|
||||
self.completed_at = datetime.utcnow()
|
||||
|
||||
|
||||
class collect_timing_info:
|
||||
def __init__(self, name: str):
|
||||
self.timing_info = TimingInfo(name=name)
|
||||
|
||||
def __enter__(self):
|
||||
self.timing_info.begin()
|
||||
return self.timing_info
|
||||
|
||||
def __exit__(self, exc_type, exc_value, traceback):
|
||||
self.timing_info.end()
|
||||
with JsonOnly(), TimingProcessor(self.timing_info):
|
||||
logger.debug('finished collecting timing info')
|
||||
|
||||
|
||||
class NodeStatus(StrEnum):
|
||||
Success = "success"
|
||||
Error = "error"
|
||||
Fail = "fail"
|
||||
Warn = "warn"
|
||||
Skipped = "skipped"
|
||||
Pass = "pass"
|
||||
RuntimeErr = "runtime error"
|
||||
|
||||
|
||||
class RunStatus(StrEnum):
|
||||
Success = NodeStatus.Success
|
||||
Error = NodeStatus.Error
|
||||
Skipped = NodeStatus.Skipped
|
||||
|
||||
|
||||
class TestStatus(StrEnum):
|
||||
Pass = NodeStatus.Pass
|
||||
Error = NodeStatus.Error
|
||||
Fail = NodeStatus.Fail
|
||||
Warn = NodeStatus.Warn
|
||||
|
||||
|
||||
class FreshnessStatus(StrEnum):
|
||||
Pass = NodeStatus.Pass
|
||||
Warn = NodeStatus.Warn
|
||||
Error = NodeStatus.Error
|
||||
RuntimeErr = NodeStatus.RuntimeErr
|
||||
|
||||
|
||||
@dataclass
|
||||
class BaseResult(JsonSchemaMixin):
|
||||
status: Union[RunStatus, TestStatus, FreshnessStatus]
|
||||
timing: List[TimingInfo]
|
||||
thread_id: str
|
||||
execution_time: float
|
||||
message: Optional[Union[str, int]]
|
||||
adapter_response: Dict[str, Any]
|
||||
|
||||
|
||||
@dataclass
|
||||
class NodeResult(BaseResult):
|
||||
node: CompileResultNode
|
||||
|
||||
|
||||
@dataclass
|
||||
class RunResult(NodeResult):
|
||||
agate_table: Optional[agate.Table] = None
|
||||
|
||||
@property
|
||||
def skipped(self):
|
||||
return self.status == RunStatus.Skipped
|
||||
|
||||
|
||||
@dataclass
|
||||
class ExecutionResult(JsonSchemaMixin):
|
||||
results: Sequence[BaseResult]
|
||||
elapsed_time: float
|
||||
|
||||
def __len__(self):
|
||||
return len(self.results)
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.results)
|
||||
|
||||
def __getitem__(self, idx):
|
||||
return self.results[idx]
|
||||
|
||||
|
||||
@dataclass
|
||||
class RunResultsMetadata(BaseArtifactMetadata):
|
||||
dbt_schema_version: str = field(
|
||||
default_factory=lambda: str(RunResultsArtifact.dbt_schema_version)
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class RunResultOutput(BaseResult):
|
||||
unique_id: str
|
||||
|
||||
|
||||
def process_run_result(result: RunResult) -> RunResultOutput:
|
||||
return RunResultOutput(
|
||||
unique_id=result.node.unique_id,
|
||||
status=result.status,
|
||||
timing=result.timing,
|
||||
thread_id=result.thread_id,
|
||||
execution_time=result.execution_time,
|
||||
message=result.message,
|
||||
adapter_response=result.adapter_response
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class RunExecutionResult(
|
||||
ExecutionResult,
|
||||
):
|
||||
results: Sequence[RunResult]
|
||||
args: Dict[str, Any] = field(default_factory=dict)
|
||||
generated_at: datetime = field(default_factory=datetime.utcnow)
|
||||
|
||||
def write(self, path: str):
|
||||
writable = RunResultsArtifact.from_execution_results(
|
||||
results=self.results,
|
||||
elapsed_time=self.elapsed_time,
|
||||
generated_at=self.generated_at,
|
||||
args=self.args,
|
||||
)
|
||||
writable.write(path)
|
||||
|
||||
|
||||
@dataclass
|
||||
@schema_version('run-results', 1)
|
||||
class RunResultsArtifact(ExecutionResult, ArtifactMixin):
|
||||
results: Sequence[RunResultOutput]
|
||||
args: Dict[str, Any] = field(default_factory=dict)
|
||||
|
||||
@classmethod
|
||||
def from_execution_results(
|
||||
cls,
|
||||
results: Sequence[RunResult],
|
||||
elapsed_time: float,
|
||||
generated_at: datetime,
|
||||
args: Dict,
|
||||
):
|
||||
processed_results = [process_run_result(result) for result in results]
|
||||
meta = RunResultsMetadata(
|
||||
dbt_schema_version=str(cls.dbt_schema_version),
|
||||
generated_at=generated_at,
|
||||
)
|
||||
return cls(
|
||||
metadata=meta,
|
||||
results=processed_results,
|
||||
elapsed_time=elapsed_time,
|
||||
args=args
|
||||
)
|
||||
|
||||
def write(self, path: str, omit_none=False):
|
||||
write_json(path, self.to_dict(omit_none=omit_none))
|
||||
|
||||
|
||||
@dataclass
|
||||
class RunOperationResult(ExecutionResult):
|
||||
success: bool
|
||||
|
||||
|
||||
@dataclass
|
||||
class RunOperationResultMetadata(BaseArtifactMetadata):
|
||||
dbt_schema_version: str = field(default_factory=lambda: str(
|
||||
RunOperationResultsArtifact.dbt_schema_version
|
||||
))
|
||||
|
||||
|
||||
@dataclass
|
||||
@schema_version('run-operation-result', 1)
|
||||
class RunOperationResultsArtifact(RunOperationResult, ArtifactMixin):
|
||||
|
||||
@classmethod
|
||||
def from_success(
|
||||
cls,
|
||||
success: bool,
|
||||
elapsed_time: float,
|
||||
generated_at: datetime,
|
||||
):
|
||||
meta = RunOperationResultMetadata(
|
||||
dbt_schema_version=str(cls.dbt_schema_version),
|
||||
generated_at=generated_at,
|
||||
)
|
||||
return cls(
|
||||
metadata=meta,
|
||||
results=[],
|
||||
elapsed_time=elapsed_time,
|
||||
success=success,
|
||||
)
|
||||
|
||||
# due to issues with typing.Union collapsing subclasses, this can't subclass
|
||||
# PartialResult
|
||||
|
||||
|
||||
@dataclass
|
||||
class SourceFreshnessResult(NodeResult):
|
||||
node: ParsedSourceDefinition
|
||||
status: FreshnessStatus
|
||||
max_loaded_at: datetime
|
||||
snapshotted_at: datetime
|
||||
age: float
|
||||
|
||||
@property
|
||||
def skipped(self):
|
||||
return False
|
||||
|
||||
|
||||
class FreshnessErrorEnum(StrEnum):
|
||||
runtime_error = 'runtime error'
|
||||
|
||||
|
||||
@dataclass
|
||||
class SourceFreshnessRuntimeError(JsonSchemaMixin):
|
||||
unique_id: str
|
||||
error: Optional[Union[str, int]]
|
||||
status: FreshnessErrorEnum
|
||||
|
||||
|
||||
@dataclass
|
||||
class SourceFreshnessOutput(JsonSchemaMixin):
|
||||
unique_id: str
|
||||
max_loaded_at: datetime
|
||||
snapshotted_at: datetime
|
||||
max_loaded_at_time_ago_in_s: float
|
||||
status: FreshnessStatus
|
||||
criteria: FreshnessThreshold
|
||||
adapter_response: Dict[str, Any]
|
||||
|
||||
|
||||
@dataclass
|
||||
class PartialSourceFreshnessResult(NodeResult):
|
||||
status: FreshnessStatus
|
||||
|
||||
@property
|
||||
def skipped(self):
|
||||
return False
|
||||
|
||||
|
||||
FreshnessNodeResult = Union[PartialSourceFreshnessResult,
|
||||
SourceFreshnessResult]
|
||||
FreshnessNodeOutput = Union[SourceFreshnessRuntimeError, SourceFreshnessOutput]
|
||||
|
||||
|
||||
def process_freshness_result(
|
||||
result: FreshnessNodeResult
|
||||
) -> FreshnessNodeOutput:
|
||||
unique_id = result.node.unique_id
|
||||
if result.status == FreshnessStatus.RuntimeErr:
|
||||
return SourceFreshnessRuntimeError(
|
||||
unique_id=unique_id,
|
||||
error=result.message,
|
||||
status=FreshnessErrorEnum.runtime_error,
|
||||
)
|
||||
|
||||
# we know that this must be a SourceFreshnessResult
|
||||
if not isinstance(result, SourceFreshnessResult):
|
||||
raise InternalException(
|
||||
'Got {} instead of a SourceFreshnessResult for a '
|
||||
'non-error result in freshness execution!'
|
||||
.format(type(result))
|
||||
)
|
||||
# if we're here, we must have a non-None freshness threshold
|
||||
criteria = result.node.freshness
|
||||
if criteria is None:
|
||||
raise InternalException(
|
||||
'Somehow evaluated a freshness result for a source '
|
||||
'that has no freshness criteria!'
|
||||
)
|
||||
return SourceFreshnessOutput(
|
||||
unique_id=unique_id,
|
||||
max_loaded_at=result.max_loaded_at,
|
||||
snapshotted_at=result.snapshotted_at,
|
||||
max_loaded_at_time_ago_in_s=result.age,
|
||||
status=result.status,
|
||||
criteria=criteria,
|
||||
adapter_response=result.adapter_response
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class FreshnessMetadata(BaseArtifactMetadata):
|
||||
dbt_schema_version: str = field(
|
||||
default_factory=lambda: str(
|
||||
FreshnessExecutionResultArtifact.dbt_schema_version
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class FreshnessResult(ExecutionResult):
|
||||
metadata: FreshnessMetadata
|
||||
results: Sequence[FreshnessNodeResult]
|
||||
|
||||
@classmethod
|
||||
def from_node_results(
|
||||
cls,
|
||||
results: List[FreshnessNodeResult],
|
||||
elapsed_time: float,
|
||||
generated_at: datetime,
|
||||
):
|
||||
meta = FreshnessMetadata(generated_at=generated_at)
|
||||
return cls(metadata=meta, results=results, elapsed_time=elapsed_time)
|
||||
|
||||
|
||||
@dataclass
|
||||
@schema_version('sources', 1)
|
||||
class FreshnessExecutionResultArtifact(
|
||||
ArtifactMixin,
|
||||
VersionedSchema,
|
||||
):
|
||||
metadata: FreshnessMetadata
|
||||
results: Sequence[FreshnessNodeOutput]
|
||||
elapsed_time: float
|
||||
|
||||
@classmethod
|
||||
def from_result(cls, base: FreshnessResult):
|
||||
processed = [process_freshness_result(r) for r in base.results]
|
||||
return cls(
|
||||
metadata=base.metadata,
|
||||
results=processed,
|
||||
elapsed_time=base.elapsed_time,
|
||||
)
|
||||
|
||||
|
||||
Primitive = Union[bool, str, float, None]
|
||||
|
||||
CatalogKey = NamedTuple(
|
||||
'CatalogKey',
|
||||
[('database', Optional[str]), ('schema', str), ('name', str)]
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class StatsItem(JsonSchemaMixin):
|
||||
id: str
|
||||
label: str
|
||||
value: Primitive
|
||||
description: Optional[str]
|
||||
include: bool
|
||||
|
||||
|
||||
StatsDict = Dict[str, StatsItem]
|
||||
|
||||
|
||||
@dataclass
|
||||
class ColumnMetadata(JsonSchemaMixin):
|
||||
type: str
|
||||
comment: Optional[str]
|
||||
index: int
|
||||
name: str
|
||||
|
||||
|
||||
ColumnMap = Dict[str, ColumnMetadata]
|
||||
|
||||
|
||||
@dataclass
|
||||
class TableMetadata(JsonSchemaMixin):
|
||||
type: str
|
||||
database: Optional[str]
|
||||
schema: str
|
||||
name: str
|
||||
comment: Optional[str]
|
||||
owner: Optional[str]
|
||||
|
||||
|
||||
@dataclass
|
||||
class CatalogTable(JsonSchemaMixin, Replaceable):
|
||||
metadata: TableMetadata
|
||||
columns: ColumnMap
|
||||
stats: StatsDict
|
||||
# the same table with two unique IDs will just be listed two times
|
||||
unique_id: Optional[str] = None
|
||||
|
||||
def key(self) -> CatalogKey:
|
||||
return CatalogKey(
|
||||
lowercase(self.metadata.database),
|
||||
self.metadata.schema.lower(),
|
||||
self.metadata.name.lower(),
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class CatalogMetadata(BaseArtifactMetadata):
|
||||
dbt_schema_version: str = field(
|
||||
default_factory=lambda: str(CatalogArtifact.dbt_schema_version)
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class CatalogResults(JsonSchemaMixin):
|
||||
nodes: Dict[str, CatalogTable]
|
||||
sources: Dict[str, CatalogTable]
|
||||
errors: Optional[List[str]]
|
||||
_compile_results: Optional[Any] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
@schema_version('catalog', 1)
|
||||
class CatalogArtifact(CatalogResults, ArtifactMixin):
|
||||
metadata: CatalogMetadata
|
||||
|
||||
@classmethod
|
||||
def from_results(
|
||||
cls,
|
||||
generated_at: datetime,
|
||||
nodes: Dict[str, CatalogTable],
|
||||
sources: Dict[str, CatalogTable],
|
||||
compile_results: Optional[Any],
|
||||
errors: Optional[List[str]]
|
||||
) -> 'CatalogArtifact':
|
||||
meta = CatalogMetadata(generated_at=generated_at)
|
||||
return cls(
|
||||
metadata=meta,
|
||||
nodes=nodes,
|
||||
sources=sources,
|
||||
errors=errors,
|
||||
_compile_results=compile_results,
|
||||
)
|
||||
758
core/dbt/contracts/rpc.py
Normal file
758
core/dbt/contracts/rpc.py
Normal file
@@ -0,0 +1,758 @@
|
||||
import enum
|
||||
import os
|
||||
import uuid
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Optional, Union, List, Any, Dict, Type, Sequence
|
||||
|
||||
from hologram import JsonSchemaMixin
|
||||
from hologram.helpers import StrEnum
|
||||
|
||||
from dbt.contracts.graph.compiled import CompileResultNode
|
||||
from dbt.contracts.graph.manifest import WritableManifest
|
||||
from dbt.contracts.results import (
|
||||
RunResult, RunResultsArtifact, TimingInfo,
|
||||
CatalogArtifact,
|
||||
CatalogResults,
|
||||
ExecutionResult,
|
||||
FreshnessExecutionResultArtifact,
|
||||
FreshnessResult,
|
||||
RunOperationResult,
|
||||
RunOperationResultsArtifact,
|
||||
RunExecutionResult,
|
||||
)
|
||||
from dbt.contracts.util import VersionedSchema, schema_version
|
||||
from dbt.exceptions import InternalException
|
||||
from dbt.logger import LogMessage
|
||||
from dbt.utils import restrict_to
|
||||
|
||||
|
||||
TaskTags = Optional[Dict[str, Any]]
|
||||
TaskID = uuid.UUID
|
||||
|
||||
# Inputs
|
||||
|
||||
|
||||
@dataclass
|
||||
class RPCParameters(JsonSchemaMixin):
|
||||
timeout: Optional[float]
|
||||
task_tags: TaskTags
|
||||
|
||||
|
||||
@dataclass
|
||||
class RPCExecParameters(RPCParameters):
|
||||
name: str
|
||||
sql: str
|
||||
macros: Optional[str]
|
||||
|
||||
|
||||
@dataclass
|
||||
class RPCCompileParameters(RPCParameters):
|
||||
threads: Optional[int] = None
|
||||
models: Union[None, str, List[str]] = None
|
||||
exclude: Union[None, str, List[str]] = None
|
||||
selector: Optional[str] = None
|
||||
state: Optional[str] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class RPCRunParameters(RPCParameters):
|
||||
threads: Optional[int] = None
|
||||
models: Union[None, str, List[str]] = None
|
||||
exclude: Union[None, str, List[str]] = None
|
||||
selector: Optional[str] = None
|
||||
state: Optional[str] = None
|
||||
defer: Optional[bool] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class RPCSnapshotParameters(RPCParameters):
|
||||
threads: Optional[int] = None
|
||||
select: Union[None, str, List[str]] = None
|
||||
exclude: Union[None, str, List[str]] = None
|
||||
selector: Optional[str] = None
|
||||
state: Optional[str] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class RPCTestParameters(RPCCompileParameters):
|
||||
data: bool = False
|
||||
schema: bool = False
|
||||
state: Optional[str] = None
|
||||
defer: Optional[bool] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class RPCSeedParameters(RPCParameters):
|
||||
threads: Optional[int] = None
|
||||
select: Union[None, str, List[str]] = None
|
||||
exclude: Union[None, str, List[str]] = None
|
||||
selector: Optional[str] = None
|
||||
show: bool = False
|
||||
state: Optional[str] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class RPCDocsGenerateParameters(RPCParameters):
|
||||
compile: bool = True
|
||||
state: Optional[str] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class RPCCliParameters(RPCParameters):
|
||||
cli: str
|
||||
|
||||
|
||||
@dataclass
|
||||
class RPCDepsParameters(RPCParameters):
|
||||
pass
|
||||
|
||||
|
||||
@dataclass
|
||||
class KillParameters(RPCParameters):
|
||||
task_id: TaskID
|
||||
|
||||
|
||||
@dataclass
|
||||
class PollParameters(RPCParameters):
|
||||
request_token: TaskID
|
||||
logs: bool = True
|
||||
logs_start: int = 0
|
||||
|
||||
|
||||
@dataclass
|
||||
class PSParameters(RPCParameters):
|
||||
active: bool = True
|
||||
completed: bool = False
|
||||
|
||||
|
||||
@dataclass
|
||||
class StatusParameters(RPCParameters):
|
||||
pass
|
||||
|
||||
|
||||
@dataclass
|
||||
class GCSettings(JsonSchemaMixin):
|
||||
# start evicting the longest-ago-ended tasks here
|
||||
maxsize: int
|
||||
# start evicting all tasks before now - auto_reap_age when we have this
|
||||
# many tasks in the table
|
||||
reapsize: int
|
||||
# a positive timedelta indicating how far back we should go
|
||||
auto_reap_age: timedelta
|
||||
|
||||
|
||||
@dataclass
|
||||
class GCParameters(RPCParameters):
|
||||
"""The gc endpoint takes three arguments, any of which may be present:
|
||||
|
||||
- task_ids: An optional list of task ID UUIDs to try to GC
|
||||
- before: If provided, should be a datetime string. All tasks that finished
|
||||
before that datetime will be GCed
|
||||
- settings: If provided, should be a GCSettings object in JSON form. It
|
||||
will be applied to the task manager before GC starts. By default the
|
||||
existing gc settings remain.
|
||||
"""
|
||||
task_ids: Optional[List[TaskID]] = None
|
||||
before: Optional[datetime] = None
|
||||
settings: Optional[GCSettings] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class RPCRunOperationParameters(RPCParameters):
|
||||
macro: str
|
||||
args: Dict[str, Any] = field(default_factory=dict)
|
||||
|
||||
|
||||
@dataclass
|
||||
class RPCSourceFreshnessParameters(RPCParameters):
|
||||
threads: Optional[int] = None
|
||||
select: Union[None, str, List[str]] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class GetManifestParameters(RPCParameters):
|
||||
pass
|
||||
|
||||
# Outputs
|
||||
|
||||
|
||||
@dataclass
|
||||
class RemoteResult(VersionedSchema):
|
||||
logs: List[LogMessage]
|
||||
|
||||
|
||||
@dataclass
|
||||
@schema_version('remote-deps-result', 1)
|
||||
class RemoteDepsResult(RemoteResult):
|
||||
generated_at: datetime = field(default_factory=datetime.utcnow)
|
||||
|
||||
|
||||
@dataclass
|
||||
@schema_version('remote-catalog-result', 1)
|
||||
class RemoteCatalogResults(CatalogResults, RemoteResult):
|
||||
generated_at: datetime = field(default_factory=datetime.utcnow)
|
||||
|
||||
def write(self, path: str):
|
||||
artifact = CatalogArtifact.from_results(
|
||||
generated_at=self.generated_at,
|
||||
nodes=self.nodes,
|
||||
sources=self.sources,
|
||||
compile_results=self._compile_results,
|
||||
errors=self.errors,
|
||||
)
|
||||
artifact.write(path)
|
||||
|
||||
|
||||
@dataclass
|
||||
class RemoteCompileResultMixin(RemoteResult):
|
||||
raw_sql: str
|
||||
compiled_sql: str
|
||||
node: CompileResultNode
|
||||
timing: List[TimingInfo]
|
||||
|
||||
|
||||
@dataclass
|
||||
@schema_version('remote-compile-result', 1)
|
||||
class RemoteCompileResult(RemoteCompileResultMixin):
|
||||
generated_at: datetime = field(default_factory=datetime.utcnow)
|
||||
|
||||
@property
|
||||
def error(self):
|
||||
return None
|
||||
|
||||
|
||||
@dataclass
|
||||
@schema_version('remote-execution-result', 1)
|
||||
class RemoteExecutionResult(ExecutionResult, RemoteResult):
|
||||
results: Sequence[RunResult]
|
||||
args: Dict[str, Any] = field(default_factory=dict)
|
||||
generated_at: datetime = field(default_factory=datetime.utcnow)
|
||||
|
||||
def write(self, path: str):
|
||||
writable = RunResultsArtifact.from_execution_results(
|
||||
generated_at=self.generated_at,
|
||||
results=self.results,
|
||||
elapsed_time=self.elapsed_time,
|
||||
args=self.args,
|
||||
)
|
||||
writable.write(path)
|
||||
|
||||
@classmethod
|
||||
def from_local_result(
|
||||
cls,
|
||||
base: RunExecutionResult,
|
||||
logs: List[LogMessage],
|
||||
) -> 'RemoteExecutionResult':
|
||||
return cls(
|
||||
generated_at=base.generated_at,
|
||||
results=base.results,
|
||||
elapsed_time=base.elapsed_time,
|
||||
args=base.args,
|
||||
logs=logs,
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class ResultTable(JsonSchemaMixin):
|
||||
column_names: List[str]
|
||||
rows: List[Any]
|
||||
|
||||
|
||||
@dataclass
|
||||
@schema_version('remote-run-operation-result', 1)
|
||||
class RemoteRunOperationResult(RunOperationResult, RemoteResult):
|
||||
generated_at: datetime = field(default_factory=datetime.utcnow)
|
||||
|
||||
@classmethod
|
||||
def from_local_result(
|
||||
cls,
|
||||
base: RunOperationResultsArtifact,
|
||||
logs: List[LogMessage],
|
||||
) -> 'RemoteRunOperationResult':
|
||||
return cls(
|
||||
generated_at=base.metadata.generated_at,
|
||||
results=base.results,
|
||||
elapsed_time=base.elapsed_time,
|
||||
success=base.success,
|
||||
logs=logs,
|
||||
)
|
||||
|
||||
def write(self, path: str):
|
||||
writable = RunOperationResultsArtifact.from_success(
|
||||
success=self.success,
|
||||
generated_at=self.generated_at,
|
||||
elapsed_time=self.elapsed_time,
|
||||
)
|
||||
writable.write(path)
|
||||
|
||||
|
||||
@dataclass
|
||||
@schema_version('remote-freshness-result', 1)
|
||||
class RemoteFreshnessResult(FreshnessResult, RemoteResult):
|
||||
|
||||
@classmethod
|
||||
def from_local_result(
|
||||
cls,
|
||||
base: FreshnessResult,
|
||||
logs: List[LogMessage],
|
||||
) -> 'RemoteFreshnessResult':
|
||||
return cls(
|
||||
metadata=base.metadata,
|
||||
results=base.results,
|
||||
elapsed_time=base.elapsed_time,
|
||||
logs=logs,
|
||||
)
|
||||
|
||||
def write(self, path: str):
|
||||
writable = FreshnessExecutionResultArtifact.from_result(base=self)
|
||||
writable.write(path)
|
||||
|
||||
|
||||
@dataclass
|
||||
@schema_version('remote-run-result', 1)
|
||||
class RemoteRunResult(RemoteCompileResultMixin):
|
||||
table: ResultTable
|
||||
generated_at: datetime = field(default_factory=datetime.utcnow)
|
||||
|
||||
|
||||
RPCResult = Union[
|
||||
RemoteCompileResult,
|
||||
RemoteExecutionResult,
|
||||
RemoteFreshnessResult,
|
||||
RemoteCatalogResults,
|
||||
RemoteDepsResult,
|
||||
RemoteRunOperationResult,
|
||||
]
|
||||
|
||||
|
||||
# GC types
|
||||
|
||||
class GCResultState(StrEnum):
|
||||
Deleted = 'deleted' # successful GC
|
||||
Missing = 'missing' # nothing to GC
|
||||
Running = 'running' # can't GC
|
||||
|
||||
|
||||
@dataclass
|
||||
@schema_version('remote-gc-result', 1)
|
||||
class GCResult(RemoteResult):
|
||||
logs: List[LogMessage] = field(default_factory=list)
|
||||
deleted: List[TaskID] = field(default_factory=list)
|
||||
missing: List[TaskID] = field(default_factory=list)
|
||||
running: List[TaskID] = field(default_factory=list)
|
||||
|
||||
def add_result(self, task_id: TaskID, state: GCResultState):
|
||||
if state == GCResultState.Missing:
|
||||
self.missing.append(task_id)
|
||||
elif state == GCResultState.Running:
|
||||
self.running.append(task_id)
|
||||
elif state == GCResultState.Deleted:
|
||||
self.deleted.append(task_id)
|
||||
else:
|
||||
raise InternalException(
|
||||
f'Got invalid state in add_result: {state}'
|
||||
)
|
||||
|
||||
# Task management types
|
||||
|
||||
|
||||
class TaskHandlerState(StrEnum):
|
||||
NotStarted = 'not started'
|
||||
Initializing = 'initializing'
|
||||
Running = 'running'
|
||||
Success = 'success'
|
||||
Error = 'error'
|
||||
Killed = 'killed'
|
||||
Failed = 'failed'
|
||||
|
||||
def __lt__(self, other) -> bool:
|
||||
"""A logical ordering for TaskHandlerState:
|
||||
|
||||
NotStarted < Initializing < Running < (Success, Error, Killed, Failed)
|
||||
"""
|
||||
if not isinstance(other, TaskHandlerState):
|
||||
raise TypeError('cannot compare to non-TaskHandlerState')
|
||||
order = (self.NotStarted, self.Initializing, self.Running)
|
||||
smaller = set()
|
||||
for value in order:
|
||||
smaller.add(value)
|
||||
if self == value:
|
||||
return other not in smaller
|
||||
|
||||
return False
|
||||
|
||||
def __le__(self, other) -> bool:
|
||||
# so that ((Success <= Error) is True)
|
||||
return ((self < other) or
|
||||
(self == other) or
|
||||
(self.finished and other.finished))
|
||||
|
||||
def __gt__(self, other) -> bool:
|
||||
if not isinstance(other, TaskHandlerState):
|
||||
raise TypeError('cannot compare to non-TaskHandlerState')
|
||||
order = (self.NotStarted, self.Initializing, self.Running)
|
||||
smaller = set()
|
||||
for value in order:
|
||||
smaller.add(value)
|
||||
if self == value:
|
||||
return other in smaller
|
||||
return other in smaller
|
||||
|
||||
def __ge__(self, other) -> bool:
|
||||
# so that ((Success <= Error) is True)
|
||||
return ((self > other) or
|
||||
(self == other) or
|
||||
(self.finished and other.finished))
|
||||
|
||||
@property
|
||||
def finished(self) -> bool:
|
||||
return self in (self.Error, self.Success, self.Killed, self.Failed)
|
||||
|
||||
|
||||
@dataclass
|
||||
class TaskTiming(JsonSchemaMixin):
|
||||
state: TaskHandlerState
|
||||
start: Optional[datetime]
|
||||
end: Optional[datetime]
|
||||
elapsed: Optional[float]
|
||||
|
||||
|
||||
@dataclass
|
||||
class TaskRow(TaskTiming):
|
||||
task_id: TaskID
|
||||
request_id: Union[str, int]
|
||||
request_source: str
|
||||
method: str
|
||||
timeout: Optional[float]
|
||||
tags: TaskTags
|
||||
|
||||
|
||||
@dataclass
|
||||
@schema_version('remote-ps-result', 1)
|
||||
class PSResult(RemoteResult):
|
||||
rows: List[TaskRow]
|
||||
|
||||
|
||||
class KillResultStatus(StrEnum):
|
||||
Missing = 'missing'
|
||||
NotStarted = 'not_started'
|
||||
Killed = 'killed'
|
||||
Finished = 'finished'
|
||||
|
||||
|
||||
@dataclass
|
||||
@schema_version('remote-kill-result', 1)
|
||||
class KillResult(RemoteResult):
|
||||
state: KillResultStatus = KillResultStatus.Missing
|
||||
logs: List[LogMessage] = field(default_factory=list)
|
||||
|
||||
|
||||
@dataclass
|
||||
@schema_version('remote-manifest-result', 1)
|
||||
class GetManifestResult(RemoteResult):
|
||||
manifest: Optional[WritableManifest]
|
||||
|
||||
|
||||
# this is kind of carefuly structured: BlocksManifestTasks is implied by
|
||||
# RequiresConfigReloadBefore and RequiresManifestReloadAfter
|
||||
class RemoteMethodFlags(enum.Flag):
|
||||
Empty = 0
|
||||
BlocksManifestTasks = 1
|
||||
RequiresConfigReloadBefore = 3
|
||||
RequiresManifestReloadAfter = 5
|
||||
Builtin = 8
|
||||
|
||||
|
||||
# Polling types
|
||||
|
||||
|
||||
@dataclass
|
||||
class PollResult(RemoteResult, TaskTiming):
|
||||
state: TaskHandlerState
|
||||
tags: TaskTags
|
||||
start: Optional[datetime]
|
||||
end: Optional[datetime]
|
||||
elapsed: Optional[float]
|
||||
|
||||
|
||||
@dataclass
|
||||
@schema_version('poll-remote-deps-result', 1)
|
||||
class PollRemoteEmptyCompleteResult(PollResult, RemoteResult):
|
||||
state: TaskHandlerState = field(
|
||||
metadata=restrict_to(TaskHandlerState.Success,
|
||||
TaskHandlerState.Failed),
|
||||
)
|
||||
generated_at: datetime = field(default_factory=datetime.utcnow)
|
||||
|
||||
@classmethod
|
||||
def from_result(
|
||||
cls: Type['PollRemoteEmptyCompleteResult'],
|
||||
base: RemoteDepsResult,
|
||||
tags: TaskTags,
|
||||
timing: TaskTiming,
|
||||
logs: List[LogMessage],
|
||||
) -> 'PollRemoteEmptyCompleteResult':
|
||||
return cls(
|
||||
logs=logs,
|
||||
tags=tags,
|
||||
state=timing.state,
|
||||
start=timing.start,
|
||||
end=timing.end,
|
||||
elapsed=timing.elapsed,
|
||||
generated_at=base.generated_at
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
@schema_version('poll-remote-killed-result', 1)
|
||||
class PollKilledResult(PollResult):
|
||||
state: TaskHandlerState = field(
|
||||
metadata=restrict_to(TaskHandlerState.Killed),
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
@schema_version('poll-remote-execution-result', 1)
|
||||
class PollExecuteCompleteResult(
|
||||
RemoteExecutionResult,
|
||||
PollResult,
|
||||
):
|
||||
state: TaskHandlerState = field(
|
||||
metadata=restrict_to(TaskHandlerState.Success,
|
||||
TaskHandlerState.Failed),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def from_result(
|
||||
cls: Type['PollExecuteCompleteResult'],
|
||||
base: RemoteExecutionResult,
|
||||
tags: TaskTags,
|
||||
timing: TaskTiming,
|
||||
logs: List[LogMessage],
|
||||
) -> 'PollExecuteCompleteResult':
|
||||
return cls(
|
||||
results=base.results,
|
||||
elapsed_time=base.elapsed_time,
|
||||
logs=logs,
|
||||
tags=tags,
|
||||
state=timing.state,
|
||||
start=timing.start,
|
||||
end=timing.end,
|
||||
elapsed=timing.elapsed,
|
||||
generated_at=base.generated_at,
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
@schema_version('poll-remote-compile-result', 1)
|
||||
class PollCompileCompleteResult(
|
||||
RemoteCompileResult,
|
||||
PollResult,
|
||||
):
|
||||
state: TaskHandlerState = field(
|
||||
metadata=restrict_to(TaskHandlerState.Success,
|
||||
TaskHandlerState.Failed),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def from_result(
|
||||
cls: Type['PollCompileCompleteResult'],
|
||||
base: RemoteCompileResult,
|
||||
tags: TaskTags,
|
||||
timing: TaskTiming,
|
||||
logs: List[LogMessage],
|
||||
) -> 'PollCompileCompleteResult':
|
||||
return cls(
|
||||
raw_sql=base.raw_sql,
|
||||
compiled_sql=base.compiled_sql,
|
||||
node=base.node,
|
||||
timing=base.timing,
|
||||
logs=logs,
|
||||
tags=tags,
|
||||
state=timing.state,
|
||||
start=timing.start,
|
||||
end=timing.end,
|
||||
elapsed=timing.elapsed,
|
||||
generated_at=base.generated_at
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
@schema_version('poll-remote-run-result', 1)
|
||||
class PollRunCompleteResult(
|
||||
RemoteRunResult,
|
||||
PollResult,
|
||||
):
|
||||
state: TaskHandlerState = field(
|
||||
metadata=restrict_to(TaskHandlerState.Success,
|
||||
TaskHandlerState.Failed),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def from_result(
|
||||
cls: Type['PollRunCompleteResult'],
|
||||
base: RemoteRunResult,
|
||||
tags: TaskTags,
|
||||
timing: TaskTiming,
|
||||
logs: List[LogMessage],
|
||||
) -> 'PollRunCompleteResult':
|
||||
return cls(
|
||||
raw_sql=base.raw_sql,
|
||||
compiled_sql=base.compiled_sql,
|
||||
node=base.node,
|
||||
timing=base.timing,
|
||||
logs=logs,
|
||||
table=base.table,
|
||||
tags=tags,
|
||||
state=timing.state,
|
||||
start=timing.start,
|
||||
end=timing.end,
|
||||
elapsed=timing.elapsed,
|
||||
generated_at=base.generated_at
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
@schema_version('poll-remote-run-operation-result', 1)
|
||||
class PollRunOperationCompleteResult(
|
||||
RemoteRunOperationResult,
|
||||
PollResult,
|
||||
):
|
||||
state: TaskHandlerState = field(
|
||||
metadata=restrict_to(TaskHandlerState.Success,
|
||||
TaskHandlerState.Failed),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def from_result(
|
||||
cls: Type['PollRunOperationCompleteResult'],
|
||||
base: RemoteRunOperationResult,
|
||||
tags: TaskTags,
|
||||
timing: TaskTiming,
|
||||
logs: List[LogMessage],
|
||||
) -> 'PollRunOperationCompleteResult':
|
||||
return cls(
|
||||
success=base.success,
|
||||
results=base.results,
|
||||
generated_at=base.generated_at,
|
||||
elapsed_time=base.elapsed_time,
|
||||
logs=logs,
|
||||
tags=tags,
|
||||
state=timing.state,
|
||||
start=timing.start,
|
||||
end=timing.end,
|
||||
elapsed=timing.elapsed,
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
@schema_version('poll-remote-catalog-result', 1)
|
||||
class PollCatalogCompleteResult(RemoteCatalogResults, PollResult):
|
||||
state: TaskHandlerState = field(
|
||||
metadata=restrict_to(TaskHandlerState.Success,
|
||||
TaskHandlerState.Failed),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def from_result(
|
||||
cls: Type['PollCatalogCompleteResult'],
|
||||
base: RemoteCatalogResults,
|
||||
tags: TaskTags,
|
||||
timing: TaskTiming,
|
||||
logs: List[LogMessage],
|
||||
) -> 'PollCatalogCompleteResult':
|
||||
return cls(
|
||||
nodes=base.nodes,
|
||||
sources=base.sources,
|
||||
generated_at=base.generated_at,
|
||||
errors=base.errors,
|
||||
_compile_results=base._compile_results,
|
||||
logs=logs,
|
||||
tags=tags,
|
||||
state=timing.state,
|
||||
start=timing.start,
|
||||
end=timing.end,
|
||||
elapsed=timing.elapsed,
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
@schema_version('poll-remote-in-progress-result', 1)
|
||||
class PollInProgressResult(PollResult):
|
||||
pass
|
||||
|
||||
|
||||
@dataclass
|
||||
@schema_version('poll-remote-get-manifest-result', 1)
|
||||
class PollGetManifestResult(GetManifestResult, PollResult):
|
||||
state: TaskHandlerState = field(
|
||||
metadata=restrict_to(TaskHandlerState.Success,
|
||||
TaskHandlerState.Failed),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def from_result(
|
||||
cls: Type['PollGetManifestResult'],
|
||||
base: GetManifestResult,
|
||||
tags: TaskTags,
|
||||
timing: TaskTiming,
|
||||
logs: List[LogMessage],
|
||||
) -> 'PollGetManifestResult':
|
||||
return cls(
|
||||
manifest=base.manifest,
|
||||
logs=logs,
|
||||
tags=tags,
|
||||
state=timing.state,
|
||||
start=timing.start,
|
||||
end=timing.end,
|
||||
elapsed=timing.elapsed,
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
@schema_version('poll-remote-freshness-result', 1)
|
||||
class PollFreshnessResult(RemoteFreshnessResult, PollResult):
|
||||
state: TaskHandlerState = field(
|
||||
metadata=restrict_to(TaskHandlerState.Success,
|
||||
TaskHandlerState.Failed),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def from_result(
|
||||
cls: Type['PollFreshnessResult'],
|
||||
base: RemoteFreshnessResult,
|
||||
tags: TaskTags,
|
||||
timing: TaskTiming,
|
||||
logs: List[LogMessage],
|
||||
) -> 'PollFreshnessResult':
|
||||
return cls(
|
||||
logs=logs,
|
||||
tags=tags,
|
||||
state=timing.state,
|
||||
start=timing.start,
|
||||
end=timing.end,
|
||||
elapsed=timing.elapsed,
|
||||
metadata=base.metadata,
|
||||
results=base.results,
|
||||
elapsed_time=base.elapsed_time,
|
||||
)
|
||||
|
||||
# Manifest parsing types
|
||||
|
||||
|
||||
class ManifestStatus(StrEnum):
|
||||
Init = 'init'
|
||||
Compiling = 'compiling'
|
||||
Ready = 'ready'
|
||||
Error = 'error'
|
||||
|
||||
|
||||
@dataclass
|
||||
@schema_version('remote-status-result', 1)
|
||||
class LastParse(RemoteResult):
|
||||
state: ManifestStatus = ManifestStatus.Init
|
||||
logs: List[LogMessage] = field(default_factory=list)
|
||||
error: Optional[Dict[str, Any]] = None
|
||||
timestamp: datetime = field(default_factory=datetime.utcnow)
|
||||
pid: int = field(default_factory=os.getpid)
|
||||
22
core/dbt/contracts/selection.py
Normal file
22
core/dbt/contracts/selection.py
Normal file
@@ -0,0 +1,22 @@
|
||||
from dataclasses import dataclass
|
||||
from hologram import JsonSchemaMixin
|
||||
|
||||
from typing import List, Dict, Any, Union
|
||||
|
||||
|
||||
@dataclass
|
||||
class SelectorDefinition(JsonSchemaMixin):
|
||||
name: str
|
||||
definition: Union[str, Dict[str, Any]]
|
||||
description: str = ''
|
||||
|
||||
|
||||
@dataclass
|
||||
class SelectorFile(JsonSchemaMixin):
|
||||
selectors: List[SelectorDefinition]
|
||||
version: int = 2
|
||||
|
||||
|
||||
# @dataclass
|
||||
# class SelectorCollection:
|
||||
# packages: Dict[str, List[SelectorFile]] = field(default_factory=dict)
|
||||
18
core/dbt/contracts/state.py
Normal file
18
core/dbt/contracts/state.py
Normal file
@@ -0,0 +1,18 @@
|
||||
from pathlib import Path
|
||||
from .graph.manifest import WritableManifest
|
||||
from typing import Optional
|
||||
from dbt.exceptions import IncompatibleSchemaException
|
||||
|
||||
|
||||
class PreviousState:
|
||||
def __init__(self, path: Path):
|
||||
self.path: Path = path
|
||||
self.manifest: Optional[WritableManifest] = None
|
||||
|
||||
manifest_path = self.path / 'manifest.json'
|
||||
if manifest_path.exists() and manifest_path.is_file():
|
||||
try:
|
||||
self.manifest = WritableManifest.read(str(manifest_path))
|
||||
except IncompatibleSchemaException as exc:
|
||||
exc.add_filename(str(manifest_path))
|
||||
raise
|
||||
197
core/dbt/contracts/util.py
Normal file
197
core/dbt/contracts/util.py
Normal file
@@ -0,0 +1,197 @@
|
||||
import dataclasses
|
||||
import os
|
||||
from datetime import datetime
|
||||
from typing import (
|
||||
List, Tuple, ClassVar, Type, TypeVar, Dict, Any, Optional
|
||||
)
|
||||
|
||||
from dbt.clients.system import write_json, read_json
|
||||
from dbt.exceptions import (
|
||||
IncompatibleSchemaException,
|
||||
InternalException,
|
||||
RuntimeException,
|
||||
)
|
||||
from dbt.version import __version__
|
||||
from dbt.tracking import get_invocation_id
|
||||
from hologram import JsonSchemaMixin
|
||||
|
||||
MacroKey = Tuple[str, str]
|
||||
SourceKey = Tuple[str, str]
|
||||
|
||||
|
||||
def list_str() -> List[str]:
|
||||
"""Mypy gets upset about stuff like:
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Optional, List
|
||||
|
||||
@dataclass
|
||||
class Foo:
|
||||
x: Optional[List[str]] = field(default_factory=list)
|
||||
|
||||
|
||||
Because `list` could be any kind of list, I guess
|
||||
"""
|
||||
return []
|
||||
|
||||
|
||||
class Replaceable:
|
||||
def replace(self, **kwargs):
|
||||
return dataclasses.replace(self, **kwargs)
|
||||
|
||||
|
||||
class Mergeable(Replaceable):
|
||||
def merged(self, *args):
|
||||
"""Perform a shallow merge, where the last non-None write wins. This is
|
||||
intended to merge dataclasses that are a collection of optional values.
|
||||
"""
|
||||
replacements = {}
|
||||
cls = type(self)
|
||||
for arg in args:
|
||||
for field in dataclasses.fields(cls):
|
||||
value = getattr(arg, field.name)
|
||||
if value is not None:
|
||||
replacements[field.name] = value
|
||||
|
||||
return self.replace(**replacements)
|
||||
|
||||
|
||||
class Writable:
|
||||
def write(self, path: str, omit_none: bool = False):
|
||||
write_json(path, self.to_dict(omit_none=omit_none)) # type: ignore
|
||||
|
||||
|
||||
class AdditionalPropertiesMixin:
|
||||
"""Make this class an extensible property.
|
||||
|
||||
The underlying class definition must include a type definition for a field
|
||||
named '_extra' that is of type `Dict[str, Any]`.
|
||||
"""
|
||||
ADDITIONAL_PROPERTIES = True
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, data, validate=True):
|
||||
self = super().from_dict(data=data, validate=validate)
|
||||
keys = self.to_dict(validate=False, omit_none=False)
|
||||
for key, value in data.items():
|
||||
if key not in keys:
|
||||
self.extra[key] = value
|
||||
return self
|
||||
|
||||
def to_dict(self, omit_none=True, validate=False):
|
||||
data = super().to_dict(omit_none=omit_none, validate=validate)
|
||||
data.update(self.extra)
|
||||
return data
|
||||
|
||||
def replace(self, **kwargs):
|
||||
dct = self.to_dict(omit_none=False, validate=False)
|
||||
dct.update(kwargs)
|
||||
return self.from_dict(dct)
|
||||
|
||||
@property
|
||||
def extra(self):
|
||||
return self._extra
|
||||
|
||||
|
||||
class Readable:
|
||||
@classmethod
|
||||
def read(cls, path: str):
|
||||
try:
|
||||
data = read_json(path)
|
||||
except (EnvironmentError, ValueError) as exc:
|
||||
raise RuntimeException(
|
||||
f'Could not read {cls.__name__} at "{path}" as JSON: {exc}'
|
||||
) from exc
|
||||
|
||||
return cls.from_dict(data) # type: ignore
|
||||
|
||||
|
||||
BASE_SCHEMAS_URL = 'https://schemas.getdbt.com/dbt/{name}/v{version}.json'
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class SchemaVersion:
|
||||
name: str
|
||||
version: int
|
||||
|
||||
def __str__(self) -> str:
|
||||
return BASE_SCHEMAS_URL.format(
|
||||
name=self.name,
|
||||
version=self.version,
|
||||
)
|
||||
|
||||
|
||||
SCHEMA_VERSION_KEY = 'dbt_schema_version'
|
||||
|
||||
|
||||
METADATA_ENV_PREFIX = 'DBT_ENV_CUSTOM_ENV_'
|
||||
|
||||
|
||||
def get_metadata_env() -> Dict[str, str]:
|
||||
return {
|
||||
k[len(METADATA_ENV_PREFIX):]: v for k, v in os.environ.items()
|
||||
if k.startswith(METADATA_ENV_PREFIX)
|
||||
}
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class BaseArtifactMetadata(JsonSchemaMixin):
|
||||
dbt_schema_version: str
|
||||
dbt_version: str = __version__
|
||||
generated_at: datetime = dataclasses.field(
|
||||
default_factory=datetime.utcnow
|
||||
)
|
||||
invocation_id: Optional[str] = dataclasses.field(
|
||||
default_factory=get_invocation_id
|
||||
)
|
||||
env: Dict[str, str] = dataclasses.field(default_factory=get_metadata_env)
|
||||
|
||||
|
||||
def schema_version(name: str, version: int):
|
||||
def inner(cls: Type[VersionedSchema]):
|
||||
cls.dbt_schema_version = SchemaVersion(
|
||||
name=name,
|
||||
version=version,
|
||||
)
|
||||
return cls
|
||||
return inner
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class VersionedSchema(JsonSchemaMixin):
|
||||
dbt_schema_version: ClassVar[SchemaVersion]
|
||||
|
||||
@classmethod
|
||||
def json_schema(cls, embeddable: bool = False) -> Dict[str, Any]:
|
||||
result = super().json_schema(embeddable=embeddable)
|
||||
if not embeddable:
|
||||
result['$id'] = str(cls.dbt_schema_version)
|
||||
return result
|
||||
|
||||
|
||||
T = TypeVar('T', bound='ArtifactMixin')
|
||||
|
||||
|
||||
# metadata should really be a Generic[T_M] where T_M is a TypeVar bound to
|
||||
# BaseArtifactMetadata. Unfortunately this isn't possible due to a mypy issue:
|
||||
# https://github.com/python/mypy/issues/7520
|
||||
@dataclasses.dataclass(init=False)
|
||||
class ArtifactMixin(VersionedSchema, Writable, Readable):
|
||||
metadata: BaseArtifactMetadata
|
||||
|
||||
@classmethod
|
||||
def from_dict(
|
||||
cls: Type[T], data: Dict[str, Any], validate: bool = True
|
||||
) -> T:
|
||||
if cls.dbt_schema_version is None:
|
||||
raise InternalException(
|
||||
'Cannot call from_dict with no schema version!'
|
||||
)
|
||||
|
||||
if validate:
|
||||
expected = str(cls.dbt_schema_version)
|
||||
found = data.get('metadata', {}).get(SCHEMA_VERSION_KEY)
|
||||
if found != expected:
|
||||
raise IncompatibleSchemaException(expected, found)
|
||||
|
||||
return super().from_dict(data=data, validate=validate)
|
||||
172
core/dbt/deprecations.py
Normal file
172
core/dbt/deprecations.py
Normal file
@@ -0,0 +1,172 @@
|
||||
from typing import Optional, Set, List, Dict, ClassVar
|
||||
|
||||
import dbt.exceptions
|
||||
from dbt import ui
|
||||
|
||||
import dbt.tracking
|
||||
|
||||
|
||||
class DBTDeprecation:
|
||||
_name: ClassVar[Optional[str]] = None
|
||||
_description: ClassVar[Optional[str]] = None
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
if self._name is not None:
|
||||
return self._name
|
||||
raise NotImplementedError(
|
||||
'name not implemented for {}'.format(self)
|
||||
)
|
||||
|
||||
def track_deprecation_warn(self) -> None:
|
||||
if dbt.tracking.active_user is not None:
|
||||
dbt.tracking.track_deprecation_warn({
|
||||
"deprecation_name": self.name
|
||||
})
|
||||
|
||||
@property
|
||||
def description(self) -> str:
|
||||
if self._description is not None:
|
||||
return self._description
|
||||
raise NotImplementedError(
|
||||
'description not implemented for {}'.format(self)
|
||||
)
|
||||
|
||||
def show(self, *args, **kwargs) -> None:
|
||||
if self.name not in active_deprecations:
|
||||
desc = self.description.format(**kwargs)
|
||||
msg = ui.line_wrap_message(
|
||||
desc, prefix='* Deprecation Warning: '
|
||||
)
|
||||
dbt.exceptions.warn_or_error(msg)
|
||||
self.track_deprecation_warn()
|
||||
active_deprecations.add(self.name)
|
||||
|
||||
|
||||
class MaterializationReturnDeprecation(DBTDeprecation):
|
||||
_name = 'materialization-return'
|
||||
|
||||
_description = '''\
|
||||
The materialization ("{materialization}") did not explicitly return a list
|
||||
of relations to add to the cache. By default the target relation will be
|
||||
added, but this behavior will be removed in a future version of dbt.
|
||||
|
||||
|
||||
|
||||
For more information, see:
|
||||
|
||||
https://docs.getdbt.com/v0.15/docs/creating-new-materializations#section-6-returning-relations
|
||||
'''
|
||||
|
||||
|
||||
class NotADictionaryDeprecation(DBTDeprecation):
|
||||
_name = 'not-a-dictionary'
|
||||
|
||||
_description = '''\
|
||||
The object ("{obj}") was used as a dictionary. In a future version of dbt
|
||||
this capability will be removed from objects of this type.
|
||||
'''
|
||||
|
||||
|
||||
class ColumnQuotingDeprecation(DBTDeprecation):
|
||||
_name = 'column-quoting-unset'
|
||||
|
||||
_description = '''\
|
||||
The quote_columns parameter was not set for seeds, so the default value of
|
||||
False was chosen. The default will change to True in a future release.
|
||||
|
||||
|
||||
|
||||
For more information, see:
|
||||
|
||||
https://docs.getdbt.com/v0.15/docs/seeds#section-specify-column-quoting
|
||||
'''
|
||||
|
||||
|
||||
class ModelsKeyNonModelDeprecation(DBTDeprecation):
|
||||
_name = 'models-key-mismatch'
|
||||
|
||||
_description = '''\
|
||||
"{node.name}" is a {node.resource_type} node, but it is specified in
|
||||
the {patch.yaml_key} section of {patch.original_file_path}.
|
||||
|
||||
|
||||
|
||||
To fix this warning, place the `{node.name}` specification under
|
||||
the {expected_key} key instead.
|
||||
|
||||
This warning will become an error in a future release.
|
||||
'''
|
||||
|
||||
|
||||
class ExecuteMacrosReleaseDeprecation(DBTDeprecation):
|
||||
_name = 'execute-macro-release'
|
||||
_description = '''\
|
||||
The "release" argument to execute_macro is now ignored, and will be removed
|
||||
in a future relase of dbt. At that time, providing a `release` argument
|
||||
will result in an error.
|
||||
'''
|
||||
|
||||
|
||||
class AdapterMacroDeprecation(DBTDeprecation):
|
||||
_name = 'adapter-macro'
|
||||
_description = '''\
|
||||
The "adapter_macro" macro has been deprecated. Instead, use the
|
||||
`adapter.dispatch` method to find a macro and call the result.
|
||||
adapter_macro was called for: {macro_name}
|
||||
'''
|
||||
|
||||
|
||||
_adapter_renamed_description = """\
|
||||
The adapter function `adapter.{old_name}` is deprecated and will be removed in
|
||||
a future release of dbt. Please use `adapter.{new_name}` instead.
|
||||
|
||||
Documentation for {new_name} can be found here:
|
||||
|
||||
https://docs.getdbt.com/docs/adapter
|
||||
"""
|
||||
|
||||
|
||||
def renamed_method(old_name: str, new_name: str):
|
||||
|
||||
class AdapterDeprecationWarning(DBTDeprecation):
|
||||
_name = 'adapter:{}'.format(old_name)
|
||||
_description = _adapter_renamed_description.format(old_name=old_name,
|
||||
new_name=new_name)
|
||||
|
||||
dep = AdapterDeprecationWarning()
|
||||
deprecations_list.append(dep)
|
||||
deprecations[dep.name] = dep
|
||||
|
||||
|
||||
def warn(name, *args, **kwargs):
|
||||
if name not in deprecations:
|
||||
# this should (hopefully) never happen
|
||||
raise RuntimeError(
|
||||
"Error showing deprecation warning: {}".format(name)
|
||||
)
|
||||
|
||||
deprecations[name].show(*args, **kwargs)
|
||||
|
||||
|
||||
# these are globally available
|
||||
# since modules are only imported once, active_deprecations is a singleton
|
||||
|
||||
active_deprecations: Set[str] = set()
|
||||
|
||||
deprecations_list: List[DBTDeprecation] = [
|
||||
MaterializationReturnDeprecation(),
|
||||
NotADictionaryDeprecation(),
|
||||
ColumnQuotingDeprecation(),
|
||||
ModelsKeyNonModelDeprecation(),
|
||||
ExecuteMacrosReleaseDeprecation(),
|
||||
AdapterMacroDeprecation(),
|
||||
]
|
||||
|
||||
deprecations: Dict[str, DBTDeprecation] = {
|
||||
d.name: d for d in deprecations_list
|
||||
}
|
||||
|
||||
|
||||
def reset_deprecations():
|
||||
active_deprecations.clear()
|
||||
112
core/dbt/deps/base.py
Normal file
112
core/dbt/deps/base.py
Normal file
@@ -0,0 +1,112 @@
|
||||
import abc
|
||||
import os
|
||||
import tempfile
|
||||
from contextlib import contextmanager
|
||||
from typing import List, Optional, Generic, TypeVar
|
||||
|
||||
from dbt.clients import system
|
||||
from dbt.contracts.project import ProjectPackageMetadata
|
||||
from dbt.logger import GLOBAL_LOGGER as logger
|
||||
|
||||
DOWNLOADS_PATH = None
|
||||
|
||||
|
||||
def get_downloads_path():
|
||||
return DOWNLOADS_PATH
|
||||
|
||||
|
||||
@contextmanager
|
||||
def downloads_directory():
|
||||
global DOWNLOADS_PATH
|
||||
remove_downloads = False
|
||||
# the user might have set an environment variable. Set it to that, and do
|
||||
# not remove it when finished.
|
||||
if DOWNLOADS_PATH is None:
|
||||
DOWNLOADS_PATH = os.getenv('DBT_DOWNLOADS_DIR')
|
||||
remove_downloads = False
|
||||
# if we are making a per-run temp directory, remove it at the end of
|
||||
# successful runs
|
||||
if DOWNLOADS_PATH is None:
|
||||
DOWNLOADS_PATH = tempfile.mkdtemp(prefix='dbt-downloads-')
|
||||
remove_downloads = True
|
||||
|
||||
system.make_directory(DOWNLOADS_PATH)
|
||||
logger.debug("Set downloads directory='{}'".format(DOWNLOADS_PATH))
|
||||
|
||||
yield DOWNLOADS_PATH
|
||||
|
||||
if remove_downloads:
|
||||
system.rmtree(DOWNLOADS_PATH)
|
||||
DOWNLOADS_PATH = None
|
||||
|
||||
|
||||
class BasePackage(metaclass=abc.ABCMeta):
|
||||
@abc.abstractproperty
|
||||
def name(self) -> str:
|
||||
raise NotImplementedError
|
||||
|
||||
def all_names(self) -> List[str]:
|
||||
return [self.name]
|
||||
|
||||
@abc.abstractmethod
|
||||
def source_type(self) -> str:
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class PinnedPackage(BasePackage):
|
||||
def __init__(self) -> None:
|
||||
self._cached_metadata: Optional[ProjectPackageMetadata] = None
|
||||
|
||||
def __str__(self) -> str:
|
||||
version = self.get_version()
|
||||
if not version:
|
||||
return self.name
|
||||
|
||||
return '{}@{}'.format(self.name, version)
|
||||
|
||||
@abc.abstractmethod
|
||||
def get_version(self) -> Optional[str]:
|
||||
raise NotImplementedError
|
||||
|
||||
@abc.abstractmethod
|
||||
def _fetch_metadata(self, project, renderer):
|
||||
raise NotImplementedError
|
||||
|
||||
@abc.abstractmethod
|
||||
def install(self, project):
|
||||
raise NotImplementedError
|
||||
|
||||
@abc.abstractmethod
|
||||
def nice_version_name(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def fetch_metadata(self, project, renderer):
|
||||
if not self._cached_metadata:
|
||||
self._cached_metadata = self._fetch_metadata(project, renderer)
|
||||
return self._cached_metadata
|
||||
|
||||
def get_project_name(self, project, renderer):
|
||||
metadata = self.fetch_metadata(project, renderer)
|
||||
return metadata.name
|
||||
|
||||
def get_installation_path(self, project, renderer):
|
||||
dest_dirname = self.get_project_name(project, renderer)
|
||||
return os.path.join(project.modules_path, dest_dirname)
|
||||
|
||||
|
||||
SomePinned = TypeVar('SomePinned', bound=PinnedPackage)
|
||||
SomeUnpinned = TypeVar('SomeUnpinned', bound='UnpinnedPackage')
|
||||
|
||||
|
||||
class UnpinnedPackage(Generic[SomePinned], BasePackage):
|
||||
@abc.abstractclassmethod
|
||||
def from_contract(cls, contract):
|
||||
raise NotImplementedError
|
||||
|
||||
@abc.abstractmethod
|
||||
def incorporate(self: SomeUnpinned, other: SomeUnpinned) -> SomeUnpinned:
|
||||
raise NotImplementedError
|
||||
|
||||
@abc.abstractmethod
|
||||
def resolved(self) -> SomePinned:
|
||||
raise NotImplementedError
|
||||
145
core/dbt/deps/git.py
Normal file
145
core/dbt/deps/git.py
Normal file
@@ -0,0 +1,145 @@
|
||||
import os
|
||||
import hashlib
|
||||
from typing import List
|
||||
|
||||
from dbt.clients import git, system
|
||||
from dbt.config import Project
|
||||
from dbt.contracts.project import (
|
||||
ProjectPackageMetadata,
|
||||
GitPackage,
|
||||
)
|
||||
from dbt.deps.base import PinnedPackage, UnpinnedPackage, get_downloads_path
|
||||
from dbt.exceptions import (
|
||||
ExecutableError, warn_or_error, raise_dependency_error
|
||||
)
|
||||
from dbt.logger import GLOBAL_LOGGER as logger
|
||||
from dbt import ui
|
||||
|
||||
PIN_PACKAGE_URL = 'https://docs.getdbt.com/docs/package-management#section-specifying-package-versions' # noqa
|
||||
|
||||
|
||||
def md5sum(s: str):
|
||||
return hashlib.md5(s.encode('latin-1')).hexdigest()
|
||||
|
||||
|
||||
class GitPackageMixin:
|
||||
def __init__(self, git: str) -> None:
|
||||
super().__init__()
|
||||
self.git = git
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self.git
|
||||
|
||||
def source_type(self) -> str:
|
||||
return 'git'
|
||||
|
||||
|
||||
class GitPinnedPackage(GitPackageMixin, PinnedPackage):
|
||||
def __init__(
|
||||
self, git: str, revision: str, warn_unpinned: bool = True
|
||||
) -> None:
|
||||
super().__init__(git)
|
||||
self.revision = revision
|
||||
self.warn_unpinned = warn_unpinned
|
||||
self._checkout_name = md5sum(self.git)
|
||||
|
||||
def get_version(self):
|
||||
return self.revision
|
||||
|
||||
def nice_version_name(self):
|
||||
return 'revision {}'.format(self.revision)
|
||||
|
||||
def _checkout(self):
|
||||
"""Performs a shallow clone of the repository into the downloads
|
||||
directory. This function can be called repeatedly. If the project has
|
||||
already been checked out at this version, it will be a no-op. Returns
|
||||
the path to the checked out directory."""
|
||||
try:
|
||||
dir_ = git.clone_and_checkout(
|
||||
self.git, get_downloads_path(), branch=self.revision,
|
||||
dirname=self._checkout_name
|
||||
)
|
||||
except ExecutableError as exc:
|
||||
if exc.cmd and exc.cmd[0] == 'git':
|
||||
logger.error(
|
||||
'Make sure git is installed on your machine. More '
|
||||
'information: '
|
||||
'https://docs.getdbt.com/docs/package-management'
|
||||
)
|
||||
raise
|
||||
return os.path.join(get_downloads_path(), dir_)
|
||||
|
||||
def _fetch_metadata(self, project, renderer) -> ProjectPackageMetadata:
|
||||
path = self._checkout()
|
||||
if self.revision == 'master' and self.warn_unpinned:
|
||||
warn_or_error(
|
||||
'The git package "{}" is not pinned.\n\tThis can introduce '
|
||||
'breaking changes into your project without warning!\n\nSee {}'
|
||||
.format(self.git, PIN_PACKAGE_URL),
|
||||
log_fmt=ui.yellow('WARNING: {}')
|
||||
)
|
||||
loaded = Project.from_project_root(path, renderer)
|
||||
return ProjectPackageMetadata.from_project(loaded)
|
||||
|
||||
def install(self, project, renderer):
|
||||
dest_path = self.get_installation_path(project, renderer)
|
||||
if os.path.exists(dest_path):
|
||||
if system.path_is_symlink(dest_path):
|
||||
system.remove_file(dest_path)
|
||||
else:
|
||||
system.rmdir(dest_path)
|
||||
|
||||
system.move(self._checkout(), dest_path)
|
||||
|
||||
|
||||
class GitUnpinnedPackage(GitPackageMixin, UnpinnedPackage[GitPinnedPackage]):
|
||||
def __init__(
|
||||
self, git: str, revisions: List[str], warn_unpinned: bool = True
|
||||
) -> None:
|
||||
super().__init__(git)
|
||||
self.revisions = revisions
|
||||
self.warn_unpinned = warn_unpinned
|
||||
|
||||
@classmethod
|
||||
def from_contract(
|
||||
cls, contract: GitPackage
|
||||
) -> 'GitUnpinnedPackage':
|
||||
revisions = contract.get_revisions()
|
||||
|
||||
# we want to map None -> True
|
||||
warn_unpinned = contract.warn_unpinned is not False
|
||||
return cls(git=contract.git, revisions=revisions,
|
||||
warn_unpinned=warn_unpinned)
|
||||
|
||||
def all_names(self) -> List[str]:
|
||||
if self.git.endswith('.git'):
|
||||
other = self.git[:-4]
|
||||
else:
|
||||
other = self.git + '.git'
|
||||
return [self.git, other]
|
||||
|
||||
def incorporate(
|
||||
self, other: 'GitUnpinnedPackage'
|
||||
) -> 'GitUnpinnedPackage':
|
||||
warn_unpinned = self.warn_unpinned and other.warn_unpinned
|
||||
|
||||
return GitUnpinnedPackage(
|
||||
git=self.git,
|
||||
revisions=self.revisions + other.revisions,
|
||||
warn_unpinned=warn_unpinned,
|
||||
)
|
||||
|
||||
def resolved(self) -> GitPinnedPackage:
|
||||
requested = set(self.revisions)
|
||||
if len(requested) == 0:
|
||||
requested = {'master'}
|
||||
elif len(requested) > 1:
|
||||
raise_dependency_error(
|
||||
'git dependencies should contain exactly one version. '
|
||||
'{} contains: {}'.format(self.git, requested))
|
||||
|
||||
return GitPinnedPackage(
|
||||
git=self.git, revision=requested.pop(),
|
||||
warn_unpinned=self.warn_unpinned
|
||||
)
|
||||
84
core/dbt/deps/local.py
Normal file
84
core/dbt/deps/local.py
Normal file
@@ -0,0 +1,84 @@
|
||||
import shutil
|
||||
|
||||
from dbt.clients import system
|
||||
from dbt.deps.base import PinnedPackage, UnpinnedPackage
|
||||
from dbt.contracts.project import (
|
||||
ProjectPackageMetadata,
|
||||
LocalPackage,
|
||||
)
|
||||
from dbt.logger import GLOBAL_LOGGER as logger
|
||||
|
||||
|
||||
class LocalPackageMixin:
|
||||
def __init__(self, local: str) -> None:
|
||||
super().__init__()
|
||||
self.local = local
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self.local
|
||||
|
||||
def source_type(self):
|
||||
return 'local'
|
||||
|
||||
|
||||
class LocalPinnedPackage(LocalPackageMixin, PinnedPackage):
|
||||
def __init__(self, local: str) -> None:
|
||||
super().__init__(local)
|
||||
|
||||
def get_version(self):
|
||||
return None
|
||||
|
||||
def nice_version_name(self):
|
||||
return '<local @ {}>'.format(self.local)
|
||||
|
||||
def resolve_path(self, project):
|
||||
return system.resolve_path_from_base(
|
||||
self.local,
|
||||
project.project_root,
|
||||
)
|
||||
|
||||
def _fetch_metadata(self, project, renderer):
|
||||
loaded = project.from_project_root(
|
||||
self.resolve_path(project), renderer
|
||||
)
|
||||
return ProjectPackageMetadata.from_project(loaded)
|
||||
|
||||
def install(self, project, renderer):
|
||||
src_path = self.resolve_path(project)
|
||||
dest_path = self.get_installation_path(project, renderer)
|
||||
|
||||
can_create_symlink = system.supports_symlinks()
|
||||
|
||||
if system.path_exists(dest_path):
|
||||
if not system.path_is_symlink(dest_path):
|
||||
system.rmdir(dest_path)
|
||||
else:
|
||||
system.remove_file(dest_path)
|
||||
|
||||
if can_create_symlink:
|
||||
logger.debug(' Creating symlink to local dependency.')
|
||||
system.make_symlink(src_path, dest_path)
|
||||
|
||||
else:
|
||||
logger.debug(' Symlinks are not available on this '
|
||||
'OS, copying dependency.')
|
||||
shutil.copytree(src_path, dest_path)
|
||||
|
||||
|
||||
class LocalUnpinnedPackage(
|
||||
LocalPackageMixin, UnpinnedPackage[LocalPinnedPackage]
|
||||
):
|
||||
@classmethod
|
||||
def from_contract(
|
||||
cls, contract: LocalPackage
|
||||
) -> 'LocalUnpinnedPackage':
|
||||
return cls(local=contract.local)
|
||||
|
||||
def incorporate(
|
||||
self, other: 'LocalUnpinnedPackage'
|
||||
) -> 'LocalUnpinnedPackage':
|
||||
return LocalUnpinnedPackage(local=self.local)
|
||||
|
||||
def resolved(self) -> LocalPinnedPackage:
|
||||
return LocalPinnedPackage(local=self.local)
|
||||
122
core/dbt/deps/registry.py
Normal file
122
core/dbt/deps/registry.py
Normal file
@@ -0,0 +1,122 @@
|
||||
import os
|
||||
from typing import List
|
||||
|
||||
from dbt import semver
|
||||
from dbt.clients import registry, system
|
||||
from dbt.contracts.project import (
|
||||
RegistryPackageMetadata,
|
||||
RegistryPackage,
|
||||
)
|
||||
from dbt.deps.base import PinnedPackage, UnpinnedPackage, get_downloads_path
|
||||
from dbt.exceptions import (
|
||||
package_version_not_found,
|
||||
VersionsNotCompatibleException,
|
||||
DependencyException,
|
||||
package_not_found,
|
||||
)
|
||||
|
||||
|
||||
class RegistryPackageMixin:
|
||||
def __init__(self, package: str) -> None:
|
||||
super().__init__()
|
||||
self.package = package
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self.package
|
||||
|
||||
def source_type(self) -> str:
|
||||
return 'hub'
|
||||
|
||||
|
||||
class RegistryPinnedPackage(RegistryPackageMixin, PinnedPackage):
|
||||
def __init__(self, package: str, version: str) -> None:
|
||||
super().__init__(package)
|
||||
self.version = version
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self.package
|
||||
|
||||
def source_type(self):
|
||||
return 'hub'
|
||||
|
||||
def get_version(self):
|
||||
return self.version
|
||||
|
||||
def nice_version_name(self):
|
||||
return 'version {}'.format(self.version)
|
||||
|
||||
def _fetch_metadata(self, project, renderer) -> RegistryPackageMetadata:
|
||||
dct = registry.package_version(self.package, self.version)
|
||||
return RegistryPackageMetadata.from_dict(dct)
|
||||
|
||||
def install(self, project, renderer):
|
||||
metadata = self.fetch_metadata(project, renderer)
|
||||
|
||||
tar_name = '{}.{}.tar.gz'.format(self.package, self.version)
|
||||
tar_path = os.path.realpath(
|
||||
os.path.join(get_downloads_path(), tar_name)
|
||||
)
|
||||
system.make_directory(os.path.dirname(tar_path))
|
||||
|
||||
download_url = metadata.downloads.tarball
|
||||
system.download(download_url, tar_path)
|
||||
deps_path = project.modules_path
|
||||
package_name = self.get_project_name(project, renderer)
|
||||
system.untar_package(tar_path, deps_path, package_name)
|
||||
|
||||
|
||||
class RegistryUnpinnedPackage(
|
||||
RegistryPackageMixin, UnpinnedPackage[RegistryPinnedPackage]
|
||||
):
|
||||
def __init__(
|
||||
self, package: str, versions: List[semver.VersionSpecifier]
|
||||
) -> None:
|
||||
super().__init__(package)
|
||||
self.versions = versions
|
||||
|
||||
def _check_in_index(self):
|
||||
index = registry.index_cached()
|
||||
if self.package not in index:
|
||||
package_not_found(self.package)
|
||||
|
||||
@classmethod
|
||||
def from_contract(
|
||||
cls, contract: RegistryPackage
|
||||
) -> 'RegistryUnpinnedPackage':
|
||||
raw_version = contract.get_versions()
|
||||
|
||||
versions = [
|
||||
semver.VersionSpecifier.from_version_string(v)
|
||||
for v in raw_version
|
||||
]
|
||||
return cls(package=contract.package, versions=versions)
|
||||
|
||||
def incorporate(
|
||||
self, other: 'RegistryUnpinnedPackage'
|
||||
) -> 'RegistryUnpinnedPackage':
|
||||
return RegistryUnpinnedPackage(
|
||||
package=self.package,
|
||||
versions=self.versions + other.versions,
|
||||
)
|
||||
|
||||
def resolved(self) -> RegistryPinnedPackage:
|
||||
self._check_in_index()
|
||||
try:
|
||||
range_ = semver.reduce_versions(*self.versions)
|
||||
except VersionsNotCompatibleException as e:
|
||||
new_msg = ('Version error for package {}: {}'
|
||||
.format(self.name, e))
|
||||
raise DependencyException(new_msg) from e
|
||||
|
||||
available = registry.get_available_versions(self.package)
|
||||
|
||||
# for now, pick a version and then recurse. later on,
|
||||
# we'll probably want to traverse multiple options
|
||||
# so we can match packages. not going to make a difference
|
||||
# right now.
|
||||
target = semver.resolve_to_specific_version(range_, available)
|
||||
if not target:
|
||||
package_version_not_found(self.package, range_, available)
|
||||
return RegistryPinnedPackage(package=self.package, version=target)
|
||||
143
core/dbt/deps/resolver.py
Normal file
143
core/dbt/deps/resolver.py
Normal file
@@ -0,0 +1,143 @@
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Dict, List, NoReturn, Union, Type, Iterator, Set
|
||||
|
||||
from dbt.exceptions import raise_dependency_error, InternalException
|
||||
|
||||
from dbt.context.target import generate_target_context
|
||||
from dbt.config import Project, RuntimeConfig
|
||||
from dbt.config.renderer import DbtProjectYamlRenderer
|
||||
from dbt.deps.base import BasePackage, PinnedPackage, UnpinnedPackage
|
||||
from dbt.deps.local import LocalUnpinnedPackage
|
||||
from dbt.deps.git import GitUnpinnedPackage
|
||||
from dbt.deps.registry import RegistryUnpinnedPackage
|
||||
|
||||
from dbt.contracts.project import (
|
||||
LocalPackage,
|
||||
GitPackage,
|
||||
RegistryPackage,
|
||||
)
|
||||
|
||||
PackageContract = Union[LocalPackage, GitPackage, RegistryPackage]
|
||||
|
||||
|
||||
@dataclass
|
||||
class PackageListing:
|
||||
packages: Dict[str, UnpinnedPackage] = field(default_factory=dict)
|
||||
|
||||
def __len__(self):
|
||||
return len(self.packages)
|
||||
|
||||
def __bool__(self):
|
||||
return bool(self.packages)
|
||||
|
||||
def _pick_key(self, key: BasePackage) -> str:
|
||||
for name in key.all_names():
|
||||
if name in self.packages:
|
||||
return name
|
||||
return key.name
|
||||
|
||||
def __contains__(self, key: BasePackage):
|
||||
for name in key.all_names():
|
||||
if name in self.packages:
|
||||
return True
|
||||
|
||||
def __getitem__(self, key: BasePackage):
|
||||
key_str: str = self._pick_key(key)
|
||||
return self.packages[key_str]
|
||||
|
||||
def __setitem__(self, key: BasePackage, value):
|
||||
key_str: str = self._pick_key(key)
|
||||
self.packages[key_str] = value
|
||||
|
||||
def _mismatched_types(
|
||||
self, old: UnpinnedPackage, new: UnpinnedPackage
|
||||
) -> NoReturn:
|
||||
raise_dependency_error(
|
||||
f'Cannot incorporate {new} ({new.__class__.__name__}) in {old} '
|
||||
f'({old.__class__.__name__}): mismatched types'
|
||||
)
|
||||
|
||||
def incorporate(self, package: UnpinnedPackage):
|
||||
key: str = self._pick_key(package)
|
||||
if key in self.packages:
|
||||
existing: UnpinnedPackage = self.packages[key]
|
||||
if not isinstance(existing, type(package)):
|
||||
self._mismatched_types(existing, package)
|
||||
self.packages[key] = existing.incorporate(package)
|
||||
else:
|
||||
self.packages[key] = package
|
||||
|
||||
def update_from(self, src: List[PackageContract]) -> None:
|
||||
pkg: UnpinnedPackage
|
||||
for contract in src:
|
||||
if isinstance(contract, LocalPackage):
|
||||
pkg = LocalUnpinnedPackage.from_contract(contract)
|
||||
elif isinstance(contract, GitPackage):
|
||||
pkg = GitUnpinnedPackage.from_contract(contract)
|
||||
elif isinstance(contract, RegistryPackage):
|
||||
pkg = RegistryUnpinnedPackage.from_contract(contract)
|
||||
else:
|
||||
raise InternalException(
|
||||
'Invalid package type {}'.format(type(contract))
|
||||
)
|
||||
self.incorporate(pkg)
|
||||
|
||||
@classmethod
|
||||
def from_contracts(
|
||||
cls: Type['PackageListing'], src: List[PackageContract]
|
||||
) -> 'PackageListing':
|
||||
self = cls({})
|
||||
self.update_from(src)
|
||||
return self
|
||||
|
||||
def resolved(self) -> List[PinnedPackage]:
|
||||
return [p.resolved() for p in self.packages.values()]
|
||||
|
||||
def __iter__(self) -> Iterator[UnpinnedPackage]:
|
||||
return iter(self.packages.values())
|
||||
|
||||
|
||||
def _check_for_duplicate_project_names(
|
||||
final_deps: List[PinnedPackage],
|
||||
config: Project,
|
||||
renderer: DbtProjectYamlRenderer,
|
||||
):
|
||||
seen: Set[str] = set()
|
||||
for package in final_deps:
|
||||
project_name = package.get_project_name(config, renderer)
|
||||
if project_name in seen:
|
||||
raise_dependency_error(
|
||||
f'Found duplicate project "{project_name}". This occurs when '
|
||||
'a dependency has the same project name as some other '
|
||||
'dependency.'
|
||||
)
|
||||
elif project_name == config.project_name:
|
||||
raise_dependency_error(
|
||||
'Found a dependency with the same name as the root project '
|
||||
f'"{project_name}". Package names must be unique in a project.'
|
||||
' Please rename one of these packages.'
|
||||
)
|
||||
seen.add(project_name)
|
||||
|
||||
|
||||
def resolve_packages(
|
||||
packages: List[PackageContract], config: RuntimeConfig
|
||||
) -> List[PinnedPackage]:
|
||||
pending = PackageListing.from_contracts(packages)
|
||||
final = PackageListing()
|
||||
|
||||
ctx = generate_target_context(config, config.cli_vars)
|
||||
renderer = DbtProjectYamlRenderer(ctx)
|
||||
|
||||
while pending:
|
||||
next_pending = PackageListing()
|
||||
# resolve the dependency in question
|
||||
for package in pending:
|
||||
final.incorporate(package)
|
||||
target = final[package].resolved().fetch_metadata(config, renderer)
|
||||
next_pending.update_from(target.packages)
|
||||
pending = next_pending
|
||||
|
||||
resolved = final.resolved()
|
||||
_check_for_duplicate_project_names(resolved, config, renderer)
|
||||
return resolved
|
||||
1017
core/dbt/exceptions.py
Normal file
1017
core/dbt/exceptions.py
Normal file
File diff suppressed because it is too large
Load Diff
96
core/dbt/flags.py
Normal file
96
core/dbt/flags.py
Normal file
@@ -0,0 +1,96 @@
|
||||
import os
|
||||
import multiprocessing
|
||||
if os.name != 'nt':
|
||||
# https://bugs.python.org/issue41567
|
||||
import multiprocessing.popen_spawn_posix # type: ignore
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
# initially all flags are set to None, the on-load call of reset() will set
|
||||
# them for their first time.
|
||||
STRICT_MODE = None
|
||||
FULL_REFRESH = None
|
||||
USE_CACHE = None
|
||||
WARN_ERROR = None
|
||||
TEST_NEW_PARSER = None
|
||||
WRITE_JSON = None
|
||||
PARTIAL_PARSE = None
|
||||
USE_COLORS = None
|
||||
|
||||
|
||||
def env_set_truthy(key: str) -> Optional[str]:
|
||||
"""Return the value if it was set to a "truthy" string value, or None
|
||||
otherwise.
|
||||
"""
|
||||
value = os.getenv(key)
|
||||
if not value or value.lower() in ('0', 'false', 'f'):
|
||||
return None
|
||||
return value
|
||||
|
||||
|
||||
def env_set_path(key: str) -> Optional[Path]:
|
||||
value = os.getenv(key)
|
||||
if value is None:
|
||||
return value
|
||||
else:
|
||||
return Path(value)
|
||||
|
||||
|
||||
SINGLE_THREADED_WEBSERVER = env_set_truthy('DBT_SINGLE_THREADED_WEBSERVER')
|
||||
SINGLE_THREADED_HANDLER = env_set_truthy('DBT_SINGLE_THREADED_HANDLER')
|
||||
MACRO_DEBUGGING = env_set_truthy('DBT_MACRO_DEBUGGING')
|
||||
DEFER_MODE = env_set_truthy('DBT_DEFER_TO_STATE')
|
||||
ARTIFACT_STATE_PATH = env_set_path('DBT_ARTIFACT_STATE_PATH')
|
||||
|
||||
|
||||
def _get_context():
|
||||
# TODO: change this back to use fork() on linux when we have made that safe
|
||||
return multiprocessing.get_context('spawn')
|
||||
|
||||
|
||||
MP_CONTEXT = _get_context()
|
||||
|
||||
|
||||
def reset():
|
||||
global STRICT_MODE, FULL_REFRESH, USE_CACHE, WARN_ERROR, TEST_NEW_PARSER, \
|
||||
WRITE_JSON, PARTIAL_PARSE, MP_CONTEXT, USE_COLORS
|
||||
|
||||
STRICT_MODE = False
|
||||
FULL_REFRESH = False
|
||||
USE_CACHE = True
|
||||
WARN_ERROR = False
|
||||
TEST_NEW_PARSER = False
|
||||
WRITE_JSON = True
|
||||
PARTIAL_PARSE = False
|
||||
MP_CONTEXT = _get_context()
|
||||
USE_COLORS = True
|
||||
|
||||
|
||||
def set_from_args(args):
|
||||
global STRICT_MODE, FULL_REFRESH, USE_CACHE, WARN_ERROR, TEST_NEW_PARSER, \
|
||||
WRITE_JSON, PARTIAL_PARSE, MP_CONTEXT, USE_COLORS
|
||||
|
||||
USE_CACHE = getattr(args, 'use_cache', USE_CACHE)
|
||||
|
||||
FULL_REFRESH = getattr(args, 'full_refresh', FULL_REFRESH)
|
||||
STRICT_MODE = getattr(args, 'strict', STRICT_MODE)
|
||||
WARN_ERROR = (
|
||||
STRICT_MODE or
|
||||
getattr(args, 'warn_error', STRICT_MODE or WARN_ERROR)
|
||||
)
|
||||
|
||||
TEST_NEW_PARSER = getattr(args, 'test_new_parser', TEST_NEW_PARSER)
|
||||
WRITE_JSON = getattr(args, 'write_json', WRITE_JSON)
|
||||
PARTIAL_PARSE = getattr(args, 'partial_parse', None)
|
||||
MP_CONTEXT = _get_context()
|
||||
|
||||
# The use_colors attribute will always have a value because it is assigned
|
||||
# None by default from the add_mutually_exclusive_group function
|
||||
use_colors_override = getattr(args, 'use_colors')
|
||||
|
||||
if use_colors_override is not None:
|
||||
USE_COLORS = use_colors_override
|
||||
|
||||
|
||||
# initialize everything to the defaults on module load
|
||||
reset()
|
||||
18
core/dbt/graph/__init__.py
Normal file
18
core/dbt/graph/__init__.py
Normal file
@@ -0,0 +1,18 @@
|
||||
from .selector_spec import ( # noqa: F401
|
||||
SelectionUnion,
|
||||
SelectionSpec,
|
||||
SelectionIntersection,
|
||||
SelectionDifference,
|
||||
SelectionCriteria,
|
||||
)
|
||||
from .selector import ( # noqa: F401
|
||||
ResourceTypeSelector,
|
||||
NodeSelector,
|
||||
)
|
||||
from .cli import ( # noqa: F401
|
||||
parse_difference,
|
||||
parse_test_selectors,
|
||||
parse_from_selectors_definition,
|
||||
)
|
||||
from .queue import GraphQueue # noqa: F401
|
||||
from .graph import Graph, UniqueId # noqa: F401
|
||||
281
core/dbt/graph/cli.py
Normal file
281
core/dbt/graph/cli.py
Normal file
@@ -0,0 +1,281 @@
|
||||
# special support for CLI argument parsing.
|
||||
import itertools
|
||||
import yaml
|
||||
|
||||
from typing import (
|
||||
Dict, List, Optional, Tuple, Any, Union
|
||||
)
|
||||
|
||||
from dbt.contracts.selection import SelectorDefinition, SelectorFile
|
||||
from dbt.exceptions import InternalException, ValidationException
|
||||
|
||||
from .selector_spec import (
|
||||
SelectionUnion,
|
||||
SelectionSpec,
|
||||
SelectionIntersection,
|
||||
SelectionDifference,
|
||||
SelectionCriteria,
|
||||
)
|
||||
|
||||
INTERSECTION_DELIMITER = ','
|
||||
|
||||
DEFAULT_INCLUDES: List[str] = ['fqn:*', 'source:*', 'exposure:*']
|
||||
DEFAULT_EXCLUDES: List[str] = []
|
||||
DATA_TEST_SELECTOR: str = 'test_type:data'
|
||||
SCHEMA_TEST_SELECTOR: str = 'test_type:schema'
|
||||
|
||||
|
||||
def parse_union(
|
||||
components: List[str], expect_exists: bool
|
||||
) -> SelectionUnion:
|
||||
# turn ['a b', 'c'] -> ['a', 'b', 'c']
|
||||
raw_specs = itertools.chain.from_iterable(
|
||||
r.split(' ') for r in components
|
||||
)
|
||||
union_components: List[SelectionSpec] = []
|
||||
|
||||
# ['a', 'b', 'c,d'] -> union('a', 'b', intersection('c', 'd'))
|
||||
for raw_spec in raw_specs:
|
||||
intersection_components: List[SelectionSpec] = [
|
||||
SelectionCriteria.from_single_spec(part)
|
||||
for part in raw_spec.split(INTERSECTION_DELIMITER)
|
||||
]
|
||||
union_components.append(SelectionIntersection(
|
||||
components=intersection_components,
|
||||
expect_exists=expect_exists,
|
||||
raw=raw_spec,
|
||||
))
|
||||
|
||||
return SelectionUnion(
|
||||
components=union_components,
|
||||
expect_exists=False,
|
||||
raw=components,
|
||||
)
|
||||
|
||||
|
||||
def parse_union_from_default(
|
||||
raw: Optional[List[str]], default: List[str]
|
||||
) -> SelectionUnion:
|
||||
components: List[str]
|
||||
expect_exists: bool
|
||||
if raw is None:
|
||||
return parse_union(components=default, expect_exists=False)
|
||||
else:
|
||||
return parse_union(components=raw, expect_exists=True)
|
||||
|
||||
|
||||
def parse_difference(
|
||||
include: Optional[List[str]], exclude: Optional[List[str]]
|
||||
) -> SelectionDifference:
|
||||
included = parse_union_from_default(include, DEFAULT_INCLUDES)
|
||||
excluded = parse_union_from_default(exclude, DEFAULT_EXCLUDES)
|
||||
return SelectionDifference(components=[included, excluded])
|
||||
|
||||
|
||||
def parse_test_selectors(
|
||||
data: bool, schema: bool, base: SelectionSpec
|
||||
) -> SelectionSpec:
|
||||
union_components = []
|
||||
|
||||
if data:
|
||||
union_components.append(
|
||||
SelectionCriteria.from_single_spec(DATA_TEST_SELECTOR)
|
||||
)
|
||||
if schema:
|
||||
union_components.append(
|
||||
SelectionCriteria.from_single_spec(SCHEMA_TEST_SELECTOR)
|
||||
)
|
||||
|
||||
intersect_with: SelectionSpec
|
||||
if not union_components:
|
||||
return base
|
||||
elif len(union_components) == 1:
|
||||
intersect_with = union_components[0]
|
||||
else: # data and schema tests
|
||||
intersect_with = SelectionUnion(
|
||||
components=union_components,
|
||||
expect_exists=True,
|
||||
raw=[DATA_TEST_SELECTOR, SCHEMA_TEST_SELECTOR],
|
||||
)
|
||||
|
||||
return SelectionIntersection(
|
||||
components=[base, intersect_with], expect_exists=True
|
||||
)
|
||||
|
||||
|
||||
RawDefinition = Union[str, Dict[str, Any]]
|
||||
|
||||
|
||||
def _get_list_dicts(
|
||||
dct: Dict[str, Any], key: str
|
||||
) -> List[RawDefinition]:
|
||||
result: List[RawDefinition] = []
|
||||
if key not in dct:
|
||||
raise InternalException(
|
||||
f'Expected to find key {key} in dict, only found {list(dct)}'
|
||||
)
|
||||
values = dct[key]
|
||||
if not isinstance(values, list):
|
||||
raise ValidationException(
|
||||
f'Invalid value for key "{key}". Expected a list.'
|
||||
)
|
||||
for value in values:
|
||||
if isinstance(value, dict):
|
||||
for value_key in value:
|
||||
if not isinstance(value_key, str):
|
||||
raise ValidationException(
|
||||
f'Expected all keys to "{key}" dict to be strings, '
|
||||
f'but "{value_key}" is a "{type(value_key)}"'
|
||||
)
|
||||
result.append(value)
|
||||
elif isinstance(value, str):
|
||||
result.append(value)
|
||||
else:
|
||||
raise ValidationException(
|
||||
f'Invalid value type {type(value)} in key "{key}", expected '
|
||||
f'dict or str (value: {value}).'
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def _parse_exclusions(definition) -> Optional[SelectionSpec]:
|
||||
exclusions = _get_list_dicts(definition, 'exclude')
|
||||
parsed_exclusions = [
|
||||
parse_from_definition(excl) for excl in exclusions
|
||||
]
|
||||
if len(parsed_exclusions) == 1:
|
||||
return parsed_exclusions[0]
|
||||
elif len(parsed_exclusions) > 1:
|
||||
return SelectionUnion(
|
||||
components=parsed_exclusions,
|
||||
raw=exclusions
|
||||
)
|
||||
else:
|
||||
return None
|
||||
|
||||
|
||||
def _parse_include_exclude_subdefs(
|
||||
definitions: List[RawDefinition]
|
||||
) -> Tuple[List[SelectionSpec], Optional[SelectionSpec]]:
|
||||
include_parts: List[SelectionSpec] = []
|
||||
diff_arg: Optional[SelectionSpec] = None
|
||||
|
||||
for definition in definitions:
|
||||
if isinstance(definition, dict) and 'exclude' in definition:
|
||||
# do not allow multiple exclude: defs at the same level
|
||||
if diff_arg is not None:
|
||||
yaml_sel_cfg = yaml.dump(definition)
|
||||
raise ValidationException(
|
||||
f"You cannot provide multiple exclude arguments to the "
|
||||
f"same selector set operator:\n{yaml_sel_cfg}"
|
||||
)
|
||||
diff_arg = _parse_exclusions(definition)
|
||||
else:
|
||||
include_parts.append(parse_from_definition(definition))
|
||||
|
||||
return (include_parts, diff_arg)
|
||||
|
||||
|
||||
def parse_union_definition(definition: Dict[str, Any]) -> SelectionSpec:
|
||||
union_def_parts = _get_list_dicts(definition, 'union')
|
||||
include, exclude = _parse_include_exclude_subdefs(union_def_parts)
|
||||
|
||||
union = SelectionUnion(components=include)
|
||||
|
||||
if exclude is None:
|
||||
union.raw = definition
|
||||
return union
|
||||
else:
|
||||
return SelectionDifference(
|
||||
components=[union, exclude],
|
||||
raw=definition
|
||||
)
|
||||
|
||||
|
||||
def parse_intersection_definition(
|
||||
definition: Dict[str, Any]
|
||||
) -> SelectionSpec:
|
||||
intersection_def_parts = _get_list_dicts(definition, 'intersection')
|
||||
include, exclude = _parse_include_exclude_subdefs(intersection_def_parts)
|
||||
intersection = SelectionIntersection(components=include)
|
||||
|
||||
if exclude is None:
|
||||
intersection.raw = definition
|
||||
return intersection
|
||||
else:
|
||||
return SelectionDifference(
|
||||
components=[intersection, exclude],
|
||||
raw=definition
|
||||
)
|
||||
|
||||
|
||||
def parse_dict_definition(definition: Dict[str, Any]) -> SelectionSpec:
|
||||
diff_arg: Optional[SelectionSpec] = None
|
||||
if len(definition) == 1:
|
||||
key = list(definition)[0]
|
||||
value = definition[key]
|
||||
if not isinstance(key, str):
|
||||
raise ValidationException(
|
||||
f'Expected definition key to be a "str", got one of type '
|
||||
f'"{type(key)}" ({key})'
|
||||
)
|
||||
dct = {
|
||||
'method': key,
|
||||
'value': value,
|
||||
}
|
||||
elif 'method' in definition and 'value' in definition:
|
||||
dct = definition
|
||||
if 'exclude' in definition:
|
||||
diff_arg = _parse_exclusions(definition)
|
||||
dct = {k: v for k, v in dct.items() if k != 'exclude'}
|
||||
else:
|
||||
raise ValidationException(
|
||||
f'Expected either 1 key or else "method" '
|
||||
f'and "value" keys, but got {list(definition)}'
|
||||
)
|
||||
|
||||
# if key isn't a valid method name, this will raise
|
||||
base = SelectionCriteria.from_dict(definition, dct)
|
||||
if diff_arg is None:
|
||||
return base
|
||||
else:
|
||||
return SelectionDifference(components=[base, diff_arg])
|
||||
|
||||
|
||||
def parse_from_definition(
|
||||
definition: RawDefinition, rootlevel=False
|
||||
) -> SelectionSpec:
|
||||
|
||||
if (isinstance(definition, dict) and
|
||||
('union' in definition or 'intersection' in definition) and
|
||||
rootlevel and len(definition) > 1):
|
||||
keys = ",".join(definition.keys())
|
||||
raise ValidationException(
|
||||
f"Only a single 'union' or 'intersection' key is allowed "
|
||||
f"in a root level selector definition; found {keys}."
|
||||
)
|
||||
if isinstance(definition, str):
|
||||
return SelectionCriteria.from_single_spec(definition)
|
||||
elif 'union' in definition:
|
||||
return parse_union_definition(definition)
|
||||
elif 'intersection' in definition:
|
||||
return parse_intersection_definition(definition)
|
||||
elif isinstance(definition, dict):
|
||||
return parse_dict_definition(definition)
|
||||
else:
|
||||
raise ValidationException(
|
||||
f'Expected to find union, intersection, str or dict, instead '
|
||||
f'found {type(definition)}: {definition}'
|
||||
)
|
||||
|
||||
|
||||
def parse_from_selectors_definition(
|
||||
source: SelectorFile
|
||||
) -> Dict[str, SelectionSpec]:
|
||||
result: Dict[str, SelectionSpec] = {}
|
||||
selector: SelectorDefinition
|
||||
for selector in source.selectors:
|
||||
result[selector.name] = parse_from_definition(selector.definition,
|
||||
rootlevel=True)
|
||||
return result
|
||||
105
core/dbt/graph/graph.py
Normal file
105
core/dbt/graph/graph.py
Normal file
@@ -0,0 +1,105 @@
|
||||
from typing import (
|
||||
Set, Iterable, Iterator, Optional, NewType
|
||||
)
|
||||
import networkx as nx # type: ignore
|
||||
|
||||
from dbt.exceptions import InternalException
|
||||
|
||||
UniqueId = NewType('UniqueId', str)
|
||||
|
||||
|
||||
class Graph:
|
||||
"""A wrapper around the networkx graph that understands SelectionCriteria
|
||||
and how they interact with the graph.
|
||||
"""
|
||||
def __init__(self, graph):
|
||||
self.graph = graph
|
||||
|
||||
def nodes(self) -> Set[UniqueId]:
|
||||
return set(self.graph.nodes())
|
||||
|
||||
def edges(self):
|
||||
return self.graph.edges()
|
||||
|
||||
def __iter__(self) -> Iterator[UniqueId]:
|
||||
return iter(self.graph.nodes())
|
||||
|
||||
def ancestors(
|
||||
self, node: UniqueId, max_depth: Optional[int] = None
|
||||
) -> Set[UniqueId]:
|
||||
"""Returns all nodes having a path to `node` in `graph`"""
|
||||
if not self.graph.has_node(node):
|
||||
raise InternalException(f'Node {node} not found in the graph!')
|
||||
with nx.utils.reversed(self.graph):
|
||||
anc = nx.single_source_shortest_path_length(G=self.graph,
|
||||
source=node,
|
||||
cutoff=max_depth)\
|
||||
.keys()
|
||||
return anc - {node}
|
||||
|
||||
def descendants(
|
||||
self, node: UniqueId, max_depth: Optional[int] = None
|
||||
) -> Set[UniqueId]:
|
||||
"""Returns all nodes reachable from `node` in `graph`"""
|
||||
if not self.graph.has_node(node):
|
||||
raise InternalException(f'Node {node} not found in the graph!')
|
||||
des = nx.single_source_shortest_path_length(G=self.graph,
|
||||
source=node,
|
||||
cutoff=max_depth)\
|
||||
.keys()
|
||||
return des - {node}
|
||||
|
||||
def select_childrens_parents(
|
||||
self, selected: Set[UniqueId]
|
||||
) -> Set[UniqueId]:
|
||||
ancestors_for = self.select_children(selected) | selected
|
||||
return self.select_parents(ancestors_for) | ancestors_for
|
||||
|
||||
def select_children(
|
||||
self, selected: Set[UniqueId], max_depth: Optional[int] = None
|
||||
) -> Set[UniqueId]:
|
||||
descendants: Set[UniqueId] = set()
|
||||
for node in selected:
|
||||
descendants.update(self.descendants(node, max_depth))
|
||||
return descendants
|
||||
|
||||
def select_parents(
|
||||
self, selected: Set[UniqueId], max_depth: Optional[int] = None
|
||||
) -> Set[UniqueId]:
|
||||
ancestors: Set[UniqueId] = set()
|
||||
for node in selected:
|
||||
ancestors.update(self.ancestors(node, max_depth))
|
||||
return ancestors
|
||||
|
||||
def select_successors(self, selected: Set[UniqueId]) -> Set[UniqueId]:
|
||||
successors: Set[UniqueId] = set()
|
||||
for node in selected:
|
||||
successors.update(self.graph.successors(node))
|
||||
return successors
|
||||
|
||||
def get_subset_graph(self, selected: Iterable[UniqueId]) -> 'Graph':
|
||||
"""Create and return a new graph that is a shallow copy of the graph,
|
||||
but with only the nodes in include_nodes. Transitive edges across
|
||||
removed nodes are preserved as explicit new edges.
|
||||
"""
|
||||
new_graph = nx.algorithms.transitive_closure(self.graph)
|
||||
|
||||
include_nodes = set(selected)
|
||||
|
||||
for node in self:
|
||||
if node not in include_nodes:
|
||||
new_graph.remove_node(node)
|
||||
|
||||
for node in include_nodes:
|
||||
if node not in new_graph:
|
||||
raise ValueError(
|
||||
"Couldn't find model '{}' -- does it exist or is "
|
||||
"it disabled?".format(node)
|
||||
)
|
||||
return Graph(new_graph)
|
||||
|
||||
def subgraph(self, nodes: Iterable[UniqueId]) -> 'Graph':
|
||||
return Graph(self.graph.subgraph(nodes))
|
||||
|
||||
def get_dependent_nodes(self, node: UniqueId):
|
||||
return nx.descendants(self.graph, node)
|
||||
181
core/dbt/graph/queue.py
Normal file
181
core/dbt/graph/queue.py
Normal file
@@ -0,0 +1,181 @@
import threading
from queue import PriorityQueue
from typing import (
    Dict, Set, Optional
)

import networkx as nx  # type: ignore

from .graph import UniqueId
from dbt.contracts.graph.parsed import ParsedSourceDefinition, ParsedExposure
from dbt.contracts.graph.compiled import GraphMemberNode
from dbt.contracts.graph.manifest import Manifest
from dbt.node_types import NodeType


class GraphQueue:
    """A fancy queue that is backed by the dependency graph.
    Note: this will mutate input!

    This queue is thread-safe for `mark_done` calls, though you must ensure
    that separate threads do not call `.empty()` or `__len__()` and `.get()`
    at the same time, as there is an unlocked race!
    """
    def __init__(
        self, graph: nx.DiGraph, manifest: Manifest, selected: Set[UniqueId]
    ):
        self.graph = graph
        self.manifest = manifest
        self._selected = selected
        # store the queue as a priority queue.
        self.inner: PriorityQueue = PriorityQueue()
        # things that have been popped off the queue but not finished
        # and worker thread reservations
        self.in_progress: Set[UniqueId] = set()
        # things that are in the queue
        self.queued: Set[UniqueId] = set()
        # this lock controls most things
        self.lock = threading.Lock()
        # store the 'score' of each node as a number. Lower is higher priority.
        self._scores = self._calculate_scores()
        # populate the initial queue
        self._find_new_additions()
        # condition used to wait for a task to end
        self.some_task_done = threading.Condition(self.lock)

    def get_selected_nodes(self) -> Set[UniqueId]:
        return self._selected.copy()

    def _include_in_cost(self, node_id: UniqueId) -> bool:
        node = self.manifest.expect(node_id)
        if node.resource_type != NodeType.Model:
            return False
        # must be a Model - tell mypy this won't be a Source or Exposure
        assert not isinstance(node, (ParsedSourceDefinition, ParsedExposure))
        if node.is_ephemeral:
            return False
        return True

    def _calculate_scores(self) -> Dict[UniqueId, int]:
        """Calculate the 'value' of each node in the graph based on how many
        blocking descendants it has. We use this score for the internal
        priority queue's ordering, so the quality of this metric is important.

        The score is stored as a negative number because the internal
        PriorityQueue picks lowest values first.

        We could do this in one pass over the graph instead of len(self.graph)
        passes, but this is easy. For large graphs this may hurt performance.

        This operates on the graph, so it would require a lock if called from
        outside __init__.

        :return Dict[str, int]: The score dict, mapping unique IDs to integer
            scores. Lower scores are higher priority.
        """
        scores = {}
        for node in self.graph.nodes():
            score = -1 * len([
                d for d in nx.descendants(self.graph, node)
                if self._include_in_cost(d)
            ])
            scores[node] = score
        return scores

    def get(
        self, block: bool = True, timeout: Optional[float] = None
    ) -> GraphMemberNode:
        """Get a node off the inner priority queue. By default, this blocks.

        This takes the lock, but only for part of the operation.

        :param block: If True, block until the inner queue has data
        :param timeout: If set, block for timeout seconds waiting for data.
        :return: The node as present in the manifest.

        See `queue.PriorityQueue` for more information on `get()` behavior
        and exceptions.
        """
        _, node_id = self.inner.get(block=block, timeout=timeout)
        with self.lock:
            self._mark_in_progress(node_id)
        return self.manifest.expect(node_id)

    def __len__(self) -> int:
        """The length of the queue is the number of tasks left for the queue
        to give out, regardless of where they are. Incomplete tasks are not
        part of the length.

        This takes the lock.
        """
        with self.lock:
            return len(self.graph) - len(self.in_progress)

    def empty(self) -> bool:
        """The graph queue is 'empty' if all remaining nodes in the graph
        are in progress.

        This takes the lock.
        """
        return len(self) == 0

    def _already_known(self, node: UniqueId) -> bool:
        """Decide if a node is already known (either handed out as a task, or
        in the queue).

        Callers must hold the lock.

        :param str node: The node ID to check
        :returns bool: If the node is in progress/queued.
        """
        return node in self.in_progress or node in self.queued

    def _find_new_additions(self) -> None:
        """Find any nodes in the graph that need to be added to the internal
        queue and add them.

        Callers must hold the lock.
        """
        for node, in_degree in self.graph.in_degree():
            if not self._already_known(node) and in_degree == 0:
                self.inner.put((self._scores[node], node))
                self.queued.add(node)

    def mark_done(self, node_id: UniqueId) -> None:
        """Given a node's unique ID, mark it as done.

        This method takes the lock.

        :param str node_id: The node ID to mark as complete.
        """
        with self.lock:
            self.in_progress.remove(node_id)
            self.graph.remove_node(node_id)
            self._find_new_additions()
            self.inner.task_done()
            self.some_task_done.notify_all()

    def _mark_in_progress(self, node_id: UniqueId) -> None:
        """Mark the node as 'in progress'.

        Callers must hold the lock.

        :param str node_id: The node ID to mark as in progress.
        """
        self.queued.remove(node_id)
        self.in_progress.add(node_id)

    def join(self) -> None:
        """Join the queue. Blocks until all tasks are marked as done.

        Make sure not to call this before the queue reports that it is empty.
        """
        self.inner.join()

    def wait_until_something_was_done(self) -> int:
        """Block until a task is done, then return the number of unfinished
        tasks.
        """
        with self.lock:
            self.some_task_done.wait()
            return self.inner.unfinished_tasks
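The intended consumption pattern, sketched below, is a pool of worker threads that pop ready nodes with `get()` and release their dependents with `mark_done()`. This is a minimal sketch under assumptions: `run_node` is a hypothetical per-node callable, and the `empty()`/`get()` race the class docstring warns about is tolerated here via the timeout-and-retry loop.

from queue import Empty

def worker(graph_queue, run_node):
    # Pop ready nodes, run them, then unblock their children.
    while not graph_queue.empty():
        try:
            node = graph_queue.get(block=True, timeout=1.0)
        except Empty:
            continue  # a parent may still be running; try again
        try:
            run_node(node)  # hypothetical per-node work
        finally:
            graph_queue.mark_done(node.unique_id)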
core/dbt/graph/selector.py (new file, 211 lines)
@@ -0,0 +1,211 @@
from typing import Set, List, Optional

from .graph import Graph, UniqueId
from .queue import GraphQueue
from .selector_methods import MethodManager
from .selector_spec import SelectionCriteria, SelectionSpec

from dbt.logger import GLOBAL_LOGGER as logger
from dbt.node_types import NodeType
from dbt.exceptions import (
    InternalException,
    InvalidSelectorException,
    warn_or_error,
)
from dbt.contracts.graph.compiled import GraphMemberNode
from dbt.contracts.graph.manifest import Manifest
from dbt.contracts.state import PreviousState


def get_package_names(nodes):
    return set([node.split(".")[1] for node in nodes])


def alert_non_existence(raw_spec, nodes):
    if len(nodes) == 0:
        warn_or_error(
            f"The selector '{str(raw_spec)}' does not match any nodes and will"
            f" be ignored"
        )


class NodeSelector(MethodManager):
    """The node selector is aware of the graph and the manifest."""
    def __init__(
        self,
        graph: Graph,
        manifest: Manifest,
        previous_state: Optional[PreviousState] = None,
    ):
        super().__init__(manifest, previous_state)
        self.full_graph = graph

        # build a subgraph containing only non-empty, enabled nodes and
        # enabled sources.
        graph_members = {
            unique_id for unique_id in self.full_graph.nodes()
            if self._is_graph_member(unique_id)
        }
        self.graph = self.full_graph.subgraph(graph_members)

    def select_included(
        self, included_nodes: Set[UniqueId], spec: SelectionCriteria,
    ) -> Set[UniqueId]:
        """Select the explicitly included nodes, using the given spec. Return
        the selected set of unique IDs.
        """
        method = self.get_method(spec.method, spec.method_arguments)
        return set(method.search(included_nodes, spec.value))

    def get_nodes_from_criteria(
        self,
        spec: SelectionCriteria,
    ) -> Set[UniqueId]:
        """Get all nodes specified by the single selection criteria.

        - collect the directly included nodes
        - find their specified relatives
        - perform any selector-specific expansion
        """
        nodes = self.graph.nodes()
        try:
            collected = self.select_included(nodes, spec)
        except InvalidSelectorException:
            valid_selectors = ", ".join(self.SELECTOR_METHODS)
            logger.info(
                f"The '{spec.method}' selector specified in {spec.raw} is "
                f"invalid. Must be one of [{valid_selectors}]"
            )
            return set()

        extras = self.collect_specified_neighbors(spec, collected)
        result = self.expand_selection(collected | extras)
        return result

    def collect_specified_neighbors(
        self, spec: SelectionCriteria, selected: Set[UniqueId]
    ) -> Set[UniqueId]:
        """Given the set of models selected by the explicit part of the
        selector (like "tag:foo"), apply the modifiers on the spec ("+"/"@").
        Return the set of additional nodes that should be collected (which
        may overlap with the selected set).
        """
        additional: Set[UniqueId] = set()
        if spec.childrens_parents:
            additional.update(self.graph.select_childrens_parents(selected))

        if spec.parents:
            depth = spec.parents_depth
            additional.update(self.graph.select_parents(selected, depth))

        if spec.children:
            depth = spec.children_depth
            additional.update(self.graph.select_children(selected, depth))
        return additional

    def select_nodes(self, spec: SelectionSpec) -> Set[UniqueId]:
        """Select the nodes in the graph according to the spec.

        If the spec is a composite spec (a union, difference, or
        intersection), recurse into its selections and combine them. If the
        spec is a concrete selection criteria, resolve that using the given
        graph.
        """
        if isinstance(spec, SelectionCriteria):
            result = self.get_nodes_from_criteria(spec)
        else:
            node_selections = [
                self.select_nodes(component)
                for component in spec
            ]
            result = spec.combined(node_selections)
            if spec.expect_exists:
                alert_non_existence(spec.raw, result)
        return result

    def _is_graph_member(self, unique_id: UniqueId) -> bool:
        if unique_id in self.manifest.sources:
            source = self.manifest.sources[unique_id]
            return source.config.enabled
        elif unique_id in self.manifest.exposures:
            return True
        node = self.manifest.nodes[unique_id]
        return not node.empty and node.config.enabled

    def node_is_match(self, node: GraphMemberNode) -> bool:
        """Determine if a node is a match for the selector. Non-match nodes
        will be excluded from results during filtering.
        """
        return True

    def _is_match(self, unique_id: UniqueId) -> bool:
        node: GraphMemberNode
        if unique_id in self.manifest.nodes:
            node = self.manifest.nodes[unique_id]
        elif unique_id in self.manifest.sources:
            node = self.manifest.sources[unique_id]
        elif unique_id in self.manifest.exposures:
            node = self.manifest.exposures[unique_id]
        else:
            raise InternalException(
                f'Node {unique_id} not found in the manifest!'
            )
        return self.node_is_match(node)

    def filter_selection(self, selected: Set[UniqueId]) -> Set[UniqueId]:
        """Return the subset of selected nodes that is a match for this
        selector.
        """
        return {
            unique_id for unique_id in selected if self._is_match(unique_id)
        }

    def expand_selection(self, selected: Set[UniqueId]) -> Set[UniqueId]:
        """Perform selector-specific expansion."""
        return selected

    def get_selected(self, spec: SelectionSpec) -> Set[UniqueId]:
        """get_selected runs through the node selection process:

        - node selection. Based on the include/exclude sets, the set
          of matched unique IDs is returned
            - expand the graph at each leaf node, before combination
            - selectors might override this. for example, this is where
              tests are added
        - filtering:
            - selectors can filter the nodes after all of them have been
              selected
        """
        selected_nodes = self.select_nodes(spec)
        filtered_nodes = self.filter_selection(selected_nodes)
        return filtered_nodes

    def get_graph_queue(self, spec: SelectionSpec) -> GraphQueue:
        """Returns a queue over nodes in the graph that tracks progress of
        dependencies.
        """
        selected_nodes = self.get_selected(spec)
        new_graph = self.full_graph.get_subset_graph(selected_nodes)
        # should we give a way here for consumers to mutate the graph?
        return GraphQueue(new_graph.graph, self.manifest, selected_nodes)


class ResourceTypeSelector(NodeSelector):
    def __init__(
        self,
        graph: Graph,
        manifest: Manifest,
        previous_state: Optional[PreviousState],
        resource_types: List[NodeType],
    ):
        super().__init__(
            graph=graph,
            manifest=manifest,
            previous_state=previous_state,
        )
        self.resource_types: Set[NodeType] = set(resource_types)

    def node_is_match(self, node):
        return node.resource_type in self.resource_types
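End to end, selection looks roughly like the sketch below. This assumes `graph` and `manifest` already exist (in dbt they come out of the compile step); the selector string is illustrative.

from dbt.graph.selector import NodeSelector
from dbt.graph.selector_spec import SelectionCriteria

selector = NodeSelector(graph, manifest)

# "+tag:nightly": every node tagged 'nightly', plus all of its ancestors.
spec = SelectionCriteria.from_single_spec('+tag:nightly')

selected = selector.get_selected(spec)   # Set[UniqueId]
queue = selector.get_graph_queue(spec)   # GraphQueue ready for execution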
core/dbt/graph/selector_methods.py (new file, 523 lines)
@@ -0,0 +1,523 @@
import abc
from itertools import chain
from pathlib import Path
from typing import Set, List, Dict, Iterator, Tuple, Any, Union, Type, Optional

from hologram.helpers import StrEnum

from .graph import UniqueId

from dbt.contracts.graph.compiled import (
    CompiledDataTestNode,
    CompiledSchemaTestNode,
    CompileResultNode,
    ManifestNode,
)
from dbt.contracts.graph.manifest import Manifest, WritableManifest
from dbt.contracts.graph.parsed import (
    HasTestMetadata,
    ParsedDataTestNode,
    ParsedExposure,
    ParsedSchemaTestNode,
    ParsedSourceDefinition,
)
from dbt.contracts.state import PreviousState
from dbt.logger import GLOBAL_LOGGER as logger
from dbt.exceptions import (
    InternalException,
    RuntimeException,
)
from dbt.node_types import NodeType
from dbt.ui import warning_tag


SELECTOR_GLOB = '*'
SELECTOR_DELIMITER = ':'


class MethodName(StrEnum):
    FQN = 'fqn'
    Tag = 'tag'
    Source = 'source'
    Path = 'path'
    Package = 'package'
    Config = 'config'
    TestName = 'test_name'
    TestType = 'test_type'
    ResourceType = 'resource_type'
    State = 'state'
    Exposure = 'exposure'


def is_selected_node(real_node, node_selector):
    for i, selector_part in enumerate(node_selector):

        is_last = (i == len(node_selector) - 1)

        # if we hit a GLOB, then this node is selected
        if selector_part == SELECTOR_GLOB:
            return True

        # match package.node_name or package.dir.node_name
        elif is_last and selector_part == real_node[-1]:
            return True

        elif len(real_node) <= i:
            return False

        elif real_node[i] == selector_part:
            continue

        else:
            return False

    # if we get all the way down here, then the node is a match
    return True
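# For intuition, here is how is_selected_node matches a node's fqn against a
# selector (the fqn and selector values below are invented for illustration):
#
#     fqn = ['my_project', 'staging', 'stg_orders']
#     is_selected_node(fqn, ['my_project', '*'])                      # True: glob
#     is_selected_node(fqn, ['my_project', 'staging', 'stg_orders'])  # True: exact
#     is_selected_node(fqn, ['my_project', 'stg_orders'])             # True: last part
#     is_selected_node(fqn, ['my_project', 'marts'])                  # False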

SelectorTarget = Union[ParsedSourceDefinition, ManifestNode, ParsedExposure]


class SelectorMethod(metaclass=abc.ABCMeta):
    def __init__(
        self,
        manifest: Manifest,
        previous_state: Optional[PreviousState],
        arguments: List[str]
    ):
        self.manifest: Manifest = manifest
        self.previous_state = previous_state
        self.arguments: List[str] = arguments

    def parsed_nodes(
        self,
        included_nodes: Set[UniqueId]
    ) -> Iterator[Tuple[UniqueId, ManifestNode]]:
        for key, node in self.manifest.nodes.items():
            unique_id = UniqueId(key)
            if unique_id not in included_nodes:
                continue
            yield unique_id, node

    def source_nodes(
        self,
        included_nodes: Set[UniqueId]
    ) -> Iterator[Tuple[UniqueId, ParsedSourceDefinition]]:
        for key, source in self.manifest.sources.items():
            unique_id = UniqueId(key)
            if unique_id not in included_nodes:
                continue
            yield unique_id, source

    def exposure_nodes(
        self,
        included_nodes: Set[UniqueId]
    ) -> Iterator[Tuple[UniqueId, ParsedExposure]]:
        for key, exposure in self.manifest.exposures.items():
            unique_id = UniqueId(key)
            if unique_id not in included_nodes:
                continue
            yield unique_id, exposure

    def all_nodes(
        self,
        included_nodes: Set[UniqueId]
    ) -> Iterator[Tuple[UniqueId, SelectorTarget]]:
        yield from chain(self.parsed_nodes(included_nodes),
                         self.source_nodes(included_nodes),
                         self.exposure_nodes(included_nodes))

    def configurable_nodes(
        self,
        included_nodes: Set[UniqueId]
    ) -> Iterator[Tuple[UniqueId, CompileResultNode]]:
        yield from chain(self.parsed_nodes(included_nodes),
                         self.source_nodes(included_nodes))

    def non_source_nodes(
        self,
        included_nodes: Set[UniqueId],
    ) -> Iterator[Tuple[UniqueId, Union[ParsedExposure, ManifestNode]]]:
        yield from chain(self.parsed_nodes(included_nodes),
                         self.exposure_nodes(included_nodes))

    @abc.abstractmethod
    def search(
        self,
        included_nodes: Set[UniqueId],
        selector: str,
    ) -> Iterator[UniqueId]:
        raise NotImplementedError('subclasses should implement this')


class QualifiedNameSelectorMethod(SelectorMethod):
    def node_is_match(
        self,
        qualified_name: List[str],
        package_names: Set[str],
        fqn: List[str],
    ) -> bool:
        """Determine if a qualified name matches an fqn, given the set of
        package names in the graph.

        :param List[str] qualified_name: The components of the selector or
            node name, split on '.'.
        :param Set[str] package_names: The set of package names in the graph.
        :param List[str] fqn: The node's fully qualified name in the graph.
        """
        if len(qualified_name) == 1 and fqn[-1] == qualified_name[0]:
            return True

        if qualified_name[0] in package_names:
            if is_selected_node(fqn, qualified_name):
                return True

        for package_name in package_names:
            local_qualified_node_name = [package_name] + qualified_name
            if is_selected_node(fqn, local_qualified_node_name):
                return True

        return False

    def search(
        self, included_nodes: Set[UniqueId], selector: str
    ) -> Iterator[UniqueId]:
        """Yield all nodes in the graph that match the selector.

        :param str selector: The selector or node name
        """
        qualified_name = selector.split(".")
        parsed_nodes = list(self.parsed_nodes(included_nodes))
        package_names = {n.package_name for _, n in parsed_nodes}
        for node, real_node in parsed_nodes:
            if self.node_is_match(
                qualified_name,
                package_names,
                real_node.fqn,
            ):
                yield node


class TagSelectorMethod(SelectorMethod):
    def search(
        self, included_nodes: Set[UniqueId], selector: str
    ) -> Iterator[UniqueId]:
        """Yields nodes from included that have the specified tag."""
        for node, real_node in self.all_nodes(included_nodes):
            if selector in real_node.tags:
                yield node


class SourceSelectorMethod(SelectorMethod):
    def search(
        self, included_nodes: Set[UniqueId], selector: str
    ) -> Iterator[UniqueId]:
        """Yields nodes from included that are the specified source."""
        parts = selector.split('.')
        target_package = SELECTOR_GLOB
        if len(parts) == 1:
            target_source, target_table = parts[0], None
        elif len(parts) == 2:
            target_source, target_table = parts
        elif len(parts) == 3:
            target_package, target_source, target_table = parts
        else:  # len(parts) > 3 or len(parts) == 0
            msg = (
                'Invalid source selector value "{}". Sources must be of the '
                'form `${{source_name}}`, '
                '`${{source_name}}.${{target_name}}`, or '
                '`${{package_name}}.${{source_name}}.${{target_name}}`'
            ).format(selector)
            raise RuntimeException(msg)

        for node, real_node in self.source_nodes(included_nodes):
            if target_package not in (real_node.package_name, SELECTOR_GLOB):
                continue
            if target_source not in (real_node.source_name, SELECTOR_GLOB):
                continue
            if target_table not in (None, real_node.name, SELECTOR_GLOB):
                continue

            yield node


class ExposureSelectorMethod(SelectorMethod):
    def search(
        self, included_nodes: Set[UniqueId], selector: str
    ) -> Iterator[UniqueId]:
        parts = selector.split('.')
        target_package = SELECTOR_GLOB
        if len(parts) == 1:
            target_name = parts[0]
        elif len(parts) == 2:
            target_package, target_name = parts
        else:
            msg = (
                'Invalid exposure selector value "{}". Exposures must be of '
                'the form ${{exposure_name}} or '
                '${{exposure_package.exposure_name}}'
            ).format(selector)
            raise RuntimeException(msg)

        for node, real_node in self.exposure_nodes(included_nodes):
            if target_package not in (real_node.package_name, SELECTOR_GLOB):
                continue
            if target_name not in (real_node.name, SELECTOR_GLOB):
                continue

            yield node


class PathSelectorMethod(SelectorMethod):
    def search(
        self, included_nodes: Set[UniqueId], selector: str
    ) -> Iterator[UniqueId]:
        """Yields nodes from included that match the given path."""
        # use '.' and not 'root' for easy comparison
        root = Path.cwd()
        paths = set(p.relative_to(root) for p in root.glob(selector))
        for node, real_node in self.all_nodes(included_nodes):
            if Path(real_node.root_path) != root:
                continue
            ofp = Path(real_node.original_file_path)
            if ofp in paths:
                yield node
            elif any(parent in paths for parent in ofp.parents):
                yield node


class PackageSelectorMethod(SelectorMethod):
    def search(
        self, included_nodes: Set[UniqueId], selector: str
    ) -> Iterator[UniqueId]:
        """Yields nodes from included that have the specified package."""
        for node, real_node in self.all_nodes(included_nodes):
            if real_node.package_name == selector:
                yield node


def _getattr_descend(obj: Any, attrs: List[str]) -> Any:
    value = obj
    for attr in attrs:
        try:
            value = getattr(value, attr)
        except AttributeError:
            # if it implements getitem (dict, list, ...), use that. On
            # failure, raise an attribute error instead of the KeyError,
            # TypeError, etc. that arbitrary getitem calls might raise
            try:
                value = value[attr]
            except Exception as exc:
                raise AttributeError(
                    f"'{type(value)}' object has no attribute '{attr}'"
                ) from exc
    return value
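# _getattr_descend walks a dotted path through attributes, falling back to
# item access; e.g. given a hypothetical config object where
#
#     cfg.materialized == 'table' and cfg.meta == {'owner': 'analytics'}
#
# it behaves like:
#
#     _getattr_descend(cfg, ['materialized'])   # -> 'table'
#     _getattr_descend(cfg, ['meta', 'owner'])  # -> 'analytics' (dict fallback)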

class CaseInsensitive(str):
    def __eq__(self, other):
        if isinstance(other, str):
            return self.upper() == other.upper()
        else:
            return self.upper() == other


class ConfigSelectorMethod(SelectorMethod):
    def search(
        self,
        included_nodes: Set[UniqueId],
        selector: Any,
    ) -> Iterator[UniqueId]:
        parts = self.arguments
        # special case: if the user wanted to compare test severity,
        # make the comparison case-insensitive
        if parts == ['severity']:
            selector = CaseInsensitive(selector)

        # searching sources is kind of useless now that source configs only
        # have 'enabled', which you can't really filter on anyway, but maybe
        # we'll add more someday, so search them anyway.
        for node, real_node in self.configurable_nodes(included_nodes):
            try:
                value = _getattr_descend(real_node.config, parts)
            except AttributeError:
                continue
            else:
                if selector == value:
                    yield node


class ResourceTypeSelectorMethod(SelectorMethod):
    def search(
        self, included_nodes: Set[UniqueId], selector: str
    ) -> Iterator[UniqueId]:
        try:
            resource_type = NodeType(selector)
        except ValueError as exc:
            raise RuntimeException(
                f'Invalid resource_type selector "{selector}"'
            ) from exc
        for node, real_node in self.parsed_nodes(included_nodes):
            if real_node.resource_type == resource_type:
                yield node


class TestNameSelectorMethod(SelectorMethod):
    def search(
        self, included_nodes: Set[UniqueId], selector: str
    ) -> Iterator[UniqueId]:
        for node, real_node in self.parsed_nodes(included_nodes):
            if isinstance(real_node, HasTestMetadata):
                if real_node.test_metadata.name == selector:
                    yield node


class TestTypeSelectorMethod(SelectorMethod):
    def search(
        self, included_nodes: Set[UniqueId], selector: str
    ) -> Iterator[UniqueId]:
        search_types: Tuple[Type, ...]
        if selector == 'schema':
            search_types = (ParsedSchemaTestNode, CompiledSchemaTestNode)
        elif selector == 'data':
            search_types = (ParsedDataTestNode, CompiledDataTestNode)
        else:
            raise RuntimeException(
                f'Invalid test type selector {selector}: expected "data" or '
                '"schema"'
            )

        for node, real_node in self.parsed_nodes(included_nodes):
            if isinstance(real_node, search_types):
                yield node


class StateSelectorMethod(SelectorMethod):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.macros_were_modified: Optional[List[str]] = None

    def _macros_modified(self) -> List[str]:
        # we checked in the caller!
        if self.previous_state is None or self.previous_state.manifest is None:
            raise InternalException(
                'No comparison manifest in _macros_modified'
            )
        old_macros = self.previous_state.manifest.macros
        new_macros = self.manifest.macros

        modified = []
        for uid, macro in new_macros.items():
            name = f'{macro.package_name}.{macro.name}'
            if uid in old_macros:
                old_macro = old_macros[uid]
                if macro.macro_sql != old_macro.macro_sql:
                    modified.append(f'{name} changed')
            else:
                modified.append(f'{name} added')

        for uid, macro in old_macros.items():
            if uid not in new_macros:
                modified.append(f'{macro.package_name}.{macro.name} removed')

        return modified[:3]

    def check_modified(
        self,
        old: Optional[SelectorTarget],
        new: SelectorTarget,
    ) -> bool:
        # check if there are any changes in macros; if so, log a warning the
        # first time
        if self.macros_were_modified is None:
            self.macros_were_modified = self._macros_modified()
            if self.macros_were_modified:
                log_str = ', '.join(self.macros_were_modified)
                logger.warning(warning_tag(
                    f'During a state comparison, dbt detected a change in '
                    f'macros. This will not be marked as a modification. Some '
                    f'macros: {log_str}'
                ))

        return not new.same_contents(old)  # type: ignore

    def check_new(
        self,
        old: Optional[SelectorTarget],
        new: SelectorTarget,
    ) -> bool:
        return old is None

    def search(
        self, included_nodes: Set[UniqueId], selector: str
    ) -> Iterator[UniqueId]:
        if self.previous_state is None or self.previous_state.manifest is None:
            raise RuntimeException(
                'Got a state selector method, but no comparison manifest'
            )

        state_checks = {
            'modified': self.check_modified,
            'new': self.check_new,
        }
        if selector in state_checks:
            checker = state_checks[selector]
        else:
            raise RuntimeException(
                f'Got an invalid selector "{selector}", expected one of '
                f'"{list(state_checks)}"'
            )

        manifest: WritableManifest = self.previous_state.manifest

        for node, real_node in self.all_nodes(included_nodes):
            previous_node: Optional[SelectorTarget] = None
            if node in manifest.nodes:
                previous_node = manifest.nodes[node]
            elif node in manifest.sources:
                previous_node = manifest.sources[node]
            elif node in manifest.exposures:
                previous_node = manifest.exposures[node]

            if checker(previous_node, real_node):
                yield node


class MethodManager:
    SELECTOR_METHODS: Dict[MethodName, Type[SelectorMethod]] = {
        MethodName.FQN: QualifiedNameSelectorMethod,
        MethodName.Tag: TagSelectorMethod,
        MethodName.Source: SourceSelectorMethod,
        MethodName.Path: PathSelectorMethod,
        MethodName.Package: PackageSelectorMethod,
        MethodName.Config: ConfigSelectorMethod,
        MethodName.TestName: TestNameSelectorMethod,
        MethodName.TestType: TestTypeSelectorMethod,
        MethodName.State: StateSelectorMethod,
        MethodName.Exposure: ExposureSelectorMethod,
    }

    def __init__(
        self,
        manifest: Manifest,
        previous_state: Optional[PreviousState],
    ):
        self.manifest = manifest
        self.previous_state = previous_state

    def get_method(
        self, method: MethodName, method_arguments: List[str]
    ) -> SelectorMethod:
        if method not in self.SELECTOR_METHODS:
            raise InternalException(
                f'Method name "{method}" is a valid node selection '
                f'method name, but it is not handled'
            )
        cls: Type[SelectorMethod] = self.SELECTOR_METHODS[method]
        return cls(self.manifest, self.previous_state, method_arguments)
core/dbt/graph/selector_spec.py (new file, 208 lines)
@@ -0,0 +1,208 @@
import os
import re
from abc import ABCMeta, abstractmethod
from dataclasses import dataclass

from typing import (
    Set, Iterator, List, Optional, Dict, Union, Any, Iterable, Tuple
)
from .graph import UniqueId
from .selector_methods import MethodName
from dbt.exceptions import RuntimeException, InvalidSelectorException


RAW_SELECTOR_PATTERN = re.compile(
    r'\A'
    r'(?P<childrens_parents>(\@))?'
    r'(?P<parents>((?P<parents_depth>(\d*))\+))?'
    r'((?P<method>([\w.]+)):)?(?P<value>(.*?))'
    r'(?P<children>(\+(?P<children_depth>(\d*))))?'
    r'\Z'
)
SELECTOR_METHOD_SEPARATOR = '.'


def _probably_path(value: str):
    """Decide if value is probably a path. Windows has two path separators,
    so we should check both sep ('\\') and altsep ('/') there.
    """
    if os.path.sep in value:
        return True
    elif os.path.altsep is not None and os.path.altsep in value:
        return True
    else:
        return False


def _match_to_int(match: Dict[str, str], key: str) -> Optional[int]:
    raw = match.get(key)
    # turn the empty string into None, too.
    if not raw:
        return None
    try:
        return int(raw)
    except ValueError as exc:
        raise RuntimeException(
            f'Invalid node spec - could not handle parent depth {raw}'
        ) from exc


SelectionSpec = Union[
    'SelectionCriteria',
    'SelectionIntersection',
    'SelectionDifference',
    'SelectionUnion',
]


@dataclass
class SelectionCriteria:
    raw: Any
    method: MethodName
    method_arguments: List[str]
    value: Any
    childrens_parents: bool
    parents: bool
    parents_depth: Optional[int]
    children: bool
    children_depth: Optional[int]

    def __post_init__(self):
        if self.children and self.childrens_parents:
            raise RuntimeException(
                f'Invalid node spec {self.raw} - "@" prefix and "+" suffix '
                'are incompatible'
            )

    @classmethod
    def default_method(cls, value: str) -> MethodName:
        if _probably_path(value):
            return MethodName.Path
        else:
            return MethodName.FQN

    @classmethod
    def parse_method(
        cls, groupdict: Dict[str, Any]
    ) -> Tuple[MethodName, List[str]]:
        raw_method = groupdict.get('method')
        if raw_method is None:
            return cls.default_method(groupdict['value']), []

        method_parts: List[str] = raw_method.split(SELECTOR_METHOD_SEPARATOR)
        try:
            method_name = MethodName(method_parts[0])
        except ValueError as exc:
            raise InvalidSelectorException(
                f"'{method_parts[0]}' is not a valid method name"
            ) from exc

        method_arguments: List[str] = method_parts[1:]

        return method_name, method_arguments

    @classmethod
    def from_dict(cls, raw: Any, dct: Dict[str, Any]) -> 'SelectionCriteria':
        if 'value' not in dct:
            raise RuntimeException(
                f'Invalid node spec "{raw}" - no search value!'
            )
        method_name, method_arguments = cls.parse_method(dct)

        parents_depth = _match_to_int(dct, 'parents_depth')
        children_depth = _match_to_int(dct, 'children_depth')
        return cls(
            raw=raw,
            method=method_name,
            method_arguments=method_arguments,
            value=dct['value'],
            childrens_parents=bool(dct.get('childrens_parents')),
            parents=bool(dct.get('parents')),
            parents_depth=parents_depth,
            children=bool(dct.get('children')),
            children_depth=children_depth,
        )

    @classmethod
    def dict_from_single_spec(cls, raw: str):
        result = RAW_SELECTOR_PATTERN.match(raw)
        if result is None:
            return {'error': 'Invalid selector spec'}
        dct: Dict[str, Any] = result.groupdict()
        method_name, method_arguments = cls.parse_method(dct)
        meth_name = str(method_name)
        if method_arguments:
            meth_name = meth_name + '.' + '.'.join(method_arguments)
        dct['method'] = meth_name
        dct = {k: v for k, v in dct.items() if (v is not None and v != '')}
        if 'childrens_parents' in dct:
            dct['childrens_parents'] = bool(dct.get('childrens_parents'))
        if 'parents' in dct:
            dct['parents'] = bool(dct.get('parents'))
        if 'children' in dct:
            dct['children'] = bool(dct.get('children'))
        return dct

    @classmethod
    def from_single_spec(cls, raw: str) -> 'SelectionCriteria':
        result = RAW_SELECTOR_PATTERN.match(raw)
        if result is None:
            # bad spec!
            raise RuntimeException(f'Invalid selector spec "{raw}"')

        return cls.from_dict(raw, result.groupdict())


class BaseSelectionGroup(Iterable[SelectionSpec], metaclass=ABCMeta):
    def __init__(
        self,
        components: Iterable[SelectionSpec],
        expect_exists: bool = False,
        raw: Any = None,
    ):
        self.components: List[SelectionSpec] = list(components)
        self.expect_exists = expect_exists
        self.raw = raw

    def __iter__(self) -> Iterator[SelectionSpec]:
        for component in self.components:
            yield component

    @abstractmethod
    def combine_selections(
        self,
        selections: List[Set[UniqueId]],
    ) -> Set[UniqueId]:
        raise NotImplementedError(
            '_combine_selections not implemented!'
        )

    def combined(self, selections: List[Set[UniqueId]]) -> Set[UniqueId]:
        if not selections:
            return set()

        return self.combine_selections(selections)


class SelectionIntersection(BaseSelectionGroup):
    def combine_selections(
        self,
        selections: List[Set[UniqueId]],
    ) -> Set[UniqueId]:
        return set.intersection(*selections)


class SelectionDifference(BaseSelectionGroup):
    def combine_selections(
        self,
        selections: List[Set[UniqueId]],
    ) -> Set[UniqueId]:
        return set.difference(*selections)


class SelectionUnion(BaseSelectionGroup):
    def combine_selections(
        self,
        selections: List[Set[UniqueId]],
    ) -> Set[UniqueId]:
        return set.union(*selections)
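To make `RAW_SELECTOR_PATTERN` concrete, here is what it pulls out of a typical spec string (the selector value is illustrative):

spec = SelectionCriteria.from_single_spec('2+tag:nightly+')

assert spec.method == MethodName.Tag
assert spec.value == 'nightly'
assert spec.parents and spec.parents_depth == 2       # '2+' prefix
assert spec.children and spec.children_depth is None  # bare '+' suffix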
core/dbt/helper_types.py (new file, 82 lines)
@@ -0,0 +1,82 @@
# never name this package "types", or mypy will crash in ugly ways
from dataclasses import dataclass
from datetime import timedelta
from pathlib import Path
from typing import NewType, Tuple, AbstractSet

from hologram import (
    FieldEncoder, JsonSchemaMixin, JsonDict, ValidationError
)
from hologram.helpers import StrEnum

Port = NewType('Port', int)


class PortEncoder(FieldEncoder):
    @property
    def json_schema(self):
        return {'type': 'integer', 'minimum': 0, 'maximum': 65535}


class TimeDeltaFieldEncoder(FieldEncoder[timedelta]):
    """Encodes timedeltas to dictionaries"""

    def to_wire(self, value: timedelta) -> float:
        return value.total_seconds()

    def to_python(self, value) -> timedelta:
        if isinstance(value, timedelta):
            return value
        try:
            return timedelta(seconds=value)
        except TypeError:
            raise ValidationError(
                'cannot encode {} into timedelta'.format(value)
            ) from None

    @property
    def json_schema(self) -> JsonDict:
        return {'type': 'number'}


class PathEncoder(FieldEncoder):
    def to_wire(self, value: Path) -> str:
        return str(value)

    def to_python(self, value) -> Path:
        if isinstance(value, Path):
            return value
        try:
            return Path(value)
        except TypeError:
            raise ValidationError(
                'cannot encode {} into a Path'.format(value)
            ) from None

    @property
    def json_schema(self) -> JsonDict:
        return {'type': 'string'}


class NVEnum(StrEnum):
    novalue = 'novalue'

    def __eq__(self, other):
        return isinstance(other, NVEnum)


@dataclass
class NoValue(JsonSchemaMixin):
    """Sometimes, you want a way to say none that isn't None"""
    novalue: NVEnum = NVEnum.novalue


JsonSchemaMixin.register_field_encoders({
    Port: PortEncoder(),
    timedelta: TimeDeltaFieldEncoder(),
    Path: PathEncoder(),
})


FQNPath = Tuple[str, ...]
PathSet = AbstractSet[FQNPath]
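The encoders are symmetric wire/python converters; a quick round trip, runnable on its own:

from datetime import timedelta

enc = TimeDeltaFieldEncoder()
assert enc.to_wire(timedelta(minutes=5)) == 300.0
assert enc.to_python(300.0) == timedelta(minutes=5)
assert enc.to_python(timedelta(seconds=1)) == timedelta(seconds=1)  # passthrough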
core/dbt/hooks.py (new file, 21 lines)
@@ -0,0 +1,21 @@
from hologram.helpers import StrEnum
import json

from typing import Union, Dict, Any


class ModelHookType(StrEnum):
    PreHook = 'pre-hook'
    PostHook = 'post-hook'


def get_hook_dict(source: Union[str, Dict[str, Any]]) -> Dict[str, Any]:
    """From a source string-or-dict, get a dictionary that can be passed to
    Hook.from_dict
    """
    if isinstance(source, dict):
        return source
    try:
        return json.loads(source)
    except ValueError:
        return {'sql': source}
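The three accepted input shapes, for reference (values are illustrative):

assert get_hook_dict({'sql': 'vacuum'}) == {'sql': 'vacuum'}    # dict: passed through
assert get_hook_dict('{"sql": "vacuum"}') == {'sql': 'vacuum'}  # JSON string: decoded
assert get_hook_dict('vacuum') == {'sql': 'vacuum'}             # bare SQL: wrapped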
core/dbt/include/global_project/__init__.py (new file, 7 lines)
@@ -0,0 +1,7 @@
import os

PACKAGE_PATH = os.path.dirname(__file__)
PROJECT_NAME = 'dbt'

DOCS_INDEX_FILE_PATH = os.path.normpath(
    os.path.join(PACKAGE_PATH, '..', "index.html"))
@@ -1,5 +1,6 @@

config-version: 2
name: dbt
version: 1.0

docs-paths: ['docs']
macro-paths: ["macros"]
core/dbt/include/global_project/docs/overview.md (new file, 43 lines)
@@ -0,0 +1,43 @@
{% docs __overview__ %}

### Welcome!

Welcome to the auto-generated documentation for your dbt project!

### Navigation

You can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models
in your project.

#### Project Tab
The `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the
models defined in your dbt project, as well as models imported from dbt packages.

#### Database Tab
The `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view
shows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown
in this interface, as they do not exist in the database.

### Graph Exploration
You can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.

On model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`
button at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,
or are built from, the model you're exploring.

Once expanded, you'll be able to use the `--models` and `--exclude` model selection syntax to filter the
models in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).

Note that you can also right-click on models to interactively filter and explore the graph.

---

### More information

- [What is dbt](https://docs.getdbt.com/docs/overview)?
- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)
- [Installation](https://docs.getdbt.com/docs/installation)
- Join the [chat](https://community.getdbt.com/) on Slack for live questions and support.

{% enddocs %}
core/dbt/include/global_project/macros/adapters/common.sql (new file, 290 lines)
@@ -0,0 +1,290 @@
{% macro get_columns_in_query(select_sql) -%}
  {{ return(adapter.dispatch('get_columns_in_query')(select_sql)) }}
{% endmacro %}

{% macro default__get_columns_in_query(select_sql) %}
  {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}
    select * from (
      {{ select_sql }}
    ) as __dbt_sbq
    where false
    limit 0
  {% endcall %}

  {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}
{% endmacro %}

{% macro create_schema(relation) -%}
  {{ adapter.dispatch('create_schema')(relation) }}
{% endmacro %}

{% macro default__create_schema(relation) -%}
  {%- call statement('create_schema') -%}
    create schema if not exists {{ relation.without_identifier() }}
  {% endcall %}
{% endmacro %}

{% macro drop_schema(relation) -%}
  {{ adapter.dispatch('drop_schema')(relation) }}
{% endmacro %}

{% macro default__drop_schema(relation) -%}
  {%- call statement('drop_schema') -%}
    drop schema if exists {{ relation.without_identifier() }} cascade
  {% endcall %}
{% endmacro %}

{% macro create_table_as(temporary, relation, sql) -%}
  {{ adapter.dispatch('create_table_as')(temporary, relation, sql) }}
{%- endmacro %}

{% macro default__create_table_as(temporary, relation, sql) -%}
  {%- set sql_header = config.get('sql_header', none) -%}

  {{ sql_header if sql_header is not none }}

  create {% if temporary: -%}temporary{%- endif %} table
    {{ relation.include(database=(not temporary), schema=(not temporary)) }}
  as (
    {{ sql }}
  );

{% endmacro %}

{% macro create_view_as(relation, sql) -%}
  {{ adapter.dispatch('create_view_as')(relation, sql) }}
{%- endmacro %}

{% macro default__create_view_as(relation, sql) -%}
  {%- set sql_header = config.get('sql_header', none) -%}

  {{ sql_header if sql_header is not none }}
  create view {{ relation }} as (
    {{ sql }}
  );
{% endmacro %}


{% macro get_catalog(information_schema, schemas) -%}
  {{ return(adapter.dispatch('get_catalog')(information_schema, schemas)) }}
{%- endmacro %}

{% macro default__get_catalog(information_schema, schemas) -%}

  {% set typename = adapter.type() %}
  {% set msg -%}
    get_catalog not implemented for {{ typename }}
  {%- endset %}

  {{ exceptions.raise_compiler_error(msg) }}
{% endmacro %}


{% macro get_columns_in_relation(relation) -%}
  {{ return(adapter.dispatch('get_columns_in_relation')(relation)) }}
{% endmacro %}

{% macro sql_convert_columns_in_relation(table) -%}
  {% set columns = [] %}
  {% for row in table %}
    {% do columns.append(api.Column(*row)) %}
  {% endfor %}
  {{ return(columns) }}
{% endmacro %}

{% macro default__get_columns_in_relation(relation) -%}
  {{ exceptions.raise_not_implemented(
    'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}
{% endmacro %}

{% macro alter_column_type(relation, column_name, new_column_type) -%}
  {{ return(adapter.dispatch('alter_column_type')(relation, column_name, new_column_type)) }}
{% endmacro %}


{% macro alter_column_comment(relation, column_dict) -%}
  {{ return(adapter.dispatch('alter_column_comment')(relation, column_dict)) }}
{% endmacro %}

{% macro default__alter_column_comment(relation, column_dict) -%}
  {{ exceptions.raise_not_implemented(
    'alter_column_comment macro not implemented for adapter '+adapter.type()) }}
{% endmacro %}

{% macro alter_relation_comment(relation, relation_comment) -%}
  {{ return(adapter.dispatch('alter_relation_comment')(relation, relation_comment)) }}
{% endmacro %}

{% macro default__alter_relation_comment(relation, relation_comment) -%}
  {{ exceptions.raise_not_implemented(
    'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}
{% endmacro %}

{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}
  {{ return(adapter.dispatch('persist_docs')(relation, model, for_relation, for_columns)) }}
{% endmacro %}

{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}
  {% if for_relation and config.persist_relation_docs() and model.description %}
    {% do run_query(alter_relation_comment(relation, model.description)) %}
  {% endif %}

  {% if for_columns and config.persist_column_docs() and model.columns %}
    {% do run_query(alter_column_comment(relation, model.columns)) %}
  {% endif %}
{% endmacro %}


{% macro default__alter_column_type(relation, column_name, new_column_type) -%}
  {#
    1. Create a new column (w/ temp name and correct type)
    2. Copy data over to it
    3. Drop the existing column (cascade!)
    4. Rename the new column to existing column
  #}
  {%- set tmp_column = column_name + "__dbt_alter" -%}

  {% call statement('alter_column_type') %}
    alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};
    update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};
    alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;
    alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}
  {% endcall %}

{% endmacro %}


{% macro drop_relation(relation) -%}
  {{ return(adapter.dispatch('drop_relation')(relation)) }}
{% endmacro %}


{% macro default__drop_relation(relation) -%}
  {% call statement('drop_relation', auto_begin=False) -%}
    drop {{ relation.type }} if exists {{ relation }} cascade
  {%- endcall %}
{% endmacro %}

{% macro truncate_relation(relation) -%}
  {{ return(adapter.dispatch('truncate_relation')(relation)) }}
{% endmacro %}


{% macro default__truncate_relation(relation) -%}
  {% call statement('truncate_relation') -%}
    truncate table {{ relation }}
  {%- endcall %}
{% endmacro %}

{% macro rename_relation(from_relation, to_relation) -%}
  {{ return(adapter.dispatch('rename_relation')(from_relation, to_relation)) }}
{% endmacro %}

{% macro default__rename_relation(from_relation, to_relation) -%}
  {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}
  {% call statement('rename_relation') -%}
    alter table {{ from_relation }} rename to {{ target_name }}
  {%- endcall %}
{% endmacro %}


{% macro information_schema_name(database) %}
  {{ return(adapter.dispatch('information_schema_name')(database)) }}
{% endmacro %}

{% macro default__information_schema_name(database) -%}
  {%- if database -%}
    {{ database }}.INFORMATION_SCHEMA
  {%- else -%}
    INFORMATION_SCHEMA
  {%- endif -%}
{%- endmacro %}


{% macro list_schemas(database) -%}
  {{ return(adapter.dispatch('list_schemas')(database)) }}
{% endmacro %}

{% macro default__list_schemas(database) -%}
  {% set sql %}
    select distinct schema_name
    from {{ information_schema_name(database) }}.SCHEMATA
    where catalog_name ilike '{{ database }}'
  {% endset %}
  {{ return(run_query(sql)) }}
{% endmacro %}


{% macro check_schema_exists(information_schema, schema) -%}
  {{ return(adapter.dispatch('check_schema_exists')(information_schema, schema)) }}
{% endmacro %}

{% macro default__check_schema_exists(information_schema, schema) -%}
  {% set sql -%}
    select count(*)
    from {{ information_schema.replace(information_schema_view='SCHEMATA') }}
    where catalog_name='{{ information_schema.database }}'
      and schema_name='{{ schema }}'
  {%- endset %}
  {{ return(run_query(sql)) }}
{% endmacro %}


{% macro list_relations_without_caching(schema_relation) %}
  {{ return(adapter.dispatch('list_relations_without_caching')(schema_relation)) }}
{% endmacro %}


{% macro default__list_relations_without_caching(schema_relation) %}
  {{ exceptions.raise_not_implemented(
    'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}
{% endmacro %}


{% macro current_timestamp() -%}
  {{ adapter.dispatch('current_timestamp')() }}
{%- endmacro %}


{% macro default__current_timestamp() -%}
  {{ exceptions.raise_not_implemented(
    'current_timestamp macro not implemented for adapter '+adapter.type()) }}
{%- endmacro %}


{% macro collect_freshness(source, loaded_at_field, filter) %}
  {{ return(adapter.dispatch('collect_freshness')(source, loaded_at_field, filter)) }}
{% endmacro %}


{% macro default__collect_freshness(source, loaded_at_field, filter) %}
  {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}
    select
      max({{ loaded_at_field }}) as max_loaded_at,
      {{ current_timestamp() }} as snapshotted_at
    from {{ source }}
    {% if filter %}
      where {{ filter }}
    {% endif %}
  {% endcall %}
  {{ return(load_result('collect_freshness').table) }}
{% endmacro %}

{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}
  {{ return(adapter.dispatch('make_temp_relation')(base_relation, suffix)) }}
{% endmacro %}

{% macro default__make_temp_relation(base_relation, suffix) %}
  {% set tmp_identifier = base_relation.identifier ~ suffix %}
  {% set tmp_relation = base_relation.incorporate(
      path={"identifier": tmp_identifier}) -%}

  {% do return(tmp_relation) %}
{% endmacro %}

{% macro set_sql_header(config) -%}
  {{ config.set('sql_header', caller()) }}
{%- endmacro %}
core/dbt/include/global_project/macros/core.sql (new file, 30 lines)
@@ -0,0 +1,30 @@
{% macro statement(name=None, fetch_result=False, auto_begin=True) -%}
  {%- if execute: -%}
    {%- set sql = caller() -%}

    {%- if name == 'main' -%}
      {{ log('Writing runtime SQL for node "{}"'.format(model['unique_id'])) }}
      {{ write(sql) }}
    {%- endif -%}

    {%- set res, table = adapter.execute(sql, auto_begin=auto_begin, fetch=fetch_result) -%}
    {%- if name is not none -%}
      {{ store_result(name, response=res, agate_table=table) }}
    {%- endif -%}

  {%- endif -%}
{%- endmacro %}

{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}
  {%- set sql = caller() -%}

  {%- if name == 'main' -%}
    {{ log('Writing runtime SQL for node "{}"'.format(model['unique_id'])) }}
    {{ write(sql) }}
  {%- endif -%}

  {%- if name is not none -%}
    {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}
  {%- endif -%}

{%- endmacro %}
core/dbt/include/global_project/macros/etc/datetime.sql (new file, 60 lines)
@@ -0,0 +1,60 @@
{% macro convert_datetime(date_str, date_fmt) %}

  {% set error_msg -%}
      The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'
  {%- endset %}

  {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}
  {{ return(res) }}

{% endmacro %}

{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt="%Y%m%d", out_fmt="%Y%m%d") %}
  {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}

  {% set start_date = convert_datetime(start_date_str, in_fmt) %}
  {% set end_date = convert_datetime(end_date_str, in_fmt) %}

  {% set day_count = (end_date - start_date).days %}
  {% if day_count < 0 %}
    {% set msg -%}
        Partition start date is after the end date ({{ start_date }}, {{ end_date }})
    {%- endset %}

    {{ exceptions.raise_compiler_error(msg, model) }}
  {% endif %}

  {% set date_list = [] %}
  {% for i in range(0, day_count + 1) %}
    {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}
    {% if not out_fmt %}
      {% set _ = date_list.append(the_date) %}
    {% else %}
      {% set _ = date_list.append(the_date.strftime(out_fmt)) %}
    {% endif %}
  {% endfor %}

  {{ return(date_list) }}
{% endmacro %}

{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}
  {% set partition_range = (raw_partition_date | string).split(",") %}

  {% if (partition_range | length) == 1 %}
    {% set start_date = partition_range[0] %}
    {% set end_date = none %}
  {% elif (partition_range | length) == 2 %}
    {% set start_date = partition_range[0] %}
    {% set end_date = partition_range[1] %}
  {% else %}
    {{ exceptions.raise_compiler_error("Invalid partition time. Expected format: {Start Date}[,{End Date}]. Got: " ~ raw_partition_date) }}
  {% endif %}

  {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}
{% endmacro %}

{% macro py_current_timestring() %}
  {% set dt = modules.datetime.datetime.now() %}
  {% do return(dt.strftime("%Y%m%d%H%M%S%f")) %}
{% endmacro %}
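For reference, the date arithmetic in `dates_in_range` mirrors this plain-Python loop (a sketch of the same logic, not a dbt API):

from datetime import datetime, timedelta

def dates_in_range(start_str, end_str=None, in_fmt='%Y%m%d', out_fmt='%Y%m%d'):
    # Inclusive day-by-day range between two formatted date strings.
    start = datetime.strptime(start_str.strip(), in_fmt)
    end = datetime.strptime((end_str or start_str).strip(), in_fmt)
    if (end - start).days < 0:
        raise ValueError('start date is after the end date')
    return [(start + timedelta(days=i)).strftime(out_fmt)
            for i in range((end - start).days + 1)]

assert dates_in_range('20200101', '20200103') == [
    '20200101', '20200102', '20200103'
]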