Mirror of https://github.com/dbt-labs/dbt-core, synced 2025-12-18 23:41:28 +00:00

Compare commits: arky/add-p...v1.8.8 (501 commits)
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 1.7.0a1
+current_version = 1.8.8
 parse = (?P<major>[\d]+) # major version number
 	\.(?P<minor>[\d]+) # minor version number
 	\.(?P<patch>[\d]+) # patch version number
@@ -35,13 +35,3 @@ first_value = 1
 [bumpversion:file:core/setup.py]

 [bumpversion:file:core/dbt/version.py]
-
-[bumpversion:file:plugins/postgres/setup.py]
-
-[bumpversion:file:plugins/postgres/dbt/adapters/postgres/__version__.py]
-
-[bumpversion:file:docker/Dockerfile]
-
-[bumpversion:file:tests/adapter/setup.py]
-
-[bumpversion:file:tests/adapter/dbt/tests/adapter/__version__.py]
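The new `current_version` can be sanity-checked against the `parse` pattern in the hunk above. A minimal Python sketch, assuming the inline `#` comments mean the pattern is compiled in verbose mode (the `PARSE` constant below is ours, not bumpversion's):

```python
import re

# The multi-line `parse` value from the hunk above, joined into one pattern.
# Assumption: inline comments imply verbose-mode compilation, so whitespace
# and comments inside the pattern are ignored by the regex engine.
PARSE = r"""
(?P<major>[\d]+)    # major version number
\.(?P<minor>[\d]+)  # minor version number
\.(?P<patch>[\d]+)  # patch version number
"""

m = re.match(PARSE, "1.8.8", re.VERBOSE)
assert m is not None
assert m.group("major", "minor", "patch") == ("1", "8", "8")
```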
@@ -3,6 +3,7 @@
 For information on prior major and minor releases, see their changelogs:

+* [1.8](https://github.com/dbt-labs/dbt-core/blob/1.8.latest/CHANGELOG.md)
 * [1.7](https://github.com/dbt-labs/dbt-core/blob/1.7.latest/CHANGELOG.md)
 * [1.6](https://github.com/dbt-labs/dbt-core/blob/1.6.latest/CHANGELOG.md)
 * [1.5](https://github.com/dbt-labs/dbt-core/blob/1.5.latest/CHANGELOG.md)
 * [1.4](https://github.com/dbt-labs/dbt-core/blob/1.4.latest/CHANGELOG.md)
276 .changes/1.8.0.md Normal file
@@ -0,0 +1,276 @@
## dbt-core 1.8.0 - May 09, 2024

### Breaking Changes

- Remove adapter.get_compiler interface ([#9148](https://github.com/dbt-labs/dbt-core/issues/9148))
- Move AdapterLogger to adapters folder ([#9151](https://github.com/dbt-labs/dbt-core/issues/9151))
- Rm --dry-run flag from 'dbt deps --add-package', in favor of just 'dbt deps --lock' ([#9100](https://github.com/dbt-labs/dbt-core/issues/9100))
- move event manager setup back to core, remove ref to global EVENT_MANAGER and clean up event manager functions ([#9150](https://github.com/dbt-labs/dbt-core/issues/9150))
- Remove dbt-tests-adapter and dbt-postgres packages from dbt-core ([#9455](https://github.com/dbt-labs/dbt-core/issues/9455))
- Update the default behaviour of require_explicit_package_overrides_for_builtin_materializations to True. ([#10062](https://github.com/dbt-labs/dbt-core/issues/10062))

### Features

- Initial implementation of unit testing ([#8287](https://github.com/dbt-labs/dbt-core/issues/8287))
- Unit test manifest artifacts and selection ([#8295](https://github.com/dbt-labs/dbt-core/issues/8295))
- Support config with tags & meta for unit tests ([#8294](https://github.com/dbt-labs/dbt-core/issues/8294))
- Allow adapters to include package logs in dbt standard logging ([#7859](https://github.com/dbt-labs/dbt-core/issues/7859))
- Enable inline csv fixtures in unit tests ([#8626](https://github.com/dbt-labs/dbt-core/issues/8626))
- Add drop_schema_named macro ([#8025](https://github.com/dbt-labs/dbt-core/issues/8025))
- migrate utils to common and adapters folders ([#8924](https://github.com/dbt-labs/dbt-core/issues/8924))
- Move Agate helper client into common ([#8926](https://github.com/dbt-labs/dbt-core/issues/8926))
- remove usage of dbt.config.PartialProject from dbt/adapters ([#8928](https://github.com/dbt-labs/dbt-core/issues/8928))
- Add exports to SavedQuery spec ([#8892](https://github.com/dbt-labs/dbt-core/issues/8892))
- Support unit testing incremental models ([#8422](https://github.com/dbt-labs/dbt-core/issues/8422))
- Add support of csv file fixtures to unit testing ([#8290](https://github.com/dbt-labs/dbt-core/issues/8290))
- Remove legacy logger ([#8027](https://github.com/dbt-labs/dbt-core/issues/8027))
- Unit tests support --defer and state:modified ([#8517](https://github.com/dbt-labs/dbt-core/issues/8517))
- Support setting export configs hierarchically via saved query and project configs ([#8956](https://github.com/dbt-labs/dbt-core/issues/8956))
- Support source inputs in unit tests ([#8507](https://github.com/dbt-labs/dbt-core/issues/8507))
- Use daff to render diff displayed in stdout when unit test fails ([#8558](https://github.com/dbt-labs/dbt-core/issues/8558))
- Global config for --target and --profile CLI flags and DBT_TARGET and DBT_PROFILE environment variables. ([#7798](https://github.com/dbt-labs/dbt-core/issues/7798))
- Move unit testing to test command ([#8979](https://github.com/dbt-labs/dbt-core/issues/8979))
- Support --empty flag for schema-only dry runs ([#8971](https://github.com/dbt-labs/dbt-core/issues/8971))
- Support unit tests in non-root packages ([#8285](https://github.com/dbt-labs/dbt-core/issues/8285))
- Convert the `tests` config to `data_tests` in both dbt_project.yml and schema files. ([#8699](https://github.com/dbt-labs/dbt-core/issues/8699))
- Make fixture files full-fledged parts of the manifest and enable partial parsing ([#9067](https://github.com/dbt-labs/dbt-core/issues/9067))
- Adds support for parsing conversion metric related properties for the semantic layer. ([#9203](https://github.com/dbt-labs/dbt-core/issues/9203))
- Package selector syntax for the current package ([#6891](https://github.com/dbt-labs/dbt-core/issues/6891))
- In build command run unit tests before models ([#9128](https://github.com/dbt-labs/dbt-core/issues/9128))
- Move flags from UserConfig in profiles.yml to flags in dbt_project.yml ([#9183](https://github.com/dbt-labs/dbt-core/issues/9183))
- Added hook support for `dbt source freshness` ([#5609](https://github.com/dbt-labs/dbt-core/issues/5609))
- Align with order of unit test output when `actual` differs from `expected` ([#9370](https://github.com/dbt-labs/dbt-core/issues/9370))
- Added support for external nodes in unit test nodes ([#8944](https://github.com/dbt-labs/dbt-core/issues/8944))
- Enable unit testing versioned models ([#9344](https://github.com/dbt-labs/dbt-core/issues/9344))
- Enable list command for unit tests ([#8508](https://github.com/dbt-labs/dbt-core/issues/8508))
- Integration Test Optimizations ([#9498](https://github.com/dbt-labs/dbt-core/issues/9498))
- Accelerate integration tests with caching. ([#9498](https://github.com/dbt-labs/dbt-core/issues/9498))
- Cache environment variables ([#9489](https://github.com/dbt-labs/dbt-core/issues/9489))
- Support meta at the config level for Metric nodes ([#9441](https://github.com/dbt-labs/dbt-core/issues/9441))
- Add cache to SavedQuery config ([#9540](https://github.com/dbt-labs/dbt-core/issues/9540))
- Support scrubbing secret vars ([#7247](https://github.com/dbt-labs/dbt-core/issues/7247))
- Allow excluding resource types for build, list, and clone commands, and provide env vars ([#9237](https://github.com/dbt-labs/dbt-core/issues/9237))
- SourceDefinition.meta represents source-level and table-level meta properties, instead of only table-level ([#9766](https://github.com/dbt-labs/dbt-core/issues/9766))
- Allow metrics in semantic layer filters. ([#9804](https://github.com/dbt-labs/dbt-core/issues/9804))
- Add wildcard support to the group selector method ([#9811](https://github.com/dbt-labs/dbt-core/issues/9811))
- source freshness precomputes metadata-based freshness in batch, if possible ([#8705](https://github.com/dbt-labs/dbt-core/issues/8705))
- Better error message when trying to select a disabled model ([#9747](https://github.com/dbt-labs/dbt-core/issues/9747))
- Support SQL in unit testing fixtures ([#9405](https://github.com/dbt-labs/dbt-core/issues/9405))
- Add require_explicit_package_overrides_for_builtin_materializations to dbt_project.yml flags, which can be used to opt-out of overriding built-in materializations from packages ([#10007](https://github.com/dbt-labs/dbt-core/issues/10007))
- add --empty flag to dbt build command ([#10026](https://github.com/dbt-labs/dbt-core/issues/10026))
- Ability to `silence` warnings via `warn_error_options` ([#9644](https://github.com/dbt-labs/dbt-core/issues/9644))
- Allow aliases `error` for `include` and `warn` for `exclude` in `warn_error_options` ([#9644](https://github.com/dbt-labs/dbt-core/issues/9644))
- Add unit_test: selection method ([#10053](https://github.com/dbt-labs/dbt-core/issues/10053))

### Fixes

- For packages installed with tarball method, fetch metadata to resolve nested dependencies ([#8621](https://github.com/dbt-labs/dbt-core/issues/8621))
- Fix partial parsing not working for semantic model change ([#8859](https://github.com/dbt-labs/dbt-core/issues/8859))
- Handle unknown `type_code` for model contracts ([#8877](https://github.com/dbt-labs/dbt-core/issues/8877), [#8353](https://github.com/dbt-labs/dbt-core/issues/8353))
- Rework get_catalog implementation to retain previous adapter interface semantics ([#8846](https://github.com/dbt-labs/dbt-core/issues/8846))
- Add back contract enforcement for temporary tables on postgres ([#8857](https://github.com/dbt-labs/dbt-core/issues/8857))
- Add version to fqn when version==0 ([#8836](https://github.com/dbt-labs/dbt-core/issues/8836))
- Fix cased comparison in catalog-retrieval function. ([#8939](https://github.com/dbt-labs/dbt-core/issues/8939))
- Catalog queries now assign the correct type to materialized views ([#8864](https://github.com/dbt-labs/dbt-core/issues/8864))
- Fix compilation exception running empty seed file and support new Integer agate data_type ([#8895](https://github.com/dbt-labs/dbt-core/issues/8895))
- Make relation filtering None-tolerant for maximal flexibility across adapters. ([#8974](https://github.com/dbt-labs/dbt-core/issues/8974))
- Update run_results.json from previous versions of dbt to support deferral and rerun from failure ([#9010](https://github.com/dbt-labs/dbt-core/issues/9010))
- Use MANIFEST.in to recursively include all jinja templates; fixes issue where some templates were not included in the distribution ([#9016](https://github.com/dbt-labs/dbt-core/issues/9016))
- Fix git repository with subdirectory for Deps ([#9000](https://github.com/dbt-labs/dbt-core/issues/9000))
- Use seed file from disk for unit testing if rows not specified in YAML config ([#8652](https://github.com/dbt-labs/dbt-core/issues/8652))
- Fix formatting of tarball information in packages-lock.yml ([#9062](https://github.com/dbt-labs/dbt-core/issues/9062))
- deps: Lock git packages to commit SHA during resolution ([#9050](https://github.com/dbt-labs/dbt-core/issues/9050))
- deps: Use PackageRenderer to read package-lock.json ([#9127](https://github.com/dbt-labs/dbt-core/issues/9127))
- Ensure we produce valid jsonschema schemas for manifest, catalog, run-results, and sources ([#8991](https://github.com/dbt-labs/dbt-core/issues/8991))
- Get sources working again in dbt docs generate ([#9119](https://github.com/dbt-labs/dbt-core/issues/9119))
- Fix parsing f-strings in python models ([#6976](https://github.com/dbt-labs/dbt-core/issues/6976))
- Preserve the value of vars and the --full-refresh flags when using retry. ([#9112](https://github.com/dbt-labs/dbt-core/issues/9112))
- fix lock-file bad indentation ([#9319](https://github.com/dbt-labs/dbt-core/issues/9319))
- fix configuration of turning test warnings into failures with WARN_ERROR_OPTIONS ([#7761](https://github.com/dbt-labs/dbt-core/issues/7761))
- Support reasonably long unit test names ([#9015](https://github.com/dbt-labs/dbt-core/issues/9015))
- Fix back-compat parsing for model-level 'tests', source table-level 'tests', and 'tests' defined on model versions ([#9411](https://github.com/dbt-labs/dbt-core/issues/9411))
- Fix retry command run from CLI ([#9444](https://github.com/dbt-labs/dbt-core/issues/9444))
- Fix seed and source selection in `dbt docs generate` ([#9161](https://github.com/dbt-labs/dbt-core/issues/9161))
- Add TestGenerateCatalogWithExternalNodes, include empty nodes in node selection during docs generate ([#9456](https://github.com/dbt-labs/dbt-core/issues/9456))
- Fix node type plurals in FoundStats log message ([#9464](https://github.com/dbt-labs/dbt-core/issues/9464))
- Run manifest upgrade preprocessing on any older manifest version, including v11 ([#9487](https://github.com/dbt-labs/dbt-core/issues/9487))
- Update 'compiled_code' context member logic to route based on command ('clone' or not). Reimplement 'sql' context member as wrapper of 'compiled_code'. ([#9502](https://github.com/dbt-labs/dbt-core/issues/9502))
- Fix bug where Semantic Layer filter strings are parsed into lists. ([#9507](https://github.com/dbt-labs/dbt-core/issues/9507))
- Initialize invocation context before test fixtures are built. ([#9489](https://github.com/dbt-labs/dbt-core/issues/9489))
- Fix conflict with newer versions of Snowplow tracker ([#8719](https://github.com/dbt-labs/dbt-core/issues/8719))
- When patching versioned models, set constraints after config ([#9364](https://github.com/dbt-labs/dbt-core/issues/9364))
- only include unmodified semantic models in state:modified selection ([#9548](https://github.com/dbt-labs/dbt-core/issues/9548))
- Set query headers when manifest is passed in to dbtRunner ([#9546](https://github.com/dbt-labs/dbt-core/issues/9546))
- Store node_info in node associated logging events ([#9557](https://github.com/dbt-labs/dbt-core/issues/9557))
- Fix Semantic Model Compare node relations ([#9548](https://github.com/dbt-labs/dbt-core/issues/9548))
- Tighten exception handling to avoid worker thread hangs. ([#9583](https://github.com/dbt-labs/dbt-core/issues/9583))
- Clearer no-op logging in stubbed SavedQueryRunner ([#9533](https://github.com/dbt-labs/dbt-core/issues/9533))
- Fix node_info contextvar handling so incorrect node_info doesn't persist ([#8866](https://github.com/dbt-labs/dbt-core/issues/8866))
- Add target-path to retry ([#8948](https://github.com/dbt-labs/dbt-core/issues/8948))
- Do not add duplicate input_measures ([#9360](https://github.com/dbt-labs/dbt-core/issues/9360))
- Throw a ParsingError if a primary key constraint is defined on multiple columns or at both the column and model level. ([#9581](https://github.com/dbt-labs/dbt-core/issues/9581))
- Bug fix: don't parse Jinja in filters for input metrics or measures. ([#9582](https://github.com/dbt-labs/dbt-core/issues/9582))
- Fix traceback parsing for exceptions raised due to csv fixtures moved into or out of fixture/subfolders. ([#9570](https://github.com/dbt-labs/dbt-core/issues/9570))
- Fix partial parsing `KeyError` on deleted schema files ([#8860](https://github.com/dbt-labs/dbt-core/issues/8860))
- Support saved queries in `dbt list` ([#9532](https://github.com/dbt-labs/dbt-core/issues/9532))
- include sources in catalog.json when over 100 relations selected for catalog generation ([#9755](https://github.com/dbt-labs/dbt-core/issues/9755))
- Support overriding macros in packages in unit testing ([#9624](https://github.com/dbt-labs/dbt-core/issues/9624))
- Handle exceptions for failing on-run-* hooks in source freshness ([#9511](https://github.com/dbt-labs/dbt-core/issues/9511))
- Validation of unit test parsing for incremental models ([#9593](https://github.com/dbt-labs/dbt-core/issues/9593))
- Fix use of retry command on command using defer ([#9770](https://github.com/dbt-labs/dbt-core/issues/9770))
- Make `args` variable to be un-modified by `dbt.invoke(args)` ([#8938](https://github.com/dbt-labs/dbt-core/issues/8938), [#9787](https://github.com/dbt-labs/dbt-core/issues/9787))
- Only create the packages-install-path / dbt_packages folder during dbt deps ([#6985](https://github.com/dbt-labs/dbt-core/issues/6985), [#9584](https://github.com/dbt-labs/dbt-core/issues/9584))
- Unit test path outputs ([#9608](https://github.com/dbt-labs/dbt-core/issues/9608))
- Fix assorted source freshness edgecases so check is run or actionable information is given ([#9078](https://github.com/dbt-labs/dbt-core/issues/9078))
- Fix Docker release process to account for both historical and current versions of `dbt-postgres` ([#9827](https://github.com/dbt-labs/dbt-core/issues/9827))
- Exclude password-like fields for considering reparse ([#9795](https://github.com/dbt-labs/dbt-core/issues/9795))
- Fixed query comments test ([#9860](https://github.com/dbt-labs/dbt-core/issues/9860))
- Begin warning people about spaces in model names ([#9397](https://github.com/dbt-labs/dbt-core/issues/9397))
- Add NodeRelation to SavedQuery Export ([#9534](https://github.com/dbt-labs/dbt-core/issues/9534))
- Disambiguate FreshnessConfigProblem error message ([#9891](https://github.com/dbt-labs/dbt-core/issues/9891))
- Use consistent secret scrubbing with the log function. ([#9987](https://github.com/dbt-labs/dbt-core/issues/9987))
- Validate against empty strings in package definitions ([#9985](https://github.com/dbt-labs/dbt-core/issues/9985))
- Fix default value for indirect selection in selector cannot be overwritten by CLI flag and env var ([#9976](https://github.com/dbt-labs/dbt-core/issues/9976), [#7673](https://github.com/dbt-labs/dbt-core/issues/7673))
- Simplify error message if test severity isn't 'warn' or 'error' ([#9715](https://github.com/dbt-labs/dbt-core/issues/9715))
- Support overriding source level loaded_at_field with a null table level definition ([#9320](https://github.com/dbt-labs/dbt-core/issues/9320))
- Undo conditional agate import to prevent UnresolvedTypeReferenceError during RunResult serialization ([#10098](https://github.com/dbt-labs/dbt-core/issues/10098))
- Restore previous behavior for --favor-state: only favor defer_relation if not selected in current command ([#10107](https://github.com/dbt-labs/dbt-core/issues/10107))
- Unit test fixture (csv) returns null for empty value ([#9881](https://github.com/dbt-labs/dbt-core/issues/9881))

### Docs

- Add analytics for dbt.com ([dbt-docs/#430](https://github.com/dbt-labs/dbt-docs/issues/430))
- fix get_custom_database docstring ([dbt-docs/#9003](https://github.com/dbt-labs/dbt-docs/issues/9003))
- Enable display of unit tests ([dbt-docs/#501](https://github.com/dbt-labs/dbt-docs/issues/501))
- Unit tests not rendering ([dbt-docs/#506](https://github.com/dbt-labs/dbt-docs/issues/506))

### Under the Hood

- Added more type annotations. ([#8537](https://github.com/dbt-labs/dbt-core/issues/8537))
- Add unit testing functional tests ([#8512](https://github.com/dbt-labs/dbt-core/issues/8512))
- Remove usage of dbt.include.global_project in dbt/adapters ([#8925](https://github.com/dbt-labs/dbt-core/issues/8925))
- Add a no-op runner for Saved Query ([#8893](https://github.com/dbt-labs/dbt-core/issues/8893))
- remove dbt.flags.MP_CONTEXT usage in dbt/adapters ([#8967](https://github.com/dbt-labs/dbt-core/issues/8967))
- Remove usage of dbt.flags.LOG_CACHE_EVENTS in dbt/adapters ([#8969](https://github.com/dbt-labs/dbt-core/issues/8969))
- Move CatalogRelationTypes test case to the shared test suite to be reused by adapter maintainers ([#8952](https://github.com/dbt-labs/dbt-core/issues/8952))
- Treat SystemExit as an interrupt if raised during node execution. ([#n/a](https://github.com/dbt-labs/dbt-core/issues/n/a))
- Removing unused 'documentable' ([#8871](https://github.com/dbt-labs/dbt-core/issues/8871))
- Remove use of dbt/core exceptions in dbt/adapter ([#8920](https://github.com/dbt-labs/dbt-core/issues/8920))
- Cache dbt plugin modules to improve integration test performance ([#9029](https://github.com/dbt-labs/dbt-core/issues/9029))
- Consolidate deferral methods & flags ([#7965](https://github.com/dbt-labs/dbt-core/issues/7965), [#8715](https://github.com/dbt-labs/dbt-core/issues/8715))
- Fix test_current_timestamp_matches_utc test; allow for MacOS runner system clock variance ([#9057](https://github.com/dbt-labs/dbt-core/issues/9057))
- Remove usage of dbt.deprecations in dbt/adapters, enable core & adapter-specific event types and protos ([#8927](https://github.com/dbt-labs/dbt-core/issues/8927), [#8918](https://github.com/dbt-labs/dbt-core/issues/8918))
- Clean up unused adaptor folders ([#9123](https://github.com/dbt-labs/dbt-core/issues/9123))
- Move column constraints into common/contracts, removing another dependency of adapters on core. ([#9024](https://github.com/dbt-labs/dbt-core/issues/9024))
- Move dbt.semver to dbt.common.semver and update references. ([#9039](https://github.com/dbt-labs/dbt-core/issues/9039))
- Move lowercase utils method to common ([#9180](https://github.com/dbt-labs/dbt-core/issues/9180))
- Remove usages of dbt.clients.jinja in dbt/adapters ([#9205](https://github.com/dbt-labs/dbt-core/issues/9205))
- Remove usage of dbt.contracts in dbt/adapters ([#9208](https://github.com/dbt-labs/dbt-core/issues/9208))
- Remove usage of dbt.contracts.graph.nodes.ResultNode in dbt/adapters ([#9214](https://github.com/dbt-labs/dbt-core/issues/9214))
- Introduce RelationConfig Protocol, consolidate Relation.create_from ([#9215](https://github.com/dbt-labs/dbt-core/issues/9215))
- remove manifest from adapter.set_relations_cache signature ([#9217](https://github.com/dbt-labs/dbt-core/issues/9217))
- remove manifest from adapter catalog method signatures ([#9218](https://github.com/dbt-labs/dbt-core/issues/9218))
- Move BaseConfig, Metadata and various other contract classes from model_config to common/contracts/config ([#8919](https://github.com/dbt-labs/dbt-core/issues/8919))
- Add MacroResolverProtocol, remove lazy loading of manifest in adapter.execute_macro ([#9244](https://github.com/dbt-labs/dbt-core/issues/9244))
- pass query header context to MacroQueryStringSetter ([#9249](https://github.com/dbt-labs/dbt-core/issues/9249), [#9250](https://github.com/dbt-labs/dbt-core/issues/9250))
- add macro_context_generator on adapter ([#9247](https://github.com/dbt-labs/dbt-core/issues/9247))
- pass mp_context to adapter factory as argument instead of import ([#9025](https://github.com/dbt-labs/dbt-core/issues/9025))
- have dbt-postgres use RelationConfig protocol for materialized views ([#9292](https://github.com/dbt-labs/dbt-core/issues/9292))
- move system.py to common as dbt-bigquery relies on it to call gcloud ([#9293](https://github.com/dbt-labs/dbt-core/issues/9293))
- Reorganizing event definitions to define core events in dbt/events rather than dbt/common ([#9152](https://github.com/dbt-labs/dbt-core/issues/9152))
- move exceptions used only in dbt/common to dbt/common/exceptions ([#9332](https://github.com/dbt-labs/dbt-core/issues/9332))
- Remove usage of dbt.adapters.factory in dbt/common ([#9334](https://github.com/dbt-labs/dbt-core/issues/9334))
- Accept valid_error_names in WarnErrorOptions constructor, remove global usage of event modules ([#9337](https://github.com/dbt-labs/dbt-core/issues/9337))
- Move result objects to dbt.artifacts ([#9193](https://github.com/dbt-labs/dbt-core/issues/9193))
- dbt Labs OSS standardization of docs and templates. ([#9252](https://github.com/dbt-labs/dbt-core/issues/9252))
- Add dbt-common as a dependency and remove dbt/common ([#9357](https://github.com/dbt-labs/dbt-core/issues/9357))
- move cache exceptions to dbt/adapters ([#9362](https://github.com/dbt-labs/dbt-core/issues/9362))
- Clean up macro contexts. ([#9422](https://github.com/dbt-labs/dbt-core/issues/9422))
- Add the @requires.manifest decorator to the retry command. ([#9426](https://github.com/dbt-labs/dbt-core/issues/9426))
- Move WritableManifest + Documentation to dbt/artifacts ([#9378](https://github.com/dbt-labs/dbt-core/issues/9378), [#9379](https://github.com/dbt-labs/dbt-core/issues/9379))
- Define Macro and Group resources in dbt/artifacts ([#9381](https://github.com/dbt-labs/dbt-core/issues/9381), [#9382](https://github.com/dbt-labs/dbt-core/issues/9382))
- Move `SavedQuery` data definition to `dbt/artifacts` ([#9386](https://github.com/dbt-labs/dbt-core/issues/9386))
- Migrate data parts of `Metric` node to dbt/artifacts ([#9383](https://github.com/dbt-labs/dbt-core/issues/9383))
- Move data portion of `SemanticModel` to dbt/artifacts ([#9387](https://github.com/dbt-labs/dbt-core/issues/9387))
- Move data parts of `Exposure` class to dbt/artifacts ([#9380](https://github.com/dbt-labs/dbt-core/issues/9380))
- Split up deferral across parsing (adding 'defer_relation' from state manifest) and runtime ref resolution ([#9199](https://github.com/dbt-labs/dbt-core/issues/9199))
- Start using `Mergeable` from dbt-common ([#9505](https://github.com/dbt-labs/dbt-core/issues/9505))
- Move manifest nodes to artifacts ([#9388](https://github.com/dbt-labs/dbt-core/issues/9388))
- Move data parts of `SourceDefinition` class to dbt/artifacts ([#9384](https://github.com/dbt-labs/dbt-core/issues/9384))
- Remove uses of Replaceable class ([#7802](https://github.com/dbt-labs/dbt-core/issues/7802))
- Make dbt-core compatible with Python 3.12 ([#9007](https://github.com/dbt-labs/dbt-core/issues/9007))
- Restrict protobuf to major version 4. ([#9566](https://github.com/dbt-labs/dbt-core/issues/9566))
- Remove references to dbt.tracking and dbt.flags from dbt/artifacts ([#9390](https://github.com/dbt-labs/dbt-core/issues/9390))
- Remove unused key `wildcard` from MethodName enum ([#9641](https://github.com/dbt-labs/dbt-core/issues/9641))
- Implement primary key inference for model nodes ([#9652](https://github.com/dbt-labs/dbt-core/issues/9652))
- Define UnitTestDefinition resource in dbt/artifacts/resources ([#9667](https://github.com/dbt-labs/dbt-core/issues/9667))
- Use Manifest instead of WritableManifest in PreviousState and _get_deferred_manifest ([#9567](https://github.com/dbt-labs/dbt-core/issues/9567))
- Improve dbt CLI speed ([#4627](https://github.com/dbt-labs/dbt-core/issues/4627))
- Include node_info in various Result events ([#9619](https://github.com/dbt-labs/dbt-core/issues/9619))
- Remove non dbt.artifacts dbt.* imports from dbt/artifacts ([#9926](https://github.com/dbt-labs/dbt-core/issues/9926))
- Migrate to using `error_tag` provided by `dbt-common` ([#9914](https://github.com/dbt-labs/dbt-core/issues/9914))
- Add a test for semantic manifest and move test fixtures needed for it ([#9665](https://github.com/dbt-labs/dbt-core/issues/9665))
- Raise deprecation warning if installed package overrides built-in materialization ([#9971](https://github.com/dbt-labs/dbt-core/issues/9971))
- Use the SECRET_ENV_PREFIX from dbt_common instead of duplicating it in dbt-core ([#10018](https://github.com/dbt-labs/dbt-core/issues/10018))
- Enable use of record mode via environment variable ([#10045](https://github.com/dbt-labs/dbt-core/issues/10045))
- Consistent naming + deprecation warnings for "legacy behavior" flags ([#10062](https://github.com/dbt-labs/dbt-core/issues/10062))
- Enable use of context in serialization ([#10093](https://github.com/dbt-labs/dbt-core/issues/10093))

### Dependencies

- Bump actions/checkout from 3 to 4 ([#8781](https://github.com/dbt-labs/dbt-core/issues/8781))
- Begin using DSI 0.4.x ([#8892](https://github.com/dbt-labs/dbt-core/issues/8892))
- Update typing-extensions version to >=4.4 ([#9012](https://github.com/dbt-labs/dbt-core/issues/9012))
- Bump ddtrace from 2.1.7 to 2.3.0 ([#9132](https://github.com/dbt-labs/dbt-core/issues/9132))
- Bump freezegun from 0.3.12 to 1.3.0 ([#9197](https://github.com/dbt-labs/dbt-core/issues/9197))
- Bump actions/setup-python from 4 to 5 ([#9267](https://github.com/dbt-labs/dbt-core/issues/9267))
- Bump actions/download-artifact from 3 to 4 ([#9374](https://github.com/dbt-labs/dbt-core/issues/9374))
- Relax pathspec upper bound version restriction ([#9373](https://github.com/dbt-labs/dbt-core/issues/9373))
- remove dbt/adapters and add dependency on dbt-adapters ([#9430](https://github.com/dbt-labs/dbt-core/issues/9430))
- Bump actions/upload-artifact from 3 to 4 ([#9470](https://github.com/dbt-labs/dbt-core/issues/9470))
- Bump actions/cache from 3 to 4 ([#9471](https://github.com/dbt-labs/dbt-core/issues/9471))
- Bump peter-evans/create-pull-request from 5 to 6 ([#9552](https://github.com/dbt-labs/dbt-core/issues/9552))
- Restrict protobuf to 4.* versions ([#9566](https://github.com/dbt-labs/dbt-core/issues/9566))
- Bump codecov/codecov-action from 3 to 4 ([#9659](https://github.com/dbt-labs/dbt-core/issues/9659))
- Cap dbt-semantic-interfaces version range to <0.6 ([#9671](https://github.com/dbt-labs/dbt-core/issues/9671))
- Bump python from 3.10.7-slim-bullseye to 3.11.2-slim-bullseye in /docker ([#9687](https://github.com/dbt-labs/dbt-core/issues/9687))
- bump dbt-common to accept major version 1 ([#9690](https://github.com/dbt-labs/dbt-core/issues/9690))
- Remove duplicate dependency of protobuf in dev-requirements ([#9830](https://github.com/dbt-labs/dbt-core/issues/9830))
- Bump black from 23.3.0 to >=24.3.0,<25.0 ([#8074](https://github.com/dbt-labs/dbt-core/issues/8074))
- Update the agate pin to "agate>=1.7.0,<1.10" ([#9934](https://github.com/dbt-labs/dbt-core/issues/9934))

### Security

- Update Jinja2 to >= 3.1.3 to address CVE-2024-22195 ([#9638](https://github.com/dbt-labs/dbt-core/issues/9638))
- Bump sqlparse to >=0.5.0, <0.6.0 to address GHSA-2m57-hf25-phgg ([#9951](https://github.com/dbt-labs/dbt-core/issues/9951))

### Contributors

- [@LeoTheGriff](https://github.com/LeoTheGriff) ([#9003](https://github.com/dbt-labs/dbt-core/issues/9003))
- [@SamuelBFavarin](https://github.com/SamuelBFavarin) ([#9747](https://github.com/dbt-labs/dbt-core/issues/9747))
- [@WilliamDee](https://github.com/WilliamDee) ([#9203](https://github.com/dbt-labs/dbt-core/issues/9203))
- [@adamlopez](https://github.com/adamlopez) ([#8621](https://github.com/dbt-labs/dbt-core/issues/8621))
- [@akurdyukov](https://github.com/akurdyukov) ([#8719](https://github.com/dbt-labs/dbt-core/issues/8719))
- [@aliceliu](https://github.com/aliceliu) ([#9652](https://github.com/dbt-labs/dbt-core/issues/9652))
- [@asweet](https://github.com/asweet) ([#9641](https://github.com/dbt-labs/dbt-core/issues/9641))
- [@b-per](https://github.com/b-per) ([#430](https://github.com/dbt-labs/dbt-core/issues/430))
- [@barton996](https://github.com/barton996) ([#7798](https://github.com/dbt-labs/dbt-core/issues/7798), [#6891](https://github.com/dbt-labs/dbt-core/issues/6891))
- [@benmosher](https://github.com/benmosher) ([#n/a](https://github.com/dbt-labs/dbt-core/issues/n/a))
- [@colin-rorgers-dbt](https://github.com/colin-rorgers-dbt) ([#8919](https://github.com/dbt-labs/dbt-core/issues/8919))
- [@courtneyholcomb](https://github.com/courtneyholcomb) ([#9804](https://github.com/dbt-labs/dbt-core/issues/9804), [#9507](https://github.com/dbt-labs/dbt-core/issues/9507), [#9582](https://github.com/dbt-labs/dbt-core/issues/9582))
- [@damian3031](https://github.com/damian3031) ([#9860](https://github.com/dbt-labs/dbt-core/issues/9860))
- [@dwreeves](https://github.com/dwreeves) ([#4627](https://github.com/dbt-labs/dbt-core/issues/4627))
- [@edgarrmondragon](https://github.com/edgarrmondragon) ([#8719](https://github.com/dbt-labs/dbt-core/issues/8719))
- [@emmoop](https://github.com/emmoop) ([#9951](https://github.com/dbt-labs/dbt-core/issues/9951))
- [@heysweet](https://github.com/heysweet) ([#9811](https://github.com/dbt-labs/dbt-core/issues/9811))
- [@jx2lee](https://github.com/jx2lee) ([#9319](https://github.com/dbt-labs/dbt-core/issues/9319), [#7761](https://github.com/dbt-labs/dbt-core/issues/7761))
- [@l1xnan](https://github.com/l1xnan) ([#9007](https://github.com/dbt-labs/dbt-core/issues/9007))
- [@mederka](https://github.com/mederka) ([#6976](https://github.com/dbt-labs/dbt-core/issues/6976))
- [@mjkanji](https://github.com/mjkanji) ([#9934](https://github.com/dbt-labs/dbt-core/issues/9934))
- [@nielspardon](https://github.com/nielspardon) ([#7247](https://github.com/dbt-labs/dbt-core/issues/7247))
- [@niteshy](https://github.com/niteshy) ([#9830](https://github.com/dbt-labs/dbt-core/issues/9830))
- [@ofek1weiss](https://github.com/ofek1weiss) ([#5609](https://github.com/dbt-labs/dbt-core/issues/5609))
- [@peterallenwebb](https://github.com/peterallenwebb) ([#9112](https://github.com/dbt-labs/dbt-core/issues/9112))
- [@rzjfr](https://github.com/rzjfr) ([#9373](https://github.com/dbt-labs/dbt-core/issues/9373))
- [@slothkong](https://github.com/slothkong) ([#9570](https://github.com/dbt-labs/dbt-core/issues/9570))
- [@tlento](https://github.com/tlento) ([#9012](https://github.com/dbt-labs/dbt-core/issues/9012), [#9671](https://github.com/dbt-labs/dbt-core/issues/9671))
- [@tonayya](https://github.com/tonayya) ([#9252](https://github.com/dbt-labs/dbt-core/issues/9252))
15 .changes/1.8.1.md Normal file
@@ -0,0 +1,15 @@
## dbt-core 1.8.1 - May 22, 2024

### Fixes

- Add resource type to saved_query ([#10168](https://github.com/dbt-labs/dbt-core/issues/10168))

### Docs

- Enable display of unit tests ([dbt-docs/#501](https://github.com/dbt-labs/dbt-docs/issues/501))
- Unit tests not rendering ([dbt-docs/#506](https://github.com/dbt-labs/dbt-docs/issues/506))
- Add support for Saved Query node ([dbt-docs/#486](https://github.com/dbt-labs/dbt-docs/issues/486))

### Security

- Explicitly bind to localhost in docs serve ([#10209](https://github.com/dbt-labs/dbt-core/issues/10209))
9 .changes/1.8.2.md Normal file
@@ -0,0 +1,9 @@
## dbt-core 1.8.2 - June 05, 2024

### Features

- Add --host flag to dbt docs serve, defaulting to '127.0.0.1' ([#10229](https://github.com/dbt-labs/dbt-core/issues/10229))

### Fixes

- Fix: Order-insensitive unit test equality assertion for expected/actual with multiple nulls ([#10167](https://github.com/dbt-labs/dbt-core/issues/10167))
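The 1.8.1 security fix and the 1.8.2 `--host` flag address the same concern: which interface the local docs site listens on. A minimal sketch of the difference using Python's standard library, not dbt's actual serve code:

```python
from http.server import HTTPServer, SimpleHTTPRequestHandler

# "127.0.0.1" (the new default) accepts connections from this machine only;
# "0.0.0.0" would expose the served docs to every host on the network.
host, port = "127.0.0.1", 8080  # a --host-style option would override `host`
server = HTTPServer((host, port), SimpleHTTPRequestHandler)
# server.serve_forever()  # left commented so the sketch exits immediately
```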
13 .changes/1.8.3.md Normal file
@@ -0,0 +1,13 @@
## dbt-core 1.8.3 - June 20, 2024

### Features

- add --empty value to jinja context as flags.EMPTY ([#10317](https://github.com/dbt-labs/dbt-core/issues/10317))

### Fixes

- Don't warn on `unit_test` config paths that are properly used ([#10311](https://github.com/dbt-labs/dbt-core/issues/10311))

### Docs

- Fix npm security vulnerabilities as of June 2024 ([dbt-docs/#513](https://github.com/dbt-labs/dbt-docs/issues/513))
8 .changes/1.8.4.md Normal file
@@ -0,0 +1,8 @@
## dbt-core 1.8.4 - July 18, 2024

### Fixes

- Fix setting `silence` of `warn_error_options` via `dbt_project.yaml` flags ([#10160](https://github.com/dbt-labs/dbt-core/issues/10160))
- Limit data_tests deprecation to root_project ([#9835](https://github.com/dbt-labs/dbt-core/issues/9835))
- CLI flags should take precedence over env var flags ([#10304](https://github.com/dbt-labs/dbt-core/issues/10304))
- Fix error constructing warn_error_options ([#10452](https://github.com/dbt-labs/dbt-core/issues/10452))
5 .changes/1.8.5.md Normal file
@@ -0,0 +1,5 @@
## dbt-core 1.8.5 - August 07, 2024

### Fixes

- respect --quiet and --warn-error-options for flag deprecations ([#10105](https://github.com/dbt-labs/dbt-core/issues/10105))
12 .changes/1.8.6.md Normal file
@@ -0,0 +1,12 @@
## dbt-core 1.8.6 - August 29, 2024

### Fixes

- Late render pre- and post-hooks configs in properties / schema YAML files ([#10603](https://github.com/dbt-labs/dbt-core/issues/10603))

### Under the Hood

- Improve speed of tree traversal when finding children, increasing build speed for some selectors ([#10434](https://github.com/dbt-labs/dbt-core/issues/10434))

### Contributors

- [@ttusing](https://github.com/ttusing) ([#10434](https://github.com/dbt-labs/dbt-core/issues/10434))
5 .changes/1.8.7.md Normal file
@@ -0,0 +1,5 @@
## dbt-core 1.8.7 - September 24, 2024

### Features

- Add support for behavior flags ([#10618](https://github.com/dbt-labs/dbt-core/issues/10618))
16 .changes/1.8.8.md Normal file
@@ -0,0 +1,16 @@
## dbt-core 1.8.8 - October 23, 2024

### Fixes

- Fix unit tests for incremental model with alias ([#10754](https://github.com/dbt-labs/dbt-core/issues/10754))

### Under the Hood

- Remove support and testing for Python 3.8, which is now EOL. ([#10861](https://github.com/dbt-labs/dbt-core/issues/10861))

### Dependencies

- Pin dbt-common and dbt-adapters with upper bound. ([#10895](https://github.com/dbt-labs/dbt-core/issues/10895))

### Contributors

- [@katsugeneration](https://github.com/katsugeneration) ([#10754](https://github.com/dbt-labs/dbt-core/issues/10754))
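"Pin with upper bound" means the dependency range accepts compatible patch and minor releases but refuses the next major version. A quick illustration with the `packaging` library — the bounds below are made up for the example; the actual pins live in the #10895 change, not here:

```python
from packaging.specifiers import SpecifierSet

# An upper-bounded range: any 1.x release satisfies it, 2.0 does not.
spec = SpecifierSet(">=1.0,<2.0")  # illustrative bounds only

assert "1.8.8" in spec
assert "2.0.0" not in spec
```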
@@ -1,6 +1,6 @@
 # dbt Core Changelog

-- This file provides a full account of all changes to `dbt-core` and `dbt-postgres`
+- This file provides a full account of all changes to `dbt-core`
 - Changes are listed under the (pre)release in which they first appear. Subsequent releases include changes from previous releases.
 - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
 - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)
@@ -1,6 +0,0 @@
-kind: "Dependencies"
-body: "Bump mypy from 1.3.0 to 1.4.0"
-time: 2023-06-21T00:57:52.00000Z
-custom:
-  Author: dependabot[bot]
-  PR: 7912
@@ -1,6 +0,0 @@
-kind: Docs
-body: Corrected spelling of "Partiton"
-time: 2023-07-15T20:09:07.057361092+02:00
-custom:
-  Author: pgoslatara
-  Issue: "8100"
@@ -1,6 +0,0 @@
-kind: Docs
-body: Remove static SQL codeblock for metrics
-time: 2023-07-18T19:24:22.155323+02:00
-custom:
-  Author: marcodamore
-  Issue: "436"
@@ -1,6 +0,0 @@
-kind: Fixes
-body: Fixed double-underline
-time: 2023-06-25T14:27:31.231253719+08:00
-custom:
-  Author: lllong33
-  Issue: "5301"
@@ -1,6 +0,0 @@
-kind: Fixes
-body: Enable converting deprecation warnings to errors
-time: 2023-07-18T12:55:18.03914-04:00
-custom:
-  Author: michelleark
-  Issue: "8130"
@@ -1,6 +0,0 @@
-kind: Fixes
-body: Add status to Parse Inline Error
-time: 2023-07-20T12:27:23.085084-07:00
-custom:
-  Author: ChenyuLInx
-  Issue: "8173"
@@ -1,6 +0,0 @@
-kind: Fixes
-body: Ensure `warn_error_options` get serialized in `invocation_args_dict`
-time: 2023-07-20T16:15:13.761813-07:00
-custom:
-  Author: QMalcolm
-  Issue: "7694"
@@ -1,6 +0,0 @@
-kind: Fixes
-body: Stop detecting materialization macros based on macro name
-time: 2023-07-20T17:01:12.496238-07:00
-custom:
-  Author: QMalcolm
-  Issue: "6231"
@@ -1,6 +0,0 @@
-kind: Fixes
-body: Update `dbt deps` download retry logic to handle `EOFError` exceptions
-time: 2023-07-20T17:24:22.969951-07:00
-custom:
-  Author: QMalcolm
-  Issue: "6653"
@@ -1,6 +0,0 @@
-kind: Fixes
-body: Improve handling of CTE injection with ephemeral models
-time: 2023-07-26T10:44:48.888451-04:00
-custom:
-  Author: gshank
-  Issue: "8213"
@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Refactor flaky test pp_versioned_models
-time: 2023-07-19T12:46:11.972481-04:00
-custom:
-  Author: gshank
-  Issue: "7781"
@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: format exception from dbtPlugin.initialize
-time: 2023-07-19T16:33:34.586377-04:00
-custom:
-  Author: michelleark
-  Issue: "8152"
@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: A way to control maxBytes for a single dbt.log file
-time: 2023-07-24T15:06:54.263822-07:00
-custom:
-  Author: ChenyuLInx
-  Issue: "8199"
@@ -1,7 +0,0 @@
-kind: Under the Hood
-body: Ref expressions with version can now be processed by the latest version of the
-  high-performance dbt-extractor library.
-time: 2023-07-25T10:26:09.902878-04:00
-custom:
-  Author: peterallenwebb
-  Issue: "7688"
@@ -31,43 +31,7 @@ kinds:
       - {{.Body}} ({{ range $index, $element := $IssueList }}{{if $index}}, {{end}}{{$element}}{{end}})
   - label: Under the Hood
   - label: Dependencies
-    changeFormat: |-
-      {{- $PRList := list }}
-      {{- $changes := splitList " " $.Custom.PR }}
-      {{- range $pullrequest := $changes }}
-      {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $pullrequest }}
-      {{- $PRList = append $PRList $changeLink }}
-      {{- end -}}
-      - {{.Body}} ({{ range $index, $element := $PRList }}{{if $index}}, {{end}}{{$element}}{{end}})
-    skipGlobalChoices: true
-    additionalChoices:
-      - key: Author
-        label: GitHub Username(s) (separated by a single space if multiple)
-        type: string
-        minLength: 3
-      - key: PR
-        label: GitHub Pull Request Number (separated by a single space if multiple)
-        type: string
-        minLength: 1
   - label: Security
-    changeFormat: |-
-      {{- $PRList := list }}
-      {{- $changes := splitList " " $.Custom.PR }}
-      {{- range $pullrequest := $changes }}
-      {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $pullrequest }}
-      {{- $PRList = append $PRList $changeLink }}
-      {{- end -}}
-      - {{.Body}} ({{ range $index, $element := $PRList }}{{if $index}}, {{end}}{{$element}}{{end}})
-    skipGlobalChoices: true
-    additionalChoices:
-      - key: Author
-        label: GitHub Username(s) (separated by a single space if multiple)
-        type: string
-        minLength: 3
-      - key: PR
-        label: GitHub Pull Request Number (separated by a single space if multiple)
-        type: string
-        minLength: 1

 newlines:
   afterChangelogHeader: 1
@@ -106,18 +70,10 @@ footerFormat: |
  {{- $changeList := splitList " " $change.Custom.Author }}
  {{- $IssueList := list }}
  {{- $changeLink := $change.Kind }}
  {{- if or (eq $change.Kind "Dependencies") (eq $change.Kind "Security") }}
  {{- $changes := splitList " " $change.Custom.PR }}
  {{- range $issueNbr := $changes }}
  {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $issueNbr }}
  {{- $IssueList = append $IssueList $changeLink }}
  {{- end -}}
  {{- else }}
  {{- $changes := splitList " " $change.Custom.Issue }}
  {{- range $issueNbr := $changes }}
  {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/issues/nbr)" | replace "nbr" $issueNbr }}
  {{- $IssueList = append $IssueList $changeLink }}
  {{- end -}}
  {{- $changes := splitList " " $change.Custom.Issue }}
  {{- range $issueNbr := $changes }}
  {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/issues/nbr)" | replace "nbr" $issueNbr }}
  {{- $IssueList = append $IssueList $changeLink }}
  {{- end }}
  {{- /* check if this contributor has other changes associated with them already */}}
  {{- if hasKey $contributorDict $author }}
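For readers unfamiliar with changie's Go templates, the `changeFormat` blocks above render one Markdown bullet per change, turning the space-separated numbers in `Custom.PR` into pull-request links. A rough Python equivalent — the function name is ours, not part of changie:

```python
def format_change(body: str, prs: str) -> str:
    # Mirrors the template: split Custom.PR on spaces, link each number to
    # a dbt-core pull request, and join the links with ", ".
    links = [
        f"[#{nbr}](https://github.com/dbt-labs/dbt-core/pull/{nbr})"
        for nbr in prs.split(" ")
    ]
    return f"- {body} ({', '.join(links)})"

# The deleted "Bump mypy" entry above would render as:
print(format_change("Bump mypy from 1.3.0 to 1.4.0", "7912"))
# - Bump mypy from 1.3.0 to 1.4.0 ([#7912](https://github.com/dbt-labs/dbt-core/pull/7912))
```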
2 .flake8
@@ -10,3 +10,5 @@ ignore =
     E741
     E501 # long line checking is done in black
 exclude = test/
+per-file-ignores =
+    */__init__.py: F401
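The two added lines scope flake8's F401 ("imported but unused") check: a pure re-export like the sketch below is idiomatic in a package `__init__.py` and now lints clean there, while the same unused import in any other module still fails. The re-exported name is illustrative:

```python
# Example __init__.py content: a re-export that flake8 would normally flag
# as F401 ("imported but unused"). With the new
#   per-file-ignores = */__init__.py: F401
# setting, this pattern passes lint in __init__.py files only.
from os.path import join as path_join  # re-exported for package users
```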
2 .gitattributes vendored
@@ -1,4 +1,4 @@
-core/dbt/include/index.html binary
+core/dbt/task/docs/index.html binary
 tests/functional/artifacts/data/state/*/manifest.json binary
 core/dbt/docs/build/html/searchindex.js binary
 core/dbt/docs/build/html/index.html binary
42 .github/CODEOWNERS vendored
@@ -13,48 +13,6 @@
 # the core team as a whole will be assigned
 * @dbt-labs/core-team

-### OSS Tooling Guild
-
-/.github/ @dbt-labs/guild-oss-tooling
-.bumpversion.cfg @dbt-labs/guild-oss-tooling
-
-.changie.yaml @dbt-labs/guild-oss-tooling
-
-pre-commit-config.yaml @dbt-labs/guild-oss-tooling
-pytest.ini @dbt-labs/guild-oss-tooling
-tox.ini @dbt-labs/guild-oss-tooling
-
-pyproject.toml @dbt-labs/guild-oss-tooling
-requirements.txt @dbt-labs/guild-oss-tooling
-dev_requirements.txt @dbt-labs/guild-oss-tooling
-/core/setup.py @dbt-labs/guild-oss-tooling
-/core/MANIFEST.in @dbt-labs/guild-oss-tooling
-
-### ADAPTERS
-
-# Adapter interface ("base" + "sql" adapter defaults, cache)
-/core/dbt/adapters @dbt-labs/core-adapters
-
-# Global project (default macros + materializations), starter project
-/core/dbt/include @dbt-labs/core-adapters
-
-# Postgres plugin
-/plugins/ @dbt-labs/core-adapters
-/plugins/postgres/setup.py @dbt-labs/core-adapters @dbt-labs/guild-oss-tooling
-
-# Functional tests for adapter plugins
-/tests/adapter @dbt-labs/core-adapters
-
-### TESTS
-
-# Overlapping ownership for vast majority of unit + functional tests
-
-# Perf regression testing framework
-# This excludes the test project files itself since those aren't specific
-# framework changes (excluded by not setting an owner next to it- no owner)
-/performance @nathaniel-may
-/performance/projects
-
 ### ARTIFACTS

 /schemas/dbt @dbt-labs/cloud-artifacts
37  .github/ISSUE_TEMPLATE/implementation-ticket.yml  vendored
@@ -1,7 +1,7 @@
name: 🛠️ Implementation
description: This is an implementation ticket intended for use by the maintainers of dbt-core
title: "[<project>] <title>"
labels: ["user_docs"]
labels: ["user docs"]
body:
- type: markdown
attributes:
@@ -11,7 +11,7 @@ body:
label: Housekeeping
description: >
A couple friendly reminders:
1. Remove the `user_docs` label if the scope of this work does not require changes to https://docs.getdbt.com/docs: no end-user interface (e.g. yml spec, CLI, error messages, etc) or functional changes
1. Remove the `user docs` label if the scope of this work does not require changes to https://docs.getdbt.com/docs: no end-user interface (e.g. yml spec, CLI, error messages, etc) or functional changes
2. Link any blocking issues in the "Blocked on" field under the "Core devs & maintainers" project.
options:
- label: I am a maintainer of dbt-core
@@ -25,16 +25,43 @@ body:
required: true
- type: textarea
attributes:
label: Acceptance critera
label: Acceptance criteria
description: |
What is the definition of done for this ticket? Include any relevant edge cases and/or test cases
validations:
required: true
- type: textarea
attributes:
label: Suggested Tests
description: |
Provide scenarios to test. Link to existing similar tests if appropriate.
placeholder: |
1. Test with no version specified in the schema file and use selection logic on a versioned model for a specific version. Expect pass.
2. Test with a version specified in the schema file that is no valid. Expect ParsingError.
validations:
required: true
- type: textarea
attributes:
label: Impact to Other Teams
description: |
Will this change impact other teams? Include details of the kinds of changes required (new tests, code changes, related tickets) and _add the relevant `Impact:[team]` label_.
placeholder: |
Example: This change impacts `dbt-redshift` because the tests will need to be modified. The `Impact:[Adapter]` label has been added.
validations:
required: true
- type: textarea
attributes:
label: Will backports be required?
description: |
Will this change need to be backported to previous versions? Add details, possible blockers to backporting and _add the relevant backport labels `backport 1.x.latest`_
placeholder: |
Example: Backport to 1.6.latest, 1.5.latest and 1.4.latest. Since 1.4 isn't using click, the backport may be complicated. The `backport 1.6.latest`, `backport 1.5.latest` and `backport 1.4.latest` labels have been added.
validations:
required: true
- type: textarea
attributes:
label: Context
description: |
Provide the "why", motivation, and alternative approaches considered -- linking to previous refinement issues, spikes, Notion docs as appropriate
validations:
Provide the "why", motivation, and alternative approaches considered -- linking to previous refinement issues, spikes and documentation as appropriate
validations:
required: false
3  .github/_README.md  vendored
@@ -47,7 +47,8 @@ ___

### How to re-run jobs

- Some actions cannot be rerun in the GitHub UI. Namely the snyk checks and the cla check. Snyk checks are rerun by closing and reopening the PR. You can retrigger the cla check by commenting on the PR with `@cla-bot check`
- From the UI you can rerun from failure
- You can retrigger the cla check by commenting on the PR with `@cla-bot check`

___
21  .github/actions/latest-wrangler/action.yml  vendored
@@ -1,20 +1,21 @@
name: "Github package 'latest' tag wrangler for containers"
description: "Determines wether or not a given dbt container should be given a bare 'latest' tag (I.E. dbt-core:latest)"
name: "GitHub package `latest` tag wrangler for containers"
description: "Determines if the published image should include `latest` tags"

inputs:
package_name:
description: "Package to check (I.E. dbt-core, dbt-redshift, etc)"
description: "Package being published (i.e. `dbt-core`, `dbt-redshift`, etc.)"
required: true
new_version:
description: "Semver of the container being built (I.E. 1.0.4)"
description: "SemVer of the package being published (i.e. 1.7.2, 1.8.0a1, etc.)"
required: true
gh_token:
description: "Auth token for github (must have view packages scope)"
github_token:
description: "Auth token for GitHub (must have view packages scope)"
required: true

outputs:
latest:
description: "Wether or not built container should be tagged latest (bool)"
minor_latest:
description: "Wether or not built container should be tagged minor.latest (bool)"
tags:
description: "A list of tags to associate with this version"

runs:
using: "docker"
image: "Dockerfile"
133  .github/actions/latest-wrangler/main.py  vendored
@@ -1,98 +1,71 @@
import os
import sys
from packaging.version import Version, parse
import requests
from distutils.util import strtobool
from typing import Union
from packaging.version import parse, Version
import sys
from typing import List

if __name__ == "__main__":

# get inputs
package = os.environ["INPUT_PACKAGE"]
new_version = parse(os.environ["INPUT_NEW_VERSION"])
gh_token = os.environ["INPUT_GH_TOKEN"]
halt_on_missing = strtobool(os.environ.get("INPUT_HALT_ON_MISSING", "False"))
def main():
package_name: str = os.environ["INPUT_PACKAGE_NAME"]
new_version: Version = parse(os.environ["INPUT_NEW_VERSION"])
github_token: str = os.environ["INPUT_GITHUB_TOKEN"]

# get package metadata from github
package_request = requests.get(
f"https://api.github.com/orgs/dbt-labs/packages/container/{package}/versions",
auth=("", gh_token),
)
package_meta = package_request.json()
response = _package_metadata(package_name, github_token)
published_versions = _published_versions(response)
new_version_tags = _new_version_tags(new_version, published_versions)
_register_tags(new_version_tags, package_name)

# Log info if we don't get a 200
if package_request.status_code != 200:
print(f"Call to GH API failed: {package_request.status_code} {package_meta['message']}")

# Make an early exit if there is no matching package in github
if package_request.status_code == 404:
if halt_on_missing:
sys.exit(1)
# everything is the latest if the package doesn't exist
github_output = os.environ.get("GITHUB_OUTPUT")
with open(github_output, "at", encoding="utf-8") as gh_output:
gh_output.write("latest=True")
gh_output.write("minor_latest=True")
sys.exit(0)
def _package_metadata(package_name: str, github_token: str) -> requests.Response:
url = f"https://api.github.com/orgs/dbt-labs/packages/container/{package_name}/versions"
return requests.get(url, auth=("", github_token))

# TODO: verify package meta is "correct"
# https://github.com/dbt-labs/dbt-core/issues/4640

# map versions and tags
version_tag_map = {
version["id"]: version["metadata"]["container"]["tags"] for version in package_meta
}
def _published_versions(response: requests.Response) -> List[Version]:
package_metadata = response.json()
return [
parse(tag)
for version in package_metadata
for tag in version["metadata"]["container"]["tags"]
if "latest" not in tag
]

# is pre-release
pre_rel = True if any(x in str(new_version) for x in ["a", "b", "rc"]) else False

# semver of current latest
for version, tags in version_tag_map.items():
if "latest" in tags:
# N.B. This seems counterintuitive, but we expect any version tagged
# 'latest' to have exactly three associated tags:
# latest, major.minor.latest, and major.minor.patch.
# Subtracting everything that contains the string 'latest' gets us
# the major.minor.patch which is what's needed for comparison.
current_latest = parse([tag for tag in tags if "latest" not in tag][0])
else:
current_latest = False
def _new_version_tags(new_version: Version, published_versions: List[Version]) -> List[str]:
# the package version is always a tag
tags = [str(new_version)]

# semver of current_minor_latest
for version, tags in version_tag_map.items():
if f"{new_version.major}.{new_version.minor}.latest" in tags:
# Similar to above, only now we expect exactly two tags:
# major.minor.patch and major.minor.latest
current_minor_latest = parse([tag for tag in tags if "latest" not in tag][0])
else:
current_minor_latest = False
# pre-releases don't get tagged with `latest`
if new_version.is_prerelease:
return tags

def is_latest(
pre_rel: bool, new_version: Version, remote_latest: Union[bool, Version]
) -> bool:
"""Determine if a given contaier should be tagged 'latest' based on:
- it's pre-release status
- it's version
- the version of a previously identified container tagged 'latest'
if new_version > max(published_versions):
tags.append("latest")

:param pre_rel: Wether or not the version of the new container is a pre-release
:param new_version: The version of the new container
:param remote_latest: The version of the previously identified container that's
already tagged latest or False
"""
# is a pre-release = not latest
if pre_rel:
return False
# + no latest tag found = is latest
if not remote_latest:
return True
# + if remote version is lower than current = is latest, else not latest
return True if remote_latest <= new_version else False
published_patches = [
version
for version in published_versions
if version.major == new_version.major and version.minor == new_version.minor
]
if new_version > max(published_patches):
tags.append(f"{new_version.major}.{new_version.minor}.latest")

latest = is_latest(pre_rel, new_version, current_latest)
minor_latest = is_latest(pre_rel, new_version, current_minor_latest)
return tags


def _register_tags(tags: List[str], package_name: str) -> None:
fully_qualified_tags = ",".join([f"ghcr.io/dbt-labs/{package_name}:{tag}" for tag in tags])
github_output = os.environ.get("GITHUB_OUTPUT")
with open(github_output, "at", encoding="utf-8") as gh_output:
gh_output.write(f"latest={latest}")
gh_output.write(f"minor_latest={minor_latest}")
gh_output.write(f"fully_qualified_tags={fully_qualified_tags}")


def _validate_response(response: requests.Response) -> None:
message = response["message"]
if response.status_code != 200:
print(f"Call to GitHub API failed: {response.status_code} - {message}")
sys.exit(1)


if __name__ == "__main__":
main()
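For a sanity check of the refactored `_new_version_tags`, here is how it should behave for a hypothetical set of already-published versions; the version numbers are made up, and the expected results follow directly from the function body above.

from packaging.version import parse

published = [parse(v) for v in ["1.7.0", "1.7.1", "1.8.0"]]

# Highest line, new patch: gets the plain version tag plus both aliases.
# _new_version_tags(parse("1.8.1"), published) == ["1.8.1", "latest", "1.8.latest"]

# Backport patch: newer than its own 1.7 line but not overall, so no bare `latest`.
# _new_version_tags(parse("1.7.2"), published) == ["1.7.2", "1.7.latest"]

# Pre-release: returns early with only its own version tag.
# _new_version_tags(parse("1.9.0b1"), published) == ["1.9.0b1"]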
12  .github/dependabot.yml  vendored
@@ -11,11 +11,6 @@ updates:
schedule:
interval: "daily"
rebase-strategy: "disabled"
- package-ecosystem: "pip"
directory: "/plugins/postgres"
schedule:
interval: "daily"
rebase-strategy: "disabled"

# docker dependencies
- package-ecosystem: "docker"
@@ -28,3 +23,10 @@ updates:
schedule:
interval: "weekly"
rebase-strategy: "disabled"

# github dependencies
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
rebase-strategy: "disabled"
10  .github/pull_request_template.md  vendored
@@ -1,15 +1,12 @@
resolves #
[docs](https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose) dbt-labs/docs.getdbt.com/#
resolves #

<!---
Include the number of the issue addressed by this PR above if applicable.
PRs for code changes without an associated issue *will not be merged*.
See CONTRIBUTING.md for more information.

Include the number of the docs issue that was opened for this PR. If
this change has no user-facing implications, "N/A" suffices instead. New
docs tickets can be created by clicking the link above or by going to
https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose.
Add the `user docs` label to this PR if it will need docs changes. An
issue will get opened in docs.getdbt.com upon successful merge of this PR.
-->

### Problem
@@ -33,3 +30,4 @@ resolves #
- [ ] I have run this code in development and it appears to resolve the stated issue
- [ ] This PR includes tests, or tests are not required/relevant for this PR
- [ ] This PR has no interface changes (e.g. macros, cli, logs, json artifacts, config files, adapter interface, etc) or this PR has already received feedback and approval from Product or DX
- [ ] This PR includes [type annotations](https://docs.python.org/3/library/typing.html) for new and modified functions
2  .github/workflows/backport.yml  vendored
@@ -35,6 +35,6 @@ jobs:
github.event.pull_request.merged
&& contains(github.event.label.name, 'backport')
steps:
- uses: tibdex/backport@v2.0.3
- uses: tibdex/backport@v2.0.4
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
4  .github/workflows/bot-changelog.yml  vendored
@@ -41,8 +41,6 @@ jobs:
include:
- label: "dependencies"
changie_kind: "Dependencies"
- label: "snyk"
changie_kind: "Security"
runs-on: ubuntu-latest

steps:
@@ -58,4 +56,4 @@ jobs:
commit_message: "Add automated changelog yaml from template for bot PR"
changie_kind: ${{ matrix.changie_kind }}
label: ${{ matrix.label }}
custom_changelog_string: "custom:\n Author: ${{ github.event.pull_request.user.login }}\n PR: ${{ github.event.pull_request.number }}"
custom_changelog_string: "custom:\n Author: ${{ github.event.pull_request.user.login }}\n Issue: ${{ github.event.pull_request.number }}"
10  .github/workflows/changelog-existence.yml  vendored
@@ -2,10 +2,8 @@
# Checks that a file has been committed under the /.changes directory
# as a new CHANGELOG entry. Cannot check for a specific filename as
# it is dynamically generated by change type and timestamp.
# This workflow should not require any secrets since it runs for PRs
# from forked repos.
# By default, secrets are not passed to workflows running from
# a forked repo.
# This workflow runs on pull_request_target because it requires
# secrets to post comments.

# **why?**
# Ensure code change gets reflected in the CHANGELOG.
@@ -19,8 +17,10 @@
name: Check Changelog Entry

on:
pull_request:
pull_request_target:
types: [opened, reopened, labeled, unlabeled, synchronize]
paths-ignore: ['.changes/**', '.github/**', 'tests/**', '**.md', '**.yml']

workflow_dispatch:

defaults:
41  .github/workflows/check-artifact-changes.yml  vendored
@@ -0,0 +1,41 @@
name: Check Artifact Changes

on:
pull_request:
types: [ opened, reopened, labeled, unlabeled, synchronize ]
paths-ignore: [ '.changes/**', '.github/**', 'tests/**', '**.md', '**.yml' ]

workflow_dispatch:

jobs:
check-artifact-changes:
runs-on: ubuntu-latest
if: ${{ !contains(github.event.pull_request.labels.*.name, 'artifact_minor_upgrade') }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0

- name: Check for changes in core/dbt/artifacts
# https://github.com/marketplace/actions/paths-changes-filter
uses: dorny/paths-filter@v3
id: check_artifact_changes
with:
filters: |
artifacts_changed:
- 'core/dbt/artifacts/**'
list-files: shell

- name: Fail CI if artifacts have changed
if: steps.check_artifact_changes.outputs.artifacts_changed == 'true'
run: |
echo "CI failure: Artifact changes checked in core/dbt/artifacts directory."
echo "Files changed: ${{ steps.check_artifact_changes.outputs.artifacts_changed_files }}"
echo "To bypass this check, confirm that the change is not breaking (https://github.com/dbt-labs/dbt-core/blob/main/core/dbt/artifacts/README.md#breaking-changes) and add the 'artifact_minor_upgrade' label to the PR."
exit 1

- name: CI check passed
if: steps.check_artifact_changes.outputs.artifacts_changed == 'false'
run: |
echo "No prohibited artifact changes found in core/dbt/artifacts. CI check passed."
39  .github/workflows/community-label.yml  vendored
@@ -0,0 +1,39 @@
# **what?**
# Label a PR with a `community` label when a PR is opened by a user outside core/adapters

# **why?**
# To streamline triage and ensure that community contributions are recognized and prioritized

# **when?**
# When a PR is opened, not in draft or moved from draft to ready for review

name: Label community PRs

on:
# have to use pull_request_target since community PRs come from forks
pull_request_target:
types: [opened, ready_for_review]

defaults:
run:
shell: bash

permissions:
pull-requests: write # labels PRs
contents: read # reads team membership

jobs:
open_issues:
# If this PR already has the community label, no need to relabel it
# If this PR is opened and not draft, determine if it needs to be labeled
# if the PR is converted out of draft, determine if it needs to be labeled
if: |
(!contains(github.event.pull_request.labels.*.name, 'community') &&
(github.event.action == 'opened' && github.event.pull_request.draft == false ) ||
github.event.action == 'ready_for_review' )
uses: dbt-labs/actions/.github/workflows/label-community.yml@main
with:
github_team: 'core-group'
label: 'community'
secrets: inherit
41  .github/workflows/docs-issue.yml  vendored
@@ -0,0 +1,41 @@
# **what?**
# Open an issue in docs.getdbt.com when an issue is labeled `user docs` and closed as completed

# **why?**
# To reduce barriers for keeping docs up to date

# **when?**
# When an issue is labeled `user docs` and is closed as completed. Can be labeled before or after the issue is closed.

name: Open issues in docs.getdbt.com repo when an issue is labeled
run-name: "Open an issue in docs.getdbt.com for issue #${{ github.event.issue.number }}"

on:
issues:
types: [labeled, closed]

defaults:
run:
shell: bash

permissions:
issues: write # comments on issues

jobs:
open_issues:
# we only want to run this when the issue is closed as completed and the label `user docs` has been assigned.
# If this logic does not exist in this workflow, it runs the
# risk of duplicaton of issues being created due to merge and label both triggering this workflow to run and neither having
# generating the comment before the other runs. This lives here instead of the shared workflow because this is where we
# decide if it should run or not.
if: |
(github.event.issue.state == 'closed' &&
github.event.issue.state_reason == 'completed' &&
contains( github.event.issue.labels.*.name, 'user docs'))
uses: dbt-labs/actions/.github/workflows/open-issue-in-repo.yml@main
with:
issue_repository: "dbt-labs/docs.getdbt.com"
issue_title: "Docs Changes Needed from ${{ github.event.repository.name }} Issue #${{ github.event.issue.number }}"
issue_body: "At a minimum, update body to include a link to the page on docs.getdbt.com requiring updates and what part(s) of the page you would like to see updated."
secrets: inherit
26  .github/workflows/jira-creation.yml  vendored
@@ -1,26 +0,0 @@
# **what?**
# Mirrors issues into Jira. Includes the information: title,
# GitHub Issue ID and URL

# **why?**
# Jira is our tool for tracking and we need to see these issues in there

# **when?**
# On issue creation or when an issue is labeled `Jira`

name: Jira Issue Creation

on:
issues:
types: [opened, labeled]

permissions:
issues: write

jobs:
call-creation-action:
uses: dbt-labs/actions/.github/workflows/jira-creation-actions.yml@main
secrets:
JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }}
26  .github/workflows/jira-label.yml  vendored
@@ -1,26 +0,0 @@
# **what?**
# Calls mirroring Jira label Action. Includes adding a new label
# to an existing issue or removing a label as well

# **why?**
# Jira is our tool for tracking and we need to see these labels in there

# **when?**
# On labels being added or removed from issues

name: Jira Label Mirroring

on:
issues:
types: [labeled, unlabeled]

permissions:
issues: read

jobs:
call-label-action:
uses: dbt-labs/actions/.github/workflows/jira-label-actions.yml@main
secrets:
JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }}
27  .github/workflows/jira-transition.yml  vendored
@@ -1,27 +0,0 @@
# **what?**
# Transition a Jira issue to a new state
# Only supports these GitHub Issue transitions:
# closed, deleted, reopened

# **why?**
# Jira needs to be kept up-to-date

# **when?**
# On issue closing, deletion, reopened

name: Jira Issue Transition

on:
issues:
types: [closed, deleted, reopened]

# no special access is needed
permissions: read-all

jobs:
call-transition-action:
uses: dbt-labs/actions/.github/workflows/jira-transition-actions.yml@main
secrets:
JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }}
78  .github/workflows/main.yml  vendored
@@ -36,7 +36,7 @@ defaults:
# top-level adjustments can be made here
env:
# number of parallel processes to spawn for python integration testing
PYTHON_INTEGRATION_TEST_WORKERS: ${{ vars.PYTHON_INTEGRATION_TEST_WORKERS }}
PYTHON_INTEGRATION_TEST_WORKERS: 5

jobs:
code-quality:
@@ -47,12 +47,12 @@ jobs:

steps:
- name: Check out the repository
uses: actions/checkout@v3
uses: actions/checkout@v4

- name: Set up Python
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: '3.8'
python-version: '3.9'

- name: Install python dependencies
run: |
@@ -74,17 +74,17 @@ jobs:
strategy:
fail-fast: false
matrix:
python-version: ["3.8", "3.9", "3.10", "3.11"]
python-version: [ "3.9", "3.10", "3.11", "3.12" ]

env:
TOXENV: "unit"

steps:
- name: Check out the repository
uses: actions/checkout@v3
uses: actions/checkout@v4

- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}

@@ -95,8 +95,12 @@ jobs:
python -m pip install tox
tox --version

- name: Run tox
run: tox
- name: Run unit tests
uses: nick-fields/retry@v3
with:
timeout_minutes: 10
max_attempts: 3
command: tox -e unit

- name: Get current date
if: always()
@@ -107,9 +111,10 @@ jobs:

- name: Upload Unit Test Coverage to Codecov
if: ${{ matrix.python-version == '3.11' }}
uses: codecov/codecov-action@v3
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
uses: codecov/codecov-action@v4
with:
token: ${{ secrets.CODECOV_TOKEN }}
flags: unit

integration-metadata:
name: integration test metadata generation
@@ -134,7 +139,7 @@ jobs:
- name: generate include
id: generate-include
run: |
INCLUDE=('"python-version":"3.8","os":"windows-latest"' '"python-version":"3.8","os":"macos-latest"' )
INCLUDE=('"python-version":"3.9","os":"windows-latest"' '"python-version":"3.9","os":"macos-12"' )
INCLUDE_GROUPS="["
for include in ${INCLUDE[@]}; do
for group in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
@@ -156,7 +161,7 @@ jobs:
strategy:
fail-fast: false
matrix:
python-version: ["3.8", "3.9", "3.10", "3.11"]
python-version: [ "3.9", "3.10", "3.11", "3.12" ]
os: [ubuntu-20.04]
split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
include: ${{ fromJson(needs.integration-metadata.outputs.include) }}
@@ -174,10 +179,10 @@ jobs:

steps:
- name: Check out the repository
uses: actions/checkout@v3
uses: actions/checkout@v4

- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}

@@ -200,8 +205,12 @@ jobs:
python -m pip install tox
tox --version

- name: Run tests
run: tox -- --ddtrace
- name: Run integration tests
uses: nick-fields/retry@v3
with:
timeout_minutes: 30
max_attempts: 3
command: tox -- --ddtrace
env:
PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}

@@ -212,26 +221,35 @@ jobs:
CURRENT_DATE=$(date +'%Y-%m-%dT%H_%M_%S') # no colons allowed for artifacts
echo "date=$CURRENT_DATE" >> $GITHUB_OUTPUT

- uses: actions/upload-artifact@v3
- uses: actions/upload-artifact@v4
if: always()
with:
name: logs_${{ matrix.python-version }}_${{ matrix.os }}_${{ steps.date.outputs.date }}
name: logs_${{ matrix.python-version }}_${{ matrix.os }}_${{ matrix.split-group }}_${{ steps.date.outputs.date }}
path: ./logs

- name: Upload Integration Test Coverage to Codecov
if: ${{ matrix.python-version == '3.11' }}
uses: codecov/codecov-action@v3
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
uses: codecov/codecov-action@v4
with:
token: ${{ secrets.CODECOV_TOKEN }}
flags: integration

integration-report:
name: integration test suite
if: ${{ always() }}
name: Integration Test Suite
runs-on: ubuntu-latest
needs: integration
steps:
- name: "[Notification] Integration test suite passes"
- name: "Integration Tests Failed"
if: ${{ contains(needs.integration.result, 'failure') || contains(needs.integration.result, 'cancelled') }}
# when this is true the next step won't execute
run: |
echo "::notice title="Integration test suite passes""
echo "::notice title='Integration test suite failed'"
exit 1

- name: "Integration Tests Passed"
run: |
echo "::notice title='Integration test suite passed'"

build:
name: build packages
@@ -240,12 +258,12 @@ jobs:

steps:
- name: Check out the repository
uses: actions/checkout@v3
uses: actions/checkout@v4

- name: Set up Python
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: '3.8'
python-version: '3.9'

- name: Install python dependencies
run: |
@@ -278,7 +296,7 @@ jobs:
- name: Install source distributions
# ignore dbt-1.0.0, which intentionally raises an error when installed from source
run: |
find ./dist/dbt-[a-z]*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/
find ./dist/*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/

- name: Check source distributions
run: |
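The `generate include` step above fans every OS/Python `INCLUDE` entry out across the split groups; the append inside the loop is truncated in this hunk, but a rough Python model of what it presumably builds (worker count and entries mirror the values shown):

import json

workers = 5  # mirrors PYTHON_INTEGRATION_TEST_WORKERS
include = [
    {"python-version": "3.9", "os": "windows-latest"},
    {"python-version": "3.9", "os": "macos-12"},
]

# Each entry is repeated once per split group, yielding 2 * 5 = 10 matrix rows.
include_groups = [
    {**entry, "split-group": str(group)}
    for entry in include
    for group in range(1, workers + 1)
]
print(json.dumps(include_groups))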
22  .github/workflows/model_performance.yml  vendored
@@ -48,7 +48,7 @@ jobs:
# explicitly checkout the performance runner from main regardless of which
# version we are modeling.
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
ref: main

@@ -87,12 +87,12 @@ jobs:
# explicitly checkout the performance runner from main regardless of which
# version we are modeling.
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
ref: main

# attempts to access a previously cached runner
- uses: actions/cache@v3
- uses: actions/cache@v4
id: cache
with:
path: ${{ env.RUNNER_CACHE_PATH }}
@@ -148,9 +148,9 @@ jobs:
echo "release_branch: ${{ needs.set-variables.outputs.release_branch }}"

- name: Setup Python
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: "3.8"
python-version: "3.9"

- name: Install dbt
run: pip install dbt-postgres==${{ needs.set-variables.outputs.release_id }}
@@ -160,13 +160,13 @@ jobs:

# explicitly checkout main to get the latest project definitions
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
ref: main

# this was built in the previous job so it will be there.
- name: Fetch Runner
uses: actions/cache@v3
uses: actions/cache@v4
id: cache
with:
path: ${{ env.RUNNER_CACHE_PATH }}
@@ -195,7 +195,7 @@ jobs:
- name: '[DEBUG] ls baseline directory after run'
run: ls -R performance/baselines/

- uses: actions/upload-artifact@v3
- uses: actions/upload-artifact@v4
with:
name: baseline
path: performance/baselines/${{ needs.set-variables.outputs.release_id }}/
@@ -225,7 +225,7 @@ jobs:
echo "release_branch: ${{ needs.set-variables.outputs.release_branch }}"

- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
ref: ${{ matrix.base-branch }}

@@ -235,7 +235,7 @@ jobs:
git push origin ${{ matrix.target-branch }}
git branch --set-upstream-to=origin/${{ matrix.target-branch }} ${{ matrix.target-branch }}

- uses: actions/download-artifact@v3
- uses: actions/download-artifact@v4
with:
name: baseline
path: performance/baselines/${{ needs.set-variables.outputs.release_id }}
@@ -253,7 +253,7 @@ jobs:
push: 'origin origin/${{ matrix.target-branch }}'

- name: Create Pull Request
uses: peter-evans/create-pull-request@v5
uses: peter-evans/create-pull-request@v6
with:
author: 'Github Build Bot <buildbot@fishtownanalytics.com>'
base: ${{ matrix.base-branch }}
16  .github/workflows/nightly-release.yml  vendored
@@ -20,6 +20,7 @@ on:

permissions:
contents: write # this is the permission that allows creating a new release
packages: write # this is the permission that allows Docker release

defaults:
run:
@@ -33,22 +34,15 @@ jobs:
runs-on: ubuntu-latest

outputs:
commit_sha: ${{ steps.resolve-commit-sha.outputs.release_commit }}
version_number: ${{ steps.nightly-release-version.outputs.number }}
release_branch: ${{ steps.release-branch.outputs.name }}

steps:
- name: "Checkout ${{ github.repository }} Branch ${{ env.RELEASE_BRANCH }}"
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
ref: ${{ env.RELEASE_BRANCH }}

- name: "Resolve Commit To Release"
id: resolve-commit-sha
run: |
commit_sha=$(git rev-parse HEAD)
echo "release_commit=$commit_sha" >> $GITHUB_OUTPUT

- name: "Get Current Version Number"
id: version-number-sources
run: |
@@ -88,7 +82,6 @@ jobs:
steps:
- name: "[DEBUG] Log Outputs"
run: |
echo commit_sha : ${{ needs.aggregate-release-data.outputs.commit_sha }}
echo version_number: ${{ needs.aggregate-release-data.outputs.version_number }}
echo release_branch: ${{ needs.aggregate-release-data.outputs.release_branch }}

@@ -97,13 +90,8 @@ jobs:

uses: ./.github/workflows/release.yml
with:
sha: ${{ needs.aggregate-release-data.outputs.commit_sha }}
target_branch: ${{ needs.aggregate-release-data.outputs.release_branch }}
version_number: ${{ needs.aggregate-release-data.outputs.version_number }}
build_script_path: "scripts/build-dist.sh"
env_setup_script_path: "scripts/env-setup.sh"
s3_bucket_name: "core-team-artifacts"
package_test_command: "dbt --version"
test_run: true
nightly_release: true
secrets: inherit
118  .github/workflows/release-docker.yml  vendored
@@ -1,118 +0,0 @@
# **what?**
# This workflow will generate a series of docker images for dbt and push them to the github container registry

# **why?**
# Docker images for dbt are used in a number of important places throughout the dbt ecosystem. This is how we keep those images up-to-date.

# **when?**
# This is triggered manually

# **next steps**
# - build this into the release workflow (or conversly, break out the different release methods into their own workflow files)

name: Docker release

permissions:
packages: write

on:
workflow_dispatch:
inputs:
package:
description: The package to release. _One_ of [dbt-core, dbt-redshift, dbt-bigquery, dbt-snowflake, dbt-spark, dbt-postgres]
required: true
version_number:
description: The release version number (i.e. 1.0.0b1). Do not include `latest` tags or a leading `v`!
required: true

jobs:
get_version_meta:
name: Get version meta
runs-on: ubuntu-latest
outputs:
major: ${{ steps.version.outputs.major }}
minor: ${{ steps.version.outputs.minor }}
patch: ${{ steps.version.outputs.patch }}
latest: ${{ steps.latest.outputs.latest }}
minor_latest: ${{ steps.latest.outputs.minor_latest }}
steps:
- uses: actions/checkout@v3
- name: Split version
id: version
run: |
IFS="." read -r MAJOR MINOR PATCH <<< ${{ github.event.inputs.version_number }}
echo "major=$MAJOR" >> $GITHUB_OUTPUT
echo "minor=$MINOR" >> $GITHUB_OUTPUT
echo "patch=$PATCH" >> $GITHUB_OUTPUT

- name: Is pkg 'latest'
id: latest
uses: ./.github/actions/latest-wrangler
with:
package: ${{ github.event.inputs.package }}
new_version: ${{ github.event.inputs.version_number }}
gh_token: ${{ secrets.GITHUB_TOKEN }}
halt_on_missing: False

setup_image_builder:
name: Set up docker image builder
runs-on: ubuntu-latest
needs: [get_version_meta]
steps:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2

build_and_push:
name: Build images and push to GHCR
runs-on: ubuntu-latest
needs: [setup_image_builder, get_version_meta]
steps:
- name: Get docker build arg
id: build_arg
run: |
BUILD_ARG_NAME=$(echo ${{ github.event.inputs.package }} | sed 's/\-/_/g')
BUILD_ARG_VALUE=$(echo ${{ github.event.inputs.package }} | sed 's/postgres/core/g')
echo "build_arg_name=$BUILD_ARG_NAME" >> $GITHUB_OUTPUT
echo "build_arg_value=$BUILD_ARG_VALUE" >> $GITHUB_OUTPUT

- name: Log in to the GHCR
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}

- name: Build and push MAJOR.MINOR.PATCH tag
uses: docker/build-push-action@v4
with:
file: docker/Dockerfile
push: True
target: ${{ github.event.inputs.package }}
build-args: |
${{ steps.build_arg.outputs.build_arg_name }}_ref=${{ steps.build_arg.outputs.build_arg_value }}@v${{ github.event.inputs.version_number }}
tags: |
ghcr.io/dbt-labs/${{ github.event.inputs.package }}:${{ github.event.inputs.version_number }}

- name: Build and push MINOR.latest tag
uses: docker/build-push-action@v4
if: ${{ needs.get_version_meta.outputs.minor_latest == 'True' }}
with:
file: docker/Dockerfile
push: True
target: ${{ github.event.inputs.package }}
build-args: |
${{ steps.build_arg.outputs.build_arg_name }}_ref=${{ steps.build_arg.outputs.build_arg_value }}@v${{ github.event.inputs.version_number }}
tags: |
ghcr.io/dbt-labs/${{ github.event.inputs.package }}:${{ needs.get_version_meta.outputs.major }}.${{ needs.get_version_meta.outputs.minor }}.latest

- name: Build and push latest tag
uses: docker/build-push-action@v4
if: ${{ needs.get_version_meta.outputs.latest == 'True' }}
with:
file: docker/Dockerfile
push: True
target: ${{ github.event.inputs.package }}
build-args: |
${{ steps.build_arg.outputs.build_arg_name }}_ref=${{ steps.build_arg.outputs.build_arg_value }}@v${{ github.event.inputs.version_number }}
tags: |
ghcr.io/dbt-labs/${{ github.event.inputs.package }}:latest
148  .github/workflows/release.yml  vendored
@@ -7,6 +7,7 @@
# - run unit and integration tests against given commit;
# - build and package that SHA;
# - release it to GitHub and PyPI with that specific build;
# - release it to Docker
#
# **why?**
# Ensure an automated and tested release process
@@ -14,15 +15,12 @@
# **when?**
# This workflow can be run manually on demand or can be called by other workflows

name: Release to GitHub and PyPI
name: "Release to GitHub, PyPI & Docker"
run-name: "Release ${{ inputs.version_number }} to GitHub, PyPI & Docker"

on:
workflow_dispatch:
inputs:
sha:
description: "The last commit sha in the release"
type: string
required: true
target_branch:
description: "The branch to release from"
type: string
@@ -31,26 +29,6 @@ on:
description: "The release version number (i.e. 1.0.0b1)"
type: string
required: true
build_script_path:
description: "Build script path"
type: string
default: "scripts/build-dist.sh"
required: true
env_setup_script_path:
description: "Environment setup script path"
type: string
default: "scripts/env-setup.sh"
required: false
s3_bucket_name:
description: "AWS S3 bucket name"
type: string
default: "core-team-artifacts"
required: true
package_test_command:
description: "Package test command"
type: string
default: "dbt --version"
required: true
test_run:
description: "Test run (Publish release as draft)"
type: boolean
@@ -61,12 +39,13 @@ on:
type: boolean
default: false
required: false
only_docker:
description: "Only release Docker image, skip GitHub & PyPI"
type: boolean
default: false
required: false
workflow_call:
inputs:
sha:
description: "The last commit sha in the release"
type: string
required: true
target_branch:
description: "The branch to release from"
type: string
@@ -75,26 +54,6 @@ on:
description: "The release version number (i.e. 1.0.0b1)"
type: string
required: true
build_script_path:
description: "Build script path"
type: string
default: "scripts/build-dist.sh"
required: true
env_setup_script_path:
description: "Environment setup script path"
type: string
default: "scripts/env-setup.sh"
required: false
s3_bucket_name:
description: "AWS S3 bucket name"
type: string
default: "core-team-artifacts"
required: true
package_test_command:
description: "Package test command"
type: string
default: "dbt --version"
required: true
test_run:
description: "Test run (Publish release as draft)"
type: boolean
@@ -114,32 +73,47 @@ defaults:
shell: bash

jobs:
log-inputs:
job-setup:
name: Log Inputs
runs-on: ubuntu-latest
outputs:
starting_sha: ${{ steps.set_sha.outputs.starting_sha }}
steps:
- name: "[DEBUG] Print Variables"
run: |
echo The last commit sha in the release: ${{ inputs.sha }}
echo Inputs
echo The branch to release from: ${{ inputs.target_branch }}
echo The release version number: ${{ inputs.version_number }}
echo Build script path: ${{ inputs.build_script_path }}
echo Environment setup script path: ${{ inputs.env_setup_script_path }}
echo AWS S3 bucket name: ${{ inputs.s3_bucket_name }}
echo Package test command: ${{ inputs.package_test_command }}
echo Test run: ${{ inputs.test_run }}
echo Nightly release: ${{ inputs.nightly_release }}
echo Only Docker: ${{ inputs.only_docker }}

- name: "Checkout target branch"
uses: actions/checkout@v4
with:
ref: ${{ inputs.target_branch }}

# release-prep.yml really shouldn't take in the sha but since core + all adapters
# depend on it now this workaround lets us not input it manually with risk of error.
# The changes always get merged into the head so we can't use a specific commit for
# releases anyways.
- name: "Capture sha"
id: set_sha
run: |
echo "starting_sha=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT

bump-version-generate-changelog:
name: Bump package version, Generate changelog
needs: [job-setup]
if: ${{ !inputs.only_docker }}

uses: dbt-labs/dbt-release/.github/workflows/release-prep.yml@main

with:
sha: ${{ inputs.sha }}
sha: ${{ needs.job-setup.outputs.starting_sha }}
version_number: ${{ inputs.version_number }}
target_branch: ${{ inputs.target_branch }}
env_setup_script_path: ${{ inputs.env_setup_script_path }}
env_setup_script_path: "scripts/env-setup.sh"
test_run: ${{ inputs.test_run }}
nightly_release: ${{ inputs.nightly_release }}

@@ -147,7 +121,7 @@ jobs:

log-outputs-bump-version-generate-changelog:
name: "[Log output] Bump package version, Generate changelog"
if: ${{ !failure() && !cancelled() }}
if: ${{ !failure() && !cancelled() && !inputs.only_docker }}

needs: [bump-version-generate-changelog]

@@ -161,8 +135,8 @@ jobs:

build-test-package:
name: Build, Test, Package
if: ${{ !failure() && !cancelled() }}
needs: [bump-version-generate-changelog]
if: ${{ !failure() && !cancelled() && !inputs.only_docker }}
needs: [job-setup, bump-version-generate-changelog]

uses: dbt-labs/dbt-release/.github/workflows/build.yml@main

@@ -170,9 +144,9 @@ jobs:
sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
version_number: ${{ inputs.version_number }}
changelog_path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}
build_script_path: ${{ inputs.build_script_path }}
s3_bucket_name: ${{ inputs.s3_bucket_name }}
package_test_command: ${{ inputs.package_test_command }}
build_script_path: "scripts/build-dist.sh"
s3_bucket_name: "core-team-artifacts"
package_test_command: "dbt --version"
test_run: ${{ inputs.test_run }}
nightly_release: ${{ inputs.nightly_release }}

@@ -182,7 +156,7 @@ jobs:

github-release:
name: GitHub Release
if: ${{ !failure() && !cancelled() }}
if: ${{ !failure() && !cancelled() && !inputs.only_docker }}

needs: [bump-version-generate-changelog, build-test-package]

@@ -209,6 +183,51 @@ jobs:
PYPI_API_TOKEN: ${{ secrets.PYPI_API_TOKEN }}
TEST_PYPI_API_TOKEN: ${{ secrets.TEST_PYPI_API_TOKEN }}

determine-docker-package:
# dbt-postgres exists within dbt-core for versions 1.7 and earlier but is a separate package for 1.8 and later.
# determine if we need to release dbt-core or both dbt-core and dbt-postgres
name: Determine Docker Package
if: ${{ !failure() && !cancelled() }}
runs-on: ubuntu-latest
needs: [pypi-release]
outputs:
matrix: ${{ steps.determine-docker-package.outputs.matrix }}
steps:
- name: "Audit Version And Parse Into Parts"
id: semver
uses: dbt-labs/actions/parse-semver@v1.1.0
with:
version: ${{ inputs.version_number }}

- name: "Determine Packages to Release"
id: determine-docker-package
run: |
if [ ${{ steps.semver.outputs.minor }} -ge 8 ]; then
json_output={\"package\":[\"dbt-core\"]}
else
json_output={\"package\":[\"dbt-core\",\"dbt-postgres\"]}
fi
echo "matrix=$json_output" >> $GITHUB_OUTPUT

docker-release:
name: "Docker Release for ${{ matrix.package }}"
needs: [determine-docker-package]
# We cannot release to docker on a test run because it uses the tag in GitHub as
# what we need to release but draft releases don't actually tag the commit so it
# finds nothing to release
if: ${{ !failure() && !cancelled() && (!inputs.test_run || inputs.only_docker) }}
strategy:
matrix: ${{fromJson(needs.determine-docker-package.outputs.matrix)}}

permissions:
packages: write

uses: dbt-labs/dbt-release/.github/workflows/release-docker.yml@main
with:
package: ${{ matrix.package }}
version_number: ${{ inputs.version_number }}
test_run: ${{ inputs.test_run }}

slack-notification:
name: Slack Notification
if: ${{ failure() && (!inputs.test_run || inputs.nightly_release) }}
@@ -219,6 +238,7 @@ jobs:
build-test-package,
github-release,
pypi-release,
docker-release,
]

uses: dbt-labs/dbt-release/.github/workflows/slack-post-notification.yml@main
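The `Determine Packages to Release` step above encodes the dbt-postgres split. A one-line Python restatement of the same branch (the function name is illustrative, not part of the repo):

def docker_release_matrix(minor: int) -> dict:
    # dbt-postgres ships from this repo only through 1.7; from 1.8 on it is
    # a separate package, so only dbt-core is released to Docker here.
    if minor >= 8:
        return {"package": ["dbt-core"]}
    return {"package": ["dbt-core", "dbt-postgres"]}

assert docker_release_matrix(8) == {"package": ["dbt-core"]}
assert docker_release_matrix(7) == {"package": ["dbt-core", "dbt-postgres"]}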
30  .github/workflows/repository-cleanup.yml  vendored
@@ -0,0 +1,30 @@
# **what?**
# Cleanup branches left over from automation and testing. Also cleanup
# draft releases from release testing.

# **why?**
# The automations are leaving behind branches and releases that clutter
# the repository. Sometimes we need them to debug processes so we don't
# want them immediately deleted. Running on Saturday to avoid running
# at the same time as an actual release to prevent breaking a release
# mid-release.

# **when?**
# Mainly on a schedule of 12:00 Saturday.
# Manual trigger can also run on demand

name: Repository Cleanup

on:
schedule:
- cron: '0 12 * * SAT' # At 12:00 on Saturday - details in `why` above

workflow_dispatch: # for manual triggering

permissions:
contents: write

jobs:
cleanup-repo:
uses: dbt-labs/actions/.github/workflows/repository-cleanup.yml@main
secrets: inherit
60  .github/workflows/schema-check.yml  vendored
@@ -13,20 +13,18 @@
name: Artifact Schema Check

on:
pull_request:
types: [ opened, reopened, labeled, unlabeled, synchronize ]
paths-ignore: [ '.changes/**', '.github/**', 'tests/**', '**.md', '**.yml' ]

workflow_dispatch:
pull_request: #TODO: remove before merging
push:
branches:
- "develop"
- "*.latest"
- "releases/*"

# no special access is needed
permissions: read-all

env:
LATEST_SCHEMA_PATH: ${{ github.workspace }}/new_schemas
SCHEMA_DIFF_ARTIFACT: ${{ github.workspace }}//schema_schanges.txt
SCHEMA_DIFF_ARTIFACT: ${{ github.workspace }}/schema_changes.txt
DBT_REPO_DIRECTORY: ${{ github.workspace }}/dbt
SCHEMA_REPO_DIRECTORY: ${{ github.workspace }}/schemas.getdbt.com

@@ -37,24 +35,41 @@ jobs:

steps:
- name: Set up Python
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: 3.8
python-version: 3.9

- name: Checkout dbt repo
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
path: ${{ env.DBT_REPO_DIRECTORY }}

- name: Check for changes in core/dbt/artifacts
# https://github.com/marketplace/actions/paths-changes-filter
uses: dorny/paths-filter@v3
id: check_artifact_changes
with:
filters: |
artifacts_changed:
- 'core/dbt/artifacts/**'
list-files: shell
working-directory: ${{ env.DBT_REPO_DIRECTORY }}

- name: Succeed if no artifacts have changed
if: steps.check_artifact_changes.outputs.artifacts_changed == 'false'
run: |
echo "No artifact changes found in core/dbt/artifacts. CI check passed."

- name: Checkout schemas.getdbt.com repo
uses: actions/checkout@v3
if: steps.check_artifact_changes.outputs.artifacts_changed == 'true'
uses: actions/checkout@v4
with:
repository: dbt-labs/schemas.getdbt.com
ref: 'main'
ssh-key: ${{ secrets.SCHEMA_SSH_PRIVATE_KEY }}
path: ${{ env.SCHEMA_REPO_DIRECTORY }}

- name: Generate current schema
if: steps.check_artifact_changes.outputs.artifacts_changed == 'true'
run: |
cd ${{ env.DBT_REPO_DIRECTORY }}
python3 -m venv env
@@ -65,26 +80,17 @@ jobs:

# Copy generated schema files into the schemas.getdbt.com repo
# Do a git diff to find any changes
# Ignore any date or version changes though
# Ignore any lines with date-like (yyyy-mm-dd) or version-like (x.y.z) changes
- name: Compare schemas
if: steps.check_artifact_changes.outputs.artifacts_changed == 'true'
run: |
cp -r ${{ env.LATEST_SCHEMA_PATH }}/dbt ${{ env.SCHEMA_REPO_DIRECTORY }}
cd ${{ env.SCHEMA_REPO_DIRECTORY }}
diff_results=$(git diff -I='*[0-9]{4}-(0[1-9]|1[0-2])-(0[1-9]|[1-2][0-9]|3[0-1])T' \
-I='*[0-9]{1}.[0-9]{2}.[0-9]{1}(rc[0-9]|b[0-9]| )' --compact-summary)
if [[ $(echo diff_results) ]]; then
echo $diff_results
echo "Schema changes detected!"
git diff -I='*[0-9]{4}-(0[1-9]|1[0-2])-(0[1-9]|[1-2][0-9]|3[0-1])T' \
-I='*[0-9]{1}.[0-9]{2}.[0-9]{1}(rc[0-9]|b[0-9]| )' > ${{ env.SCHEMA_DIFF_ARTIFACT }}
exit 1
else
echo "No schema changes detected"
fi
git diff -I='*[0-9]{4}-[0-9]{2}-[0-9]{2}' -I='*[0-9]+\.[0-9]+\.[0-9]+' --exit-code > ${{ env.SCHEMA_DIFF_ARTIFACT }}

- name: Upload schema diff
uses: actions/upload-artifact@v3
if: ${{ failure() }}
uses: actions/upload-artifact@v4
if: ${{ failure() && steps.check_artifact_changes.outputs.artifacts_changed == 'true' }}
with:
name: 'schema_schanges.txt'
name: 'schema_changes.txt'
path: '${{ env.SCHEMA_DIFF_ARTIFACT }}'
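The simplified `-I` patterns in the `Compare schemas` step above ignore date-like and version-like lines. A quick Python check of the two character classes (dropping the leading `*` the workflow's patterns carry; the sample lines are invented):

import re

date_like = re.compile(r"[0-9]{4}-[0-9]{2}-[0-9]{2}")
version_like = re.compile(r"[0-9]+\.[0-9]+\.[0-9]+")

assert date_like.search('"generated_at": "2024-05-01T12:00:00Z"')
assert version_like.search('"dbt_version": "1.8.0"')
assert not version_like.search('"schema_name": "manifest"')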
@@ -21,7 +21,7 @@ permissions: read-all
# top-level adjustments can be made here
env:
  # number of parallel processes to spawn for python testing
  PYTHON_INTEGRATION_TEST_WORKERS: ${{ vars.PYTHON_INTEGRATION_TEST_WORKERS }}
  PYTHON_INTEGRATION_TEST_WORKERS: 5

jobs:
  integration-metadata:
@@ -69,14 +69,14 @@ jobs:

    steps:
      - name: checkout dev
        uses: actions/checkout@v3
        uses: actions/checkout@v4
        with:
          persist-credentials: false

      - name: Setup Python
        uses: actions/setup-python@v4
        uses: actions/setup-python@v5
        with:
          python-version: "3.8"
          python-version: "3.9"

      - name: Install python dependencies
        run: |
@@ -94,7 +94,11 @@ jobs:
      # integration tests generate a ton of logs in different files. the next step will find them all.
      # we actually care if these pass, because the normal test run doesn't usually include many json log outputs
      - name: Run integration tests
        run: tox -e integration -- -nauto
        uses: nick-fields/retry@v3
        with:
          timeout_minutes: 30
          max_attempts: 3
          command: tox -e integration -- -nauto
        env:
          PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}
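The `--splits`/`--group` values interpolated into `PYTEST_ADDOPTS` are flags from the pytest-split plugin, so each matrix job runs one shard of the suite. A rough local equivalent, assuming pytest-split is installed (the test path is illustrative):

```sh
# Run shard 2 of 5 of the suite, mirroring one matrix job above.
python3 -m pytest tests/functional --splits 5 --group 2
```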
.github/workflows/test-repeater.yml (vendored, 9 changes)
@@ -27,7 +27,6 @@ on:
      description: 'Version of Python to Test Against'
      type: choice
      options:
        - '3.8'
        - '3.9'
        - '3.10'
        - '3.11'
@@ -36,7 +35,7 @@ on:
      type: choice
      options:
        - 'ubuntu-latest'
        - 'macos-latest'
        - 'macos-12'
        - 'windows-latest'
    num_runs_per_batch:
      description: 'Max number of times to run the test per batch. We always run 10 batches.'
@@ -83,12 +82,12 @@ jobs:

    steps:
      - name: "Checkout code"
        uses: actions/checkout@v3
        uses: actions/checkout@v4
        with:
          ref: ${{ inputs.branch }}

      - name: "Setup Python"
        uses: actions/setup-python@v4
        uses: actions/setup-python@v5
        with:
          python-version: "${{ inputs.python_version }}"

@@ -101,7 +100,7 @@ jobs:

      # mac and windows don't use make due to limitations with docker with those runners in GitHub
      - name: "Set up postgres (macos)"
        if: inputs.os == 'macos-latest'
        if: inputs.os == 'macos-12'
        uses: ./.github/actions/setup-postgres-macos

      - name: "Set up postgres (windows)"
.github/workflows/test/.actrc (vendored, 1 change)
@@ -1 +0,0 @@
-P ubuntu-latest=ghcr.io/catthehacker/ubuntu:act-latest
.github/workflows/test/.gitignore (vendored, 1 change)
@@ -1 +0,0 @@
.secrets
.github/workflows/test/.secrets.EXAMPLE (vendored, 1 change)
@@ -1 +0,0 @@
GITHUB_TOKEN=GH_PERSONAL_ACCESS_TOKEN_GOES_HERE
@@ -1,6 +0,0 @@
{
  "inputs": {
    "version_number": "1.0.1",
    "package": "dbt-postgres"
  }
}
.github/workflows/version-bump.yml (vendored, 28 changes)
@@ -1,28 +0,0 @@
# **what?**
# This workflow will take the new version number to bump to. With that
# it will run versionbump to update the version number everywhere in the
# code base and then run changie to create the corresponding changelog.
# A PR will be created with the changes that can be reviewed before committing.

# **why?**
# This is to aid in releasing dbt and making sure we have updated
# the version in all places and generated the changelog.

# **when?**
# This is triggered manually

name: Version Bump

on:
  workflow_dispatch:
    inputs:
      version_number:
        description: 'The version number to bump to (ex. 1.2.0, 1.3.0b1)'
        required: true

jobs:
  version_bump_and_changie:
    uses: dbt-labs/actions/.github/workflows/version-bump.yml@main
    with:
      version_number: ${{ inputs.version_number }}
    secrets: inherit # ok since what we are calling is internally maintained
@@ -1,9 +1,9 @@
# Configuration for pre-commit hooks (see https://pre-commit.com/).
# Eventually the hooks described here will be run as tests before merging each PR.

exclude: ^(core/dbt/docs/build/|core/dbt/events/types_pb2.py)
exclude: ^(core/dbt/docs/build/|core/dbt/common/events/types_pb2.py|core/dbt/events/core_types_pb2.py|core/dbt/adapters/events/adapter_types_pb2.py)

# Force all unspecified python hooks to run python 3.8
# Force all unspecified python hooks to run python 3.9
default_language_version:
  python: python3

@@ -37,7 +37,7 @@ repos:
      alias: flake8-check
      stages: [manual]
  - repo: https://github.com/pre-commit/mirrors-mypy
    rev: v1.4.0
    rev: v1.4.1
    hooks:
      - id: mypy
        # N.B.: Mypy is... a bit fragile.
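Once the hooks are installed (the Makefile's `dev` target further below runs `pre-commit install`), they can also be exercised against the whole tree. This is standard pre-commit usage rather than a command documented in this diff:

```sh
# Run every configured hook against all files, not just staged changes.
pre-commit run --all-files
```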
@@ -26,12 +26,13 @@ Legacy tests are found in the 'test' directory:

The "tasks" map to top-level dbt commands. So `dbt run` => task.run.RunTask, etc. Some are more like abstract base classes (GraphRunnableTask, for example), but all the concrete types outside of task should map to tasks. Currently only one task executes at a time. The tasks kick off their "Runners", and those do execute in parallel; the parallelism is managed via a thread pool, in GraphRunnableTask. A sketch of this shape follows below.
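The sketch below is illustrative only, with names heavily simplified (`RunTask` stands in for `dbt.task.run.RunTask`; the dispatch table and the runner body are invented):

```python
from concurrent.futures import ThreadPoolExecutor

class RunTask:
    """Stand-in for a concrete task; one task executes per invocation."""

    def execute_node(self, node: str) -> str:
        # A real Runner would compile and execute one node against the warehouse.
        return f"ran {node}"

    def run(self, nodes: list[str], threads: int = 4) -> list[str]:
        # Runners execute in parallel via a thread pool (cf. GraphRunnableTask).
        with ThreadPoolExecutor(max_workers=threads) as pool:
            return list(pool.map(self.execute_node, nodes))

TASKS = {"run": RunTask}  # `dbt run` => task.run.RunTask, etc.
print(TASKS["run"]().run(["model_a", "model_b"]))
```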
core/dbt/include/index.html
core/dbt/task/docs/index.html
This is the docs website code. It comes from the dbt-docs repository, and is generated when a release is packaged.

## Adapters

dbt uses an adapter-plugin pattern to extend support to different databases, warehouses, query engines, etc. For testing and development purposes, the dbt-postgres plugin lives alongside the dbt-core codebase, in the [`plugins`](plugins) subdirectory. Like other adapter plugins, it is a self-contained codebase and package that builds on top of dbt-core.
dbt uses an adapter-plugin pattern to extend support to different databases, warehouses, query engines, etc.
Note: dbt-postgres used to exist in dbt-core but is now in [its own repo](https://github.com/dbt-labs/dbt-postgres)

Each adapter is a mix of python, Jinja2, and SQL. The adapter code also makes heavy use of Jinja2 to wrap modular chunks of SQL functionality, define default implementations, and allow plugins to override it.
CHANGELOG.md (372 changes)
@@ -1,15 +1,385 @@
# dbt Core Changelog

- This file provides a full account of all changes to `dbt-core` and `dbt-postgres`
- This file provides a full account of all changes to `dbt-core`
- Changes are listed under the (pre)release in which they first appear. Subsequent releases include changes from previous releases.
- "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
- Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)

## dbt-core 1.8.8 - October 23, 2024

### Fixes

- Fix unit tests for incremental model with alias ([#10754](https://github.com/dbt-labs/dbt-core/issues/10754))

### Under the Hood

- Remove support and testing for Python 3.8, which is now EOL. ([#10861](https://github.com/dbt-labs/dbt-core/issues/10861))

### Dependencies

- Pin dbt-common and dbt-adapters with upper bound. ([#10895](https://github.com/dbt-labs/dbt-core/issues/10895))

### Contributors
- [@katsugeneration](https://github.com/katsugeneration) ([#10754](https://github.com/dbt-labs/dbt-core/issues/10754))
## dbt-core 1.8.7 - September 24, 2024

### Features

- Add support for behavior flags ([#10618](https://github.com/dbt-labs/dbt-core/issues/10618))

## dbt-core 1.8.6 - August 29, 2024

### Fixes

- Late render pre- and post-hooks configs in properties / schema YAML files ([#10603](https://github.com/dbt-labs/dbt-core/issues/10603))

### Under the Hood

- Improve speed of tree traversal when finding children, increasing build speed for some selectors ([#10434](https://github.com/dbt-labs/dbt-core/issues/10434))

### Contributors
- [@ttusing](https://github.com/ttusing) ([#10434](https://github.com/dbt-labs/dbt-core/issues/10434))

## dbt-core 1.8.5 - August 07, 2024

### Fixes

- respect --quiet and --warn-error-options for flag deprecations ([#10105](https://github.com/dbt-labs/dbt-core/issues/10105))

## dbt-core 1.8.4 - July 18, 2024

### Fixes

- Fix setting `silence` of `warn_error_options` via `dbt_project.yaml` flags ([#10160](https://github.com/dbt-labs/dbt-core/issues/10160))
- Limit data_tests deprecation to root_project ([#9835](https://github.com/dbt-labs/dbt-core/issues/9835))
- CLI flags should take precedence over env var flags ([#10304](https://github.com/dbt-labs/dbt-core/issues/10304))
- Fix error constructing warn_error_options ([#10452](https://github.com/dbt-labs/dbt-core/issues/10452))

## dbt-core 1.8.3 - June 20, 2024

### Features

- add --empty value to jinja context as flags.EMPTY ([#10317](https://github.com/dbt-labs/dbt-core/issues/10317))

### Fixes

- Don't warn on `unit_test` config paths that are properly used ([#10311](https://github.com/dbt-labs/dbt-core/issues/10311))

### Docs

- Fix npm security vulnerabilities as of June 2024 ([dbt-docs/#513](https://github.com/dbt-labs/dbt-docs/issues/513))

## dbt-core 1.8.2 - June 05, 2024

### Features

- Add --host flag to dbt docs serve, defaulting to '127.0.0.1' ([#10229](https://github.com/dbt-labs/dbt-core/issues/10229))

### Fixes

- Fix: Order-insensitive unit test equality assertion for expected/actual with multiple nulls ([#10167](https://github.com/dbt-labs/dbt-core/issues/10167))

## dbt-core 1.8.1 - May 22, 2024

### Fixes

- Add resource type to saved_query ([#10168](https://github.com/dbt-labs/dbt-core/issues/10168))

### Docs

- Enable display of unit tests ([dbt-docs/#501](https://github.com/dbt-labs/dbt-docs/issues/501))
- Unit tests not rendering ([dbt-docs/#506](https://github.com/dbt-labs/dbt-docs/issues/506))
- Add support for Saved Query node ([dbt-docs/#486](https://github.com/dbt-labs/dbt-docs/issues/486))

### Security

- Explicitly bind to localhost in docs serve ([#10209](https://github.com/dbt-labs/dbt-core/issues/10209))
## dbt-core 1.8.0 - May 09, 2024

### Breaking Changes

- Remove adapter.get_compiler interface ([#9148](https://github.com/dbt-labs/dbt-core/issues/9148))
- Move AdapterLogger to adapters folder ([#9151](https://github.com/dbt-labs/dbt-core/issues/9151))
- Rm --dry-run flag from 'dbt deps --add-package', in favor of just 'dbt deps --lock' ([#9100](https://github.com/dbt-labs/dbt-core/issues/9100))
- move event manager setup back to core, remove ref to global EVENT_MANAGER and clean up event manager functions ([#9150](https://github.com/dbt-labs/dbt-core/issues/9150))
- Remove dbt-tests-adapter and dbt-postgres packages from dbt-core ([#9455](https://github.com/dbt-labs/dbt-core/issues/9455))
- Update the default behaviour of require_explicit_package_overrides_for_builtin_materializations to True. ([#10062](https://github.com/dbt-labs/dbt-core/issues/10062))

### Features

- Initial implementation of unit testing ([#8287](https://github.com/dbt-labs/dbt-core/issues/8287))
- Unit test manifest artifacts and selection ([#8295](https://github.com/dbt-labs/dbt-core/issues/8295))
- Support config with tags & meta for unit tests ([#8294](https://github.com/dbt-labs/dbt-core/issues/8294))
- Allow adapters to include package logs in dbt standard logging ([#7859](https://github.com/dbt-labs/dbt-core/issues/7859))
- Enable inline csv fixtures in unit tests ([#8626](https://github.com/dbt-labs/dbt-core/issues/8626))
- Add drop_schema_named macro ([#8025](https://github.com/dbt-labs/dbt-core/issues/8025))
- migrate utils to common and adapters folders ([#8924](https://github.com/dbt-labs/dbt-core/issues/8924))
- Move Agate helper client into common ([#8926](https://github.com/dbt-labs/dbt-core/issues/8926))
- remove usage of dbt.config.PartialProject from dbt/adapters ([#8928](https://github.com/dbt-labs/dbt-core/issues/8928))
- Add exports to SavedQuery spec ([#8892](https://github.com/dbt-labs/dbt-core/issues/8892))
- Support unit testing incremental models ([#8422](https://github.com/dbt-labs/dbt-core/issues/8422))
- Add support of csv file fixtures to unit testing ([#8290](https://github.com/dbt-labs/dbt-core/issues/8290))
- Remove legacy logger ([#8027](https://github.com/dbt-labs/dbt-core/issues/8027))
- Unit tests support --defer and state:modified ([#8517](https://github.com/dbt-labs/dbt-core/issues/8517))
- Support setting export configs hierarchically via saved query and project configs ([#8956](https://github.com/dbt-labs/dbt-core/issues/8956))
- Support source inputs in unit tests ([#8507](https://github.com/dbt-labs/dbt-core/issues/8507))
- Use daff to render diff displayed in stdout when unit test fails ([#8558](https://github.com/dbt-labs/dbt-core/issues/8558))
- Global config for --target and --profile CLI flags and DBT_TARGET and DBT_PROFILE environment variables. ([#7798](https://github.com/dbt-labs/dbt-core/issues/7798))
- Move unit testing to test command ([#8979](https://github.com/dbt-labs/dbt-core/issues/8979))
- Support --empty flag for schema-only dry runs ([#8971](https://github.com/dbt-labs/dbt-core/issues/8971))
- Support unit tests in non-root packages ([#8285](https://github.com/dbt-labs/dbt-core/issues/8285))
- Convert the `tests` config to `data_tests` in both dbt_project.yml and schema files. ([#8699](https://github.com/dbt-labs/dbt-core/issues/8699))
- Make fixture files full-fledged parts of the manifest and enable partial parsing ([#9067](https://github.com/dbt-labs/dbt-core/issues/9067))
- Adds support for parsing conversion metric related properties for the semantic layer. ([#9203](https://github.com/dbt-labs/dbt-core/issues/9203))
- Package selector syntax for the current package ([#6891](https://github.com/dbt-labs/dbt-core/issues/6891))
- In build command run unit tests before models ([#9128](https://github.com/dbt-labs/dbt-core/issues/9128))
- Move flags from UserConfig in profiles.yml to flags in dbt_project.yml ([#9183](https://github.com/dbt-labs/dbt-core/issues/9183))
- Added hook support for `dbt source freshness` ([#5609](https://github.com/dbt-labs/dbt-core/issues/5609))
- Align with order of unit test output when `actual` differs from `expected` ([#9370](https://github.com/dbt-labs/dbt-core/issues/9370))
- Added support for external nodes in unit test nodes ([#8944](https://github.com/dbt-labs/dbt-core/issues/8944))
- Enable unit testing versioned models ([#9344](https://github.com/dbt-labs/dbt-core/issues/9344))
- Enable list command for unit tests ([#8508](https://github.com/dbt-labs/dbt-core/issues/8508))
- Integration Test Optimizations ([#9498](https://github.com/dbt-labs/dbt-core/issues/9498))
- Accelerate integration tests with caching. ([#9498](https://github.com/dbt-labs/dbt-core/issues/9498))
- Cache environment variables ([#9489](https://github.com/dbt-labs/dbt-core/issues/9489))
- Support meta at the config level for Metric nodes ([#9441](https://github.com/dbt-labs/dbt-core/issues/9441))
- Add cache to SavedQuery config ([#9540](https://github.com/dbt-labs/dbt-core/issues/9540))
- Support scrubbing secret vars ([#7247](https://github.com/dbt-labs/dbt-core/issues/7247))
- Allow excluding resource types for build, list, and clone commands, and provide env vars ([#9237](https://github.com/dbt-labs/dbt-core/issues/9237))
- SourceDefinition.meta represents source-level and table-level meta properties, instead of only table-level ([#9766](https://github.com/dbt-labs/dbt-core/issues/9766))
- Allow metrics in semantic layer filters. ([#9804](https://github.com/dbt-labs/dbt-core/issues/9804))
- Add wildcard support to the group selector method ([#9811](https://github.com/dbt-labs/dbt-core/issues/9811))
- source freshness precomputes metadata-based freshness in batch, if possible ([#8705](https://github.com/dbt-labs/dbt-core/issues/8705))
- Better error message when trying to select a disabled model ([#9747](https://github.com/dbt-labs/dbt-core/issues/9747))
- Support SQL in unit testing fixtures ([#9405](https://github.com/dbt-labs/dbt-core/issues/9405))
- Add require_explicit_package_overrides_for_builtin_materializations to dbt_project.yml flags, which can be used to opt-out of overriding built-in materializations from packages ([#10007](https://github.com/dbt-labs/dbt-core/issues/10007))
- add --empty flag to dbt build command ([#10026](https://github.com/dbt-labs/dbt-core/issues/10026))
- Ability to `silence` warnings via `warn_error_options` ([#9644](https://github.com/dbt-labs/dbt-core/issues/9644))
- Allow aliases `error` for `include` and `warn` for `exclude` in `warn_error_options` ([#9644](https://github.com/dbt-labs/dbt-core/issues/9644))
- Add unit_test: selection method ([#10053](https://github.com/dbt-labs/dbt-core/issues/10053))
### Fixes

- For packages installed with tarball method, fetch metadata to resolve nested dependencies ([#8621](https://github.com/dbt-labs/dbt-core/issues/8621))
- Fix partial parsing not working for semantic model change ([#8859](https://github.com/dbt-labs/dbt-core/issues/8859))
- Handle unknown `type_code` for model contracts ([#8877](https://github.com/dbt-labs/dbt-core/issues/8877), [#8353](https://github.com/dbt-labs/dbt-core/issues/8353))
- Rework get_catalog implementation to retain previous adapter interface semantics ([#8846](https://github.com/dbt-labs/dbt-core/issues/8846))
- Add back contract enforcement for temporary tables on postgres ([#8857](https://github.com/dbt-labs/dbt-core/issues/8857))
- Add version to fqn when version==0 ([#8836](https://github.com/dbt-labs/dbt-core/issues/8836))
- Fix cased comparison in catalog-retrieval function. ([#8939](https://github.com/dbt-labs/dbt-core/issues/8939))
- Catalog queries now assign the correct type to materialized views ([#8864](https://github.com/dbt-labs/dbt-core/issues/8864))
- Fix compilation exception running empty seed file and support new Integer agate data_type ([#8895](https://github.com/dbt-labs/dbt-core/issues/8895))
- Make relation filtering None-tolerant for maximal flexibility across adapters. ([#8974](https://github.com/dbt-labs/dbt-core/issues/8974))
- Update run_results.json from previous versions of dbt to support deferral and rerun from failure ([#9010](https://github.com/dbt-labs/dbt-core/issues/9010))
- Use MANIFEST.in to recursively include all jinja templates; fixes issue where some templates were not included in the distribution ([#9016](https://github.com/dbt-labs/dbt-core/issues/9016))
- Fix git repository with subdirectory for Deps ([#9000](https://github.com/dbt-labs/dbt-core/issues/9000))
- Use seed file from disk for unit testing if rows not specified in YAML config ([#8652](https://github.com/dbt-labs/dbt-core/issues/8652))
- Fix formatting of tarball information in packages-lock.yml ([#9062](https://github.com/dbt-labs/dbt-core/issues/9062))
- deps: Lock git packages to commit SHA during resolution ([#9050](https://github.com/dbt-labs/dbt-core/issues/9050))
- deps: Use PackageRenderer to read package-lock.json ([#9127](https://github.com/dbt-labs/dbt-core/issues/9127))
- Ensure we produce valid jsonschema schemas for manifest, catalog, run-results, and sources ([#8991](https://github.com/dbt-labs/dbt-core/issues/8991))
- Get sources working again in dbt docs generate ([#9119](https://github.com/dbt-labs/dbt-core/issues/9119))
- Fix parsing f-strings in python models ([#6976](https://github.com/dbt-labs/dbt-core/issues/6976))
- Preserve the value of vars and the --full-refresh flags when using retry. ([#9112](https://github.com/dbt-labs/dbt-core/issues/9112))
- fix lock-file bad indentation ([#9319](https://github.com/dbt-labs/dbt-core/issues/9319))
- fix configuration of turning test warnings into failures with WARN_ERROR_OPTIONS ([#7761](https://github.com/dbt-labs/dbt-core/issues/7761))
- Support reasonably long unit test names ([#9015](https://github.com/dbt-labs/dbt-core/issues/9015))
- Fix back-compat parsing for model-level 'tests', source table-level 'tests', and 'tests' defined on model versions ([#9411](https://github.com/dbt-labs/dbt-core/issues/9411))
- Fix retry command run from CLI ([#9444](https://github.com/dbt-labs/dbt-core/issues/9444))
- Fix seed and source selection in `dbt docs generate` ([#9161](https://github.com/dbt-labs/dbt-core/issues/9161))
- Add TestGenerateCatalogWithExternalNodes, include empty nodes in node selection during docs generate ([#9456](https://github.com/dbt-labs/dbt-core/issues/9456))
- Fix node type plurals in FoundStats log message ([#9464](https://github.com/dbt-labs/dbt-core/issues/9464))
- Run manifest upgrade preprocessing on any older manifest version, including v11 ([#9487](https://github.com/dbt-labs/dbt-core/issues/9487))
- Update 'compiled_code' context member logic to route based on command ('clone' or not). Reimplement 'sql' context member as wrapper of 'compiled_code'. ([#9502](https://github.com/dbt-labs/dbt-core/issues/9502))
- Fix bug where Semantic Layer filter strings are parsed into lists. ([#9507](https://github.com/dbt-labs/dbt-core/issues/9507))
- Initialize invocation context before test fixtures are built. ([#9489](https://github.com/dbt-labs/dbt-core/issues/9489))
- Fix conflict with newer versions of Snowplow tracker ([#8719](https://github.com/dbt-labs/dbt-core/issues/8719))
- When patching versioned models, set constraints after config ([#9364](https://github.com/dbt-labs/dbt-core/issues/9364))
- only include unmodified semantic models in state:modified selection ([#9548](https://github.com/dbt-labs/dbt-core/issues/9548))
- Set query headers when manifest is passed in to dbtRunner ([#9546](https://github.com/dbt-labs/dbt-core/issues/9546))
- Store node_info in node associated logging events ([#9557](https://github.com/dbt-labs/dbt-core/issues/9557))
- Fix Semantic Model Compare node relations ([#9548](https://github.com/dbt-labs/dbt-core/issues/9548))
- Tighten exception handling to avoid worker thread hangs. ([#9583](https://github.com/dbt-labs/dbt-core/issues/9583))
- Clearer no-op logging in stubbed SavedQueryRunner ([#9533](https://github.com/dbt-labs/dbt-core/issues/9533))
- Fix node_info contextvar handling so incorrect node_info doesn't persist ([#8866](https://github.com/dbt-labs/dbt-core/issues/8866))
- Add target-path to retry ([#8948](https://github.com/dbt-labs/dbt-core/issues/8948))
- Do not add duplicate input_measures ([#9360](https://github.com/dbt-labs/dbt-core/issues/9360))
- Throw a ParsingError if a primary key constraint is defined on multiple columns or at both the column and model level. ([#9581](https://github.com/dbt-labs/dbt-core/issues/9581))
- Bug fix: don't parse Jinja in filters for input metrics or measures. ([#9582](https://github.com/dbt-labs/dbt-core/issues/9582))
- Fix traceback parsing for exceptions raised due to csv fixtures moved into or out of fixture/subfolders. ([#9570](https://github.com/dbt-labs/dbt-core/issues/9570))
- Fix partial parsing `KeyError` on deleted schema files ([#8860](https://github.com/dbt-labs/dbt-core/issues/8860))
- Support saved queries in `dbt list` ([#9532](https://github.com/dbt-labs/dbt-core/issues/9532))
- include sources in catalog.json when over 100 relations selected for catalog generation ([#9755](https://github.com/dbt-labs/dbt-core/issues/9755))
- Support overriding macros in packages in unit testing ([#9624](https://github.com/dbt-labs/dbt-core/issues/9624))
- Handle exceptions for failing on-run-* hooks in source freshness ([#9511](https://github.com/dbt-labs/dbt-core/issues/9511))
- Validation of unit test parsing for incremental models ([#9593](https://github.com/dbt-labs/dbt-core/issues/9593))
- Fix use of retry command on command using defer ([#9770](https://github.com/dbt-labs/dbt-core/issues/9770))
- Make the `args` variable un-modified by `dbt.invoke(args)` ([#8938](https://github.com/dbt-labs/dbt-core/issues/8938), [#9787](https://github.com/dbt-labs/dbt-core/issues/9787))
- Only create the packages-install-path / dbt_packages folder during dbt deps ([#6985](https://github.com/dbt-labs/dbt-core/issues/6985), [#9584](https://github.com/dbt-labs/dbt-core/issues/9584))
- Unit test path outputs ([#9608](https://github.com/dbt-labs/dbt-core/issues/9608))
- Fix assorted source freshness edgecases so check is run or actionable information is given ([#9078](https://github.com/dbt-labs/dbt-core/issues/9078))
- Fix Docker release process to account for both historical and current versions of `dbt-postgres` ([#9827](https://github.com/dbt-labs/dbt-core/issues/9827))
- Exclude password-like fields for considering reparse ([#9795](https://github.com/dbt-labs/dbt-core/issues/9795))
- Fixed query comments test ([#9860](https://github.com/dbt-labs/dbt-core/issues/9860))
- Begin warning people about spaces in model names ([#9397](https://github.com/dbt-labs/dbt-core/issues/9397))
- Add NodeRelation to SavedQuery Export ([#9534](https://github.com/dbt-labs/dbt-core/issues/9534))
- Disambiguate FreshnessConfigProblem error message ([#9891](https://github.com/dbt-labs/dbt-core/issues/9891))
- Use consistent secret scrubbing with the log function. ([#9987](https://github.com/dbt-labs/dbt-core/issues/9987))
- Validate against empty strings in package definitions ([#9985](https://github.com/dbt-labs/dbt-core/issues/9985))
- Fix default value for indirect selection in selector cannot be overwritten by CLI flag and env var ([#9976](https://github.com/dbt-labs/dbt-core/issues/9976), [#7673](https://github.com/dbt-labs/dbt-core/issues/7673))
- Simplify error message if test severity isn't 'warn' or 'error' ([#9715](https://github.com/dbt-labs/dbt-core/issues/9715))
- Support overriding source level loaded_at_field with a null table level definition ([#9320](https://github.com/dbt-labs/dbt-core/issues/9320))
- Undo conditional agate import to prevent UnresolvedTypeReferenceError during RunResult serialization ([#10098](https://github.com/dbt-labs/dbt-core/issues/10098))
- Restore previous behavior for --favor-state: only favor defer_relation if not selected in current command ([#10107](https://github.com/dbt-labs/dbt-core/issues/10107))
- Unit test fixture (csv) returns null for empty value ([#9881](https://github.com/dbt-labs/dbt-core/issues/9881))
### Docs

- Add analytics for dbt.com ([dbt-docs/#430](https://github.com/dbt-labs/dbt-docs/issues/430))
- fix get_custom_database docstring ([dbt-docs/#9003](https://github.com/dbt-labs/dbt-docs/issues/9003))
- Enable display of unit tests ([dbt-docs/#501](https://github.com/dbt-labs/dbt-docs/issues/501))
- Unit tests not rendering ([dbt-docs/#506](https://github.com/dbt-labs/dbt-docs/issues/506))

### Under the Hood

- Added more type annotations. ([#8537](https://github.com/dbt-labs/dbt-core/issues/8537))
- Add unit testing functional tests ([#8512](https://github.com/dbt-labs/dbt-core/issues/8512))
- Remove usage of dbt.include.global_project in dbt/adapters ([#8925](https://github.com/dbt-labs/dbt-core/issues/8925))
- Add a no-op runner for Saved Query ([#8893](https://github.com/dbt-labs/dbt-core/issues/8893))
- remove dbt.flags.MP_CONTEXT usage in dbt/adapters ([#8967](https://github.com/dbt-labs/dbt-core/issues/8967))
- Remove usage of dbt.flags.LOG_CACHE_EVENTS in dbt/adapters ([#8969](https://github.com/dbt-labs/dbt-core/issues/8969))
- Move CatalogRelationTypes test case to the shared test suite to be reused by adapter maintainers ([#8952](https://github.com/dbt-labs/dbt-core/issues/8952))
- Treat SystemExit as an interrupt if raised during node execution. ([#n/a](https://github.com/dbt-labs/dbt-core/issues/n/a))
- Removing unused 'documentable' ([#8871](https://github.com/dbt-labs/dbt-core/issues/8871))
- Remove use of dbt/core exceptions in dbt/adapter ([#8920](https://github.com/dbt-labs/dbt-core/issues/8920))
- Cache dbt plugin modules to improve integration test performance ([#9029](https://github.com/dbt-labs/dbt-core/issues/9029))
- Consolidate deferral methods & flags ([#7965](https://github.com/dbt-labs/dbt-core/issues/7965), [#8715](https://github.com/dbt-labs/dbt-core/issues/8715))
- Fix test_current_timestamp_matches_utc test; allow for MacOS runner system clock variance ([#9057](https://github.com/dbt-labs/dbt-core/issues/9057))
- Remove usage of dbt.deprecations in dbt/adapters, enable core & adapter-specific event types and protos ([#8927](https://github.com/dbt-labs/dbt-core/issues/8927), [#8918](https://github.com/dbt-labs/dbt-core/issues/8918))
- Clean up unused adapter folders ([#9123](https://github.com/dbt-labs/dbt-core/issues/9123))
- Move column constraints into common/contracts, removing another dependency of adapters on core. ([#9024](https://github.com/dbt-labs/dbt-core/issues/9024))
- Move dbt.semver to dbt.common.semver and update references. ([#9039](https://github.com/dbt-labs/dbt-core/issues/9039))
- Move lowercase utils method to common ([#9180](https://github.com/dbt-labs/dbt-core/issues/9180))
- Remove usages of dbt.clients.jinja in dbt/adapters ([#9205](https://github.com/dbt-labs/dbt-core/issues/9205))
- Remove usage of dbt.contracts in dbt/adapters ([#9208](https://github.com/dbt-labs/dbt-core/issues/9208))
- Remove usage of dbt.contracts.graph.nodes.ResultNode in dbt/adapters ([#9214](https://github.com/dbt-labs/dbt-core/issues/9214))
- Introduce RelationConfig Protocol, consolidate Relation.create_from ([#9215](https://github.com/dbt-labs/dbt-core/issues/9215))
- remove manifest from adapter.set_relations_cache signature ([#9217](https://github.com/dbt-labs/dbt-core/issues/9217))
- remove manifest from adapter catalog method signatures ([#9218](https://github.com/dbt-labs/dbt-core/issues/9218))
- Move BaseConfig, Metadata and various other contract classes from model_config to common/contracts/config ([#8919](https://github.com/dbt-labs/dbt-core/issues/8919))
- Add MacroResolverProtocol, remove lazy loading of manifest in adapter.execute_macro ([#9244](https://github.com/dbt-labs/dbt-core/issues/9244))
- pass query header context to MacroQueryStringSetter ([#9249](https://github.com/dbt-labs/dbt-core/issues/9249), [#9250](https://github.com/dbt-labs/dbt-core/issues/9250))
- add macro_context_generator on adapter ([#9247](https://github.com/dbt-labs/dbt-core/issues/9247))
- pass mp_context to adapter factory as argument instead of import ([#9025](https://github.com/dbt-labs/dbt-core/issues/9025))
- have dbt-postgres use RelationConfig protocol for materialized views ([#9292](https://github.com/dbt-labs/dbt-core/issues/9292))
- move system.py to common as dbt-bigquery relies on it to call gcloud ([#9293](https://github.com/dbt-labs/dbt-core/issues/9293))
- Reorganizing event definitions to define core events in dbt/events rather than dbt/common ([#9152](https://github.com/dbt-labs/dbt-core/issues/9152))
- move exceptions used only in dbt/common to dbt/common/exceptions ([#9332](https://github.com/dbt-labs/dbt-core/issues/9332))
- Remove usage of dbt.adapters.factory in dbt/common ([#9334](https://github.com/dbt-labs/dbt-core/issues/9334))
- Accept valid_error_names in WarnErrorOptions constructor, remove global usage of event modules ([#9337](https://github.com/dbt-labs/dbt-core/issues/9337))
- Move result objects to dbt.artifacts ([#9193](https://github.com/dbt-labs/dbt-core/issues/9193))
- dbt Labs OSS standardization of docs and templates. ([#9252](https://github.com/dbt-labs/dbt-core/issues/9252))
- Add dbt-common as a dependency and remove dbt/common ([#9357](https://github.com/dbt-labs/dbt-core/issues/9357))
- move cache exceptions to dbt/adapters ([#9362](https://github.com/dbt-labs/dbt-core/issues/9362))
- Clean up macro contexts. ([#9422](https://github.com/dbt-labs/dbt-core/issues/9422))
- Add the @requires.manifest decorator to the retry command. ([#9426](https://github.com/dbt-labs/dbt-core/issues/9426))
- Move WritableManifest + Documentation to dbt/artifacts ([#9378](https://github.com/dbt-labs/dbt-core/issues/9378), [#9379](https://github.com/dbt-labs/dbt-core/issues/9379))
- Define Macro and Group resources in dbt/artifacts ([#9381](https://github.com/dbt-labs/dbt-core/issues/9381), [#9382](https://github.com/dbt-labs/dbt-core/issues/9382))
- Move `SavedQuery` data definition to `dbt/artifacts` ([#9386](https://github.com/dbt-labs/dbt-core/issues/9386))
- Migrate data parts of `Metric` node to dbt/artifacts ([#9383](https://github.com/dbt-labs/dbt-core/issues/9383))
- Move data portion of `SemanticModel` to dbt/artifacts ([#9387](https://github.com/dbt-labs/dbt-core/issues/9387))
- Move data parts of `Exposure` class to dbt/artifacts ([#9380](https://github.com/dbt-labs/dbt-core/issues/9380))
- Split up deferral across parsing (adding 'defer_relation' from state manifest) and runtime ref resolution ([#9199](https://github.com/dbt-labs/dbt-core/issues/9199))
- Start using `Mergeable` from dbt-common ([#9505](https://github.com/dbt-labs/dbt-core/issues/9505))
- Move manifest nodes to artifacts ([#9388](https://github.com/dbt-labs/dbt-core/issues/9388))
- Move data parts of `SourceDefinition` class to dbt/artifacts ([#9384](https://github.com/dbt-labs/dbt-core/issues/9384))
- Remove uses of Replaceable class ([#7802](https://github.com/dbt-labs/dbt-core/issues/7802))
- Make dbt-core compatible with Python 3.12 ([#9007](https://github.com/dbt-labs/dbt-core/issues/9007))
- Restrict protobuf to major version 4. ([#9566](https://github.com/dbt-labs/dbt-core/issues/9566))
- Remove references to dbt.tracking and dbt.flags from dbt/artifacts ([#9390](https://github.com/dbt-labs/dbt-core/issues/9390))
- Remove unused key `wildcard` from MethodName enum ([#9641](https://github.com/dbt-labs/dbt-core/issues/9641))
- Implement primary key inference for model nodes ([#9652](https://github.com/dbt-labs/dbt-core/issues/9652))
- Define UnitTestDefinition resource in dbt/artifacts/resources ([#9667](https://github.com/dbt-labs/dbt-core/issues/9667))
- Use Manifest instead of WritableManifest in PreviousState and _get_deferred_manifest ([#9567](https://github.com/dbt-labs/dbt-core/issues/9567))
- Improve dbt CLI speed ([#4627](https://github.com/dbt-labs/dbt-core/issues/4627))
- Include node_info in various Result events ([#9619](https://github.com/dbt-labs/dbt-core/issues/9619))
- Remove non dbt.artifacts dbt.* imports from dbt/artifacts ([#9926](https://github.com/dbt-labs/dbt-core/issues/9926))
- Migrate to using `error_tag` provided by `dbt-common` ([#9914](https://github.com/dbt-labs/dbt-core/issues/9914))
- Add a test for semantic manifest and move test fixtures needed for it ([#9665](https://github.com/dbt-labs/dbt-core/issues/9665))
- Raise deprecation warning if installed package overrides built-in materialization ([#9971](https://github.com/dbt-labs/dbt-core/issues/9971))
- Use the SECRET_ENV_PREFIX from dbt_common instead of duplicating it in dbt-core ([#10018](https://github.com/dbt-labs/dbt-core/issues/10018))
- Enable use of record mode via environment variable ([#10045](https://github.com/dbt-labs/dbt-core/issues/10045))
- Consistent naming + deprecation warnings for "legacy behavior" flags ([#10062](https://github.com/dbt-labs/dbt-core/issues/10062))
- Enable use of context in serialization ([#10093](https://github.com/dbt-labs/dbt-core/issues/10093))
### Dependencies

- Bump actions/checkout from 3 to 4 ([#8781](https://github.com/dbt-labs/dbt-core/issues/8781))
- Begin using DSI 0.4.x ([#8892](https://github.com/dbt-labs/dbt-core/issues/8892))
- Update typing-extensions version to >=4.4 ([#9012](https://github.com/dbt-labs/dbt-core/issues/9012))
- Bump ddtrace from 2.1.7 to 2.3.0 ([#9132](https://github.com/dbt-labs/dbt-core/issues/9132))
- Bump freezegun from 0.3.12 to 1.3.0 ([#9197](https://github.com/dbt-labs/dbt-core/issues/9197))
- Bump actions/setup-python from 4 to 5 ([#9267](https://github.com/dbt-labs/dbt-core/issues/9267))
- Bump actions/download-artifact from 3 to 4 ([#9374](https://github.com/dbt-labs/dbt-core/issues/9374))
- Relax pathspec upper bound version restriction ([#9373](https://github.com/dbt-labs/dbt-core/issues/9373))
- remove dbt/adapters and add dependency on dbt-adapters ([#9430](https://github.com/dbt-labs/dbt-core/issues/9430))
- Bump actions/upload-artifact from 3 to 4 ([#9470](https://github.com/dbt-labs/dbt-core/issues/9470))
- Bump actions/cache from 3 to 4 ([#9471](https://github.com/dbt-labs/dbt-core/issues/9471))
- Bump peter-evans/create-pull-request from 5 to 6 ([#9552](https://github.com/dbt-labs/dbt-core/issues/9552))
- Restrict protobuf to 4.* versions ([#9566](https://github.com/dbt-labs/dbt-core/issues/9566))
- Bump codecov/codecov-action from 3 to 4 ([#9659](https://github.com/dbt-labs/dbt-core/issues/9659))
- Cap dbt-semantic-interfaces version range to <0.6 ([#9671](https://github.com/dbt-labs/dbt-core/issues/9671))
- Bump python from 3.10.7-slim-bullseye to 3.11.2-slim-bullseye in /docker ([#9687](https://github.com/dbt-labs/dbt-core/issues/9687))
- bump dbt-common to accept major version 1 ([#9690](https://github.com/dbt-labs/dbt-core/issues/9690))
- Remove duplicate dependency of protobuf in dev-requirements ([#9830](https://github.com/dbt-labs/dbt-core/issues/9830))
- Bump black from 23.3.0 to >=24.3.0,<25.0 ([#8074](https://github.com/dbt-labs/dbt-core/issues/8074))
- Update the agate pin to "agate>=1.7.0,<1.10" ([#9934](https://github.com/dbt-labs/dbt-core/issues/9934))

### Security

- Update Jinja2 to >= 3.1.3 to address CVE-2024-22195 ([#9638](https://github.com/dbt-labs/dbt-core/issues/9638))
- Bump sqlparse to >=0.5.0, <0.6.0 to address GHSA-2m57-hf25-phgg ([#9951](https://github.com/dbt-labs/dbt-core/issues/9951))
### Contributors
- [@LeoTheGriff](https://github.com/LeoTheGriff) ([#9003](https://github.com/dbt-labs/dbt-core/issues/9003))
- [@SamuelBFavarin](https://github.com/SamuelBFavarin) ([#9747](https://github.com/dbt-labs/dbt-core/issues/9747))
- [@WilliamDee](https://github.com/WilliamDee) ([#9203](https://github.com/dbt-labs/dbt-core/issues/9203))
- [@adamlopez](https://github.com/adamlopez) ([#8621](https://github.com/dbt-labs/dbt-core/issues/8621))
- [@akurdyukov](https://github.com/akurdyukov) ([#8719](https://github.com/dbt-labs/dbt-core/issues/8719))
- [@aliceliu](https://github.com/aliceliu) ([#9652](https://github.com/dbt-labs/dbt-core/issues/9652))
- [@asweet](https://github.com/asweet) ([#9641](https://github.com/dbt-labs/dbt-core/issues/9641))
- [@b-per](https://github.com/b-per) ([#430](https://github.com/dbt-labs/dbt-core/issues/430))
- [@barton996](https://github.com/barton996) ([#7798](https://github.com/dbt-labs/dbt-core/issues/7798), [#6891](https://github.com/dbt-labs/dbt-core/issues/6891))
- [@benmosher](https://github.com/benmosher) ([#n/a](https://github.com/dbt-labs/dbt-core/issues/n/a))
- [@colin-rorgers-dbt](https://github.com/colin-rorgers-dbt) ([#8919](https://github.com/dbt-labs/dbt-core/issues/8919))
- [@courtneyholcomb](https://github.com/courtneyholcomb) ([#9804](https://github.com/dbt-labs/dbt-core/issues/9804), [#9507](https://github.com/dbt-labs/dbt-core/issues/9507), [#9582](https://github.com/dbt-labs/dbt-core/issues/9582))
- [@damian3031](https://github.com/damian3031) ([#9860](https://github.com/dbt-labs/dbt-core/issues/9860))
- [@dwreeves](https://github.com/dwreeves) ([#4627](https://github.com/dbt-labs/dbt-core/issues/4627))
- [@edgarrmondragon](https://github.com/edgarrmondragon) ([#8719](https://github.com/dbt-labs/dbt-core/issues/8719))
- [@emmoop](https://github.com/emmoop) ([#9951](https://github.com/dbt-labs/dbt-core/issues/9951))
- [@heysweet](https://github.com/heysweet) ([#9811](https://github.com/dbt-labs/dbt-core/issues/9811))
- [@jx2lee](https://github.com/jx2lee) ([#9319](https://github.com/dbt-labs/dbt-core/issues/9319), [#7761](https://github.com/dbt-labs/dbt-core/issues/7761))
- [@l1xnan](https://github.com/l1xnan) ([#9007](https://github.com/dbt-labs/dbt-core/issues/9007))
- [@mederka](https://github.com/mederka) ([#6976](https://github.com/dbt-labs/dbt-core/issues/6976))
- [@mjkanji](https://github.com/mjkanji) ([#9934](https://github.com/dbt-labs/dbt-core/issues/9934))
- [@nielspardon](https://github.com/nielspardon) ([#7247](https://github.com/dbt-labs/dbt-core/issues/7247))
- [@niteshy](https://github.com/niteshy) ([#9830](https://github.com/dbt-labs/dbt-core/issues/9830))
- [@ofek1weiss](https://github.com/ofek1weiss) ([#5609](https://github.com/dbt-labs/dbt-core/issues/5609))
- [@peterallenwebb](https://github.com/peterallenwebb) ([#9112](https://github.com/dbt-labs/dbt-core/issues/9112))
- [@rzjfr](https://github.com/rzjfr) ([#9373](https://github.com/dbt-labs/dbt-core/issues/9373))
- [@slothkong](https://github.com/slothkong) ([#9570](https://github.com/dbt-labs/dbt-core/issues/9570))
- [@tlento](https://github.com/tlento) ([#9012](https://github.com/dbt-labs/dbt-core/issues/9012), [#9671](https://github.com/dbt-labs/dbt-core/issues/9671))
- [@tonayya](https://github.com/tonayya) ([#9252](https://github.com/dbt-labs/dbt-core/issues/9252))

## Previous Releases

For information on prior major and minor releases, see their changelogs:

* [1.7](https://github.com/dbt-labs/dbt-core/blob/1.7.latest/CHANGELOG.md)
* [1.6](https://github.com/dbt-labs/dbt-core/blob/1.6.latest/CHANGELOG.md)
* [1.5](https://github.com/dbt-labs/dbt-core/blob/1.5.latest/CHANGELOG.md)
* [1.4](https://github.com/dbt-labs/dbt-core/blob/1.4.latest/CHANGELOG.md)
@@ -10,6 +10,7 @@
6. [Debugging](#debugging)
7. [Adding or modifying a changelog entry](#adding-or-modifying-a-changelog-entry)
8. [Submitting a Pull Request](#submitting-a-pull-request)
9. [Troubleshooting Tips](#troubleshooting-tips)

## About this document

@@ -21,10 +22,10 @@ If you get stuck, we're happy to help! Drop us a line in the `#dbt-core-developm

### Notes

- **Adapters:** Is your issue or proposed code change related to a specific [database adapter](https://docs.getdbt.com/docs/available-adapters)? If so, please open issues, PRs, and discussions in that adapter's repository instead. The sole exception is Postgres; the `dbt-postgres` plugin lives in this repository (`dbt-core`).
- **Adapters:** Is your issue or proposed code change related to a specific [database adapter](https://docs.getdbt.com/docs/available-adapters)? If so, please open issues, PRs, and discussions in that adapter's repository instead.
- **CLA:** Please note that anyone contributing code to `dbt-core` must sign the [Contributor License Agreement](https://docs.getdbt.com/docs/contributor-license-agreements). If you are unable to sign the CLA, the `dbt-core` maintainers will unfortunately be unable to merge any of your Pull Requests. We welcome you to participate in discussions, open issues, and comment on existing ones.
- **Branches:** All pull requests from community contributors should target the `main` branch (default). If the change is needed as a patch for a minor version of dbt that has already been released (or is already a release candidate), a maintainer will backport the changes in your PR to the relevant "latest" release branch (`1.0.latest`, `1.1.latest`, ...). If an issue fix applies to a release branch, that fix should be first committed to the development branch and then to the release branch (rarely, release-branch fixes may not apply to `main`).
- **Releases**: Before releasing a new minor version of Core, we prepare a series of alphas and release candidates to allow users (especially employees of dbt Labs!) to test the new version in live environments. This is an important quality assurance step, as it exposes the new code to a wide variety of complicated deployments and can surface bugs before official release. Releases are accessible via pip, homebrew, and dbt Cloud.
- **Releases**: Before releasing a new minor version of Core, we prepare a series of alphas and release candidates to allow users (especially employees of dbt Labs!) to test the new version in live environments. This is an important quality assurance step, as it exposes the new code to a wide variety of complicated deployments and can surface bugs before official release. Releases are accessible via our [supported installation methods](https://docs.getdbt.com/docs/core/installation-overview#install-dbt-core).

## Getting the code

@@ -44,9 +45,7 @@ If you are not a member of the `dbt-labs` GitHub organization, you can contribut

### dbt Labs contributors

If you are a member of the `dbt-labs` GitHub organization, you will have push access to the `dbt-core` repo. Rather than forking `dbt-core` to make your changes, just clone the repository, check out a new branch, and push directly to that branch. Branch names should be prefixed by `CT-XXX/` where:
* CT stands for 'core team'
* XXX stands for a JIRA ticket number
If you are a member of the `dbt-labs` GitHub organization, you will have push access to the `dbt-core` repo. Rather than forking `dbt-core` to make your changes, just clone the repository, check out a new branch, and push directly to that branch.

## Setting up an environment

@@ -171,9 +170,9 @@ Finally, you can also run a specific test or group of tests using [`pytest`](htt

```sh
# run all unit tests in a file
python3 -m pytest tests/unit/test_graph.py
python3 -m pytest tests/unit/test_base_column.py
# run a specific unit test
python3 -m pytest tests/unit/test_graph.py::GraphTest::test__dependency_list
python3 -m pytest tests/unit/test_base_column.py::TestNumericType::test__numeric_type
# run specific Postgres functional tests
python3 -m pytest tests/functional/sources
```

@@ -221,10 +220,12 @@ You don't need to worry about which `dbt-core` version your change will go into.

## Submitting a Pull Request

Code can be merged into the current development branch `main` by opening a pull request. A `dbt-core` maintainer will review your PR. They may suggest code revision for style or clarity, or request that you add unit or integration test(s). These are good things! We believe that, with a little bit of help, anyone can contribute high-quality code.
Code can be merged into the current development branch `main` by opening a pull request. If the proposal looks like it's on the right track, then a `dbt-core` maintainer will triage the PR and label it as `ready_for_review`. From this point, two code reviewers will be assigned with the aim of responding to any updates to the PR within about one week. They may suggest code revision for style or clarity, or request that you add unit or integration test(s). These are good things! We believe that, with a little bit of help, anyone can contribute high-quality code. Once merged, your contribution will be available for the next release of `dbt-core`.

Automated tests run via GitHub Actions. If you're a first-time contributor, all tests (including code checks and unit tests) will require a maintainer to approve. Changes in the `dbt-core` repository trigger integration tests against Postgres. dbt Labs also provides CI environments in which to test changes to other adapters, triggered by PRs in those adapters' repositories, as well as periodic maintenance checks of each adapter in concert with the latest `dbt-core` code changes.

Once all tests are passing and your PR has been approved, a `dbt-core` maintainer will merge your changes into the active development branch. And that's it! Happy developing :tada:

## Troubleshooting Tips

Sometimes, the content license agreement auto-check bot doesn't find a user's entry in its roster. If you need to force a rerun, add `@cla-bot check` in a comment on the pull request.
@@ -33,9 +33,6 @@ RUN apt-get update \
    python-is-python3 \
    python-dev-is-python3 \
    python3-pip \
    python3.8 \
    python3.8-dev \
    python3.8-venv \
    python3.9 \
    python3.9-dev \
    python3.9-venv \
Makefile (19 changes)
@@ -30,17 +30,22 @@ CI_FLAGS =\
.PHONY: dev_req
dev_req: ## Installs dbt-* packages in develop mode along with only development dependencies.
	@\
	pip install -r dev-requirements.txt
	pip install -r editable-requirements.txt
	pip install -r dev-requirements.txt -r editable-requirements.txt

.PHONY: dev
dev: dev_req ## Installs dbt-* packages in develop mode along with development dependencies and pre-commit.
	@\
	pre-commit install

.PHONY: proto_types
proto_types: ## generates google protobuf python file from types.proto
	protoc -I=./core/dbt/events --python_out=./core/dbt/events ./core/dbt/events/types.proto
.PHONY: dev-uninstall
dev-uninstall: ## Uninstall all packages in venv except for build tools
	@\
	pip freeze | grep -v "^-e" | cut -d "@" -f1 | xargs pip uninstall -y; \
	pip uninstall -y dbt-core

.PHONY: core_proto_types
core_proto_types: ## generates google protobuf python file from core_types.proto
	protoc -I=./core/dbt/events --python_out=./core/dbt/events ./core/dbt/events/core_types.proto
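As a usage note (assuming `protoc` is available on PATH), regenerating the generated Python module after editing `core_types.proto` is a single invocation of the target above:

```sh
# Rebuilds core/dbt/events/core_types_pb2.py from core_types.proto.
make core_proto_types
```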
.PHONY: mypy
mypy: .env ## Runs mypy against staged changes for static type checking.
@@ -77,12 +82,12 @@ test: .env ## Runs unit tests with py and code checks against staged changes.
	$(DOCKER_CMD) pre-commit run mypy-check --hook-stage manual | grep -v "INFO"

.PHONY: integration
integration: .env ## Runs postgres integration tests with py-integration
integration: .env ## Runs core integration tests using postgres with py-integration
	@\
	$(CI_FLAGS) $(DOCKER_CMD) tox -e py-integration -- -nauto

.PHONY: integration-fail-fast
integration-fail-fast: .env ## Runs postgres integration tests with py-integration in "fail fast" mode.
integration-fail-fast: .env ## Runs core integration tests using postgres with py-integration in "fail fast" mode.
	@\
	$(DOCKER_CMD) tox -e py-integration -- -x -nauto
@@ -21,7 +21,7 @@ These select statements, or "models", form a dbt project. Models frequently buil

## Getting started

- [Install dbt](https://docs.getdbt.com/docs/get-started/installation)
- [Install dbt Core](https://docs.getdbt.com/docs/get-started/installation) or explore the [dbt Cloud CLI](https://docs.getdbt.com/docs/cloud/cloud-cli-installation), a command-line interface powered by [dbt Cloud](https://docs.getdbt.com/docs/cloud/about-cloud/dbt-cloud-features) that enhances collaboration.
- Read the [introduction](https://docs.getdbt.com/docs/introduction/) and [viewpoint](https://docs.getdbt.com/docs/about/viewpoint/)

## Join the dbt Community
@@ -31,7 +31,7 @@ These select statements, or "models", form a dbt project. Models frequently buil

## Reporting bugs and contributing code

- Want to report a bug or request a feature? Let us know on [Slack](http://community.getdbt.com/), or open [an issue](https://github.com/dbt-labs/dbt-core/issues/new)
- Want to report a bug or request a feature? Let us know and open [an issue](https://github.com/dbt-labs/dbt-core/issues/new/choose)
- Want to help us build dbt? Check out the [Contributing Guide](https://github.com/dbt-labs/dbt-core/blob/HEAD/CONTRIBUTING.md)

## Code of Conduct
codecov.yml (13 changes)
@@ -0,0 +1,13 @@
ignore:
  - ".github"
  - ".changes"
coverage:
  status:
    project:
      default:
        target: auto
        threshold: 0.1% # Reduce noise by ignoring rounding errors in coverage drops
    patch:
      default:
        target: auto
        threshold: 80%
@@ -1,2 +1,3 @@
recursive-include dbt/include *.py *.sql *.yml *.html *.md .gitkeep .gitignore
include dbt/py.typed
recursive-include dbt/task/docs *.html
@@ -1,30 +0,0 @@
# Adapters README

The Adapters module is responsible for defining database connection methods, caching information from databases, how relations are defined, and the two major connection types we have - base and sql.

# Directories

## `base`

Defines the base implementation Adapters can use to build out full functionality.

## `sql`

Defines a sql implementation for adapters that initially inherits the above base implementation and comes with some premade methods and macros that can be overwritten as needed per adapter. (This is the most common type of adapter.)
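To make the base/sql split concrete, here is a hypothetical sketch (the class name and return values are invented; `SQLAdapter`, `date_function`, and `convert_text_type` are part of dbt's adapter API) of an adapter that inherits the sql implementation and overrides only warehouse-specific pieces:

```python
import agate
from dbt.adapters.sql import SQLAdapter

class MyWarehouseAdapter(SQLAdapter):
    @classmethod
    def date_function(cls) -> str:
        # Expression the target warehouse uses for "current timestamp".
        return "now()"

    @classmethod
    def convert_text_type(cls, agate_table: agate.Table, col_idx: int) -> str:
        # How agate text columns (e.g. from seeds) map to a warehouse type.
        return "text"
```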
# Files

## `cache.py`

Caches information from the database.

## `factory.py`

Defines how we generate adapter objects.

## `protocol.py`

Defines various interfaces for various adapter objects. Helps mypy correctly resolve methods.

## `reference_keys.py`

Configures the naming scheme for cache elements to be universal.
@@ -1,7 +0,0 @@
# N.B.
# This will add to the package’s __path__ all subdirectories of directories on sys.path named after the package, which effectively combines both modules into a single namespace (dbt.adapters)
# The matching statement is in plugins/postgres/dbt/adapters/__init__.py

from pkgutil import extend_path

__path__ = extend_path(__path__, __name__)
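For reference, the matching statement mentioned in the comment above is the same pkgutil idiom on the plugin side; a sketch of what that file would contain:

# plugins/postgres/dbt/adapters/__init__.py (sketch of the matching statement)
from pkgutil import extend_path

__path__ = extend_path(__path__, __name__)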
@@ -1,10 +0,0 @@

## Base adapters

### impl.py

The class `BaseAdapter` in [base/impl.py](https://github.com/dbt-labs/dbt-core/blob/main/core/dbt/adapters/base/impl.py) is a (mostly) abstract class that adapter classes inherit from. The base class scaffolds out the methods that every adapter project usually should implement for smooth communication between dbt and the database.

Some target databases require more or fewer methods, depending on the warehouse's feature set.

Look into the class for function-level comments.
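The pattern described here is a standard abstract base class. A minimal, hedged sketch of the idea with hypothetical names (far smaller than the real BaseAdapter):

import abc

class ToyBaseAdapter(abc.ABC):
    # Concrete adapters must say how to quote identifiers for their database.
    @abc.abstractmethod
    def quote(self, identifier: str) -> str: ...

    # Shared behavior lives on the base and builds on the abstract pieces.
    def quoted_dot_path(self, *parts: str) -> str:
        return ".".join(self.quote(p) for p in parts)

class ToyPostgresAdapter(ToyBaseAdapter):
    def quote(self, identifier: str) -> str:
        return '"{}"'.format(identifier)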
@@ -1,19 +0,0 @@
# these are all just exports, #noqa them so flake8 will be happy

# TODO: Should we still include this in the `adapters` namespace?
from dbt.contracts.connection import Credentials  # noqa: F401
from dbt.adapters.base.meta import available  # noqa: F401
from dbt.adapters.base.connections import BaseConnectionManager  # noqa: F401
from dbt.adapters.base.relation import (  # noqa: F401
    BaseRelation,
    RelationType,
    SchemaSearchMap,
)
from dbt.adapters.base.column import Column  # noqa: F401
from dbt.adapters.base.impl import (  # noqa: F401
    AdapterConfig,
    BaseAdapter,
    PythonJobHelper,
    ConstraintSupport,
)
from dbt.adapters.base.plugin import AdapterPlugin  # noqa: F401
@@ -1,161 +0,0 @@
from dataclasses import dataclass
import re
from typing import Dict, ClassVar, Any, Optional

from dbt.exceptions import DbtRuntimeError


@dataclass
class Column:
    TYPE_LABELS: ClassVar[Dict[str, str]] = {
        "STRING": "TEXT",
        "TIMESTAMP": "TIMESTAMP",
        "FLOAT": "FLOAT",
        "INTEGER": "INT",
        "BOOLEAN": "BOOLEAN",
    }
    column: str
    dtype: str
    char_size: Optional[int] = None
    numeric_precision: Optional[Any] = None
    numeric_scale: Optional[Any] = None

    @classmethod
    def translate_type(cls, dtype: str) -> str:
        return cls.TYPE_LABELS.get(dtype.upper(), dtype)

    @classmethod
    def create(cls, name, label_or_dtype: str) -> "Column":
        column_type = cls.translate_type(label_or_dtype)
        return cls(name, column_type)

    @property
    def name(self) -> str:
        return self.column

    @property
    def quoted(self) -> str:
        return '"{}"'.format(self.column)

    @property
    def data_type(self) -> str:
        if self.is_string():
            return self.string_type(self.string_size())
        elif self.is_numeric():
            return self.numeric_type(self.dtype, self.numeric_precision, self.numeric_scale)
        else:
            return self.dtype

    def is_string(self) -> bool:
        return self.dtype.lower() in ["text", "character varying", "character", "varchar"]

    def is_number(self):
        return any([self.is_integer(), self.is_numeric(), self.is_float()])

    def is_float(self):
        return self.dtype.lower() in [
            # floats
            "real",
            "float4",
            "float",
            "double precision",
            "float8",
            "double",
        ]

    def is_integer(self) -> bool:
        return self.dtype.lower() in [
            # real types
            "smallint",
            "integer",
            "bigint",
            "smallserial",
            "serial",
            "bigserial",
            # aliases
            "int2",
            "int4",
            "int8",
            "serial2",
            "serial4",
            "serial8",
        ]

    def is_numeric(self) -> bool:
        return self.dtype.lower() in ["numeric", "decimal"]

    def string_size(self) -> int:
        if not self.is_string():
            raise DbtRuntimeError("Called string_size() on non-string field!")

        if self.dtype == "text" or self.char_size is None:
            # char_size should never be None. Handle it reasonably just in case
            return 256
        else:
            return int(self.char_size)

    def can_expand_to(self, other_column: "Column") -> bool:
        """returns True if this column can be expanded to the size of the
        other column"""
        if not self.is_string() or not other_column.is_string():
            return False

        return other_column.string_size() > self.string_size()

    def literal(self, value: Any) -> str:
        return "{}::{}".format(value, self.data_type)

    @classmethod
    def string_type(cls, size: int) -> str:
        return "character varying({})".format(size)

    @classmethod
    def numeric_type(cls, dtype: str, precision: Any, scale: Any) -> str:
        # This could be decimal(...), numeric(...), number(...)
        # Just use whatever was fed in here -- don't try to get too clever
        if precision is None or scale is None:
            return dtype
        else:
            return "{}({},{})".format(dtype, precision, scale)

    def __repr__(self) -> str:
        return "<Column {} ({})>".format(self.name, self.data_type)

    @classmethod
    def from_description(cls, name: str, raw_data_type: str) -> "Column":
        match = re.match(r"([^(]+)(\([^)]+\))?", raw_data_type)
        if match is None:
            raise DbtRuntimeError(f'Could not interpret data type "{raw_data_type}"')
        data_type, size_info = match.groups()
        char_size = None
        numeric_precision = None
        numeric_scale = None
        if size_info is not None:
            # strip out the parentheses
            size_info = size_info[1:-1]
            parts = size_info.split(",")
            if len(parts) == 1:
                try:
                    char_size = int(parts[0])
                except ValueError:
                    raise DbtRuntimeError(
                        f'Could not interpret data_type "{raw_data_type}": '
                        f'could not convert "{parts[0]}" to an integer'
                    )
            elif len(parts) == 2:
                try:
                    numeric_precision = int(parts[0])
                except ValueError:
                    raise DbtRuntimeError(
                        f'Could not interpret data_type "{raw_data_type}": '
                        f'could not convert "{parts[0]}" to an integer'
                    )
                try:
                    numeric_scale = int(parts[1])
                except ValueError:
                    raise DbtRuntimeError(
                        f'Could not interpret data_type "{raw_data_type}": '
                        f'could not convert "{parts[1]}" to an integer'
                    )

        return cls(name, data_type, char_size, numeric_precision, numeric_scale)
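A quick usage sketch of the parsing and widening logic above (behavior derived from the code shown):

col = Column.from_description("email", "character varying(64)")
col.data_type                     # "character varying(64)"
wider = Column.from_description("email", "character varying(256)")
col.can_expand_to(wider)          # True: both are strings and 256 > 64
Column.translate_type("STRING")   # "TEXT", via TYPE_LABELS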
@@ -1,414 +0,0 @@
import abc
import os
from time import sleep
import sys
import traceback

# multiprocessing.RLock is a function returning this type
from multiprocessing.synchronize import RLock
from threading import get_ident
from typing import (
    Any,
    Dict,
    Tuple,
    Hashable,
    Optional,
    ContextManager,
    List,
    Type,
    Union,
    Iterable,
    Callable,
)

import agate

import dbt.exceptions
from dbt.contracts.connection import (
    Connection,
    Identifier,
    ConnectionState,
    AdapterRequiredConfig,
    LazyHandle,
    AdapterResponse,
)
from dbt.contracts.graph.manifest import Manifest
from dbt.adapters.base.query_headers import (
    MacroQueryStringSetter,
)
from dbt.events import AdapterLogger
from dbt.events.functions import fire_event
from dbt.events.types import (
    NewConnection,
    ConnectionReused,
    ConnectionLeftOpenInCleanup,
    ConnectionLeftOpen,
    ConnectionClosedInCleanup,
    ConnectionClosed,
    Rollback,
    RollbackFailed,
)
from dbt.events.contextvars import get_node_info
from dbt import flags
from dbt.utils import cast_to_str

SleepTime = Union[int, float]  # As taken by time.sleep.
AdapterHandle = Any  # Adapter connection handle objects can be any class.


class BaseConnectionManager(metaclass=abc.ABCMeta):
    """Methods to implement:
    - exception_handler
    - cancel_open
    - open
    - begin
    - commit
    - clear_transaction
    - execute

    You must also set the 'TYPE' class attribute with a class-unique constant
    string.
    """

    TYPE: str = NotImplemented

    def __init__(self, profile: AdapterRequiredConfig):
        self.profile = profile
        self.thread_connections: Dict[Hashable, Connection] = {}
        self.lock: RLock = flags.MP_CONTEXT.RLock()
        self.query_header: Optional[MacroQueryStringSetter] = None

    def set_query_header(self, manifest: Manifest) -> None:
        self.query_header = MacroQueryStringSetter(self.profile, manifest)

    @staticmethod
    def get_thread_identifier() -> Hashable:
        # note that get_ident() may be re-used, but we should never experience
        # that within a single process
        return (os.getpid(), get_ident())

    def get_thread_connection(self) -> Connection:
        key = self.get_thread_identifier()
        with self.lock:
            if key not in self.thread_connections:
                raise dbt.exceptions.InvalidConnectionError(key, list(self.thread_connections))
            return self.thread_connections[key]
    def set_thread_connection(self, conn: Connection) -> None:
        key = self.get_thread_identifier()
        if key in self.thread_connections:
            raise dbt.exceptions.DbtInternalError(
                "In set_thread_connection, existing connection exists for {}".format(key)
            )
        self.thread_connections[key] = conn
    def get_if_exists(self) -> Optional[Connection]:
        key = self.get_thread_identifier()
        with self.lock:
            return self.thread_connections.get(key)

    def clear_thread_connection(self) -> None:
        key = self.get_thread_identifier()
        with self.lock:
            if key in self.thread_connections:
                del self.thread_connections[key]

    def clear_transaction(self) -> None:
        """Clear any existing transactions."""
        conn = self.get_thread_connection()
        if conn is not None:
            if conn.transaction_open:
                self._rollback(conn)
            self.begin()
            self.commit()

    def rollback_if_open(self) -> None:
        conn = self.get_if_exists()
        if conn is not None and conn.handle and conn.transaction_open:
            self._rollback(conn)

    @abc.abstractmethod
    def exception_handler(self, sql: str) -> ContextManager:
        """Create a context manager that handles exceptions caused by database
        interactions.

        :param str sql: The SQL string that the block inside the context
            manager is executing.
        :return: A context manager that handles exceptions raised by the
            underlying database.
        """
        raise dbt.exceptions.NotImplementedError(
            "`exception_handler` is not implemented for this adapter!"
        )

    def set_connection_name(self, name: Optional[str] = None) -> Connection:
        """Called by 'acquire_connection' in BaseAdapter, which is called by
        'connection_named', called by 'connection_for(node)'.
        Creates a connection for this thread if one doesn't already
        exist, and will rename an existing connection."""

        conn_name: str = "master" if name is None else name

        # Get a connection for this thread
        conn = self.get_if_exists()

        if conn and conn.name == conn_name and conn.state == "open":
            # Found a connection and nothing to do, so just return it
            return conn

        if conn is None:
            # Create a new connection
            conn = Connection(
                type=Identifier(self.TYPE),
                name=conn_name,
                state=ConnectionState.INIT,
                transaction_open=False,
                handle=None,
                credentials=self.profile.credentials,
            )
            conn.handle = LazyHandle(self.open)
            # Add the connection to thread_connections for this thread
            self.set_thread_connection(conn)
            fire_event(
                NewConnection(conn_name=conn_name, conn_type=self.TYPE, node_info=get_node_info())
            )
        else:  # existing connection either wasn't open or didn't have the right name
            if conn.state != "open":
                conn.handle = LazyHandle(self.open)
            if conn.name != conn_name:
                orig_conn_name: str = conn.name or ""
                conn.name = conn_name
                fire_event(ConnectionReused(orig_conn_name=orig_conn_name, conn_name=conn_name))

        return conn

    @classmethod
    def retry_connection(
        cls,
        connection: Connection,
        connect: Callable[[], AdapterHandle],
        logger: AdapterLogger,
        retryable_exceptions: Iterable[Type[Exception]],
        retry_limit: int = 1,
        retry_timeout: Union[Callable[[int], SleepTime], SleepTime] = 1,
        _attempts: int = 0,
    ) -> Connection:
        """Given a Connection, set its handle by calling connect.

        The calls to connect will be retried up to retry_limit times to deal with transient
        connection errors. By default, one retry will be attempted if retryable_exceptions is set.

        :param Connection connection: An instance of a Connection that needs a handle to be set,
            usually when attempting to open it.
        :param connect: A callable that returns the appropriate connection handle for a
            given adapter. This callable will be retried retry_limit times if a subclass of any
            Exception in retryable_exceptions is raised by connect.
        :type connect: Callable[[], AdapterHandle]
        :param AdapterLogger logger: A logger to emit messages on retry attempts or errors. When
            handling expected errors, we call debug, and call warning on unexpected errors or when
            all retry attempts have been exhausted.
        :param retryable_exceptions: An iterable of exception classes that if raised by
            connect should trigger a retry.
        :type retryable_exceptions: Iterable[Type[Exception]]
        :param int retry_limit: How many times to retry the call to connect. If this limit
            is exceeded before a successful call, a FailedToConnectError will be raised.
            Must be non-negative.
        :param retry_timeout: Time to wait between attempts to connect. Can also take a
            Callable that takes the number of attempts so far, beginning at 0, and returns an int
            or float to be passed to time.sleep.
        :type retry_timeout: Union[Callable[[int], SleepTime], SleepTime] = 1
        :param int _attempts: Parameter used to keep track of the number of attempts in calling the
            connect function across recursive calls. Passed as an argument to retry_timeout if it
            is a Callable. This parameter should not be set by the initial caller.
        :raises dbt.exceptions.FailedToConnectError: Upon exhausting all retry attempts without
            successfully acquiring a handle.
        :return: The given connection with its appropriate state and handle attributes set
            depending on whether we successfully acquired a handle or not.
        """
        timeout = retry_timeout(_attempts) if callable(retry_timeout) else retry_timeout
        if timeout < 0:
            raise dbt.exceptions.FailedToConnectError(
                "retry_timeout cannot be negative or return a negative time."
            )

        if retry_limit < 0 or retry_limit > sys.getrecursionlimit():
            # This guard is not perfect; others may add to the recursion limit (e.g. built-ins).
            connection.handle = None
            connection.state = ConnectionState.FAIL
            raise dbt.exceptions.FailedToConnectError("retry_limit cannot be negative")

        try:
            connection.handle = connect()
            connection.state = ConnectionState.OPEN
            return connection

        except tuple(retryable_exceptions) as e:
            if retry_limit <= 0:
                connection.handle = None
                connection.state = ConnectionState.FAIL
                raise dbt.exceptions.FailedToConnectError(str(e))

            logger.debug(
                f"Got a retryable error when attempting to open a {cls.TYPE} connection.\n"
                f"{retry_limit} attempts remaining. Retrying in {timeout} seconds.\n"
                f"Error:\n{e}"
            )

            sleep(timeout)
            return cls.retry_connection(
                connection=connection,
                connect=connect,
                logger=logger,
                retry_limit=retry_limit - 1,
                retry_timeout=retry_timeout,
                retryable_exceptions=retryable_exceptions,
                _attempts=_attempts + 1,
            )

        except Exception as e:
            connection.handle = None
            connection.state = ConnectionState.FAIL
            raise dbt.exceptions.FailedToConnectError(str(e))
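A hedged usage sketch of the callable retry_timeout form described in the docstring above. MyConnectionManager, connect_fn, log, and TransientError are hypothetical stand-ins:

# Exponential backoff: waits 1s, 2s, 4s, ... between attempts.
conn = MyConnectionManager.retry_connection(
    connection,
    connect=connect_fn,                     # hypothetical handle factory
    logger=log,                             # an AdapterLogger instance
    retryable_exceptions=[TransientError],  # hypothetical transient error class
    retry_limit=5,
    retry_timeout=lambda attempt: 2 ** attempt,
)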
    @abc.abstractmethod
    def cancel_open(self) -> Optional[List[str]]:
        """Cancel all open connections on the adapter. (passable)"""
        raise dbt.exceptions.NotImplementedError(
            "`cancel_open` is not implemented for this adapter!"
        )

    @classmethod
    @abc.abstractmethod
    def open(cls, connection: Connection) -> Connection:
        """Open the given connection on the adapter and return it.

        This may mutate the given connection (in particular, its state and its
        handle).

        This should be thread-safe, or hold the lock if necessary. The given
        connection should not be in either in_use or available.
        """
        raise dbt.exceptions.NotImplementedError("`open` is not implemented for this adapter!")

    def release(self) -> None:
        with self.lock:
            conn = self.get_if_exists()
            if conn is None:
                return

        try:
            # always close the connection. close() calls _rollback() if there
            # is an open transaction
            self.close(conn)
        except Exception:
            # if rollback or close failed, remove our busted connection
            self.clear_thread_connection()
            raise

    def cleanup_all(self) -> None:
        with self.lock:
            for connection in self.thread_connections.values():
                if connection.state not in {"closed", "init"}:
                    fire_event(ConnectionLeftOpenInCleanup(conn_name=cast_to_str(connection.name)))
                else:
                    fire_event(ConnectionClosedInCleanup(conn_name=cast_to_str(connection.name)))
                self.close(connection)

            # garbage collect these connections
            self.thread_connections.clear()

    @abc.abstractmethod
    def begin(self) -> None:
        """Begin a transaction. (passable)"""
        raise dbt.exceptions.NotImplementedError("`begin` is not implemented for this adapter!")

    @abc.abstractmethod
    def commit(self) -> None:
        """Commit a transaction. (passable)"""
        raise dbt.exceptions.NotImplementedError("`commit` is not implemented for this adapter!")

    @classmethod
    def _rollback_handle(cls, connection: Connection) -> None:
        """Perform the actual rollback operation."""
        try:
            connection.handle.rollback()
        except Exception:
            fire_event(
                RollbackFailed(
                    conn_name=cast_to_str(connection.name),
                    exc_info=traceback.format_exc(),
                    node_info=get_node_info(),
                )
            )

    @classmethod
    def _close_handle(cls, connection: Connection) -> None:
        """Perform the actual close operation."""
        # On windows, sometimes connection handles don't have a close() attr.
        if hasattr(connection.handle, "close"):
            fire_event(
                ConnectionClosed(conn_name=cast_to_str(connection.name), node_info=get_node_info())
            )
            connection.handle.close()
        else:
            fire_event(
                ConnectionLeftOpen(
                    conn_name=cast_to_str(connection.name), node_info=get_node_info()
                )
            )

    @classmethod
    def _rollback(cls, connection: Connection) -> None:
        """Roll back the given connection."""
        if connection.transaction_open is False:
            raise dbt.exceptions.DbtInternalError(
                f"Tried to rollback transaction on connection "
                f'"{connection.name}", but it does not have one open!'
            )

        fire_event(Rollback(conn_name=cast_to_str(connection.name), node_info=get_node_info()))
        cls._rollback_handle(connection)

        connection.transaction_open = False

    @classmethod
    def close(cls, connection: Connection) -> Connection:
        # if the connection is in closed or init, there's nothing to do
        if connection.state in {ConnectionState.CLOSED, ConnectionState.INIT}:
            return connection

        if connection.transaction_open and connection.handle:
            fire_event(Rollback(conn_name=cast_to_str(connection.name), node_info=get_node_info()))
            cls._rollback_handle(connection)
        connection.transaction_open = False

        cls._close_handle(connection)
        connection.state = ConnectionState.CLOSED

        return connection

    def commit_if_has_connection(self) -> None:
        """If the named connection exists, commit the current transaction."""
        connection = self.get_if_exists()
        if connection:
            self.commit()

    def _add_query_comment(self, sql: str) -> str:
        if self.query_header is None:
            return sql
        return self.query_header.add(sql)

    @abc.abstractmethod
    def execute(
        self, sql: str, auto_begin: bool = False, fetch: bool = False
    ) -> Tuple[AdapterResponse, agate.Table]:
        """Execute the given SQL.

        :param str sql: The sql to execute.
        :param bool auto_begin: If set, and dbt is not currently inside a
            transaction, automatically begin one.
        :param bool fetch: If set, fetch results.
        :return: A tuple of the query status and results (empty if fetch=False).
        :rtype: Tuple[AdapterResponse, agate.Table]
        """
        raise dbt.exceptions.NotImplementedError("`execute` is not implemented for this adapter!")
File diff suppressed because it is too large
@@ -1,128 +0,0 @@
import abc
from functools import wraps
from typing import Callable, Optional, Any, FrozenSet, Dict, Set

from dbt.deprecations import warn, renamed_method


Decorator = Callable[[Any], Callable]


class _Available:
    def __call__(self, func: Callable) -> Callable:
        func._is_available_ = True  # type: ignore
        return func

    def parse(self, parse_replacement: Callable) -> Decorator:
        """A decorator factory to indicate that a method on the adapter will be
        exposed to the database wrapper, and will be stubbed out at parse time
        with the given function.

        @available.parse()
        def my_method(self, a, b):
            if something:
                return None
            return big_expensive_db_query()

        @available.parse(lambda *args, **kwargs: {})
        def my_other_method(self, a, b):
            x = {}
            x.update(big_expensive_db_query())
            return x
        """

        def inner(func):
            func._parse_replacement_ = parse_replacement
            return self(func)

        return inner

    def deprecated(
        self, supported_name: str, parse_replacement: Optional[Callable] = None
    ) -> Decorator:
        """A decorator that marks a function as available, but also prints a
        deprecation warning. Use like

        @available.deprecated('my_new_method')
        def my_old_method(self, arg):
            args = compatibility_shim(arg)
            return self.my_new_method(*args)

        @available.deprecated('my_new_slow_method', lambda *a, **k: (0, ''))
        def my_old_slow_method(self, arg):
            args = compatibility_shim(arg)
            return self.my_new_slow_method(*args)

        to make `adapter.my_old_method` available but also print out a warning
        on use directing users to `my_new_method`.

        The optional parse_replacement, if provided, will provide a parse-time
        replacement for the actual method (see `available.parse`).
        """

        def wrapper(func):
            func_name = func.__name__
            renamed_method(func_name, supported_name)

            @wraps(func)
            def inner(*args, **kwargs):
                warn("adapter:{}".format(func_name))
                return func(*args, **kwargs)

            if parse_replacement:
                available_function = self.parse(parse_replacement)
            else:
                available_function = self
            return available_function(inner)

        return wrapper

    def parse_none(self, func: Callable) -> Callable:
        wrapper = self.parse(lambda *a, **k: None)
        return wrapper(func)

    def parse_list(self, func: Callable) -> Callable:
        wrapper = self.parse(lambda *a, **k: [])
        return wrapper(func)


available = _Available()


class AdapterMeta(abc.ABCMeta):
    _available_: FrozenSet[str]
    _parse_replacements_: Dict[str, Callable]

    def __new__(mcls, name, bases, namespace, **kwargs):
        # mypy does not like the `**kwargs`. But `ABCMeta` itself takes
        # `**kwargs` in its argspec here (and passes them to `type.__new__`).
        # I'm not sure there is any benefit to it after poking around a bit,
        # but having it doesn't hurt on the python side (and omitting it could
        # hurt for obscure metaclass reasons, for all I know)
        cls = abc.ABCMeta.__new__(mcls, name, bases, namespace, **kwargs)  # type: ignore

        # this is very much inspired by ABCMeta's own implementation

        # dict mapping the method name to whether the model name should be
        # injected into the arguments. All methods in here are exposed to the
        # context.
        available: Set[str] = set()
        replacements: Dict[str, Any] = {}

        # collect base class data first
        for base in bases:
            available.update(getattr(base, "_available_", set()))
            replacements.update(getattr(base, "_parse_replacements_", {}))

        # override with local data if it exists
        for name, value in namespace.items():
            if getattr(value, "_is_available_", False):
                available.add(name)
            parse_replacement = getattr(value, "_parse_replacement_", None)
            if parse_replacement is not None:
                replacements[name] = parse_replacement

        cls._available_ = frozenset(available)
        # should this be a namedtuple so it will be immutable like _available_?
        cls._parse_replacements_ = replacements
        return cls
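A hedged sketch (hypothetical class) of what the metaclass collects at class-creation time:

class DemoAdapter(metaclass=AdapterMeta):
    @available
    def visible_to_macros(self):
        ...

    def internal_only(self):
        ...

"visible_to_macros" in DemoAdapter._available_  # True: the decorator set _is_available_
"internal_only" in DemoAdapter._available_      # False: undecorated methods are skipped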
@@ -1,42 +0,0 @@
from typing import List, Optional, Type

from dbt.adapters.base import Credentials
from dbt.exceptions import CompilationError
from dbt.adapters.protocol import AdapterProtocol


def project_name_from_path(include_path: str) -> str:
    # avoid an import cycle
    from dbt.config.project import PartialProject

    partial = PartialProject.from_project_root(include_path)
    if partial.project_name is None:
        raise CompilationError(f"Invalid project at {include_path}: name not set!")
    return partial.project_name


class AdapterPlugin:
    """Defines the basic requirements for a dbt adapter plugin.

    :param include_path: The path to this adapter plugin's root
    :param dependencies: A list of adapter names that this adapter depends
        upon.
    """

    def __init__(
        self,
        adapter: Type[AdapterProtocol],
        credentials: Type[Credentials],
        include_path: str,
        dependencies: Optional[List[str]] = None,
    ):

        self.adapter: Type[AdapterProtocol] = adapter
        self.credentials: Type[Credentials] = credentials
        self.include_path: str = include_path
        self.project_name: str = project_name_from_path(include_path)
        self.dependencies: List[str]
        if dependencies is None:
            self.dependencies = []
        else:
            self.dependencies = dependencies
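As a hedged sketch of how an adapter package might expose itself using this class: MyAdapter, MyCredentials, and PACKAGE_PATH are hypothetical names, and include_path must point at the plugin's dbt project root (project_name_from_path reads it):

Plugin = AdapterPlugin(
    adapter=MyAdapter,          # hypothetical adapter class implementing AdapterProtocol
    credentials=MyCredentials,  # hypothetical Credentials subclass
    include_path=PACKAGE_PATH,  # hypothetical filesystem root of the plugin's dbt project
    dependencies=["postgres"],  # optional: adapters this one builds on
)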
@@ -1,102 +0,0 @@
from threading import local
from typing import Optional, Callable, Dict, Any

from dbt.clients.jinja import QueryStringGenerator

from dbt.context.manifest import generate_query_header_context
from dbt.contracts.connection import AdapterRequiredConfig, QueryComment
from dbt.contracts.graph.nodes import ResultNode
from dbt.contracts.graph.manifest import Manifest
from dbt.exceptions import DbtRuntimeError


class NodeWrapper:
    def __init__(self, node):
        self._inner_node = node

    def __getattr__(self, name):
        return getattr(self._inner_node, name, "")


class _QueryComment(local):
    """A thread-local class storing thread-specific state information for
    connection management, namely:
    - the current thread's query comment.
    - a source_name indicating what set the current thread's query comment
    """

    def __init__(self, initial):
        self.query_comment: Optional[str] = initial
        self.append = False

    def add(self, sql: str) -> str:
        if not self.query_comment:
            return sql

        if self.append:
            # replace last ';' with '<comment>;'
            sql = sql.rstrip()
            if sql[-1] == ";":
                sql = sql[:-1]
                return "{}\n/* {} */;".format(sql, self.query_comment.strip())

            return "{}\n/* {} */".format(sql, self.query_comment.strip())

        return "/* {} */\n{}".format(self.query_comment.strip(), sql)

    def set(self, comment: Optional[str], append: bool):
        if isinstance(comment, str) and "*/" in comment:
            # tell the user "no" so they don't hurt themselves by writing
            # garbage
            raise DbtRuntimeError(f'query comment contains illegal value "*/": {comment}')
        self.query_comment = comment
        self.append = append


QueryStringFunc = Callable[[str, Optional[NodeWrapper]], str]


class MacroQueryStringSetter:
    def __init__(self, config: AdapterRequiredConfig, manifest: Manifest):
        self.manifest = manifest
        self.config = config

        comment_macro = self._get_comment_macro()
        self.generator: QueryStringFunc = lambda name, model: ""
        # if the comment value was None or the empty string, just skip it
        if comment_macro:
            assert isinstance(comment_macro, str)
            macro = "\n".join(
                (
                    "{%- macro query_comment_macro(connection_name, node) -%}",
                    comment_macro,
                    "{% endmacro %}",
                )
            )
            ctx = self._get_context()
            self.generator = QueryStringGenerator(macro, ctx)
        self.comment = _QueryComment(None)
        self.reset()

    def _get_comment_macro(self) -> Optional[str]:
        return self.config.query_comment.comment

    def _get_context(self) -> Dict[str, Any]:
        return generate_query_header_context(self.config, self.manifest)

    def add(self, sql: str) -> str:
        return self.comment.add(sql)

    def reset(self):
        self.set("master", None)

    def set(self, name: str, node: Optional[ResultNode]):
        wrapped: Optional[NodeWrapper] = None
        if node is not None:
            wrapped = NodeWrapper(node)
        comment_str = self.generator(name, wrapped)

        append = False
        if isinstance(self.config.query_comment, QueryComment):
            append = self.config.query_comment.append
        self.comment.set(comment_str, append)
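A short sketch of the placement logic in _QueryComment.add above (outputs derived from the code shown):

qc = _QueryComment(None)
qc.set("run by dbt", append=False)
qc.add("select 1")
# -> '/* run by dbt */\nselect 1'   (comment prepended)

qc.set("run by dbt", append=True)
qc.add("select 1;")
# -> 'select 1\n/* run by dbt */;'  (comment spliced before the final semicolon)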
@@ -1,465 +0,0 @@
from collections.abc import Hashable
from dataclasses import dataclass, field
from typing import Optional, TypeVar, Any, Type, Dict, Iterator, Tuple, Set

from dbt.contracts.graph.nodes import SourceDefinition, ManifestNode, ResultNode, ParsedNode
from dbt.contracts.relation import (
    RelationType,
    ComponentName,
    HasQuoting,
    FakeAPIObject,
    Policy,
    Path,
)
from dbt.exceptions import (
    ApproximateMatchError,
    DbtInternalError,
    MultipleDatabasesNotAllowedError,
)
from dbt.node_types import NodeType
from dbt.utils import filter_null_values, deep_merge, classproperty

import dbt.exceptions


Self = TypeVar("Self", bound="BaseRelation")


@dataclass(frozen=True, eq=False, repr=False)
class BaseRelation(FakeAPIObject, Hashable):
    path: Path
    type: Optional[RelationType] = None
    quote_character: str = '"'
    # Python 3.11 requires that these use default_factory instead of simple default
    # ValueError: mutable default <class 'dbt.contracts.relation.Policy'> for field include_policy is not allowed: use default_factory
    include_policy: Policy = field(default_factory=lambda: Policy())
    quote_policy: Policy = field(default_factory=lambda: Policy())
    dbt_created: bool = False

    def _is_exactish_match(self, field: ComponentName, value: str) -> bool:
        if self.dbt_created and self.quote_policy.get_part(field) is False:
            return self.path.get_lowered_part(field) == value.lower()
        else:
            return self.path.get_part(field) == value

    @classmethod
    def _get_field_named(cls, field_name):
        for f, _ in cls._get_fields():
            if f.name == field_name:
                return f
        # this should be unreachable
        raise ValueError(f"BaseRelation has no {field_name} field!")

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.to_dict(omit_none=True) == other.to_dict(omit_none=True)

    @classmethod
    def get_default_quote_policy(cls) -> Policy:
        return cls._get_field_named("quote_policy").default_factory()

    @classmethod
    def get_default_include_policy(cls) -> Policy:
        return cls._get_field_named("include_policy").default_factory()

    def get(self, key, default=None):
        """Override `.get` to return a metadata object so we don't break
        dbt_utils.
        """
        if key == "metadata":
            return {"type": self.__class__.__name__}
        return super().get(key, default)

    def matches(
        self,
        database: Optional[str] = None,
        schema: Optional[str] = None,
        identifier: Optional[str] = None,
    ) -> bool:
        search = filter_null_values(
            {
                ComponentName.Database: database,
                ComponentName.Schema: schema,
                ComponentName.Identifier: identifier,
            }
        )

        if not search:
            # nothing was passed in
            raise dbt.exceptions.DbtRuntimeError(
                "Tried to match relation, but no search path was passed!"
            )

        exact_match = True
        approximate_match = True

        for k, v in search.items():
            if not self._is_exactish_match(k, v):
                exact_match = False
            if str(self.path.get_lowered_part(k)).strip(self.quote_character) != v.lower().strip(
                self.quote_character
            ):
                approximate_match = False  # type: ignore[union-attr]

        if approximate_match and not exact_match:
            target = self.create(database=database, schema=schema, identifier=identifier)
            raise ApproximateMatchError(target, self)

        return exact_match

    def replace_path(self, **kwargs):
        return self.replace(path=self.path.replace(**kwargs))

    def quote(
        self: Self,
        database: Optional[bool] = None,
        schema: Optional[bool] = None,
        identifier: Optional[bool] = None,
    ) -> Self:
        policy = filter_null_values(
            {
                ComponentName.Database: database,
                ComponentName.Schema: schema,
                ComponentName.Identifier: identifier,
            }
        )

        new_quote_policy = self.quote_policy.replace_dict(policy)
        return self.replace(quote_policy=new_quote_policy)

    def include(
        self: Self,
        database: Optional[bool] = None,
        schema: Optional[bool] = None,
        identifier: Optional[bool] = None,
    ) -> Self:
        policy = filter_null_values(
            {
                ComponentName.Database: database,
                ComponentName.Schema: schema,
                ComponentName.Identifier: identifier,
            }
        )

        new_include_policy = self.include_policy.replace_dict(policy)
        return self.replace(include_policy=new_include_policy)

    def information_schema(self, view_name=None) -> "InformationSchema":
        # some of our data comes from jinja, where things can be `Undefined`.
        if not isinstance(view_name, str):
            view_name = None

        # Kick the user-supplied schema out of the information schema relation
        # Instead address this as <database>.information_schema by default
        info_schema = InformationSchema.from_relation(self, view_name)
        return info_schema.incorporate(path={"schema": None})

    def information_schema_only(self) -> "InformationSchema":
        return self.information_schema()

    def without_identifier(self) -> "BaseRelation":
        """Return a form of this relation that only has the database and schema
        included. To get the appropriately-quoted form of the schema out of
        the result (for use as part of a query), use `.render()`. To get the
        raw database or schema name, use `.database` or `.schema`.

        The hash of the returned object is the result of render().
        """
        return self.include(identifier=False).replace_path(identifier=None)

    def _render_iterator(self) -> Iterator[Tuple[Optional[ComponentName], Optional[str]]]:

        for key in ComponentName:
            path_part: Optional[str] = None
            if self.include_policy.get_part(key):
                path_part = self.path.get_part(key)
                if path_part is not None and self.quote_policy.get_part(key):
                    path_part = self.quoted(path_part)
            yield key, path_part

    def render(self) -> str:
        # if there is nothing set, this will return the empty string.
        return ".".join(part for _, part in self._render_iterator() if part is not None)

    def quoted(self, identifier):
        return "{quote_char}{identifier}{quote_char}".format(
            quote_char=self.quote_character,
            identifier=identifier,
        )

    @classmethod
    def create_from_source(cls: Type[Self], source: SourceDefinition, **kwargs: Any) -> Self:
        source_quoting = source.quoting.to_dict(omit_none=True)
        source_quoting.pop("column", None)
        quote_policy = deep_merge(
            cls.get_default_quote_policy().to_dict(omit_none=True),
            source_quoting,
            kwargs.get("quote_policy", {}),
        )

        return cls.create(
            database=source.database,
            schema=source.schema,
            identifier=source.identifier,
            quote_policy=quote_policy,
            **kwargs,
        )

    @staticmethod
    def add_ephemeral_prefix(name: str):
        return f"__dbt__cte__{name}"

    @classmethod
    def create_ephemeral_from_node(
        cls: Type[Self],
        config: HasQuoting,
        node: ManifestNode,
    ) -> Self:
        # Note that ephemeral models are based on the name.
        identifier = cls.add_ephemeral_prefix(node.name)
        return cls.create(
            type=cls.CTE,
            identifier=identifier,
        ).quote(identifier=False)

    @classmethod
    def create_from_node(
        cls: Type[Self],
        config: HasQuoting,
        node,
        quote_policy: Optional[Dict[str, bool]] = None,
        **kwargs: Any,
    ) -> Self:
        if quote_policy is None:
            quote_policy = {}

        quote_policy = dbt.utils.merge(config.quoting, quote_policy)

        return cls.create(
            database=node.database,
            schema=node.schema,
            identifier=node.alias,
            quote_policy=quote_policy,
            **kwargs,
        )

    @classmethod
    def create_from(
        cls: Type[Self],
        config: HasQuoting,
        node: ResultNode,
        **kwargs: Any,
    ) -> Self:
        if node.resource_type == NodeType.Source:
            if not isinstance(node, SourceDefinition):
                raise DbtInternalError(
                    "type mismatch, expected SourceDefinition but got {}".format(type(node))
                )
            return cls.create_from_source(node, **kwargs)
        else:
            # Can't use ManifestNode here because of parameterized generics
            if not isinstance(node, (ParsedNode)):
                raise DbtInternalError(
                    f"type mismatch, expected ManifestNode but got {type(node)}"
                )
            return cls.create_from_node(config, node, **kwargs)

    @classmethod
    def create(
        cls: Type[Self],
        database: Optional[str] = None,
        schema: Optional[str] = None,
        identifier: Optional[str] = None,
        type: Optional[RelationType] = None,
        **kwargs,
    ) -> Self:
        kwargs.update(
            {
                "path": {
                    "database": database,
                    "schema": schema,
                    "identifier": identifier,
                },
                "type": type,
            }
        )
        return cls.from_dict(kwargs)

    def __repr__(self) -> str:
        return "<{} {}>".format(self.__class__.__name__, self.render())

    def __hash__(self) -> int:
        return hash(self.render())

    def __str__(self) -> str:
        return self.render()

    @property
    def database(self) -> Optional[str]:
        return self.path.database

    @property
    def schema(self) -> Optional[str]:
        return self.path.schema

    @property
    def identifier(self) -> Optional[str]:
        return self.path.identifier

    @property
    def table(self) -> Optional[str]:
        return self.path.identifier

    # Here for compatibility with old Relation interface
    @property
    def name(self) -> Optional[str]:
        return self.identifier

    @property
    def is_table(self) -> bool:
        return self.type == RelationType.Table

    @property
    def is_cte(self) -> bool:
        return self.type == RelationType.CTE

    @property
    def is_view(self) -> bool:
        return self.type == RelationType.View

    @property
    def is_materialized_view(self) -> bool:
        return self.type == RelationType.MaterializedView

    @classproperty
    def Table(cls) -> str:
        return str(RelationType.Table)

    @classproperty
    def CTE(cls) -> str:
        return str(RelationType.CTE)

    @classproperty
    def View(cls) -> str:
        return str(RelationType.View)

    @classproperty
    def External(cls) -> str:
        return str(RelationType.External)

    @classproperty
    def MaterializedView(cls) -> str:
        return str(RelationType.MaterializedView)

    @classproperty
    def get_relation_type(cls) -> Type[RelationType]:
        return RelationType


Info = TypeVar("Info", bound="InformationSchema")


@dataclass(frozen=True, eq=False, repr=False)
class InformationSchema(BaseRelation):
    information_schema_view: Optional[str] = None

    def __post_init__(self):
        if not isinstance(self.information_schema_view, (type(None), str)):
            raise dbt.exceptions.CompilationError(
                "Got an invalid name: {}".format(self.information_schema_view)
            )

    @classmethod
    def get_path(cls, relation: BaseRelation, information_schema_view: Optional[str]) -> Path:
        return Path(
            database=relation.database,
            schema=relation.schema,
            identifier="INFORMATION_SCHEMA",
        )

    @classmethod
    def get_include_policy(
        cls,
        relation,
        information_schema_view: Optional[str],
    ) -> Policy:
        return relation.include_policy.replace(
            database=relation.database is not None,
            schema=False,
            identifier=True,
        )

    @classmethod
    def get_quote_policy(
        cls,
        relation,
        information_schema_view: Optional[str],
    ) -> Policy:
        return relation.quote_policy.replace(
            identifier=False,
        )

    @classmethod
    def from_relation(
        cls: Type[Info],
        relation: BaseRelation,
        information_schema_view: Optional[str],
    ) -> Info:
        include_policy = cls.get_include_policy(relation, information_schema_view)
        quote_policy = cls.get_quote_policy(relation, information_schema_view)
        path = cls.get_path(relation, information_schema_view)
        return cls(
            type=RelationType.View,
            path=path,
            include_policy=include_policy,
            quote_policy=quote_policy,
            information_schema_view=information_schema_view,
        )

    def _render_iterator(self):
        for k, v in super()._render_iterator():
            yield k, v
        yield None, self.information_schema_view


class SchemaSearchMap(Dict[InformationSchema, Set[Optional[str]]]):
    """A utility class to keep track of what information_schema tables to
    search for what schemas. The schema values are all lowercased to avoid
    duplication.
    """

    def add(self, relation: BaseRelation):
        key = relation.information_schema_only()
        if key not in self:
            self[key] = set()
        schema: Optional[str] = None
        if relation.schema is not None:
            schema = relation.schema.lower()
        self[key].add(schema)

    def search(self) -> Iterator[Tuple[InformationSchema, Optional[str]]]:
        for information_schema_name, schemas in self.items():
            for schema in schemas:
                yield information_schema_name, schema

    def flatten(self, allow_multiple_databases: bool = False):
        new = self.__class__()

        # make sure we don't have multiple databases if allow_multiple_databases is set to False
        if not allow_multiple_databases:
            seen = {r.database.lower() for r in self if r.database}
            if len(seen) > 1:
                raise MultipleDatabasesNotAllowedError(seen)

        for information_schema_name, schema in self.search():
            path = {"database": information_schema_name.database, "schema": schema}
            new.add(
                information_schema_name.incorporate(
                    path=path,
                    quote_policy={"database": False},
                    include_policy={"database": False},
                )
            )

        return new
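A hedged usage sketch of the policy and render mechanics above (exact quoting depends on the default Policy values):

rel = BaseRelation.create(database="analytics", schema="public", identifier="events")
rel.render()                          # dot-joins the included parts, quoting each per quote_policy
rel.quote(identifier=False).render()  # returns a copy whose identifier is left unquoted
rel.without_identifier().render()     # database.schema form, for schema-level statements
str(rel) == rel.render()              # True: __str__ delegates to render()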
@@ -1,520 +0,0 @@
|
||||
import threading
|
||||
from copy import deepcopy
|
||||
from typing import Any, Dict, Iterable, List, Optional, Set, Tuple
|
||||
|
||||
from dbt.adapters.reference_keys import (
|
||||
_make_ref_key,
|
||||
_make_ref_key_dict,
|
||||
_ReferenceKey,
|
||||
)
|
||||
from dbt.exceptions import (
|
||||
DependentLinkNotCachedError,
|
||||
NewNameAlreadyInCacheError,
|
||||
NoneRelationFoundError,
|
||||
ReferencedLinkNotCachedError,
|
||||
TruncatedModelNameCausedCollisionError,
|
||||
)
|
||||
from dbt.events.functions import fire_event, fire_event_if
|
||||
from dbt.events.types import CacheAction, CacheDumpGraph
|
||||
from dbt.flags import get_flags
|
||||
from dbt.utils import lowercase
|
||||
|
||||
|
||||
def dot_separated(key: _ReferenceKey) -> str:
|
||||
"""Return the key in dot-separated string form.
|
||||
|
||||
:param _ReferenceKey key: The key to stringify.
|
||||
"""
|
||||
return ".".join(map(str, key))
|
||||
|
||||
|
||||
class _CachedRelation:
|
||||
"""Nothing about _CachedRelation is guaranteed to be thread-safe!
|
||||
|
||||
:attr str schema: The schema of this relation.
|
||||
:attr str identifier: The identifier of this relation.
|
||||
:attr Dict[_ReferenceKey, _CachedRelation] referenced_by: The relations
|
||||
that refer to this relation.
|
||||
:attr BaseRelation inner: The underlying dbt relation.
|
||||
"""
|
||||
|
||||
def __init__(self, inner):
|
||||
self.referenced_by = {}
|
||||
self.inner = inner
|
||||
|
||||
def __str__(self) -> str:
|
||||
return ("_CachedRelation(database={}, schema={}, identifier={}, inner={})").format(
|
||||
self.database, self.schema, self.identifier, self.inner
|
||||
)
|
||||
|
||||
@property
|
||||
def database(self) -> Optional[str]:
|
||||
return lowercase(self.inner.database)
|
||||
|
||||
@property
|
||||
def schema(self) -> Optional[str]:
|
||||
return lowercase(self.inner.schema)
|
||||
|
||||
@property
|
||||
def identifier(self) -> Optional[str]:
|
||||
return lowercase(self.inner.identifier)
|
||||
|
||||
def __copy__(self):
|
||||
new = self.__class__(self.inner)
|
||||
new.__dict__.update(self.__dict__)
|
||||
return new
|
||||
|
||||
def __deepcopy__(self, memo):
|
||||
new = self.__class__(self.inner.incorporate())
|
||||
new.__dict__.update(self.__dict__)
|
||||
new.referenced_by = deepcopy(self.referenced_by, memo)
|
||||
|
||||
def is_referenced_by(self, key):
|
||||
return key in self.referenced_by
|
||||
|
||||
def key(self):
|
||||
"""Get the _ReferenceKey that represents this relation
|
||||
|
||||
:return _ReferenceKey: A key for this relation.
|
||||
"""
|
||||
return _make_ref_key(self)
|
||||
|
||||
def add_reference(self, referrer: "_CachedRelation"):
|
||||
"""Add a reference from referrer to self, indicating that if this node
|
||||
were drop...cascaded, the referrer would be dropped as well.
|
||||
|
||||
:param _CachedRelation referrer: The node that refers to this node.
|
||||
"""
|
||||
self.referenced_by[referrer.key()] = referrer
|
||||
|
||||
def collect_consequences(self):
|
||||
"""Recursively collect a set of _ReferenceKeys that would
|
||||
consequentially get dropped if this were dropped via
|
||||
"drop ... cascade".
|
||||
|
||||
:return Set[_ReferenceKey]: All the relations that would be dropped
|
||||
"""
|
||||
consequences = {self.key()}
|
||||
for relation in self.referenced_by.values():
|
||||
consequences.update(relation.collect_consequences())
|
||||
return consequences
|
||||
|
||||
def release_references(self, keys):
|
||||
"""Non-recursively indicate that an iterable of _ReferenceKey no longer
|
||||
exist. Unknown keys are ignored.
|
||||
|
||||
:param Iterable[_ReferenceKey] keys: The keys to drop.
|
||||
"""
|
||||
keys = set(self.referenced_by) & set(keys)
|
||||
for key in keys:
|
||||
self.referenced_by.pop(key)
|
||||
|
||||
def rename(self, new_relation):
|
||||
"""Rename this cached relation to new_relation.
|
||||
Note that this will change the output of key(), all refs must be
|
||||
updated!
|
||||
|
||||
:param _CachedRelation new_relation: The new name to apply to the
|
||||
relation
|
||||
"""
|
||||
# Relations store this stuff inside their `path` dict. But they
|
||||
# also store a table_name, and usually use it in their .render(),
|
||||
# so we need to update that as well. It doesn't appear that
|
||||
# table_name is ever anything but the identifier (via .create())
|
||||
self.inner = self.inner.incorporate(
|
||||
path={
|
||||
"database": new_relation.inner.database,
|
||||
"schema": new_relation.inner.schema,
|
||||
"identifier": new_relation.inner.identifier,
|
||||
},
|
||||
)
|
||||
|
||||
def rename_key(self, old_key, new_key):
|
||||
"""Rename a reference that may or may not exist. Only handles the
|
||||
reference itself, so this is the other half of what `rename` does.
|
||||
|
||||
If old_key is not in referenced_by, this is a no-op.
|
||||
|
||||
:param _ReferenceKey old_key: The old key to be renamed.
|
||||
:param _ReferenceKey new_key: The new key to rename to.
|
||||
:raises InternalError: If the new key already exists.
|
||||
"""
|
||||
if new_key in self.referenced_by:
|
||||
raise NewNameAlreadyInCacheError(old_key, new_key)
|
||||
|
||||
if old_key not in self.referenced_by:
|
||||
return
|
||||
value = self.referenced_by.pop(old_key)
|
||||
self.referenced_by[new_key] = value
|
||||
|
||||
def dump_graph_entry(self):
|
||||
"""Return a key/value pair representing this key and its referents.
|
||||
|
||||
return List[str]: The dot-separated form of all referent keys.
|
||||
"""
|
||||
return [dot_separated(r) for r in self.referenced_by]
|
||||
|
||||
|
||||
class RelationsCache:
|
||||
"""A cache of the relations known to dbt. Keeps track of relationships
|
||||
declared between tables and handles renames/drops as a real database would.
|
||||
|
||||
:attr Dict[_ReferenceKey, _CachedRelation] relations: The known relations.
|
||||
:attr threading.RLock lock: The lock around relations, held during updates.
|
||||
The adapters also hold this lock while filling the cache.
|
||||
:attr Set[str] schemas: The set of known/cached schemas, all lowercased.
|
||||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.relations: Dict[_ReferenceKey, _CachedRelation] = {}
|
||||
self.lock = threading.RLock()
|
||||
self.schemas: Set[Tuple[Optional[str], Optional[str]]] = set()
|
||||
|
||||
def add_schema(
|
||||
self,
|
||||
database: Optional[str],
|
||||
schema: Optional[str],
|
||||
) -> None:
|
||||
"""Add a schema to the set of known schemas (case-insensitive)
|
||||
|
||||
:param database: The database name to add.
|
||||
:param schema: The schema name to add.
|
||||
"""
|
||||
self.schemas.add((lowercase(database), lowercase(schema)))
|
||||
|
||||
def drop_schema(
|
||||
self,
|
||||
database: Optional[str],
|
||||
schema: Optional[str],
|
||||
) -> None:
|
||||
"""Drop the given schema and remove it from the set of known schemas.
|
||||
|
||||
Then remove all its contents (and their dependents, etc) as well.
|
||||
"""
|
||||
key = (lowercase(database), lowercase(schema))
|
||||
if key not in self.schemas:
|
||||
return
|
||||
|
||||
# avoid iterating over self.relations while removing things by
|
||||
# collecting the list first.
|
||||
|
||||
with self.lock:
|
||||
to_remove = self._list_relations_in_schema(database, schema)
|
||||
self._remove_all(to_remove)
|
||||
# handle a drop_schema race by using discard() over remove()
|
||||
self.schemas.discard(key)
|
||||
|
||||
def update_schemas(self, schemas: Iterable[Tuple[Optional[str], str]]):
|
||||
"""Add multiple schemas to the set of known schemas (case-insensitive)
|
||||
|
||||
:param schemas: An iterable of the schema names to add.
|
||||
"""
|
||||
self.schemas.update((lowercase(d), s.lower()) for (d, s) in schemas)
|
||||
|
||||
def __contains__(self, schema_id: Tuple[Optional[str], str]):
|
||||
"""A schema is 'in' the relations cache if it is in the set of cached
|
||||
schemas.
|
||||
|
||||
:param schema_id: The db name and schema name to look up.
|
||||
"""
|
||||
db, schema = schema_id
|
||||
return (lowercase(db), schema.lower()) in self.schemas
|
||||
|
||||
def dump_graph(self):
|
||||
"""Dump a key-only representation of the schema to a dictionary. Every
|
||||
known relation is a key with a value of a list of keys it is referenced
|
||||
by.
|
||||
"""
|
||||
# we have to hold the lock for the entire dump, if other threads modify
|
||||
# self.relations or any cache entry's referenced_by during iteration
|
||||
# it's a runtime error!
|
||||
with self.lock:
|
||||
return {dot_separated(k): str(v.dump_graph_entry()) for k, v in self.relations.items()}
|
||||
|
||||
def _setdefault(self, relation: _CachedRelation):
|
||||
"""Add a relation to the cache, or return it if it already exists.
|
||||
|
||||
:param _CachedRelation relation: The relation to set or get.
|
||||
:return _CachedRelation: The relation stored under the given relation's
|
||||
key
|
||||
"""
|
||||
self.add_schema(relation.database, relation.schema)
|
||||
key = relation.key()
|
||||
return self.relations.setdefault(key, relation)
|
||||
|
||||
    def _add_link(self, referenced_key, dependent_key):
        """Add a link between two relations to the database. Both the old and
        new entries must already exist in the database.

        :param _ReferenceKey referenced_key: The key identifying the referenced
            model (the one that if dropped will drop the dependent model).
        :param _ReferenceKey dependent_key: The key identifying the dependent
            model.
        :raises InternalError: If the dependent entry does not exist.
        """
        referenced = self.relations.get(referenced_key)
        if referenced is None:
            # nothing is cached under this key (e.g. an external relation), so
            # there is nothing to link against
            return

        dependent = self.relations.get(dependent_key)
        if dependent is None:
            raise DependentLinkNotCachedError(dependent_key)

        assert dependent is not None  # we just raised!

        referenced.add_reference(dependent)

    # This is called in plugins/postgres/dbt/adapters/postgres/impl.py
    def add_link(self, referenced, dependent):
        """Add a link between two relations to the database. If either relation
        does not exist, it will be added as an "external" relation.

        The dependent model refers _to_ the referenced model. So, given
        arguments of (jake_test, bar, jake_test, foo):
        both values are in the schema jake_test and foo is a view that refers
        to bar, so "drop bar cascade" will drop foo and all of foo's
        dependents.

        :param BaseRelation referenced: The referenced model.
        :param BaseRelation dependent: The dependent model.
        :raises InternalError: If either entry does not exist.
        """
        ref_key = _make_ref_key(referenced)
        dep_key = _make_ref_key(dependent)
        if (ref_key.database, ref_key.schema) not in self:
            # if we have not cached the referenced schema at all, we must be
            # referring to a table outside our control. There's no need to make
            # a link - we will never drop the referenced relation during a run.
            fire_event(
                CacheAction(
                    action="uncached_relation",
                    ref_key=ref_key._asdict(),
                    ref_key_2=dep_key._asdict(),
                )
            )
            return
        if ref_key not in self.relations:
            # Insert a dummy "external" relation.
            referenced = referenced.replace(type=referenced.External)
            self.add(referenced)
        if dep_key not in self.relations:
            # Insert a dummy "external" relation.
            dependent = dependent.replace(type=referenced.External)
            self.add(dependent)
        fire_event(
            CacheAction(
                action="add_link",
                ref_key=dep_key._asdict(),
                ref_key_2=ref_key._asdict(),
            )
        )
        with self.lock:
            self._add_link(ref_key, dep_key)

    def add(self, relation):
        """Add the relation to the cache, under its schema and identifier.

        :param BaseRelation relation: The underlying relation.
        """
        flags = get_flags()
        cached = _CachedRelation(relation)
        fire_event_if(
            flags.LOG_CACHE_EVENTS,
            lambda: CacheDumpGraph(before_after="before", action="adding", dump=self.dump_graph()),
        )
        fire_event(CacheAction(action="add_relation", ref_key=_make_ref_key_dict(cached)))

        with self.lock:
            self._setdefault(cached)
        fire_event_if(
            flags.LOG_CACHE_EVENTS,
            lambda: CacheDumpGraph(before_after="after", action="adding", dump=self.dump_graph()),
        )

    def _remove_refs(self, keys):
        """Removes all references to all entries in keys. This does not
        cascade!

        :param Iterable[_ReferenceKey] keys: The keys to remove.
        """
        # remove direct refs
        for key in keys:
            del self.relations[key]
        # then remove all entries from each child
        for cached in self.relations.values():
            cached.release_references(keys)

    def drop(self, relation):
        """Drop the named relation and cascade it appropriately to all
        dependent relations.

        Because dbt proactively issues many `drop relation if exists ... cascade`
        statements that are noops, dropping a nonexistent relation only causes a
        debug log and no other action.

        :param relation: The relation to drop.
        """
        dropped_key = _make_ref_key(relation)
        dropped_key_msg = _make_ref_key_dict(relation)
        fire_event(CacheAction(action="drop_relation", ref_key=dropped_key_msg))
        with self.lock:
            if dropped_key not in self.relations:
                fire_event(CacheAction(action="drop_missing_relation", ref_key=dropped_key_msg))
                return
            consequences = self.relations[dropped_key].collect_consequences()
            # convert from a list of _ReferenceKeys to a list of ReferenceKeyMsgs
            consequence_msgs = [key._asdict() for key in consequences]
            fire_event(
                CacheAction(
                    action="drop_cascade", ref_key=dropped_key_msg, ref_list=consequence_msgs
                )
            )
            self._remove_refs(consequences)

    def _rename_relation(self, old_key, new_relation):
        """Rename a relation named old_key to new_key, updating references.
        Return whether or not there was a key to rename.

        :param _ReferenceKey old_key: The existing key, to rename from.
        :param _CachedRelation new_relation: The new relation, to rename to.
        """
        # On the database level, a rename updates all values that were
        # previously referenced by old_name to be referenced by new_name.
        # basically, the name changes but some underlying ID moves. Kind of
        # like an object reference!
        relation = self.relations.pop(old_key)
        new_key = new_relation.key()

        # relation has to rename its innards, so it needs the _CachedRelation.
        relation.rename(new_relation)
        # update all the relations that refer to it
        for cached in self.relations.values():
            if cached.is_referenced_by(old_key):
                fire_event(
                    CacheAction(
                        action="update_reference",
                        ref_key=_make_ref_key_dict(old_key),
                        ref_key_2=_make_ref_key_dict(new_key),
                        ref_key_3=_make_ref_key_dict(cached.key()),
                    )
                )

                cached.rename_key(old_key, new_key)

        self.relations[new_key] = relation
        # also fixup the schemas!
        self.add_schema(new_key.database, new_key.schema)

        return True

    def _check_rename_constraints(self, old_key, new_key):
        """Check the rename constraints, and return whether or not the rename
        can proceed.

        If the new key is already present, that is an error.
        If the old key is absent, we debug log and return False, assuming it's
        a temp table being renamed.

        :param _ReferenceKey old_key: The existing key, to rename from.
        :param _ReferenceKey new_key: The new key, to rename to.
        :return bool: If the old relation exists for renaming.
        :raises InternalError: If the new key is already present.
        """
        if new_key in self.relations:
            # Tell user when collision caused by model names truncated during
            # materialization.
            raise TruncatedModelNameCausedCollisionError(new_key, self.relations)

        if old_key not in self.relations:
            fire_event(CacheAction(action="temporary_relation", ref_key=old_key._asdict()))
            return False
        return True

    def rename(self, old, new):
        """Rename the old schema/identifier to the new schema/identifier and
        update references.

        If the new schema/identifier is already present, that is an error.
        If the schema/identifier key is absent, we only debug log and return,
        assuming it's a temp table being renamed.

        :param BaseRelation old: The existing relation name information.
        :param BaseRelation new: The new relation name information.
        :raises InternalError: If the new key is already present.
        """
        old_key = _make_ref_key(old)
        new_key = _make_ref_key(new)
        fire_event(
            CacheAction(
                action="rename_relation",
                ref_key=old_key._asdict(),
                ref_key_2=new_key._asdict(),
            )
        )
        flags = get_flags()
        fire_event_if(
            flags.LOG_CACHE_EVENTS,
            lambda: CacheDumpGraph(before_after="before", action="rename", dump=self.dump_graph()),
        )

        with self.lock:
            if self._check_rename_constraints(old_key, new_key):
                self._rename_relation(old_key, _CachedRelation(new))
            else:
                self._setdefault(_CachedRelation(new))

        fire_event_if(
            flags.LOG_CACHE_EVENTS,
            lambda: CacheDumpGraph(before_after="after", action="rename", dump=self.dump_graph()),
        )

    def get_relations(self, database: Optional[str], schema: Optional[str]) -> List[Any]:
        """Case-insensitively return all relations matching the given database
        and schema.

        :param database: The case-insensitive database name to list from.
        :param schema: The case-insensitive schema name to list from.
        :return List[BaseRelation]: The list of relations with the given
            schema
        """
        database = lowercase(database)
        schema = lowercase(schema)
        with self.lock:
            results = [
                r.inner
                for r in self.relations.values()
                if (lowercase(r.schema) == schema and lowercase(r.database) == database)
            ]

        if None in results:
            raise NoneRelationFoundError()
        return results

    def clear(self):
        """Clear the cache"""
        with self.lock:
            self.relations.clear()
            self.schemas.clear()

    def _list_relations_in_schema(
        self, database: Optional[str], schema: Optional[str]
    ) -> List[_CachedRelation]:
        """Get the relations in a schema. Callers should hold the lock."""
        key = (lowercase(database), lowercase(schema))

        to_remove: List[_CachedRelation] = []
        for cachekey, relation in self.relations.items():
            if (cachekey.database, cachekey.schema) == key:
                to_remove.append(relation)
        return to_remove

    def _remove_all(self, to_remove: List[_CachedRelation]):
        """Remove all the listed relations. Ignore relations that have been
        cascaded out.
        """
        for relation in to_remove:
            # it may have been cascaded out already
            drop_key = _make_ref_key(relation)
            if drop_key in self.relations:
                self.drop(drop_key)
@@ -1,237 +0,0 @@
import threading
import traceback
from contextlib import contextmanager
from importlib import import_module
from pathlib import Path
from typing import Any, Dict, List, Optional, Set, Type

from dbt.adapters.base.plugin import AdapterPlugin
from dbt.adapters.protocol import AdapterConfig, AdapterProtocol, RelationProtocol
from dbt.contracts.connection import AdapterRequiredConfig, Credentials
from dbt.events.functions import fire_event
from dbt.events.types import AdapterImportError, PluginLoadError, AdapterRegistered
from dbt.exceptions import DbtInternalError, DbtRuntimeError
from dbt.include.global_project import PACKAGE_PATH as GLOBAL_PROJECT_PATH
from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME
from dbt.semver import VersionSpecifier

Adapter = AdapterProtocol


class AdapterContainer:
    def __init__(self):
        self.lock = threading.Lock()
        self.adapters: Dict[str, Adapter] = {}
        self.plugins: Dict[str, AdapterPlugin] = {}
        # map package names to their include paths
        self.packages: Dict[str, Path] = {
            GLOBAL_PROJECT_NAME: Path(GLOBAL_PROJECT_PATH),
        }

    def get_plugin_by_name(self, name: str) -> AdapterPlugin:
        with self.lock:
            if name in self.plugins:
                return self.plugins[name]
            names = ", ".join(self.plugins.keys())

        message = f"Invalid adapter type {name}! Must be one of {names}"
        raise DbtRuntimeError(message)

    def get_adapter_class_by_name(self, name: str) -> Type[Adapter]:
        plugin = self.get_plugin_by_name(name)
        return plugin.adapter

    def get_relation_class_by_name(self, name: str) -> Type[RelationProtocol]:
        adapter = self.get_adapter_class_by_name(name)
        return adapter.Relation

    def get_config_class_by_name(self, name: str) -> Type[AdapterConfig]:
        adapter = self.get_adapter_class_by_name(name)
        return adapter.AdapterSpecificConfigs

    def load_plugin(self, name: str) -> Type[Credentials]:
        # this doesn't need a lock: in the worst case we'll overwrite packages
        # and adapter_type entries with the same value, as they're all
        # singletons
        try:
            # mypy doesn't think modules have any attributes.
            mod: Any = import_module("." + name, "dbt.adapters")
        except ModuleNotFoundError as exc:
            # if we failed to import the target module in particular, inform
            # the user about it via a runtime error
            if exc.name == "dbt.adapters." + name:
                fire_event(AdapterImportError(exc=str(exc)))
                raise DbtRuntimeError(f"Could not find adapter type {name}!")
            # otherwise, the error had to have come from some underlying
            # library. Log the stack trace.

            fire_event(PluginLoadError(exc_info=traceback.format_exc()))
            raise
        plugin: AdapterPlugin = mod.Plugin
        plugin_type = plugin.adapter.type()

        if plugin_type != name:
            raise DbtRuntimeError(
                f"Expected to find adapter with type named {name}, got "
                f"adapter with type {plugin_type}"
            )

        with self.lock:
            # other code holds the lock while iterating over self.plugins, so
            # we need to hold it here to add an entry safely
            self.plugins[name] = plugin

            self.packages[plugin.project_name] = Path(plugin.include_path)

            for dep in plugin.dependencies:
                self.load_plugin(dep)

        return plugin.credentials

    def register_adapter(self, config: AdapterRequiredConfig) -> None:
        adapter_name = config.credentials.type
        adapter_type = self.get_adapter_class_by_name(adapter_name)
        adapter_version = import_module(f".{adapter_name}.__version__", "dbt.adapters").version
        adapter_version_specifier = VersionSpecifier.from_version_string(
            adapter_version
        ).to_version_string()
        fire_event(
            AdapterRegistered(adapter_name=adapter_name, adapter_version=adapter_version_specifier)
        )
        with self.lock:
            if adapter_name in self.adapters:
                # this shouldn't really happen...
                return

            adapter: Adapter = adapter_type(config)  # type: ignore
            self.adapters[adapter_name] = adapter

    def lookup_adapter(self, adapter_name: str) -> Adapter:
        return self.adapters[adapter_name]

    def reset_adapters(self):
        """Clear the adapters. This is useful for tests, which change configs."""
        with self.lock:
            for adapter in self.adapters.values():
                adapter.cleanup_connections()
            self.adapters.clear()

    def cleanup_connections(self):
        """Only clean up the adapter connections list without resetting the
        actual adapters.
        """
        with self.lock:
            for adapter in self.adapters.values():
                adapter.cleanup_connections()

    def get_adapter_plugins(self, name: Optional[str]) -> List[AdapterPlugin]:
        """Iterate over the known adapter plugins. If a name is provided,
        iterate in dependency order over the named plugin and its dependencies.
        """
        if name is None:
            return list(self.plugins.values())

        plugins: List[AdapterPlugin] = []
        seen: Set[str] = set()
        plugin_names: List[str] = [name]
        while plugin_names:
            plugin_name = plugin_names[0]
            plugin_names = plugin_names[1:]
            try:
                plugin = self.plugins[plugin_name]
            except KeyError:
                raise DbtInternalError(f"No plugin found for {plugin_name}") from None
            plugins.append(plugin)
            seen.add(plugin_name)
            for dep in plugin.dependencies:
                if dep not in seen:
                    plugin_names.append(dep)
        return plugins

    def get_adapter_package_names(self, name: Optional[str]) -> List[str]:
        package_names: List[str] = [p.project_name for p in self.get_adapter_plugins(name)]
        package_names.append(GLOBAL_PROJECT_NAME)
        return package_names

    def get_include_paths(self, name: Optional[str]) -> List[Path]:
        paths = []
        for package_name in self.get_adapter_package_names(name):
            try:
                path = self.packages[package_name]
            except KeyError:
                raise DbtInternalError(f"No internal package listing found for {package_name}")
            paths.append(path)
        return paths

    def get_adapter_type_names(self, name: Optional[str]) -> List[str]:
        return [p.adapter.type() for p in self.get_adapter_plugins(name)]

    def get_adapter_constraint_support(self, name: Optional[str]) -> List[str]:
        return self.lookup_adapter(name).CONSTRAINT_SUPPORT  # type: ignore


FACTORY: AdapterContainer = AdapterContainer()


def register_adapter(config: AdapterRequiredConfig) -> None:
    FACTORY.register_adapter(config)


def get_adapter(config: AdapterRequiredConfig):
    return FACTORY.lookup_adapter(config.credentials.type)


def get_adapter_by_type(adapter_type):
    return FACTORY.lookup_adapter(adapter_type)


def reset_adapters():
    """Clear the adapters. This is useful for tests, which change configs."""
    FACTORY.reset_adapters()


def cleanup_connections():
    """Only clean up the adapter connections list without resetting the actual
    adapters.
    """
    FACTORY.cleanup_connections()


def get_adapter_class_by_name(name: str) -> Type[AdapterProtocol]:
    return FACTORY.get_adapter_class_by_name(name)


def get_config_class_by_name(name: str) -> Type[AdapterConfig]:
    return FACTORY.get_config_class_by_name(name)


def get_relation_class_by_name(name: str) -> Type[RelationProtocol]:
    return FACTORY.get_relation_class_by_name(name)


def load_plugin(name: str) -> Type[Credentials]:
    return FACTORY.load_plugin(name)


def get_include_paths(name: Optional[str]) -> List[Path]:
    return FACTORY.get_include_paths(name)


def get_adapter_package_names(name: Optional[str]) -> List[str]:
    return FACTORY.get_adapter_package_names(name)


def get_adapter_type_names(name: Optional[str]) -> List[str]:
    return FACTORY.get_adapter_type_names(name)


def get_adapter_constraint_support(name: Optional[str]) -> List[str]:
    return FACTORY.get_adapter_constraint_support(name)


@contextmanager
def adapter_management():
    reset_adapters()
    try:
        yield
    finally:
        cleanup_connections()
@@ -1,158 +0,0 @@
from dataclasses import dataclass
from typing import (
    Type,
    Hashable,
    Optional,
    ContextManager,
    List,
    Generic,
    TypeVar,
    Tuple,
    Dict,
    Any,
)
from typing_extensions import Protocol

import agate

from dbt.contracts.connection import Connection, AdapterRequiredConfig, AdapterResponse
from dbt.contracts.graph.nodes import ResultNode, ManifestNode
from dbt.contracts.graph.model_config import BaseConfig
from dbt.contracts.graph.manifest import Manifest
from dbt.contracts.relation import Policy, HasQuoting

from dbt.graph import Graph


@dataclass
class AdapterConfig(BaseConfig):
    pass


class ConnectionManagerProtocol(Protocol):
    TYPE: str


class ColumnProtocol(Protocol):
    pass


Self = TypeVar("Self", bound="RelationProtocol")


class RelationProtocol(Protocol):
    @classmethod
    def get_default_quote_policy(cls) -> Policy:
        ...

    @classmethod
    def create_from(cls: Type[Self], config: HasQuoting, node: ResultNode) -> Self:
        ...


class CompilerProtocol(Protocol):
    def compile(self, manifest: Manifest, write=True) -> Graph:
        ...

    def compile_node(
        self,
        node: ManifestNode,
        manifest: Manifest,
        extra_context: Optional[Dict[str, Any]] = None,
    ) -> ManifestNode:
        ...


AdapterConfig_T = TypeVar("AdapterConfig_T", bound=AdapterConfig)
ConnectionManager_T = TypeVar("ConnectionManager_T", bound=ConnectionManagerProtocol)
Relation_T = TypeVar("Relation_T", bound=RelationProtocol)
Column_T = TypeVar("Column_T", bound=ColumnProtocol)
Compiler_T = TypeVar("Compiler_T", bound=CompilerProtocol)


# TODO CT-211
class AdapterProtocol(  # type: ignore[misc]
    Protocol,
    Generic[
        AdapterConfig_T,
        ConnectionManager_T,
        Relation_T,
        Column_T,
        Compiler_T,
    ],
):
    # N.B. Technically these are ClassVars, but mypy doesn't support putting type vars in a
    # ClassVar due to the restrictiveness of PEP-526
    # See: https://github.com/python/mypy/issues/5144
    AdapterSpecificConfigs: Type[AdapterConfig_T]
    Column: Type[Column_T]
    Relation: Type[Relation_T]
    ConnectionManager: Type[ConnectionManager_T]
    connections: ConnectionManager_T

    def __init__(self, config: AdapterRequiredConfig):
        ...

    @classmethod
    def type(cls) -> str:
        pass

    def set_query_header(self, manifest: Manifest) -> None:
        ...

    @staticmethod
    def get_thread_identifier() -> Hashable:
        ...

    def get_thread_connection(self) -> Connection:
        ...

    def set_thread_connection(self, conn: Connection) -> None:
        ...

    def get_if_exists(self) -> Optional[Connection]:
        ...

    def clear_thread_connection(self) -> None:
        ...

    def clear_transaction(self) -> None:
        ...

    def exception_handler(self, sql: str) -> ContextManager:
        ...

    def set_connection_name(self, name: Optional[str] = None) -> Connection:
        ...

    def cancel_open(self) -> Optional[List[str]]:
        ...

    def open(cls, connection: Connection) -> Connection:
        ...

    def release(self) -> None:
        ...

    def cleanup_all(self) -> None:
        ...

    def begin(self) -> None:
        ...

    def commit(self) -> None:
        ...

    def close(cls, connection: Connection) -> Connection:
        ...

    def commit_if_has_connection(self) -> None:
        ...

    def execute(
        self, sql: str, auto_begin: bool = False, fetch: bool = False
    ) -> Tuple[AdapterResponse, agate.Table]:
        ...

    def get_compiler(self) -> Compiler_T:
        ...
@@ -1,37 +0,0 @@
# this module exists to resolve circular imports with the events module

from collections import namedtuple
from typing import Any, Optional


_ReferenceKey = namedtuple("_ReferenceKey", "database schema identifier")


def lowercase(value: Optional[str]) -> Optional[str]:
    if value is None:
        return None
    else:
        return value.lower()


# For backwards compatibility. New code should use _make_ref_key
def _make_key(relation: Any) -> _ReferenceKey:
    return _make_ref_key(relation)


def _make_ref_key(relation: Any) -> _ReferenceKey:
    """Make _ReferenceKeys with lowercase values for the cache so we don't have
    to keep track of quoting
    """
    # databases and schemas can both be None
    return _ReferenceKey(
        lowercase(relation.database), lowercase(relation.schema), lowercase(relation.identifier)
    )


def _make_ref_key_dict(relation: Any):
    return {
        "database": relation.database,
        "schema": relation.schema,
        "identifier": relation.identifier,
    }
@@ -1,25 +0,0 @@
# RelationConfig

This package serves as an initial abstraction for managing the inspection of existing relations and determining
changes on those relations. It arose from the materialized view work and currently supports only
materialized views for Postgres and Redshift, as well as dynamic tables for Snowflake. There are three main
classes in this package.

## RelationConfigBase

This is a very small class that only has a `from_dict()` method and a default `NotImplementedError()`. At some
point this could be replaced by a more robust framework, like `mashumaro` or `pydantic`.
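
A minimal sketch of how a subclass interacts with `from_dict()` (the class and its fields are hypothetical, for illustration only):

```python
from dataclasses import dataclass

from dbt.adapters.relation_configs import RelationConfigBase


@dataclass(frozen=True)
class HypotheticalIndexConfig(RelationConfigBase):
    # hypothetical fields, not part of this package
    name: str
    unique: bool


# from_dict() drops null values and passes the rest to the constructor
config = HypotheticalIndexConfig.from_dict({"name": "index_a", "unique": False})
```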

## RelationConfigChange

This class inherits from `RelationConfigBase`; however, it can be thought of as a separate class. The subclassing
merely points to the idea that both classes would likely inherit from the same class in a `mashumaro` or
`pydantic` implementation. This class is far more restricted in its attributes: it should really only
ever need an `action` and a `context`. This can be thought of as being analogous to a web request. You need to
know what you're doing (`action`: 'create' = GET, 'drop' = DELETE, etc.) and the information (`context`) needed
to make the change. In our scenarios, the context tends to be an instance of `RelationConfigBase` corresponding
to the new state.
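
A minimal sketch of a change subclass (hypothetical, for illustration only):

```python
from dataclasses import dataclass

from dbt.adapters.relation_configs import RelationConfigChange


@dataclass(frozen=True, eq=True, unsafe_hash=True)
class HypotheticalIndexChange(RelationConfigChange):
    @property
    def requires_full_refresh(self) -> bool:
        # assume an index can be swapped with an alter, not a full rebuild
        return False
```

The `action` and `context` fields are inherited from `RelationConfigChange`, so an instance would be built as, e.g., `HypotheticalIndexChange(action=RelationConfigChangeAction.create, context=some_config)`.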

## RelationConfigValidationMixin

This mixin provides optional validation mechanics that can be applied to either `RelationConfigBase` or
`RelationConfigChange` subclasses. A validation rule is a combination of a `validation_check`, something
that should evaluate to `True`, and an optional `validation_error`, an instance of `DbtRuntimeError`
that should be raised in the event the `validation_check` fails. While optional, it's recommended that
the `validation_error` be provided for clearer transparency to the end user.
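
A minimal sketch of the mixin in use (hypothetical class and rule, for illustration only):

```python
from dataclasses import dataclass
from typing import Set

from dbt.adapters.relation_configs import (
    RelationConfigValidationMixin,
    RelationConfigValidationRule,
)
from dbt.exceptions import DbtRuntimeError


@dataclass(frozen=True)
class HypotheticalViewConfig(RelationConfigValidationMixin):
    name: str  # hypothetical field

    @property
    def validation_rules(self) -> Set[RelationConfigValidationRule]:
        # checked automatically on creation via __post_init__
        return {
            RelationConfigValidationRule(
                validation_check=len(self.name) > 0,
                validation_error=DbtRuntimeError("name must be non-empty"),
            ),
        }
```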
@@ -1,12 +0,0 @@
from dbt.adapters.relation_configs.config_base import (  # noqa: F401
    RelationConfigBase,
    RelationResults,
)
from dbt.adapters.relation_configs.config_change import (  # noqa: F401
    RelationConfigChangeAction,
    RelationConfigChange,
)
from dbt.adapters.relation_configs.config_validation import (  # noqa: F401
    RelationConfigValidationMixin,
    RelationConfigValidationRule,
)
@@ -1,44 +0,0 @@
from dataclasses import dataclass
from typing import Union, Dict

import agate
from dbt.utils import filter_null_values


"""
This is what relation metadata from the database looks like. It's a dictionary because there will be
multiple grains of data for a single object. For example, a materialized view in Postgres has base level information,
like name. But it also can have multiple indexes, which needs to be a separate query. It might look like this:

{
    "base": agate.Row({"table_name": "table_abc", "query": "select * from table_def"}),
    "indexes": agate.Table(rows=[
        agate.Row({"name": "index_a", "columns": ["column_a"], "type": "hash", "unique": False}),
        agate.Row({"name": "index_b", "columns": ["time_dim_a"], "type": "btree", "unique": False}),
    ]),
}
"""
RelationResults = Dict[str, Union[agate.Row, agate.Table]]


@dataclass(frozen=True)
class RelationConfigBase:
    @classmethod
    def from_dict(cls, kwargs_dict) -> "RelationConfigBase":
        """
        This assumes the subclass of `RelationConfigBase` is flat, in the sense that no attribute is
        itself another subclass of `RelationConfigBase`. If that's not the case, this should be overridden
        to manually manage that complexity.

        Args:
            kwargs_dict: the dict representation of this instance

        Returns: the `RelationConfigBase` representation associated with the provided dict
        """
        return cls(**filter_null_values(kwargs_dict))  # type: ignore

    @classmethod
    def _not_implemented_error(cls) -> NotImplementedError:
        return NotImplementedError(
            "This relation type has not been fully configured for this adapter."
        )
@@ -1,23 +0,0 @@
from abc import ABC, abstractmethod
from dataclasses import dataclass
from typing import Hashable

from dbt.adapters.relation_configs.config_base import RelationConfigBase
from dbt.dataclass_schema import StrEnum


class RelationConfigChangeAction(StrEnum):
    alter = "alter"
    create = "create"
    drop = "drop"


@dataclass(frozen=True, eq=True, unsafe_hash=True)
class RelationConfigChange(RelationConfigBase, ABC):
    action: RelationConfigChangeAction
    context: Hashable  # this is usually a RelationConfig, e.g. IndexConfig, but shouldn't be limited

    @property
    @abstractmethod
    def requires_full_refresh(self) -> bool:
        raise self._not_implemented_error()
@@ -1,57 +0,0 @@
from dataclasses import dataclass
from typing import Set, Optional

from dbt.exceptions import DbtRuntimeError


@dataclass(frozen=True, eq=True, unsafe_hash=True)
class RelationConfigValidationRule:
    validation_check: bool
    validation_error: Optional[DbtRuntimeError]

    @property
    def default_error(self):
        return DbtRuntimeError(
            "There was a validation error in preparing this relation config. "
            "No additional context was provided by this adapter."
        )


@dataclass(frozen=True)
class RelationConfigValidationMixin:
    def __post_init__(self):
        self.run_validation_rules()

    @property
    def validation_rules(self) -> Set[RelationConfigValidationRule]:
        """
        A set of validation rules to run against the object upon creation.

        A validation rule is a combination of a validation check (bool) and an optional error message.

        This defaults to no validation rules if not implemented. It's recommended to override this with values,
        but that may not always be necessary.

        Returns: a set of validation rules
        """
        return set()

    def run_validation_rules(self):
        for validation_rule in self.validation_rules:
            try:
                assert validation_rule.validation_check
            except AssertionError:
                if validation_rule.validation_error:
                    raise validation_rule.validation_error
                else:
                    raise validation_rule.default_error
        self.run_child_validation_rules()

    def run_child_validation_rules(self):
        for attr_value in vars(self).values():
            if hasattr(attr_value, "validation_rules"):
                attr_value.run_validation_rules()
            if isinstance(attr_value, set):
                for member in attr_value:
                    if hasattr(member, "validation_rules"):
                        member.run_validation_rules()
@@ -1,3 +0,0 @@
# these are all just exports, #noqa them so flake8 will be happy
from dbt.adapters.sql.connections import SQLConnectionManager  # noqa
from dbt.adapters.sql.impl import SQLAdapter  # noqa
@@ -1,190 +0,0 @@
import abc
import time
from typing import List, Optional, Tuple, Any, Iterable, Dict, Union

import agate

import dbt.clients.agate_helper
import dbt.exceptions
from dbt.adapters.base import BaseConnectionManager
from dbt.contracts.connection import Connection, ConnectionState, AdapterResponse
from dbt.events.functions import fire_event
from dbt.events.types import ConnectionUsed, SQLQuery, SQLCommit, SQLQueryStatus
from dbt.events.contextvars import get_node_info
from dbt.utils import cast_to_str


class SQLConnectionManager(BaseConnectionManager):
    """The default connection manager with some common SQL methods implemented.

    Methods to implement:
        - exception_handler
        - cancel
        - get_response
        - open
    """

    @abc.abstractmethod
    def cancel(self, connection: Connection):
        """Cancel the given connection."""
        raise dbt.exceptions.NotImplementedError("`cancel` is not implemented for this adapter!")

    def cancel_open(self) -> List[str]:
        names = []
        this_connection = self.get_if_exists()
        with self.lock:
            for connection in self.thread_connections.values():
                if connection is this_connection:
                    continue

                # if the connection failed, the handle will be None so we have
                # nothing to cancel.
                if connection.handle is not None and connection.state == ConnectionState.OPEN:
                    self.cancel(connection)
                if connection.name is not None:
                    names.append(connection.name)
        return names

    def add_query(
        self,
        sql: str,
        auto_begin: bool = True,
        bindings: Optional[Any] = None,
        abridge_sql_log: bool = False,
    ) -> Tuple[Connection, Any]:
        connection = self.get_thread_connection()
        if auto_begin and connection.transaction_open is False:
            self.begin()
        fire_event(
            ConnectionUsed(
                conn_type=self.TYPE,
                conn_name=cast_to_str(connection.name),
                node_info=get_node_info(),
            )
        )

        with self.exception_handler(sql):
            if abridge_sql_log:
                log_sql = "{}...".format(sql[:512])
            else:
                log_sql = sql

            fire_event(
                SQLQuery(
                    conn_name=cast_to_str(connection.name), sql=log_sql, node_info=get_node_info()
                )
            )
            pre = time.time()

            cursor = connection.handle.cursor()
            cursor.execute(sql, bindings)

            fire_event(
                SQLQueryStatus(
                    status=str(self.get_response(cursor)),
                    elapsed=round((time.time() - pre)),
                    node_info=get_node_info(),
                )
            )

            return connection, cursor

    @classmethod
    @abc.abstractmethod
    def get_response(cls, cursor: Any) -> AdapterResponse:
        """Get the status of the cursor."""
        raise dbt.exceptions.NotImplementedError(
            "`get_response` is not implemented for this adapter!"
        )

    @classmethod
    def process_results(
        cls, column_names: Iterable[str], rows: Iterable[Any]
    ) -> List[Dict[str, Any]]:
        # TODO CT-211
        unique_col_names = dict()  # type: ignore[var-annotated]
        # TODO CT-211
        for idx in range(len(column_names)):  # type: ignore[arg-type]
            # TODO CT-211
            col_name = column_names[idx]  # type: ignore[index]
            if col_name in unique_col_names:
                unique_col_names[col_name] += 1
                # TODO CT-211
                column_names[idx] = f"{col_name}_{unique_col_names[col_name]}"  # type: ignore[index] # noqa
            else:
                # TODO CT-211
                unique_col_names[column_names[idx]] = 1  # type: ignore[index]
        return [dict(zip(column_names, row)) for row in rows]

    @classmethod
    def get_result_from_cursor(cls, cursor: Any, limit: Optional[int]) -> agate.Table:
        data: List[Any] = []
        column_names: List[str] = []

        if cursor.description is not None:
            column_names = [col[0] for col in cursor.description]
            if limit:
                rows = cursor.fetchmany(limit)
            else:
                rows = cursor.fetchall()
            data = cls.process_results(column_names, rows)

        return dbt.clients.agate_helper.table_from_data_flat(data, column_names)

    @classmethod
    def data_type_code_to_name(cls, type_code: Union[int, str]) -> str:
        """Get the string representation of the data type from the type_code."""
        # https://peps.python.org/pep-0249/#type-objects
        raise dbt.exceptions.NotImplementedError(
            "`data_type_code_to_name` is not implemented for this adapter!"
        )

    def execute(
        self, sql: str, auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None
    ) -> Tuple[AdapterResponse, agate.Table]:
        sql = self._add_query_comment(sql)
        _, cursor = self.add_query(sql, auto_begin)
        response = self.get_response(cursor)
        if fetch:
            table = self.get_result_from_cursor(cursor, limit)
        else:
            table = dbt.clients.agate_helper.empty_table()
        return response, table

    def add_begin_query(self):
        return self.add_query("BEGIN", auto_begin=False)

    def add_commit_query(self):
        return self.add_query("COMMIT", auto_begin=False)

    def add_select_query(self, sql: str) -> Tuple[Connection, Any]:
        sql = self._add_query_comment(sql)
        return self.add_query(sql, auto_begin=False)

    def begin(self):
        connection = self.get_thread_connection()
        if connection.transaction_open is True:
            raise dbt.exceptions.DbtInternalError(
                'Tried to begin a new transaction on connection "{}", but '
                "it already had one open!".format(connection.name)
            )

        self.add_begin_query()

        connection.transaction_open = True
        return connection

    def commit(self):
        connection = self.get_thread_connection()
        if connection.transaction_open is False:
            raise dbt.exceptions.DbtInternalError(
                'Tried to commit transaction on connection "{}", but '
                "it does not have one open!".format(connection.name)
            )

        fire_event(SQLCommit(conn_name=connection.name, node_info=get_node_info()))
        self.add_commit_query()

        connection.transaction_open = False

        return connection
@@ -1,270 +0,0 @@
import agate
from typing import Any, Optional, Tuple, Type, List

from dbt.contracts.connection import Connection, AdapterResponse
from dbt.exceptions import RelationTypeNullError
from dbt.adapters.base import BaseAdapter, available
from dbt.adapters.cache import _make_ref_key_dict
from dbt.adapters.sql import SQLConnectionManager
from dbt.events.functions import fire_event
from dbt.events.types import ColTypeChange, SchemaCreation, SchemaDrop


from dbt.adapters.base.relation import BaseRelation

LIST_RELATIONS_MACRO_NAME = "list_relations_without_caching"
GET_COLUMNS_IN_RELATION_MACRO_NAME = "get_columns_in_relation"
LIST_SCHEMAS_MACRO_NAME = "list_schemas"
CHECK_SCHEMA_EXISTS_MACRO_NAME = "check_schema_exists"
CREATE_SCHEMA_MACRO_NAME = "create_schema"
DROP_SCHEMA_MACRO_NAME = "drop_schema"
RENAME_RELATION_MACRO_NAME = "rename_relation"
TRUNCATE_RELATION_MACRO_NAME = "truncate_relation"
DROP_RELATION_MACRO_NAME = "drop_relation"
ALTER_COLUMN_TYPE_MACRO_NAME = "alter_column_type"
VALIDATE_SQL_MACRO_NAME = "validate_sql"


class SQLAdapter(BaseAdapter):
    """The default adapter, with the common agate conversions and some SQL
    methods implemented. This adapter has a different, much shorter list of
    methods to implement, but some more macros that must be implemented.

    To implement a macro, implement "${adapter_type}__${macro_name}" in the
    adapter's internal project.

    Methods to implement:
        - date_function

    Macros to implement:
        - get_catalog
        - list_relations_without_caching
        - get_columns_in_relation
    """

    ConnectionManager: Type[SQLConnectionManager]
    connections: SQLConnectionManager

    @available.parse(lambda *a, **k: (None, None))
    def add_query(
        self,
        sql: str,
        auto_begin: bool = True,
        bindings: Optional[Any] = None,
        abridge_sql_log: bool = False,
    ) -> Tuple[Connection, Any]:
        """Add a query to the current transaction. A thin wrapper around
        ConnectionManager.add_query.

        :param sql: The SQL query to add
        :param auto_begin: If set and there is no transaction in progress,
            begin a new one.
        :param bindings: An optional list of bindings for the query.
        :param abridge_sql_log: If set, limit the raw sql logged to 512
            characters
        """
        return self.connections.add_query(sql, auto_begin, bindings, abridge_sql_log)

    @classmethod
    def convert_text_type(cls, agate_table: agate.Table, col_idx: int) -> str:
        return "text"

    @classmethod
    def convert_number_type(cls, agate_table: agate.Table, col_idx: int) -> str:
        # TODO CT-211
        decimals = agate_table.aggregate(agate.MaxPrecision(col_idx))  # type: ignore[attr-defined]
        return "float8" if decimals else "integer"

    @classmethod
    def convert_boolean_type(cls, agate_table: agate.Table, col_idx: int) -> str:
        return "boolean"

    @classmethod
    def convert_datetime_type(cls, agate_table: agate.Table, col_idx: int) -> str:
        return "timestamp without time zone"

    @classmethod
    def convert_date_type(cls, agate_table: agate.Table, col_idx: int) -> str:
        return "date"

    @classmethod
    def convert_time_type(cls, agate_table: agate.Table, col_idx: int) -> str:
        return "time"

    @classmethod
    def is_cancelable(cls) -> bool:
        return True

    def expand_column_types(self, goal, current):
        reference_columns = {c.name: c for c in self.get_columns_in_relation(goal)}

        target_columns = {c.name: c for c in self.get_columns_in_relation(current)}

        for column_name, reference_column in reference_columns.items():
            target_column = target_columns.get(column_name)

            if target_column is not None and target_column.can_expand_to(reference_column):
                col_string_size = reference_column.string_size()
                new_type = self.Column.string_type(col_string_size)
                fire_event(
                    ColTypeChange(
                        orig_type=target_column.data_type,
                        new_type=new_type,
                        table=_make_ref_key_dict(current),
                    )
                )

                self.alter_column_type(current, column_name, new_type)

    def alter_column_type(self, relation, column_name, new_column_type) -> None:
        """
        1. Create a new column (w/ temp name and correct type)
        2. Copy data over to it
        3. Drop the existing column (cascade!)
        4. Rename the new column to existing column
        """
        kwargs = {
            "relation": relation,
            "column_name": column_name,
            "new_column_type": new_column_type,
        }
        self.execute_macro(ALTER_COLUMN_TYPE_MACRO_NAME, kwargs=kwargs)

    def drop_relation(self, relation):
        if relation.type is None:
            raise RelationTypeNullError(relation)

        self.cache_dropped(relation)
        self.execute_macro(DROP_RELATION_MACRO_NAME, kwargs={"relation": relation})

    def truncate_relation(self, relation):
        self.execute_macro(TRUNCATE_RELATION_MACRO_NAME, kwargs={"relation": relation})

    def rename_relation(self, from_relation, to_relation):
        self.cache_renamed(from_relation, to_relation)

        kwargs = {"from_relation": from_relation, "to_relation": to_relation}
        self.execute_macro(RENAME_RELATION_MACRO_NAME, kwargs=kwargs)

    def get_columns_in_relation(self, relation):
        return self.execute_macro(
            GET_COLUMNS_IN_RELATION_MACRO_NAME, kwargs={"relation": relation}
        )

    def create_schema(self, relation: BaseRelation) -> None:
        relation = relation.without_identifier()
        fire_event(SchemaCreation(relation=_make_ref_key_dict(relation)))
        kwargs = {
            "relation": relation,
        }
        self.execute_macro(CREATE_SCHEMA_MACRO_NAME, kwargs=kwargs)
        self.commit_if_has_connection()
        # we can't update the cache here, as if the schema already existed we
        # don't want to (incorrectly) say that it's empty

    def drop_schema(self, relation: BaseRelation) -> None:
        relation = relation.without_identifier()
        fire_event(SchemaDrop(relation=_make_ref_key_dict(relation)))
        kwargs = {
            "relation": relation,
        }
        self.execute_macro(DROP_SCHEMA_MACRO_NAME, kwargs=kwargs)
        self.commit_if_has_connection()
        # we can update the cache here
        self.cache.drop_schema(relation.database, relation.schema)

    def list_relations_without_caching(
        self,
        schema_relation: BaseRelation,
    ) -> List[BaseRelation]:
        kwargs = {"schema_relation": schema_relation}
        results = self.execute_macro(LIST_RELATIONS_MACRO_NAME, kwargs=kwargs)

        relations = []
        quote_policy = {"database": True, "schema": True, "identifier": True}
        for _database, name, _schema, _type in results:
            try:
                _type = self.Relation.get_relation_type(_type)
            except ValueError:
                _type = self.Relation.External
            relations.append(
                self.Relation.create(
                    database=_database,
                    schema=_schema,
                    identifier=name,
                    quote_policy=quote_policy,
                    type=_type,
                )
            )
        return relations

    @classmethod
    def quote(cls, identifier):
        return '"{}"'.format(identifier)

    def list_schemas(self, database: str) -> List[str]:
        results = self.execute_macro(LIST_SCHEMAS_MACRO_NAME, kwargs={"database": database})

        return [row[0] for row in results]

    def check_schema_exists(self, database: str, schema: str) -> bool:
        information_schema = self.Relation.create(
            database=database,
            schema=schema,
            identifier="INFORMATION_SCHEMA",
            quote_policy=self.config.quoting,
        ).information_schema()

        kwargs = {"information_schema": information_schema, "schema": schema}
        results = self.execute_macro(CHECK_SCHEMA_EXISTS_MACRO_NAME, kwargs=kwargs)
        return results[0][0] > 0

    def validate_sql(self, sql: str) -> AdapterResponse:
        """Submit the given SQL to the engine for validation, but not execution.

        By default we simply prefix the query with the explain keyword and allow the
        exceptions thrown by the underlying engine on invalid SQL inputs to bubble up
        to the exception handler. For adjustments to the explain statement - such as
        for adapters that have different mechanisms for hinting at query validation
        or dry-run - callers may be able to override the validate_sql_query macro with
        the addition of an <adapter>__validate_sql implementation.

        :param sql str: The sql to validate
        """
        kwargs = {
            "sql": sql,
        }
        result = self.execute_macro(VALIDATE_SQL_MACRO_NAME, kwargs=kwargs)
        # The statement macro always returns an AdapterResponse in the output AttrDict's
        # `response` property, and we preserve the full payload in case we want to
        # return fetched output for engines where explain plans are emitted as columnar
        # results. Any macro override that deviates from this behavior may encounter an
        # assertion error in the runtime.
        adapter_response = result.response  # type: ignore[attr-defined]
        assert isinstance(adapter_response, AdapterResponse), (
            f"Expected AdapterResponse from validate_sql macro execution, "
            f"got {type(adapter_response)}."
        )
        return adapter_response

    # This is for use in the test suite
    def run_sql_for_tests(self, sql, fetch, conn):
        cursor = conn.handle.cursor()
        try:
            cursor.execute(sql)
            if hasattr(conn.handle, "commit"):
                conn.handle.commit()
            if fetch == "one":
                return cursor.fetchone()
            elif fetch == "all":
                return cursor.fetchall()
            else:
                return
        except BaseException as e:
            if conn.handle and not getattr(conn.handle, "closed", True):
                conn.handle.rollback()
            print(sql)
            print(e)
            raise
        finally:
            conn.transaction_open = False
core/dbt/artifacts/README.md
@@ -0,0 +1,58 @@
# dbt/artifacts

## Overview
This directory is meant to be a lightweight module that is independent of (and upstream of) the rest of dbt-core internals.

Its primary responsibility is to define simple data classes that represent the versioned artifact schemas that dbt writes as JSON files throughout execution.

Long term, this module may be released as a standalone package (e.g. dbt-artifacts) to support stable, programmatic parsing of dbt artifacts.

`dbt/artifacts` is organized into artifact 'schemas' and 'resources'. Schemas represent the final serialized artifact object, while resources represent sub-components of the larger artifact schemas.

### dbt/artifacts/schemas


Each major version of a schema under `dbt/artifacts/schemas` is defined in its corresponding `dbt/artifacts/schemas/<artifact-name>/v<version>` directory. Before `dbt/artifacts` existed, artifact schemas were always modified in-place, which is why artifacts are missing class definitions for historical versions.

Currently, there are four artifact schemas defined in `dbt/artifacts/schemas`:

| Artifact name | File             | Class                            | Latest definition                  |
|---------------|------------------|----------------------------------|------------------------------------|
| manifest      | manifest.json    | WritableManifest                 | dbt/artifacts/schemas/manifest/v11 |
| catalog       | catalog.json     | CatalogArtifact                  | dbt/artifacts/schemas/catalog/v1   |
| run           | run_results.json | RunResultsArtifact               | dbt/artifacts/schemas/run/v5       |
| freshness     | sources.json     | FreshnessExecutionResultArtifact | dbt/artifacts/schemas/freshness/v3 |


### dbt/artifacts/resources

All existing resources are defined under `dbt/artifacts/resources/v1`.

## Making changes to dbt/artifacts

### Non-breaking changes

Freely make incremental, non-breaking changes in place to the latest major version of any artifact (via minor or patch version bumps). The only changes that are fully forward and backward compatible are, as sketched below:
* Adding a new field with a default
* Deleting an __optional__ field
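
As a rough sketch (the resource class here is hypothetical, not one of the real artifact classes), a non-breaking change only ever adds a defaulted field:

```python
from dataclasses import dataclass
from typing import Optional


@dataclass
class HypotheticalResource:
    # original, required field: present in every serialized artifact
    name: str
    # added later with a default: old JSON without the key still parses,
    # and old readers simply ignore the new key in new JSON
    description: Optional[str] = None
```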

### Breaking changes
A breaking change is anything that:
* Deletes a required field
* Changes the name or type of an existing field
* Removes the default from a field

These should generally be avoided and, when unavoidable, bundled together to keep disruption across the integration ecosystem as minimal as possible.

However, when it comes time to make one (or more) of these, a new versioned artifact should be created as follows:
1. Create a new version directory and file that defines the new artifact schema under `dbt/artifacts/schemas/<artifact>/v<next-artifact-version>/<artifact>.py`
2. If any resources are having breaking changes introduced, create a new resource class that defines the new resource schema under `dbt/artifacts/resources/v<next-resource-version>/<resource>.py`
3. Implement upgrade paths on the new versioned artifact class so it can be constructed given a dictionary representation of any previous version of the same artifact
    * TODO: update once the design is finalized
4. Implement downgrade paths on all previous versions of the artifact class so they can still be constructed given a dictionary representation of the new artifact schema
    * TODO: update once the design is finalized
5. Update the 'latest' aliases to point to the new version of the artifact and/or resource:
    * Artifact: `dbt/artifacts/schemas/<artifact>/__init__.py`
    * Resource: `dbt/artifacts/resources/__init__.py`

Downstream consumers (e.g. dbt-core) importing from the latest alias are susceptible to breaking changes. Ideally, any incompatibilities should be caught by static type checking in those systems. However, it is always possible for consumers to pin imports to previous versions via `dbt.artifacts.schemas.<artifact>.v<prev-version>`, as in the sketch below.
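
A hypothetical illustration of the two import styles (the module paths follow the convention above; the exact class names are examples, not guaranteed):

```python
# tracks the latest schema: concise, but susceptible to breaking changes
from dbt.artifacts.schemas.run import RunResultsArtifact

# pinned to a specific historical version: stable across artifact bumps
from dbt.artifacts.schemas.run.v5 import RunResultsArtifact as RunResultsArtifactV5
```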
core/dbt/artifacts/exceptions/__init__.py
@@ -0,0 +1 @@
from dbt.artifacts.exceptions.schemas import IncompatibleSchemaError
core/dbt/artifacts/exceptions/schemas.py
@@ -0,0 +1,31 @@
from typing import Optional

from dbt_common.exceptions import DbtRuntimeError


class IncompatibleSchemaError(DbtRuntimeError):
    def __init__(self, expected: str, found: Optional[str] = None) -> None:
        self.expected = expected
        self.found = found
        self.filename = "input file"

        super().__init__(msg=self.get_message())

    def add_filename(self, filename: str):
        self.filename = filename
        self.msg = self.get_message()

    def get_message(self) -> str:
        found_str = "nothing"
        if self.found is not None:
            found_str = f'"{self.found}"'

        msg = (
            f'Expected a schema version of "{self.expected}" in '
            f"{self.filename}, but found {found_str}. Are you running with a "
            f"different version of dbt?"
        )
        return msg

    CODE = 10014
    MESSAGE = "Incompatible Schema"
core/dbt/artifacts/resources/__init__.py
@@ -0,0 +1,101 @@
|
||||
from dbt.artifacts.resources.base import BaseResource, GraphResource, FileHash, Docs
|
||||
|
||||
# alias to latest resource definitions
|
||||
from dbt.artifacts.resources.v1.components import (
|
||||
DependsOn,
|
||||
NodeVersion,
|
||||
RefArgs,
|
||||
HasRelationMetadata,
|
||||
ParsedResourceMandatory,
|
||||
ParsedResource,
|
||||
ColumnInfo,
|
||||
CompiledResource,
|
||||
InjectedCTE,
|
||||
Contract,
|
||||
DeferRelation,
|
||||
FreshnessThreshold,
|
||||
Quoting,
|
||||
Time,
|
||||
)
|
||||
from dbt.artifacts.resources.v1.analysis import Analysis
|
||||
from dbt.artifacts.resources.v1.hook import HookNode
|
||||
from dbt.artifacts.resources.v1.model import Model, ModelConfig
|
||||
from dbt.artifacts.resources.v1.sql_operation import SqlOperation
|
||||
from dbt.artifacts.resources.v1.seed import Seed, SeedConfig
|
||||
from dbt.artifacts.resources.v1.singular_test import SingularTest
|
||||
from dbt.artifacts.resources.v1.generic_test import GenericTest, TestMetadata
|
||||
from dbt.artifacts.resources.v1.snapshot import Snapshot, SnapshotConfig
|
||||
|
||||
|
||||
from dbt.artifacts.resources.v1.documentation import Documentation
|
||||
from dbt.artifacts.resources.v1.exposure import (
|
||||
Exposure,
|
||||
ExposureConfig,
|
||||
ExposureType,
|
||||
MaturityType,
|
||||
)
|
||||
from dbt.artifacts.resources.v1.macro import Macro, MacroDependsOn, MacroArgument
|
||||
from dbt.artifacts.resources.v1.group import Group
|
||||
from dbt.artifacts.resources.v1.metric import (
|
||||
ConstantPropertyInput,
|
||||
ConversionTypeParams,
|
||||
Metric,
|
||||
MetricConfig,
|
||||
MetricInput,
|
||||
MetricInputMeasure,
|
||||
MetricTimeWindow,
|
||||
MetricTypeParams,
|
||||
)
|
||||
from dbt.artifacts.resources.v1.owner import Owner
|
||||
from dbt.artifacts.resources.v1.saved_query import (
|
||||
Export,
|
||||
ExportConfig,
|
||||
QueryParams,
|
||||
SavedQuery,
|
||||
SavedQueryConfig,
|
||||
SavedQueryMandatory,
|
||||
)
|
||||
from dbt.artifacts.resources.v1.semantic_layer_components import (
|
||||
FileSlice,
|
||||
SourceFileMetadata,
|
||||
WhereFilter,
|
||||
WhereFilterIntersection,
|
||||
)
|
||||
from dbt.artifacts.resources.v1.semantic_model import (
|
||||
Defaults,
|
||||
Dimension,
|
||||
DimensionTypeParams,
|
||||
DimensionValidityParams,
|
||||
Entity,
|
||||
Measure,
|
||||
MeasureAggregationParameters,
|
||||
NodeRelation,
|
||||
NonAdditiveDimension,
|
||||
SemanticModel,
|
||||
SemanticModelConfig,
|
||||
)
|
||||
|
||||
from dbt.artifacts.resources.v1.config import (
|
||||
NodeAndTestConfig,
|
||||
NodeConfig,
|
||||
TestConfig,
|
||||
Hook,
|
||||
)
|
||||
|
||||
from dbt.artifacts.resources.v1.source_definition import (
|
||||
SourceConfig,
|
||||
ExternalPartition,
|
||||
ExternalTable,
|
||||
SourceDefinition,
|
||||
ParsedSourceMandatory,
|
||||
)
|
||||
|
||||
from dbt.artifacts.resources.v1.unit_test_definition import (
|
||||
UnitTestConfig,
|
||||
UnitTestDefinition,
|
||||
UnitTestInputFixture,
|
||||
UnitTestOutputFixture,
|
||||
UnitTestOverrides,
|
||||
UnitTestNodeVersions,
|
||||
UnitTestFormat,
|
||||
)
|
||||
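
Since this __init__ re-exports the v1 definitions at the package root, consumers can import from the stable, unversioned path. A small sketch of the aliasing (RefArgs is defined in v1/components.py, shown further below):

    from dbt.artifacts.resources import RefArgs
    from dbt.artifacts.resources.v1.components import RefArgs as V1RefArgs

    # The package root and the versioned module expose the same class object.
    assert RefArgs is V1RefArgs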
67
core/dbt/artifacts/resources/base.py
Normal file
@@ -0,0 +1,67 @@
from dataclasses import dataclass
from dbt_common.dataclass_schema import dbtClassMixin
from typing import List, Optional
import hashlib

from dbt.artifacts.resources.types import NodeType


@dataclass
class BaseResource(dbtClassMixin):
    name: str
    resource_type: NodeType
    package_name: str
    path: str
    original_file_path: str
    unique_id: str


@dataclass
class GraphResource(BaseResource):
    fqn: List[str]


@dataclass
class FileHash(dbtClassMixin):
    name: str  # the hash type name
    checksum: str  # the hashlib.hash_type().hexdigest() of the file contents

    @classmethod
    def empty(cls):
        return FileHash(name="none", checksum="")

    @classmethod
    def path(cls, path: str):
        return FileHash(name="path", checksum=path)

    def __eq__(self, other):
        if not isinstance(other, FileHash):
            return NotImplemented

        if self.name == "none" or self.name != other.name:
            return False

        return self.checksum == other.checksum

    def compare(self, contents: str) -> bool:
        """Compare the file contents with the given hash"""
        if self.name == "none":
            return False

        return self.from_contents(contents, name=self.name) == self.checksum

    @classmethod
    def from_contents(cls, contents: str, name="sha256") -> "FileHash":
        """Create a file hash from the given file contents. The hash is always
        the utf-8 encoding of the contents given, because dbt only reads files
        as utf-8.
        """
        data = contents.encode("utf-8")
        checksum = hashlib.new(name, data).hexdigest()
        return cls(name=name, checksum=checksum)


@dataclass
class Docs(dbtClassMixin):
    show: bool = True
    node_color: Optional[str] = None
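
FileHash delegates to hashlib.new, so any algorithm hashlib supports can name the checksum type, and __eq__ deliberately treats "none" hashes as never equal. A quick sketch of the round trip (the sample contents string is arbitrary):

    from dbt.artifacts.resources.base import FileHash

    contents = "select 1 as id"

    h = FileHash.from_contents(contents)  # defaults to sha256
    print(h.name)                         # sha256
    print(len(h.checksum))                # 64 hex characters

    # Equality requires matching algorithms and a non-"none" hash type:
    assert h == FileHash.from_contents(contents)
    assert h != FileHash.from_contents(contents, name="md5")
    assert FileHash.empty() != FileHash.empty()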
70
core/dbt/artifacts/resources/types.py
Normal file
@@ -0,0 +1,70 @@
from dbt_common.dataclass_schema import StrEnum


class AccessType(StrEnum):
    Private = "private"
    Protected = "protected"
    Public = "public"

    @classmethod
    def is_valid(cls, item):
        try:
            cls(item)
        except ValueError:
            return False
        return True


class NodeType(StrEnum):
    Model = "model"
    Analysis = "analysis"
    Test = "test"  # renamed to 'data_test'; preserved as 'test' here for back-compat
    Snapshot = "snapshot"
    Operation = "operation"
    Seed = "seed"
    # TODO: rm?
    RPCCall = "rpc"
    SqlOperation = "sql_operation"
    Documentation = "doc"
    Source = "source"
    Macro = "macro"
    Exposure = "exposure"
    Metric = "metric"
    Group = "group"
    SavedQuery = "saved_query"
    SemanticModel = "semantic_model"
    Unit = "unit_test"
    Fixture = "fixture"

    def pluralize(self) -> str:
        if self is self.Analysis:
            return "analyses"
        elif self is self.SavedQuery:
            return "saved_queries"
        elif self is self.Test:
            return "data_tests"
        return f"{self}s"


class RunHookType(StrEnum):
    Start = "on-run-start"
    End = "on-run-end"


class ModelLanguage(StrEnum):
    python = "python"
    sql = "sql"


class ModelHookType(StrEnum):
    PreHook = "pre-hook"
    PostHook = "post-hook"


class TimePeriod(StrEnum):
    minute = "minute"
    hour = "hour"
    day = "day"

    def plural(self) -> str:
        return str(self) + "s"
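
pluralize() special-cases the members whose plural form is irregular ("analyses") or renamed ("data_tests"); every other member just gains an "s". Expected behavior, with outputs as comments:

    from dbt.artifacts.resources.types import AccessType, NodeType

    print(NodeType.Model.pluralize())     # models
    print(NodeType.Analysis.pluralize())  # analyses
    print(NodeType.Test.pluralize())      # data_tests (the back-compat rename)

    # AccessType.is_valid simply asks whether the enum accepts the string:
    print(AccessType.is_valid("protected"))  # True
    print(AccessType.is_valid("internal"))   # False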
9
core/dbt/artifacts/resources/v1/analysis.py
Normal file
@@ -0,0 +1,9 @@
from dbt.artifacts.resources.v1.components import CompiledResource
from typing import Literal
from dataclasses import dataclass
from dbt.artifacts.resources.types import NodeType


@dataclass
class Analysis(CompiledResource):
    resource_type: Literal[NodeType.Analysis]
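
The Literal[NodeType.Analysis] annotation narrows the inherited resource_type field to one enum member, which is what lets a schema-driven deserializer pick the right node class. A generic sketch of that discriminated-union pattern, using stand-in classes rather than the dbt ones:

    from dataclasses import dataclass
    from typing import Literal, Union

    @dataclass
    class Cat:
        kind: Literal["cat"]

    @dataclass
    class Dog:
        kind: Literal["dog"]

    def load(raw: dict) -> Union[Cat, Dog]:
        # Dispatch on the discriminator field, as a schema library would.
        return {"cat": Cat, "dog": Dog}[raw["kind"]](**raw)

    print(load({"kind": "dog"}))  # Dog(kind='dog')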
236
core/dbt/artifacts/resources/v1/components.py
Normal file
@@ -0,0 +1,236 @@
import time
from dataclasses import dataclass, field
from dbt.artifacts.resources.base import GraphResource, FileHash, Docs
from dbt.artifacts.resources.types import NodeType
from dbt.artifacts.resources.v1.config import NodeConfig
from dbt_common.dataclass_schema import dbtClassMixin, ExtensibleDbtClassMixin
from dbt_common.contracts.config.properties import AdditionalPropertiesMixin
from dbt_common.contracts.constraints import ColumnLevelConstraint
from typing import Dict, List, Optional, Union, Any
from datetime import timedelta
from dbt.artifacts.resources.types import TimePeriod
from dbt_common.contracts.util import Mergeable


NodeVersion = Union[str, float]


@dataclass
class MacroDependsOn(dbtClassMixin):
    macros: List[str] = field(default_factory=list)

    # 'in' on lists is O(n) so this is O(n^2) for # of macros
    def add_macro(self, value: str):
        if value not in self.macros:
            self.macros.append(value)


@dataclass
class DependsOn(MacroDependsOn):
    nodes: List[str] = field(default_factory=list)

    def add_node(self, value: str):
        if value not in self.nodes:
            self.nodes.append(value)


@dataclass
class RefArgs(dbtClassMixin):
    name: str
    package: Optional[str] = None
    version: Optional[NodeVersion] = None

    @property
    def positional_args(self) -> List[str]:
        if self.package:
            return [self.package, self.name]
        else:
            return [self.name]

    @property
    def keyword_args(self) -> Dict[str, Optional[NodeVersion]]:
        if self.version:
            return {"version": self.version}
        else:
            return {}


@dataclass
class ColumnInfo(AdditionalPropertiesMixin, ExtensibleDbtClassMixin):
    """Used in all ManifestNodes and SourceDefinition"""

    name: str
    description: str = ""
    meta: Dict[str, Any] = field(default_factory=dict)
    data_type: Optional[str] = None
    constraints: List[ColumnLevelConstraint] = field(default_factory=list)
    quote: Optional[bool] = None
    tags: List[str] = field(default_factory=list)
    _extra: Dict[str, Any] = field(default_factory=dict)


@dataclass
class InjectedCTE(dbtClassMixin):
    """Used in CompiledNodes as part of ephemeral model processing"""

    id: str
    sql: str


@dataclass
class Contract(dbtClassMixin):
    enforced: bool = False
    alias_types: bool = True
    checksum: Optional[str] = None


@dataclass
class Quoting(dbtClassMixin, Mergeable):
    database: Optional[bool] = None
    schema: Optional[bool] = None
    identifier: Optional[bool] = None
    column: Optional[bool] = None


@dataclass
class Time(dbtClassMixin, Mergeable):
    count: Optional[int] = None
    period: Optional[TimePeriod] = None

    def exceeded(self, actual_age: float) -> bool:
        if self.period is None or self.count is None:
            return False
        kwargs: Dict[str, int] = {self.period.plural(): self.count}
        difference = timedelta(**kwargs).total_seconds()
        return actual_age > difference

    def __bool__(self):
        return self.count is not None and self.period is not None


@dataclass
class FreshnessThreshold(dbtClassMixin, Mergeable):
    warn_after: Optional[Time] = field(default_factory=Time)
    error_after: Optional[Time] = field(default_factory=Time)
    filter: Optional[str] = None

    def status(self, age: float) -> "dbt.artifacts.schemas.results.FreshnessStatus":  # type: ignore # noqa F821
        from dbt.artifacts.schemas.results import FreshnessStatus

        if self.error_after and self.error_after.exceeded(age):
            return FreshnessStatus.Error
        elif self.warn_after and self.warn_after.exceeded(age):
            return FreshnessStatus.Warn
        else:
            return FreshnessStatus.Pass

    def __bool__(self):
        return bool(self.warn_after) or bool(self.error_after)


@dataclass
class HasRelationMetadata(dbtClassMixin):
    database: Optional[str]
    schema: str

    # Can't set database to None like it ought to be
    # because it messes up the subclasses and default parameters
    # so hack it here
    @classmethod
    def __pre_deserialize__(cls, data):
        data = super().__pre_deserialize__(data)
        if "database" not in data:
            data["database"] = None
        return data

    @property
    def quoting_dict(self) -> Dict[str, bool]:
        if hasattr(self, "quoting"):
            return self.quoting.to_dict(omit_none=True)
        else:
            return {}


@dataclass
class DeferRelation(HasRelationMetadata):
    alias: str
    relation_name: Optional[str]
    # The rest of these fields match RelationConfig protocol exactly
    resource_type: NodeType
    name: str
    description: str
    compiled_code: Optional[str]
    meta: Dict[str, Any]
    tags: List[str]
    config: Optional[NodeConfig]

    @property
    def identifier(self):
        return self.alias


@dataclass
class ParsedResourceMandatory(GraphResource, HasRelationMetadata):
    alias: str
    checksum: FileHash
    config: NodeConfig = field(default_factory=NodeConfig)

    @property
    def identifier(self):
        return self.alias


@dataclass
class ParsedResource(ParsedResourceMandatory):
    tags: List[str] = field(default_factory=list)
    description: str = field(default="")
    columns: Dict[str, ColumnInfo] = field(default_factory=dict)
    meta: Dict[str, Any] = field(default_factory=dict)
    group: Optional[str] = None
    docs: Docs = field(default_factory=Docs)
    patch_path: Optional[str] = None
    build_path: Optional[str] = None
    unrendered_config: Dict[str, Any] = field(default_factory=dict)
    created_at: float = field(default_factory=lambda: time.time())
    config_call_dict: Dict[str, Any] = field(default_factory=dict)
    relation_name: Optional[str] = None
    raw_code: str = ""

    def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None):
        dct = super().__post_serialize__(dct, context)
        if context and context.get("artifact") and "config_call_dict" in dct:
            del dct["config_call_dict"]
        return dct


@dataclass
class CompiledResource(ParsedResource):
    """Contains attributes necessary for SQL files and nodes with refs, sources, etc,
    so all ManifestNodes except SeedNode."""

    language: str = "sql"
    refs: List[RefArgs] = field(default_factory=list)
    sources: List[List[str]] = field(default_factory=list)
    metrics: List[List[str]] = field(default_factory=list)
    depends_on: DependsOn = field(default_factory=DependsOn)
    compiled_path: Optional[str] = None
    compiled: bool = False
    compiled_code: Optional[str] = None
    extra_ctes_injected: bool = False
    extra_ctes: List[InjectedCTE] = field(default_factory=list)
    _pre_injected_sql: Optional[str] = None
    contract: Contract = field(default_factory=Contract)

    def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None):
        dct = super().__post_serialize__(dct, context)
        if "_pre_injected_sql" in dct:
            del dct["_pre_injected_sql"]
        # Remove compiled attributes
        if "compiled" in dct and dct["compiled"] is False:
            del dct["compiled"]
            del dct["extra_ctes_injected"]
            del dct["extra_ctes"]
            # "omit_none" means these might not be in the dictionary
            if "compiled_code" in dct:
                del dct["compiled_code"]
        return dct
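
Time.exceeded turns the configured count/period into seconds via timedelta, and FreshnessThreshold.status maps an observed age onto error, warn, or pass, in that order. A worked sketch using only the classes above (ages are in seconds; the exact repr of the printed status depends on FreshnessStatus):

    from dbt.artifacts.resources import FreshnessThreshold, Time
    from dbt.artifacts.resources.types import TimePeriod

    threshold = FreshnessThreshold(
        warn_after=Time(count=6, period=TimePeriod.hour),
        error_after=Time(count=1, period=TimePeriod.day),
    )

    age = 7 * 3600.0  # source data last loaded seven hours ago

    print(threshold.warn_after.exceeded(age))   # True:  7h > 6h = 21600s
    print(threshold.error_after.exceeded(age))  # False: 7h < 1 day = 86400s
    print(threshold.status(age))                # FreshnessStatus.Warn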
Some files were not shown because too many files have changed in this diff.