Mirror of https://github.com/dbt-labs/dbt-core (synced 2025-12-17 19:31:34 +00:00)

Compare commits: 277 commits, `jerco/upda…` → `jerco/unit…`
Commits in this comparison (277 abbreviated SHA1s; the author, date, and message columns of the commit table are empty in this view):

```text
e2be7ba329 8197fa7b4d 827e35e3ee e00199186e 9bb970e6ef 1c9cec1787 4d02ef637b
a559259e32 964a7283cb 19f027b7a7 3432436dae ab90c777d0 f1d68f402a 3902137dfc
0131feac68 35f579e3eb c6be2d288f 436dae6bb3 017faf4bd1 c2f7d75e9e ebf48d2b50
e24f9b3da7 3b033ac108 b58e8e3ffc f45b013321 e547c0ec64 6871fc46b5 931b2dbe40
2792e0c2ce bb35b3eb87 01d481bc8d 46b9a1d621 839c720e91 d88c6987a2 6c1822f186
c7c3ac872c f629baa95d 02a3dc5be3 aa91ea4c00 7fddd6e448 f77c2260f2 bb21403c9e
ac972948b8 211392c4a4 7317de23a3 a2a7b7d795 4122f6c308 6aeebc4c76 98310b6612
ef9d6a870f 35f46dac8c df4e4ed388 efa6339e18 1baebb423c 462df8395e 35f214d9db
af0cbcb6a5 2e35426d11 bf10a29f06 a7e2d9bc40 a3777496b5 edf6aedc51 53845d0277
3d27483658 4f9bd0cb38 3f7f7de179 6461f5aacf 339957b42c 4391dc1a63 964e0e4e8a
549dbf3390 70b2e15a25 bb249d612c 17773bdb94 f30293359c 3b6f9bdef4 0c85e6149f
ec57d7af94 5cafb96956 df791f729c c6ff3abecd eac13e3bd3 bb6fd3029b 46ee3f3d9c
5e1f0c5fbc c4f09b160a 48c97e86dd 416bc845ad 408a78985a 0c965c8115 f65e4b6940
a2d4424f92 997f839cd6 556fad50df ac719e441c bb4214b5c2 f17c1f3fe7 d4fe9a8ad4
2910aa29e4 89cc073ea8 aa86fdfe71 48e9ced781 7b02bd1f02 417fc2a735 317128f790
e3dfb09b10 08ef90aafa d912654110 34ab4cf9be d597b80486 3f5ebe81b9 f52bd9287b
f5baeeea1c 3cc7044fb3 3dbf0951b2 26c7675c28 c48e34c47a 8aaed0e29f 5182e3c40c
1e252c7664 05ef3b6e44 12342ca92b ad04012b63 c93cba4603 971669016f 6c6f245914
2b376d9dba b39eeb328c 120b36e2f5 be94bf1f3c e24a952e98 89f20d12cf ebeb0f1154
d66fe214d9 75781503b8 9aff3ca274 7e2a08f3a5 a0e13561b1 7eedfcd274 da779ac77c
adfa3226e3 e5e1a272ff d8e8a78368 1e64f94bf0 7ae3de1fa0 72898c7211 fc1a14a0e3
b3bcbd5ea4 f063e4e01c 42e66fda65 07372db906 7ea7069999 48d04e8141 6234267242
1afbb87e99 d18a74ddb7 4d3c6d9c7c 10f9724827 24abc3719a 582faa129e 4ec87a01e0
ff98685dd6 424f3d218a 661623f9f7 49397b4d7b 0553fd817c 7ad971f720 f485c13035
c30b691164 d088d4493e 770f804325 37a29073de 17cd145f09 ac539fd5cf 048553ddc3
dfe6b71fd9 18ee93ca3a cb4bc2d6e9 181f5209a0 b0451806ef b514e4c249 8350dfead3
34e6edbb13 27be92903e 9388030182 b7aee3f5a4 83ff38ab24 6603a44151 e69d4e7f14
506f65e880 41bb52762b 8c98ef3e70 44d1e73b4f 53794fbaba 556b4043e9 424c636533
f63709260e 991618dfc1 1af489b1cd a433c31d6e 5814928e38 6130a6e1d0 7872f6a670
f230e418aa 518eb73f88 5b6d21d7da 410506f448 3cb44d37c0 f977ed7471 3f5617b569
fe9c875d32 23b16ad6d2 fdeccfaf24 fecde23da5 b1d931337e 39542336b8 799588cada
f392add4b8 49560bf2a2 44b3ed5ae9 6235145641 ff5cb7ba51 1e2b9ae962 8cab58d248
0d645c227f fb6c349677 eeb057085c 121371f4a4 a32713198b a1b067c683 22c40a4766
bcf140b3c1 e3692a6a3d e7489383a2 70246c3f86 0796c84da5 718482fb02 a3fb66daa4
da34b80c26 ba5ab21140 65f41a1e36 0930c9c059 1d193a9ab9 3adc6dca61 36d9f841d6
48ad13de00 42935cce05 e77f1c3b0f 388838aa99 d4d0990072 4210d17f14 fbd12e78c9
83d3421e72 8bcbf73aaa cc5f15885d 20fdf55bf6 955dcec68b 2b8564b16f 57da3e51cd
dede0e9747 35d2fc1158 c5267335a3 15c7b589c2 0ada5e8bf7 412ac8d1b9 5df501a281
3e4c61d020 cc39fe51b3 89cd24388d d5da0a8093 88ae1f8871 50b3d1deaa 3b3def5b8a
4f068a45ff 23a9504a51 d0d4eba477 a3fab0b5a9
```
Version bump for the new development cycle (`.bumpversion.cfg` content):

```diff
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 1.7.0a1
+current_version = 1.8.0a1
 parse = (?P<major>[\d]+)  # major version number
 	\.(?P<minor>[\d]+)  # minor version number
 	\.(?P<patch>[\d]+)  # patch version number
```

The changelog index gains a link to the now-prior 1.7 series:

```diff
@@ -3,6 +3,7 @@
 For information on prior major and minor releases, see their changelogs:

+* [1.7](https://github.com/dbt-labs/dbt-core/blob/1.7.latest/CHANGELOG.md)
 * [1.6](https://github.com/dbt-labs/dbt-core/blob/1.6.latest/CHANGELOG.md)
 * [1.5](https://github.com/dbt-labs/dbt-core/blob/1.5.latest/CHANGELOG.md)
 * [1.4](https://github.com/dbt-labs/dbt-core/blob/1.4.latest/CHANGELOG.md)
```
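The `parse` expression above is a bumpversion setting: named regex groups tell it how to split a version string into components it can increment. A small sketch (Python `re`, not part of the diff) of how the first three groups match the new `1.8.0a1` version:

```python
# Minimal sketch: how the major/minor/patch portion of the bumpversion
# `parse` pattern captures the components of a version string.
import re

pattern = re.compile(
    r"(?P<major>[\d]+)"    # major version number
    r"\.(?P<minor>[\d]+)"  # minor version number
    r"\.(?P<patch>[\d]+)"  # patch version number
)

match = pattern.match("1.8.0a1")
assert match is not None
print(match.groupdict())  # {'major': '1', 'minor': '8', 'patch': '0'}
```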
Changelog entries under `.changes/unreleased/` (changie format). Each entry is a small YAML file with `kind`, `body`, `time`, and `custom` (author, issue/PR) fields; for example, the first unit-testing feature entry reads:

```yaml
kind: Features
body: Initial implementation of unit testing
time: 2023-08-02T14:50:11.391992-04:00
custom:
  Author: gshank
  Issue: "8287"
```

This comparison adds the entries below and removes three older entries (the removed files' names are not shown in the diff):

| Change | File in `.changes/unreleased/` | Kind | Body | Author(s) | Issue/PR |
|---|---|---|---|---|---|
| added | Dependencies-20231031-131954.yaml | Dependencies | Begin using DSI 0.4.x | QMalcolm, peterallenwebb | PR 8892 |
| added | Dependencies-20231106-130051.yaml | Dependencies | Update typing-extensions version to >=4.4 | tlento | PR 9012 |
| removed | (not shown) | Docs | Fix for column tests not rendering on quoted columns | drewbanin | Issue 201 |
| removed | (not shown) | Docs | Remove static SQL codeblock for metrics | marcodamore | Issue 436 |
| added | Docs-20231106-123157.yaml | Docs | fix get_custom_database docstring | LeoTheGriff | Issue 9003 |
| added | Features-20230802-145011.yaml | Features | Initial implementation of unit testing | gshank | Issue 8287 |
| added | Features-20230828-101825.yaml | Features | Unit test manifest artifacts and selection | gshank | Issue 8295 |
| added | Features-20230906-234741.yaml | Features | Support config with tags & meta for unit tests | michelleark | Issue 8294 |
| added | Features-20230928-163205.yaml | Features | Enable inline csv fixtures in unit tests | gshank | Issue 8626 |
| added | Features-20231017-143620.yaml | Features | Add drop_schema_named macro | colin-rogers-dbt | Issue 8025 |
| added | Features-20231031-132022.yaml | Features | Add exports to SavedQuery spec | QMalcolm, peterallenwebb | Issue 8892 |
| added | Features-20231101-101845.yaml | Features | Support unit testing incremental models | michelleark | Issue 8422 |
| added | Features-20231106-194752.yaml | Features | Add support of csv file fixtures to unit testing | emmyoop | Issue 8290 |
| added | Features-20231107-231006.yaml | Features | Unit tests support --defer and state:modified | jtcohen6 | Issue 8517 |
| added | Features-20231110-154255.yaml | Features | Support setting export configs hierarchically via saved query and project configs | QMalcolm | Issue 8956 |
| added | Features-20231111-191150.yaml | Features | Support source inputs in unit tests | gshank | Issue 8507 |
| added | Features-20231114-101555.yaml | Features | Use daff to render diff displayed in stdout when unit test fails | michelleark | Issue 8558 |
| removed | (not shown) | Fixes | Enable converting deprecation warnings to errors | michelleark | Issue 8130 |
| added | Fixes-20231013-130943.yaml | Fixes | For packages installed with tarball method, fetch metadata to resolve nested dependencies | adamlopez | Issue 8621 |
| added | Fixes-20231016-163953.yaml | Fixes | Fix partial parsing not working for semantic model change | ChenyuLInx | Issue 8859 |
| added | Fixes-20231024-110151.yaml | Fixes | Handle unknown `type_code` for model contracts | dbeatty10 | Issues 8877, 8353 |
| added | Fixes-20231024-145504.yaml | Fixes | Add back contract enforcement for temporary tables on postgres | emmyoop | Issue 8857 |
| added | Fixes-20231024-155400.yaml | Fixes | Rework get_catalog implementation to retain previous adapter interface semantics | peterallenwebb | Issue 8846 |
| added | Fixes-20231026-002536.yaml | Fixes | Add version to fqn when version==0 | aranke | Issue 8836 |
| added | Fixes-20231030-093734.yaml | Fixes | Fix cased comparison in catalog-retrieval function | peterallenwebb | Issue 8939 |
| added | Fixes-20231031-005345.yaml | Fixes | Catalog queries now assign the correct type to materialized views | mikealfare | Issue 8864 |
| added | Fixes-20231031-144837.yaml | Fixes | Fix compilation exception running empty seed file and support new Integer agate data_type | gshank | Issue 8895 |
| added | Fixes-20231101-155824.yaml | Fixes | Make relation filtering None-tolerant for maximal flexibility across adapters | peterallenwebb | Issue 8974 |
| added | Fixes-20231106-155933.yaml | Fixes | Update run_results.json from previous versions of dbt to support deferral and rerun from failure | jtcohen6, peterallenwebb | Issue 9010 |
| added | Fixes-20231107-092358.yaml | Fixes | Fix git repository with subdirectory for Deps | ChenyuLInx | Issue 9000 |
| added | Fixes-20231107-094130.yaml | Fixes | Use MANIFEST.in to recursively include all jinja templates; fixes issue where some templates were not included in the distribution | mikealfare | Issue 9016 |
| added | Fixes-20231113-114956.yaml | Fixes | Fix formatting of tarball information in packages-lock.yml | ChenyuLInx, QMalcolm | Issue 9062 |
| added | Fixes-20231113-154535.yaml | Fixes | Use seed file from disk for unit testing if rows not specified in YAML config | aranke | Issue 8652 |
| added | Under the Hood-20230912-190506.yaml | Under the Hood | Add unit testing functional tests | gshank | Issue 8512 |
| added | Under the Hood-20231027-140048.yaml | Under the Hood | Add a no-op runner for Saved Query | ChenyuLInx | Issue 8893 |
| added | Under the Hood-20231103-195222.yaml | Under the Hood | Move CatalogRelationTypes test case to the shared test suite to be reused by adapter maintainers | mikealfare | Issue 8952 |
| added | Under the Hood-20231106-080422.yaml | Under the Hood | Treat SystemExit as an interrupt if raised during node execution | benmosher | n/a |
| added | Under the Hood-20231106-105730.yaml | Under the Hood | Removing unused 'documentable' | QMalcolm | Issue 8871 |
| added | Under the Hood-20231107-191546.yaml | Under the Hood | Cache dbt plugin modules to improve integration test performance | peterallenwebb | Issue 9029 |
`.github/CODEOWNERS` (19 lines changed) — the OSS Tooling Guild ownership block is removed and the postgres plugin's `setup.py` drops that team:

```diff
@@ -13,23 +13,6 @@
 # the core team as a whole will be assigned
 * @dbt-labs/core-team

-### OSS Tooling Guild
-
-/.github/ @dbt-labs/guild-oss-tooling
-.bumpversion.cfg @dbt-labs/guild-oss-tooling
-.changie.yaml @dbt-labs/guild-oss-tooling
-pre-commit-config.yaml @dbt-labs/guild-oss-tooling
-pytest.ini @dbt-labs/guild-oss-tooling
-tox.ini @dbt-labs/guild-oss-tooling
-pyproject.toml @dbt-labs/guild-oss-tooling
-requirements.txt @dbt-labs/guild-oss-tooling
-dev_requirements.txt @dbt-labs/guild-oss-tooling
-/core/setup.py @dbt-labs/guild-oss-tooling
-/core/MANIFEST.in @dbt-labs/guild-oss-tooling
-
 ### ADAPTERS

 # Adapter interface ("base" + "sql" adapter defaults, cache)
@@ -40,7 +23,7 @@ dev_requirements.txt @dbt-labs/guild-oss-tooling
 # Postgres plugin
 /plugins/ @dbt-labs/core-adapters
-/plugins/postgres/setup.py @dbt-labs/core-adapters @dbt-labs/guild-oss-tooling
+/plugins/postgres/setup.py @dbt-labs/core-adapters

 # Functional tests for adapter plugins
 /tests/adapter @dbt-labs/core-adapters
```
`.github/ISSUE_TEMPLATE/implementation-ticket.yml` (new file, 58 lines):

```yaml
name: 🛠️ Implementation
description: This is an implementation ticket intended for use by the maintainers of dbt-core
title: "[<project>] <title>"
labels: ["user docs"]
body:
  - type: markdown
    attributes:
      value: This is an implementation ticket intended for use by the maintainers of dbt-core
  - type: checkboxes
    attributes:
      label: Housekeeping
      description: >
        A couple friendly reminders:
        1. Remove the `user docs` label if the scope of this work does not require changes to https://docs.getdbt.com/docs: no end-user interface (e.g. yml spec, CLI, error messages, etc) or functional changes
        2. Link any blocking issues in the "Blocked on" field under the "Core devs & maintainers" project.
      options:
        - label: I am a maintainer of dbt-core
          required: true
  - type: textarea
    attributes:
      label: Short description
      description: |
        Describe the scope of the ticket, a high-level implementation approach and any tradeoffs to consider
    validations:
      required: true
  - type: textarea
    attributes:
      label: Acceptance criteria
      description: |
        What is the definition of done for this ticket? Include any relevant edge cases and/or test cases
    validations:
      required: true
  - type: textarea
    attributes:
      label: Impact to Other Teams
      description: |
        Will this change impact other teams? Include details of the kinds of changes required (new tests, code changes, related tickets) and _add the relevant `Impact:[team]` label_.
      placeholder: |
        Example: This change impacts `dbt-redshift` because the tests will need to be modified. The `Impact:[Adapter]` label has been added.
    validations:
      required: true
  - type: textarea
    attributes:
      label: Will backports be required?
      description: |
        Will this change need to be backported to previous versions? Add details, possible blockers to backporting and _add the relevant backport labels `backport 1.x.latest`_
      placeholder: |
        Example: Backport to 1.6.latest, 1.5.latest and 1.4.latest. Since 1.4 isn't using click, the backport may be complicated. The `backport 1.6.latest`, `backport 1.5.latest` and `backport 1.4.latest` labels have been added.
    validations:
      required: true
  - type: textarea
    attributes:
      label: Context
      description: |
        Provide the "why", motivation, and alternative approaches considered -- linking to previous refinement issues, spikes, Notion docs as appropriate
    validations:
      required: false
```
`.github/dependabot.yml` (7 lines added) — GitHub Actions are now covered by dependabot:

```diff
@@ -28,3 +28,10 @@ updates:
     schedule:
       interval: "weekly"
     rebase-strategy: "disabled"
+
+  # github dependencies
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "weekly"
+    rebase-strategy: "disabled"
```
`.github/pull_request_template.md` (10 lines changed) — the manual docs-issue link is replaced by label-driven automation, and a type-annotation checklist item is added:

```diff
@@ -1,15 +1,12 @@
 resolves #
-[docs](https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose) dbt-labs/docs.getdbt.com/#

 <!---
   Include the number of the issue addressed by this PR above if applicable.
   PRs for code changes without an associated issue *will not be merged*.
   See CONTRIBUTING.md for more information.

-  Include the number of the docs issue that was opened for this PR. If
-  this change has no user-facing implications, "N/A" suffices instead. New
-  docs tickets can be created by clicking the link above or by going to
-  https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose.
+  Add the `user docs` label to this PR if it will need docs changes. An
+  issue will get opened in docs.getdbt.com upon successful merge of this PR.
 -->

 ### Problem
@@ -33,3 +30,4 @@ resolves #
 - [ ] I have run this code in development and it appears to resolve the stated issue
 - [ ] This PR includes tests, or tests are not required/relevant for this PR
 - [ ] This PR has no interface changes (e.g. macros, cli, logs, json artifacts, config files, adapter interface, etc) or this PR has already received feedback and approval from Product or DX
+- [ ] This PR includes [type annotations](https://docs.python.org/3/library/typing.html) for new and modified functions
```
`.github/workflows/changelog-existence.yml` (8 lines changed) — the check now runs on `pull_request_target` so it can post comments with secrets:

```diff
@@ -2,10 +2,8 @@
 # Checks that a file has been committed under the /.changes directory
 # as a new CHANGELOG entry. Cannot check for a specific filename as
 # it is dynamically generated by change type and timestamp.
-# This workflow should not require any secrets since it runs for PRs
-# from forked repos.
-# By default, secrets are not passed to workflows running from
-# a forked repo.
+# This workflow runs on pull_request_target because it requires
+# secrets to post comments.

 # **why?**
 # Ensure code change gets reflected in the CHANGELOG.
@@ -19,7 +17,7 @@
 name: Check Changelog Entry

 on:
-  pull_request:
+  pull_request_target:
     types: [opened, reopened, labeled, unlabeled, synchronize]
   workflow_dispatch:
```
`.github/workflows/docs-issue.yml` (new file, 43 lines):

```yaml
# **what?**
# Open an issue in docs.getdbt.com when a PR is labeled `user docs`

# **why?**
# To reduce barriers for keeping docs up to date

# **when?**
# When a PR is labeled `user docs` and is merged. Runs on pull_request_target to run off the workflow already merged,
# not the workflow that existed on the PR branch. This allows old PRs to get comments.

name: Open issues in docs.getdbt.com repo when a PR is labeled
run-name: "Open an issue in docs.getdbt.com for PR #${{ github.event.pull_request.number }}"

on:
  pull_request_target:
    types: [labeled, closed]

defaults:
  run:
    shell: bash

permissions:
  issues: write # opens new issues
  pull-requests: write # comments on PRs

jobs:
  open_issues:
    # we only want to run this when the PR has been merged or the label in the labeled event is `user docs`. Otherwise it runs the
    # risk of duplicaton of issues being created due to merge and label both triggering this workflow to run and neither having
    # generating the comment before the other runs. This lives here instead of the shared workflow because this is where we
    # decide if it should run or not.
    if: |
      (github.event.pull_request.merged == true) &&
      ((github.event.action == 'closed' && contains( github.event.pull_request.labels.*.name, 'user docs')) ||
      (github.event.action == 'labeled' && github.event.label.name == 'user docs'))
    uses: dbt-labs/actions/.github/workflows/open-issue-in-repo.yml@main
    with:
      issue_repository: "dbt-labs/docs.getdbt.com"
      issue_title: "Docs Changes Needed from ${{ github.event.repository.name }} PR #${{ github.event.pull_request.number }}"
      issue_body: "At a minimum, update body to include a link to the page on docs.getdbt.com requiring updates and what part(s) of the page you would like to see updated."
    secrets: inherit
```
`.github/workflows/main.yml` (84 lines changed) — integration tests are split across parallel workers via a generated matrix, the Codecov token moves into the action's `with:` block, and a summary `integration-report` job is added:

```diff
@@ -33,6 +33,11 @@ defaults:
   run:
     shell: bash

+# top-level adjustments can be made here
+env:
+  # number of parallel processes to spawn for python integration testing
+  PYTHON_INTEGRATION_TEST_WORKERS: 5
+
 jobs:
   code-quality:
     name: code-quality
@@ -103,26 +108,59 @@ jobs:
       - name: Upload Unit Test Coverage to Codecov
         if: ${{ matrix.python-version == '3.11' }}
         uses: codecov/codecov-action@v3
-        env:
-          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
         with:
+          token: ${{ secrets.CODECOV_TOKEN }}
           flags: unit

+  integration-metadata:
+    name: integration test metadata generation
+    runs-on: ubuntu-latest
+    outputs:
+      split-groups: ${{ steps.generate-split-groups.outputs.split-groups }}
+      include: ${{ steps.generate-include.outputs.include }}
+
+    steps:
+      - name: generate split-groups
+        id: generate-split-groups
+        run: |
+          MATRIX_JSON="["
+          for B in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
+            MATRIX_JSON+=$(sed 's/^/"/;s/$/"/' <<< "${B}")
+          done
+          MATRIX_JSON="${MATRIX_JSON//\"\"/\", \"}"
+          MATRIX_JSON+="]"
+          echo "split-groups=${MATRIX_JSON}"
+          echo "split-groups=${MATRIX_JSON}" >> $GITHUB_OUTPUT
+
+      - name: generate include
+        id: generate-include
+        run: |
+          INCLUDE=('"python-version":"3.8","os":"windows-latest"' '"python-version":"3.8","os":"macos-latest"' )
+          INCLUDE_GROUPS="["
+          for include in ${INCLUDE[@]}; do
+            for group in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
+              INCLUDE_GROUPS+=$(sed 's/$/, /' <<< "{\"split-group\":\"${group}\",${include}}")
+            done
+          done
+          INCLUDE_GROUPS=$(echo $INCLUDE_GROUPS | sed 's/,*$//g')
+          INCLUDE_GROUPS+="]"
+          echo "include=${INCLUDE_GROUPS}"
+          echo "include=${INCLUDE_GROUPS}" >> $GITHUB_OUTPUT
+
   integration:
-    name: integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}
-
+    name: (${{ matrix.split-group }}) integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 60
-
+    timeout-minutes: 30
+    needs:
+      - integration-metadata
     strategy:
       fail-fast: false
       matrix:
         python-version: ["3.8", "3.9", "3.10", "3.11"]
         os: [ubuntu-20.04]
-        include:
-          - python-version: 3.8
-            os: windows-latest
-          - python-version: 3.8
-            os: macos-latest
-
+        split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
+        include: ${{ fromJson(needs.integration-metadata.outputs.include) }}
     env:
       TOXENV: integration
       DBT_INVOCATION_ENV: github-actions
@@ -165,6 +203,8 @@ jobs:
       - name: Run tests
         run: tox -- --ddtrace
+        env:
+          PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}

       - name: Get current date
         if: always()
@@ -182,8 +222,26 @@ jobs:
       - name: Upload Integration Test Coverage to Codecov
         if: ${{ matrix.python-version == '3.11' }}
         uses: codecov/codecov-action@v3
-        env:
-          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
         with:
+          token: ${{ secrets.CODECOV_TOKEN }}
           flags: integration

+  integration-report:
+    if: ${{ always() }}
+    name: Integration Test Suite
+    runs-on: ubuntu-latest
+    needs: integration
+    steps:
+      - name: "Integration Tests Failed"
+        if: ${{ contains(needs.integration.result, 'failure') || contains(needs.integration.result, 'cancelled') }}
+        # when this is true the next step won't execute
+        run: |
+          echo "::notice title='Integration test suite failed'"
+          exit 1
+
+      - name: "Integration Tests Passed"
+        run: |
+          echo "::notice title='Integration test suite passed'"
+
   build:
     name: build packages
```
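For orientation (not part of the workflow file), the two `generate-*` steps above just build small JSON fragments that `fromJson` later feeds into the job matrix. A Python sketch of the same logic for the default of 5 workers:

```python
# Sketch of the matrix metadata the `integration-metadata` job produces for
# PYTHON_INTEGRATION_TEST_WORKERS=5 (illustrative only; the real job uses bash).
workers = 5
extra_platforms = [
    {"python-version": "3.8", "os": "windows-latest"},
    {"python-version": "3.8", "os": "macos-latest"},
]

# Equivalent of the "generate split-groups" step: one group id per worker.
split_groups = [str(group) for group in range(1, workers + 1)]
print(split_groups)  # ['1', '2', '3', '4', '5']

# Equivalent of the "generate include" step: every extra platform is fanned
# out across all split groups, so the Windows/macOS runs are parallelized too.
include = [
    {"split-group": str(group), **platform}
    for platform in extra_platforms
    for group in range(1, workers + 1)
]
print(len(include))  # 10 matrix entries
```

Each integration job then runs only its slice of the tests via `pytest-split` (`--splits 5 --group N` in `PYTEST_ADDOPTS`), which is why the per-job timeout can drop from 60 to 30 minutes.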
`.github/workflows/release-docker.yml` (6 lines changed) — `docker/build-push-action` is bumped from v4 to v5 in all three build steps:

```diff
@@ -83,7 +83,7 @@ jobs:
           password: ${{ secrets.GITHUB_TOKEN }}

       - name: Build and push MAJOR.MINOR.PATCH tag
-        uses: docker/build-push-action@v4
+        uses: docker/build-push-action@v5
         with:
           file: docker/Dockerfile
           push: True
@@ -94,7 +94,7 @@ jobs:
             ghcr.io/dbt-labs/${{ github.event.inputs.package }}:${{ github.event.inputs.version_number }}

       - name: Build and push MINOR.latest tag
-        uses: docker/build-push-action@v4
+        uses: docker/build-push-action@v5
         if: ${{ needs.get_version_meta.outputs.minor_latest == 'True' }}
         with:
           file: docker/Dockerfile
@@ -106,7 +106,7 @@ jobs:
             ghcr.io/dbt-labs/${{ github.event.inputs.package }}:${{ needs.get_version_meta.outputs.major }}.${{ needs.get_version_meta.outputs.minor }}.latest

       - name: Build and push latest tag
-        uses: docker/build-push-action@v4
+        uses: docker/build-push-action@v5
         if: ${{ needs.get_version_meta.outputs.latest == 'True' }}
         with:
           file: docker/Dockerfile
```
`.github/workflows/repository-cleanup.yml` (new file, 30 lines):

```yaml
# **what?**
# Cleanup branches left over from automation and testing. Also cleanup
# draft releases from release testing.

# **why?**
# The automations are leaving behind branches and releases that clutter
# the repository. Sometimes we need them to debug processes so we don't
# want them immediately deleted. Running on Saturday to avoid running
# at the same time as an actual release to prevent breaking a release
# mid-release.

# **when?**
# Mainly on a schedule of 12:00 Saturday.
# Manual trigger can also run on demand

name: Repository Cleanup

on:
  schedule:
    - cron: '0 12 * * SAT' # At 12:00 on Saturday - details in `why` above

  workflow_dispatch: # for manual triggering

permissions:
  contents: write

jobs:
  cleanup-repo:
    uses: dbt-labs/actions/.github/workflows/repository-cleanup.yml@main
    secrets: inherit
```
The log-schema test workflow picks up the same split-group parallelization plus a summary job:

```diff
@@ -18,11 +18,41 @@ on:

 permissions: read-all

+# top-level adjustments can be made here
+env:
+  # number of parallel processes to spawn for python testing
+  PYTHON_INTEGRATION_TEST_WORKERS: 5
+
 jobs:
+  integration-metadata:
+    name: integration test metadata generation
+    runs-on: ubuntu-latest
+    outputs:
+      split-groups: ${{ steps.generate-split-groups.outputs.split-groups }}
+
+    steps:
+      - name: generate split-groups
+        id: generate-split-groups
+        run: |
+          MATRIX_JSON="["
+          for B in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
+            MATRIX_JSON+=$(sed 's/^/"/;s/$/"/' <<< "${B}")
+          done
+          MATRIX_JSON="${MATRIX_JSON//\"\"/\", \"}"
+          MATRIX_JSON+="]"
+          echo "split-groups=${MATRIX_JSON}" >> $GITHUB_OUTPUT
+
   # run the performance measurements on the current or default branch
   test-schema:
     name: Test Log Schema
     runs-on: ubuntu-20.04
     timeout-minutes: 30
+    needs:
+      - integration-metadata
     strategy:
       fail-fast: false
+      matrix:
+        split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
     env:
       # turns warnings into errors
       RUSTFLAGS: "-D warnings"
@@ -65,3 +95,14 @@ jobs:
       # we actually care if these pass, because the normal test run doesn't usually include many json log outputs
       - name: Run integration tests
         run: tox -e integration -- -nauto
+        env:
+          PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}
+
+  test-schema-report:
+    name: Log Schema Test Suite
+    runs-on: ubuntu-latest
+    needs: test-schema
+    steps:
+      - name: "[Notification] Log test suite passes"
+        run: |
+          echo "::notice title="Log test suite passes""
```
The pre-commit mypy mirror is bumped:

```diff
@@ -37,7 +37,7 @@ repos:
     alias: flake8-check
     stages: [manual]
 - repo: https://github.com/pre-commit/mirrors-mypy
-  rev: v1.3.0
+  rev: v1.4.1
   hooks:
   - id: mypy
     # N.B.: Mypy is... a bit fragile.
```

And the CHANGELOG index gains the 1.7 link here as well:

```diff
@@ -10,6 +10,7 @@
 For information on prior major and minor releases, see their changelogs:

+* [1.7](https://github.com/dbt-labs/dbt-core/blob/1.7.latest/CHANGELOG.md)
 * [1.6](https://github.com/dbt-labs/dbt-core/blob/1.6.latest/CHANGELOG.md)
 * [1.5](https://github.com/dbt-labs/dbt-core/blob/1.5.latest/CHANGELOG.md)
 * [1.4](https://github.com/dbt-labs/dbt-core/blob/1.4.latest/CHANGELOG.md)
```
`codecov.yml` (13 lines) — coverage thresholds to reduce noise:

```yaml
ignore:
  - ".github"
  - ".changes"
coverage:
  status:
    project:
      default:
        target: auto
        threshold: 0.1% # Reduce noise by ignoring rounding errors in coverage drops
    patch:
      default:
        target: auto
        threshold: 80%
```
From the base `Column` class, the `TYPE_LABELS` mapping used by `translate_type` for model contracts (the hunk mixes old and new lines; the added comment explains that no-op conversions were dropped):

```diff
@@ -7,12 +7,12 @@ from dbt.exceptions import DbtRuntimeError

 @dataclass
 class Column:
     # Note: This is automatically used by contract code
     # No-op conversions (INTEGER => INT) have been removed.
     # Any adapter that wants to take advantage of "translate_type"
     # should create a ClassVar with the appropriate conversions.
     TYPE_LABELS: ClassVar[Dict[str, str]] = {
         "STRING": "TEXT",
         "TIMESTAMP": "TIMESTAMP",
         "FLOAT": "FLOAT",
         "INTEGER": "INT",
         "BOOLEAN": "BOOLEAN",
     }
     column: str
     dtype: str
```
`BaseConnectionManager` (`dbt.adapters.base.connections`) — return-type annotations, an optional `limit` on `execute`, and two new hooks:

```diff
@@ -72,7 +72,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):

     TYPE: str = NotImplemented

-    def __init__(self, profile: AdapterRequiredConfig):
+    def __init__(self, profile: AdapterRequiredConfig) -> None:
         self.profile = profile
         self.thread_connections: Dict[Hashable, Connection] = {}
         self.lock: RLock = flags.MP_CONTEXT.RLock()
@@ -400,7 +400,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):

     @abc.abstractmethod
     def execute(
-        self, sql: str, auto_begin: bool = False, fetch: bool = False
+        self, sql: str, auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None
     ) -> Tuple[AdapterResponse, agate.Table]:
         """Execute the given SQL.
@@ -408,7 +408,28 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
         :param bool auto_begin: If set, and dbt is not currently inside a
             transaction, automatically begin one.
         :param bool fetch: If set, fetch results.
+        :param int limit: If set, limits the result set
         :return: A tuple of the query status and results (empty if fetch=False).
         :rtype: Tuple[AdapterResponse, agate.Table]
         """
         raise dbt.exceptions.NotImplementedError("`execute` is not implemented for this adapter!")

+    def add_select_query(self, sql: str) -> Tuple[Connection, Any]:
+        """
+        This was added here because base.impl.BaseAdapter.get_column_schema_from_query expects it to be here.
+        That method wouldn't work unless the adapter used sql.impl.SQLAdapter, sql.connections.SQLConnectionManager
+        or defined this method on <Adapter>ConnectionManager before passing it in to <Adapter>Adapter.
+
+        See https://github.com/dbt-labs/dbt-core/issues/8396 for more information.
+        """
+        raise dbt.exceptions.NotImplementedError(
+            "`add_select_query` is not implemented for this adapter!"
+        )
+
+    @classmethod
+    def data_type_code_to_name(cls, type_code: Union[int, str]) -> str:
+        """Get the string representation of the data type from the type_code."""
+        # https://peps.python.org/pep-0249/#type-objects
+        raise dbt.exceptions.NotImplementedError(
+            "`data_type_code_to_name` is not implemented for this adapter!"
+        )
```
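For orientation, a minimal sketch of how a concrete connection manager might satisfy the extended `execute` signature and the new `data_type_code_to_name` hook. Everything named `MyAdapter*` is hypothetical, transaction handling is omitted, and pushing the limit into the SQL text is just one possible approach, not something this diff prescribes:

```python
# Hypothetical adapter connection manager honoring the new `limit` parameter.
from typing import Optional, Tuple, Union

import agate

from dbt.adapters.base.connections import AdapterResponse, BaseConnectionManager
from dbt.clients.agate_helper import empty_table, table_from_rows


class MyAdapterConnectionManager(BaseConnectionManager):
    TYPE = "myadapter"

    def execute(
        self, sql: str, auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None
    ) -> Tuple[AdapterResponse, agate.Table]:
        connection = self.get_thread_connection()
        cursor = connection.handle.cursor()  # DBAPI cursor on this thread's open connection
        if limit is not None:
            # Push the limit into the query so the warehouse does the work.
            sql = f"select * from ({sql}) as _dbt_limit_subq limit {limit}"
        cursor.execute(sql)
        response = AdapterResponse(_message="OK", rows_affected=cursor.rowcount)
        if fetch:
            column_names = [col[0] for col in cursor.description]
            table = table_from_rows(cursor.fetchall(), column_names)
        else:
            table = empty_table()
        return response, table

    @classmethod
    def data_type_code_to_name(cls, type_code: Union[int, str]) -> str:
        # PEP 249 type codes are driver-specific; map the few this hypothetical driver uses.
        return {1: "INTEGER", 2: "TEXT", 3: "TIMESTAMP"}.get(type_code, "UNKNOWN")  # type: ignore[arg-type]
```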
`BaseAdapter` (`dbt.adapters.base.impl`) — imports and module constants. The adapter gains capability types, uncased column lookup, the custom `Integer` agate type, and macro names for relation-scoped catalogs and metadata freshness:

```diff
@@ -17,9 +17,11 @@ from typing import (
     Set,
     Tuple,
     Type,
+    TypedDict,
     Union,
 )

+from dbt.adapters.capability import Capability, CapabilityDict
 from dbt.contracts.graph.nodes import ColumnLevelConstraint, ConstraintType, ModelLevelConstraint

 import agate
@@ -43,8 +45,14 @@ from dbt.exceptions import (
     UnexpectedNullError,
 )

-from dbt.adapters.protocol import AdapterConfig, ConnectionManagerProtocol
-from dbt.clients.agate_helper import empty_table, merge_tables, table_from_rows
+from dbt.adapters.protocol import AdapterConfig
+from dbt.clients.agate_helper import (
+    empty_table,
+    get_column_value_uncased,
+    merge_tables,
+    table_from_rows,
+    Integer,
+)
 from dbt.clients.jinja import MacroGenerator
 from dbt.contracts.graph.manifest import Manifest, MacroManifest
 from dbt.contracts.graph.nodes import ResultNode
@@ -60,7 +68,7 @@ from dbt.events.types import (
 )
 from dbt.utils import filter_null_values, executor, cast_to_str, AttrDict

-from dbt.adapters.base.connections import Connection, AdapterResponse
+from dbt.adapters.base.connections import Connection, AdapterResponse, BaseConnectionManager
 from dbt.adapters.base.meta import AdapterMeta, available
 from dbt.adapters.base.relation import (
     ComponentName,
@@ -74,7 +82,9 @@ from dbt.adapters.cache import RelationsCache, _make_ref_key_dict
 from dbt import deprecations

 GET_CATALOG_MACRO_NAME = "get_catalog"
+GET_CATALOG_RELATIONS_MACRO_NAME = "get_catalog_relations"
 FRESHNESS_MACRO_NAME = "collect_freshness"
+GET_RELATION_LAST_MODIFIED_MACRO_NAME = "get_relation_last_modified"


 class ConstraintSupport(str, Enum):
```
`_utc` becomes tolerant of a missing source relation, and the freshness result gets a proper `TypedDict`:

```diff
@@ -109,7 +119,7 @@ def _catalog_filter_schemas(manifest: Manifest) -> Callable[[agate.Row], bool]:
     return test


-def _utc(dt: Optional[datetime], source: BaseRelation, field_name: str) -> datetime:
+def _utc(dt: Optional[datetime], source: Optional[BaseRelation], field_name: str) -> datetime:
     """If dt has a timezone, return a new datetime that's in UTC. Otherwise,
     assume the datetime is already for UTC and add the timezone.
     """
@@ -161,6 +171,12 @@ class PythonJobHelper:
         raise NotImplementedError("PythonJobHelper submit function is not implemented yet")


+class FreshnessResponse(TypedDict):
+    max_loaded_at: datetime
+    snapshotted_at: datetime
+    age: float  # age in seconds
+
+
 class BaseAdapter(metaclass=AdapterMeta):
     """The BaseAdapter provides an abstract base class for adapters.
```
`BaseAdapter` class attributes: the connection manager is typed against `BaseConnectionManager`, a `_capabilities` declaration is introduced, and `get_partitions_metadata` degrades gracefully when a connection manager does not implement it:

```diff
@@ -208,7 +224,7 @@ class BaseAdapter(metaclass=AdapterMeta):

     Relation: Type[BaseRelation] = BaseRelation
     Column: Type[BaseColumn] = BaseColumn
-    ConnectionManager: Type[ConnectionManagerProtocol]
+    ConnectionManager: Type[BaseConnectionManager]

     # A set of clobber config fields accepted by this adapter
     # for use in materializations
@@ -222,7 +238,11 @@ class BaseAdapter(metaclass=AdapterMeta):
         ConstraintType.foreign_key: ConstraintSupport.ENFORCED,
     }

-    def __init__(self, config):
+    # This static member variable can be overriden in concrete adapter
+    # implementations to indicate adapter support for optional capabilities.
+    _capabilities = CapabilityDict({})
+
+    def __init__(self, config) -> None:
         self.config = config
         self.cache = RelationsCache()
         self.connections = self.ConnectionManager(config)
@@ -315,14 +335,21 @@ class BaseAdapter(metaclass=AdapterMeta):

     @available.parse(lambda *a, **k: ("", empty_table()))
     def get_partitions_metadata(self, table: str) -> Tuple[agate.Table]:
-        """Obtain partitions metadata for a BigQuery partitioned table.
+        """
+        TODO: Can we move this to dbt-bigquery?
+        Obtain partitions metadata for a BigQuery partitioned table.

-        :param str table_id: a partitioned table id, in standard SQL format.
+        :param str table: a partitioned table id, in standard SQL format.
         :return: a partition metadata tuple, as described in
           https://cloud.google.com/bigquery/docs/creating-partitioned-tables#getting_partition_metadata_using_meta_tables.
         :rtype: agate.Table
         """
-        return self.connections.get_partitions_metadata(table=table)
+        if hasattr(self.connections, "get_partitions_metadata"):
+            return self.connections.get_partitions_metadata(table=table)
+        else:
+            raise NotImplementedError(
+                "`get_partitions_metadata` is not implemented for this adapter!"
+            )

     ###
     # Methods that should never be overridden
```
Catalog lookup is refactored so relations can be collected once and then grouped either by schema (legacy path) or by information schema (new relation-scoped path):

```diff
@@ -408,7 +435,30 @@ class BaseAdapter(metaclass=AdapterMeta):
         lowercase strings.
         """
         info_schema_name_map = SchemaSearchMap()
-        nodes: Iterator[ResultNode] = chain(
+        relations = self._get_catalog_relations(manifest)
+        for relation in relations:
+            info_schema_name_map.add(relation)
+        # result is a map whose keys are information_schema Relations without
+        # identifiers that have appropriate database prefixes, and whose values
+        # are sets of lowercase schema names that are valid members of those
+        # databases
+        return info_schema_name_map
+
+    def _get_catalog_relations_by_info_schema(
+        self, relations
+    ) -> Dict[InformationSchema, List[BaseRelation]]:
+        relations_by_info_schema: Dict[InformationSchema, List[BaseRelation]] = dict()
+        for relation in relations:
+            info_schema = relation.information_schema_only()
+            if info_schema not in relations_by_info_schema:
+                relations_by_info_schema[info_schema] = []
+            relations_by_info_schema[info_schema].append(relation)
+
+        return relations_by_info_schema
+
+    def _get_catalog_relations(self, manifest: Manifest) -> List[BaseRelation]:
+
+        nodes = chain(
             [
                 node
                 for node in manifest.nodes.values()
@@ -416,14 +466,9 @@ class BaseAdapter(metaclass=AdapterMeta):
             ],
             manifest.sources.values(),
         )
-        for node in nodes:
-            relation = self.Relation.create_from(self.config, node)
-            info_schema_name_map.add(relation)
-        # result is a map whose keys are information_schema Relations without
-        # identifiers that have appropriate database prefixes, and whose values
-        # are sets of lowercase schema names that are valid members of those
-        # databases
-        return info_schema_name_map
+
+        relations = [self.Relation.create_from(self.config, n) for n in nodes]
+        return relations

     def _relations_cache_for_schemas(
         self, manifest: Manifest, cache_schemas: Optional[Set[BaseRelation]] = None
```
Cache updates now skip relations without a schema, tightening the tuple type:

```diff
@@ -453,9 +498,10 @@ class BaseAdapter(metaclass=AdapterMeta):
         # it's possible that there were no relations in some schemas. We want
         # to insert the schemas we query into the cache's `.schemas` attribute
         # so we can check it later
-        cache_update: Set[Tuple[Optional[str], Optional[str]]] = set()
+        cache_update: Set[Tuple[Optional[str], str]] = set()
         for relation in cache_schemas:
-            cache_update.add((relation.database, relation.schema))
+            if relation.schema:
+                cache_update.add((relation.database, relation.schema))
         self.cache.update_schemas(cache_update)

     def set_relations_cache(
```
A dedicated integer conversion joins the agate type mapping (supporting the new `Integer` agate type used for empty seed files):

```diff
@@ -917,6 +963,17 @@ class BaseAdapter(metaclass=AdapterMeta):
         """
         raise NotImplementedError("`convert_number_type` is not implemented for this adapter!")

+    @classmethod
+    def convert_integer_type(cls, agate_table: agate.Table, col_idx: int) -> str:
+        """Return the type in the database that best maps to the agate.Number
+        type for the given agate table and column index.
+
+        :param agate_table: The table
+        :param col_idx: The index into the agate table for the column.
+        :return: The name of the type in the database
+        """
+        return "integer"
+
     @classmethod
     @abc.abstractmethod
     def convert_boolean_type(cls, agate_table: agate.Table, col_idx: int) -> str:
@@ -974,6 +1031,7 @@ class BaseAdapter(metaclass=AdapterMeta):
     def convert_agate_type(cls, agate_table: agate.Table, col_idx: int) -> Optional[str]:
         agate_type: Type = agate_table.column_types[col_idx]
         conversions: List[Tuple[Type, Callable[..., str]]] = [
+            (Integer, cls.convert_integer_type),
             (agate.Text, cls.convert_text_type),
             (agate.Number, cls.convert_number_type),
             (agate.Boolean, cls.convert_boolean_type),
```
The catalog API is reworked: `get_catalog` keeps its schema-by-schema behavior, while new `_get_one_catalog_by_relations`, `get_catalog_by_relations`, and `get_filtered_catalog` methods let capable adapters fetch metadata only for the relations actually needed:

```diff
@@ -1085,25 +1143,108 @@ class BaseAdapter(metaclass=AdapterMeta):
         results = self._catalog_filter_table(table, manifest)  # type: ignore[arg-type]
         return results

-    def get_catalog(self, manifest: Manifest) -> Tuple[agate.Table, List[Exception]]:
-        schema_map = self._get_catalog_schemas(manifest)
+    def _get_one_catalog_by_relations(
+        self,
+        information_schema: InformationSchema,
+        relations: List[BaseRelation],
+        manifest: Manifest,
+    ) -> agate.Table:
+
+        kwargs = {
+            "information_schema": information_schema,
+            "relations": relations,
+        }
+        table = self.execute_macro(
+            GET_CATALOG_RELATIONS_MACRO_NAME,
+            kwargs=kwargs,
+            # pass in the full manifest, so we get any local project
+            # overrides
+            manifest=manifest,
+        )
+
+        results = self._catalog_filter_table(table, manifest)  # type: ignore[arg-type]
+        return results
+
+    def get_filtered_catalog(
+        self, manifest: Manifest, relations: Optional[Set[BaseRelation]] = None
+    ):
+        catalogs: agate.Table
+        if (
+            relations is None
+            or len(relations) > 100
+            or not self.supports(Capability.SchemaMetadataByRelations)
+        ):
+            # Do it the traditional way. We get the full catalog.
+            catalogs, exceptions = self.get_catalog(manifest)
+        else:
+            # Do it the new way. We try to save time by selecting information
+            # only for the exact set of relations we are interested in.
+            catalogs, exceptions = self.get_catalog_by_relations(manifest, relations)
+
+        if relations and catalogs:
+            relation_map = {
+                (
+                    r.database.casefold() if r.database else None,
+                    r.schema.casefold() if r.schema else None,
+                    r.identifier.casefold() if r.identifier else None,
+                )
+                for r in relations
+            }
+
+            def in_map(row: agate.Row):
+                d = _expect_row_value("table_database", row)
+                s = _expect_row_value("table_schema", row)
+                i = _expect_row_value("table_name", row)
+                d = d.casefold() if d is not None else None
+                s = s.casefold() if s is not None else None
+                i = i.casefold() if i is not None else None
+                return (d, s, i) in relation_map
+
+            catalogs = catalogs.where(in_map)
+
+        return catalogs, exceptions
+
+    def row_matches_relation(self, row: agate.Row, relations: Set[BaseRelation]):
+        pass
+
+    def get_catalog(self, manifest: Manifest) -> Tuple[agate.Table, List[Exception]]:
         with executor(self.config) as tpe:
             futures: List[Future[agate.Table]] = []
+            schema_map: SchemaSearchMap = self._get_catalog_schemas(manifest)
             for info, schemas in schema_map.items():
                 if len(schemas) == 0:
                     continue
                 name = ".".join([str(info.database), "information_schema"])
-
                 fut = tpe.submit_connected(
                     self, name, self._get_one_catalog, info, schemas, manifest
                 )
                 futures.append(fut)

-            catalogs, exceptions = catch_as_completed(futures)
-
+        catalogs, exceptions = catch_as_completed(futures)
         return catalogs, exceptions

+    def get_catalog_by_relations(
+        self, manifest: Manifest, relations: Set[BaseRelation]
+    ) -> Tuple[agate.Table, List[Exception]]:
+        with executor(self.config) as tpe:
+            futures: List[Future[agate.Table]] = []
+            relations_by_schema = self._get_catalog_relations_by_info_schema(relations)
+            for info_schema in relations_by_schema:
+                name = ".".join([str(info_schema.database), "information_schema"])
+                relations = set(relations_by_schema[info_schema])
+                fut = tpe.submit_connected(
+                    self,
+                    name,
+                    self._get_one_catalog_by_relations,
+                    info_schema,
+                    relations,
+                    manifest,
+                )
+                futures.append(fut)
+
+            catalogs, exceptions = catch_as_completed(futures)
+        return catalogs, exceptions
+
     def cancel_open_connections(self):
         """Cancel all open connections."""
         return self.connections.cancel_open()
```
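A sketch of how these pieces fit together for an adapter that implements the `get_catalog_relations` macro. `MyAdapter` is hypothetical, and the example assumes the `CapabilitySupport` and `Support` helpers that accompany `Capability`/`CapabilityDict` in `dbt.adapters.capability`:

```python
# Hypothetical adapter opting into relation-scoped catalog queries.
from dbt.adapters.base.impl import BaseAdapter
from dbt.adapters.capability import Capability, CapabilityDict, CapabilitySupport, Support


class MyAdapter(BaseAdapter):
    # Real adapters also define ConnectionManager, Relation, conversion methods, etc.
    _capabilities = CapabilityDict(
        {Capability.SchemaMetadataByRelations: CapabilitySupport(support=Support.Full)}
    )


# With this declaration, MyAdapter.supports(Capability.SchemaMetadataByRelations) is True,
# so get_filtered_catalog(manifest, relations) takes the relation-scoped path whenever a
# concrete set of 100 or fewer relations is passed in. Otherwise it falls back to the full
# schema-by-schema get_catalog(manifest) crawl and filters the resulting table afterwards
# using the case-folded (database, schema, identifier) map shown above.
```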
Source freshness now returns a `FreshnessResponse`, and a metadata-based variant is added that asks the warehouse for `last_modified` instead of querying the table:

```diff
@@ -1114,7 +1255,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         loaded_at_field: str,
         filter: Optional[str],
         manifest: Optional[Manifest] = None,
-    ) -> Tuple[Optional[AdapterResponse], Dict[str, Any]]:
+    ) -> Tuple[Optional[AdapterResponse], FreshnessResponse]:
         """Calculate the freshness of sources in dbt, and return it"""
         kwargs: Dict[str, Any] = {
             "source": source,
@@ -1149,13 +1290,52 @@ class BaseAdapter(metaclass=AdapterMeta):

         snapshotted_at = _utc(table[0][1], source, loaded_at_field)
         age = (snapshotted_at - max_loaded_at).total_seconds()
-        freshness = {
+        freshness: FreshnessResponse = {
             "max_loaded_at": max_loaded_at,
             "snapshotted_at": snapshotted_at,
             "age": age,
         }
         return adapter_response, freshness

+    def calculate_freshness_from_metadata(
+        self,
+        source: BaseRelation,
+        manifest: Optional[Manifest] = None,
+    ) -> Tuple[Optional[AdapterResponse], FreshnessResponse]:
+        kwargs: Dict[str, Any] = {
+            "information_schema": source.information_schema_only(),
+            "relations": [source],
+        }
+        result = self.execute_macro(
+            GET_RELATION_LAST_MODIFIED_MACRO_NAME, kwargs=kwargs, manifest=manifest
+        )
+        adapter_response, table = result.response, result.table  # type: ignore[attr-defined]
+
+        try:
+            row = table[0]
+            last_modified_val = get_column_value_uncased("last_modified", row)
+            snapshotted_at_val = get_column_value_uncased("snapshotted_at", row)
+        except Exception:
+            raise MacroResultError(GET_RELATION_LAST_MODIFIED_MACRO_NAME, table)
+
+        if last_modified_val is None:
+            # Interpret missing value as "infinitely long ago"
+            max_loaded_at = datetime(1, 1, 1, 0, 0, 0, tzinfo=pytz.UTC)
+        else:
+            max_loaded_at = _utc(last_modified_val, None, "last_modified")
+
+        snapshotted_at = _utc(snapshotted_at_val, None, "snapshotted_at")
+
+        age = (snapshotted_at - max_loaded_at).total_seconds()
+
+        freshness: FreshnessResponse = {
+            "max_loaded_at": max_loaded_at,
+            "snapshotted_at": snapshotted_at,
+            "age": age,
+        }
+
+        return adapter_response, freshness
+
     def pre_model_hook(self, config: Mapping[str, Any]) -> Any:
         """A hook for running some operation before the model materialization
         runs. The hook can assume it has a connection available.
```
@@ -1429,6 +1609,14 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
else:
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
def capabilities(cls) -> CapabilityDict:
|
||||
return cls._capabilities
|
||||
|
||||
@classmethod
|
||||
def supports(cls, capability: Capability) -> bool:
|
||||
return bool(cls.capabilities()[capability])
|
||||
|
||||
|
||||
COLUMNS_EQUAL_SQL = """
|
||||
with diff_count as (
|
||||
|
||||
@@ -93,7 +93,7 @@ class AdapterMeta(abc.ABCMeta):
|
||||
_available_: FrozenSet[str]
|
||||
_parse_replacements_: Dict[str, Callable]
|
||||
|
||||
def __new__(mcls, name, bases, namespace, **kwargs):
|
||||
def __new__(mcls, name, bases, namespace, **kwargs) -> "AdapterMeta":
|
||||
# mypy does not like the `**kwargs`. But `ABCMeta` itself takes
|
||||
# `**kwargs` in its argspec here (and passes them to `type.__new__`.
|
||||
# I'm not sure there is any benefit to it after poking around a bit,
|
||||
|
||||
@@ -29,7 +29,7 @@ class AdapterPlugin:
|
||||
credentials: Type[Credentials],
|
||||
include_path: str,
|
||||
dependencies: Optional[List[str]] = None,
|
||||
):
|
||||
) -> None:
|
||||
|
||||
self.adapter: Type[AdapterProtocol] = adapter
|
||||
self.credentials: Type[Credentials] = credentials
|
||||
|
||||
@@ -11,7 +11,7 @@ from dbt.exceptions import DbtRuntimeError
|
||||
|
||||
|
||||
class NodeWrapper:
|
||||
def __init__(self, node):
|
||||
def __init__(self, node) -> None:
|
||||
self._inner_node = node
|
||||
|
||||
def __getattr__(self, name):
|
||||
@@ -25,9 +25,9 @@ class _QueryComment(local):
|
||||
- a source_name indicating what set the current thread's query comment
|
||||
"""
|
||||
|
||||
def __init__(self, initial):
|
||||
def __init__(self, initial) -> None:
|
||||
self.query_comment: Optional[str] = initial
|
||||
self.append = False
|
||||
self.append: bool = False
|
||||
|
||||
def add(self, sql: str) -> str:
|
||||
if not self.query_comment:
|
||||
@@ -57,7 +57,7 @@ QueryStringFunc = Callable[[str, Optional[NodeWrapper]], str]
|
||||
|
||||
|
||||
class MacroQueryStringSetter:
|
||||
def __init__(self, config: AdapterRequiredConfig, manifest: Manifest):
|
||||
def __init__(self, config: AdapterRequiredConfig, manifest: Manifest) -> None:
|
||||
self.manifest = manifest
|
||||
self.config = config
|
||||
|
||||
|
||||
@@ -1,8 +1,14 @@
|
||||
from collections.abc import Hashable
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Optional, TypeVar, Any, Type, Dict, Iterator, Tuple, Set
|
||||
from typing import Optional, TypeVar, Any, Type, Dict, Iterator, Tuple, Set, Union, FrozenSet
|
||||
|
||||
from dbt.contracts.graph.nodes import SourceDefinition, ManifestNode, ResultNode, ParsedNode
|
||||
from dbt.contracts.graph.nodes import (
|
||||
SourceDefinition,
|
||||
ManifestNode,
|
||||
ResultNode,
|
||||
ParsedNode,
|
||||
UnitTestSourceDefinition,
|
||||
)
|
||||
from dbt.contracts.relation import (
|
||||
RelationType,
|
||||
ComponentName,
|
||||
@@ -23,6 +29,7 @@ import dbt.exceptions
|
||||
|
||||
|
||||
Self = TypeVar("Self", bound="BaseRelation")
|
||||
SerializableIterable = Union[Tuple, FrozenSet]
|
||||
|
||||
|
||||
@dataclass(frozen=True, eq=False, repr=False)
|
||||
@@ -36,6 +43,18 @@ class BaseRelation(FakeAPIObject, Hashable):
|
||||
quote_policy: Policy = field(default_factory=lambda: Policy())
|
||||
dbt_created: bool = False
|
||||
|
||||
# register relation types that can be renamed for the purpose of replacing relations using stages and backups
|
||||
# adding a relation type here also requires defining the associated rename macro
|
||||
# e.g. adding RelationType.View in dbt-postgres requires that you define:
|
||||
# include/postgres/macros/relations/view/rename.sql::postgres__get_rename_view_sql()
|
||||
renameable_relations: SerializableIterable = ()
|
||||
|
||||
# register relation types that are atomically replaceable, e.g. they have "create or replace" syntax
|
||||
# adding a relation type here also requires defining the associated replace macro
|
||||
# e.g. adding RelationType.View in dbt-postgres requires that you define:
|
||||
# include/postgres/macros/relations/view/replace.sql::postgres__get_replace_view_sql()
|
||||
replaceable_relations: SerializableIterable = ()
|
||||
|
||||
def _is_exactish_match(self, field: ComponentName, value: str) -> bool:
|
||||
if self.dbt_created and self.quote_policy.get_part(field) is False:
|
||||
return self.path.get_lowered_part(field) == value.lower()
|
||||
@@ -169,7 +188,6 @@ class BaseRelation(FakeAPIObject, Hashable):
|
||||
return self.include(identifier=False).replace_path(identifier=None)
|
||||
|
||||
def _render_iterator(self) -> Iterator[Tuple[Optional[ComponentName], Optional[str]]]:
|
||||
|
||||
for key in ComponentName:
|
||||
path_part: Optional[str] = None
|
||||
if self.include_policy.get_part(key):
|
||||
@@ -189,7 +207,9 @@ class BaseRelation(FakeAPIObject, Hashable):
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def create_from_source(cls: Type[Self], source: SourceDefinition, **kwargs: Any) -> Self:
|
||||
def create_from_source(
|
||||
cls: Type[Self], source: Union[SourceDefinition, UnitTestSourceDefinition], **kwargs: Any
|
||||
) -> Self:
|
||||
source_quoting = source.quoting.to_dict(omit_none=True)
|
||||
source_quoting.pop("column", None)
|
||||
quote_policy = deep_merge(
|
||||
@@ -251,8 +271,10 @@ class BaseRelation(FakeAPIObject, Hashable):
|
||||
node: ResultNode,
|
||||
**kwargs: Any,
|
||||
) -> Self:
|
||||
if node.resource_type == NodeType.Source:
|
||||
if not isinstance(node, SourceDefinition):
|
||||
if node.resource_type == NodeType.Source or isinstance(node, UnitTestSourceDefinition):
|
||||
if not (
|
||||
isinstance(node, SourceDefinition) or isinstance(node, UnitTestSourceDefinition)
|
||||
):
|
||||
raise DbtInternalError(
|
||||
"type mismatch, expected SourceDefinition but got {}".format(type(node))
|
||||
)
|
||||
@@ -286,6 +308,14 @@ class BaseRelation(FakeAPIObject, Hashable):
|
||||
)
|
||||
return cls.from_dict(kwargs)
|
||||
|
||||
@property
|
||||
def can_be_renamed(self) -> bool:
|
||||
return self.type in self.renameable_relations
|
||||
|
||||
@property
|
||||
def can_be_replaced(self) -> bool:
|
||||
return self.type in self.replaceable_relations
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return "<{} {}>".format(self.__class__.__name__, self.render())
|
||||
|
||||
@@ -439,11 +469,11 @@ class SchemaSearchMap(Dict[InformationSchema, Set[Optional[str]]]):
|
||||
self[key].add(schema)
|
||||
|
||||
def search(self) -> Iterator[Tuple[InformationSchema, Optional[str]]]:
|
||||
for information_schema_name, schemas in self.items():
|
||||
for information_schema, schemas in self.items():
|
||||
for schema in schemas:
|
||||
yield information_schema_name, schema
|
||||
yield information_schema, schema
|
||||
|
||||
def flatten(self, allow_multiple_databases: bool = False):
|
||||
def flatten(self, allow_multiple_databases: bool = False) -> "SchemaSearchMap":
|
||||
new = self.__class__()
|
||||
|
||||
# make sure we don't have multiple databases if allow_multiple_databases is set to False
|
||||
|
||||
@@ -38,8 +38,8 @@ class _CachedRelation:
|
||||
:attr BaseRelation inner: The underlying dbt relation.
|
||||
"""
|
||||
|
||||
def __init__(self, inner):
|
||||
self.referenced_by = {}
|
||||
def __init__(self, inner) -> None:
|
||||
self.referenced_by: Dict[_ReferenceKey, _CachedRelation] = {}
|
||||
self.inner = inner
|
||||
|
||||
def __str__(self) -> str:
|
||||
|
||||
core/dbt/adapters/capability.py (new file, 52 lines)
@@ -0,0 +1,52 @@
from dataclasses import dataclass
from enum import Enum
from typing import Optional, DefaultDict, Mapping


class Capability(str, Enum):
"""Enumeration of optional adapter features which can be probed using BaseAdapter.capabilities()"""

SchemaMetadataByRelations = "SchemaMetadataByRelations"
"""Indicates efficient support for retrieving schema metadata for a list of relations, rather than always retrieving
all the relations in a schema."""

TableLastModifiedMetadata = "TableLastModifiedMetadata"
"""Indicates support for determining the time of the last table modification by querying database metadata."""


class Support(str, Enum):
Unknown = "Unknown"
"""The adapter has not declared whether this capability is a feature of the underlying DBMS."""

Unsupported = "Unsupported"
"""This capability is not possible with the underlying DBMS, so the adapter does not implement related macros."""

NotImplemented = "NotImplemented"
"""This capability is available in the underlying DBMS, but support has not yet been implemented in the adapter."""

Versioned = "Versioned"
"""Some versions of the DBMS supported by the adapter support this capability and the adapter has implemented any
macros needed to use it."""

Full = "Full"
"""All versions of the DBMS supported by the adapter support this capability and the adapter has implemented any
macros needed to use it."""


@dataclass
class CapabilitySupport:
support: Support
first_version: Optional[str] = None

def __bool__(self):
return self.support == Support.Versioned or self.support == Support.Full


class CapabilityDict(DefaultDict[Capability, CapabilitySupport]):
def __init__(self, vals: Mapping[Capability, CapabilitySupport]):
super().__init__(self._default)
self.update(vals)

@staticmethod
def _default():
return CapabilitySupport(support=Support.Unknown)
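The BaseAdapter.capabilities() and supports() hooks added earlier in this diff read a class-level _capabilities mapping, so an adapter opts in by declaring one of these dictionaries. A minimal sketch — the specific support levels and the first_version value here are illustrative, not taken from any particular adapter:

    from dbt.adapters.capability import (
        Capability,
        CapabilityDict,
        CapabilitySupport,
        Support,
    )

    # hypothetical declaration on a BaseAdapter subclass
    _capabilities = CapabilityDict(
        {
            Capability.SchemaMetadataByRelations: CapabilitySupport(support=Support.Full),
            Capability.TableLastModifiedMetadata: CapabilitySupport(
                support=Support.Versioned, first_version="1.5.0"
            ),
        }
    )

    # capabilities not listed fall back to Support.Unknown, which is falsy
    assert bool(_capabilities[Capability.SchemaMetadataByRelations])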
|
||||
@@ -19,7 +19,7 @@ Adapter = AdapterProtocol
|
||||
|
||||
|
||||
class AdapterContainer:
|
||||
def __init__(self):
|
||||
def __init__(self) -> None:
|
||||
self.lock = threading.Lock()
|
||||
self.adapters: Dict[str, Adapter] = {}
|
||||
self.plugins: Dict[str, AdapterPlugin] = {}
|
||||
|
||||
@@ -90,7 +90,7 @@ class AdapterProtocol( # type: ignore[misc]
|
||||
ConnectionManager: Type[ConnectionManager_T]
|
||||
connections: ConnectionManager_T
|
||||
|
||||
def __init__(self, config: AdapterRequiredConfig):
|
||||
def __init__(self, config: AdapterRequiredConfig) -> None:
|
||||
...
|
||||
|
||||
@classmethod
|
||||
|
||||
@@ -12,7 +12,7 @@ class RelationConfigChangeAction(StrEnum):
|
||||
drop = "drop"
|
||||
|
||||
|
||||
@dataclass(frozen=True, eq=True, unsafe_hash=True)
|
||||
@dataclass(frozen=True, eq=True, unsafe_hash=True) # type: ignore
|
||||
class RelationConfigChange(RelationConfigBase, ABC):
|
||||
action: RelationConfigChangeAction
|
||||
context: Hashable # this is usually a RelationConfig, e.g. IndexConfig, but shouldn't be limited
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import abc
|
||||
import time
|
||||
from typing import List, Optional, Tuple, Any, Iterable, Dict, Union
|
||||
from typing import List, Optional, Tuple, Any, Iterable, Dict
|
||||
|
||||
import agate
|
||||
|
||||
@@ -131,14 +131,6 @@ class SQLConnectionManager(BaseConnectionManager):
|
||||
|
||||
return dbt.clients.agate_helper.table_from_data_flat(data, column_names)
|
||||
|
||||
@classmethod
|
||||
def data_type_code_to_name(cls, type_code: Union[int, str]) -> str:
|
||||
"""Get the string representation of the data type from the type_code."""
|
||||
# https://peps.python.org/pep-0249/#type-objects
|
||||
raise dbt.exceptions.NotImplementedError(
|
||||
"`data_type_code_to_name` is not implemented for this adapter!"
|
||||
)
|
||||
|
||||
def execute(
|
||||
self, sql: str, auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None
|
||||
) -> Tuple[AdapterResponse, agate.Table]:
|
||||
|
||||
@@ -75,6 +75,10 @@ class SQLAdapter(BaseAdapter):
|
||||
decimals = agate_table.aggregate(agate.MaxPrecision(col_idx)) # type: ignore[attr-defined]
|
||||
return "float8" if decimals else "integer"
|
||||
|
||||
@classmethod
|
||||
def convert_integer_type(cls, agate_table: agate.Table, col_idx: int) -> str:
|
||||
return "integer"
|
||||
|
||||
@classmethod
|
||||
def convert_boolean_type(cls, agate_table: agate.Table, col_idx: int) -> str:
|
||||
return "boolean"
|
||||
|
||||
@@ -57,11 +57,10 @@ def args_to_context(args: List[str]) -> Context:
|
||||
from dbt.cli.main import cli
|
||||
|
||||
cli_ctx = cli.make_context(cli.name, args)
|
||||
# Split args if they're a comma seperated string.
|
||||
# Split args if they're a comma separated string.
|
||||
if len(args) == 1 and "," in args[0]:
|
||||
args = args[0].split(",")
|
||||
sub_command_name, sub_command, args = cli.resolve_command(cli_ctx, args)
|
||||
|
||||
# Handle source and docs group.
|
||||
if isinstance(sub_command, Group):
|
||||
sub_command_name, sub_command, args = sub_command.resolve_command(cli_ctx, args)
|
||||
@@ -319,7 +318,6 @@ def command_params(command: CliCommand, args_dict: Dict[str, Any]) -> CommandPar
|
||||
|
||||
for k, v in args_dict.items():
|
||||
k = k.lower()
|
||||
|
||||
# if a "which" value exists in the args dict, it should match the command provided
|
||||
if k == WHICH_KEY:
|
||||
if v != command.value:
|
||||
@@ -342,9 +340,14 @@ def command_params(command: CliCommand, args_dict: Dict[str, Any]) -> CommandPar
|
||||
|
||||
spinal_cased = k.replace("_", "-")
|
||||
|
||||
# MultiOption flags come back as lists, but we want to pass them as space separated strings
|
||||
if isinstance(v, list):
|
||||
v = " ".join(v)
|
||||
|
||||
if k == "macro" and command == CliCommand.RUN_OPERATION:
|
||||
add_fn(v)
|
||||
elif v in (None, False):
|
||||
# None is a Singleton, False is a Flyweight, only one instance of each.
|
||||
elif v is None or v is False:
|
||||
add_fn(f"--no-{spinal_cased}")
|
||||
elif v is True:
|
||||
add_fn(f"--{spinal_cased}")
|
||||
@@ -393,6 +396,7 @@ def command_args(command: CliCommand) -> ArgsList:
|
||||
CliCommand.SOURCE_FRESHNESS: cli.freshness,
|
||||
CliCommand.TEST: cli.test,
|
||||
CliCommand.RETRY: cli.retry,
|
||||
CliCommand.UNIT_TEST: cli.unit_test,
|
||||
}
|
||||
click_cmd: Optional[ClickCommand] = CMD_DICT.get(command, None)
|
||||
if click_cmd is None:
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import functools
|
||||
from copy import copy
|
||||
from dataclasses import dataclass
|
||||
from typing import Callable, List, Optional, Union
|
||||
@@ -39,6 +40,7 @@ from dbt.task.serve import ServeTask
|
||||
from dbt.task.show import ShowTask
|
||||
from dbt.task.snapshot import SnapshotTask
|
||||
from dbt.task.test import TestTask
|
||||
from dbt.task.unit_test import UnitTestTask
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -64,7 +66,7 @@ class dbtRunner:
|
||||
self,
|
||||
manifest: Optional[Manifest] = None,
|
||||
callbacks: Optional[List[Callable[[EventMsg], None]]] = None,
|
||||
):
|
||||
) -> None:
|
||||
self.manifest = manifest
|
||||
|
||||
if callbacks is None:
|
||||
@@ -118,6 +120,44 @@ class dbtRunner:
|
||||
)
|
||||
|
||||
|
||||
# approach from https://github.com/pallets/click/issues/108#issuecomment-280489786
|
||||
def global_flags(func):
|
||||
@p.cache_selected_only
|
||||
@p.debug
|
||||
@p.deprecated_print
|
||||
@p.enable_legacy_logger
|
||||
@p.fail_fast
|
||||
@p.log_cache_events
|
||||
@p.log_file_max_bytes
|
||||
@p.log_format_file
|
||||
@p.log_level
|
||||
@p.log_level_file
|
||||
@p.log_path
|
||||
@p.macro_debugging
|
||||
@p.partial_parse
|
||||
@p.partial_parse_file_path
|
||||
@p.partial_parse_file_diff
|
||||
@p.populate_cache
|
||||
@p.print
|
||||
@p.printer_width
|
||||
@p.quiet
|
||||
@p.record_timing_info
|
||||
@p.send_anonymous_usage_stats
|
||||
@p.single_threaded
|
||||
@p.static_parser
|
||||
@p.use_colors
|
||||
@p.use_colors_file
|
||||
@p.use_experimental_parser
|
||||
@p.version
|
||||
@p.version_check
|
||||
@p.write_json
|
||||
@functools.wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
return func(*args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
# dbt
|
||||
@click.group(
|
||||
context_settings={"help_option_names": ["-h", "--help"]},
|
||||
@@ -126,36 +166,11 @@ class dbtRunner:
|
||||
epilog="Specify one of these sub-commands and you can find more help from there.",
|
||||
)
|
||||
@click.pass_context
|
||||
@p.cache_selected_only
|
||||
@p.debug
|
||||
@p.deprecated_print
|
||||
@p.enable_legacy_logger
|
||||
@p.fail_fast
|
||||
@p.log_cache_events
|
||||
@p.log_format
|
||||
@p.log_format_file
|
||||
@p.log_level
|
||||
@p.log_level_file
|
||||
@p.log_path
|
||||
@p.macro_debugging
|
||||
@p.partial_parse
|
||||
@p.partial_parse_file_path
|
||||
@p.populate_cache
|
||||
@p.print
|
||||
@p.printer_width
|
||||
@p.quiet
|
||||
@p.record_timing_info
|
||||
@p.send_anonymous_usage_stats
|
||||
@p.single_threaded
|
||||
@p.static_parser
|
||||
@p.use_colors
|
||||
@p.use_colors_file
|
||||
@p.use_experimental_parser
|
||||
@p.version
|
||||
@p.version_check
|
||||
@global_flags
|
||||
@p.warn_error
|
||||
@p.warn_error_options
|
||||
@p.write_json
|
||||
@p.log_format
|
||||
@p.show_resource_report
|
||||
def cli(ctx, **kwargs):
|
||||
"""An ELT tool for managing your SQL transformations and data models.
|
||||
For more documentation on these commands, visit: docs.getdbt.com
|
||||
@@ -165,13 +180,14 @@ def cli(ctx, **kwargs):
|
||||
# dbt build
|
||||
@cli.command("build")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.defer
|
||||
@p.deprecated_defer
|
||||
@p.exclude
|
||||
@p.fail_fast
|
||||
@p.favor_state
|
||||
@p.deprecated_favor_state
|
||||
@p.full_refresh
|
||||
@p.include_saved_query
|
||||
@p.indirect_selection
|
||||
@p.profile
|
||||
@p.profiles_dir
|
||||
@@ -188,7 +204,6 @@ def cli(ctx, **kwargs):
|
||||
@p.target_path
|
||||
@p.threads
|
||||
@p.vars
|
||||
@p.version_check
|
||||
@requires.postflight
|
||||
@requires.preflight
|
||||
@requires.profile
|
||||
@@ -211,6 +226,8 @@ def build(ctx, **kwargs):
|
||||
# dbt clean
|
||||
@cli.command("clean")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.clean_project_files_only
|
||||
@p.profile
|
||||
@p.profiles_dir
|
||||
@p.project_dir
|
||||
@@ -233,6 +250,7 @@ def clean(ctx, **kwargs):
|
||||
# dbt docs
|
||||
@cli.group()
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
def docs(ctx, **kwargs):
|
||||
"""Generate or serve the documentation website for your project"""
|
||||
|
||||
@@ -240,6 +258,7 @@ def docs(ctx, **kwargs):
|
||||
# dbt docs generate
|
||||
@docs.command("generate")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.compile_docs
|
||||
@p.defer
|
||||
@p.deprecated_defer
|
||||
@@ -252,6 +271,7 @@ def docs(ctx, **kwargs):
|
||||
@p.select
|
||||
@p.selector
|
||||
@p.empty_catalog
|
||||
@p.static
|
||||
@p.state
|
||||
@p.defer_state
|
||||
@p.deprecated_state
|
||||
@@ -259,7 +279,6 @@ def docs(ctx, **kwargs):
|
||||
@p.target_path
|
||||
@p.threads
|
||||
@p.vars
|
||||
@p.version_check
|
||||
@requires.postflight
|
||||
@requires.preflight
|
||||
@requires.profile
|
||||
@@ -282,6 +301,7 @@ def docs_generate(ctx, **kwargs):
|
||||
# dbt docs serve
|
||||
@docs.command("serve")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.browser
|
||||
@p.port
|
||||
@p.profile
|
||||
@@ -310,6 +330,7 @@ def docs_serve(ctx, **kwargs):
|
||||
# dbt compile
|
||||
@cli.command("compile")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.defer
|
||||
@p.deprecated_defer
|
||||
@p.exclude
|
||||
@@ -328,11 +349,11 @@ def docs_serve(ctx, **kwargs):
|
||||
@p.state
|
||||
@p.defer_state
|
||||
@p.deprecated_state
|
||||
@p.compile_inject_ephemeral_ctes
|
||||
@p.target
|
||||
@p.target_path
|
||||
@p.threads
|
||||
@p.vars
|
||||
@p.version_check
|
||||
@requires.postflight
|
||||
@requires.preflight
|
||||
@requires.profile
|
||||
@@ -356,6 +377,7 @@ def compile(ctx, **kwargs):
|
||||
# dbt show
|
||||
@cli.command("show")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.defer
|
||||
@p.deprecated_defer
|
||||
@p.exclude
|
||||
@@ -379,7 +401,6 @@ def compile(ctx, **kwargs):
|
||||
@p.target_path
|
||||
@p.threads
|
||||
@p.vars
|
||||
@p.version_check
|
||||
@requires.postflight
|
||||
@requires.preflight
|
||||
@requires.profile
|
||||
@@ -403,6 +424,7 @@ def show(ctx, **kwargs):
|
||||
# dbt debug
|
||||
@cli.command("debug")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.debug_connection
|
||||
@p.config_dir
|
||||
@p.profile
|
||||
@@ -410,7 +432,6 @@ def show(ctx, **kwargs):
|
||||
@p.project_dir
|
||||
@p.target
|
||||
@p.vars
|
||||
@p.version_check
|
||||
@requires.postflight
|
||||
@requires.preflight
|
||||
def debug(ctx, **kwargs):
|
||||
@@ -429,18 +450,46 @@ def debug(ctx, **kwargs):
|
||||
# dbt deps
|
||||
@cli.command("deps")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.profile
|
||||
@p.profiles_dir_exists_false
|
||||
@p.project_dir
|
||||
@p.target
|
||||
@p.vars
|
||||
@p.source
|
||||
@p.dry_run
|
||||
@p.lock
|
||||
@p.upgrade
|
||||
@p.add_package
|
||||
@requires.postflight
|
||||
@requires.preflight
|
||||
@requires.unset_profile
|
||||
@requires.project
|
||||
def deps(ctx, **kwargs):
"""Pull the most recent version of the dependencies listed in packages.yml"""
task = DepsTask(ctx.obj["flags"], ctx.obj["project"])
"""Install the dbt packages specified.
In the following cases, a new `package-lock.yml` will be generated and the packages are installed:
- the user updated packages.yml
- the user specified the --upgrade flag, which means that for packages specified as a
range, dbt-core will try to install the newer version
Otherwise, deps will use `package-lock.yml` as the source of truth to install packages.

New packages can also be added by passing the `--add-package` flag to the deps command,
which lets the user specify a package to add in the format packagename@version.
"""
flags = ctx.obj["flags"]
if flags.ADD_PACKAGE:
if not flags.ADD_PACKAGE["version"] and flags.SOURCE != "local":
raise BadOptionUsage(
message=f"Version is required in --add-package when source is {flags.SOURCE}",
option_name="--add-package",
)
else:
if flags.DRY_RUN:
raise BadOptionUsage(
message="Invalid flag `--dry-run` when not using `--add-package`.",
option_name="--dry-run",
)
task = DepsTask(flags, ctx.obj["project"])
results = task.run()
success = task.interpret_results(results)
return results, success
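A hedged illustration of how this surfaces to a user: invoking deps programmatically through dbtRunner (extended earlier in this diff) with the new --add-package flag. The package name and version are only an example of the packagename@version format parsed by the Package param type below.

    from dbt.cli.main import dbtRunner

    runner = dbtRunner()
    # hypothetical invocation; equivalent to `dbt deps --add-package dbt-labs/dbt_utils@1.1.1`
    result = runner.invoke(["deps", "--add-package", "dbt-labs/dbt_utils@1.1.1"])
    print(result.success)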
|
||||
@@ -449,6 +498,7 @@ def deps(ctx, **kwargs):
|
||||
# dbt init
|
||||
@cli.command("init")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
# for backwards compatibility, accept 'project_name' as an optional positional argument
|
||||
@click.argument("project_name", required=False)
|
||||
@p.profile
|
||||
@@ -471,6 +521,7 @@ def init(ctx, **kwargs):
|
||||
# dbt list
|
||||
@cli.command("list")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.exclude
|
||||
@p.indirect_selection
|
||||
@p.models
|
||||
@@ -516,6 +567,7 @@ cli.add_command(ls, "ls")
|
||||
# dbt parse
|
||||
@cli.command("parse")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.profile
|
||||
@p.profiles_dir
|
||||
@p.project_dir
|
||||
@@ -523,7 +575,6 @@ cli.add_command(ls, "ls")
|
||||
@p.target_path
|
||||
@p.threads
|
||||
@p.vars
|
||||
@p.version_check
|
||||
@requires.postflight
|
||||
@requires.preflight
|
||||
@requires.profile
|
||||
@@ -533,19 +584,18 @@ cli.add_command(ls, "ls")
|
||||
def parse(ctx, **kwargs):
|
||||
"""Parses the project and provides information on performance"""
|
||||
# manifest generation and writing happens in @requires.manifest
|
||||
|
||||
return ctx.obj["manifest"], True
|
||||
|
||||
|
||||
# dbt run
|
||||
@cli.command("run")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.defer
|
||||
@p.deprecated_defer
|
||||
@p.favor_state
|
||||
@p.deprecated_favor_state
|
||||
@p.exclude
|
||||
@p.fail_fast
|
||||
@p.full_refresh
|
||||
@p.profile
|
||||
@p.profiles_dir
|
||||
@@ -559,7 +609,6 @@ def parse(ctx, **kwargs):
|
||||
@p.target_path
|
||||
@p.threads
|
||||
@p.vars
|
||||
@p.version_check
|
||||
@requires.postflight
|
||||
@requires.preflight
|
||||
@requires.profile
|
||||
@@ -582,6 +631,7 @@ def run(ctx, **kwargs):
|
||||
# dbt retry
|
||||
@cli.command("retry")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.project_dir
|
||||
@p.profiles_dir
|
||||
@p.vars
|
||||
@@ -589,7 +639,6 @@ def run(ctx, **kwargs):
|
||||
@p.target
|
||||
@p.state
|
||||
@p.threads
|
||||
@p.fail_fast
|
||||
@requires.postflight
|
||||
@requires.preflight
|
||||
@requires.profile
|
||||
@@ -612,6 +661,7 @@ def retry(ctx, **kwargs):
|
||||
# dbt clone
|
||||
@cli.command("clone")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.defer_state
|
||||
@p.exclude
|
||||
@p.full_refresh
|
||||
@@ -626,7 +676,6 @@ def retry(ctx, **kwargs):
|
||||
@p.target_path
|
||||
@p.threads
|
||||
@p.vars
|
||||
@p.version_check
|
||||
@requires.preflight
|
||||
@requires.profile
|
||||
@requires.project
|
||||
@@ -649,6 +698,7 @@ def clone(ctx, **kwargs):
|
||||
# dbt run operation
|
||||
@cli.command("run-operation")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@click.argument("macro")
|
||||
@p.args
|
||||
@p.profile
|
||||
@@ -680,6 +730,7 @@ def run_operation(ctx, **kwargs):
|
||||
# dbt seed
|
||||
@cli.command("seed")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.exclude
|
||||
@p.full_refresh
|
||||
@p.profile
|
||||
@@ -695,7 +746,6 @@ def run_operation(ctx, **kwargs):
|
||||
@p.target_path
|
||||
@p.threads
|
||||
@p.vars
|
||||
@p.version_check
|
||||
@requires.postflight
|
||||
@requires.preflight
|
||||
@requires.profile
|
||||
@@ -717,6 +767,7 @@ def seed(ctx, **kwargs):
|
||||
# dbt snapshot
|
||||
@cli.command("snapshot")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.defer
|
||||
@p.deprecated_defer
|
||||
@p.exclude
|
||||
@@ -756,6 +807,7 @@ def snapshot(ctx, **kwargs):
|
||||
# dbt source
|
||||
@cli.group()
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
def source(ctx, **kwargs):
|
||||
"""Manage your project's sources"""
|
||||
|
||||
@@ -763,6 +815,7 @@ def source(ctx, **kwargs):
|
||||
# dbt source freshness
|
||||
@source.command("freshness")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.exclude
|
||||
@p.output_path # TODO: Is this ok to re-use? We have three different output params, how much can we consolidate?
|
||||
@p.profile
|
||||
@@ -805,6 +858,48 @@ cli.commands["source"].add_command(snapshot_freshness, "snapshot-freshness") #
|
||||
# dbt test
|
||||
@cli.command("test")
|
||||
@click.pass_context
|
||||
@global_flags
|
||||
@p.defer
|
||||
@p.deprecated_defer
|
||||
@p.exclude
|
||||
@p.favor_state
|
||||
@p.deprecated_favor_state
|
||||
@p.indirect_selection
|
||||
@p.profile
|
||||
@p.profiles_dir
|
||||
@p.project_dir
|
||||
@p.select
|
||||
@p.selector
|
||||
@p.state
|
||||
@p.defer_state
|
||||
@p.deprecated_state
|
||||
@p.store_failures
|
||||
@p.target
|
||||
@p.target_path
|
||||
@p.threads
|
||||
@p.vars
|
||||
@requires.postflight
|
||||
@requires.preflight
|
||||
@requires.profile
|
||||
@requires.project
|
||||
@requires.runtime_config
|
||||
@requires.manifest
|
||||
def test(ctx, **kwargs):
|
||||
"""Runs tests on data in deployed models. Run this after `dbt run`"""
|
||||
task = TestTask(
|
||||
ctx.obj["flags"],
|
||||
ctx.obj["runtime_config"],
|
||||
ctx.obj["manifest"],
|
||||
)
|
||||
|
||||
results = task.run()
|
||||
success = task.interpret_results(results)
|
||||
return results, success
|
||||
|
||||
|
||||
# dbt unit-test
|
||||
@cli.command("unit-test")
|
||||
@click.pass_context
|
||||
@p.defer
|
||||
@p.deprecated_defer
|
||||
@p.exclude
|
||||
@@ -812,6 +907,7 @@ cli.commands["source"].add_command(snapshot_freshness, "snapshot-freshness") #
|
||||
@p.favor_state
|
||||
@p.deprecated_favor_state
|
||||
@p.indirect_selection
|
||||
@p.show_output_format
|
||||
@p.profile
|
||||
@p.profiles_dir
|
||||
@p.project_dir
|
||||
@@ -832,9 +928,9 @@ cli.commands["source"].add_command(snapshot_freshness, "snapshot-freshness") #
|
||||
@requires.project
|
||||
@requires.runtime_config
|
||||
@requires.manifest
|
||||
def test(ctx, **kwargs):
|
||||
def unit_test(ctx, **kwargs):
|
||||
"""Runs tests on data in deployed models. Run this after `dbt run`"""
|
||||
task = TestTask(
|
||||
task = UnitTestTask(
|
||||
ctx.obj["flags"],
|
||||
ctx.obj["runtime_config"],
|
||||
ctx.obj["manifest"],
|
||||
|
||||
@@ -22,6 +22,26 @@ class YAML(ParamType):
|
||||
self.fail(f"String '{value}' is not valid YAML", param, ctx)
|
||||
|
||||
|
||||
class Package(ParamType):
"""The Click STRING type. Converts string into dict with package name and version.
Example package:
package-name@1.0.0
package-name
"""

name = "NewPackage"

def convert(self, value, param, ctx):
# assume non-string values are a problem
if not isinstance(value, str):
self.fail(f"Cannot load Package from type {type(value)}", param, ctx)
try:
package_name, package_version = value.split("@")
return {"name": package_name, "version": package_version}
except ValueError:
return {"name": value, "version": None}
|
||||
|
||||
|
||||
class WarnErrorOptionsType(YAML):
|
||||
"""The Click WarnErrorOptions type. Converts YAML strings into objects."""
|
||||
|
||||
|
||||
@@ -2,19 +2,21 @@ import click
|
||||
import inspect
|
||||
import typing as t
|
||||
from click import Context
|
||||
from click.parser import OptionParser, ParsingState
|
||||
from dbt.cli.option_types import ChoiceTuple
|
||||
|
||||
|
||||
# Implementation from: https://stackoverflow.com/a/48394004
|
||||
# Note MultiOption options must be specified with type=tuple or type=ChoiceTuple (https://github.com/pallets/click/issues/2012)
|
||||
class MultiOption(click.Option):
|
||||
def __init__(self, *args, **kwargs):
|
||||
def __init__(self, *args, **kwargs) -> None:
|
||||
self.save_other_options = kwargs.pop("save_other_options", True)
|
||||
nargs = kwargs.pop("nargs", -1)
|
||||
assert nargs == -1, "nargs, if set, must be -1 not {}".format(nargs)
|
||||
super(MultiOption, self).__init__(*args, **kwargs)
|
||||
self._previous_parser_process = None
|
||||
self._eat_all_parser = None
|
||||
# this makes mypy happy, setting these to None causes mypy failures
|
||||
self._previous_parser_process = lambda *args, **kwargs: None
|
||||
self._eat_all_parser = lambda *args, **kwargs: None
|
||||
|
||||
# validate that multiple=True
|
||||
multiple = kwargs.pop("multiple", None)
|
||||
@@ -29,34 +31,35 @@ class MultiOption(click.Option):
|
||||
else:
|
||||
assert isinstance(option_type, ChoiceTuple), msg
|
||||
|
||||
def add_to_parser(self, parser, ctx):
|
||||
def parser_process(value, state):
|
||||
def add_to_parser(self, parser: OptionParser, ctx: Context):
|
||||
def parser_process(value: str, state: ParsingState):
|
||||
# method to hook to the parser.process
|
||||
done = False
|
||||
value = [value]
|
||||
value_list = str.split(value, " ")
|
||||
if self.save_other_options:
|
||||
# grab everything up to the next option
|
||||
while state.rargs and not done:
|
||||
for prefix in self._eat_all_parser.prefixes:
|
||||
for prefix in self._eat_all_parser.prefixes: # type: ignore[attr-defined]
|
||||
if state.rargs[0].startswith(prefix):
|
||||
done = True
|
||||
if not done:
|
||||
value.append(state.rargs.pop(0))
|
||||
value_list.append(state.rargs.pop(0))
|
||||
else:
|
||||
# grab everything remaining
|
||||
value += state.rargs
|
||||
value_list += state.rargs
|
||||
state.rargs[:] = []
|
||||
value = tuple(value)
|
||||
value_tuple = tuple(value_list)
|
||||
# call the actual process
|
||||
self._previous_parser_process(value, state)
|
||||
self._previous_parser_process(value_tuple, state)
|
||||
|
||||
retval = super(MultiOption, self).add_to_parser(parser, ctx)
|
||||
for name in self.opts:
|
||||
our_parser = parser._long_opt.get(name) or parser._short_opt.get(name)
|
||||
if our_parser:
|
||||
self._eat_all_parser = our_parser
|
||||
self._eat_all_parser = our_parser # type: ignore[assignment]
|
||||
self._previous_parser_process = our_parser.process
|
||||
our_parser.process = parser_process
|
||||
# mypy doesn't like assignment to a method; see https://github.com/python/mypy/issues/708
|
||||
our_parser.process = parser_process # type: ignore[method-assign]
|
||||
break
|
||||
return retval
|
||||
|
||||
|
||||
@@ -2,10 +2,16 @@ from pathlib import Path
|
||||
|
||||
import click
|
||||
from dbt.cli.options import MultiOption
|
||||
from dbt.cli.option_types import YAML, ChoiceTuple, WarnErrorOptionsType
|
||||
from dbt.cli.option_types import YAML, ChoiceTuple, WarnErrorOptionsType, Package
|
||||
from dbt.cli.resolvers import default_project_dir, default_profiles_dir
|
||||
from dbt.version import get_version_information
|
||||
|
||||
add_package = click.option(
|
||||
"--add-package",
|
||||
help="Add a package to current package spec, specify it as package-name@version. Change the source with --source flag.",
|
||||
envvar=None,
|
||||
type=Package(),
|
||||
)
|
||||
args = click.option(
|
||||
"--args",
|
||||
envvar=None,
|
||||
@@ -40,6 +46,14 @@ compile_docs = click.option(
|
||||
default=True,
|
||||
)
|
||||
|
||||
compile_inject_ephemeral_ctes = click.option(
|
||||
"--inject-ephemeral-ctes/--no-inject-ephemeral-ctes",
|
||||
envvar=None,
|
||||
help="Internal flag controlling injection of referenced ephemeral models' CTEs during `compile`.",
|
||||
hidden=True,
|
||||
default=True,
|
||||
)
|
||||
|
||||
config_dir = click.option(
|
||||
"--config-dir",
|
||||
envvar=None,
|
||||
@@ -69,6 +83,14 @@ deprecated_defer = click.option(
|
||||
hidden=True,
|
||||
)
|
||||
|
||||
dry_run = click.option(
|
||||
"--dry-run",
|
||||
envvar=None,
|
||||
help="Option to run `dbt deps --add-package` without updating package-lock.yml file.",
|
||||
is_flag=True,
|
||||
)
|
||||
|
||||
|
||||
enable_legacy_logger = click.option(
|
||||
"--enable-legacy-logger/--no-enable-legacy-logger",
|
||||
envvar="DBT_ENABLE_LEGACY_LOGGER",
|
||||
@@ -119,6 +141,13 @@ indirect_selection = click.option(
|
||||
default="eager",
|
||||
)
|
||||
|
||||
lock = click.option(
|
||||
"--lock",
|
||||
envvar=None,
|
||||
help="Generate the package-lock.yml file without install the packages.",
|
||||
is_flag=True,
|
||||
)
|
||||
|
||||
log_cache_events = click.option(
|
||||
"--log-cache-events/--no-log-cache-events",
|
||||
help="Enable verbose logging for relational cache events to help when debugging.",
|
||||
@@ -171,6 +200,15 @@ use_colors_file = click.option(
|
||||
default=True,
|
||||
)
|
||||
|
||||
log_file_max_bytes = click.option(
|
||||
"--log-file-max-bytes",
|
||||
envvar="DBT_LOG_FILE_MAX_BYTES",
|
||||
help="Configure the max file size in bytes for a single dbt.log file, before rolling over. 0 means no limit.",
|
||||
default=10 * 1024 * 1024, # 10mb
|
||||
type=click.INT,
|
||||
hidden=True,
|
||||
)
|
||||
|
||||
log_path = click.option(
|
||||
"--log-path",
|
||||
envvar="DBT_LOG_PATH",
|
||||
@@ -248,6 +286,14 @@ partial_parse_file_path = click.option(
|
||||
type=click.Path(exists=True, dir_okay=False, resolve_path=True),
|
||||
)
|
||||
|
||||
partial_parse_file_diff = click.option(
|
||||
"--partial-parse-file-diff/--no-partial-parse-file-diff",
|
||||
envvar="DBT_PARTIAL_PARSE_FILE_DIFF",
|
||||
help="Internal flag for whether to compute a file diff during partial parsing.",
|
||||
hidden=True,
|
||||
default=True,
|
||||
)
|
||||
|
||||
populate_cache = click.option(
|
||||
"--populate-cache/--no-populate-cache",
|
||||
envvar="DBT_POPULATE_CACHE",
|
||||
@@ -290,7 +336,7 @@ printer_width = click.option(
|
||||
profile = click.option(
|
||||
"--profile",
|
||||
envvar=None,
|
||||
help="Which profile to load. Overrides setting in dbt_project.yml.",
|
||||
help="Which existing profile to load. Overrides setting in dbt_project.yml.",
|
||||
)
|
||||
|
||||
profiles_dir = click.option(
|
||||
@@ -343,6 +389,7 @@ resource_type = click.option(
|
||||
type=ChoiceTuple(
|
||||
[
|
||||
"metric",
|
||||
"semantic_model",
|
||||
"source",
|
||||
"analysis",
|
||||
"model",
|
||||
@@ -360,6 +407,14 @@ resource_type = click.option(
|
||||
default=(),
|
||||
)
|
||||
|
||||
include_saved_query = click.option(
|
||||
"--include-saved-query/--no-include-saved-query",
|
||||
envvar="DBT_INCLUDE_SAVED_QUERY",
|
||||
help="Include saved queries in the list of resources to be selected for build command",
|
||||
is_flag=True,
|
||||
hidden=True,
|
||||
)
|
||||
|
||||
model_decls = ("-m", "--models", "--model")
|
||||
select_decls = ("-s", "--select")
|
||||
select_attrs = {
|
||||
@@ -380,9 +435,9 @@ inline = click.option(
|
||||
# Most CLI arguments should use the combined `select` option that aliases `--models` to `--select`.
|
||||
# However, if you need to split out these separators (like `dbt ls`), use the `models` and `raw_select` options instead.
|
||||
# See https://github.com/dbt-labs/dbt-core/pull/6774#issuecomment-1408476095 for more info.
|
||||
models = click.option(*model_decls, **select_attrs)
|
||||
raw_select = click.option(*select_decls, **select_attrs)
|
||||
select = click.option(*select_decls, *model_decls, **select_attrs)
|
||||
models = click.option(*model_decls, **select_attrs) # type: ignore[arg-type]
|
||||
raw_select = click.option(*select_decls, **select_attrs) # type: ignore[arg-type]
|
||||
select = click.option(*select_decls, *model_decls, **select_attrs) # type: ignore[arg-type]
|
||||
|
||||
selector = click.option(
|
||||
"--selector",
|
||||
@@ -397,6 +452,13 @@ send_anonymous_usage_stats = click.option(
|
||||
default=True,
|
||||
)
|
||||
|
||||
clean_project_files_only = click.option(
|
||||
"--clean-project-files-only / --no-clean-project-files-only",
|
||||
envvar="DBT_CLEAN_PROJECT_FILES_ONLY",
|
||||
help="If disabled, dbt clean will delete all paths specified in clean-paths, even if they're outside the dbt project.",
|
||||
default=True,
|
||||
)
|
||||
|
||||
show = click.option(
|
||||
"--show",
|
||||
envvar=None,
|
||||
@@ -432,6 +494,21 @@ empty_catalog = click.option(
|
||||
is_flag=True,
|
||||
)
|
||||
|
||||
source = click.option(
|
||||
"--source",
|
||||
envvar=None,
|
||||
help="Source to download page from, must be one of hub, git, or local. Defaults to hub.",
|
||||
type=click.Choice(["hub", "git", "local"], case_sensitive=True),
|
||||
default="hub",
|
||||
)
|
||||
|
||||
static = click.option(
|
||||
"--static",
|
||||
help="Generate an additional static_index.html with manifest and catalog built-in.",
|
||||
default=False,
|
||||
is_flag=True,
|
||||
)
|
||||
|
||||
state = click.option(
|
||||
"--state",
|
||||
envvar="DBT_STATE",
|
||||
@@ -500,6 +577,13 @@ target_path = click.option(
|
||||
type=click.Path(),
|
||||
)
|
||||
|
||||
upgrade = click.option(
|
||||
"--upgrade",
|
||||
envvar=None,
|
||||
help="Upgrade packages to the latest version.",
|
||||
is_flag=True,
|
||||
)
|
||||
|
||||
debug_connection = click.option(
|
||||
"--connection",
|
||||
envvar=None,
|
||||
@@ -581,3 +665,10 @@ write_json = click.option(
|
||||
help="Whether or not to write the manifest.json and run_results.json files to the target directory",
|
||||
default=True,
|
||||
)
|
||||
|
||||
show_resource_report = click.option(
|
||||
"--show-resource-report/--no-show-resource-report",
|
||||
default=False,
|
||||
envvar="DBT_SHOW_RESOURCE_REPORT",
|
||||
hidden=True,
|
||||
)
|
||||
|
||||
@@ -9,12 +9,14 @@ from dbt.cli.exceptions import (
|
||||
from dbt.cli.flags import Flags
|
||||
from dbt.config import RuntimeConfig
|
||||
from dbt.config.runtime import load_project, load_profile, UnsetProfile
|
||||
from dbt.events.base_types import EventLevel
|
||||
from dbt.events.functions import fire_event, LOG_VERSION, set_invocation_id, setup_event_logger
|
||||
from dbt.events.types import (
|
||||
CommandCompleted,
|
||||
MainReportVersion,
|
||||
MainReportArgs,
|
||||
MainTrackingUserState,
|
||||
ResourceReport,
|
||||
)
|
||||
from dbt.events.helpers import get_json_string_utcnow
|
||||
from dbt.events.types import MainEncounteredError, MainStackTrace
|
||||
@@ -27,6 +29,7 @@ from dbt.plugins import set_up_plugin_manager, get_plugin_manager
|
||||
|
||||
from click import Context
|
||||
from functools import update_wrapper
|
||||
import importlib.util
|
||||
import time
|
||||
import traceback
|
||||
|
||||
@@ -96,6 +99,28 @@ def postflight(func):
|
||||
fire_event(MainStackTrace(stack_trace=traceback.format_exc()))
|
||||
raise ExceptionExit(e)
|
||||
finally:
|
||||
# Fire ResourceReport, but only on systems which support the resource
|
||||
# module. (Skip it on Windows).
|
||||
if importlib.util.find_spec("resource") is not None:
|
||||
import resource
|
||||
|
||||
rusage = resource.getrusage(resource.RUSAGE_SELF)
|
||||
fire_event(
|
||||
ResourceReport(
|
||||
command_name=ctx.command.name,
|
||||
command_success=success,
|
||||
command_wall_clock_time=time.perf_counter() - start_func,
|
||||
process_user_time=rusage.ru_utime,
|
||||
process_kernel_time=rusage.ru_stime,
|
||||
process_mem_max_rss=rusage.ru_maxrss,
|
||||
process_in_blocks=rusage.ru_inblock,
|
||||
process_out_blocks=rusage.ru_oublock,
|
||||
),
|
||||
EventLevel.INFO
|
||||
if "flags" in ctx.obj and ctx.obj["flags"].SHOW_RESOURCE_REPORT
|
||||
else None,
|
||||
)
|
||||
|
||||
fire_event(
|
||||
CommandCompleted(
|
||||
command=ctx.command_path,
|
||||
|
||||
@@ -24,6 +24,7 @@ class Command(Enum):
|
||||
SOURCE_FRESHNESS = "freshness"
|
||||
TEST = "test"
|
||||
RETRY = "retry"
|
||||
UNIT_TEST = "unit-test"
|
||||
|
||||
@classmethod
|
||||
def from_str(cls, s: str) -> "Command":
|
||||
|
||||
@@ -3,16 +3,30 @@ from codecs import BOM_UTF8
|
||||
import agate
|
||||
import datetime
|
||||
import isodate
|
||||
import io
|
||||
import json
|
||||
import dbt.utils
|
||||
from typing import Iterable, List, Dict, Union, Optional, Any
|
||||
|
||||
from dbt.exceptions import DbtRuntimeError
|
||||
|
||||
|
||||
BOM = BOM_UTF8.decode("utf-8") # '\ufeff'
|
||||
|
||||
|
||||
class Integer(agate.data_types.DataType):
def cast(self, d):
# by default agate will cast none as a Number
# but we need to cast it as an Integer to preserve
# the type when merging and unioning tables
if type(d) == int or d is None:
return d
else:
raise agate.exceptions.CastError('Can not parse value "%s" as Integer.' % d)

def jsonify(self, d):
return d
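A short sketch of the behaviour this type guarantees: Python ints and None pass through untouched, while anything else — including numeric strings, which agate's Number would parse — is rejected:

    import agate

    int_type = Integer(null_values=("null", ""))
    assert int_type.cast(3) == 3
    assert int_type.cast(None) is None
    try:
        int_type.cast("3")  # strings are not silently parsed
    except agate.exceptions.CastError:
        pass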
|
||||
|
||||
|
||||
class Number(agate.data_types.Number):
|
||||
# undo the change in https://github.com/wireservice/agate/pull/733
|
||||
# i.e. do not cast True and False to numeric 1 and 0
|
||||
@@ -48,6 +62,7 @@ def build_type_tester(
|
||||
) -> agate.TypeTester:
|
||||
|
||||
types = [
|
||||
Integer(null_values=("null", "")),
|
||||
Number(null_values=("null", "")),
|
||||
agate.data_types.Date(null_values=("null", ""), date_format="%Y-%m-%d"),
|
||||
agate.data_types.DateTime(null_values=("null", ""), datetime_format="%Y-%m-%d %H:%M:%S"),
|
||||
@@ -123,6 +138,23 @@ def table_from_data_flat(data, column_names: Iterable[str]) -> agate.Table:
|
||||
)
|
||||
|
||||
|
||||
def json_rows_from_table(table: agate.Table) -> List[Dict[str, Any]]:
"Convert a table to a list of row dict objects"
output = io.StringIO()
table.to_json(path=output)  # type: ignore

return json.loads(output.getvalue())


def list_rows_from_table(table: agate.Table) -> List[Any]:
"Convert a table to a list of lists, where the first element represents the header"
rows = [[col.name for col in table.columns]]
for row in table.rows:
rows.append(list(row.values()))

return rows
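Roughly how list_rows_from_table behaves on a small in-memory table; agate infers a Number type for the id column here, so those values come back as Decimals:

    import agate

    table = agate.Table([(1, "a"), (2, "b")], ["id", "letter"])
    print(list_rows_from_table(table))
    # [['id', 'letter'], [Decimal('1'), 'a'], [Decimal('2'), 'b']]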
|
||||
|
||||
|
||||
def empty_table():
|
||||
"Returns an empty Agate table. To be used in place of None"
|
||||
|
||||
@@ -135,12 +167,12 @@ def as_matrix(table):
|
||||
return [r.values() for r in table.rows.values()]
|
||||
|
||||
|
||||
def from_csv(abspath, text_columns):
|
||||
def from_csv(abspath, text_columns, delimiter=","):
|
||||
type_tester = build_type_tester(text_columns=text_columns)
|
||||
with open(abspath, encoding="utf-8") as fp:
|
||||
if fp.read(1) != BOM:
|
||||
fp.seek(0)
|
||||
return agate.Table.from_csv(fp, column_types=type_tester)
|
||||
return agate.Table.from_csv(fp, column_types=type_tester, delimiter=delimiter)
|
||||
|
||||
|
||||
class _NullMarker:
|
||||
@@ -151,7 +183,7 @@ NullableAgateType = Union[agate.data_types.DataType, _NullMarker]
|
||||
|
||||
|
||||
class ColumnTypeBuilder(Dict[str, NullableAgateType]):
|
||||
def __init__(self):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
@@ -166,6 +198,13 @@ class ColumnTypeBuilder(Dict[str, NullableAgateType]):
|
||||
elif isinstance(value, _NullMarker):
|
||||
# use the existing value
|
||||
return
|
||||
# when one table column is Number while another is Integer, force the column to Number on merge
|
||||
elif isinstance(value, Integer) and isinstance(existing_type, agate.data_types.Number):
|
||||
# use the existing value
|
||||
return
|
||||
elif isinstance(existing_type, Integer) and isinstance(value, agate.data_types.Number):
|
||||
# overwrite
|
||||
super().__setitem__(key, value)
|
||||
elif not isinstance(value, type(existing_type)):
|
||||
# actual type mismatch!
|
||||
raise DbtRuntimeError(
|
||||
@@ -177,8 +216,9 @@ class ColumnTypeBuilder(Dict[str, NullableAgateType]):
|
||||
result: Dict[str, agate.data_types.DataType] = {}
|
||||
for key, value in self.items():
|
||||
if isinstance(value, _NullMarker):
|
||||
# this is what agate would do.
|
||||
result[key] = agate.data_types.Number()
|
||||
# agate would make it a Number but we'll make it Integer so that if this column
|
||||
# gets merged with another Integer column, it won't get forced to a Number
|
||||
result[key] = Integer()
|
||||
else:
|
||||
result[key] = value
|
||||
return result
|
||||
@@ -218,3 +258,12 @@ def merge_tables(tables: List[agate.Table]) -> agate.Table:
|
||||
rows.append(agate.Row(data, column_names))
|
||||
# _is_fork to tell agate that we already made things into `Row`s.
|
||||
return agate.Table(rows, column_names, column_types, _is_fork=True)
|
||||
|
||||
|
||||
def get_column_value_uncased(column_name: str, row: agate.Row) -> Any:
"""Get the value of a column in this row, ignoring the casing of the column name."""
for key, value in row.items():
if key.casefold() == column_name.casefold():
return value

raise KeyError
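This is the helper calculate_freshness_from_metadata uses to read last_modified/snapshotted_at regardless of how the warehouse cases its column names. It only relies on .items(), so a plain dict with hypothetical values can stand in for an agate.Row in a quick illustration:

    row = {"LAST_MODIFIED": "2023-11-01 12:00:00", "snapshotted_at": "2023-11-01 12:05:00"}
    assert get_column_value_uncased("last_modified", row) == "2023-11-01 12:00:00"
    assert get_column_value_uncased("Snapshotted_At", row) == "2023-11-01 12:05:00"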
|
||||
|
||||
@@ -191,7 +191,7 @@ NativeSandboxEnvironment.template_class = NativeSandboxTemplate # type: ignore
|
||||
|
||||
|
||||
class TemplateCache:
|
||||
def __init__(self):
|
||||
def __init__(self) -> None:
|
||||
self.file_cache: Dict[str, jinja2.Template] = {}
|
||||
|
||||
def get_node_template(self, node) -> jinja2.Template:
|
||||
@@ -330,6 +330,26 @@ class MacroGenerator(BaseMacroGenerator):
|
||||
return self.call_macro(*args, **kwargs)
|
||||
|
||||
|
||||
class UnitTestMacroGenerator(MacroGenerator):
|
||||
# this makes UnitTestMacroGenerator objects callable like functions
|
||||
def __init__(
|
||||
self,
|
||||
macro_generator: MacroGenerator,
|
||||
call_return_value: Any,
|
||||
) -> None:
|
||||
super().__init__(
|
||||
macro_generator.macro,
|
||||
macro_generator.context,
|
||||
macro_generator.node,
|
||||
macro_generator.stack,
|
||||
)
|
||||
self.call_return_value = call_return_value
|
||||
|
||||
def __call__(self, *args, **kwargs):
|
||||
with self.track_call():
|
||||
return self.call_return_value
|
||||
|
||||
|
||||
class QueryStringGenerator(BaseMacroGenerator):
|
||||
def __init__(self, template_str: str, context: Dict[str, Any]) -> None:
|
||||
super().__init__(context)
|
||||
|
||||
@@ -4,7 +4,6 @@ import json
|
||||
import networkx as nx # type: ignore
|
||||
import os
|
||||
import pickle
|
||||
import sqlparse
|
||||
|
||||
from collections import defaultdict
|
||||
from typing import List, Dict, Any, Tuple, Optional
|
||||
@@ -13,7 +12,10 @@ from dbt.flags import get_flags
|
||||
from dbt.adapters.factory import get_adapter
|
||||
from dbt.clients import jinja
|
||||
from dbt.clients.system import make_directory
|
||||
from dbt.context.providers import generate_runtime_model_context
|
||||
from dbt.context.providers import (
|
||||
generate_runtime_model_context,
|
||||
generate_runtime_unit_test_context,
|
||||
)
|
||||
from dbt.contracts.graph.manifest import Manifest, UniqueID
|
||||
from dbt.contracts.graph.nodes import (
|
||||
ManifestNode,
|
||||
@@ -22,6 +24,7 @@ from dbt.contracts.graph.nodes import (
|
||||
GraphMemberNode,
|
||||
InjectedCTE,
|
||||
SeedNode,
|
||||
UnitTestNode,
|
||||
)
|
||||
from dbt.exceptions import (
|
||||
GraphDependencyNotFoundError,
|
||||
@@ -36,6 +39,7 @@ from dbt.node_types import NodeType, ModelLanguage
|
||||
from dbt.events.format import pluralize
|
||||
import dbt.tracking
|
||||
import dbt.task.list as list_task
|
||||
import sqlparse
|
||||
|
||||
graph_file_name = "graph.gpickle"
|
||||
|
||||
@@ -44,6 +48,7 @@ def print_compile_stats(stats):
|
||||
names = {
|
||||
NodeType.Model: "model",
|
||||
NodeType.Test: "test",
|
||||
NodeType.Unit: "unit test",
|
||||
NodeType.Snapshot: "snapshot",
|
||||
NodeType.Analysis: "analysis",
|
||||
NodeType.Macro: "macro",
|
||||
@@ -91,6 +96,7 @@ def _generate_stats(manifest: Manifest):
|
||||
stats[NodeType.Macro] += len(manifest.macros)
|
||||
stats[NodeType.Group] += len(manifest.groups)
|
||||
stats[NodeType.SemanticModel] += len(manifest.semantic_models)
|
||||
stats[NodeType.Unit] += len(manifest.unit_tests)
|
||||
|
||||
# TODO: should we be counting dimensions + entities?
|
||||
|
||||
@@ -125,7 +131,7 @@ def _get_tests_for_node(manifest: Manifest, unique_id: UniqueID) -> List[UniqueI
|
||||
|
||||
|
||||
class Linker:
|
||||
def __init__(self, data=None):
|
||||
def __init__(self, data=None) -> None:
|
||||
if data is None:
|
||||
data = {}
|
||||
self.graph = nx.DiGraph(**data)
|
||||
@@ -183,14 +189,18 @@ class Linker:
|
||||
def link_graph(self, manifest: Manifest):
|
||||
for source in manifest.sources.values():
|
||||
self.add_node(source.unique_id)
|
||||
for semantic_model in manifest.semantic_models.values():
|
||||
self.add_node(semantic_model.unique_id)
|
||||
for node in manifest.nodes.values():
|
||||
self.link_node(node, manifest)
|
||||
for semantic_model in manifest.semantic_models.values():
|
||||
self.link_node(semantic_model, manifest)
|
||||
for exposure in manifest.exposures.values():
|
||||
self.link_node(exposure, manifest)
|
||||
for metric in manifest.metrics.values():
|
||||
self.link_node(metric, manifest)
|
||||
for unit_test in manifest.unit_tests.values():
|
||||
self.link_node(unit_test, manifest)
|
||||
for saved_query in manifest.saved_queries.values():
|
||||
self.link_node(saved_query, manifest)
|
||||
|
||||
cycle = self.find_cycles()
|
||||
|
||||
@@ -274,7 +284,7 @@ class Linker:
|
||||
|
||||
|
||||
class Compiler:
|
||||
def __init__(self, config):
|
||||
def __init__(self, config) -> None:
|
||||
self.config = config
|
||||
|
||||
def initialize(self):
|
||||
@@ -289,8 +299,10 @@ class Compiler:
|
||||
manifest: Manifest,
|
||||
extra_context: Dict[str, Any],
|
||||
) -> Dict[str, Any]:
|
||||
|
||||
context = generate_runtime_model_context(node, self.config, manifest)
|
||||
if isinstance(node, UnitTestNode):
|
||||
context = generate_runtime_unit_test_context(node, self.config, manifest)
|
||||
else:
|
||||
context = generate_runtime_model_context(node, self.config, manifest)
|
||||
context.update(extra_context)
|
||||
|
||||
if isinstance(node, GenericTestNode):
|
||||
@@ -320,6 +332,10 @@ class Compiler:
|
||||
if model.compiled_code is None:
|
||||
raise DbtRuntimeError("Cannot inject ctes into an uncompiled node", model)
|
||||
|
||||
# tech debt: safe flag/arg access (#6259)
|
||||
if not getattr(self.config.args, "inject_ephemeral_ctes", True):
|
||||
return (model, [])
|
||||
|
||||
# extra_ctes_injected flag says that we've already recursively injected the ctes
|
||||
if model.extra_ctes_injected:
|
||||
return (model, model.extra_ctes)
|
||||
@@ -378,16 +394,16 @@ class Compiler:
|
||||
|
||||
_add_prepended_cte(prepended_ctes, InjectedCTE(id=cte.id, sql=sql))
|
||||
|
||||
injected_sql = inject_ctes_into_sql(
|
||||
model.compiled_code,
|
||||
prepended_ctes,
|
||||
)
|
||||
# Check again before updating for multi-threading
|
||||
if not model.extra_ctes_injected:
|
||||
injected_sql = inject_ctes_into_sql(
|
||||
model.compiled_code,
|
||||
prepended_ctes,
|
||||
)
|
||||
model.extra_ctes_injected = True
|
||||
model._pre_injected_sql = model.compiled_code
|
||||
model.compiled_code = injected_sql
|
||||
model.extra_ctes = prepended_ctes
|
||||
model.extra_ctes_injected = True
|
||||
|
||||
# if model.extra_ctes is not set to prepended ctes, something went wrong
|
||||
return model, model.extra_ctes
|
||||
@@ -523,6 +539,12 @@ class Compiler:
|
||||
the node's raw_code into compiled_code, and then calls the
|
||||
recursive method to "prepend" the ctes.
|
||||
"""
|
||||
# Make sure Lexer for sqlparse 0.4.4 is initialized
|
||||
from sqlparse.lexer import Lexer # type: ignore
|
||||
|
||||
if hasattr(Lexer, "get_default_instance"):
|
||||
Lexer.get_default_instance()
|
||||
|
||||
node = self._compile_code(node, manifest, extra_context)
|
||||
|
||||
node, _ = self._recursively_prepend_ctes(node, manifest, extra_context)
|
||||
|
||||
@@ -83,7 +83,7 @@ class Profile(HasCredentials):
|
||||
user_config: UserConfig,
|
||||
threads: int,
|
||||
credentials: Credentials,
|
||||
):
|
||||
) -> None:
|
||||
"""Explicitly defining `__init__` to work around bug in Python 3.9.7
|
||||
https://bugs.python.org/issue45081
|
||||
"""
|
||||
|
||||
@@ -16,8 +16,12 @@ import os
|
||||
|
||||
from dbt.flags import get_flags
|
||||
from dbt import deprecations
|
||||
from dbt.constants import DEPENDENCIES_FILE_NAME, PACKAGES_FILE_NAME
|
||||
from dbt.clients.system import path_exists, resolve_path_from_base, load_file_contents
|
||||
from dbt.constants import (
|
||||
DEPENDENCIES_FILE_NAME,
|
||||
PACKAGES_FILE_NAME,
|
||||
PACKAGE_LOCK_HASH_KEY,
|
||||
)
|
||||
from dbt.clients.system import path_exists, load_file_contents
|
||||
from dbt.clients.yaml_helper import load_yaml_text
|
||||
from dbt.contracts.connection import QueryComment
|
||||
from dbt.exceptions import (
|
||||
@@ -94,16 +98,17 @@ def _load_yaml(path):
|
||||
return load_yaml_text(contents)
|
||||
|
||||
|
||||
def package_and_project_data_from_root(project_root):
|
||||
package_filepath = resolve_path_from_base(PACKAGES_FILE_NAME, project_root)
|
||||
dependencies_filepath = resolve_path_from_base(DEPENDENCIES_FILE_NAME, project_root)
|
||||
def load_yml_dict(file_path):
|
||||
ret = {}
|
||||
if path_exists(file_path):
|
||||
ret = _load_yaml(file_path) or {}
|
||||
return ret
|
||||
|
||||
packages_yml_dict = {}
|
||||
dependencies_yml_dict = {}
|
||||
if path_exists(package_filepath):
|
||||
packages_yml_dict = _load_yaml(package_filepath) or {}
|
||||
if path_exists(dependencies_filepath):
|
||||
dependencies_yml_dict = _load_yaml(dependencies_filepath) or {}
|
||||
|
||||
def package_and_project_data_from_root(project_root):
|
||||
|
||||
packages_yml_dict = load_yml_dict(f"{project_root}/{PACKAGES_FILE_NAME}")
|
||||
dependencies_yml_dict = load_yml_dict(f"{project_root}/{DEPENDENCIES_FILE_NAME}")
|
||||
|
||||
if "packages" in packages_yml_dict and "packages" in dependencies_yml_dict:
|
||||
msg = "The 'packages' key cannot be specified in both packages.yml and dependencies.yml"
|
||||
@@ -127,6 +132,8 @@ def package_config_from_data(packages_data: Dict[str, Any]) -> PackageConfig:
|
||||
if not packages_data:
|
||||
packages_data = {"packages": []}
|
||||
|
||||
if PACKAGE_LOCK_HASH_KEY in packages_data:
|
||||
packages_data.pop(PACKAGE_LOCK_HASH_KEY)
|
||||
try:
|
||||
PackageConfig.validate(packages_data)
|
||||
packages = PackageConfig.from_dict(packages_data)
|
||||
@@ -425,9 +432,13 @@ class PartialProject(RenderComponents):
|
||||
snapshots: Dict[str, Any]
|
||||
sources: Dict[str, Any]
|
||||
tests: Dict[str, Any]
|
||||
unit_tests: Dict[str, Any]
|
||||
metrics: Dict[str, Any]
|
||||
semantic_models: Dict[str, Any]
|
||||
saved_queries: Dict[str, Any]
|
||||
exposures: Dict[str, Any]
|
||||
vars_value: VarProvider
|
||||
dbt_cloud: Dict[str, Any]
|
||||
|
||||
dispatch = cfg.dispatch
|
||||
models = cfg.models
|
||||
@@ -435,7 +446,10 @@ class PartialProject(RenderComponents):
|
||||
snapshots = cfg.snapshots
|
||||
sources = cfg.sources
|
||||
tests = cfg.tests
|
||||
unit_tests = cfg.unit_tests
|
||||
metrics = cfg.metrics
|
||||
semantic_models = cfg.semantic_models
|
||||
saved_queries = cfg.saved_queries
|
||||
exposures = cfg.exposures
|
||||
if cfg.vars is None:
|
||||
vars_dict: Dict[str, Any] = {}
|
||||
@@ -459,6 +473,8 @@ class PartialProject(RenderComponents):
|
||||
manifest_selectors = SelectorDict.parse_from_selectors_list(
|
||||
rendered.selectors_dict["selectors"]
|
||||
)
|
||||
dbt_cloud = cfg.dbt_cloud
|
||||
|
||||
project = Project(
|
||||
project_name=name,
|
||||
version=version,
|
||||
@@ -491,13 +507,17 @@ class PartialProject(RenderComponents):
|
||||
query_comment=query_comment,
|
||||
sources=sources,
|
||||
tests=tests,
|
||||
unit_tests=unit_tests,
|
||||
metrics=metrics,
|
||||
semantic_models=semantic_models,
|
||||
saved_queries=saved_queries,
|
||||
exposures=exposures,
|
||||
vars=vars_value,
|
||||
config_version=cfg.config_version,
|
||||
unrendered=unrendered,
|
||||
project_env_vars=project_env_vars,
|
||||
restrict_access=cfg.restrict_access,
|
||||
dbt_cloud=dbt_cloud,
|
||||
)
|
||||
# sanity check - this means an internal issue
|
||||
project.validate()
|
||||
@@ -541,6 +561,7 @@ class PartialProject(RenderComponents):
|
||||
packages_specified_path,
|
||||
) = package_and_project_data_from_root(project_root)
|
||||
selectors_dict = selector_data_from_root(project_root)
|
||||
|
||||
return cls.from_dicts(
|
||||
project_root=project_root,
|
||||
project_dict=project_dict,
|
||||
@@ -597,7 +618,10 @@ class Project:
|
||||
snapshots: Dict[str, Any]
|
||||
sources: Dict[str, Any]
|
||||
tests: Dict[str, Any]
|
||||
unit_tests: Dict[str, Any]
|
||||
metrics: Dict[str, Any]
|
||||
semantic_models: Dict[str, Any]
|
||||
saved_queries: Dict[str, Any]
|
||||
exposures: Dict[str, Any]
|
||||
vars: VarProvider
|
||||
dbt_version: List[VersionSpecifier]
|
||||
@@ -609,6 +633,7 @@ class Project:
|
||||
unrendered: RenderComponents
|
||||
project_env_vars: Dict[str, Any]
|
||||
restrict_access: bool
|
||||
dbt_cloud: Dict[str, Any]
|
||||
|
||||
@property
|
||||
def all_source_paths(self) -> List[str]:
|
||||
@@ -627,6 +652,13 @@ class Project:
|
||||
generic_test_paths.append(os.path.join(test_path, "generic"))
|
||||
return generic_test_paths
|
||||
|
||||
@property
|
||||
def fixture_paths(self):
|
||||
fixture_paths = []
|
||||
for test_path in self.test_paths:
|
||||
fixture_paths.append(os.path.join(test_path, "fixtures"))
|
||||
return fixture_paths
|
||||
|
||||
def __str__(self):
|
||||
cfg = self.to_project_config(with_packages=True)
|
||||
return str(cfg)
|
||||
@@ -672,12 +704,16 @@ class Project:
|
||||
"snapshots": self.snapshots,
|
||||
"sources": self.sources,
|
||||
"tests": self.tests,
|
||||
"unit-tests": self.unit_tests,
|
||||
"metrics": self.metrics,
|
||||
"semantic-models": self.semantic_models,
|
||||
"saved-queries": self.saved_queries,
|
||||
"exposures": self.exposures,
|
||||
"vars": self.vars.to_dict(),
|
||||
"require-dbt-version": [v.to_version_string() for v in self.dbt_version],
|
||||
"config-version": self.config_version,
|
||||
"restrict-access": self.restrict_access,
|
||||
"dbt-cloud": self.dbt_cloud,
|
||||
}
|
||||
)
|
||||
if self.query_comment:
|
||||
|
||||
@@ -74,7 +74,7 @@ def _list_if_none_or_string(value):
|
||||
|
||||
|
||||
class ProjectPostprocessor(Dict[Keypath, Callable[[Any], Any]]):
|
||||
def __init__(self):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
|
||||
self[("on-run-start",)] = _list_if_none_or_string
|
||||
|
||||
@@ -166,7 +166,10 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
|
||||
query_comment=project.query_comment,
|
||||
sources=project.sources,
|
||||
tests=project.tests,
|
||||
unit_tests=project.unit_tests,
|
||||
metrics=project.metrics,
|
||||
semantic_models=project.semantic_models,
|
||||
saved_queries=project.saved_queries,
|
||||
exposures=project.exposures,
|
||||
vars=project.vars,
|
||||
config_version=project.config_version,
|
||||
@@ -182,6 +185,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
|
||||
args=args,
|
||||
cli_vars=cli_vars,
|
||||
dependencies=dependencies,
|
||||
dbt_cloud=project.dbt_cloud,
|
||||
)
|
||||
|
||||
# Called by 'load_projects' in this class
|
||||
@@ -321,7 +325,10 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
|
||||
"snapshots": self._get_config_paths(self.snapshots),
|
||||
"sources": self._get_config_paths(self.sources),
|
||||
"tests": self._get_config_paths(self.tests),
|
||||
"unit_tests": self._get_config_paths(self.unit_tests),
|
||||
"metrics": self._get_config_paths(self.metrics),
|
||||
"semantic_models": self._get_config_paths(self.semantic_models),
|
||||
"saved_queries": self._get_config_paths(self.saved_queries),
|
||||
"exposures": self._get_config_paths(self.exposures),
|
||||
}
|
||||
|
||||
@@ -404,7 +411,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
|
||||
|
||||
|
||||
class UnsetCredentials(Credentials):
|
||||
def __init__(self):
|
||||
def __init__(self) -> None:
|
||||
super().__init__("", "")
|
||||
|
||||
@property
|
||||
|
||||
@@ -9,8 +9,12 @@ PIN_PACKAGE_URL = (
|
||||
"https://docs.getdbt.com/docs/package-management#section-specifying-package-versions"
|
||||
)
|
||||
|
||||
DBT_PROJECT_FILE_NAME = "dbt_project.yml"
|
||||
PACKAGES_FILE_NAME = "packages.yml"
|
||||
DEPENDENCIES_FILE_NAME = "dependencies.yml"
|
||||
PACKAGE_LOCK_FILE_NAME = "package-lock.yml"
|
||||
MANIFEST_FILE_NAME = "manifest.json"
|
||||
SEMANTIC_MANIFEST_FILE_NAME = "semantic_manifest.json"
|
||||
PARTIAL_PARSE_FILE_NAME = "partial_parse.msgpack"
|
||||
UNIT_TEST_MANIFEST_FILE_NAME = "unit_test_manifest.json"
|
||||
PACKAGE_LOCK_HASH_KEY = "sha1_hash"
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
from typing import Any, Dict, NoReturn, Optional, Mapping, Iterable, Set, List
|
||||
from typing import Any, Callable, Dict, NoReturn, Optional, Mapping, Iterable, Set, List
|
||||
import threading
|
||||
|
||||
from dbt.flags import get_flags
|
||||
@@ -86,33 +88,29 @@ def get_context_modules() -> Dict[str, Dict[str, Any]]:
|
||||
|
||||
|
||||
class ContextMember:
|
||||
def __init__(self, value, name=None):
|
||||
def __init__(self, value: Any, name: Optional[str] = None) -> None:
|
||||
self.name = name
|
||||
self.inner = value
|
||||
|
||||
def key(self, default):
|
||||
def key(self, default: str) -> str:
|
||||
if self.name is None:
|
||||
return default
|
||||
return self.name
|
||||
|
||||
|
||||
def contextmember(value):
|
||||
if isinstance(value, str):
|
||||
return lambda v: ContextMember(v, name=value)
|
||||
return ContextMember(value)
|
||||
def contextmember(value: Optional[str] = None) -> Callable:
|
||||
return lambda v: ContextMember(v, name=value)
|
||||
|
||||
|
||||
def contextproperty(value):
|
||||
if isinstance(value, str):
|
||||
return lambda v: ContextMember(property(v), name=value)
|
||||
return ContextMember(property(value))
|
||||
def contextproperty(value: Optional[str] = None) -> Callable:
|
||||
return lambda v: ContextMember(property(v), name=value)
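A minimal sketch of the decorator pattern this hunk settles on: `contextmember`/`contextproperty` become plain decorator factories, so the bare `@contextproperty` form stops working and every call site gains parentheses. The names below are simplified stand-ins, not the dbt internals:

```python
from typing import Any, Callable, Optional


class Member:
    # Simplified stand-in for ContextMember: a value plus an optional registration name.
    def __init__(self, value: Any, name: Optional[str] = None) -> None:
        self.name = name
        self.inner = value


def member(value: Optional[str] = None) -> Callable:
    # Always a factory: with no argument the attribute keeps its own name,
    # with a string it is registered under that name instead.
    return lambda v: Member(v, name=value)


class ExampleContext:
    @member()  # registered as "greeting"
    def greeting(self) -> str:
        return "hello"

    @member("model")  # registered as "model" despite the method name
    def ctx_model(self) -> dict:
        return {}
```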
|
||||
|
||||
|
||||
class ContextMeta(type):
|
||||
def __new__(mcls, name, bases, dct):
|
||||
context_members = {}
|
||||
context_attrs = {}
|
||||
new_dct = {}
|
||||
def __new__(mcls, name, bases, dct: Dict[str, Any]) -> ContextMeta:
|
||||
context_members: Dict[str, Any] = {}
|
||||
context_attrs: Dict[str, Any] = {}
|
||||
new_dct: Dict[str, Any] = {}
|
||||
|
||||
for base in bases:
|
||||
context_members.update(getattr(base, "_context_members_", {}))
|
||||
@@ -148,27 +146,28 @@ class Var:
|
||||
return self._cli_vars
|
||||
|
||||
@property
|
||||
def node_name(self):
|
||||
def node_name(self) -> str:
|
||||
if self._node is not None:
|
||||
return self._node.name
|
||||
else:
|
||||
return "<Configuration>"
|
||||
|
||||
def get_missing_var(self, var_name):
|
||||
raise RequiredVarNotFoundError(var_name, self._merged, self._node)
|
||||
def get_missing_var(self, var_name: str) -> NoReturn:
|
||||
# TODO function name implies a non exception resolution
|
||||
raise RequiredVarNotFoundError(var_name, dict(self._merged), self._node)
|
||||
|
||||
def has_var(self, var_name: str):
|
||||
def has_var(self, var_name: str) -> bool:
|
||||
return var_name in self._merged
|
||||
|
||||
def get_rendered_var(self, var_name):
|
||||
def get_rendered_var(self, var_name: str) -> Any:
|
||||
raw = self._merged[var_name]
|
||||
# if bool/int/float/etc are passed in, don't compile anything
|
||||
if not isinstance(raw, str):
|
||||
return raw
|
||||
|
||||
return get_rendered(raw, self._context)
|
||||
return get_rendered(raw, dict(self._context))
|
||||
|
||||
def __call__(self, var_name, default=_VAR_NOTSET):
|
||||
def __call__(self, var_name: str, default: Any = _VAR_NOTSET) -> Any:
|
||||
if self.has_var(var_name):
|
||||
return self.get_rendered_var(var_name)
|
||||
elif default is not self._VAR_NOTSET:
|
||||
@@ -178,13 +177,17 @@ class Var:
|
||||
|
||||
|
||||
class BaseContext(metaclass=ContextMeta):
|
||||
# subclass is TargetContext
|
||||
def __init__(self, cli_vars):
|
||||
self._ctx = {}
|
||||
self.cli_vars = cli_vars
|
||||
self.env_vars = {}
|
||||
# Set by ContextMeta
|
||||
_context_members_: Dict[str, Any]
|
||||
_context_attrs_: Dict[str, Any]
|
||||
|
||||
def generate_builtins(self):
|
||||
# subclass is TargetContext
|
||||
def __init__(self, cli_vars: Dict[str, Any]) -> None:
|
||||
self._ctx: Dict[str, Any] = {}
|
||||
self.cli_vars: Dict[str, Any] = cli_vars
|
||||
self.env_vars: Dict[str, Any] = {}
|
||||
|
||||
def generate_builtins(self) -> Dict[str, Any]:
|
||||
builtins: Dict[str, Any] = {}
|
||||
for key, value in self._context_members_.items():
|
||||
if hasattr(value, "__get__"):
|
||||
@@ -194,14 +197,14 @@ class BaseContext(metaclass=ContextMeta):
|
||||
return builtins
|
||||
|
||||
# no dbtClassMixin so this is not an actual override
|
||||
def to_dict(self):
|
||||
def to_dict(self) -> Dict[str, Any]:
|
||||
self._ctx["context"] = self._ctx
|
||||
builtins = self.generate_builtins()
|
||||
self._ctx["builtins"] = builtins
|
||||
self._ctx.update(builtins)
|
||||
return self._ctx
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def dbt_version(self) -> str:
|
||||
"""The `dbt_version` variable returns the installed version of dbt that
|
||||
is currently running. It can be used for debugging or auditing
|
||||
@@ -221,7 +224,7 @@ class BaseContext(metaclass=ContextMeta):
|
||||
"""
|
||||
return dbt_version
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def var(self) -> Var:
|
||||
"""Variables can be passed from your `dbt_project.yml` file into models
|
||||
during compilation. These variables are useful for configuring packages
|
||||
@@ -290,7 +293,7 @@ class BaseContext(metaclass=ContextMeta):
|
||||
"""
|
||||
return Var(self._ctx, self.cli_vars)
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
def env_var(self, var: str, default: Optional[str] = None) -> str:
|
||||
"""The env_var() function. Return the environment variable named 'var'.
|
||||
If there is no such environment variable set, return the default.
|
||||
@@ -318,7 +321,7 @@ class BaseContext(metaclass=ContextMeta):
|
||||
|
||||
if os.environ.get("DBT_MACRO_DEBUGGING"):
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
@staticmethod
|
||||
def debug():
|
||||
"""Enter a debugger at this line in the compiled jinja code."""
|
||||
@@ -357,7 +360,7 @@ class BaseContext(metaclass=ContextMeta):
|
||||
"""
|
||||
raise MacroReturn(data)
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
@staticmethod
|
||||
def fromjson(string: str, default: Any = None) -> Any:
|
||||
"""The `fromjson` context method can be used to deserialize a json
|
||||
@@ -378,7 +381,7 @@ class BaseContext(metaclass=ContextMeta):
|
||||
except ValueError:
|
||||
return default
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
@staticmethod
|
||||
def tojson(value: Any, default: Any = None, sort_keys: bool = False) -> Any:
|
||||
"""The `tojson` context method can be used to serialize a Python
|
||||
@@ -401,7 +404,7 @@ class BaseContext(metaclass=ContextMeta):
|
||||
except ValueError:
|
||||
return default
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
@staticmethod
|
||||
def fromyaml(value: str, default: Any = None) -> Any:
|
||||
"""The fromyaml context method can be used to deserialize a yaml string
|
||||
@@ -432,7 +435,7 @@ class BaseContext(metaclass=ContextMeta):
|
||||
|
||||
# safe_dump defaults to sort_keys=True, but we act like json.dumps (the
|
||||
# opposite)
|
||||
@contextmember
|
||||
@contextmember()
|
||||
@staticmethod
|
||||
def toyaml(
|
||||
value: Any, default: Optional[str] = None, sort_keys: bool = False
|
||||
@@ -477,7 +480,7 @@ class BaseContext(metaclass=ContextMeta):
|
||||
except TypeError:
|
||||
return default
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
@staticmethod
|
||||
def set_strict(value: Iterable[Any]) -> Set[Any]:
|
||||
"""The `set_strict` context method can be used to convert any iterable
|
||||
@@ -519,7 +522,7 @@ class BaseContext(metaclass=ContextMeta):
|
||||
except TypeError:
|
||||
return default
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
@staticmethod
|
||||
def zip_strict(*args: Iterable[Any]) -> Iterable[Any]:
|
||||
"""The `zip_strict` context method can be used to used to return
|
||||
@@ -541,7 +544,7 @@ class BaseContext(metaclass=ContextMeta):
|
||||
except TypeError as e:
|
||||
raise ZipStrictWrongTypeError(e)
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
@staticmethod
|
||||
def log(msg: str, info: bool = False) -> str:
|
||||
"""Logs a line to either the log file or stdout.
|
||||
@@ -562,7 +565,7 @@ class BaseContext(metaclass=ContextMeta):
|
||||
fire_event(JinjaLogDebug(msg=msg, node_info=get_node_info()))
|
||||
return ""
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def run_started_at(self) -> Optional[datetime.datetime]:
|
||||
"""`run_started_at` outputs the timestamp that this run started, e.g.
|
||||
`2017-04-21 01:23:45.678`. The `run_started_at` variable is a Python
|
||||
@@ -590,19 +593,19 @@ class BaseContext(metaclass=ContextMeta):
|
||||
else:
|
||||
return None
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def invocation_id(self) -> Optional[str]:
|
||||
"""invocation_id outputs a UUID generated for this dbt run (useful for
|
||||
auditing)
|
||||
"""
|
||||
return get_invocation_id()
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def thread_id(self) -> str:
|
||||
"""thread_id outputs an ID for the current thread (useful for auditing)"""
|
||||
return threading.current_thread().name
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def modules(self) -> Dict[str, Any]:
|
||||
"""The `modules` variable in the Jinja context contains useful Python
|
||||
modules for operating on data.
|
||||
@@ -627,7 +630,7 @@ class BaseContext(metaclass=ContextMeta):
|
||||
""" # noqa
|
||||
return get_context_modules()
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def flags(self) -> Any:
|
||||
"""The `flags` variable contains true/false values for flags provided
|
||||
on the command line.
|
||||
@@ -644,7 +647,7 @@ class BaseContext(metaclass=ContextMeta):
|
||||
"""
|
||||
return flags_module.get_flag_obj()
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
@staticmethod
|
||||
def print(msg: str) -> str:
|
||||
"""Prints a line to stdout.
|
||||
@@ -662,7 +665,7 @@ class BaseContext(metaclass=ContextMeta):
|
||||
print(msg)
|
||||
return ""
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
@staticmethod
|
||||
def diff_of_two_dicts(
|
||||
dict_a: Dict[str, List[str]], dict_b: Dict[str, List[str]]
|
||||
@@ -691,7 +694,7 @@ class BaseContext(metaclass=ContextMeta):
|
||||
dict_diff.update({k: dict_a[k]})
|
||||
return dict_diff
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
@staticmethod
|
||||
def local_md5(value: str) -> str:
|
||||
"""Calculates an MD5 hash of the given string.
|
||||
|
||||
@@ -19,7 +19,7 @@ class ConfiguredContext(TargetContext):
|
||||
super().__init__(config.to_target_dict(), config.cli_vars)
|
||||
self.config = config
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def project_name(self) -> str:
|
||||
return self.config.project_name
|
||||
|
||||
@@ -80,11 +80,11 @@ class SchemaYamlContext(ConfiguredContext):
|
||||
self._project_name = project_name
|
||||
self.schema_yaml_vars = schema_yaml_vars
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def var(self) -> ConfiguredVar:
|
||||
return ConfiguredVar(self._ctx, self.config, self._project_name)
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
def env_var(self, var: str, default: Optional[str] = None) -> str:
|
||||
return_value = None
|
||||
if var.startswith(SECRET_ENV_PREFIX):
|
||||
@@ -113,7 +113,7 @@ class MacroResolvingContext(ConfiguredContext):
|
||||
def __init__(self, config):
|
||||
super().__init__(config)
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def var(self) -> ConfiguredVar:
|
||||
return ConfiguredVar(self._ctx, self.config, self.config.project_name)
|
||||
|
||||
|
||||
@@ -45,8 +45,14 @@ class UnrenderedConfig(ConfigSource):
|
||||
model_configs = unrendered.get("tests")
|
||||
elif resource_type == NodeType.Metric:
|
||||
model_configs = unrendered.get("metrics")
|
||||
elif resource_type == NodeType.SemanticModel:
|
||||
model_configs = unrendered.get("semantic_models")
|
||||
elif resource_type == NodeType.SavedQuery:
|
||||
model_configs = unrendered.get("saved_queries")
|
||||
elif resource_type == NodeType.Exposure:
|
||||
model_configs = unrendered.get("exposures")
|
||||
elif resource_type == NodeType.Unit:
|
||||
model_configs = unrendered.get("unit_tests")
|
||||
else:
|
||||
model_configs = unrendered.get("models")
|
||||
if model_configs is None:
|
||||
@@ -70,8 +76,14 @@ class RenderedConfig(ConfigSource):
|
||||
model_configs = self.project.tests
|
||||
elif resource_type == NodeType.Metric:
|
||||
model_configs = self.project.metrics
|
||||
elif resource_type == NodeType.SemanticModel:
|
||||
model_configs = self.project.semantic_models
|
||||
elif resource_type == NodeType.SavedQuery:
|
||||
model_configs = self.project.saved_queries
|
||||
elif resource_type == NodeType.Exposure:
|
||||
model_configs = self.project.exposures
|
||||
elif resource_type == NodeType.Unit:
|
||||
model_configs = self.project.unit_tests
|
||||
else:
|
||||
model_configs = self.project.models
|
||||
return model_configs
|
||||
@@ -189,9 +201,21 @@ class ContextConfigGenerator(BaseContextConfigGenerator[C]):
|
||||
|
||||
def _update_from_config(self, result: C, partial: Dict[str, Any], validate: bool = False) -> C:
|
||||
translated = self._active_project.credentials.translate_aliases(partial)
|
||||
return result.update_from(
|
||||
translated = self.translate_hook_names(translated)
|
||||
updated = result.update_from(
|
||||
translated, self._active_project.credentials.type, validate=validate
|
||||
)
|
||||
return updated
|
||||
|
||||
def translate_hook_names(self, project_dict):
|
||||
# This is a kind of kludge because the fix for #6411 specifically allowed misspelling
|
||||
# the hook field names in dbt_project.yml, which only ever worked because we didn't
|
||||
# run validate on the dbt_project configs.
|
||||
if "pre_hook" in project_dict:
|
||||
project_dict["pre-hook"] = project_dict.pop("pre_hook")
|
||||
if "post_hook" in project_dict:
|
||||
project_dict["post-hook"] = project_dict.pop("post_hook")
|
||||
return project_dict
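The practical effect on a `dbt_project.yml`-style dict, sketched standalone (the config values are made up for illustration):

```python
def translate_hook_names(project_dict: dict) -> dict:
    # Normalize the underscore spellings that the #6411 fix tolerated into the
    # hyphenated keys that config validation expects.
    if "pre_hook" in project_dict:
        project_dict["pre-hook"] = project_dict.pop("pre_hook")
    if "post_hook" in project_dict:
        project_dict["post-hook"] = project_dict.pop("post_hook")
    return project_dict


cfg = {"materialized": "view", "pre_hook": ["{{ log('starting') }}"]}
assert translate_hook_names(cfg) == {
    "materialized": "view",
    "pre-hook": ["{{ log('starting') }}"],
}
```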
|
||||
|
||||
def calculate_node_config_dict(
|
||||
self,
|
||||
|
||||
@@ -24,7 +24,7 @@ class DocsRuntimeContext(SchemaYamlContext):
|
||||
self.node = node
|
||||
self.manifest = manifest
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
def doc(self, *args: str) -> str:
|
||||
"""The `doc` function is used to reference docs blocks in schema.yml
|
||||
files. It is analogous to the `ref` function. For more information,
|
||||
|
||||
@@ -2,7 +2,6 @@ import functools
|
||||
from typing import NoReturn
|
||||
|
||||
from dbt.events.functions import warn_or_error
|
||||
from dbt.events.helpers import env_secrets, scrub_secrets
|
||||
from dbt.events.types import JinjaLogWarning
|
||||
|
||||
from dbt.exceptions import (
|
||||
@@ -26,6 +25,8 @@ from dbt.exceptions import (
|
||||
ContractError,
|
||||
ColumnTypeMissingError,
|
||||
FailFastError,
|
||||
scrub_secrets,
|
||||
env_secrets,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -40,7 +40,7 @@ class MacroResolver:
|
||||
self._build_internal_packages_namespace()
|
||||
self._build_macros_by_name()
|
||||
|
||||
def _build_internal_packages_namespace(self):
|
||||
def _build_internal_packages_namespace(self) -> None:
|
||||
# Iterate in reverse-order and overwrite: the packages that are first
|
||||
# in the list are the ones we want to "win".
|
||||
self.internal_packages_namespace: MacroNamespace = {}
|
||||
@@ -56,7 +56,7 @@ class MacroResolver:
|
||||
# root package namespace
|
||||
# non-internal packages (that aren't local or root)
|
||||
# dbt internal packages
|
||||
def _build_macros_by_name(self):
|
||||
def _build_macros_by_name(self) -> None:
|
||||
macros_by_name = {}
|
||||
|
||||
# all internal packages (already in the right order)
|
||||
@@ -78,7 +78,7 @@ class MacroResolver:
|
||||
self,
|
||||
package_namespaces: Dict[str, MacroNamespace],
|
||||
macro: Macro,
|
||||
):
|
||||
) -> None:
|
||||
if macro.package_name in package_namespaces:
|
||||
namespace = package_namespaces[macro.package_name]
|
||||
else:
|
||||
@@ -89,7 +89,7 @@ class MacroResolver:
|
||||
raise DuplicateMacroNameError(macro, macro, macro.package_name)
|
||||
package_namespaces[macro.package_name][macro.name] = macro
|
||||
|
||||
def add_macro(self, macro: Macro):
|
||||
def add_macro(self, macro: Macro) -> None:
|
||||
macro_name: str = macro.name
|
||||
|
||||
# internal macros (from plugins) will be processed separately from
|
||||
@@ -103,11 +103,11 @@ class MacroResolver:
|
||||
if macro.package_name == self.root_project_name:
|
||||
self.root_package_macros[macro_name] = macro
|
||||
|
||||
def add_macros(self):
|
||||
def add_macros(self) -> None:
|
||||
for macro in self.macros.values():
|
||||
self.add_macro(macro)
|
||||
|
||||
def get_macro(self, local_package, macro_name):
|
||||
def get_macro(self, local_package, macro_name) -> Optional[Macro]:
|
||||
local_package_macros = {}
|
||||
# If the macro is explicitly prefixed with an internal namespace
|
||||
# (e.g. 'dbt.some_macro'), look there first
|
||||
@@ -125,7 +125,7 @@ class MacroResolver:
|
||||
return self.macros_by_name[macro_name]
|
||||
return None
|
||||
|
||||
def get_macro_id(self, local_package, macro_name):
|
||||
def get_macro_id(self, local_package, macro_name) -> Optional[str]:
|
||||
macro = self.get_macro(local_package, macro_name)
|
||||
if macro is None:
|
||||
return None
|
||||
|
||||
@@ -67,7 +67,7 @@ class ManifestContext(ConfiguredContext):
|
||||
dct.update(self.namespace)
|
||||
return dct
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def context_macro_stack(self):
|
||||
return self.macro_stack
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import abc
|
||||
from copy import deepcopy
|
||||
import os
|
||||
from typing import (
|
||||
Callable,
|
||||
@@ -17,7 +18,7 @@ from typing_extensions import Protocol
|
||||
from dbt.adapters.base.column import Column
|
||||
from dbt.adapters.factory import get_adapter, get_adapter_package_names, get_adapter_type_names
|
||||
from dbt.clients import agate_helper
|
||||
from dbt.clients.jinja import get_rendered, MacroGenerator, MacroStack
|
||||
from dbt.clients.jinja import get_rendered, MacroGenerator, MacroStack, UnitTestMacroGenerator
|
||||
from dbt.config import RuntimeConfig, Project
|
||||
from dbt.constants import SECRET_ENV_PREFIX, DEFAULT_ENV_PLACEHOLDER
|
||||
from dbt.context.base import contextmember, contextproperty, Var
|
||||
@@ -39,6 +40,7 @@ from dbt.contracts.graph.nodes import (
|
||||
RefArgs,
|
||||
AccessType,
|
||||
SemanticModel,
|
||||
UnitTestNode,
|
||||
)
|
||||
from dbt.contracts.graph.metrics import MetricReference, ResolvedMetricReference
|
||||
from dbt.contracts.graph.unparsed import NodeVersion
|
||||
@@ -566,6 +568,17 @@ class OperationRefResolver(RuntimeRefResolver):
|
||||
return super().create_relation(target_model)
|
||||
|
||||
|
||||
class RuntimeUnitTestRefResolver(RuntimeRefResolver):
|
||||
def resolve(
|
||||
self,
|
||||
target_name: str,
|
||||
target_package: Optional[str] = None,
|
||||
target_version: Optional[NodeVersion] = None,
|
||||
) -> RelationProxy:
|
||||
target_name = f"{self.model.name}__{target_name}"
|
||||
return super().resolve(target_name, target_package, target_version)
|
||||
|
||||
|
||||
# `source` implementations
|
||||
class ParseSourceResolver(BaseSourceResolver):
|
||||
def resolve(self, source_name: str, table_name: str):
|
||||
@@ -593,6 +606,29 @@ class RuntimeSourceResolver(BaseSourceResolver):
|
||||
return self.Relation.create_from_source(target_source)
|
||||
|
||||
|
||||
class RuntimeUnitTestSourceResolver(BaseSourceResolver):
|
||||
def resolve(self, source_name: str, table_name: str):
|
||||
target_source = self.manifest.resolve_source(
|
||||
source_name,
|
||||
table_name,
|
||||
self.current_project,
|
||||
self.model.package_name,
|
||||
)
|
||||
if target_source is None or isinstance(target_source, Disabled):
|
||||
raise TargetNotFoundError(
|
||||
node=self.model,
|
||||
target_name=f"{source_name}.{table_name}",
|
||||
target_kind="source",
|
||||
disabled=(isinstance(target_source, Disabled)),
|
||||
)
|
||||
# For unit tests, this isn't a "real" source, it's a ModelNode taking
|
||||
# the place of a source. We don't really need to return the relation here,
|
||||
# we just need to set_cte, but skipping it confuses typing. We *do* need
|
||||
# the relation in the "this" property.
|
||||
self.model.set_cte(target_source.unique_id, None)
|
||||
return self.Relation.create_ephemeral_from_node(self.config, target_source)
|
||||
|
||||
|
||||
# `metric` implementations
|
||||
class ParseMetricResolver(BaseMetricResolver):
|
||||
def resolve(self, name: str, package: Optional[str] = None) -> MetricReference:
|
||||
@@ -618,7 +654,7 @@ class RuntimeMetricResolver(BaseMetricResolver):
|
||||
target_package=target_package,
|
||||
)
|
||||
|
||||
return ResolvedMetricReference(target_metric, self.manifest, self.Relation)
|
||||
return ResolvedMetricReference(target_metric, self.manifest)
|
||||
|
||||
|
||||
# `var` implementations.
|
||||
@@ -670,6 +706,22 @@ class RuntimeVar(ModelConfiguredVar):
|
||||
pass
|
||||
|
||||
|
||||
class UnitTestVar(RuntimeVar):
|
||||
def __init__(
|
||||
self,
|
||||
context: Dict[str, Any],
|
||||
config: RuntimeConfig,
|
||||
node: Resource,
|
||||
) -> None:
|
||||
config_copy = None
|
||||
assert isinstance(node, UnitTestNode)
|
||||
if node.overrides and node.overrides.vars:
|
||||
config_copy = deepcopy(config)
|
||||
config_copy.cli_vars.update(node.overrides.vars)
|
||||
|
||||
super().__init__(context, config_copy or config, node=node)
|
||||
|
||||
|
||||
# Providers
|
||||
class Provider(Protocol):
|
||||
execute: bool
|
||||
@@ -711,6 +763,16 @@ class RuntimeProvider(Provider):
|
||||
metric = RuntimeMetricResolver
|
||||
|
||||
|
||||
class RuntimeUnitTestProvider(Provider):
|
||||
execute = True
|
||||
Config = RuntimeConfigObject
|
||||
DatabaseWrapper = RuntimeDatabaseWrapper
|
||||
Var = UnitTestVar
|
||||
ref = RuntimeUnitTestRefResolver
|
||||
source = RuntimeUnitTestSourceResolver
|
||||
metric = RuntimeMetricResolver
|
||||
|
||||
|
||||
class OperationProvider(RuntimeProvider):
|
||||
ref = OperationRefResolver
|
||||
|
||||
@@ -754,19 +816,19 @@ class ProviderContext(ManifestContext):
|
||||
self.model,
|
||||
)
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def dbt_metadata_envs(self) -> Dict[str, str]:
|
||||
return get_metadata_vars()
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def invocation_args_dict(self):
|
||||
return args_to_dict(self.config.args)
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def _sql_results(self) -> Dict[str, Optional[AttrDict]]:
|
||||
return self.sql_results
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
def load_result(self, name: str) -> Optional[AttrDict]:
|
||||
if name in self.sql_results:
|
||||
# handle the special case of "main" macro
|
||||
@@ -787,7 +849,7 @@ class ProviderContext(ManifestContext):
|
||||
# Handle trying to load a result that was never stored
|
||||
return None
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
def store_result(
|
||||
self, name: str, response: Any, agate_table: Optional[agate.Table] = None
|
||||
) -> str:
|
||||
@@ -803,7 +865,7 @@ class ProviderContext(ManifestContext):
|
||||
)
|
||||
return ""
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
def store_raw_result(
|
||||
self,
|
||||
name: str,
|
||||
@@ -815,7 +877,7 @@ class ProviderContext(ManifestContext):
|
||||
response = AdapterResponse(_message=message, code=code, rows_affected=rows_affected)
|
||||
return self.store_result(name, response, agate_table)
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def validation(self):
|
||||
def validate_any(*args) -> Callable[[T], None]:
|
||||
def inner(value: T) -> None:
|
||||
@@ -836,7 +898,7 @@ class ProviderContext(ManifestContext):
|
||||
}
|
||||
)
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
def write(self, payload: str) -> str:
|
||||
# macros/source defs aren't 'writeable'.
|
||||
if isinstance(self.model, (Macro, SourceDefinition)):
|
||||
@@ -845,11 +907,11 @@ class ProviderContext(ManifestContext):
|
||||
self.model.write_node(self.config.project_root, self.model.build_path, payload)
|
||||
return ""
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
def render(self, string: str) -> str:
|
||||
return get_rendered(string, self._ctx, self.model)
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
def try_or_compiler_error(
|
||||
self, message_if_exception: str, func: Callable, *args, **kwargs
|
||||
) -> Any:
|
||||
@@ -858,21 +920,32 @@ class ProviderContext(ManifestContext):
|
||||
except Exception:
|
||||
raise CompilationError(message_if_exception, self.model)
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
def load_agate_table(self) -> agate.Table:
|
||||
if not isinstance(self.model, SeedNode):
|
||||
raise LoadAgateTableNotSeedError(self.model.resource_type, node=self.model)
|
||||
assert self.model.root_path
|
||||
path = os.path.join(self.model.root_path, self.model.original_file_path)
|
||||
|
||||
# include package_path for seeds defined in packages
|
||||
package_path = (
|
||||
os.path.join(self.config.packages_install_path, self.model.package_name)
|
||||
if self.model.package_name != self.config.project_name
|
||||
else "."
|
||||
)
|
||||
path = os.path.join(self.config.project_root, package_path, self.model.original_file_path)
|
||||
if not os.path.exists(path):
|
||||
assert self.model.root_path
|
||||
path = os.path.join(self.model.root_path, self.model.original_file_path)
|
||||
|
||||
column_types = self.model.config.column_types
|
||||
delimiter = self.model.config.delimiter
|
||||
try:
|
||||
table = agate_helper.from_csv(path, text_columns=column_types)
|
||||
table = agate_helper.from_csv(path, text_columns=column_types, delimiter=delimiter)
|
||||
except ValueError as e:
|
||||
raise LoadAgateTableValueError(e, node=self.model)
|
||||
table.original_abspath = os.path.abspath(path)
|
||||
return table
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def ref(self) -> Callable:
|
||||
"""The most important function in dbt is `ref()`; it's impossible to
|
||||
build even moderately complex models without it. `ref()` is how you
|
||||
@@ -913,11 +986,11 @@ class ProviderContext(ManifestContext):
|
||||
"""
|
||||
return self.provider.ref(self.db_wrapper, self.model, self.config, self.manifest)
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def source(self) -> Callable:
|
||||
return self.provider.source(self.db_wrapper, self.model, self.config, self.manifest)
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def metric(self) -> Callable:
|
||||
return self.provider.metric(self.db_wrapper, self.model, self.config, self.manifest)
|
||||
|
||||
@@ -978,7 +1051,7 @@ class ProviderContext(ManifestContext):
|
||||
""" # noqa
|
||||
return self.provider.Config(self.model, self.context_config)
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def execute(self) -> bool:
|
||||
"""`execute` is a Jinja variable that returns True when dbt is in
|
||||
"execute" mode.
|
||||
@@ -1039,7 +1112,7 @@ class ProviderContext(ManifestContext):
|
||||
""" # noqa
|
||||
return self.provider.execute
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def exceptions(self) -> Dict[str, Any]:
|
||||
"""The exceptions namespace can be used to raise warnings and errors in
|
||||
dbt userspace.
|
||||
@@ -1077,15 +1150,15 @@ class ProviderContext(ManifestContext):
|
||||
""" # noqa
|
||||
return wrapped_exports(self.model)
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def database(self) -> str:
|
||||
return self.config.credentials.database
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def schema(self) -> str:
|
||||
return self.config.credentials.schema
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def var(self) -> ModelConfiguredVar:
|
||||
return self.provider.Var(
|
||||
context=self._ctx,
|
||||
@@ -1102,22 +1175,22 @@ class ProviderContext(ManifestContext):
|
||||
"""
|
||||
return self.db_wrapper
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def api(self) -> Dict[str, Any]:
|
||||
return {
|
||||
"Relation": self.db_wrapper.Relation,
|
||||
"Column": self.adapter.Column,
|
||||
}
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def column(self) -> Type[Column]:
|
||||
return self.adapter.Column
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def env(self) -> Dict[str, Any]:
|
||||
return self.target
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def graph(self) -> Dict[str, Any]:
|
||||
"""The `graph` context variable contains information about the nodes in
|
||||
your dbt project. Models, sources, tests, and snapshots are all
|
||||
@@ -1226,30 +1299,42 @@ class ProviderContext(ManifestContext):
|
||||
|
||||
@contextproperty("model")
|
||||
def ctx_model(self) -> Dict[str, Any]:
|
||||
ret = self.model.to_dict(omit_none=True)
|
||||
model_dct = self.model.to_dict(omit_none=True)
|
||||
# Maintain direct use of compiled_sql
|
||||
# TODO add deprecation logic [CT-934]
|
||||
if "compiled_code" in ret:
|
||||
ret["compiled_sql"] = ret["compiled_code"]
|
||||
return ret
|
||||
if "compiled_code" in model_dct:
|
||||
model_dct["compiled_sql"] = model_dct["compiled_code"]
|
||||
|
||||
@contextproperty
|
||||
if (
|
||||
hasattr(self.model, "contract")
|
||||
and self.model.contract.alias_types is True
|
||||
and "columns" in model_dct
|
||||
):
|
||||
for column in model_dct["columns"].values():
|
||||
if "data_type" in column:
|
||||
orig_data_type = column["data_type"]
|
||||
# translate data_type to value in Column.TYPE_LABELS
|
||||
new_data_type = self.adapter.Column.translate_type(orig_data_type)
|
||||
column["data_type"] = new_data_type
|
||||
return model_dct
|
||||
|
||||
@contextproperty()
|
||||
def pre_hooks(self) -> Optional[List[Dict[str, Any]]]:
|
||||
return None
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def post_hooks(self) -> Optional[List[Dict[str, Any]]]:
|
||||
return None
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def sql(self) -> Optional[str]:
|
||||
return None
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def sql_now(self) -> str:
|
||||
return self.adapter.date_function()
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
def adapter_macro(self, name: str, *args, **kwargs):
|
||||
"""This was deprecated in v0.18 in favor of adapter.dispatch"""
|
||||
msg = (
|
||||
@@ -1261,7 +1346,7 @@ class ProviderContext(ManifestContext):
|
||||
)
|
||||
raise CompilationError(msg)
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
def env_var(self, var: str, default: Optional[str] = None) -> str:
|
||||
"""The env_var() function. Return the environment variable named 'var'.
|
||||
If there is no such environment variable set, return the default.
|
||||
@@ -1305,7 +1390,7 @@ class ProviderContext(ManifestContext):
|
||||
else:
|
||||
raise EnvVarMissingError(var)
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def selected_resources(self) -> List[str]:
|
||||
"""The `selected_resources` variable contains a list of the resources
|
||||
selected based on the parameters provided to the dbt command.
|
||||
@@ -1314,7 +1399,7 @@ class ProviderContext(ManifestContext):
|
||||
"""
|
||||
return selected_resources.SELECTED_RESOURCES
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
def submit_python_job(self, parsed_model: Dict, compiled_code: str) -> AdapterResponse:
|
||||
# Check macro_stack and that the unique id is for a materialization macro
|
||||
if not (
|
||||
@@ -1357,25 +1442,25 @@ class MacroContext(ProviderContext):
|
||||
class ModelContext(ProviderContext):
|
||||
model: ManifestNode
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def pre_hooks(self) -> List[Dict[str, Any]]:
|
||||
if self.model.resource_type in [NodeType.Source, NodeType.Test]:
|
||||
if self.model.resource_type in [NodeType.Source, NodeType.Test, NodeType.Unit]:
|
||||
return []
|
||||
# TODO CT-211
|
||||
return [
|
||||
h.to_dict(omit_none=True) for h in self.model.config.pre_hook # type: ignore[union-attr] # noqa
|
||||
]
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def post_hooks(self) -> List[Dict[str, Any]]:
|
||||
if self.model.resource_type in [NodeType.Source, NodeType.Test]:
|
||||
if self.model.resource_type in [NodeType.Source, NodeType.Test, NodeType.Unit]:
|
||||
return []
|
||||
# TODO CT-211
|
||||
return [
|
||||
h.to_dict(omit_none=True) for h in self.model.config.post_hook # type: ignore[union-attr] # noqa
|
||||
]
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def sql(self) -> Optional[str]:
|
||||
# only doing this in sql models for backward compatibility
|
||||
if self.model.language == ModelLanguage.sql: # type: ignore[union-attr]
|
||||
@@ -1392,7 +1477,7 @@ class ModelContext(ProviderContext):
|
||||
else:
|
||||
return None
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def compiled_code(self) -> Optional[str]:
|
||||
if getattr(self.model, "defer_relation", None):
|
||||
# TODO https://github.com/dbt-labs/dbt-core/issues/7976
|
||||
@@ -1403,15 +1488,15 @@ class ModelContext(ProviderContext):
|
||||
else:
|
||||
return None
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def database(self) -> str:
|
||||
return getattr(self.model, "database", self.config.credentials.database)
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def schema(self) -> str:
|
||||
return getattr(self.model, "schema", self.config.credentials.schema)
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def this(self) -> Optional[RelationProxy]:
|
||||
"""`this` makes available schema information about the currently
|
||||
executing model. It is useful in any context in which you need to
|
||||
@@ -1446,7 +1531,7 @@ class ModelContext(ProviderContext):
|
||||
return None
|
||||
return self.db_wrapper.Relation.create_from(self.config, self.model)
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def defer_relation(self) -> Optional[RelationProxy]:
|
||||
"""
|
||||
For commands which add information about this node's corresponding
|
||||
@@ -1461,6 +1546,33 @@ class ModelContext(ProviderContext):
|
||||
return None
|
||||
|
||||
|
||||
class UnitTestContext(ModelContext):
|
||||
model: UnitTestNode
|
||||
|
||||
@contextmember()
|
||||
def env_var(self, var: str, default: Optional[str] = None) -> str:
|
||||
"""The env_var() function. Return the overriden unit test environment variable named 'var'.
|
||||
|
||||
If there is no unit test override, return the environment variable named 'var'.
|
||||
|
||||
If there is no such environment variable set, return the default.
|
||||
|
||||
If the default is None, raise an exception for an undefined variable.
|
||||
"""
|
||||
if self.model.overrides and var in self.model.overrides.env_vars:
|
||||
return self.model.overrides.env_vars[var]
|
||||
else:
|
||||
return super().env_var(var, default)
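Precedence of the override, shown as a stripped-down standalone function (the dataclass and names are simplified stand-ins for `UnitTestNode.overrides`, not the dbt types):

```python
import os
from dataclasses import dataclass, field
from typing import Dict, Optional


@dataclass
class Overrides:
    env_vars: Dict[str, str] = field(default_factory=dict)


def resolve_env_var(overrides: Optional[Overrides], var: str, default: Optional[str] = None) -> str:
    # Unit-test overrides win, then the real environment, then the default.
    if overrides and var in overrides.env_vars:
        return overrides.env_vars[var]
    value = os.environ.get(var, default)
    if value is None:
        raise KeyError(f"Env var required but not provided: '{var}'")
    return value


assert resolve_env_var(Overrides(env_vars={"MY_VAR": "from_test"}), "MY_VAR") == "from_test"
```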
|
||||
|
||||
@contextproperty()
|
||||
def this(self) -> Optional[str]:
|
||||
if self.model.this_input_node_unique_id:
|
||||
this_node = self.manifest.expect(self.model.this_input_node_unique_id)
|
||||
self.model.set_cte(this_node.unique_id, None) # type: ignore
|
||||
return self.adapter.Relation.add_ephemeral_prefix(this_node.name)
|
||||
return None
|
||||
|
||||
|
||||
# This is called by '_context_for', used in 'render_with_context'
|
||||
def generate_parser_model_context(
|
||||
model: ManifestNode,
|
||||
@@ -1505,6 +1617,24 @@ def generate_runtime_macro_context(
|
||||
return ctx.to_dict()
|
||||
|
||||
|
||||
def generate_runtime_unit_test_context(
|
||||
unit_test: UnitTestNode,
|
||||
config: RuntimeConfig,
|
||||
manifest: Manifest,
|
||||
) -> Dict[str, Any]:
|
||||
ctx = UnitTestContext(unit_test, config, manifest, RuntimeUnitTestProvider(), None)
|
||||
ctx_dict = ctx.to_dict()
|
||||
|
||||
if unit_test.overrides and unit_test.overrides.macros:
|
||||
for macro_name, macro_value in unit_test.overrides.macros.items():
|
||||
context_value = ctx_dict.get(macro_name)
|
||||
if isinstance(context_value, MacroGenerator):
|
||||
ctx_dict[macro_name] = UnitTestMacroGenerator(context_value, macro_value)
|
||||
else:
|
||||
ctx_dict[macro_name] = macro_value
|
||||
return ctx_dict
|
||||
|
||||
|
||||
class ExposureRefResolver(BaseResolver):
|
||||
def __call__(self, *args, **kwargs) -> str:
|
||||
package = None
|
||||
@@ -1660,7 +1790,7 @@ class TestContext(ProviderContext):
|
||||
)
|
||||
self.namespace = macro_namespace
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
def env_var(self, var: str, default: Optional[str] = None) -> str:
|
||||
return_value = None
|
||||
if var.startswith(SECRET_ENV_PREFIX):
|
||||
|
||||
@@ -14,7 +14,7 @@ class SecretContext(BaseContext):
|
||||
"""This context is used in profiles.yml + packages.yml. It can render secret
|
||||
env vars that aren't usable elsewhere"""
|
||||
|
||||
@contextmember
|
||||
@contextmember()
|
||||
def env_var(self, var: str, default: Optional[str] = None) -> str:
|
||||
"""The env_var() function. Return the environment variable named 'var'.
|
||||
If there is no such environment variable set, return the default.
|
||||
|
||||
@@ -9,7 +9,7 @@ class TargetContext(BaseContext):
|
||||
super().__init__(cli_vars=cli_vars)
|
||||
self.target_dict = target_dict
|
||||
|
||||
@contextproperty
|
||||
@contextproperty()
|
||||
def target(self) -> Dict[str, Any]:
|
||||
"""`target` contains information about your connection to the warehouse
|
||||
(specified in profiles.yml). Some configs are shared between all
|
||||
|
||||
@@ -1 +1,35 @@
# Contracts README

## Artifacts

### Generating JSON schemas

A helper script, `scripts/collect-artifact-schema.py`, is available to generate JSON schemas corresponding to versioned artifacts (`ArtifactMixin`s).

This script needs to be run whenever a new artifact schema version is created, or when changes are made to an existing artifact version. It writes the JSON schema to `schema/dbt/<artifact>/v<version>.json`.

Schemas in `schema/dbt` power the rendering at https://schemas.getdbt.com/ via https://github.com/dbt-labs/schemas.getdbt.com/

#### Example Usage

Available arguments:
```sh
❯ scripts/collect-artifact-schema.py --help
usage: Collect and write dbt arfifact schema [-h] [--path PATH] [--artifact {manifest,sources,run-results,catalog}]

options:
  -h, --help            show this help message and exit
  --path PATH           The dir to write artifact schema
  --artifact {manifest,sources,run-results,catalog}
                        The name of the artifact to update
```

Generate the latest version of the schemas for all artifacts to `schema/dbt/<artifact>/v<version>.json`:
```sh
> scripts/collect-artifact-schema.py --path schemas
```

Generate the latest version of the manifest schema to `schema/dbt/manifest/v<version>.json`:
```sh
> scripts/collect-artifact-schema.py --path schemas --artifact manifest
```
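As an illustrative follow-up (not part of the script), a generated schema can be checked against a real artifact with the third-party `jsonschema` package; both file paths below are assumptions:

```python
import json

from jsonschema import validate  # pip install jsonschema

# Schema written by collect-artifact-schema.py; the version number is illustrative.
with open("schemas/dbt/manifest/v12.json") as f:
    schema = json.load(f)

# Manifest produced by a dbt invocation in this project.
with open("target/manifest.json") as f:
    manifest = json.load(f)

validate(instance=manifest, schema=schema)  # raises jsonschema.ValidationError on mismatch
```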
@@ -16,26 +16,21 @@ from dbt.utils import translate_aliases, md5
|
||||
from dbt.events.functions import fire_event
|
||||
from dbt.events.types import NewConnectionOpening
|
||||
from dbt.events.contextvars import get_node_info
|
||||
from typing_extensions import Protocol
|
||||
from typing_extensions import Protocol, Annotated
|
||||
from dbt.dataclass_schema import (
|
||||
dbtClassMixin,
|
||||
StrEnum,
|
||||
ExtensibleDbtClassMixin,
|
||||
HyphenatedDbtClassMixin,
|
||||
ValidatedStringMixin,
|
||||
register_pattern,
|
||||
)
|
||||
from dbt.contracts.util import Replaceable
|
||||
from mashumaro.jsonschema.annotations import Pattern
|
||||
|
||||
|
||||
class Identifier(ValidatedStringMixin):
|
||||
ValidationRegex = r"^[A-Za-z_][A-Za-z0-9_]+$"
|
||||
|
||||
|
||||
# we need register_pattern for jsonschema validation
|
||||
register_pattern(Identifier, r"^[A-Za-z_][A-Za-z0-9_]+$")
|
||||
|
||||
|
||||
@dataclass
|
||||
class AdapterResponse(dbtClassMixin):
|
||||
_message: str
|
||||
@@ -55,7 +50,8 @@ class ConnectionState(StrEnum):
|
||||
|
||||
@dataclass(init=False)
|
||||
class Connection(ExtensibleDbtClassMixin, Replaceable):
|
||||
type: Identifier
|
||||
# Annotated is used by mashumaro for jsonschema generation
|
||||
type: Annotated[Identifier, Pattern(r"^[A-Za-z_][A-Za-z0-9_]+$")]
|
||||
name: Optional[str] = None
|
||||
state: ConnectionState = ConnectionState.INIT
|
||||
transaction_open: bool = False
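For context on the `Annotated`/`Pattern` switch, a standalone sketch of how the regex surfaces in a generated JSON schema; this assumes mashumaro's documented `build_json_schema` helper and is illustrative only, not the dbt code:

```python
from dataclasses import dataclass

from typing_extensions import Annotated

from mashumaro.jsonschema import build_json_schema  # assumed public helper
from mashumaro.jsonschema.annotations import Pattern  # same import the diff adds


@dataclass
class Conn:
    # The regex travels with the type annotation instead of a separate register_pattern call.
    type: Annotated[str, Pattern(r"^[A-Za-z_][A-Za-z0-9_]+$")]


schema = build_json_schema(Conn).to_dict()
print(schema["properties"]["type"]["pattern"])  # ^[A-Za-z_][A-Za-z0-9_]+$
```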
|
||||
@@ -108,7 +104,7 @@ class LazyHandle:
|
||||
connection, updating the handle on the Connection.
|
||||
"""
|
||||
|
||||
def __init__(self, opener: Callable[[Connection], Connection]):
|
||||
def __init__(self, opener: Callable[[Connection], Connection]) -> None:
|
||||
self.opener = opener
|
||||
|
||||
def resolve(self, connection: Connection) -> Connection:
|
||||
@@ -161,6 +157,7 @@ class Credentials(ExtensibleDbtClassMixin, Replaceable, metaclass=abc.ABCMeta):
|
||||
@classmethod
|
||||
def __pre_deserialize__(cls, data):
|
||||
data = super().__pre_deserialize__(data)
|
||||
# Need to fixup dbname => database, pass => password
|
||||
data = cls.translate_aliases(data)
|
||||
return data
|
||||
|
||||
@@ -220,10 +217,10 @@ DEFAULT_QUERY_COMMENT = """
|
||||
|
||||
|
||||
@dataclass
|
||||
class QueryComment(HyphenatedDbtClassMixin):
|
||||
class QueryComment(dbtClassMixin):
|
||||
comment: str = DEFAULT_QUERY_COMMENT
|
||||
append: bool = False
|
||||
job_label: bool = False
|
||||
job_label: bool = field(default=False, metadata={"alias": "job-label"})
|
||||
|
||||
|
||||
class AdapterRequiredConfig(HasCredentials, Protocol):
|
||||
|
||||
@@ -225,10 +225,14 @@ class SchemaSourceFile(BaseSourceFile):
|
||||
sources: List[str] = field(default_factory=list)
|
||||
exposures: List[str] = field(default_factory=list)
|
||||
metrics: List[str] = field(default_factory=list)
|
||||
# metrics generated from semantic_model measures
|
||||
generated_metrics: List[str] = field(default_factory=list)
|
||||
groups: List[str] = field(default_factory=list)
|
||||
# node patches contain models, seeds, snapshots, analyses
|
||||
ndp: List[str] = field(default_factory=list)
|
||||
semantic_models: List[str] = field(default_factory=list)
|
||||
unit_tests: List[str] = field(default_factory=list)
|
||||
saved_queries: List[str] = field(default_factory=list)
|
||||
# any macro patches in this file by macro unique_id.
|
||||
mcp: Dict[str, str] = field(default_factory=dict)
|
||||
# any source patches in this file. The entries are package, name pairs
|
||||
|
||||
@@ -20,6 +20,7 @@ from typing import (
|
||||
Generic,
|
||||
AbstractSet,
|
||||
ClassVar,
|
||||
Iterable,
|
||||
)
|
||||
from typing_extensions import Protocol
|
||||
from uuid import UUID
|
||||
@@ -37,14 +38,22 @@ from dbt.contracts.graph.nodes import (
|
||||
ModelNode,
|
||||
DeferRelation,
|
||||
ResultNode,
|
||||
SavedQuery,
|
||||
SemanticModel,
|
||||
SourceDefinition,
|
||||
UnpatchedSourceDefinition,
|
||||
UnitTestDefinition,
|
||||
)
|
||||
from dbt.contracts.graph.unparsed import SourcePatch, NodeVersion, UnparsedVersion
|
||||
from dbt.contracts.graph.manifest_upgrade import upgrade_manifest_json
|
||||
from dbt.contracts.files import SourceFile, SchemaSourceFile, FileHash, AnySourceFile
|
||||
from dbt.contracts.util import BaseArtifactMetadata, SourceKey, ArtifactMixin, schema_version
|
||||
from dbt.contracts.util import (
|
||||
BaseArtifactMetadata,
|
||||
SourceKey,
|
||||
ArtifactMixin,
|
||||
schema_version,
|
||||
get_artifact_schema_version,
|
||||
)
|
||||
from dbt.dataclass_schema import dbtClassMixin
|
||||
from dbt.exceptions import (
|
||||
CompilationError,
|
||||
@@ -88,7 +97,7 @@ def find_unique_id_for_package(storage, key, package: Optional[PackageName]):
|
||||
|
||||
|
||||
class DocLookup(dbtClassMixin):
|
||||
def __init__(self, manifest: "Manifest"):
|
||||
def __init__(self, manifest: "Manifest") -> None:
|
||||
self.storage: Dict[str, Dict[PackageName, UniqueID]] = {}
|
||||
self.populate(manifest)
|
||||
|
||||
@@ -119,7 +128,7 @@ class DocLookup(dbtClassMixin):
|
||||
|
||||
|
||||
class SourceLookup(dbtClassMixin):
|
||||
def __init__(self, manifest: "Manifest"):
|
||||
def __init__(self, manifest: "Manifest") -> None:
|
||||
self.storage: Dict[str, Dict[PackageName, UniqueID]] = {}
|
||||
self.populate(manifest)
|
||||
|
||||
@@ -156,7 +165,7 @@ class RefableLookup(dbtClassMixin):
|
||||
_lookup_types: ClassVar[set] = set(NodeType.refable())
|
||||
_versioned_types: ClassVar[set] = set(NodeType.versioned())
|
||||
|
||||
def __init__(self, manifest: "Manifest"):
|
||||
def __init__(self, manifest: "Manifest") -> None:
|
||||
self.storage: Dict[str, Dict[PackageName, UniqueID]] = {}
|
||||
self.populate(manifest)
|
||||
|
||||
@@ -267,7 +276,7 @@ class RefableLookup(dbtClassMixin):
|
||||
|
||||
|
||||
class MetricLookup(dbtClassMixin):
|
||||
def __init__(self, manifest: "Manifest"):
|
||||
def __init__(self, manifest: "Manifest") -> None:
|
||||
self.storage: Dict[str, Dict[PackageName, UniqueID]] = {}
|
||||
self.populate(manifest)
|
||||
|
||||
@@ -299,6 +308,41 @@ class MetricLookup(dbtClassMixin):
|
||||
return manifest.metrics[unique_id]
|
||||
|
||||
|
||||
class SavedQueryLookup(dbtClassMixin):
|
||||
"""Lookup utility for finding SavedQuery nodes"""
|
||||
|
||||
def __init__(self, manifest: "Manifest") -> None:
|
||||
self.storage: Dict[str, Dict[PackageName, UniqueID]] = {}
|
||||
self.populate(manifest)
|
||||
|
||||
def get_unique_id(self, search_name, package: Optional[PackageName]):
|
||||
return find_unique_id_for_package(self.storage, search_name, package)
|
||||
|
||||
def find(self, search_name, package: Optional[PackageName], manifest: "Manifest"):
|
||||
unique_id = self.get_unique_id(search_name, package)
|
||||
if unique_id is not None:
|
||||
return self.perform_lookup(unique_id, manifest)
|
||||
return None
|
||||
|
||||
def add_saved_query(self, saved_query: SavedQuery):
|
||||
if saved_query.search_name not in self.storage:
|
||||
self.storage[saved_query.search_name] = {}
|
||||
|
||||
self.storage[saved_query.search_name][saved_query.package_name] = saved_query.unique_id
|
||||
|
||||
def populate(self, manifest):
|
||||
for saved_query in manifest.saved_queries.values():
|
||||
if hasattr(saved_query, "name"):
|
||||
self.add_saved_query(saved_query)
|
||||
|
||||
def perform_lookup(self, unique_id: UniqueID, manifest: "Manifest") -> SavedQuery:
|
||||
if unique_id not in manifest.saved_queries:
|
||||
raise dbt.exceptions.DbtInternalError(
|
||||
f"SavedQUery {unique_id} found in cache but not found in manifest"
|
||||
)
|
||||
return manifest.saved_queries[unique_id]
|
||||
|
||||
|
||||
class SemanticModelByMeasureLookup(dbtClassMixin):
|
||||
"""Lookup utility for finding SemanticModel by measure
|
||||
|
||||
@@ -306,7 +350,7 @@ class SemanticModelByMeasureLookup(dbtClassMixin):
|
||||
the semantic models in a manifest.
|
||||
"""
|
||||
|
||||
def __init__(self, manifest: "Manifest"):
|
||||
def __init__(self, manifest: "Manifest") -> None:
|
||||
self.storage: DefaultDict[str, Dict[PackageName, UniqueID]] = defaultdict(dict)
|
||||
self.populate(manifest)
|
||||
|
||||
@@ -331,20 +375,31 @@ class SemanticModelByMeasureLookup(dbtClassMixin):
|
||||
"""Populate storage with all the measure + package paths to the Manifest's SemanticModels"""
|
||||
for semantic_model in manifest.semantic_models.values():
|
||||
self.add(semantic_model=semantic_model)
|
||||
for disabled in manifest.disabled.values():
|
||||
for node in disabled:
|
||||
if isinstance(node, SemanticModel):
|
||||
self.add(semantic_model=node)
|
||||
|
||||
def perform_lookup(self, unique_id: UniqueID, manifest: "Manifest") -> SemanticModel:
|
||||
"""Tries to get a SemanticModel from the Manifest"""
|
||||
semantic_model = manifest.semantic_models.get(unique_id)
|
||||
if semantic_model is None:
|
||||
enabled_semantic_model: Optional[SemanticModel] = manifest.semantic_models.get(unique_id)
|
||||
disabled_semantic_model: Optional[List] = manifest.disabled.get(unique_id)
|
||||
|
||||
if isinstance(enabled_semantic_model, SemanticModel):
|
||||
return enabled_semantic_model
|
||||
elif disabled_semantic_model is not None and isinstance(
|
||||
disabled_semantic_model[0], SemanticModel
|
||||
):
|
||||
return disabled_semantic_model[0]
|
||||
else:
|
||||
raise dbt.exceptions.DbtInternalError(
|
||||
f"Semantic model `{unique_id}` found in cache but not found in manifest"
|
||||
)
|
||||
return semantic_model
|
||||
|
||||
|
||||
# This handles both models/seeds/snapshots and sources/metrics/exposures
# This handles both models/seeds/snapshots and sources/metrics/exposures/semantic_models
class DisabledLookup(dbtClassMixin):
def __init__(self, manifest: "Manifest"):
def __init__(self, manifest: "Manifest") -> None:
self.storage: Dict[str, Dict[PackageName, List[Any]]] = {}
self.populate(manifest)

@@ -598,6 +653,9 @@ class Disabled(Generic[D]):
MaybeMetricNode = Optional[Union[Metric, Disabled[Metric]]]


MaybeSavedQueryNode = Optional[Union[SavedQuery, Disabled[SavedQuery]]]


MaybeDocumentation = Optional[Documentation]


@@ -742,6 +800,8 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
disabled: MutableMapping[str, List[GraphMemberNode]] = field(default_factory=dict)
env_vars: MutableMapping[str, str] = field(default_factory=dict)
semantic_models: MutableMapping[str, SemanticModel] = field(default_factory=dict)
unit_tests: MutableMapping[str, UnitTestDefinition] = field(default_factory=dict)
saved_queries: MutableMapping[str, SavedQuery] = field(default_factory=dict)

_doc_lookup: Optional[DocLookup] = field(
default=None, metadata={"serialize": lambda x: None, "deserialize": lambda x: None}
@@ -755,6 +815,9 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
_metric_lookup: Optional[MetricLookup] = field(
default=None, metadata={"serialize": lambda x: None, "deserialize": lambda x: None}
)
_saved_query_lookup: Optional[SavedQueryLookup] = field(
default=None, metadata={"serialize": lambda x: None, "deserialize": lambda x: None}
)
_semantic_model_by_measure_lookup: Optional[SemanticModelByMeasureLookup] = field(
default=None, metadata={"serialize": lambda x: None, "deserialize": lambda x: None}
)
@@ -799,6 +862,9 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
"semantic_models": {
k: v.to_dict(omit_none=False) for k, v in self.semantic_models.items()
},
"saved_queries": {
k: v.to_dict(omit_none=False) for k, v in self.saved_queries.items()
},
}

def build_disabled_by_file_id(self):
@@ -860,6 +926,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
self.sources.values(),
self.metrics.values(),
self.semantic_models.values(),
self.saved_queries.values(),
)
for resource in all_resources:
resource_type_plural = resource.resource_type.pluralize()
@@ -895,6 +962,8 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
files={k: _deepcopy(v) for k, v in self.files.items()},
state_check=_deepcopy(self.state_check),
semantic_models={k: _deepcopy(v) for k, v in self.semantic_models.items()},
unit_tests={k: _deepcopy(v) for k, v in self.unit_tests.items()},
saved_queries={k: _deepcopy(v) for k, v in self.saved_queries.items()},
)
copy.build_flat_graph()
return copy
@@ -907,6 +976,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
self.exposures.values(),
self.metrics.values(),
self.semantic_models.values(),
self.saved_queries.values(),
)
)
forward_edges, backward_edges = build_node_edges(edge_members)
@@ -927,13 +997,22 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
groupable_nodes = list(
chain(
self.nodes.values(),
self.saved_queries.values(),
self.semantic_models.values(),
self.metrics.values(),
)
)
group_map = {group.name: [] for group in self.groups.values()}
for node in groupable_nodes:
if node.group is not None:
group_map[node.group].append(node.unique_id)
# group updates are not included with state:modified and
# by ignoring the groups that aren't in the group map we
# can avoid hitting errors for groups that are not getting
# updated. This is a hack but any groups that are not
# valid will be caught in
# parser.manifest.ManifestLoader.check_valid_group_config_node
if node.group in group_map:
group_map[node.group].append(node.unique_id)
self.group_map = group_map

def writable_manifest(self) -> "WritableManifest":
@@ -954,6 +1033,8 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
parent_map=self.parent_map,
group_map=self.group_map,
semantic_models=self.semantic_models,
unit_tests=self.unit_tests,
saved_queries=self.saved_queries,
)

def write(self, path):
@@ -972,6 +1053,10 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
return self.metrics[unique_id]
elif unique_id in self.semantic_models:
return self.semantic_models[unique_id]
elif unique_id in self.unit_tests:
return self.unit_tests[unique_id]
elif unique_id in self.saved_queries:
return self.saved_queries[unique_id]
else:
# something terrible has happened
raise dbt.exceptions.DbtInternalError(
@@ -1008,6 +1093,13 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
self._metric_lookup = MetricLookup(self)
return self._metric_lookup

@property
def saved_query_lookup(self) -> SavedQueryLookup:
"""Returns a SavedQueryLookup, instantiating it first if necessary."""
if self._saved_query_lookup is None:
self._saved_query_lookup = SavedQueryLookup(self)
return self._saved_query_lookup

@property
def semantic_model_by_measure_lookup(self) -> SemanticModelByMeasureLookup:
"""Gets (and creates if necessary) the lookup utility for getting SemanticModels by measures"""
@@ -1056,8 +1148,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):

return resolved_refs

# Called by dbt.parser.manifest._process_refs_for_exposure, _process_refs_for_metric,
# and dbt.parser.manifest._process_refs_for_node
# Called by dbt.parser.manifest._process_refs & ManifestLoader.check_for_model_deprecations
def resolve_ref(
self,
source_node: GraphMemberNode,
@@ -1142,6 +1233,35 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
return Disabled(disabled[0])
return None

def resolve_saved_query(
self,
target_saved_query_name: str,
target_saved_query_package: Optional[str],
current_project: str,
node_package: str,
) -> MaybeSavedQueryNode:
"""Tries to find the SavedQuery by name within the available project and packages.

Will return the first enabled SavedQuery matching the name found while iterating over
the scoped packages. If no enabled SavedQuery node match is found, returns the last
disabled SavedQuery node. Otherwise it returns None.
"""
disabled: Optional[List[SavedQuery]] = None
candidates = _packages_to_search(current_project, node_package, target_saved_query_package)
for pkg in candidates:
saved_query = self.saved_query_lookup.find(target_saved_query_name, pkg, self)

if saved_query is not None and saved_query.config.enabled:
return saved_query

# it's possible that the node is disabled
if disabled is None:
disabled = self.disabled_lookup.find(f"{target_saved_query_name}", pkg)
if disabled:
return Disabled(disabled[0])

return None
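A hedged sketch of how a caller might use the new resolver; the saved query and package names below are hypothetical, and `Disabled.target` holds the wrapped node:

result = manifest.resolve_saved_query(
    target_saved_query_name="weekly_revenue",
    target_saved_query_package=None,
    current_project="my_project",
    node_package="my_project",
)
if result is None:
    pass  # no SavedQuery with that name in any package in scope
elif isinstance(result, Disabled):
    pass  # found, but disabled: result.target is the underlying SavedQuery
else:
    pass  # an enabled SavedQuery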
def resolve_semantic_model_for_measure(
self,
target_measure_name: str,
@@ -1156,6 +1276,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
semantic_model = self.semantic_model_by_measure_lookup.find(
target_measure_name, pkg, self
)
# need to return it even if it's disabled so we know it's not fully missing
if semantic_model is not None:
return semantic_model

@@ -1331,10 +1452,13 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
self.exposures[exposure.unique_id] = exposure
source_file.exposures.append(exposure.unique_id)

def add_metric(self, source_file: SchemaSourceFile, metric: Metric):
def add_metric(self, source_file: SchemaSourceFile, metric: Metric, generated: bool = False):
_check_duplicates(metric, self.metrics)
self.metrics[metric.unique_id] = metric
source_file.metrics.append(metric.unique_id)
if not generated:
source_file.metrics.append(metric.unique_id)
else:
source_file.generated_metrics.append(metric.unique_id)

def add_group(self, source_file: SchemaSourceFile, group: Group):
_check_duplicates(group, self.groups)
@@ -1356,6 +1480,10 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
source_file.add_test(node.unique_id, test_from)
if isinstance(node, Metric):
source_file.metrics.append(node.unique_id)
if isinstance(node, SavedQuery):
source_file.saved_queries.append(node.unique_id)
if isinstance(node, SemanticModel):
source_file.semantic_models.append(node.unique_id)
if isinstance(node, Exposure):
source_file.exposures.append(node.unique_id)
else:
@@ -1371,6 +1499,17 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
self.semantic_models[semantic_model.unique_id] = semantic_model
source_file.semantic_models.append(semantic_model.unique_id)

def add_unit_test(self, source_file: SchemaSourceFile, unit_test: UnitTestDefinition):
if unit_test.unique_id in self.unit_tests:
raise DuplicateResourceNameError(unit_test, self.unit_tests[unit_test.unique_id])
self.unit_tests[unit_test.unique_id] = unit_test
source_file.unit_tests.append(unit_test.unique_id)

def add_saved_query(self, source_file: SchemaSourceFile, saved_query: SavedQuery) -> None:
_check_duplicates(saved_query, self.saved_queries)
self.saved_queries[saved_query.unique_id] = saved_query
source_file.saved_queries.append(saved_query.unique_id)

# end of methods formerly in ParseResult

# Provide support for copy.deepcopy() - we just need to avoid the lock!
@@ -1398,6 +1537,8 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
self.disabled,
self.env_vars,
self.semantic_models,
self.unit_tests,
self.saved_queries,
self._doc_lookup,
self._source_lookup,
self._ref_lookup,
@@ -1410,19 +1551,19 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):


class MacroManifest(MacroMethods):
def __init__(self, macros):
def __init__(self, macros) -> None:
self.macros = macros
self.metadata = ManifestMetadata()
# This is returned by the 'graph' context property
# in the ProviderContext class.
self.flat_graph = {}
self.flat_graph: Dict[str, Any] = {}


AnyManifest = Union[Manifest, MacroManifest]


@dataclass
@schema_version("manifest", 10)
@schema_version("manifest", 11)
class WritableManifest(ArtifactMixin):
nodes: Mapping[UniqueID, ManifestNode] = field(
metadata=dict(description=("The nodes defined in the dbt project and its dependencies"))
@@ -1468,6 +1609,9 @@ class WritableManifest(ArtifactMixin):
description="A mapping from group names to their nodes",
)
)
saved_queries: Mapping[UniqueID, SavedQuery] = field(
metadata=dict(description=("The saved queries defined in the dbt project"))
)
semantic_models: Mapping[UniqueID, SemanticModel] = field(
metadata=dict(description=("The semantic models defined in the dbt project"))
)
@@ -1476,9 +1620,14 @@ class WritableManifest(ArtifactMixin):
description="Metadata about the manifest",
)
)
unit_tests: Mapping[UniqueID, UnitTestDefinition] = field(
metadata=dict(
description="The unit tests defined in the project",
)
)

@classmethod
def compatible_previous_versions(self):
def compatible_previous_versions(cls) -> Iterable[Tuple[str, int]]:
return [
("manifest", 4),
("manifest", 5),
@@ -1486,14 +1635,15 @@ class WritableManifest(ArtifactMixin):
("manifest", 7),
("manifest", 8),
("manifest", 9),
("manifest", 10),
]

@classmethod
def upgrade_schema_version(cls, data):
"""This overrides the "upgrade_schema_version" call in VersionedSchema (via
ArtifactMixin) to modify the dictionary passed in from earlier versions of the manifest."""
manifest_schema_version = get_manifest_schema_version(data)
if manifest_schema_version <= 9:
manifest_schema_version = get_artifact_schema_version(data)
if manifest_schema_version <= 10:
data = upgrade_manifest_json(data, manifest_schema_version)
return cls.from_dict(data)

@@ -1506,13 +1656,6 @@ class WritableManifest(ArtifactMixin):
return dct


def get_manifest_schema_version(dct: dict) -> int:
schema_version = dct.get("metadata", {}).get("dbt_schema_version", None)
if not schema_version:
raise ValueError("Manifest doesn't have schema version")
return int(schema_version.split(".")[-2][-1])
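The removed helper above parsed only the last character of the second-to-last dot-separated segment of the schema URL, which stops working once versions reach two digits; presumably that is why the call site now uses the shared get_artifact_schema_version helper instead. A quick hedged illustration (the URL is the usual dbt_schema_version format; the "new" parse below is only a sketch of more robust handling, not the helper's actual implementation):

url = "https://schemas.getdbt.com/dbt/manifest/v10.json"
old_version = int(url.split(".")[-2][-1])                 # -> 0, the leading "1" is dropped
new_version = int(url.split("/")[-1].split(".")[0].lstrip("v"))  # -> 10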
def _check_duplicates(value: BaseNode, src: Mapping[str, BaseNode]):
if value.unique_id in src:
raise DuplicateResourceNameError(value, src[value.unique_id])

@@ -62,10 +62,72 @@ def drop_v9_and_prior_metrics(manifest: dict) -> None:
manifest["disabled"] = filtered_disabled_entries


def _convert_dct_with_filter(v10_dct_with_opt_filter):
"""Upgrade the filter object from v10 to v11.

v10 filters from a serialized manifest looked like:
{..., 'filter': {'where_sql_template': '<filter_value>'}}
whereas v11 filters look like:
{..., 'filter': {'where_filters': [{'where_sql_template': '<filter_value>'}, ...]}}
"""
if v10_dct_with_opt_filter is not None and v10_dct_with_opt_filter.get("filter") is not None:
v10_dct_with_opt_filter["filter"] = {"where_filters": [v10_dct_with_opt_filter["filter"]]}


def _convert_metric(v10_metric_dict):
"""Upgrades a v10 metric object to a v11 metric object.

Specifically the following properties change:
1. metric.filter
2. metric.type_params.measure.filter
3. metric.type_params.input_measures[x].filter
4. metric.type_params.numerator.filter
5. metric.type_params.denominator.filter
6. metric.type_params.metrics[x].filter
"""

# handles top level metric filter
_convert_dct_with_filter(v10_metric_dict)

type_params = v10_metric_dict.get("type_params")
if type_params is not None:
_convert_dct_with_filter(type_params.get("measure"))
_convert_dct_with_filter(type_params.get("numerator"))
_convert_dct_with_filter(type_params.get("denominator"))

# handles metric.type_params.input_measures[x].filter
input_measures = type_params.get("input_measures")
if input_measures is not None:
for input_measure in input_measures:
_convert_dct_with_filter(input_measure)

# handles metric.type_params.metrics[x].filter
metrics = type_params.get("metrics")
if metrics is not None:
for metric in metrics:
_convert_dct_with_filter(metric)


def upgrade_v10_metric_filters(manifest: dict):
"""Handles metric filter changes from v10 to v11."""

metrics = manifest.get("metrics", {})
for metric in metrics.values():
_convert_metric(metric)

disabled_nodes = manifest.get("disabled", {})
for unique_id, nodes in disabled_nodes.items():
if unique_id.split(".")[0] == "metric":
for node in nodes:
_convert_metric(node)
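A minimal illustration of the upgrade performed above, using a made-up metric dict (only the fields the converters touch are shown):

v10_metric = {
    "name": "revenue",  # hypothetical metric name
    "filter": {"where_sql_template": "is_paid = true"},
    "type_params": {"measure": {"name": "amount", "filter": None}},
}
_convert_metric(v10_metric)
# v10_metric["filter"] is now:
# {"where_filters": [{"where_sql_template": "is_paid = true"}]}
# the measure's filter was None, so it is left untouched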
def upgrade_manifest_json(manifest: dict, manifest_schema_version: int) -> dict:
# this should remain 9 while the check in `upgrade_schema_version` may change
if manifest_schema_version <= 9:
drop_v9_and_prior_metrics(manifest=manifest)
elif manifest_schema_version == 10:
upgrade_v10_metric_filters(manifest=manifest)

for node_content in manifest.get("nodes", {}).values():
upgrade_node_content(node_content)
@@ -83,6 +145,9 @@ def upgrade_manifest_json(manifest: dict, manifest_schema_version: int) -> dict:
manifest["groups"] = {}
if "group_map" not in manifest:
manifest["group_map"] = {}
# add unit_tests key
if "unit_tests" not in manifest:
manifest["unit_tests"] = {}
for metric_content in manifest.get("metrics", {}).values():
# handle attr renames + value translation ("expression" -> "derived")
metric_content = upgrade_ref_content(metric_content)
@@ -104,4 +169,6 @@ def upgrade_manifest_json(manifest: dict, manifest_schema_version: int) -> dict:
doc_content["resource_type"] = "doc"
if "semantic_models" not in manifest:
manifest["semantic_models"] = {}
if "saved_queries" not in manifest:
manifest["saved_queries"] = {}
return manifest

@@ -1,8 +1,15 @@
from dbt.node_types import NodeType
from dbt.contracts.graph.manifest import Manifest, Metric
from dbt_semantic_interfaces.type_enums import MetricType

from typing import Any, Dict, Iterator, List


DERIVED_METRICS = [MetricType.DERIVED, MetricType.RATIO]
BASE_METRICS = [MetricType.SIMPLE, MetricType.CUMULATIVE]


class MetricReference(object):
def __init__(self, metric_name, package_name=None):
def __init__(self, metric_name, package_name=None) -> None:
self.metric_name = metric_name
self.package_name = package_name

@@ -17,76 +24,74 @@ class ResolvedMetricReference(MetricReference):
for working with metrics (ie. __str__ and templating functions)
"""

def __init__(self, node, manifest, Relation):
def __init__(self, node: Metric, manifest: Manifest) -> None:
super().__init__(node.name, node.package_name)
self.node = node
self.manifest = manifest
self.Relation = Relation

def __getattr__(self, key):
def __getattr__(self, key) -> Any:
return getattr(self.node, key)

def __str__(self):
def __str__(self) -> str:
return f"{self.node.name}"

@classmethod
def parent_metrics(cls, metric_node, manifest):
def parent_metrics(cls, metric_node: Metric, manifest: Manifest) -> Iterator[Metric]:
"""For a given metric, yields all upstream metrics."""
yield metric_node

for parent_unique_id in metric_node.depends_on.nodes:
node = manifest.metrics.get(parent_unique_id)
if node and node.resource_type == NodeType.Metric:
node = manifest.expect(parent_unique_id)
if isinstance(node, Metric):
yield from cls.parent_metrics(node, manifest)

@classmethod
def parent_metrics_names(cls, metric_node, manifest):
yield metric_node.name

for parent_unique_id in metric_node.depends_on.nodes:
node = manifest.metrics.get(parent_unique_id)
if node and node.resource_type == NodeType.Metric:
yield from cls.parent_metrics_names(node, manifest)
def parent_metrics_names(cls, metric_node: Metric, manifest: Manifest) -> Iterator[str]:
"""For a given metric, yields all upstream metric names"""
for metric in cls.parent_metrics(metric_node, manifest):
yield metric.name

@classmethod
def reverse_dag_parsing(cls, metric_node, manifest, metric_depth_count):
if metric_node.calculation_method == "derived":
yield {metric_node.name: metric_depth_count}
metric_depth_count = metric_depth_count + 1
def reverse_dag_parsing(
cls, metric_node: Metric, manifest: Manifest, metric_depth_count: int
) -> Iterator[Dict[str, int]]:
"""For the given metric, yields dictionaries of {<metric_name>: <depth_from_initial_metric>} for upstream derived metrics.

for parent_unique_id in metric_node.depends_on.nodes:
node = manifest.metrics.get(parent_unique_id)
if (
node
and node.resource_type == NodeType.Metric
and node.calculation_method == "derived"
):
yield from cls.reverse_dag_parsing(node, manifest, metric_depth_count)
This function is intended as a helper function for other metric helper functions.
"""
if metric_node.type in DERIVED_METRICS:
yield {metric_node.name: metric_depth_count}

for parent_unique_id in metric_node.depends_on.nodes:
node = manifest.expect(parent_unique_id)
if isinstance(node, Metric):
yield from cls.reverse_dag_parsing(node, manifest, metric_depth_count + 1)
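A rough sketch of what the rewritten traversal yields for a hypothetical chain where a derived metric "profit" depends on a derived metric "revenue", which in turn depends on a simple metric "orders":

# assuming `profit_metric` and `manifest` are a parsed Metric and Manifest
# depths = list(ResolvedMetricReference.reverse_dag_parsing(profit_metric, manifest, 1))
# depths == [{"profit": 1}, {"revenue": 2}]
# "orders" is MetricType.SIMPLE, so the recursion walks through it but never yields it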
def full_metric_dependency(self):
"""Returns a unique list of all upstream metric names."""
to_return = list(set(self.parent_metrics_names(self.node, self.manifest)))
return to_return

def base_metric_dependency(self):
def base_metric_dependency(self) -> List[str]:
"""Returns a unique list of names for all upstream non-derived metrics."""
in_scope_metrics = list(self.parent_metrics(self.node, self.manifest))
base_metrics = {
metric.name for metric in in_scope_metrics if metric.type not in DERIVED_METRICS
}

to_return = []
for metric in in_scope_metrics:
if metric.calculation_method != "derived" and metric.name not in to_return:
to_return.append(metric.name)
return list(base_metrics)

return to_return

def derived_metric_dependency(self):
def derived_metric_dependency(self) -> List[str]:
"""Returns a unique list of names for all upstream derived metrics."""
in_scope_metrics = list(self.parent_metrics(self.node, self.manifest))
derived_metrics = {
metric.name for metric in in_scope_metrics if metric.type in DERIVED_METRICS
}

to_return = []
for metric in in_scope_metrics:
if metric.calculation_method == "derived" and metric.name not in to_return:
to_return.append(metric.name)
return list(derived_metrics)

return to_return

def derived_metric_dependency_depth(self):
def derived_metric_dependency_depth(self) -> List[Dict[str, int]]:
"""Returns a list of {<metric_name>: <depth_from_initial_metric>} for all upstream metrics."""
metric_depth_count = 1
to_return = list(self.reverse_dag_parsing(self.node, self.manifest, metric_depth_count))


@@ -2,11 +2,11 @@ from dataclasses import field, Field, dataclass
from enum import Enum
from itertools import chain
from typing import Any, List, Optional, Dict, Union, Type, TypeVar, Callable
from typing_extensions import Annotated

from dbt.dataclass_schema import (
dbtClassMixin,
ValidationError,
register_pattern,
StrEnum,
)
from dbt.contracts.graph.unparsed import AdditionalPropertiesAllowed, Docs
@@ -14,7 +14,9 @@ from dbt.contracts.graph.utils import validate_color
from dbt.contracts.util import Replaceable, list_str
from dbt.exceptions import DbtInternalError, CompilationError
from dbt import hooks
from dbt.node_types import NodeType
from dbt.node_types import NodeType, AccessType
from dbt_semantic_interfaces.type_enums.export_destination_type import ExportDestinationType
from mashumaro.jsonschema.annotations import Pattern


M = TypeVar("M", bound="Metadata")
@@ -188,9 +190,6 @@ class Severity(str):
pass


register_pattern(Severity, insensitive_patterns("warn", "error"))


class OnConfigurationChangeOption(StrEnum):
Apply = "apply"
Continue = "continue"
@@ -204,6 +203,7 @@ class OnConfigurationChangeOption(StrEnum):
@dataclass
class ContractConfig(dbtClassMixin, Replaceable):
enforced: bool = False
alias_types: bool = True


@dataclass
@@ -218,7 +218,6 @@ T = TypeVar("T", bound="BaseConfig")

@dataclass
class BaseConfig(AdditionalPropertiesAllowed, Replaceable):

# enable syntax like: config['key']
def __getitem__(self, key):
return self.get(key)
@@ -376,25 +375,50 @@ class BaseConfig(AdditionalPropertiesAllowed, Replaceable):
self.validate(dct)
return self.from_dict(dct)

def replace(self, **kwargs):
dct = self.to_dict(omit_none=True)

mapping = self.field_mapping()
for key, value in kwargs.items():
new_key = mapping.get(key, key)
dct[new_key] = value
return self.from_dict(dct)


@dataclass
class SemanticModelConfig(BaseConfig):
enabled: bool = True
group: Optional[str] = field(
default=None,
metadata=CompareBehavior.Exclude.meta(),
)
meta: Dict[str, Any] = field(
default_factory=dict,
metadata=MergeBehavior.Update.meta(),
)


@dataclass
class SavedQueryConfig(BaseConfig):
"""Where config options for SavedQueries are stored.

This class is much like many other node config classes. It's likely that
this class will expand in the direction of what's in the `NodeAndTestConfig`
class. It might make sense to clean the various *Config classes into one at
some point.
"""

enabled: bool = True
group: Optional[str] = field(
default=None,
metadata=CompareBehavior.Exclude.meta(),
)
meta: Dict[str, Any] = field(
default_factory=dict,
metadata=MergeBehavior.Update.meta(),
)
export_as: Optional[ExportDestinationType] = None
schema: Optional[str] = None
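A hedged sketch of constructing the new config directly; the enum member and schema name below are assumptions for illustration, not values taken from this diff:

config = SavedQueryConfig(
    enabled=True,
    meta={"owner": "analytics"},            # arbitrary example metadata
    export_as=ExportDestinationType.TABLE,  # assumed member of the imported enum
    schema="marts",                         # hypothetical target schema
)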
@dataclass
class MetricConfig(BaseConfig):
enabled: bool = True
group: Optional[str] = None
group: Optional[str] = field(
default=None,
metadata=CompareBehavior.Exclude.meta(),
)


@dataclass
@@ -447,11 +471,11 @@ class NodeConfig(NodeAndTestConfig):
persist_docs: Dict[str, Any] = field(default_factory=dict)
post_hook: List[Hook] = field(
default_factory=list,
metadata=MergeBehavior.Append.meta(),
metadata={"merge": MergeBehavior.Append, "alias": "post-hook"},
)
pre_hook: List[Hook] = field(
default_factory=list,
metadata=MergeBehavior.Append.meta(),
metadata={"merge": MergeBehavior.Append, "alias": "pre-hook"},
)
quoting: Dict[str, Any] = field(
default_factory=dict,
@@ -511,39 +535,34 @@ class NodeConfig(NodeAndTestConfig):
@classmethod
def __pre_deserialize__(cls, data):
data = super().__pre_deserialize__(data)
field_map = {"post-hook": "post_hook", "pre-hook": "pre_hook"}
# create a new dict because otherwise it gets overwritten in
# tests
new_dict = {}
for key in data:
new_dict[key] = data[key]
data = new_dict
for key in hooks.ModelHookType:
if key in data:
data[key] = [hooks.get_hook_dict(h) for h in data[key]]
for field_name in field_map:
if field_name in data:
new_name = field_map[field_name]
data[new_name] = data.pop(field_name)
return data

def __post_serialize__(self, dct):
dct = super().__post_serialize__(dct)
field_map = {"post_hook": "post-hook", "pre_hook": "pre-hook"}
for field_name in field_map:
if field_name in dct:
dct[field_map[field_name]] = dct.pop(field_name)
return dct

# this is still used by jsonschema validation
@classmethod
def field_mapping(cls):
return {"post_hook": "post-hook", "pre_hook": "pre-hook"}


@dataclass
class ModelConfig(NodeConfig):
access: AccessType = field(
default=AccessType.Protected,
metadata=MergeBehavior.Update.meta(),
)


@dataclass
class UnitTestNodeConfig(NodeConfig):
expected_rows: List[Dict[str, Any]] = field(default_factory=list)


@dataclass
class SeedConfig(NodeConfig):
materialized: str = "seed"
delimiter: str = ","
quote_columns: Optional[bool] = None

@classmethod
@@ -553,6 +572,9 @@ class SeedConfig(NodeConfig):
raise ValidationError("A seed must have a materialized value of 'seed'")


SEVERITY_PATTERN = r"^([Ww][Aa][Rr][Nn]|[Ee][Rr][Rr][Oo][Rr])$"


@dataclass
class TestConfig(NodeAndTestConfig):
__test__ = False
@@ -563,14 +585,64 @@ class TestConfig(NodeAndTestConfig):
metadata=CompareBehavior.Exclude.meta(),
)
materialized: str = "test"
severity: Severity = Severity("ERROR")
# Annotated is used by mashumaro for jsonschema generation
severity: Annotated[Severity, Pattern(SEVERITY_PATTERN)] = Severity("ERROR")
store_failures: Optional[bool] = None
store_failures_as: Optional[str] = None
where: Optional[str] = None
limit: Optional[int] = None
fail_calc: str = "count(*)"
warn_if: str = "!= 0"
error_if: str = "!= 0"

def __post_init__(self):
"""
The presence of a setting for `store_failures_as` overrides any existing setting for `store_failures`,
regardless of level of granularity. If `store_failures_as` is not set, then `store_failures` takes effect.
At the time of implementation, `store_failures = True` would always create a table; the user could not
configure this. Hence, if `store_failures = True` and `store_failures_as` is not specified, then it
should be set to "table" to mimic the existing functionality.

A side effect of this overriding functionality is that `store_failures_as="view"` at the project
level cannot be turned off at the model level without setting both `store_failures_as` and
`store_failures`. The former would cascade down and override `store_failures=False`. The proposal
is to include "ephemeral" as a value for `store_failures_as`, which effectively sets
`store_failures=False`.

The exception handling for this is tricky. If we raise an exception here, the entire run fails at
parse time. We would rather well-formed models run successfully, leaving only exceptions to be rerun
if necessary. Hence, the exception needs to be raised in the test materialization. In order to do so,
we need to make sure that we go down the `store_failures = True` route with the invalid setting for
`store_failures_as`. This results in the `.get()` defaulted to `True` below, instead of a normal
dictionary lookup as is done in the `if` block. Refer to the test materialization for the
exception that is raised as a result of an invalid value.

The intention of this block is to behave as if `store_failures_as` is the only setting,
but still allow for backwards compatibility for `store_failures`.
See https://github.com/dbt-labs/dbt-core/issues/6914 for more information.
"""

# if `store_failures_as` is not set, it gets set by `store_failures`
# the settings below mimic existing behavior prior to `store_failures_as`
get_store_failures_as_map = {
True: "table",
False: "ephemeral",
None: None,
}

# if `store_failures_as` is set, it dictates what `store_failures` gets set to
# the settings below override whatever `store_failures` is set to by the user
get_store_failures_map = {
"ephemeral": False,
"table": True,
"view": True,
}

if self.store_failures_as is None:
self.store_failures_as = get_store_failures_as_map[self.store_failures]
else:
self.store_failures = get_store_failures_map.get(self.store_failures_as, True)
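A quick sketch of the resulting pairings, constructing TestConfig directly and assuming the inherited fields keep their defaults:

TestConfig(store_failures=True).store_failures_as    # "table"
TestConfig(store_failures=False).store_failures_as   # "ephemeral"
cfg = TestConfig(store_failures=False, store_failures_as="view")
cfg.store_failures                                    # True: store_failures_as wins when both are set
# an unrecognized value falls through to the store_failures=True path so the
# test materialization can raise a clearer error later
TestConfig(store_failures_as="bogus").store_failures  # True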
@classmethod
def same_contents(cls, unrendered: Dict[str, Any], other: Dict[str, Any]) -> bool:
"""This is like __eq__, except it explicitly checks certain fields."""
@@ -582,6 +654,7 @@ class TestConfig(NodeAndTestConfig):
"warn_if",
"error_if",
"store_failures",
"store_failures_as",
]

seen = set()
@@ -619,6 +692,8 @@ class SnapshotConfig(EmptySnapshotConfig):
@classmethod
def validate(cls, data):
super().validate(data)
# Note: currently you can't just set these keys in schema.yml because this validation
# will fail when parsing the snapshot node.
if not data.get("strategy") or not data.get("unique_key") or not data.get("target_schema"):
raise ValidationError(
"Snapshots must be configured with a 'strategy', 'unique_key', "
@@ -649,20 +724,36 @@ class SnapshotConfig(EmptySnapshotConfig):
if data.get("materialized") and data.get("materialized") != "snapshot":
raise ValidationError("A snapshot must have a materialized value of 'snapshot'")

# Called by "calculate_node_config_dict" in ContextConfigGenerator
def finalize_and_validate(self):
data = self.to_dict(omit_none=True)
self.validate(data)
return self.from_dict(data)


@dataclass
class UnitTestConfig(BaseConfig):
tags: Union[str, List[str]] = field(
default_factory=list_str,
metadata=metas(ShowBehavior.Hide, MergeBehavior.Append, CompareBehavior.Exclude),
)
meta: Dict[str, Any] = field(
default_factory=dict,
metadata=MergeBehavior.Update.meta(),
)


RESOURCE_TYPES: Dict[NodeType, Type[BaseConfig]] = {
NodeType.Metric: MetricConfig,
NodeType.SemanticModel: SemanticModelConfig,
NodeType.SavedQuery: SavedQueryConfig,
NodeType.Exposure: ExposureConfig,
NodeType.Source: SourceConfig,
NodeType.Seed: SeedConfig,
NodeType.Test: TestConfig,
NodeType.Model: NodeConfig,
NodeType.Snapshot: SnapshotConfig,
NodeType.Unit: UnitTestConfig,
}
Some files were not shown because too many files have changed in this diff.