forked from repo-mirrors/dbt-core
Compare commits
287 Commits
jerco/upda
...
macro-reso
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
60f87411d5 | ||
|
|
eb96e3deec | ||
|
|
f68af070f3 | ||
|
|
7ad1accf2b | ||
|
|
ed8f5d38e4 | ||
|
|
7ad6aa18da | ||
|
|
6796edd66e | ||
|
|
e01eb30884 | ||
|
|
ba53f053fd | ||
|
|
b8de881ed3 | ||
|
|
d7d5e2335c | ||
|
|
160d0db238 | ||
|
|
2cee8652a6 | ||
|
|
7f777f8a42 | ||
|
|
00f49206e9 | ||
|
|
1bca662883 | ||
|
|
41ac915949 | ||
|
|
373125ecb8 | ||
|
|
294ad82e50 | ||
|
|
12bd1e87fb | ||
|
|
8bad75c65b | ||
|
|
c836b7585e | ||
|
|
220f56d8d2 | ||
|
|
32fde75504 | ||
|
|
615ad1fe2d | ||
|
|
81236a3dca | ||
|
|
6d834a18ed | ||
|
|
2ab0f7b26b | ||
|
|
9bb970e6ef | ||
|
|
e56a5dae8b | ||
|
|
1c9cec1787 | ||
|
|
4d02ef637b | ||
|
|
19f027b7a7 | ||
|
|
1d0a3e92c8 | ||
|
|
ab90c777d0 | ||
|
|
3902137dfc | ||
|
|
0131feac68 | ||
|
|
017faf4bd1 | ||
|
|
c2f7d75e9e | ||
|
|
51b94b26cc | ||
|
|
e24f9b3da7 | ||
|
|
b58e8e3ffc | ||
|
|
f45b013321 | ||
|
|
e547c0ec64 | ||
|
|
6871fc46b5 | ||
|
|
931b2dbe40 | ||
|
|
bb35b3eb87 | ||
|
|
01d481bc8d | ||
|
|
46b9a1d621 | ||
|
|
839c720e91 | ||
|
|
d88c6987a2 | ||
|
|
4ee950427a | ||
|
|
6c1822f186 | ||
|
|
c7c3ac872c | ||
|
|
c4ff280436 | ||
|
|
7fddd6e448 | ||
|
|
1260782bd2 | ||
|
|
333120b111 | ||
|
|
bb21403c9e | ||
|
|
ac972948b8 | ||
|
|
211392c4a4 | ||
|
|
7317de23a3 | ||
|
|
af916666a2 | ||
|
|
7de8930d1d | ||
|
|
200bcdcd9f | ||
|
|
b9a603e3aa | ||
|
|
1a825484fb | ||
|
|
a2a7b7d795 | ||
|
|
4122f6c308 | ||
|
|
6aeebc4c76 | ||
|
|
f44d704801 | ||
|
|
dbd02e54c2 | ||
|
|
a89642a6f9 | ||
|
|
c141148616 | ||
|
|
469a9aca06 | ||
|
|
98310b6612 | ||
|
|
ef9d6a870f | ||
|
|
35f46dac8c | ||
|
|
efa6339e18 | ||
|
|
1baebb423c | ||
|
|
462df8395e | ||
|
|
35f214d9db | ||
|
|
af0cbcb6a5 | ||
|
|
2e35426d11 | ||
|
|
bf10a29f06 | ||
|
|
a7e2d9bc40 | ||
|
|
a3777496b5 | ||
|
|
edf6aedc51 | ||
|
|
53845d0277 | ||
|
|
3d27483658 | ||
|
|
4f9bd0cb38 | ||
|
|
3f7f7de179 | ||
|
|
6461f5aacf | ||
|
|
339957b42c | ||
|
|
4391dc1a63 | ||
|
|
964e0e4e8a | ||
|
|
549dbf3390 | ||
|
|
70b2e15a25 | ||
|
|
bb249d612c | ||
|
|
17773bdb94 | ||
|
|
f30293359c | ||
|
|
0c85e6149f | ||
|
|
ec57d7af94 | ||
|
|
df791f729c | ||
|
|
c6ff3abecd | ||
|
|
eac13e3bd3 | ||
|
|
46ee3f3d9c | ||
|
|
5e1f0c5fbc | ||
|
|
c4f09b160a | ||
|
|
48c97e86dd | ||
|
|
416bc845ad | ||
|
|
408a78985a | ||
|
|
0c965c8115 | ||
|
|
f65e4b6940 | ||
|
|
a2d4424f92 | ||
|
|
997f839cd6 | ||
|
|
556fad50df | ||
|
|
bb4214b5c2 | ||
|
|
f17c1f3fe7 | ||
|
|
d4fe9a8ad4 | ||
|
|
2910aa29e4 | ||
|
|
89cc073ea8 | ||
|
|
aa86fdfe71 | ||
|
|
48e9ced781 | ||
|
|
7b02bd1f02 | ||
|
|
417fc2a735 | ||
|
|
317128f790 | ||
|
|
e3dfb09b10 | ||
|
|
d912654110 | ||
|
|
34ab4cf9be | ||
|
|
d597b80486 | ||
|
|
3f5ebe81b9 | ||
|
|
f52bd9287b | ||
|
|
f5baeeea1c | ||
|
|
3cc7044fb3 | ||
|
|
26c7675c28 | ||
|
|
8aaed0e29f | ||
|
|
5182e3c40c | ||
|
|
1e252c7664 | ||
|
|
05ef3b6e44 | ||
|
|
ad04012b63 | ||
|
|
c93cba4603 | ||
|
|
971669016f | ||
|
|
6c6f245914 | ||
|
|
b39eeb328c | ||
|
|
be94bf1f3c | ||
|
|
e24a952e98 | ||
|
|
89f20d12cf | ||
|
|
ebeb0f1154 | ||
|
|
d66fe214d9 | ||
|
|
75781503b8 | ||
|
|
9aff3ca274 | ||
|
|
7e2a08f3a5 | ||
|
|
a0e13561b1 | ||
|
|
7eedfcd274 | ||
|
|
da779ac77c | ||
|
|
adfa3226e3 | ||
|
|
e5e1a272ff | ||
|
|
d8e8a78368 | ||
|
|
7ae3de1fa0 | ||
|
|
72898c7211 | ||
|
|
fc1a14a0e3 | ||
|
|
f063e4e01c | ||
|
|
07372db906 | ||
|
|
48d04e8141 | ||
|
|
6234267242 | ||
|
|
1afbb87e99 | ||
|
|
d18a74ddb7 | ||
|
|
4d3c6d9c7c | ||
|
|
10f9724827 | ||
|
|
582faa129e | ||
|
|
4ec87a01e0 | ||
|
|
ff98685dd6 | ||
|
|
424f3d218a | ||
|
|
661623f9f7 | ||
|
|
49397b4d7b | ||
|
|
0553fd817c | ||
|
|
7ad971f720 | ||
|
|
f485c13035 | ||
|
|
c30b691164 | ||
|
|
d088d4493e | ||
|
|
770f804325 | ||
|
|
37a29073de | ||
|
|
17cd145f09 | ||
|
|
ac539fd5cf | ||
|
|
048553ddc3 | ||
|
|
dfe6b71fd9 | ||
|
|
18ee93ca3a | ||
|
|
cb4bc2d6e9 | ||
|
|
b0451806ef | ||
|
|
b514e4c249 | ||
|
|
8350dfead3 | ||
|
|
34e6edbb13 | ||
|
|
27be92903e | ||
|
|
9388030182 | ||
|
|
b7aee3f5a4 | ||
|
|
83ff38ab24 | ||
|
|
6603a44151 | ||
|
|
e69d4e7f14 | ||
|
|
506f65e880 | ||
|
|
41bb52762b | ||
|
|
8c98ef3e70 | ||
|
|
44d1e73b4f | ||
|
|
53794fbaba | ||
|
|
556b4043e9 | ||
|
|
424c636533 | ||
|
|
f63709260e | ||
|
|
991618dfc1 | ||
|
|
1af489b1cd | ||
|
|
a433c31d6e | ||
|
|
5814928e38 | ||
|
|
6130a6e1d0 | ||
|
|
7872f6a670 | ||
|
|
f230e418aa | ||
|
|
518eb73f88 | ||
|
|
5b6d21d7da | ||
|
|
410506f448 | ||
|
|
3cb44d37c0 | ||
|
|
f977ed7471 | ||
|
|
3f5617b569 | ||
|
|
fe9c875d32 | ||
|
|
23b16ad6d2 | ||
|
|
fdeccfaf24 | ||
|
|
fecde23da5 | ||
|
|
b1d931337e | ||
|
|
39542336b8 | ||
|
|
799588cada | ||
|
|
f392add4b8 | ||
|
|
49560bf2a2 | ||
|
|
44b3ed5ae9 | ||
|
|
6235145641 | ||
|
|
ff5cb7ba51 | ||
|
|
1e2b9ae962 | ||
|
|
8cab58d248 | ||
|
|
0d645c227f | ||
|
|
fb6c349677 | ||
|
|
eeb057085c | ||
|
|
121371f4a4 | ||
|
|
a32713198b | ||
|
|
a1b067c683 | ||
|
|
22c40a4766 | ||
|
|
bcf140b3c1 | ||
|
|
e3692a6a3d | ||
|
|
e7489383a2 | ||
|
|
70246c3f86 | ||
|
|
0796c84da5 | ||
|
|
718482fb02 | ||
|
|
a3fb66daa4 | ||
|
|
da34b80c26 | ||
|
|
ba5ab21140 | ||
|
|
65f41a1e36 | ||
|
|
0930c9c059 | ||
|
|
1d193a9ab9 | ||
|
|
3adc6dca61 | ||
|
|
36d9f841d6 | ||
|
|
48ad13de00 | ||
|
|
42935cce05 | ||
|
|
e77f1c3b0f | ||
|
|
388838aa99 | ||
|
|
d4d0990072 | ||
|
|
4210d17f14 | ||
|
|
fbd12e78c9 | ||
|
|
83d3421e72 | ||
|
|
8bcbf73aaa | ||
|
|
cc5f15885d | ||
|
|
20fdf55bf6 | ||
|
|
955dcec68b | ||
|
|
2b8564b16f | ||
|
|
57da3e51cd | ||
|
|
dede0e9747 | ||
|
|
35d2fc1158 | ||
|
|
c5267335a3 | ||
|
|
15c7b589c2 | ||
|
|
0ada5e8bf7 | ||
|
|
412ac8d1b9 | ||
|
|
5df501a281 | ||
|
|
3e4c61d020 | ||
|
|
cc39fe51b3 | ||
|
|
89cd24388d | ||
|
|
d5da0a8093 | ||
|
|
88ae1f8871 | ||
|
|
50b3d1deaa | ||
|
|
3b3def5b8a | ||
|
|
4f068a45ff | ||
|
|
23a9504a51 | ||
|
|
d0d4eba477 | ||
|
|
a3fab0b5a9 |
@@ -1,5 +1,5 @@
|
||||
[bumpversion]
|
||||
current_version = 1.7.0a1
|
||||
current_version = 1.8.0a1
|
||||
parse = (?P<major>[\d]+) # major version number
|
||||
\.(?P<minor>[\d]+) # minor version number
|
||||
\.(?P<patch>[\d]+) # patch version number
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
For information on prior major and minor releases, see their changelogs:
|
||||
|
||||
|
||||
* [1.7](https://github.com/dbt-labs/dbt-core/blob/1.7.latest/CHANGELOG.md)
|
||||
* [1.6](https://github.com/dbt-labs/dbt-core/blob/1.6.latest/CHANGELOG.md)
|
||||
* [1.5](https://github.com/dbt-labs/dbt-core/blob/1.5.latest/CHANGELOG.md)
|
||||
* [1.4](https://github.com/dbt-labs/dbt-core/blob/1.4.latest/CHANGELOG.md)
|
||||
|
||||
@@ -0,0 +1,6 @@
|
||||
kind: Breaking Changes
|
||||
body: Remove adapter.get_compiler interface
|
||||
time: 2023-11-27T11:47:57.443202-05:00
|
||||
custom:
|
||||
Author: michelleark
|
||||
Issue: "9148"
|
||||
@@ -0,0 +1,6 @@
|
||||
kind: Breaking Changes
|
||||
body: Move AdapterLogger to adapters folder
|
||||
time: 2023-11-28T13:43:56.853925-08:00
|
||||
custom:
|
||||
Author: colin-rogers-dbt
|
||||
Issue: "9151"
|
||||
@@ -0,0 +1,7 @@
|
||||
kind: Breaking Changes
|
||||
body: move event manager setup back to core, remove ref to global EVENT_MANAGER and
|
||||
clean up event manager functions
|
||||
time: 2023-11-30T13:53:48.645192-08:00
|
||||
custom:
|
||||
Author: colin-rogers-dbt
|
||||
Issue: "9150"
|
||||
6
.changes/unreleased/Dependencies-20231031-131954.yaml
Normal file
6
.changes/unreleased/Dependencies-20231031-131954.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Dependencies
|
||||
body: Begin using DSI 0.4.x
|
||||
time: 2023-10-31T13:19:54.750009-07:00
|
||||
custom:
|
||||
Author: QMalcolm peterallenwebb
|
||||
PR: "8892"
|
||||
6
.changes/unreleased/Dependencies-20231106-130051.yaml
Normal file
6
.changes/unreleased/Dependencies-20231106-130051.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Dependencies
|
||||
body: Update typing-extensions version to >=4.4
|
||||
time: 2023-11-06T13:00:51.062386-08:00
|
||||
custom:
|
||||
Author: tlento
|
||||
PR: "9012"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Docs
|
||||
body: Fix for column tests not rendering on quoted columns
|
||||
time: 2023-05-31T11:54:19.687363-04:00
|
||||
custom:
|
||||
Author: drewbanin
|
||||
Issue: "201"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Docs
|
||||
body: Remove static SQL codeblock for metrics
|
||||
time: 2023-07-18T19:24:22.155323+02:00
|
||||
custom:
|
||||
Author: marcodamore
|
||||
Issue: "436"
|
||||
6
.changes/unreleased/Docs-20231106-123157.yaml
Normal file
6
.changes/unreleased/Docs-20231106-123157.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Docs
|
||||
body: fix get_custom_database docstring
|
||||
time: 2023-11-06T12:31:57.525711Z
|
||||
custom:
|
||||
Author: LeoTheGriff
|
||||
Issue: "9003"
|
||||
6
.changes/unreleased/Features-20230915-123733.yaml
Normal file
6
.changes/unreleased/Features-20230915-123733.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Features
|
||||
body: 'Allow adapters to include package logs in dbt standard logging '
|
||||
time: 2023-09-15T12:37:33.862862-07:00
|
||||
custom:
|
||||
Author: colin-rogers-dbt
|
||||
Issue: "7859"
|
||||
6
.changes/unreleased/Features-20231017-143620.yaml
Normal file
6
.changes/unreleased/Features-20231017-143620.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Features
|
||||
body: Add drop_schema_named macro
|
||||
time: 2023-10-17T14:36:20.612289-07:00
|
||||
custom:
|
||||
Author: colin-rogers-dbt
|
||||
Issue: "8025"
|
||||
6
.changes/unreleased/Features-20231026-110821.yaml
Normal file
6
.changes/unreleased/Features-20231026-110821.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Features
|
||||
body: migrate utils to common and adapters folders
|
||||
time: 2023-10-26T11:08:21.458709-07:00
|
||||
custom:
|
||||
Author: colin-rogers-dbt
|
||||
Issue: "8924"
|
||||
6
.changes/unreleased/Features-20231026-123556.yaml
Normal file
6
.changes/unreleased/Features-20231026-123556.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Features
|
||||
body: Move Agate helper client into common
|
||||
time: 2023-10-26T12:35:56.538587-07:00
|
||||
custom:
|
||||
Author: MichelleArk
|
||||
Issue: "8926"
|
||||
6
.changes/unreleased/Features-20231026-123913.yaml
Normal file
6
.changes/unreleased/Features-20231026-123913.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Features
|
||||
body: remove usage of dbt.config.PartialProject from dbt/adapters
|
||||
time: 2023-10-26T12:39:13.904116-07:00
|
||||
custom:
|
||||
Author: MichelleArk
|
||||
Issue: "8928"
|
||||
6
.changes/unreleased/Features-20231031-132022.yaml
Normal file
6
.changes/unreleased/Features-20231031-132022.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Features
|
||||
body: Add exports to SavedQuery spec
|
||||
time: 2023-10-31T13:20:22.448158-07:00
|
||||
custom:
|
||||
Author: QMalcolm peterallenwebb
|
||||
Issue: "8892"
|
||||
6
.changes/unreleased/Features-20231107-135635.yaml
Normal file
6
.changes/unreleased/Features-20231107-135635.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Features
|
||||
body: Remove legacy logger
|
||||
time: 2023-11-07T13:56:35.186648-08:00
|
||||
custom:
|
||||
Author: colin-rogers-dbt
|
||||
Issue: "8027"
|
||||
6
.changes/unreleased/Features-20231110-154255.yaml
Normal file
6
.changes/unreleased/Features-20231110-154255.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Features
|
||||
body: Support setting export configs hierarchically via saved query and project configs
|
||||
time: 2023-11-10T15:42:55.042317-08:00
|
||||
custom:
|
||||
Author: QMalcolm
|
||||
Issue: "8956"
|
||||
@@ -1,6 +0,0 @@
|
||||
kind: Fixes
|
||||
body: Enable converting deprecation warnings to errors
|
||||
time: 2023-07-18T12:55:18.03914-04:00
|
||||
custom:
|
||||
Author: michelleark
|
||||
Issue: "8130"
|
||||
6
.changes/unreleased/Fixes-20231013-130943.yaml
Normal file
6
.changes/unreleased/Fixes-20231013-130943.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Fixes
|
||||
body: For packages installed with tarball method, fetch metadata to resolve nested dependencies
|
||||
time: 2023-10-13T13:09:43.188308-04:00
|
||||
custom:
|
||||
Author: adamlopez
|
||||
Issue: "8621"
|
||||
6
.changes/unreleased/Fixes-20231016-163953.yaml
Normal file
6
.changes/unreleased/Fixes-20231016-163953.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Fixes
|
||||
body: Fix partial parsing not working for semantic model change
|
||||
time: 2023-10-16T16:39:53.05058-07:00
|
||||
custom:
|
||||
Author: ChenyuLInx
|
||||
Issue: "8859"
|
||||
6
.changes/unreleased/Fixes-20231024-110151.yaml
Normal file
6
.changes/unreleased/Fixes-20231024-110151.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Fixes
|
||||
body: Handle unknown `type_code` for model contracts
|
||||
time: 2023-10-24T11:01:51.980781-06:00
|
||||
custom:
|
||||
Author: dbeatty10
|
||||
Issue: 8877 8353
|
||||
6
.changes/unreleased/Fixes-20231024-145504.yaml
Normal file
6
.changes/unreleased/Fixes-20231024-145504.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Fixes
|
||||
body: Add back contract enforcement for temporary tables on postgres
|
||||
time: 2023-10-24T14:55:04.051683-05:00
|
||||
custom:
|
||||
Author: emmyoop
|
||||
Issue: "8857"
|
||||
6
.changes/unreleased/Fixes-20231024-155400.yaml
Normal file
6
.changes/unreleased/Fixes-20231024-155400.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Fixes
|
||||
body: Rework get_catalog implementation to retain previous adapter interface semantics
|
||||
time: 2023-10-24T15:54:00.628086-04:00
|
||||
custom:
|
||||
Author: peterallenwebb
|
||||
Issue: "8846"
|
||||
6
.changes/unreleased/Fixes-20231026-002536.yaml
Normal file
6
.changes/unreleased/Fixes-20231026-002536.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Fixes
|
||||
body: Add version to fqn when version==0
|
||||
time: 2023-10-26T00:25:36.259356-05:00
|
||||
custom:
|
||||
Author: aranke
|
||||
Issue: "8836"
|
||||
6
.changes/unreleased/Fixes-20231030-093734.yaml
Normal file
6
.changes/unreleased/Fixes-20231030-093734.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Fixes
|
||||
body: Fix cased comparison in catalog-retrieval function.
|
||||
time: 2023-10-30T09:37:34.258612-04:00
|
||||
custom:
|
||||
Author: peterallenwebb
|
||||
Issue: "8939"
|
||||
6
.changes/unreleased/Fixes-20231031-005345.yaml
Normal file
6
.changes/unreleased/Fixes-20231031-005345.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Fixes
|
||||
body: Catalog queries now assign the correct type to materialized views
|
||||
time: 2023-10-31T00:53:45.486203-04:00
|
||||
custom:
|
||||
Author: mikealfare
|
||||
Issue: "8864"
|
||||
6
.changes/unreleased/Fixes-20231031-144837.yaml
Normal file
6
.changes/unreleased/Fixes-20231031-144837.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Fixes
|
||||
body: Fix compilation exception running empty seed file and support new Integer agate data_type
|
||||
time: 2023-10-31T14:48:37.774871-04:00
|
||||
custom:
|
||||
Author: gshank
|
||||
Issue: "8895"
|
||||
6
.changes/unreleased/Fixes-20231101-155824.yaml
Normal file
6
.changes/unreleased/Fixes-20231101-155824.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Fixes
|
||||
body: Make relation filtering None-tolerant for maximal flexibility across adapters.
|
||||
time: 2023-11-01T15:58:24.552054-04:00
|
||||
custom:
|
||||
Author: peterallenwebb
|
||||
Issue: "8974"
|
||||
7
.changes/unreleased/Fixes-20231106-155933.yaml
Normal file
7
.changes/unreleased/Fixes-20231106-155933.yaml
Normal file
@@ -0,0 +1,7 @@
|
||||
kind: Fixes
|
||||
body: Update run_results.json from previous versions of dbt to support deferral and
|
||||
rerun from failure
|
||||
time: 2023-11-06T15:59:33.677915-05:00
|
||||
custom:
|
||||
Author: jtcohen6 peterallenwebb
|
||||
Issue: "9010"
|
||||
6
.changes/unreleased/Fixes-20231107-092358.yaml
Normal file
6
.changes/unreleased/Fixes-20231107-092358.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Fixes
|
||||
body: Fix git repository with subdirectory for Deps
|
||||
time: 2023-11-07T09:23:58.214271-08:00
|
||||
custom:
|
||||
Author: ChenyuLInx
|
||||
Issue: "9000"
|
||||
7
.changes/unreleased/Fixes-20231107-094130.yaml
Normal file
7
.changes/unreleased/Fixes-20231107-094130.yaml
Normal file
@@ -0,0 +1,7 @@
|
||||
kind: Fixes
|
||||
body: Use MANIFEST.in to recursively include all jinja templates; fixes issue where
|
||||
some templates were not included in the distribution
|
||||
time: 2023-11-07T09:41:30.121733-05:00
|
||||
custom:
|
||||
Author: mikealfare
|
||||
Issue: "9016"
|
||||
6
.changes/unreleased/Fixes-20231113-114956.yaml
Normal file
6
.changes/unreleased/Fixes-20231113-114956.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Fixes
|
||||
body: Fix formatting of tarball information in packages-lock.yml
|
||||
time: 2023-11-13T11:49:56.437007-08:00
|
||||
custom:
|
||||
Author: ChenyuLInx QMalcolm
|
||||
Issue: "9062"
|
||||
6
.changes/unreleased/Fixes-20231127-154310.yaml
Normal file
6
.changes/unreleased/Fixes-20231127-154310.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Fixes
|
||||
body: 'deps: Lock git packages to commit SHA during resolution'
|
||||
time: 2023-11-27T15:43:10.122069+01:00
|
||||
custom:
|
||||
Author: jtcohen6
|
||||
Issue: "9050"
|
||||
6
.changes/unreleased/Fixes-20231127-154347.yaml
Normal file
6
.changes/unreleased/Fixes-20231127-154347.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Fixes
|
||||
body: 'deps: Use PackageRenderer to read package-lock.json'
|
||||
time: 2023-11-27T15:43:47.842423+01:00
|
||||
custom:
|
||||
Author: jtcohen6
|
||||
Issue: "9127"
|
||||
6
.changes/unreleased/Fixes-20231128-155225.yaml
Normal file
6
.changes/unreleased/Fixes-20231128-155225.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Fixes
|
||||
body: 'Get sources working again in dbt docs generate'
|
||||
time: 2023-11-28T15:52:25.738256Z
|
||||
custom:
|
||||
Author: aranke
|
||||
Issue: "9119"
|
||||
6
.changes/unreleased/Under the Hood-20230831-164435.yaml
Normal file
6
.changes/unreleased/Under the Hood-20230831-164435.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Under the Hood
|
||||
body: Added more type annotations.
|
||||
time: 2023-08-31T16:44:35.737954-04:00
|
||||
custom:
|
||||
Author: peterallenwebb
|
||||
Issue: "8537"
|
||||
6
.changes/unreleased/Under the Hood-20231026-184953.yaml
Normal file
6
.changes/unreleased/Under the Hood-20231026-184953.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Under the Hood
|
||||
body: Remove usage of dbt.include.global_project in dbt/adapters
|
||||
time: 2023-10-26T18:49:53.36449-04:00
|
||||
custom:
|
||||
Author: michelleark
|
||||
Issue: "8925"
|
||||
6
.changes/unreleased/Under the Hood-20231027-140048.yaml
Normal file
6
.changes/unreleased/Under the Hood-20231027-140048.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Under the Hood
|
||||
body: Add a no-op runner for Saved Qeury
|
||||
time: 2023-10-27T14:00:48.4755-07:00
|
||||
custom:
|
||||
Author: ChenyuLInx
|
||||
Issue: "8893"
|
||||
6
.changes/unreleased/Under the Hood-20231101-102758.yaml
Normal file
6
.changes/unreleased/Under the Hood-20231101-102758.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Under the Hood
|
||||
body: remove dbt.flags.MP_CONTEXT usage in dbt/adapters
|
||||
time: 2023-11-01T10:27:58.790153-04:00
|
||||
custom:
|
||||
Author: michelleark
|
||||
Issue: "8967"
|
||||
6
.changes/unreleased/Under the Hood-20231101-173124.yaml
Normal file
6
.changes/unreleased/Under the Hood-20231101-173124.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Under the Hood
|
||||
body: 'Remove usage of dbt.flags.LOG_CACHE_EVENTS in dbt/adapters'
|
||||
time: 2023-11-01T17:31:24.974093-04:00
|
||||
custom:
|
||||
Author: michelleark
|
||||
Issue: "8969"
|
||||
7
.changes/unreleased/Under the Hood-20231103-195222.yaml
Normal file
7
.changes/unreleased/Under the Hood-20231103-195222.yaml
Normal file
@@ -0,0 +1,7 @@
|
||||
kind: Under the Hood
|
||||
body: Move CatalogRelationTypes test case to the shared test suite to be reused by
|
||||
adapter maintainers
|
||||
time: 2023-11-03T19:52:22.694394-04:00
|
||||
custom:
|
||||
Author: mikealfare
|
||||
Issue: "8952"
|
||||
6
.changes/unreleased/Under the Hood-20231106-080422.yaml
Normal file
6
.changes/unreleased/Under the Hood-20231106-080422.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Under the Hood
|
||||
body: Treat SystemExit as an interrupt if raised during node execution.
|
||||
time: 2023-11-06T08:04:22.022179-05:00
|
||||
custom:
|
||||
Author: benmosher
|
||||
Issue: n/a
|
||||
6
.changes/unreleased/Under the Hood-20231106-105730.yaml
Normal file
6
.changes/unreleased/Under the Hood-20231106-105730.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Under the Hood
|
||||
body: Removing unused 'documentable'
|
||||
time: 2023-11-06T10:57:30.694056-08:00
|
||||
custom:
|
||||
Author: QMalcolm
|
||||
Issue: "8871"
|
||||
6
.changes/unreleased/Under the Hood-20231107-135728.yaml
Normal file
6
.changes/unreleased/Under the Hood-20231107-135728.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Under the Hood
|
||||
body: Remove use of dbt/core exceptions in dbt/adapter
|
||||
time: 2023-11-07T13:57:28.683727-08:00
|
||||
custom:
|
||||
Author: colin-rogers-dbt MichelleArk
|
||||
Issue: "8920"
|
||||
6
.changes/unreleased/Under the Hood-20231107-191546.yaml
Normal file
6
.changes/unreleased/Under the Hood-20231107-191546.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Under the Hood
|
||||
body: Cache dbt plugin modules to improve integration test performance
|
||||
time: 2023-11-07T19:15:46.170151-05:00
|
||||
custom:
|
||||
Author: peterallenwebb
|
||||
Issue: "9029"
|
||||
7
.changes/unreleased/Under the Hood-20231111-175350.yaml
Normal file
7
.changes/unreleased/Under the Hood-20231111-175350.yaml
Normal file
@@ -0,0 +1,7 @@
|
||||
kind: Under the Hood
|
||||
body: Fix test_current_timestamp_matches_utc test; allow for MacOS runner system clock
|
||||
variance
|
||||
time: 2023-11-11T17:53:50.098843-05:00
|
||||
custom:
|
||||
Author: mikealfare
|
||||
Issue: "9057"
|
||||
7
.changes/unreleased/Under the Hood-20231116-174251.yaml
Normal file
7
.changes/unreleased/Under the Hood-20231116-174251.yaml
Normal file
@@ -0,0 +1,7 @@
|
||||
kind: Under the Hood
|
||||
body: Remove usage of dbt.deprecations in dbt/adapters, enable core & adapter-specific
|
||||
event types and protos
|
||||
time: 2023-11-16T17:42:51.005023-05:00
|
||||
custom:
|
||||
Author: michelleark
|
||||
Issue: 8927 8918
|
||||
6
.changes/unreleased/Under the Hood-20231120-134735.yaml
Normal file
6
.changes/unreleased/Under the Hood-20231120-134735.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Under the Hood
|
||||
body: Clean up unused adaptor folders
|
||||
time: 2023-11-20T13:47:35.923794-08:00
|
||||
custom:
|
||||
Author: ChenyuLInx
|
||||
Issue: "9123"
|
||||
7
.changes/unreleased/Under the Hood-20231120-183214.yaml
Normal file
7
.changes/unreleased/Under the Hood-20231120-183214.yaml
Normal file
@@ -0,0 +1,7 @@
|
||||
kind: Under the Hood
|
||||
body: Move column constraints into common/contracts, removing another dependency of
|
||||
adapters on core.
|
||||
time: 2023-11-20T18:32:14.859503-05:00
|
||||
custom:
|
||||
Author: peterallenwebb
|
||||
Issue: "9024"
|
||||
6
.changes/unreleased/Under the Hood-20231128-170732.yaml
Normal file
6
.changes/unreleased/Under the Hood-20231128-170732.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Under the Hood
|
||||
body: Move dbt.semver to dbt.common.semver and update references.
|
||||
time: 2023-11-28T17:07:32.172421-08:00
|
||||
custom:
|
||||
Author: versusfacit
|
||||
Issue: "9039"
|
||||
6
.changes/unreleased/Under the Hood-20231130-135432.yaml
Normal file
6
.changes/unreleased/Under the Hood-20231130-135432.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Under the Hood
|
||||
body: Move lowercase utils method to common
|
||||
time: 2023-11-30T13:54:32.561673-08:00
|
||||
custom:
|
||||
Author: colin-rogers-dbt
|
||||
Issue: "9180"
|
||||
6
.changes/unreleased/Under the Hood-20231205-093544.yaml
Normal file
6
.changes/unreleased/Under the Hood-20231205-093544.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Under the Hood
|
||||
body: Remove usages of dbt.clients.jinja in dbt/adapters
|
||||
time: 2023-12-05T09:35:44.845352+09:00
|
||||
custom:
|
||||
Author: michelleark
|
||||
Issue: "9205"
|
||||
6
.changes/unreleased/Under the Hood-20231205-120559.yaml
Normal file
6
.changes/unreleased/Under the Hood-20231205-120559.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Under the Hood
|
||||
body: Remove usage of dbt.contracts in dbt/adapters
|
||||
time: 2023-12-05T12:05:59.936775+09:00
|
||||
custom:
|
||||
Author: michelleark
|
||||
Issue: "9208"
|
||||
6
.changes/unreleased/Under the Hood-20231205-165812.yaml
Normal file
6
.changes/unreleased/Under the Hood-20231205-165812.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Under the Hood
|
||||
body: Remove usage of dbt.contracts.graph.nodes.ResultNode in dbt/adapters
|
||||
time: 2023-12-05T16:58:12.932172+09:00
|
||||
custom:
|
||||
Author: michelleark
|
||||
Issue: "9214"
|
||||
6
.changes/unreleased/Under the Hood-20231205-170725.yaml
Normal file
6
.changes/unreleased/Under the Hood-20231205-170725.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
kind: Under the Hood
|
||||
body: Introduce RelationConfig Protocol, consolidate Relation.create_from
|
||||
time: 2023-12-05T17:07:25.33861+09:00
|
||||
custom:
|
||||
Author: michelleark
|
||||
Issue: "9215"
|
||||
2
.flake8
2
.flake8
@@ -10,3 +10,5 @@ ignore =
|
||||
E741
|
||||
E501 # long line checking is done in black
|
||||
exclude = test/
|
||||
per-file-ignores =
|
||||
*/__init__.py: F401
|
||||
|
||||
2
.gitattributes
vendored
2
.gitattributes
vendored
@@ -1,4 +1,4 @@
|
||||
core/dbt/include/index.html binary
|
||||
core/dbt/task/docs/index.html binary
|
||||
tests/functional/artifacts/data/state/*/manifest.json binary
|
||||
core/dbt/docs/build/html/searchindex.js binary
|
||||
core/dbt/docs/build/html/index.html binary
|
||||
|
||||
19
.github/CODEOWNERS
vendored
19
.github/CODEOWNERS
vendored
@@ -13,23 +13,6 @@
|
||||
# the core team as a whole will be assigned
|
||||
* @dbt-labs/core-team
|
||||
|
||||
### OSS Tooling Guild
|
||||
|
||||
/.github/ @dbt-labs/guild-oss-tooling
|
||||
.bumpversion.cfg @dbt-labs/guild-oss-tooling
|
||||
|
||||
.changie.yaml @dbt-labs/guild-oss-tooling
|
||||
|
||||
pre-commit-config.yaml @dbt-labs/guild-oss-tooling
|
||||
pytest.ini @dbt-labs/guild-oss-tooling
|
||||
tox.ini @dbt-labs/guild-oss-tooling
|
||||
|
||||
pyproject.toml @dbt-labs/guild-oss-tooling
|
||||
requirements.txt @dbt-labs/guild-oss-tooling
|
||||
dev_requirements.txt @dbt-labs/guild-oss-tooling
|
||||
/core/setup.py @dbt-labs/guild-oss-tooling
|
||||
/core/MANIFEST.in @dbt-labs/guild-oss-tooling
|
||||
|
||||
### ADAPTERS
|
||||
|
||||
# Adapter interface ("base" + "sql" adapter defaults, cache)
|
||||
@@ -40,7 +23,7 @@ dev_requirements.txt @dbt-labs/guild-oss-tooling
|
||||
|
||||
# Postgres plugin
|
||||
/plugins/ @dbt-labs/core-adapters
|
||||
/plugins/postgres/setup.py @dbt-labs/core-adapters @dbt-labs/guild-oss-tooling
|
||||
/plugins/postgres/setup.py @dbt-labs/core-adapters
|
||||
|
||||
# Functional tests for adapter plugins
|
||||
/tests/adapter @dbt-labs/core-adapters
|
||||
|
||||
58
.github/ISSUE_TEMPLATE/implementation-ticket.yml
vendored
Normal file
58
.github/ISSUE_TEMPLATE/implementation-ticket.yml
vendored
Normal file
@@ -0,0 +1,58 @@
|
||||
name: 🛠️ Implementation
|
||||
description: This is an implementation ticket intended for use by the maintainers of dbt-core
|
||||
title: "[<project>] <title>"
|
||||
labels: ["user docs"]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: This is an implementation ticket intended for use by the maintainers of dbt-core
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: Housekeeping
|
||||
description: >
|
||||
A couple friendly reminders:
|
||||
1. Remove the `user docs` label if the scope of this work does not require changes to https://docs.getdbt.com/docs: no end-user interface (e.g. yml spec, CLI, error messages, etc) or functional changes
|
||||
2. Link any blocking issues in the "Blocked on" field under the "Core devs & maintainers" project.
|
||||
options:
|
||||
- label: I am a maintainer of dbt-core
|
||||
required: true
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Short description
|
||||
description: |
|
||||
Describe the scope of the ticket, a high-level implementation approach and any tradeoffs to consider
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Acceptance criteria
|
||||
description: |
|
||||
What is the definition of done for this ticket? Include any relevant edge cases and/or test cases
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Impact to Other Teams
|
||||
description: |
|
||||
Will this change impact other teams? Include details of the kinds of changes required (new tests, code changes, related tickets) and _add the relevant `Impact:[team]` label_.
|
||||
placeholder: |
|
||||
Example: This change impacts `dbt-redshift` because the tests will need to be modified. The `Impact:[Adapter]` label has been added.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Will backports be required?
|
||||
description: |
|
||||
Will this change need to be backported to previous versions? Add details, possible blockers to backporting and _add the relevant backport labels `backport 1.x.latest`_
|
||||
placeholder: |
|
||||
Example: Backport to 1.6.latest, 1.5.latest and 1.4.latest. Since 1.4 isn't using click, the backport may be complicated. The `backport 1.6.latest`, `backport 1.5.latest` and `backport 1.4.latest` labels have been added.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Context
|
||||
description: |
|
||||
Provide the "why", motivation, and alternative approaches considered -- linking to previous refinement issues, spikes, Notion docs as appropriate
|
||||
validations:
|
||||
validations:
|
||||
required: false
|
||||
7
.github/dependabot.yml
vendored
7
.github/dependabot.yml
vendored
@@ -28,3 +28,10 @@ updates:
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
rebase-strategy: "disabled"
|
||||
|
||||
# github dependencies
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
rebase-strategy: "disabled"
|
||||
|
||||
10
.github/pull_request_template.md
vendored
10
.github/pull_request_template.md
vendored
@@ -1,15 +1,12 @@
|
||||
resolves #
|
||||
[docs](https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose) dbt-labs/docs.getdbt.com/#
|
||||
resolves #
|
||||
|
||||
<!---
|
||||
Include the number of the issue addressed by this PR above if applicable.
|
||||
PRs for code changes without an associated issue *will not be merged*.
|
||||
See CONTRIBUTING.md for more information.
|
||||
|
||||
Include the number of the docs issue that was opened for this PR. If
|
||||
this change has no user-facing implications, "N/A" suffices instead. New
|
||||
docs tickets can be created by clicking the link above or by going to
|
||||
https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose.
|
||||
Add the `user docs` label to this PR if it will need docs changes. An
|
||||
issue will get opened in docs.getdbt.com upon successful merge of this PR.
|
||||
-->
|
||||
|
||||
### Problem
|
||||
@@ -33,3 +30,4 @@ resolves #
|
||||
- [ ] I have run this code in development and it appears to resolve the stated issue
|
||||
- [ ] This PR includes tests, or tests are not required/relevant for this PR
|
||||
- [ ] This PR has no interface changes (e.g. macros, cli, logs, json artifacts, config files, adapter interface, etc) or this PR has already received feedback and approval from Product or DX
|
||||
- [ ] This PR includes [type annotations](https://docs.python.org/3/library/typing.html) for new and modified functions
|
||||
|
||||
8
.github/workflows/changelog-existence.yml
vendored
8
.github/workflows/changelog-existence.yml
vendored
@@ -2,10 +2,8 @@
|
||||
# Checks that a file has been committed under the /.changes directory
|
||||
# as a new CHANGELOG entry. Cannot check for a specific filename as
|
||||
# it is dynamically generated by change type and timestamp.
|
||||
# This workflow should not require any secrets since it runs for PRs
|
||||
# from forked repos.
|
||||
# By default, secrets are not passed to workflows running from
|
||||
# a forked repo.
|
||||
# This workflow runs on pull_request_target because it requires
|
||||
# secrets to post comments.
|
||||
|
||||
# **why?**
|
||||
# Ensure code change gets reflected in the CHANGELOG.
|
||||
@@ -19,7 +17,7 @@
|
||||
name: Check Changelog Entry
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
pull_request_target:
|
||||
types: [opened, reopened, labeled, unlabeled, synchronize]
|
||||
workflow_dispatch:
|
||||
|
||||
|
||||
43
.github/workflows/docs-issue.yml
vendored
Normal file
43
.github/workflows/docs-issue.yml
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
# **what?**
|
||||
# Open an issue in docs.getdbt.com when a PR is labeled `user docs`
|
||||
|
||||
# **why?**
|
||||
# To reduce barriers for keeping docs up to date
|
||||
|
||||
# **when?**
|
||||
# When a PR is labeled `user docs` and is merged. Runs on pull_request_target to run off the workflow already merged,
|
||||
# not the workflow that existed on the PR branch. This allows old PRs to get comments.
|
||||
|
||||
|
||||
name: Open issues in docs.getdbt.com repo when a PR is labeled
|
||||
run-name: "Open an issue in docs.getdbt.com for PR #${{ github.event.pull_request.number }}"
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
types: [labeled, closed]
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
permissions:
|
||||
issues: write # opens new issues
|
||||
pull-requests: write # comments on PRs
|
||||
|
||||
|
||||
jobs:
|
||||
open_issues:
|
||||
# we only want to run this when the PR has been merged or the label in the labeled event is `user docs`. Otherwise it runs the
|
||||
# risk of duplicaton of issues being created due to merge and label both triggering this workflow to run and neither having
|
||||
# generating the comment before the other runs. This lives here instead of the shared workflow because this is where we
|
||||
# decide if it should run or not.
|
||||
if: |
|
||||
(github.event.pull_request.merged == true) &&
|
||||
((github.event.action == 'closed' && contains( github.event.pull_request.labels.*.name, 'user docs')) ||
|
||||
(github.event.action == 'labeled' && github.event.label.name == 'user docs'))
|
||||
uses: dbt-labs/actions/.github/workflows/open-issue-in-repo.yml@main
|
||||
with:
|
||||
issue_repository: "dbt-labs/docs.getdbt.com"
|
||||
issue_title: "Docs Changes Needed from ${{ github.event.repository.name }} PR #${{ github.event.pull_request.number }}"
|
||||
issue_body: "At a minimum, update body to include a link to the page on docs.getdbt.com requiring updates and what part(s) of the page you would like to see updated."
|
||||
secrets: inherit
|
||||
84
.github/workflows/main.yml
vendored
84
.github/workflows/main.yml
vendored
@@ -33,6 +33,11 @@ defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
# top-level adjustments can be made here
|
||||
env:
|
||||
# number of parallel processes to spawn for python integration testing
|
||||
PYTHON_INTEGRATION_TEST_WORKERS: 5
|
||||
|
||||
jobs:
|
||||
code-quality:
|
||||
name: code-quality
|
||||
@@ -103,26 +108,59 @@ jobs:
|
||||
- name: Upload Unit Test Coverage to Codecov
|
||||
if: ${{ matrix.python-version == '3.11' }}
|
||||
uses: codecov/codecov-action@v3
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
flags: unit
|
||||
|
||||
integration-metadata:
|
||||
name: integration test metadata generation
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
split-groups: ${{ steps.generate-split-groups.outputs.split-groups }}
|
||||
include: ${{ steps.generate-include.outputs.include }}
|
||||
|
||||
steps:
|
||||
- name: generate split-groups
|
||||
id: generate-split-groups
|
||||
run: |
|
||||
MATRIX_JSON="["
|
||||
for B in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
|
||||
MATRIX_JSON+=$(sed 's/^/"/;s/$/"/' <<< "${B}")
|
||||
done
|
||||
MATRIX_JSON="${MATRIX_JSON//\"\"/\", \"}"
|
||||
MATRIX_JSON+="]"
|
||||
echo "split-groups=${MATRIX_JSON}"
|
||||
echo "split-groups=${MATRIX_JSON}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: generate include
|
||||
id: generate-include
|
||||
run: |
|
||||
INCLUDE=('"python-version":"3.8","os":"windows-latest"' '"python-version":"3.8","os":"macos-latest"' )
|
||||
INCLUDE_GROUPS="["
|
||||
for include in ${INCLUDE[@]}; do
|
||||
for group in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
|
||||
INCLUDE_GROUPS+=$(sed 's/$/, /' <<< "{\"split-group\":\"${group}\",${include}}")
|
||||
done
|
||||
done
|
||||
INCLUDE_GROUPS=$(echo $INCLUDE_GROUPS | sed 's/,*$//g')
|
||||
INCLUDE_GROUPS+="]"
|
||||
echo "include=${INCLUDE_GROUPS}"
|
||||
echo "include=${INCLUDE_GROUPS}" >> $GITHUB_OUTPUT
|
||||
|
||||
integration:
|
||||
name: integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}
|
||||
name: (${{ matrix.split-group }}) integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
timeout-minutes: 60
|
||||
|
||||
timeout-minutes: 30
|
||||
needs:
|
||||
- integration-metadata
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
python-version: ["3.8", "3.9", "3.10", "3.11"]
|
||||
os: [ubuntu-20.04]
|
||||
include:
|
||||
- python-version: 3.8
|
||||
os: windows-latest
|
||||
- python-version: 3.8
|
||||
os: macos-latest
|
||||
|
||||
split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
|
||||
include: ${{ fromJson(needs.integration-metadata.outputs.include) }}
|
||||
env:
|
||||
TOXENV: integration
|
||||
DBT_INVOCATION_ENV: github-actions
|
||||
@@ -165,6 +203,8 @@ jobs:
|
||||
|
||||
- name: Run tests
|
||||
run: tox -- --ddtrace
|
||||
env:
|
||||
PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}
|
||||
|
||||
- name: Get current date
|
||||
if: always()
|
||||
@@ -182,8 +222,26 @@ jobs:
|
||||
- name: Upload Integration Test Coverage to Codecov
|
||||
if: ${{ matrix.python-version == '3.11' }}
|
||||
uses: codecov/codecov-action@v3
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
flags: integration
|
||||
|
||||
integration-report:
|
||||
if: ${{ always() }}
|
||||
name: Integration Test Suite
|
||||
runs-on: ubuntu-latest
|
||||
needs: integration
|
||||
steps:
|
||||
- name: "Integration Tests Failed"
|
||||
if: ${{ contains(needs.integration.result, 'failure') || contains(needs.integration.result, 'cancelled') }}
|
||||
# when this is true the next step won't execute
|
||||
run: |
|
||||
echo "::notice title='Integration test suite failed'"
|
||||
exit 1
|
||||
|
||||
- name: "Integration Tests Passed"
|
||||
run: |
|
||||
echo "::notice title='Integration test suite passed'"
|
||||
|
||||
build:
|
||||
name: build packages
|
||||
|
||||
6
.github/workflows/release-docker.yml
vendored
6
.github/workflows/release-docker.yml
vendored
@@ -83,7 +83,7 @@ jobs:
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Build and push MAJOR.MINOR.PATCH tag
|
||||
uses: docker/build-push-action@v4
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
file: docker/Dockerfile
|
||||
push: True
|
||||
@@ -94,7 +94,7 @@ jobs:
|
||||
ghcr.io/dbt-labs/${{ github.event.inputs.package }}:${{ github.event.inputs.version_number }}
|
||||
|
||||
- name: Build and push MINOR.latest tag
|
||||
uses: docker/build-push-action@v4
|
||||
uses: docker/build-push-action@v5
|
||||
if: ${{ needs.get_version_meta.outputs.minor_latest == 'True' }}
|
||||
with:
|
||||
file: docker/Dockerfile
|
||||
@@ -106,7 +106,7 @@ jobs:
|
||||
ghcr.io/dbt-labs/${{ github.event.inputs.package }}:${{ needs.get_version_meta.outputs.major }}.${{ needs.get_version_meta.outputs.minor }}.latest
|
||||
|
||||
- name: Build and push latest tag
|
||||
uses: docker/build-push-action@v4
|
||||
uses: docker/build-push-action@v5
|
||||
if: ${{ needs.get_version_meta.outputs.latest == 'True' }}
|
||||
with:
|
||||
file: docker/Dockerfile
|
||||
|
||||
30
.github/workflows/repository-cleanup.yml
vendored
Normal file
30
.github/workflows/repository-cleanup.yml
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
# **what?**
|
||||
# Cleanup branches left over from automation and testing. Also cleanup
|
||||
# draft releases from release testing.
|
||||
|
||||
# **why?**
|
||||
# The automations are leaving behind branches and releases that clutter
|
||||
# the repository. Sometimes we need them to debug processes so we don't
|
||||
# want them immediately deleted. Running on Saturday to avoid running
|
||||
# at the same time as an actual release to prevent breaking a release
|
||||
# mid-release.
|
||||
|
||||
# **when?**
|
||||
# Mainly on a schedule of 12:00 Saturday.
|
||||
# Manual trigger can also run on demand
|
||||
|
||||
name: Repository Cleanup
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '0 12 * * SAT' # At 12:00 on Saturday - details in `why` above
|
||||
|
||||
workflow_dispatch: # for manual triggering
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
cleanup-repo:
|
||||
uses: dbt-labs/actions/.github/workflows/repository-cleanup.yml@main
|
||||
secrets: inherit
|
||||
@@ -18,11 +18,41 @@ on:
|
||||
|
||||
permissions: read-all
|
||||
|
||||
# top-level adjustments can be made here
|
||||
env:
|
||||
# number of parallel processes to spawn for python testing
|
||||
PYTHON_INTEGRATION_TEST_WORKERS: 5
|
||||
|
||||
jobs:
|
||||
integration-metadata:
|
||||
name: integration test metadata generation
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
split-groups: ${{ steps.generate-split-groups.outputs.split-groups }}
|
||||
|
||||
steps:
|
||||
- name: generate split-groups
|
||||
id: generate-split-groups
|
||||
run: |
|
||||
MATRIX_JSON="["
|
||||
for B in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
|
||||
MATRIX_JSON+=$(sed 's/^/"/;s/$/"/' <<< "${B}")
|
||||
done
|
||||
MATRIX_JSON="${MATRIX_JSON//\"\"/\", \"}"
|
||||
MATRIX_JSON+="]"
|
||||
echo "split-groups=${MATRIX_JSON}" >> $GITHUB_OUTPUT
|
||||
|
||||
# run the performance measurements on the current or default branch
|
||||
test-schema:
|
||||
name: Test Log Schema
|
||||
runs-on: ubuntu-20.04
|
||||
timeout-minutes: 30
|
||||
needs:
|
||||
- integration-metadata
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
|
||||
env:
|
||||
# turns warnings into errors
|
||||
RUSTFLAGS: "-D warnings"
|
||||
@@ -65,3 +95,14 @@ jobs:
|
||||
# we actually care if these pass, because the normal test run doesn't usually include many json log outputs
|
||||
- name: Run integration tests
|
||||
run: tox -e integration -- -nauto
|
||||
env:
|
||||
PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}
|
||||
|
||||
test-schema-report:
|
||||
name: Log Schema Test Suite
|
||||
runs-on: ubuntu-latest
|
||||
needs: test-schema
|
||||
steps:
|
||||
- name: "[Notification] Log test suite passes"
|
||||
run: |
|
||||
echo "::notice title="Log test suite passes""
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# Configuration for pre-commit hooks (see https://pre-commit.com/).
|
||||
# Eventually the hooks described here will be run as tests before merging each PR.
|
||||
|
||||
exclude: ^(core/dbt/docs/build/|core/dbt/events/types_pb2.py)
|
||||
exclude: ^(core/dbt/docs/build/|core/dbt/common/events/types_pb2.py|core/dbt/events/core_types_pb2.py|core/dbt/adapters/events/adapter_types_pb2.py)
|
||||
|
||||
# Force all unspecified python hooks to run python 3.8
|
||||
default_language_version:
|
||||
@@ -37,7 +37,7 @@ repos:
|
||||
alias: flake8-check
|
||||
stages: [manual]
|
||||
- repo: https://github.com/pre-commit/mirrors-mypy
|
||||
rev: v1.3.0
|
||||
rev: v1.4.1
|
||||
hooks:
|
||||
- id: mypy
|
||||
# N.B.: Mypy is... a bit fragile.
|
||||
|
||||
@@ -26,7 +26,7 @@ Legacy tests are found in the 'test' directory:
|
||||
|
||||
The "tasks" map to top-level dbt commands. So `dbt run` => task.run.RunTask, etc. Some are more like abstract base classes (GraphRunnableTask, for example) but all the concrete types outside of task should map to tasks. Currently one executes at a time. The tasks kick off their “Runners” and those do execute in parallel. The parallelism is managed via a thread pool, in GraphRunnableTask.
|
||||
|
||||
core/dbt/include/index.html
|
||||
core/dbt/task/docs/index.html
|
||||
This is the docs website code. It comes from the dbt-docs repository, and is generated when a release is packaged.
|
||||
|
||||
## Adapters
|
||||
|
||||
@@ -10,6 +10,7 @@
|
||||
For information on prior major and minor releases, see their changelogs:
|
||||
|
||||
|
||||
* [1.7](https://github.com/dbt-labs/dbt-core/blob/1.7.latest/CHANGELOG.md)
|
||||
* [1.6](https://github.com/dbt-labs/dbt-core/blob/1.6.latest/CHANGELOG.md)
|
||||
* [1.5](https://github.com/dbt-labs/dbt-core/blob/1.5.latest/CHANGELOG.md)
|
||||
* [1.4](https://github.com/dbt-labs/dbt-core/blob/1.4.latest/CHANGELOG.md)
|
||||
|
||||
11
Makefile
11
Makefile
@@ -40,7 +40,16 @@ dev: dev_req ## Installs dbt-* packages in develop mode along with development d
|
||||
|
||||
.PHONY: proto_types
|
||||
proto_types: ## generates google protobuf python file from types.proto
|
||||
protoc -I=./core/dbt/events --python_out=./core/dbt/events ./core/dbt/events/types.proto
|
||||
protoc -I=./core/dbt/common/events --python_out=./core/dbt/common/events ./core/dbt/common/events/types.proto
|
||||
|
||||
.PHONY: core_proto_types
|
||||
core_proto_types: ## generates google protobuf python file from core_types.proto
|
||||
protoc -I=./core/dbt/events --python_out=./core/dbt/events ./core/dbt/events/core_types.proto
|
||||
|
||||
.PHONY: adapter_proto_types
|
||||
adapter_proto_types: ## generates google protobuf python file from core_types.proto
|
||||
protoc -I=./core/dbt/adapters/events --python_out=./core/dbt/adapters/events ./core/dbt/adapters/events/adapter_types.proto
|
||||
|
||||
|
||||
.PHONY: mypy
|
||||
mypy: .env ## Runs mypy against staged changes for static type checking.
|
||||
|
||||
13
codecov.yml
13
codecov.yml
@@ -0,0 +1,13 @@
|
||||
ignore:
|
||||
- ".github"
|
||||
- ".changes"
|
||||
coverage:
|
||||
status:
|
||||
project:
|
||||
default:
|
||||
target: auto
|
||||
threshold: 0.1% # Reduce noise by ignoring rounding errors in coverage drops
|
||||
patch:
|
||||
default:
|
||||
target: auto
|
||||
threshold: 80%
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# these are all just exports, #noqa them so flake8 will be happy
|
||||
|
||||
# TODO: Should we still include this in the `adapters` namespace?
|
||||
from dbt.contracts.connection import Credentials # noqa: F401
|
||||
from dbt.adapters.contracts.connection import Credentials # noqa: F401
|
||||
from dbt.adapters.base.meta import available # noqa: F401
|
||||
from dbt.adapters.base.connections import BaseConnectionManager # noqa: F401
|
||||
from dbt.adapters.base.relation import ( # noqa: F401
|
||||
|
||||
@@ -2,17 +2,17 @@ from dataclasses import dataclass
|
||||
import re
|
||||
from typing import Dict, ClassVar, Any, Optional
|
||||
|
||||
from dbt.exceptions import DbtRuntimeError
|
||||
from dbt.common.exceptions import DbtRuntimeError
|
||||
|
||||
|
||||
@dataclass
|
||||
class Column:
|
||||
# Note: This is automatically used by contract code
|
||||
# No-op conversions (INTEGER => INT) have been removed.
|
||||
# Any adapter that wants to take advantage of "translate_type"
|
||||
# should create a ClassVar with the appropriate conversions.
|
||||
TYPE_LABELS: ClassVar[Dict[str, str]] = {
|
||||
"STRING": "TEXT",
|
||||
"TIMESTAMP": "TIMESTAMP",
|
||||
"FLOAT": "FLOAT",
|
||||
"INTEGER": "INT",
|
||||
"BOOLEAN": "BOOLEAN",
|
||||
}
|
||||
column: str
|
||||
dtype: str
|
||||
|
||||
@@ -6,6 +6,7 @@ import traceback
|
||||
|
||||
# multiprocessing.RLock is a function returning this type
|
||||
from multiprocessing.synchronize import RLock
|
||||
from multiprocessing.context import SpawnContext
|
||||
from threading import get_ident
|
||||
from typing import (
|
||||
Any,
|
||||
@@ -23,8 +24,9 @@ from typing import (
|
||||
|
||||
import agate
|
||||
|
||||
import dbt.exceptions
|
||||
from dbt.contracts.connection import (
|
||||
import dbt.adapters.exceptions
|
||||
import dbt.common.exceptions.base
|
||||
from dbt.adapters.contracts.connection import (
|
||||
Connection,
|
||||
Identifier,
|
||||
ConnectionState,
|
||||
@@ -36,9 +38,9 @@ from dbt.contracts.graph.manifest import Manifest
|
||||
from dbt.adapters.base.query_headers import (
|
||||
MacroQueryStringSetter,
|
||||
)
|
||||
from dbt.events import AdapterLogger
|
||||
from dbt.events.functions import fire_event
|
||||
from dbt.events.types import (
|
||||
from dbt.adapters.events.logging import AdapterLogger
|
||||
from dbt.common.events.functions import fire_event
|
||||
from dbt.adapters.events.types import (
|
||||
NewConnection,
|
||||
ConnectionReused,
|
||||
ConnectionLeftOpenInCleanup,
|
||||
@@ -48,9 +50,8 @@ from dbt.events.types import (
|
||||
Rollback,
|
||||
RollbackFailed,
|
||||
)
|
||||
from dbt.events.contextvars import get_node_info
|
||||
from dbt import flags
|
||||
from dbt.utils import cast_to_str
|
||||
from dbt.common.events.contextvars import get_node_info
|
||||
from dbt.common.utils import cast_to_str
|
||||
|
||||
SleepTime = Union[int, float] # As taken by time.sleep.
|
||||
AdapterHandle = Any # Adapter connection handle objects can be any class.
|
||||
@@ -72,10 +73,10 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
||||
|
||||
TYPE: str = NotImplemented
|
||||
|
||||
def __init__(self, profile: AdapterRequiredConfig):
|
||||
def __init__(self, profile: AdapterRequiredConfig, mp_context: SpawnContext) -> None:
|
||||
self.profile = profile
|
||||
self.thread_connections: Dict[Hashable, Connection] = {}
|
||||
self.lock: RLock = flags.MP_CONTEXT.RLock()
|
||||
self.lock: RLock = mp_context.RLock()
|
||||
self.query_header: Optional[MacroQueryStringSetter] = None
|
||||
|
||||
def set_query_header(self, manifest: Manifest) -> None:
|
||||
@@ -91,13 +92,15 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
||||
key = self.get_thread_identifier()
|
||||
with self.lock:
|
||||
if key not in self.thread_connections:
|
||||
raise dbt.exceptions.InvalidConnectionError(key, list(self.thread_connections))
|
||||
raise dbt.adapters.exceptions.InvalidConnectionError(
|
||||
key, list(self.thread_connections)
|
||||
)
|
||||
return self.thread_connections[key]
|
||||
|
||||
def set_thread_connection(self, conn: Connection) -> None:
|
||||
key = self.get_thread_identifier()
|
||||
if key in self.thread_connections:
|
||||
raise dbt.exceptions.DbtInternalError(
|
||||
raise dbt.common.exceptions.DbtInternalError(
|
||||
"In set_thread_connection, existing connection exists for {}"
|
||||
)
|
||||
self.thread_connections[key] = conn
|
||||
@@ -137,13 +140,13 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
||||
:return: A context manager that handles exceptions raised by the
|
||||
underlying database.
|
||||
"""
|
||||
raise dbt.exceptions.NotImplementedError(
|
||||
raise dbt.common.exceptions.base.NotImplementedError(
|
||||
"`exception_handler` is not implemented for this adapter!"
|
||||
)
|
||||
|
||||
def set_connection_name(self, name: Optional[str] = None) -> Connection:
|
||||
"""Called by 'acquire_connection' in BaseAdapter, which is called by
|
||||
'connection_named', called by 'connection_for(node)'.
|
||||
'connection_named'.
|
||||
Creates a connection for this thread if one doesn't already
|
||||
exist, and will rename an existing connection."""
|
||||
|
||||
@@ -220,14 +223,14 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
||||
:param int _attempts: Parameter used to keep track of the number of attempts in calling the
|
||||
connect function across recursive calls. Passed as an argument to retry_timeout if it
|
||||
is a Callable. This parameter should not be set by the initial caller.
|
||||
:raises dbt.exceptions.FailedToConnectError: Upon exhausting all retry attempts without
|
||||
:raises dbt.adapters.exceptions.FailedToConnectError: Upon exhausting all retry attempts without
|
||||
successfully acquiring a handle.
|
||||
:return: The given connection with its appropriate state and handle attributes set
|
||||
depending on whether we successfully acquired a handle or not.
|
||||
"""
|
||||
timeout = retry_timeout(_attempts) if callable(retry_timeout) else retry_timeout
|
||||
if timeout < 0:
|
||||
raise dbt.exceptions.FailedToConnectError(
|
||||
raise dbt.adapters.exceptions.FailedToConnectError(
|
||||
"retry_timeout cannot be negative or return a negative time."
|
||||
)
|
||||
|
||||
@@ -235,7 +238,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
||||
# This guard is not perfect others may add to the recursion limit (e.g. built-ins).
|
||||
connection.handle = None
|
||||
connection.state = ConnectionState.FAIL
|
||||
raise dbt.exceptions.FailedToConnectError("retry_limit cannot be negative")
|
||||
raise dbt.adapters.exceptions.FailedToConnectError("retry_limit cannot be negative")
|
||||
|
||||
try:
|
||||
connection.handle = connect()
|
||||
@@ -246,7 +249,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
||||
if retry_limit <= 0:
|
||||
connection.handle = None
|
||||
connection.state = ConnectionState.FAIL
|
||||
raise dbt.exceptions.FailedToConnectError(str(e))
|
||||
raise dbt.adapters.exceptions.FailedToConnectError(str(e))
|
||||
|
||||
logger.debug(
|
||||
f"Got a retryable error when attempting to open a {cls.TYPE} connection.\n"
|
||||
@@ -268,12 +271,12 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
||||
except Exception as e:
|
||||
connection.handle = None
|
||||
connection.state = ConnectionState.FAIL
|
||||
raise dbt.exceptions.FailedToConnectError(str(e))
|
||||
raise dbt.adapters.exceptions.FailedToConnectError(str(e))
|
||||
|
||||
@abc.abstractmethod
|
||||
def cancel_open(self) -> Optional[List[str]]:
|
||||
"""Cancel all open connections on the adapter. (passable)"""
|
||||
raise dbt.exceptions.NotImplementedError(
|
||||
raise dbt.common.exceptions.base.NotImplementedError(
|
||||
"`cancel_open` is not implemented for this adapter!"
|
||||
)
|
||||
|
||||
@@ -288,7 +291,9 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
||||
This should be thread-safe, or hold the lock if necessary. The given
|
||||
connection should not be in either in_use or available.
|
||||
"""
|
||||
raise dbt.exceptions.NotImplementedError("`open` is not implemented for this adapter!")
|
||||
raise dbt.common.exceptions.base.NotImplementedError(
|
||||
"`open` is not implemented for this adapter!"
|
||||
)
|
||||
|
||||
def release(self) -> None:
|
||||
with self.lock:
|
||||
@@ -320,12 +325,16 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
||||
@abc.abstractmethod
|
||||
def begin(self) -> None:
|
||||
"""Begin a transaction. (passable)"""
|
||||
raise dbt.exceptions.NotImplementedError("`begin` is not implemented for this adapter!")
|
||||
raise dbt.common.exceptions.base.NotImplementedError(
|
||||
"`begin` is not implemented for this adapter!"
|
||||
)
|
||||
|
||||
@abc.abstractmethod
|
||||
def commit(self) -> None:
|
||||
"""Commit a transaction. (passable)"""
|
||||
raise dbt.exceptions.NotImplementedError("`commit` is not implemented for this adapter!")
|
||||
raise dbt.common.exceptions.base.NotImplementedError(
|
||||
"`commit` is not implemented for this adapter!"
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def _rollback_handle(cls, connection: Connection) -> None:
|
||||
@@ -361,7 +370,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
||||
def _rollback(cls, connection: Connection) -> None:
|
||||
"""Roll back the given connection."""
|
||||
if connection.transaction_open is False:
|
||||
raise dbt.exceptions.DbtInternalError(
|
||||
raise dbt.common.exceptions.DbtInternalError(
|
||||
f"Tried to rollback transaction on connection "
|
||||
f'"{connection.name}", but it does not have one open!'
|
||||
)
|
||||
@@ -400,7 +409,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
||||
|
||||
@abc.abstractmethod
|
||||
def execute(
|
||||
self, sql: str, auto_begin: bool = False, fetch: bool = False
|
||||
self, sql: str, auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None
|
||||
) -> Tuple[AdapterResponse, agate.Table]:
|
||||
"""Execute the given SQL.
|
||||
|
||||
@@ -408,7 +417,30 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
||||
:param bool auto_begin: If set, and dbt is not currently inside a
|
||||
transaction, automatically begin one.
|
||||
:param bool fetch: If set, fetch results.
|
||||
:param int limit: If set, limits the result set
|
||||
:return: A tuple of the query status and results (empty if fetch=False).
|
||||
:rtype: Tuple[AdapterResponse, agate.Table]
|
||||
"""
|
||||
raise dbt.exceptions.NotImplementedError("`execute` is not implemented for this adapter!")
|
||||
raise dbt.common.exceptions.base.NotImplementedError(
|
||||
"`execute` is not implemented for this adapter!"
|
||||
)
|
||||
|
||||
def add_select_query(self, sql: str) -> Tuple[Connection, Any]:
|
||||
"""
|
||||
This was added here because base.impl.BaseAdapter.get_column_schema_from_query expects it to be here.
|
||||
That method wouldn't work unless the adapter used sql.impl.SQLAdapter, sql.connections.SQLConnectionManager
|
||||
or defined this method on <Adapter>ConnectionManager before passing it in to <Adapter>Adapter.
|
||||
|
||||
See https://github.com/dbt-labs/dbt-core/issues/8396 for more information.
|
||||
"""
|
||||
raise dbt.common.exceptions.base.NotImplementedError(
|
||||
"`add_select_query` is not implemented for this adapter!"
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def data_type_code_to_name(cls, type_code: Union[int, str]) -> str:
|
||||
"""Get the string representation of the data type from the type_code."""
|
||||
# https://peps.python.org/pep-0249/#type-objects
|
||||
raise dbt.common.exceptions.base.NotImplementedError(
|
||||
"`data_type_code_to_name` is not implemented for this adapter!"
|
||||
)
|
||||
|
||||
@@ -9,7 +9,6 @@ from typing import (
|
||||
Any,
|
||||
Callable,
|
||||
Dict,
|
||||
Iterable,
|
||||
Iterator,
|
||||
List,
|
||||
Mapping,
|
||||
@@ -17,39 +16,55 @@ from typing import (
|
||||
Set,
|
||||
Tuple,
|
||||
Type,
|
||||
TypedDict,
|
||||
Union,
|
||||
)
|
||||
from multiprocessing.context import SpawnContext
|
||||
|
||||
from dbt.contracts.graph.nodes import ColumnLevelConstraint, ConstraintType, ModelLevelConstraint
|
||||
from dbt.adapters.capability import Capability, CapabilityDict
|
||||
from dbt.common.contracts.constraints import (
|
||||
ColumnLevelConstraint,
|
||||
ConstraintType,
|
||||
ModelLevelConstraint,
|
||||
)
|
||||
from dbt.adapters.contracts.macros import MacroResolver
|
||||
|
||||
import agate
|
||||
import pytz
|
||||
|
||||
from dbt.exceptions import (
|
||||
from dbt.adapters.exceptions import (
|
||||
SnapshotTargetIncompleteError,
|
||||
SnapshotTargetNotSnapshotTableError,
|
||||
NullRelationDropAttemptedError,
|
||||
NullRelationCacheAttemptedError,
|
||||
RelationReturnedMultipleResultsError,
|
||||
UnexpectedNonTimestampError,
|
||||
RenameToNoneAttemptedError,
|
||||
QuoteConfigTypeError,
|
||||
)
|
||||
|
||||
from dbt.common.exceptions import (
|
||||
NotImplementedError,
|
||||
DbtInternalError,
|
||||
DbtRuntimeError,
|
||||
DbtValidationError,
|
||||
UnexpectedNullError,
|
||||
MacroArgTypeError,
|
||||
MacroResultError,
|
||||
NotImplementedError,
|
||||
NullRelationCacheAttemptedError,
|
||||
NullRelationDropAttemptedError,
|
||||
QuoteConfigTypeError,
|
||||
RelationReturnedMultipleResultsError,
|
||||
RenameToNoneAttemptedError,
|
||||
SnapshotTargetIncompleteError,
|
||||
SnapshotTargetNotSnapshotTableError,
|
||||
UnexpectedNonTimestampError,
|
||||
UnexpectedNullError,
|
||||
)
|
||||
|
||||
from dbt.adapters.protocol import AdapterConfig, ConnectionManagerProtocol
|
||||
from dbt.clients.agate_helper import empty_table, merge_tables, table_from_rows
|
||||
from dbt.clients.jinja import MacroGenerator
|
||||
from dbt.contracts.graph.manifest import Manifest, MacroManifest
|
||||
from dbt.contracts.graph.nodes import ResultNode
|
||||
from dbt.events.functions import fire_event, warn_or_error
|
||||
from dbt.events.types import (
|
||||
from dbt.adapters.protocol import AdapterConfig
|
||||
from dbt.common.clients.agate_helper import (
|
||||
empty_table,
|
||||
get_column_value_uncased,
|
||||
merge_tables,
|
||||
table_from_rows,
|
||||
Integer,
|
||||
)
|
||||
from dbt.common.clients.jinja import CallableMacroGenerator
|
||||
from dbt.contracts.graph.manifest import Manifest
|
||||
from dbt.common.events.functions import fire_event, warn_or_error
|
||||
from dbt.adapters.events.types import (
|
||||
CacheMiss,
|
||||
ListRelations,
|
||||
CodeExecution,
|
||||
@@ -58,9 +73,9 @@ from dbt.events.types import (
|
||||
ConstraintNotSupported,
|
||||
ConstraintNotEnforced,
|
||||
)
|
||||
from dbt.utils import filter_null_values, executor, cast_to_str, AttrDict
|
||||
from dbt.common.utils import filter_null_values, executor, cast_to_str, AttrDict
|
||||
|
||||
from dbt.adapters.base.connections import Connection, AdapterResponse
|
||||
from dbt.adapters.base.connections import Connection, AdapterResponse, BaseConnectionManager
|
||||
from dbt.adapters.base.meta import AdapterMeta, available
|
||||
from dbt.adapters.base.relation import (
|
||||
ComponentName,
|
||||
@@ -71,10 +86,13 @@ from dbt.adapters.base.relation import (
|
||||
from dbt.adapters.base import Column as BaseColumn
|
||||
from dbt.adapters.base import Credentials
|
||||
from dbt.adapters.cache import RelationsCache, _make_ref_key_dict
|
||||
from dbt import deprecations
|
||||
from dbt.adapters.events.types import CollectFreshnessReturnSignature
|
||||
|
||||
|
||||
GET_CATALOG_MACRO_NAME = "get_catalog"
|
||||
GET_CATALOG_RELATIONS_MACRO_NAME = "get_catalog_relations"
|
||||
FRESHNESS_MACRO_NAME = "collect_freshness"
|
||||
GET_RELATION_LAST_MODIFIED_MACRO_NAME = "get_relation_last_modified"
|
||||
|
||||
|
||||
class ConstraintSupport(str, Enum):
|
||||
@@ -109,7 +127,7 @@ def _catalog_filter_schemas(manifest: Manifest) -> Callable[[agate.Row], bool]:
|
||||
return test
|
||||
|
||||
|
||||
def _utc(dt: Optional[datetime], source: BaseRelation, field_name: str) -> datetime:
|
||||
def _utc(dt: Optional[datetime], source: Optional[BaseRelation], field_name: str) -> datetime:
|
||||
"""If dt has a timezone, return a new datetime that's in UTC. Otherwise,
|
||||
assume the datetime is already for UTC and add the timezone.
|
||||
"""
|
||||
@@ -161,6 +179,12 @@ class PythonJobHelper:
|
||||
raise NotImplementedError("PythonJobHelper submit function is not implemented yet")
|
||||
|
||||
|
||||
class FreshnessResponse(TypedDict):
|
||||
max_loaded_at: datetime
|
||||
snapshotted_at: datetime
|
||||
age: float # age in seconds
|
||||
|
||||
|
||||
class BaseAdapter(metaclass=AdapterMeta):
|
||||
"""The BaseAdapter provides an abstract base class for adapters.
|
||||
|
||||
@@ -208,7 +232,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
|
||||
Relation: Type[BaseRelation] = BaseRelation
|
||||
Column: Type[BaseColumn] = BaseColumn
|
||||
ConnectionManager: Type[ConnectionManagerProtocol]
|
||||
ConnectionManager: Type[BaseConnectionManager]
|
||||
|
||||
# A set of clobber config fields accepted by this adapter
|
||||
# for use in materializations
|
||||
@@ -222,11 +246,28 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
ConstraintType.foreign_key: ConstraintSupport.ENFORCED,
|
||||
}
|
||||
|
||||
def __init__(self, config):
|
||||
# This static member variable can be overriden in concrete adapter
|
||||
# implementations to indicate adapter support for optional capabilities.
|
||||
_capabilities = CapabilityDict({})
|
||||
|
||||
def __init__(self, config, mp_context: SpawnContext) -> None:
|
||||
self.config = config
|
||||
self.cache = RelationsCache()
|
||||
self.connections = self.ConnectionManager(config)
|
||||
self._macro_manifest_lazy: Optional[MacroManifest] = None
|
||||
self.cache = RelationsCache(log_cache_events=config.log_cache_events)
|
||||
self.connections = self.ConnectionManager(config, mp_context)
|
||||
self._macro_resolver: Optional[MacroResolver] = None
|
||||
|
||||
###
|
||||
# Methods to set / access a macro resolver
|
||||
###
|
||||
def set_macro_resolver(self, macro_resolver: MacroResolver) -> None:
|
||||
self._macro_resolver = macro_resolver
|
||||
|
||||
def get_macro_resolver(self) -> Optional[MacroResolver]:
|
||||
return self._macro_resolver
|
||||
|
||||
def clear_macro_resolver(self) -> None:
|
||||
if self._macro_resolver is not None:
|
||||
self._macro_resolver = None
|
||||
|
||||
###
|
||||
# Methods that pass through to the connection manager
|
||||
@@ -256,10 +297,10 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
return conn.name
|
||||
|
||||
@contextmanager
|
||||
def connection_named(self, name: str, node: Optional[ResultNode] = None) -> Iterator[None]:
|
||||
def connection_named(self, name: str, query_header_context: Any = None) -> Iterator[None]:
|
||||
try:
|
||||
if self.connections.query_header is not None:
|
||||
self.connections.query_header.set(name, node)
|
||||
self.connections.query_header.set(name, query_header_context)
|
||||
self.acquire_connection(name)
|
||||
yield
|
||||
finally:
|
||||
@@ -267,11 +308,6 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
if self.connections.query_header is not None:
|
||||
self.connections.query_header.reset()
|
||||
|
||||
@contextmanager
|
||||
def connection_for(self, node: ResultNode) -> Iterator[None]:
|
||||
with self.connection_named(node.unique_id, node):
|
||||
yield
|
||||
|
||||
@available.parse(lambda *a, **k: ("", empty_table()))
|
||||
def execute(
|
||||
self, sql: str, auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None
|
||||
@@ -315,14 +351,21 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
|
||||
@available.parse(lambda *a, **k: ("", empty_table()))
|
||||
def get_partitions_metadata(self, table: str) -> Tuple[agate.Table]:
|
||||
"""Obtain partitions metadata for a BigQuery partitioned table.
|
||||
"""
|
||||
TODO: Can we move this to dbt-bigquery?
|
||||
Obtain partitions metadata for a BigQuery partitioned table.
|
||||
|
||||
:param str table_id: a partitioned table id, in standard SQL format.
|
||||
:param str table: a partitioned table id, in standard SQL format.
|
||||
:return: a partition metadata tuple, as described in
|
||||
https://cloud.google.com/bigquery/docs/creating-partitioned-tables#getting_partition_metadata_using_meta_tables.
|
||||
:rtype: agate.Table
|
||||
"""
|
||||
return self.connections.get_partitions_metadata(table=table)
|
||||
if hasattr(self.connections, "get_partitions_metadata"):
|
||||
return self.connections.get_partitions_metadata(table=table)
|
||||
else:
|
||||
raise NotImplementedError(
|
||||
"`get_partitions_metadata` is not implemented for this adapter!"
|
||||
)
|
||||
|
||||
###
|
||||
# Methods that should never be overridden
|
||||
@@ -337,39 +380,6 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
"""
|
||||
return cls.ConnectionManager.TYPE
|
||||
|
||||
@property
|
||||
def _macro_manifest(self) -> MacroManifest:
|
||||
if self._macro_manifest_lazy is None:
|
||||
return self.load_macro_manifest()
|
||||
return self._macro_manifest_lazy
|
||||
|
||||
def check_macro_manifest(self) -> Optional[MacroManifest]:
|
||||
"""Return the internal manifest (used for executing macros) if it's
|
||||
been initialized, otherwise return None.
|
||||
"""
|
||||
return self._macro_manifest_lazy
|
||||
|
||||
def load_macro_manifest(self, base_macros_only=False) -> MacroManifest:
|
||||
# base_macros_only is for the test framework
|
||||
if self._macro_manifest_lazy is None:
|
||||
# avoid a circular import
|
||||
from dbt.parser.manifest import ManifestLoader
|
||||
|
||||
manifest = ManifestLoader.load_macros(
|
||||
self.config,
|
||||
self.connections.set_query_header,
|
||||
base_macros_only=base_macros_only,
|
||||
)
|
||||
# TODO CT-211
|
||||
self._macro_manifest_lazy = manifest # type: ignore[assignment]
|
||||
# TODO CT-211
|
||||
return self._macro_manifest_lazy # type: ignore[return-value]
|
||||
|
||||
def clear_macro_manifest(self):
|
||||
if self._macro_manifest_lazy is not None:
|
||||
self._macro_manifest_lazy = None
|
||||
|
||||
###
|
||||
# Caching methods
|
||||
###
|
||||
def _schema_is_cached(self, database: Optional[str], schema: str) -> bool:
|
||||
@@ -393,7 +403,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
"""
|
||||
# the cache only cares about executable nodes
|
||||
return {
|
||||
self.Relation.create_from(self.config, node).without_identifier()
|
||||
self.Relation.create_from(self.config, node).without_identifier() # type: ignore[arg-type]
|
||||
for node in manifest.nodes.values()
|
||||
if (node.is_relational and not node.is_ephemeral_model and not node.is_external_node)
|
||||
}
|
||||
@@ -408,7 +418,30 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
lowercase strings.
|
||||
"""
|
||||
info_schema_name_map = SchemaSearchMap()
|
||||
nodes: Iterator[ResultNode] = chain(
|
||||
relations = self._get_catalog_relations(manifest)
|
||||
for relation in relations:
|
||||
info_schema_name_map.add(relation)
|
||||
# result is a map whose keys are information_schema Relations without
|
||||
# identifiers that have appropriate database prefixes, and whose values
|
||||
# are sets of lowercase schema names that are valid members of those
|
||||
# databases
|
||||
return info_schema_name_map
|
||||
|
||||
def _get_catalog_relations_by_info_schema(
|
||||
self, relations
|
||||
) -> Dict[InformationSchema, List[BaseRelation]]:
|
||||
relations_by_info_schema: Dict[InformationSchema, List[BaseRelation]] = dict()
|
||||
for relation in relations:
|
||||
info_schema = relation.information_schema_only()
|
||||
if info_schema not in relations_by_info_schema:
|
||||
relations_by_info_schema[info_schema] = []
|
||||
relations_by_info_schema[info_schema].append(relation)
|
||||
|
||||
return relations_by_info_schema
|
||||
|
||||
def _get_catalog_relations(self, manifest: Manifest) -> List[BaseRelation]:
|
||||
|
||||
nodes = chain(
|
||||
[
|
||||
node
|
||||
for node in manifest.nodes.values()
|
||||
@@ -416,14 +449,9 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
],
|
||||
manifest.sources.values(),
|
||||
)
|
||||
for node in nodes:
|
||||
relation = self.Relation.create_from(self.config, node)
|
||||
info_schema_name_map.add(relation)
|
||||
# result is a map whose keys are information_schema Relations without
|
||||
# identifiers that have appropriate database prefixes, and whose values
|
||||
# are sets of lowercase schema names that are valid members of those
|
||||
# databases
|
||||
return info_schema_name_map
|
||||
|
||||
relations = [self.Relation.create_from(self.config, n) for n in nodes] # type: ignore[arg-type]
|
||||
return relations
|
||||
|
||||
def _relations_cache_for_schemas(
|
||||
self, manifest: Manifest, cache_schemas: Optional[Set[BaseRelation]] = None
|
||||
@@ -453,9 +481,10 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
# it's possible that there were no relations in some schemas. We want
|
||||
# to insert the schemas we query into the cache's `.schemas` attribute
|
||||
# so we can check it later
|
||||
cache_update: Set[Tuple[Optional[str], Optional[str]]] = set()
|
||||
cache_update: Set[Tuple[Optional[str], str]] = set()
|
||||
for relation in cache_schemas:
|
||||
cache_update.add((relation.database, relation.schema))
|
||||
if relation.schema:
|
||||
cache_update.add((relation.database, relation.schema))
|
||||
self.cache.update_schemas(cache_update)
|
||||
|
||||
def set_relations_cache(
|
||||
@@ -917,6 +946,17 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
"""
|
||||
raise NotImplementedError("`convert_number_type` is not implemented for this adapter!")
|
||||
|
||||
@classmethod
|
||||
def convert_integer_type(cls, agate_table: agate.Table, col_idx: int) -> str:
|
||||
"""Return the type in the database that best maps to the agate.Number
|
||||
type for the given agate table and column index.
|
||||
|
||||
:param agate_table: The table
|
||||
:param col_idx: The index into the agate table for the column.
|
||||
:return: The name of the type in the database
|
||||
"""
|
||||
return "integer"
|
||||
|
||||
@classmethod
|
||||
@abc.abstractmethod
|
||||
def convert_boolean_type(cls, agate_table: agate.Table, col_idx: int) -> str:
|
||||
@@ -974,6 +1014,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
def convert_agate_type(cls, agate_table: agate.Table, col_idx: int) -> Optional[str]:
|
||||
agate_type: Type = agate_table.column_types[col_idx]
|
||||
conversions: List[Tuple[Type, Callable[..., str]]] = [
|
||||
(Integer, cls.convert_integer_type),
|
||||
(agate.Text, cls.convert_text_type),
|
||||
(agate.Number, cls.convert_number_type),
|
||||
(agate.Boolean, cls.convert_boolean_type),
|
||||
@@ -993,11 +1034,10 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
def execute_macro(
|
||||
self,
|
||||
macro_name: str,
|
||||
manifest: Optional[Manifest] = None,
|
||||
macro_resolver: Optional[MacroResolver] = None,
|
||||
project: Optional[str] = None,
|
||||
context_override: Optional[Dict[str, Any]] = None,
|
||||
kwargs: Optional[Dict[str, Any]] = None,
|
||||
text_only_columns: Optional[Iterable[str]] = None,
|
||||
) -> AttrDict:
|
||||
"""Look macro_name up in the manifest and execute its results.
|
||||
|
||||
@@ -1017,13 +1057,11 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
if context_override is None:
|
||||
context_override = {}
|
||||
|
||||
if manifest is None:
|
||||
# TODO CT-211
|
||||
manifest = self._macro_manifest # type: ignore[assignment]
|
||||
# TODO CT-211
|
||||
macro = manifest.find_macro_by_name( # type: ignore[union-attr]
|
||||
macro_name, self.config.project_name, project
|
||||
)
|
||||
resolver = macro_resolver or self._macro_resolver
|
||||
if resolver is None:
|
||||
raise DbtInternalError("macro resolver was None when calling execute_macro!")
|
||||
|
||||
macro = resolver.find_macro_by_name(macro_name, self.config.project_name, project)
|
||||
if macro is None:
|
||||
if project is None:
|
||||
package_name = "any package"
|
||||
@@ -1043,12 +1081,12 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
# TODO CT-211
|
||||
macro=macro,
|
||||
config=self.config,
|
||||
manifest=manifest, # type: ignore[arg-type]
|
||||
manifest=resolver, # type: ignore[arg-type]
|
||||
package_name=project,
|
||||
)
|
||||
macro_context.update(context_override)
|
||||
|
||||
macro_function = MacroGenerator(macro, macro_context)
|
||||
macro_function = CallableMacroGenerator(macro, macro_context)
|
||||
|
||||
with self.connections.exception_handler(f"macro {macro_name}"):
|
||||
result = macro_function(**kwargs)
|
||||
@@ -1079,31 +1117,114 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
kwargs=kwargs,
|
||||
# pass in the full manifest so we get any local project
|
||||
# overrides
|
||||
manifest=manifest,
|
||||
macro_resolver=manifest,
|
||||
)
|
||||
|
||||
results = self._catalog_filter_table(table, manifest) # type: ignore[arg-type]
|
||||
return results
|
||||
|
||||
def get_catalog(self, manifest: Manifest) -> Tuple[agate.Table, List[Exception]]:
|
||||
schema_map = self._get_catalog_schemas(manifest)
|
||||
def _get_one_catalog_by_relations(
|
||||
self,
|
||||
information_schema: InformationSchema,
|
||||
relations: List[BaseRelation],
|
||||
manifest: Manifest,
|
||||
) -> agate.Table:
|
||||
|
||||
kwargs = {
|
||||
"information_schema": information_schema,
|
||||
"relations": relations,
|
||||
}
|
||||
table = self.execute_macro(
|
||||
GET_CATALOG_RELATIONS_MACRO_NAME,
|
||||
kwargs=kwargs,
|
||||
# pass in the full manifest, so we get any local project
|
||||
# overrides
|
||||
macro_resolver=manifest,
|
||||
)
|
||||
|
||||
results = self._catalog_filter_table(table, manifest) # type: ignore[arg-type]
|
||||
return results
|
||||
|
||||
def get_filtered_catalog(
|
||||
self, manifest: Manifest, relations: Optional[Set[BaseRelation]] = None
|
||||
):
|
||||
catalogs: agate.Table
|
||||
if (
|
||||
relations is None
|
||||
or len(relations) > 100
|
||||
or not self.supports(Capability.SchemaMetadataByRelations)
|
||||
):
|
||||
# Do it the traditional way. We get the full catalog.
|
||||
catalogs, exceptions = self.get_catalog(manifest)
|
||||
else:
|
||||
# Do it the new way. We try to save time by selecting information
|
||||
# only for the exact set of relations we are interested in.
|
||||
catalogs, exceptions = self.get_catalog_by_relations(manifest, relations)
|
||||
|
||||
if relations and catalogs:
|
||||
relation_map = {
|
||||
(
|
||||
r.database.casefold() if r.database else None,
|
||||
r.schema.casefold() if r.schema else None,
|
||||
r.identifier.casefold() if r.identifier else None,
|
||||
)
|
||||
for r in relations
|
||||
}
|
||||
|
||||
def in_map(row: agate.Row):
|
||||
d = _expect_row_value("table_database", row)
|
||||
s = _expect_row_value("table_schema", row)
|
||||
i = _expect_row_value("table_name", row)
|
||||
d = d.casefold() if d is not None else None
|
||||
s = s.casefold() if s is not None else None
|
||||
i = i.casefold() if i is not None else None
|
||||
return (d, s, i) in relation_map
|
||||
|
||||
catalogs = catalogs.where(in_map)
|
||||
|
||||
return catalogs, exceptions
|
||||
|
||||
def row_matches_relation(self, row: agate.Row, relations: Set[BaseRelation]):
|
||||
pass
|
||||
|
||||
def get_catalog(self, manifest: Manifest) -> Tuple[agate.Table, List[Exception]]:
|
||||
with executor(self.config) as tpe:
|
||||
futures: List[Future[agate.Table]] = []
|
||||
schema_map: SchemaSearchMap = self._get_catalog_schemas(manifest)
|
||||
for info, schemas in schema_map.items():
|
||||
if len(schemas) == 0:
|
||||
continue
|
||||
name = ".".join([str(info.database), "information_schema"])
|
||||
|
||||
fut = tpe.submit_connected(
|
||||
self, name, self._get_one_catalog, info, schemas, manifest
|
||||
)
|
||||
futures.append(fut)
|
||||
|
||||
catalogs, exceptions = catch_as_completed(futures)
|
||||
|
||||
catalogs, exceptions = catch_as_completed(futures)
|
||||
return catalogs, exceptions
|
||||
|
||||
def get_catalog_by_relations(
|
||||
self, manifest: Manifest, relations: Set[BaseRelation]
|
||||
) -> Tuple[agate.Table, List[Exception]]:
|
||||
with executor(self.config) as tpe:
|
||||
futures: List[Future[agate.Table]] = []
|
||||
relations_by_schema = self._get_catalog_relations_by_info_schema(relations)
|
||||
for info_schema in relations_by_schema:
|
||||
name = ".".join([str(info_schema.database), "information_schema"])
|
||||
relations = set(relations_by_schema[info_schema])
|
||||
fut = tpe.submit_connected(
|
||||
self,
|
||||
name,
|
||||
self._get_one_catalog_by_relations,
|
||||
info_schema,
|
||||
relations,
|
||||
manifest,
|
||||
)
|
||||
futures.append(fut)
|
||||
|
||||
catalogs, exceptions = catch_as_completed(futures)
|
||||
return catalogs, exceptions
|
||||
|
||||
def cancel_open_connections(self):
|
||||
"""Cancel all open connections."""
|
||||
return self.connections.cancel_open()
|
||||
@@ -1114,7 +1235,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
loaded_at_field: str,
|
||||
filter: Optional[str],
|
||||
manifest: Optional[Manifest] = None,
|
||||
) -> Tuple[Optional[AdapterResponse], Dict[str, Any]]:
|
||||
) -> Tuple[Optional[AdapterResponse], FreshnessResponse]:
|
||||
"""Calculate the freshness of sources in dbt, and return it"""
|
||||
kwargs: Dict[str, Any] = {
|
||||
"source": source,
|
||||
@@ -1129,9 +1250,9 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
AttrDict, # current: contains AdapterResponse + agate.Table
|
||||
agate.Table, # previous: just table
|
||||
]
|
||||
result = self.execute_macro(FRESHNESS_MACRO_NAME, kwargs=kwargs, manifest=manifest)
|
||||
result = self.execute_macro(FRESHNESS_MACRO_NAME, kwargs=kwargs, macro_resolver=manifest)
|
||||
if isinstance(result, agate.Table):
|
||||
deprecations.warn("collect-freshness-return-signature")
|
||||
warn_or_error(CollectFreshnessReturnSignature())
|
||||
adapter_response = None
|
||||
table = result
|
||||
else:
|
||||
@@ -1149,13 +1270,52 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
|
||||
snapshotted_at = _utc(table[0][1], source, loaded_at_field)
|
||||
age = (snapshotted_at - max_loaded_at).total_seconds()
|
||||
freshness = {
|
||||
freshness: FreshnessResponse = {
|
||||
"max_loaded_at": max_loaded_at,
|
||||
"snapshotted_at": snapshotted_at,
|
||||
"age": age,
|
||||
}
|
||||
return adapter_response, freshness
|
||||
|
||||
def calculate_freshness_from_metadata(
|
||||
self,
|
||||
source: BaseRelation,
|
||||
manifest: Optional[Manifest] = None,
|
||||
) -> Tuple[Optional[AdapterResponse], FreshnessResponse]:
|
||||
kwargs: Dict[str, Any] = {
|
||||
"information_schema": source.information_schema_only(),
|
||||
"relations": [source],
|
||||
}
|
||||
result = self.execute_macro(
|
||||
GET_RELATION_LAST_MODIFIED_MACRO_NAME, kwargs=kwargs, macro_resolver=manifest
|
||||
)
|
||||
adapter_response, table = result.response, result.table # type: ignore[attr-defined]
|
||||
|
||||
try:
|
||||
row = table[0]
|
||||
last_modified_val = get_column_value_uncased("last_modified", row)
|
||||
snapshotted_at_val = get_column_value_uncased("snapshotted_at", row)
|
||||
except Exception:
|
||||
raise MacroResultError(GET_RELATION_LAST_MODIFIED_MACRO_NAME, table)
|
||||
|
||||
if last_modified_val is None:
|
||||
# Interpret missing value as "infinitely long ago"
|
||||
max_loaded_at = datetime(1, 1, 1, 0, 0, 0, tzinfo=pytz.UTC)
|
||||
else:
|
||||
max_loaded_at = _utc(last_modified_val, None, "last_modified")
|
||||
|
||||
snapshotted_at = _utc(snapshotted_at_val, None, "snapshotted_at")
|
||||
|
||||
age = (snapshotted_at - max_loaded_at).total_seconds()
|
||||
|
||||
freshness: FreshnessResponse = {
|
||||
"max_loaded_at": max_loaded_at,
|
||||
"snapshotted_at": snapshotted_at,
|
||||
"age": age,
|
||||
}
|
||||
|
||||
return adapter_response, freshness
|
||||
|
||||
def pre_model_hook(self, config: Mapping[str, Any]) -> Any:
|
||||
"""A hook for running some operation before the model materialization
|
||||
runs. The hook can assume it has a connection available.
|
||||
@@ -1181,11 +1341,6 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
"""
|
||||
pass
|
||||
|
||||
def get_compiler(self):
|
||||
from dbt.compilation import Compiler
|
||||
|
||||
return Compiler(self.config)
|
||||
|
||||
# Methods used in adapter tests
|
||||
def update_column_sql(
|
||||
self,
|
||||
@@ -1305,7 +1460,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
|
||||
strategy = strategy.replace("+", "_")
|
||||
macro_name = f"get_incremental_{strategy}_sql"
|
||||
# The model_context should have MacroGenerator callable objects for all macros
|
||||
# The model_context should have callable objects for all macros
|
||||
if macro_name not in model_context:
|
||||
raise DbtRuntimeError(
|
||||
'dbt could not find an incremental strategy macro with the name "{}" in {}'.format(
|
||||
@@ -1429,6 +1584,14 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
else:
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
def capabilities(cls) -> CapabilityDict:
|
||||
return cls._capabilities
|
||||
|
||||
@classmethod
|
||||
def supports(cls, capability: Capability) -> bool:
|
||||
return bool(cls.capabilities()[capability])
|
||||
|
||||
|
||||
COLUMNS_EQUAL_SQL = """
|
||||
with diff_count as (
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
import abc
|
||||
from functools import wraps
|
||||
from typing import Callable, Optional, Any, FrozenSet, Dict, Set
|
||||
|
||||
from dbt.deprecations import warn, renamed_method
|
||||
|
||||
from dbt.common.events.functions import warn_or_error
|
||||
from dbt.adapters.events.types import AdapterDeprecationWarning
|
||||
|
||||
Decorator = Callable[[Any], Callable]
|
||||
|
||||
@@ -62,11 +61,12 @@ class _Available:
|
||||
|
||||
def wrapper(func):
|
||||
func_name = func.__name__
|
||||
renamed_method(func_name, supported_name)
|
||||
|
||||
@wraps(func)
|
||||
def inner(*args, **kwargs):
|
||||
warn("adapter:{}".format(func_name))
|
||||
warn_or_error(
|
||||
AdapterDeprecationWarning(old_name=func_name, new_name=supported_name)
|
||||
)
|
||||
return func(*args, **kwargs)
|
||||
|
||||
if parse_replacement:
|
||||
@@ -93,7 +93,7 @@ class AdapterMeta(abc.ABCMeta):
|
||||
_available_: FrozenSet[str]
|
||||
_parse_replacements_: Dict[str, Callable]
|
||||
|
||||
def __new__(mcls, name, bases, namespace, **kwargs):
|
||||
def __new__(mcls, name, bases, namespace, **kwargs) -> "AdapterMeta":
|
||||
# mypy does not like the `**kwargs`. But `ABCMeta` itself takes
|
||||
# `**kwargs` in its argspec here (and passes them to `type.__new__`.
|
||||
# I'm not sure there is any benefit to it after poking around a bit,
|
||||
|
||||
@@ -1,20 +1,10 @@
|
||||
from typing import List, Optional, Type
|
||||
from pathlib import Path
|
||||
|
||||
from dbt.adapters.base import Credentials
|
||||
from dbt.exceptions import CompilationError
|
||||
from dbt.adapters.protocol import AdapterProtocol
|
||||
|
||||
|
||||
def project_name_from_path(include_path: str) -> str:
|
||||
# avoid an import cycle
|
||||
from dbt.config.project import PartialProject
|
||||
|
||||
partial = PartialProject.from_project_root(include_path)
|
||||
if partial.project_name is None:
|
||||
raise CompilationError(f"Invalid project at {include_path}: name not set!")
|
||||
return partial.project_name
|
||||
|
||||
|
||||
class AdapterPlugin:
|
||||
"""Defines the basic requirements for a dbt adapter plugin.
|
||||
|
||||
@@ -29,12 +19,13 @@ class AdapterPlugin:
|
||||
credentials: Type[Credentials],
|
||||
include_path: str,
|
||||
dependencies: Optional[List[str]] = None,
|
||||
):
|
||||
project_name: Optional[str] = None,
|
||||
) -> None:
|
||||
|
||||
self.adapter: Type[AdapterProtocol] = adapter
|
||||
self.credentials: Type[Credentials] = credentials
|
||||
self.include_path: str = include_path
|
||||
self.project_name: str = project_name_from_path(include_path)
|
||||
self.project_name: str = project_name or f"dbt_{Path(include_path).name}"
|
||||
self.dependencies: List[str]
|
||||
if dependencies is None:
|
||||
self.dependencies = []
|
||||
|
||||
@@ -1,21 +1,20 @@
|
||||
from threading import local
|
||||
from typing import Optional, Callable, Dict, Any
|
||||
|
||||
from dbt.clients.jinja import QueryStringGenerator
|
||||
from dbt.adapters.clients.jinja import QueryStringGenerator
|
||||
|
||||
from dbt.context.manifest import generate_query_header_context
|
||||
from dbt.contracts.connection import AdapterRequiredConfig, QueryComment
|
||||
from dbt.contracts.graph.nodes import ResultNode
|
||||
from dbt.adapters.contracts.connection import AdapterRequiredConfig, QueryComment
|
||||
from dbt.contracts.graph.manifest import Manifest
|
||||
from dbt.exceptions import DbtRuntimeError
|
||||
from dbt.common.exceptions import DbtRuntimeError
|
||||
|
||||
|
||||
class NodeWrapper:
|
||||
def __init__(self, node):
|
||||
self._inner_node = node
|
||||
class QueryHeaderContextWrapper:
|
||||
def __init__(self, context) -> None:
|
||||
self._inner_context = context
|
||||
|
||||
def __getattr__(self, name):
|
||||
return getattr(self._inner_node, name, "")
|
||||
return getattr(self._inner_context, name, "")
|
||||
|
||||
|
||||
class _QueryComment(local):
|
||||
@@ -25,9 +24,9 @@ class _QueryComment(local):
|
||||
- a source_name indicating what set the current thread's query comment
|
||||
"""
|
||||
|
||||
def __init__(self, initial):
|
||||
def __init__(self, initial) -> None:
|
||||
self.query_comment: Optional[str] = initial
|
||||
self.append = False
|
||||
self.append: bool = False
|
||||
|
||||
def add(self, sql: str) -> str:
|
||||
if not self.query_comment:
|
||||
@@ -53,11 +52,11 @@ class _QueryComment(local):
|
||||
self.append = append
|
||||
|
||||
|
||||
QueryStringFunc = Callable[[str, Optional[NodeWrapper]], str]
|
||||
QueryStringFunc = Callable[[str, Optional[QueryHeaderContextWrapper]], str]
|
||||
|
||||
|
||||
class MacroQueryStringSetter:
|
||||
def __init__(self, config: AdapterRequiredConfig, manifest: Manifest):
|
||||
def __init__(self, config: AdapterRequiredConfig, manifest: Manifest) -> None:
|
||||
self.manifest = manifest
|
||||
self.config = config
|
||||
|
||||
@@ -90,10 +89,10 @@ class MacroQueryStringSetter:
|
||||
def reset(self):
|
||||
self.set("master", None)
|
||||
|
||||
def set(self, name: str, node: Optional[ResultNode]):
|
||||
wrapped: Optional[NodeWrapper] = None
|
||||
if node is not None:
|
||||
wrapped = NodeWrapper(node)
|
||||
def set(self, name: str, query_header_context: Any):
|
||||
wrapped: Optional[QueryHeaderContextWrapper] = None
|
||||
if query_header_context is not None:
|
||||
wrapped = QueryHeaderContextWrapper(query_header_context)
|
||||
comment_str = self.generator(name, wrapped)
|
||||
|
||||
append = False
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
from collections.abc import Hashable
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Optional, TypeVar, Any, Type, Dict, Iterator, Tuple, Set
|
||||
from typing import Optional, TypeVar, Any, Type, Dict, Iterator, Tuple, Set, Union, FrozenSet
|
||||
|
||||
from dbt.contracts.graph.nodes import SourceDefinition, ManifestNode, ResultNode, ParsedNode
|
||||
from dbt.contracts.relation import (
|
||||
from dbt.adapters.contracts.relation import (
|
||||
RelationConfig,
|
||||
RelationType,
|
||||
ComponentName,
|
||||
HasQuoting,
|
||||
@@ -11,18 +11,15 @@ from dbt.contracts.relation import (
|
||||
Policy,
|
||||
Path,
|
||||
)
|
||||
from dbt.exceptions import (
|
||||
ApproximateMatchError,
|
||||
DbtInternalError,
|
||||
MultipleDatabasesNotAllowedError,
|
||||
)
|
||||
from dbt.node_types import NodeType
|
||||
from dbt.utils import filter_null_values, deep_merge, classproperty
|
||||
from dbt.adapters.exceptions import MultipleDatabasesNotAllowedError, ApproximateMatchError
|
||||
from dbt.common.utils import filter_null_values, deep_merge
|
||||
from dbt.adapters.utils import classproperty
|
||||
|
||||
import dbt.exceptions
|
||||
import dbt.common.exceptions
|
||||
|
||||
|
||||
Self = TypeVar("Self", bound="BaseRelation")
|
||||
SerializableIterable = Union[Tuple, FrozenSet]
|
||||
|
||||
|
||||
@dataclass(frozen=True, eq=False, repr=False)
|
||||
@@ -36,6 +33,18 @@ class BaseRelation(FakeAPIObject, Hashable):
|
||||
quote_policy: Policy = field(default_factory=lambda: Policy())
|
||||
dbt_created: bool = False
|
||||
|
||||
# register relation types that can be renamed for the purpose of replacing relations using stages and backups
|
||||
# adding a relation type here also requires defining the associated rename macro
|
||||
# e.g. adding RelationType.View in dbt-postgres requires that you define:
|
||||
# include/postgres/macros/relations/view/rename.sql::postgres__get_rename_view_sql()
|
||||
renameable_relations: SerializableIterable = ()
|
||||
|
||||
# register relation types that are atomically replaceable, e.g. they have "create or replace" syntax
|
||||
# adding a relation type here also requires defining the associated replace macro
|
||||
# e.g. adding RelationType.View in dbt-postgres requires that you define:
|
||||
# include/postgres/macros/relations/view/replace.sql::postgres__get_replace_view_sql()
|
||||
replaceable_relations: SerializableIterable = ()
|
||||
|
||||
def _is_exactish_match(self, field: ComponentName, value: str) -> bool:
|
||||
if self.dbt_created and self.quote_policy.get_part(field) is False:
|
||||
return self.path.get_lowered_part(field) == value.lower()
|
||||
@@ -87,7 +96,7 @@ class BaseRelation(FakeAPIObject, Hashable):
|
||||
|
||||
if not search:
|
||||
# nothing was passed in
|
||||
raise dbt.exceptions.DbtRuntimeError(
|
||||
raise dbt.common.exceptions.DbtRuntimeError(
|
||||
"Tried to match relation, but no search path was passed!"
|
||||
)
|
||||
|
||||
@@ -169,7 +178,6 @@ class BaseRelation(FakeAPIObject, Hashable):
|
||||
return self.include(identifier=False).replace_path(identifier=None)
|
||||
|
||||
def _render_iterator(self) -> Iterator[Tuple[Optional[ComponentName], Optional[str]]]:
|
||||
|
||||
for key in ComponentName:
|
||||
path_part: Optional[str] = None
|
||||
if self.include_policy.get_part(key):
|
||||
@@ -188,83 +196,50 @@ class BaseRelation(FakeAPIObject, Hashable):
|
||||
identifier=identifier,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def create_from_source(cls: Type[Self], source: SourceDefinition, **kwargs: Any) -> Self:
|
||||
source_quoting = source.quoting.to_dict(omit_none=True)
|
||||
source_quoting.pop("column", None)
|
||||
quote_policy = deep_merge(
|
||||
cls.get_default_quote_policy().to_dict(omit_none=True),
|
||||
source_quoting,
|
||||
kwargs.get("quote_policy", {}),
|
||||
)
|
||||
|
||||
return cls.create(
|
||||
database=source.database,
|
||||
schema=source.schema,
|
||||
identifier=source.identifier,
|
||||
quote_policy=quote_policy,
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def add_ephemeral_prefix(name: str):
|
||||
return f"__dbt__cte__{name}"
|
||||
|
||||
@classmethod
|
||||
def create_ephemeral_from_node(
|
||||
def create_ephemeral_from(
|
||||
cls: Type[Self],
|
||||
config: HasQuoting,
|
||||
node: ManifestNode,
|
||||
relation_config: RelationConfig,
|
||||
) -> Self:
|
||||
# Note that ephemeral models are based on the name.
|
||||
identifier = cls.add_ephemeral_prefix(node.name)
|
||||
identifier = cls.add_ephemeral_prefix(relation_config.name)
|
||||
return cls.create(
|
||||
type=cls.CTE,
|
||||
identifier=identifier,
|
||||
).quote(identifier=False)
|
||||
|
||||
@classmethod
|
||||
def create_from_node(
|
||||
def create_from(
|
||||
cls: Type[Self],
|
||||
config: HasQuoting,
|
||||
node,
|
||||
quote_policy: Optional[Dict[str, bool]] = None,
|
||||
quoting: HasQuoting,
|
||||
relation_config: RelationConfig,
|
||||
**kwargs: Any,
|
||||
) -> Self:
|
||||
if quote_policy is None:
|
||||
quote_policy = {}
|
||||
quote_policy = kwargs.pop("quote_policy", {})
|
||||
|
||||
quote_policy = dbt.utils.merge(config.quoting, quote_policy)
|
||||
config_quoting = relation_config.quoting_dict
|
||||
config_quoting.pop("column", None)
|
||||
|
||||
# precedence: kwargs quoting > relation config quoting > base quoting > default quoting
|
||||
quote_policy = deep_merge(
|
||||
cls.get_default_quote_policy().to_dict(omit_none=True),
|
||||
quoting.quoting,
|
||||
config_quoting,
|
||||
quote_policy,
|
||||
)
|
||||
|
||||
return cls.create(
|
||||
database=node.database,
|
||||
schema=node.schema,
|
||||
identifier=node.alias,
|
||||
database=relation_config.database,
|
||||
schema=relation_config.schema,
|
||||
identifier=relation_config.identifier,
|
||||
quote_policy=quote_policy,
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def create_from(
|
||||
cls: Type[Self],
|
||||
config: HasQuoting,
|
||||
node: ResultNode,
|
||||
**kwargs: Any,
|
||||
) -> Self:
|
||||
if node.resource_type == NodeType.Source:
|
||||
if not isinstance(node, SourceDefinition):
|
||||
raise DbtInternalError(
|
||||
"type mismatch, expected SourceDefinition but got {}".format(type(node))
|
||||
)
|
||||
return cls.create_from_source(node, **kwargs)
|
||||
else:
|
||||
# Can't use ManifestNode here because of parameterized generics
|
||||
if not isinstance(node, (ParsedNode)):
|
||||
raise DbtInternalError(
|
||||
f"type mismatch, expected ManifestNode but got {type(node)}"
|
||||
)
|
||||
return cls.create_from_node(config, node, **kwargs)
|
||||
|
||||
@classmethod
|
||||
def create(
|
||||
cls: Type[Self],
|
||||
@@ -286,6 +261,14 @@ class BaseRelation(FakeAPIObject, Hashable):
|
||||
)
|
||||
return cls.from_dict(kwargs)
|
||||
|
||||
@property
|
||||
def can_be_renamed(self) -> bool:
|
||||
return self.type in self.renameable_relations
|
||||
|
||||
@property
|
||||
def can_be_replaced(self) -> bool:
|
||||
return self.type in self.replaceable_relations
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return "<{} {}>".format(self.__class__.__name__, self.render())
|
||||
|
||||
@@ -366,7 +349,7 @@ class InformationSchema(BaseRelation):
|
||||
|
||||
def __post_init__(self):
|
||||
if not isinstance(self.information_schema_view, (type(None), str)):
|
||||
raise dbt.exceptions.CompilationError(
|
||||
raise dbt.common.exceptions.CompilationError(
|
||||
"Got an invalid name: {}".format(self.information_schema_view)
|
||||
)
|
||||
|
||||
@@ -439,11 +422,11 @@ class SchemaSearchMap(Dict[InformationSchema, Set[Optional[str]]]):
|
||||
self[key].add(schema)
|
||||
|
||||
def search(self) -> Iterator[Tuple[InformationSchema, Optional[str]]]:
|
||||
for information_schema_name, schemas in self.items():
|
||||
for information_schema, schemas in self.items():
|
||||
for schema in schemas:
|
||||
yield information_schema_name, schema
|
||||
yield information_schema, schema
|
||||
|
||||
def flatten(self, allow_multiple_databases: bool = False):
|
||||
def flatten(self, allow_multiple_databases: bool = False) -> "SchemaSearchMap":
|
||||
new = self.__class__()
|
||||
|
||||
# make sure we don't have multiple databases if allow_multiple_databases is set to False
|
||||
|
||||
@@ -7,17 +7,16 @@ from dbt.adapters.reference_keys import (
|
||||
_make_ref_key_dict,
|
||||
_ReferenceKey,
|
||||
)
|
||||
from dbt.exceptions import (
|
||||
DependentLinkNotCachedError,
|
||||
from dbt.common.exceptions.cache import (
|
||||
NewNameAlreadyInCacheError,
|
||||
NoneRelationFoundError,
|
||||
ReferencedLinkNotCachedError,
|
||||
DependentLinkNotCachedError,
|
||||
TruncatedModelNameCausedCollisionError,
|
||||
NoneRelationFoundError,
|
||||
)
|
||||
from dbt.events.functions import fire_event, fire_event_if
|
||||
from dbt.events.types import CacheAction, CacheDumpGraph
|
||||
from dbt.flags import get_flags
|
||||
from dbt.utils import lowercase
|
||||
from dbt.common.events.functions import fire_event, fire_event_if
|
||||
from dbt.adapters.events.types import CacheAction, CacheDumpGraph
|
||||
from dbt.common.utils.formatting import lowercase
|
||||
|
||||
|
||||
def dot_separated(key: _ReferenceKey) -> str:
|
||||
@@ -38,8 +37,8 @@ class _CachedRelation:
|
||||
:attr BaseRelation inner: The underlying dbt relation.
|
||||
"""
|
||||
|
||||
def __init__(self, inner):
|
||||
self.referenced_by = {}
|
||||
def __init__(self, inner) -> None:
|
||||
self.referenced_by: Dict[_ReferenceKey, _CachedRelation] = {}
|
||||
self.inner = inner
|
||||
|
||||
def __str__(self) -> str:
|
||||
@@ -165,10 +164,11 @@ class RelationsCache:
|
||||
:attr Set[str] schemas: The set of known/cached schemas, all lowercased.
|
||||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
def __init__(self, log_cache_events: bool = False) -> None:
|
||||
self.relations: Dict[_ReferenceKey, _CachedRelation] = {}
|
||||
self.lock = threading.RLock()
|
||||
self.schemas: Set[Tuple[Optional[str], Optional[str]]] = set()
|
||||
self.log_cache_events = log_cache_events
|
||||
|
||||
def add_schema(
|
||||
self,
|
||||
@@ -318,10 +318,9 @@ class RelationsCache:
|
||||
|
||||
:param BaseRelation relation: The underlying relation.
|
||||
"""
|
||||
flags = get_flags()
|
||||
cached = _CachedRelation(relation)
|
||||
fire_event_if(
|
||||
flags.LOG_CACHE_EVENTS,
|
||||
self.log_cache_events,
|
||||
lambda: CacheDumpGraph(before_after="before", action="adding", dump=self.dump_graph()),
|
||||
)
|
||||
fire_event(CacheAction(action="add_relation", ref_key=_make_ref_key_dict(cached)))
|
||||
@@ -329,7 +328,7 @@ class RelationsCache:
|
||||
with self.lock:
|
||||
self._setdefault(cached)
|
||||
fire_event_if(
|
||||
flags.LOG_CACHE_EVENTS,
|
||||
self.log_cache_events,
|
||||
lambda: CacheDumpGraph(before_after="after", action="adding", dump=self.dump_graph()),
|
||||
)
|
||||
|
||||
@@ -454,9 +453,8 @@ class RelationsCache:
|
||||
ref_key_2=new_key._asdict(),
|
||||
)
|
||||
)
|
||||
flags = get_flags()
|
||||
fire_event_if(
|
||||
flags.LOG_CACHE_EVENTS,
|
||||
self.log_cache_events,
|
||||
lambda: CacheDumpGraph(before_after="before", action="rename", dump=self.dump_graph()),
|
||||
)
|
||||
|
||||
@@ -467,7 +465,7 @@ class RelationsCache:
|
||||
self._setdefault(_CachedRelation(new))
|
||||
|
||||
fire_event_if(
|
||||
flags.LOG_CACHE_EVENTS,
|
||||
self.log_cache_events,
|
||||
lambda: CacheDumpGraph(before_after="after", action="rename", dump=self.dump_graph()),
|
||||
)
|
||||
|
||||
|
||||
52
core/dbt/adapters/capability.py
Normal file
52
core/dbt/adapters/capability.py
Normal file
@@ -0,0 +1,52 @@
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
from typing import Optional, DefaultDict, Mapping
|
||||
|
||||
|
||||
class Capability(str, Enum):
|
||||
"""Enumeration of optional adapter features which can be probed using BaseAdapter.capabilities()"""
|
||||
|
||||
SchemaMetadataByRelations = "SchemaMetadataByRelations"
|
||||
"""Indicates efficient support for retrieving schema metadata for a list of relations, rather than always retrieving
|
||||
all the relations in a schema."""
|
||||
|
||||
TableLastModifiedMetadata = "TableLastModifiedMetadata"
|
||||
"""Indicates support for determining the time of the last table modification by querying database metadata."""
|
||||
|
||||
|
||||
class Support(str, Enum):
|
||||
Unknown = "Unknown"
|
||||
"""The adapter has not declared whether this capability is a feature of the underlying DBMS."""
|
||||
|
||||
Unsupported = "Unsupported"
|
||||
"""This capability is not possible with the underlying DBMS, so the adapter does not implement related macros."""
|
||||
|
||||
NotImplemented = "NotImplemented"
|
||||
"""This capability is available in the underlying DBMS, but support has not yet been implemented in the adapter."""
|
||||
|
||||
Versioned = "Versioned"
|
||||
"""Some versions of the DBMS supported by the adapter support this capability and the adapter has implemented any
|
||||
macros needed to use it."""
|
||||
|
||||
Full = "Full"
|
||||
"""All versions of the DBMS supported by the adapter support this capability and the adapter has implemented any
|
||||
macros needed to use it."""
|
||||
|
||||
|
||||
@dataclass
|
||||
class CapabilitySupport:
|
||||
support: Support
|
||||
first_version: Optional[str] = None
|
||||
|
||||
def __bool__(self):
|
||||
return self.support == Support.Versioned or self.support == Support.Full
|
||||
|
||||
|
||||
class CapabilityDict(DefaultDict[Capability, CapabilitySupport]):
|
||||
def __init__(self, vals: Mapping[Capability, CapabilitySupport]):
|
||||
super().__init__(self._default)
|
||||
self.update(vals)
|
||||
|
||||
@staticmethod
|
||||
def _default():
|
||||
return CapabilitySupport(support=Support.Unknown)
|
||||
23
core/dbt/adapters/clients/jinja.py
Normal file
23
core/dbt/adapters/clients/jinja.py
Normal file
@@ -0,0 +1,23 @@
|
||||
from typing import Dict, Any
|
||||
from dbt.common.clients.jinja import BaseMacroGenerator, get_environment
|
||||
|
||||
|
||||
class QueryStringGenerator(BaseMacroGenerator):
|
||||
def __init__(self, template_str: str, context: Dict[str, Any]) -> None:
|
||||
super().__init__(context)
|
||||
self.template_str: str = template_str
|
||||
env = get_environment()
|
||||
self.template = env.from_string(
|
||||
self.template_str,
|
||||
globals=self.context,
|
||||
)
|
||||
|
||||
def get_name(self) -> str:
|
||||
return "query_comment_macro"
|
||||
|
||||
def get_template(self):
|
||||
"""Don't use the template cache, we don't have a node"""
|
||||
return self.template
|
||||
|
||||
def __call__(self, connection_name: str, node) -> str:
|
||||
return str(self.call_macro(connection_name, node))
|
||||
0
core/dbt/adapters/contracts/__init__.py
Normal file
0
core/dbt/adapters/contracts/__init__.py
Normal file
@@ -11,31 +11,32 @@ from typing import (
|
||||
List,
|
||||
Callable,
|
||||
)
|
||||
from dbt.exceptions import DbtInternalError
|
||||
from dbt.utils import translate_aliases, md5
|
||||
from dbt.events.functions import fire_event
|
||||
from dbt.events.types import NewConnectionOpening
|
||||
from dbt.events.contextvars import get_node_info
|
||||
from typing_extensions import Protocol
|
||||
from dbt.dataclass_schema import (
|
||||
from typing_extensions import Protocol, Annotated
|
||||
|
||||
from mashumaro.jsonschema.annotations import Pattern
|
||||
|
||||
from dbt.adapters.utils import translate_aliases
|
||||
from dbt.common.exceptions import DbtInternalError
|
||||
from dbt.common.dataclass_schema import (
|
||||
dbtClassMixin,
|
||||
StrEnum,
|
||||
ExtensibleDbtClassMixin,
|
||||
HyphenatedDbtClassMixin,
|
||||
ValidatedStringMixin,
|
||||
register_pattern,
|
||||
)
|
||||
from dbt.contracts.util import Replaceable
|
||||
from dbt.common.contracts.util import Replaceable
|
||||
from dbt.common.utils import md5
|
||||
|
||||
from dbt.common.events.functions import fire_event
|
||||
from dbt.adapters.events.types import NewConnectionOpening
|
||||
|
||||
# TODO: this is a very bad dependency - shared global state
|
||||
from dbt.common.events.contextvars import get_node_info
|
||||
|
||||
|
||||
class Identifier(ValidatedStringMixin):
|
||||
ValidationRegex = r"^[A-Za-z_][A-Za-z0-9_]+$"
|
||||
|
||||
|
||||
# we need register_pattern for jsonschema validation
|
||||
register_pattern(Identifier, r"^[A-Za-z_][A-Za-z0-9_]+$")
|
||||
|
||||
|
||||
@dataclass
|
||||
class AdapterResponse(dbtClassMixin):
|
||||
_message: str
|
||||
@@ -55,7 +56,8 @@ class ConnectionState(StrEnum):
|
||||
|
||||
@dataclass(init=False)
|
||||
class Connection(ExtensibleDbtClassMixin, Replaceable):
|
||||
type: Identifier
|
||||
# Annotated is used by mashumaro for jsonschema generation
|
||||
type: Annotated[Identifier, Pattern(r"^[A-Za-z_][A-Za-z0-9_]+$")]
|
||||
name: Optional[str] = None
|
||||
state: ConnectionState = ConnectionState.INIT
|
||||
transaction_open: bool = False
|
||||
@@ -108,7 +110,7 @@ class LazyHandle:
|
||||
connection, updating the handle on the Connection.
|
||||
"""
|
||||
|
||||
def __init__(self, opener: Callable[[Connection], Connection]):
|
||||
def __init__(self, opener: Callable[[Connection], Connection]) -> None:
|
||||
self.opener = opener
|
||||
|
||||
def resolve(self, connection: Connection) -> Connection:
|
||||
@@ -161,6 +163,7 @@ class Credentials(ExtensibleDbtClassMixin, Replaceable, metaclass=abc.ABCMeta):
|
||||
@classmethod
|
||||
def __pre_deserialize__(cls, data):
|
||||
data = super().__pre_deserialize__(data)
|
||||
# Need to fixup dbname => database, pass => password
|
||||
data = cls.translate_aliases(data)
|
||||
return data
|
||||
|
||||
@@ -220,10 +223,10 @@ DEFAULT_QUERY_COMMENT = """
|
||||
|
||||
|
||||
@dataclass
|
||||
class QueryComment(HyphenatedDbtClassMixin):
|
||||
class QueryComment(dbtClassMixin):
|
||||
comment: str = DEFAULT_QUERY_COMMENT
|
||||
append: bool = False
|
||||
job_label: bool = False
|
||||
job_label: bool = field(default=False, metadata={"alias": "job-label"})
|
||||
|
||||
|
||||
class AdapterRequiredConfig(HasCredentials, Protocol):
|
||||
@@ -231,3 +234,4 @@ class AdapterRequiredConfig(HasCredentials, Protocol):
|
||||
query_comment: QueryComment
|
||||
cli_vars: Dict[str, Any]
|
||||
target_path: str
|
||||
log_cache_events: bool
|
||||
11
core/dbt/adapters/contracts/macros.py
Normal file
11
core/dbt/adapters/contracts/macros.py
Normal file
@@ -0,0 +1,11 @@
|
||||
from typing import Optional
|
||||
from typing_extensions import Protocol
|
||||
|
||||
from dbt.common.clients.jinja import MacroProtocol
|
||||
|
||||
|
||||
class MacroResolver(Protocol):
|
||||
def find_macro_by_name(
|
||||
self, name: str, root_project_name: str, package: Optional[str]
|
||||
) -> Optional[MacroProtocol]:
|
||||
raise NotImplementedError("find_macro_by_name not implemented")
|
||||
@@ -6,11 +6,11 @@ from typing import (
|
||||
)
|
||||
from typing_extensions import Protocol
|
||||
|
||||
from dbt.dataclass_schema import dbtClassMixin, StrEnum
|
||||
from dbt.common.dataclass_schema import dbtClassMixin, StrEnum
|
||||
|
||||
from dbt.contracts.util import Replaceable
|
||||
from dbt.exceptions import CompilationError, DataclassNotDictError
|
||||
from dbt.utils import deep_merge
|
||||
from dbt.common.contracts.util import Replaceable
|
||||
from dbt.common.exceptions import CompilationError, DataclassNotDictError
|
||||
from dbt.common.utils import deep_merge
|
||||
|
||||
|
||||
class RelationType(StrEnum):
|
||||
@@ -19,6 +19,15 @@ class RelationType(StrEnum):
|
||||
CTE = "cte"
|
||||
MaterializedView = "materialized_view"
|
||||
External = "external"
|
||||
Ephemeral = "ephemeral"
|
||||
|
||||
|
||||
class RelationConfig(Protocol):
|
||||
name: str
|
||||
database: str
|
||||
schema: str
|
||||
identifier: str
|
||||
quoting_dict: Dict[str, bool]
|
||||
|
||||
|
||||
class ComponentName(StrEnum):
|
||||
57
core/dbt/adapters/events/README.md
Normal file
57
core/dbt/adapters/events/README.md
Normal file
@@ -0,0 +1,57 @@
|
||||
# Events Module
|
||||
The Events module is responsible for communicating internal dbt structures into a consumable interface. Because the "event" classes are based entirely on protobuf definitions, the interface is really clearly defined, whether or not protobufs are used to consume it. We use Betterproto for compiling the protobuf message definitions into Python classes.
|
||||
|
||||
# Using the Events Module
|
||||
The event module provides types that represent what is happening in dbt in `events.types`. These types are intended to represent an exhaustive list of all things happening within dbt that will need to be logged, streamed, or printed. To fire an event, `common.events.functions::fire_event` is the entry point to the module from everywhere in dbt.
|
||||
|
||||
# Logging
|
||||
When events are processed via `fire_event`, nearly everything is logged. Whether or not the user has enabled the debug flag, all debug messages are still logged to the file. However, some events are particularly time consuming to construct because they return a huge amount of data. Today, the only messages in this category are cache events and are only logged if the `--log-cache-events` flag is on. This is important because these messages should not be created unless they are going to be logged, because they cause a noticable performance degredation. These events use a "fire_event_if" functions.
|
||||
|
||||
# Adding a New Event
|
||||
* Add a new message in types.proto, and a second message with the same name + "Msg". The "Msg" message should have two fields, an "info" field of EventInfo, and a "data" field referring to the message name without "Msg"
|
||||
* run the protoc compiler to update adapter_types_pb2.py: make adapter_proto_types
|
||||
* Add a wrapping class in core/dbt/adapters/event/types.py with a Level superclass plus code and message methods
|
||||
|
||||
We have switched from using betterproto to using google protobuf, because of a lack of support for Struct fields in betterproto.
|
||||
|
||||
The google protobuf interface is janky and very much non-Pythonic. The "generated" classes in types_pb2.py do not resemble regular Python classes. They do not have normal constructors; they can only be constructed empty. They can be "filled" by setting fields individually or using a json_format method like ParseDict. We have wrapped the logging events with a class (in types.py) which allows using a constructor -- keywords only, no positional parameters.
|
||||
|
||||
## Required for Every Event
|
||||
|
||||
- a method `code`, that's unique across events
|
||||
- assign a log level by using the Level mixin: `DebugLevel`, `InfoLevel`, `WarnLevel`, or `ErrorLevel`
|
||||
- a message()
|
||||
|
||||
Example
|
||||
```
|
||||
class PartialParsingDeletedExposure(DebugLevel):
|
||||
def code(self):
|
||||
return "I049"
|
||||
|
||||
def message(self) -> str:
|
||||
return f"Partial parsing: deleted exposure {self.unique_id}"
|
||||
|
||||
```
|
||||
|
||||
|
||||
# Adapter Maintainers
|
||||
To integrate existing log messages from adapters, you likely have a line of code like this in your adapter already:
|
||||
```python
|
||||
from dbt.logger import GLOBAL_LOGGER as logger
|
||||
```
|
||||
|
||||
Simply change it to these two lines with your adapter's database name, and all your existing call sites will now use the new system for v1.0:
|
||||
|
||||
```python
|
||||
|
||||
from dbt.adapter.events.logging import AdapterLogger
|
||||
|
||||
logger = AdapterLogger("<database name>")
|
||||
# e.g. AdapterLogger("Snowflake")
|
||||
```
|
||||
|
||||
## Compiling types.proto
|
||||
|
||||
After adding a new message in `adapter_types.proto`, either:
|
||||
- In the repository root directory: `make adapter_proto_types`
|
||||
- In the `core/dbt/adapters/events` directory: `protoc -I=. --python_out=. types.proto`
|
||||
0
core/dbt/adapters/events/__init__.py
Normal file
0
core/dbt/adapters/events/__init__.py
Normal file
517
core/dbt/adapters/events/adapter_types.proto
Normal file
517
core/dbt/adapters/events/adapter_types.proto
Normal file
@@ -0,0 +1,517 @@
|
||||
syntax = "proto3";
|
||||
|
||||
package proto_types;
|
||||
|
||||
import "google/protobuf/timestamp.proto";
|
||||
import "google/protobuf/struct.proto";
|
||||
|
||||
// Common event info
|
||||
message AdapterCommonEventInfo {
|
||||
string name = 1;
|
||||
string code = 2;
|
||||
string msg = 3;
|
||||
string level = 4;
|
||||
string invocation_id = 5;
|
||||
int32 pid = 6;
|
||||
string thread = 7;
|
||||
google.protobuf.Timestamp ts = 8;
|
||||
map<string, string> extra = 9;
|
||||
string category = 10;
|
||||
}
|
||||
|
||||
// AdapterNodeRelation
|
||||
message AdapterNodeRelation {
|
||||
string database = 10;
|
||||
string schema = 11;
|
||||
string alias = 12;
|
||||
string relation_name = 13;
|
||||
}
|
||||
|
||||
// NodeInfo
|
||||
message AdapterNodeInfo {
|
||||
string node_path = 1;
|
||||
string node_name = 2;
|
||||
string unique_id = 3;
|
||||
string resource_type = 4;
|
||||
string materialized = 5;
|
||||
string node_status = 6;
|
||||
string node_started_at = 7;
|
||||
string node_finished_at = 8;
|
||||
google.protobuf.Struct meta = 9;
|
||||
AdapterNodeRelation node_relation = 10;
|
||||
}
|
||||
|
||||
// ReferenceKey
|
||||
message ReferenceKeyMsg {
|
||||
string database = 1;
|
||||
string schema = 2;
|
||||
string identifier = 3;
|
||||
}
|
||||
|
||||
// D - Deprecations
|
||||
|
||||
// D005
|
||||
message AdapterDeprecationWarning {
|
||||
string old_name = 1;
|
||||
string new_name = 2;
|
||||
}
|
||||
|
||||
message AdapterDeprecationWarningMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
AdapterDeprecationWarning data = 2;
|
||||
}
|
||||
|
||||
// D012
|
||||
message CollectFreshnessReturnSignature {
|
||||
}
|
||||
|
||||
message CollectFreshnessReturnSignatureMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
CollectFreshnessReturnSignature data = 2;
|
||||
}
|
||||
|
||||
// E - DB Adapter
|
||||
|
||||
// E001
|
||||
message AdapterEventDebug {
|
||||
AdapterNodeInfo node_info = 1;
|
||||
string name = 2;
|
||||
string base_msg = 3;
|
||||
google.protobuf.ListValue args = 4;
|
||||
}
|
||||
|
||||
message AdapterEventDebugMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
AdapterEventDebug data = 2;
|
||||
}
|
||||
|
||||
// E002
|
||||
message AdapterEventInfo {
|
||||
AdapterNodeInfo node_info = 1;
|
||||
string name = 2;
|
||||
string base_msg = 3;
|
||||
google.protobuf.ListValue args = 4;
|
||||
}
|
||||
|
||||
message AdapterEventInfoMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
AdapterEventInfo data = 2;
|
||||
}
|
||||
|
||||
// E003
|
||||
message AdapterEventWarning {
|
||||
AdapterNodeInfo node_info = 1;
|
||||
string name = 2;
|
||||
string base_msg = 3;
|
||||
google.protobuf.ListValue args = 4;
|
||||
}
|
||||
|
||||
message AdapterEventWarningMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
AdapterEventWarning data = 2;
|
||||
}
|
||||
|
||||
// E004
|
||||
message AdapterEventError {
|
||||
AdapterNodeInfo node_info = 1;
|
||||
string name = 2;
|
||||
string base_msg = 3;
|
||||
google.protobuf.ListValue args = 4;
|
||||
string exc_info = 5;
|
||||
}
|
||||
|
||||
message AdapterEventErrorMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
AdapterEventError data = 2;
|
||||
}
|
||||
|
||||
// E005
|
||||
message NewConnection {
|
||||
AdapterNodeInfo node_info = 1;
|
||||
string conn_type = 2;
|
||||
string conn_name = 3;
|
||||
}
|
||||
|
||||
message NewConnectionMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
NewConnection data = 2;
|
||||
}
|
||||
|
||||
// E006
|
||||
message ConnectionReused {
|
||||
string conn_name = 1;
|
||||
string orig_conn_name = 2;
|
||||
}
|
||||
|
||||
message ConnectionReusedMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
ConnectionReused data = 2;
|
||||
}
|
||||
|
||||
// E007
|
||||
message ConnectionLeftOpenInCleanup {
|
||||
string conn_name = 1;
|
||||
}
|
||||
|
||||
message ConnectionLeftOpenInCleanupMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
ConnectionLeftOpenInCleanup data = 2;
|
||||
}
|
||||
|
||||
// E008
|
||||
message ConnectionClosedInCleanup {
|
||||
string conn_name = 1;
|
||||
}
|
||||
|
||||
message ConnectionClosedInCleanupMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
ConnectionClosedInCleanup data = 2;
|
||||
}
|
||||
|
||||
// E009
|
||||
message RollbackFailed {
|
||||
AdapterNodeInfo node_info = 1;
|
||||
string conn_name = 2;
|
||||
string exc_info = 3;
|
||||
}
|
||||
|
||||
message RollbackFailedMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
RollbackFailed data = 2;
|
||||
}
|
||||
|
||||
// E010
|
||||
message ConnectionClosed {
|
||||
AdapterNodeInfo node_info = 1;
|
||||
string conn_name = 2;
|
||||
}
|
||||
|
||||
message ConnectionClosedMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
ConnectionClosed data = 2;
|
||||
}
|
||||
|
||||
// E011
|
||||
message ConnectionLeftOpen {
|
||||
AdapterNodeInfo node_info = 1;
|
||||
string conn_name = 2;
|
||||
}
|
||||
|
||||
message ConnectionLeftOpenMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
ConnectionLeftOpen data = 2;
|
||||
}
|
||||
|
||||
// E012
|
||||
message Rollback {
|
||||
AdapterNodeInfo node_info = 1;
|
||||
string conn_name = 2;
|
||||
}
|
||||
|
||||
message RollbackMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
Rollback data = 2;
|
||||
}
|
||||
|
||||
// E013
|
||||
message CacheMiss {
|
||||
string conn_name = 1;
|
||||
string database = 2;
|
||||
string schema = 3;
|
||||
}
|
||||
|
||||
message CacheMissMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
CacheMiss data = 2;
|
||||
}
|
||||
|
||||
// E014
|
||||
message ListRelations {
|
||||
string database = 1;
|
||||
string schema = 2;
|
||||
repeated ReferenceKeyMsg relations = 3;
|
||||
}
|
||||
|
||||
message ListRelationsMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
ListRelations data = 2;
|
||||
}
|
||||
|
||||
// E015
|
||||
message ConnectionUsed {
|
||||
AdapterNodeInfo node_info = 1;
|
||||
string conn_type = 2;
|
||||
string conn_name = 3;
|
||||
}
|
||||
|
||||
message ConnectionUsedMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
ConnectionUsed data = 2;
|
||||
}
|
||||
|
||||
// E016
|
||||
message SQLQuery {
|
||||
AdapterNodeInfo node_info = 1;
|
||||
string conn_name = 2;
|
||||
string sql = 3;
|
||||
}
|
||||
|
||||
message SQLQueryMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
SQLQuery data = 2;
|
||||
}
|
||||
|
||||
// E017
|
||||
message SQLQueryStatus {
|
||||
AdapterNodeInfo node_info = 1;
|
||||
string status = 2;
|
||||
float elapsed = 3;
|
||||
}
|
||||
|
||||
message SQLQueryStatusMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
SQLQueryStatus data = 2;
|
||||
}
|
||||
|
||||
// E018
|
||||
message SQLCommit {
|
||||
AdapterNodeInfo node_info = 1;
|
||||
string conn_name = 2;
|
||||
}
|
||||
|
||||
message SQLCommitMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
SQLCommit data = 2;
|
||||
}
|
||||
|
||||
// E019
|
||||
message ColTypeChange {
|
||||
string orig_type = 1;
|
||||
string new_type = 2;
|
||||
ReferenceKeyMsg table = 3;
|
||||
}
|
||||
|
||||
message ColTypeChangeMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
ColTypeChange data = 2;
|
||||
}
|
||||
|
||||
// E020
|
||||
message SchemaCreation {
|
||||
ReferenceKeyMsg relation = 1;
|
||||
}
|
||||
|
||||
message SchemaCreationMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
SchemaCreation data = 2;
|
||||
}
|
||||
|
||||
// E021
|
||||
message SchemaDrop {
|
||||
ReferenceKeyMsg relation = 1;
|
||||
}
|
||||
|
||||
message SchemaDropMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
SchemaDrop data = 2;
|
||||
}
|
||||
|
||||
// E022
|
||||
message CacheAction {
|
||||
string action = 1;
|
||||
ReferenceKeyMsg ref_key = 2;
|
||||
ReferenceKeyMsg ref_key_2 = 3;
|
||||
ReferenceKeyMsg ref_key_3 = 4;
|
||||
repeated ReferenceKeyMsg ref_list = 5;
|
||||
}
|
||||
|
||||
message CacheActionMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
CacheAction data = 2;
|
||||
}
|
||||
|
||||
// Skipping E023, E024, E025, E026, E027, E028, E029, E0230
|
||||
|
||||
// E031
|
||||
message CacheDumpGraph {
|
||||
map<string, string> dump = 1;
|
||||
string before_after = 2;
|
||||
string action = 3;
|
||||
}
|
||||
|
||||
message CacheDumpGraphMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
CacheDumpGraph data = 2;
|
||||
}
|
||||
|
||||
|
||||
// Skipping E032, E033, E034
|
||||
|
||||
|
||||
|
||||
// E034
|
||||
message AdapterRegistered {
|
||||
string adapter_name = 1;
|
||||
string adapter_version = 2;
|
||||
}
|
||||
|
||||
message AdapterRegisteredMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
AdapterRegistered data = 2;
|
||||
}
|
||||
|
||||
// E035
|
||||
message AdapterImportError {
|
||||
string exc = 1;
|
||||
}
|
||||
|
||||
message AdapterImportErrorMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
AdapterImportError data = 2;
|
||||
}
|
||||
|
||||
// E036
|
||||
message PluginLoadError {
|
||||
string exc_info = 1;
|
||||
}
|
||||
|
||||
message PluginLoadErrorMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
PluginLoadError data = 2;
|
||||
}
|
||||
|
||||
// E037
|
||||
message NewConnectionOpening {
|
||||
AdapterNodeInfo node_info = 1;
|
||||
string connection_state = 2;
|
||||
}
|
||||
|
||||
message NewConnectionOpeningMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
NewConnectionOpening data = 2;
|
||||
}
|
||||
|
||||
// E038
|
||||
message CodeExecution {
|
||||
string conn_name = 1;
|
||||
string code_content = 2;
|
||||
}
|
||||
|
||||
message CodeExecutionMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
CodeExecution data = 2;
|
||||
}
|
||||
|
||||
// E039
|
||||
message CodeExecutionStatus {
|
||||
string status = 1;
|
||||
float elapsed = 2;
|
||||
}
|
||||
|
||||
message CodeExecutionStatusMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
CodeExecutionStatus data = 2;
|
||||
}
|
||||
|
||||
// E040
|
||||
message CatalogGenerationError {
|
||||
string exc = 1;
|
||||
}
|
||||
|
||||
message CatalogGenerationErrorMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
CatalogGenerationError data = 2;
|
||||
}
|
||||
|
||||
// E041
|
||||
message WriteCatalogFailure {
|
||||
int32 num_exceptions = 1;
|
||||
}
|
||||
|
||||
message WriteCatalogFailureMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
WriteCatalogFailure data = 2;
|
||||
}
|
||||
|
||||
// E042
|
||||
message CatalogWritten {
|
||||
string path = 1;
|
||||
}
|
||||
|
||||
message CatalogWrittenMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
CatalogWritten data = 2;
|
||||
}
|
||||
|
||||
// E043
|
||||
message CannotGenerateDocs {
|
||||
}
|
||||
|
||||
message CannotGenerateDocsMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
CannotGenerateDocs data = 2;
|
||||
}
|
||||
|
||||
// E044
|
||||
message BuildingCatalog {
|
||||
}
|
||||
|
||||
message BuildingCatalogMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
BuildingCatalog data = 2;
|
||||
}
|
||||
|
||||
// E045
|
||||
message DatabaseErrorRunningHook {
|
||||
string hook_type = 1;
|
||||
}
|
||||
|
||||
message DatabaseErrorRunningHookMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
DatabaseErrorRunningHook data = 2;
|
||||
}
|
||||
|
||||
// E046
|
||||
message HooksRunning {
|
||||
int32 num_hooks = 1;
|
||||
string hook_type = 2;
|
||||
}
|
||||
|
||||
message HooksRunningMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
HooksRunning data = 2;
|
||||
}
|
||||
|
||||
// E047
|
||||
message FinishedRunningStats {
|
||||
string stat_line = 1;
|
||||
string execution = 2;
|
||||
float execution_time = 3;
|
||||
}
|
||||
|
||||
message FinishedRunningStatsMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
FinishedRunningStats data = 2;
|
||||
}
|
||||
|
||||
// E048
|
||||
message ConstraintNotEnforced {
|
||||
string constraint = 1;
|
||||
string adapter = 2;
|
||||
}
|
||||
|
||||
message ConstraintNotEnforcedMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
ConstraintNotEnforced data = 2;
|
||||
}
|
||||
|
||||
// E049
|
||||
message ConstraintNotSupported {
|
||||
string constraint = 1;
|
||||
string adapter = 2;
|
||||
}
|
||||
|
||||
message ConstraintNotSupportedMsg {
|
||||
AdapterCommonEventInfo info = 1;
|
||||
ConstraintNotSupported data = 2;
|
||||
}
|
||||
209
core/dbt/adapters/events/adapter_types_pb2.py
Normal file
209
core/dbt/adapters/events/adapter_types_pb2.py
Normal file
File diff suppressed because one or more lines are too long
39
core/dbt/adapters/events/base_types.py
Normal file
39
core/dbt/adapters/events/base_types.py
Normal file
@@ -0,0 +1,39 @@
|
||||
# Aliasing common Level classes in order to make custom, but not overly-verbose versions that have PROTO_TYPES_MODULE set to the adapter-specific generated types_pb2 module
|
||||
from dbt.common.events.base_types import (
|
||||
BaseEvent,
|
||||
DynamicLevel as CommonDyanicLevel,
|
||||
TestLevel as CommonTestLevel,
|
||||
DebugLevel as CommonDebugLevel,
|
||||
InfoLevel as CommonInfoLevel,
|
||||
WarnLevel as CommonWarnLevel,
|
||||
ErrorLevel as CommonErrorLevel,
|
||||
)
|
||||
from dbt.adapters.events import adapter_types_pb2
|
||||
|
||||
|
||||
class AdapterBaseEvent(BaseEvent):
|
||||
PROTO_TYPES_MODULE = adapter_types_pb2
|
||||
|
||||
|
||||
class DynamicLevel(CommonDyanicLevel, AdapterBaseEvent):
|
||||
pass
|
||||
|
||||
|
||||
class TestLevel(CommonTestLevel, AdapterBaseEvent):
|
||||
pass
|
||||
|
||||
|
||||
class DebugLevel(CommonDebugLevel, AdapterBaseEvent):
|
||||
pass
|
||||
|
||||
|
||||
class InfoLevel(CommonInfoLevel, AdapterBaseEvent):
|
||||
pass
|
||||
|
||||
|
||||
class WarnLevel(CommonWarnLevel, AdapterBaseEvent):
|
||||
pass
|
||||
|
||||
|
||||
class ErrorLevel(CommonErrorLevel, AdapterBaseEvent):
|
||||
pass
|
||||
@@ -1,46 +1,48 @@
|
||||
import traceback
|
||||
from dataclasses import dataclass
|
||||
from dbt.events.functions import fire_event
|
||||
from dbt.events.contextvars import get_node_info
|
||||
from dbt.events.types import (
|
||||
|
||||
from dbt.adapters.events.types import (
|
||||
AdapterEventDebug,
|
||||
AdapterEventInfo,
|
||||
AdapterEventWarning,
|
||||
AdapterEventError,
|
||||
)
|
||||
from dbt.common.events import get_event_manager
|
||||
from dbt.common.events.contextvars import get_node_info
|
||||
from dbt.common.events.event_handler import set_package_logging
|
||||
from dbt.common.events.functions import fire_event
|
||||
|
||||
|
||||
# N.B. No guarantees for what type param msg is.
|
||||
@dataclass
|
||||
class AdapterLogger:
|
||||
name: str
|
||||
|
||||
def debug(self, msg, *args):
|
||||
def debug(self, msg, *args) -> None:
|
||||
event = AdapterEventDebug(
|
||||
name=self.name, base_msg=str(msg), args=list(args), node_info=get_node_info()
|
||||
)
|
||||
fire_event(event)
|
||||
|
||||
def info(self, msg, *args):
|
||||
def info(self, msg, *args) -> None:
|
||||
event = AdapterEventInfo(
|
||||
name=self.name, base_msg=str(msg), args=list(args), node_info=get_node_info()
|
||||
)
|
||||
fire_event(event)
|
||||
|
||||
def warning(self, msg, *args):
|
||||
def warning(self, msg, *args) -> None:
|
||||
event = AdapterEventWarning(
|
||||
name=self.name, base_msg=str(msg), args=list(args), node_info=get_node_info()
|
||||
)
|
||||
fire_event(event)
|
||||
|
||||
def error(self, msg, *args):
|
||||
def error(self, msg, *args) -> None:
|
||||
event = AdapterEventError(
|
||||
name=self.name, base_msg=str(msg), args=list(args), node_info=get_node_info()
|
||||
)
|
||||
fire_event(event)
|
||||
|
||||
# The default exc_info=True is what makes this method different
|
||||
def exception(self, msg, *args):
|
||||
def exception(self, msg, *args) -> None:
|
||||
exc_info = str(traceback.format_exc())
|
||||
event = AdapterEventError(
|
||||
name=self.name,
|
||||
@@ -51,8 +53,15 @@ class AdapterLogger:
|
||||
)
|
||||
fire_event(event)
|
||||
|
||||
def critical(self, msg, *args):
|
||||
def critical(self, msg, *args) -> None:
|
||||
event = AdapterEventError(
|
||||
name=self.name, base_msg=str(msg), args=list(args), node_info=get_node_info()
|
||||
)
|
||||
fire_event(event)
|
||||
|
||||
@staticmethod
|
||||
def set_adapter_dependency_log_level(package_name, level):
|
||||
"""By default, dbt suppresses non-dbt package logs. This method allows
|
||||
you to set the log level for a specific package.
|
||||
"""
|
||||
set_package_logging(package_name, level, get_event_manager())
|
||||
417
core/dbt/adapters/events/types.py
Normal file
417
core/dbt/adapters/events/types.py
Normal file
@@ -0,0 +1,417 @@
|
||||
from dbt.adapters.events.base_types import WarnLevel, InfoLevel, ErrorLevel, DebugLevel
|
||||
from dbt.common.ui import line_wrap_message, warning_tag
|
||||
|
||||
|
||||
def format_adapter_message(name, base_msg, args) -> str:
    """Render an adapter log line as "<name> adapter: <msg>".

    ``base_msg`` is run through ``str.format(*args)`` only when ``args`` is
    non-empty, so literal braces in an unformatted message (e.g. "dict: {k: v}")
    never raise a KeyError.
    """
    if args:
        msg = base_msg.format(*args)
    else:
        msg = base_msg
    return f"{name} adapter: {msg}"
|
||||
|
||||
|
||||
# =======================================================
|
||||
# D - Deprecations
|
||||
# =======================================================
|
||||
|
||||
|
||||
class CollectFreshnessReturnSignature(WarnLevel):
    """Deprecation warning: 'collect_freshness' now returns the full query result."""

    def code(self) -> str:
        return "D012"

    def message(self) -> str:
        description = (
            "The 'collect_freshness' macro signature has changed to return the full "
            "query result, rather than just a table of values. See the v1.5 migration guide "
            "for details on how to update your custom macro: https://docs.getdbt.com/guides/migration/versions/upgrading-to-v1.5"
        )
        return line_wrap_message(warning_tag("Deprecated functionality\n\n" + description))
|
||||
|
||||
|
||||
class AdapterDeprecationWarning(WarnLevel):
    """Deprecation warning for a renamed adapter function (old_name -> new_name)."""

    def code(self) -> str:
        return "D005"

    def message(self) -> str:
        description = (
            "The adapter function `adapter.{old}` is deprecated and will be removed in "
            "a future release of dbt. Please use `adapter.{new}` instead. "
            "\n\nDocumentation for {new} can be found here:"
            "\n\nhttps://docs.getdbt.com/docs/adapter"
        ).format(old=self.old_name, new=self.new_name)
        return line_wrap_message(warning_tag("Deprecated functionality\n\n" + description))
|
||||
|
||||
|
||||
# =======================================================
|
||||
# E - DB Adapter
|
||||
# =======================================================
|
||||
|
||||
|
||||
class AdapterEventDebug(DebugLevel):
    """Debug-level adapter log event; rendered via format_adapter_message."""

    def code(self) -> str:
        return "E001"

    def message(self) -> str:
        return format_adapter_message(self.name, self.base_msg, self.args)
|
||||
|
||||
|
||||
class AdapterEventInfo(InfoLevel):
    """Info-level adapter log event; rendered via format_adapter_message."""

    def code(self) -> str:
        return "E002"

    def message(self) -> str:
        return format_adapter_message(self.name, self.base_msg, self.args)
|
||||
|
||||
|
||||
class AdapterEventWarning(WarnLevel):
    """Warning-level adapter log event; rendered via format_adapter_message."""

    def code(self) -> str:
        return "E003"

    def message(self) -> str:
        return format_adapter_message(self.name, self.base_msg, self.args)
|
||||
|
||||
|
||||
class AdapterEventError(ErrorLevel):
    """Error-level adapter log event; rendered via format_adapter_message."""

    def code(self) -> str:
        return "E004"

    def message(self) -> str:
        return format_adapter_message(self.name, self.base_msg, self.args)
|
||||
|
||||
|
||||
class NewConnection(DebugLevel):
    """Debug event: a new database connection is being acquired."""

    def code(self) -> str:
        return "E005"

    def message(self) -> str:
        return "Acquiring new {} connection '{}'".format(self.conn_type, self.conn_name)
|
||||
|
||||
|
||||
class ConnectionReused(DebugLevel):
    """Debug event: an existing pooled connection was reused under a new name."""

    def code(self) -> str:
        return "E006"

    def message(self) -> str:
        return "Re-using an available connection from the pool (formerly {}, now {})".format(
            self.orig_conn_name, self.conn_name
        )
|
||||
|
||||
|
||||
class ConnectionLeftOpenInCleanup(DebugLevel):
    """Debug event: cleanup found a connection that was left open."""

    def code(self) -> str:
        return "E007"

    def message(self) -> str:
        return "Connection '{}' was left open.".format(self.conn_name)
|
||||
|
||||
|
||||
class ConnectionClosedInCleanup(DebugLevel):
    """Debug event: cleanup confirmed a connection was properly closed."""

    def code(self) -> str:
        return "E008"

    def message(self) -> str:
        return "Connection '{}' was properly closed.".format(self.conn_name)
|
||||
|
||||
|
||||
class RollbackFailed(DebugLevel):
    """Debug event: a rollback attempt on a connection failed."""

    def code(self) -> str:
        return "E009"

    def message(self) -> str:
        return "Failed to rollback '{}'".format(self.conn_name)
|
||||
|
||||
|
||||
class ConnectionClosed(DebugLevel):
    """Debug event: a connection was closed."""

    def code(self) -> str:
        return "E010"

    def message(self) -> str:
        return "On {}: Close".format(self.conn_name)
|
||||
|
||||
|
||||
class ConnectionLeftOpen(DebugLevel):
    """Debug event: a connection handle offered no close operation."""

    def code(self) -> str:
        return "E011"

    def message(self) -> str:
        return "On {}: No close available on handle".format(self.conn_name)
|
||||
|
||||
|
||||
class Rollback(DebugLevel):
    """Debug event: a ROLLBACK was issued on a connection."""

    def code(self) -> str:
        return "E012"

    def message(self) -> str:
        return "On {}: ROLLBACK".format(self.conn_name)
|
||||
|
||||
|
||||
class CacheMiss(DebugLevel):
    """Debug event: the relation cache had no entry for a schema lookup."""

    def code(self) -> str:
        return "E013"

    def message(self) -> str:
        return 'On "{}": cache miss for schema "{}.{}", this is inefficient'.format(
            self.conn_name, self.database, self.schema
        )
|
||||
|
||||
|
||||
class ListRelations(DebugLevel):
    """Debug event: relations discovered while listing a database/schema."""

    def code(self) -> str:
        return "E014"

    def message(self) -> str:
        found = ", ".join(r.identifier for r in self.relations)
        return (
            f"While listing relations in database={self.database}, "
            f"schema={self.schema}, found: {found}"
        )
|
||||
|
||||
|
||||
class ConnectionUsed(DebugLevel):
    """Debug event: a named connection is being used for work."""

    def code(self) -> str:
        return "E015"

    def message(self) -> str:
        return 'Using {} connection "{}"'.format(self.conn_type, self.conn_name)
|
||||
|
||||
|
||||
class SQLQuery(DebugLevel):
    """Debug event: the SQL text about to run on a connection."""

    def code(self) -> str:
        return "E016"

    def message(self) -> str:
        return "On {}: {}".format(self.conn_name, self.sql)
|
||||
|
||||
|
||||
class SQLQueryStatus(DebugLevel):
    """Debug event: status and elapsed time of a completed SQL query."""

    def code(self) -> str:
        return "E017"

    def message(self) -> str:
        return "SQL status: {} in {} seconds".format(self.status, self.elapsed)
|
||||
|
||||
|
||||
class SQLCommit(DebugLevel):
    """Debug event: a COMMIT was issued on a connection."""

    def code(self) -> str:
        return "E018"

    def message(self) -> str:
        return "On {}: COMMIT".format(self.conn_name)
|
||||
|
||||
|
||||
class ColTypeChange(DebugLevel):
    """Debug event: a column's type is being changed in a table."""

    def code(self) -> str:
        return "E019"

    def message(self) -> str:
        return "Changing col type from {} to {} in table {}".format(
            self.orig_type, self.new_type, self.table
        )
|
||||
|
||||
|
||||
class SchemaCreation(DebugLevel):
    """Debug event: a schema is being created."""

    def code(self) -> str:
        return "E020"

    def message(self) -> str:
        return 'Creating schema "{}"'.format(self.relation)
|
||||
|
||||
|
||||
class SchemaDrop(DebugLevel):
    """Debug event: a schema is being dropped."""

    def code(self) -> str:
        return "E021"

    def message(self) -> str:
        return 'Dropping schema "{}".'.format(self.relation)
|
||||
|
||||
|
||||
class CacheAction(DebugLevel):
    """Debug event tracing a mutation of the adapter's relation cache.

    The ``action`` field selects which message is rendered; the ``ref_key*``
    and ``ref_list`` fields are reference keys for the relations involved.
    """

    def code(self) -> str:
        return "E022"

    def format_ref_key(self, ref_key) -> str:
        """Render a reference key as '(database=…, schema=…, identifier=…)'."""
        return f"(database={ref_key.database}, schema={ref_key.schema}, identifier={ref_key.identifier})"

    def message(self) -> str:
        ref_key = self.format_ref_key(self.ref_key)
        ref_key_2 = self.format_ref_key(self.ref_key_2)
        ref_key_3 = self.format_ref_key(self.ref_key_3)
        ref_list = [self.format_ref_key(rfk) for rfk in self.ref_list]
        if self.action == "add_link":
            return f"adding link, {ref_key} references {ref_key_2}"
        elif self.action == "add_relation":
            return f"adding relation: {ref_key}"
        elif self.action == "drop_missing_relation":
            return f"dropped a nonexistent relationship: {ref_key}"
        elif self.action == "drop_cascade":
            return f"drop {ref_key} is cascading to {ref_list}"
        elif self.action == "drop_relation":
            return f"Dropping relation: {ref_key}"
        elif self.action == "update_reference":
            return (
                f"updated reference from {ref_key} -> {ref_key_3} to "
                f"{ref_key_2} -> {ref_key_3}"
            )
        elif self.action == "temporary_relation":
            return f"old key {ref_key} not found in self.relations, assuming temporary"
        elif self.action == "rename_relation":
            return f"Renaming relation {ref_key} to {ref_key_2}"
        elif self.action == "uncached_relation":
            # BUGFIX: a space was missing between the schema name and
            # "is not in the cache", yielding e.g. "my_schemais not in the cache"
            return (
                f"{ref_key_2} references {ref_key} "
                f"but {self.ref_key.database}.{self.ref_key.schema} "
                "is not in the cache, skipping assumed external relation"
            )
        else:
            # unknown action: fall back to just the formatted primary key
            return ref_key
|
||||
|
||||
|
||||
# Skipping E023, E024, E025, E026, E027, E028, E029, E030
|
||||
|
||||
|
||||
class CacheDumpGraph(DebugLevel):
    """Debug event: a before/after dump of the relation cache graph."""

    def code(self) -> str:
        return "E031"

    def message(self) -> str:
        return "dump {} {} : {}".format(self.before_after, self.action, self.dump)
|
||||
|
||||
|
||||
# Skipping E032, E033
|
||||
|
||||
|
||||
class AdapterRegistered(InfoLevel):
    """Info event emitted when an adapter plugin has been registered."""

    def code(self) -> str:
        return "E034"

    def message(self) -> str:
        # adapter_version is rendered directly after the name with no separator;
        # presumably any separator is supplied by the caller as part of adapter_version
        return "Registered adapter: {}{}".format(self.adapter_name, self.adapter_version)
|
||||
|
||||
|
||||
class AdapterImportError(InfoLevel):
    """Info event: importing an adapter raised an exception."""

    def code(self) -> str:
        return "E035"

    def message(self) -> str:
        return "Error importing adapter: {}".format(self.exc)
|
||||
|
||||
|
||||
class PluginLoadError(DebugLevel):
    """Debug event: exception info captured while loading a plugin."""

    def code(self) -> str:
        return "E036"

    def message(self) -> str:
        return "{}".format(self.exc_info)
|
||||
|
||||
|
||||
class NewConnectionOpening(DebugLevel):
    """Debug event: a new connection is being opened; records the prior state."""

    def code(self) -> str:
        return "E037"

    def message(self) -> str:
        return "Opening a new connection, currently in state {}".format(self.connection_state)
|
||||
|
||||
|
||||
class CodeExecution(DebugLevel):
    """Debug event: code content about to execute on a connection."""

    def code(self) -> str:
        return "E038"

    def message(self) -> str:
        return "On {}: {}".format(self.conn_name, self.code_content)
|
||||
|
||||
|
||||
class CodeExecutionStatus(DebugLevel):
    """Debug event: status and elapsed time of an executed code block."""

    def code(self) -> str:
        return "E039"

    def message(self) -> str:
        return "Execution status: {} in {} seconds".format(self.status, self.elapsed)
|
||||
|
||||
|
||||
class CatalogGenerationError(WarnLevel):
    """Warning event: an exception occurred while generating the catalog."""

    def code(self) -> str:
        return "E040"

    def message(self) -> str:
        return "Encountered an error while generating catalog: {}".format(self.exc)
|
||||
|
||||
|
||||
class WriteCatalogFailure(ErrorLevel):
    """Error event: one or more failures occurred while writing the catalog."""

    def code(self) -> str:
        return "E041"

    def message(self) -> str:
        # pluralize "failure" unless there was exactly one
        plural = "" if self.num_exceptions == 1 else "s"
        return f"dbt encountered {self.num_exceptions} failure{plural} while writing the catalog"
|
||||
|
||||
|
||||
class CatalogWritten(InfoLevel):
    """Info event: the catalog file was written to disk."""

    def code(self) -> str:
        return "E042"

    def message(self) -> str:
        return "Catalog written to {}".format(self.path)
|
||||
|
||||
|
||||
class CannotGenerateDocs(InfoLevel):
    """Info event: docs generation skipped because compilation failed."""

    def code(self) -> str:
        return "E043"

    def message(self) -> str:
        return "compile failed, cannot generate docs"
|
||||
|
||||
|
||||
class BuildingCatalog(InfoLevel):
    """Info event: catalog build has started."""

    def code(self) -> str:
        return "E044"

    def message(self) -> str:
        return "Building catalog"
|
||||
|
||||
|
||||
class DatabaseErrorRunningHook(InfoLevel):
    """Info event: a database error occurred while running a hook."""

    def code(self) -> str:
        return "E045"

    def message(self) -> str:
        return "Database error while running {}".format(self.hook_type)
|
||||
|
||||
|
||||
class HooksRunning(InfoLevel):
    """Info event: a batch of hooks of one type is about to run."""

    def code(self) -> str:
        return "E046"

    def message(self) -> str:
        noun = "hook" if self.num_hooks == 1 else "hooks"
        return f"Running {self.num_hooks} {self.hook_type} {noun}"
|
||||
|
||||
|
||||
class FinishedRunningStats(InfoLevel):
    """Info event: summary stats after a run finishes."""

    def code(self) -> str:
        return "E047"

    def message(self) -> str:
        elapsed = f"{self.execution_time:0.2f}"
        return f"Finished running {self.stat_line}{self.execution} ({elapsed}s)."
|
||||
|
||||
|
||||
class ConstraintNotEnforced(WarnLevel):
    """Warning event: a declared constraint is not enforced by the adapter."""

    def code(self) -> str:
        return "E048"

    def message(self) -> str:
        preamble = f"The constraint type {self.constraint} is not enforced by {self.adapter}. "
        detail = (
            "The constraint will be included in this model's DDL statement, but it will not "
            "guarantee anything about the underlying data. Set 'warn_unenforced: false' on "
            "this constraint to ignore this warning."
        )
        return line_wrap_message(warning_tag(preamble + detail))
|
||||
|
||||
|
||||
class ConstraintNotSupported(WarnLevel):
    """Warning event: a declared constraint is unsupported and will be dropped."""

    def code(self) -> str:
        return "E049"

    def message(self) -> str:
        preamble = (
            f"The constraint type {self.constraint} is not supported by {self.adapter}, and will "
        )
        detail = "be ignored. Set 'warn_unsupported: false' on this constraint to ignore this warning."
        return line_wrap_message(warning_tag(preamble + detail))
|
||||
4
core/dbt/adapters/exceptions/__init__.py
Normal file
4
core/dbt/adapters/exceptions/__init__.py
Normal file
@@ -0,0 +1,4 @@
|
||||
from dbt.adapters.exceptions.compilation import * # noqa
|
||||
from dbt.adapters.exceptions.alias import * # noqa
|
||||
from dbt.adapters.exceptions.database import * # noqa
|
||||
from dbt.adapters.exceptions.connection import * # noqa
|
||||
24
core/dbt/adapters/exceptions/alias.py
Normal file
24
core/dbt/adapters/exceptions/alias.py
Normal file
@@ -0,0 +1,24 @@
|
||||
from typing import Mapping, Any
|
||||
|
||||
from dbt.common.exceptions import DbtValidationError
|
||||
|
||||
|
||||
class AliasError(DbtValidationError):
    """Base class for alias-resolution validation errors."""

    pass
|
||||
|
||||
|
||||
# core level exceptions
|
||||
# core level exceptions
class DuplicateAliasError(AliasError):
    """Raised when multiple supplied kwargs alias to the same canonical key."""

    def __init__(self, kwargs: Mapping[str, Any], aliases: Mapping[str, str], canonical_key: str):
        self.kwargs = kwargs
        self.aliases = aliases
        self.canonical_key = canonical_key
        super().__init__(msg=self.get_message())

    def get_message(self) -> str:
        # dupe found: go through the dict so we can have a nice-ish error
        dupes = [k for k in self.kwargs if self.aliases.get(k) == self.canonical_key]
        key_names = ", ".join(dupes)
        return f'Got duplicate keys: ({key_names}) all map to "{self.canonical_key}"'
|
||||
255
core/dbt/adapters/exceptions/compilation.py
Normal file
255
core/dbt/adapters/exceptions/compilation.py
Normal file
@@ -0,0 +1,255 @@
|
||||
from typing import List, Mapping, Any
|
||||
|
||||
from dbt.common.exceptions import CompilationError, DbtDatabaseError
|
||||
from dbt.common.ui import line_wrap_message
|
||||
|
||||
|
||||
class MissingConfigError(CompilationError):
    """Raised when a model omits a config parameter it is required to define."""

    def __init__(self, unique_id: str, name: str):
        self.unique_id = unique_id
        self.name = name
        super().__init__(
            msg=(
                f"Model '{unique_id}' does not define a required config "
                f"parameter '{name}'."
            )
        )
|
||||
|
||||
|
||||
class MultipleDatabasesNotAllowedError(CompilationError):
    """Raised when several databases are found where only one is allowed."""

    def __init__(self, databases):
        self.databases = databases
        super().__init__(msg=self.get_message())

    def get_message(self) -> str:
        # the message is simply the stringified collection of databases
        return str(self.databases)
|
||||
|
||||
|
||||
class ApproximateMatchError(CompilationError):
    """Raised when a relation search finds only an approximate (ambiguous) match."""

    def __init__(self, target, relation):
        self.target = target
        self.relation = relation
        super().__init__(msg=self.get_message())

    def get_message(self) -> str:
        return (
            "When searching for a relation, dbt found an approximate match. "
            "Instead of guessing \nwhich relation to use, dbt will move on. "
            f"Please delete {self.relation}, or rename it to be less ambiguous."
            f"\nSearched for: {self.target}\nFound: {self.relation}"
        )
|
||||
|
||||
|
||||
class SnapshotTargetIncompleteError(CompilationError):
    """Raised when a snapshot target has extra columns and is missing required ones."""

    def __init__(self, extra: List, missing: List):
        self.extra = extra
        self.missing = missing
        super().__init__(msg=self.get_message())

    def get_message(self) -> str:
        extra = '", "'.join(self.extra)
        missing = '", "'.join(self.missing)
        return (
            f'Snapshot target has ("{extra}") but not ("{missing}") - is it an '
            "unmigrated previous version archive?"
        )
|
||||
|
||||
|
||||
class DuplicateMacroInPackageError(CompilationError):
    """Raised when two macros with the same name exist in one package."""

    def __init__(self, macro, macro_mapping: Mapping):
        self.macro = macro
        # maps macro unique_id -> macro; used to locate the colliding definition
        self.macro_mapping = macro_mapping
        super().__init__(msg=self.get_message())

    def get_message(self) -> str:
        other_path = self.macro_mapping[self.macro.unique_id].original_file_path
        # subtract 2 for the "Compilation Error" indent
        # note that the line wrap eats newlines, so if you want newlines,
        # this is the result :(
        msg = line_wrap_message(
            f"""\
dbt found two macros named "{self.macro.name}" in the project
"{self.macro.package_name}".


To fix this error, rename or remove one of the following
macros:

    - {self.macro.original_file_path}

    - {other_path}
""",
            subtract=2,
        )
        return msg
|
||||
|
||||
|
||||
class DuplicateMaterializationNameError(CompilationError):
    """Raised when two packages define a materialization with the same name."""

    def __init__(self, macro, other_macro):
        self.macro = macro
        self.other_macro = other_macro
        super().__init__(msg=self.get_message())

    def get_message(self) -> str:
        return (
            f"Found two materializations with the name {self.macro.name} (packages "
            f"{self.macro.package_name} and {self.other_macro.macro.package_name}). dbt cannot resolve "
            "this ambiguity"
        )
|
||||
|
||||
|
||||
class ColumnTypeMissingError(CompilationError):
    """Raised when a contracted model's columns lack a data_type in YAML."""

    def __init__(self, column_names: List):
        self.column_names = column_names
        super().__init__(msg=self.get_message())

    def get_message(self) -> str:
        return (
            "Contracted models require data_type to be defined for each column. "
            "Please ensure that the column name and data_type are defined within "
            f"the YAML configuration for the {self.column_names} column(s)."
        )
|
||||
|
||||
|
||||
class MacroNotFoundError(CompilationError):
    """Raised when a node references a macro that is not defined."""

    def __init__(self, node, target_macro_id: str):
        self.node = node
        self.target_macro_id = target_macro_id
        super().__init__(
            msg=(
                f"'{node.unique_id}' references macro '{target_macro_id}'"
                " which is not defined!"
            )
        )
|
||||
|
||||
|
||||
class MissingMaterializationError(CompilationError):
    """Raised when no materialization exists for the requested adapter."""

    def __init__(self, materialization, adapter_type):
        self.materialization = materialization
        self.adapter_type = adapter_type
        super().__init__(msg=self.get_message())

    def get_message(self) -> str:
        # the 'default' materializations are always searched; adapter-specific
        # ones only when the adapter is not "default" itself
        if self.adapter_type == "default":
            valid_types = "'default'"
        else:
            valid_types = f"'default' and '{self.adapter_type}'"
        return (
            f"No materialization '{self.materialization}' was found for "
            f"adapter {self.adapter_type}! (searched types {valid_types})"
        )
|
||||
|
||||
|
||||
class SnapshotTargetNotSnapshotTableError(CompilationError):
    """Raised when the snapshot target table lacks required snapshot columns."""

    def __init__(self, missing: List):
        self.missing = missing
        super().__init__(msg=self.get_message())

    def get_message(self) -> str:
        missing = '", "'.join(self.missing)
        return f'Snapshot target is not a snapshot table (missing "{missing}")'
|
||||
|
||||
|
||||
class NullRelationDropAttemptedError(CompilationError):
    """Raised when dbt attempts to drop a relation that is null."""

    def __init__(self, name: str):
        self.name = name
        self.msg = f"Attempted to drop a null relation for {name}"
        super().__init__(msg=self.msg)
|
||||
|
||||
|
||||
class NullRelationCacheAttemptedError(CompilationError):
    """Raised when dbt attempts to cache a relation that is null."""

    def __init__(self, name: str):
        self.name = name
        self.msg = f"Attempted to cache a null relation for {name}"
        super().__init__(msg=self.msg)
|
||||
|
||||
|
||||
class RelationTypeNullError(CompilationError):
    """Raised when a relation is dropped but its type is null."""

    def __init__(self, relation):
        self.relation = relation
        self.msg = f"Tried to drop relation {relation}, but its type is null."
        super().__init__(msg=self.msg)
|
||||
|
||||
|
||||
class MaterializationNotAvailableError(CompilationError):
    """Raised when a materialization exists but is unavailable for this adapter."""

    def __init__(self, materialization, adapter_type: str):
        self.materialization = materialization
        self.adapter_type = adapter_type
        super().__init__(msg=self.get_message())

    def get_message(self) -> str:
        return (
            f"Materialization '{self.materialization}' is not available "
            f"for {self.adapter_type}!"
        )
|
||||
|
||||
|
||||
class RelationReturnedMultipleResultsError(CompilationError):
    """Raised when get_relation matches more than one relation."""

    def __init__(self, kwargs: Mapping[str, Any], matches: List):
        self.kwargs = kwargs
        self.matches = matches
        super().__init__(msg=self.get_message())

    def get_message(self) -> str:
        return (
            "get_relation returned more than one relation with the given args. "
            "Please specify a database or schema to narrow down the result set."
            f"\n{self.kwargs}\n\n{self.matches}"
        )
|
||||
|
||||
|
||||
class UnexpectedNonTimestampError(DbtDatabaseError):
    """Raised when a field expected to hold a timestamp holds another type."""

    def __init__(self, field_name: str, source, dt: Any):
        self.field_name = field_name
        self.source = source
        # record only the type's name, not the offending value itself
        self.type_name = type(dt).__name__
        super().__init__(
            f"Expected a timestamp value when querying field '{field_name}' of table "
            f"{source} but received value of type '{self.type_name}' instead"
        )
|
||||
|
||||
|
||||
class RenameToNoneAttemptedError(CompilationError):
    """Raised when a relation rename is attempted with a null destination."""

    def __init__(self, src_name: str, dst_name: str, name: str):
        self.src_name = src_name
        self.dst_name = dst_name
        self.name = name
        self.msg = f"Attempted to rename {src_name} to {dst_name} for {name}"
        super().__init__(msg=self.msg)
|
||||
|
||||
|
||||
class QuoteConfigTypeError(CompilationError):
    """Raised when the seed 'quote_columns' config has an invalid type."""

    def __init__(self, quote_config: Any):
        self.quote_config = quote_config
        super().__init__(msg=self.get_message())

    def get_message(self) -> str:
        return (
            'The seed configuration value of "quote_columns" has an '
            f"invalid type {type(self.quote_config)}"
        )
|
||||
|
||||
|
||||
class RelationWrongTypeError(CompilationError):
    """Raised when a relation already exists with a different type than expected."""

    def __init__(self, relation, expected_type, model=None):
        self.relation = relation
        self.expected_type = expected_type
        self.model = model
        super().__init__(msg=self.get_message())

    def get_message(self) -> str:
        return (
            f"Trying to create {self.expected_type} {self.relation}, "
            f"but it currently exists as a {self.relation.type}. Either "
            f"drop {self.relation} manually, or run dbt with "
            "`--full-refresh` and dbt will drop it for you."
        )
|
||||
16
core/dbt/adapters/exceptions/connection.py
Normal file
16
core/dbt/adapters/exceptions/connection.py
Normal file
@@ -0,0 +1,16 @@
|
||||
from typing import List
|
||||
|
||||
from dbt.common.exceptions import DbtRuntimeError, DbtDatabaseError
|
||||
|
||||
|
||||
class InvalidConnectionError(DbtRuntimeError):
    """Raised when a thread requests a connection it never acquired."""

    def __init__(self, thread_id, known: List) -> None:
        self.thread_id = thread_id
        self.known = known
        message = f"connection never acquired for thread {thread_id}, have {known}"
        super().__init__(msg=message)
|
||||
|
||||
|
||||
class FailedToConnectError(DbtDatabaseError):
    """Database error raised when establishing a connection fails."""

    pass
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user