Mirror of https://github.com/dbt-labs/dbt-core (synced 2025-12-17 19:31:34 +00:00)

Compare commits: 263 commits (arky/add-p ... relation-c)
| SHA1 |
|---|
| 03229d65d8 |
| 48a0f8025d |
| ba53f053fd |
| b8de881ed3 |
| 8c96285650 |
| d7d5e2335c |
| 160d0db238 |
| 2cee8652a6 |
| 7f777f8a42 |
| 00f49206e9 |
| 1bca662883 |
| 41ac915949 |
| 373125ecb8 |
| 294ad82e50 |
| 12bd1e87fb |
| 8bad75c65b |
| c836b7585e |
| 220f56d8d2 |
| 32fde75504 |
| 615ad1fe2d |
| 81236a3dca |
| 6d834a18ed |
| 2ab0f7b26b |
| 9bb970e6ef |
| e56a5dae8b |
| 1c9cec1787 |
| 4d02ef637b |
| 19f027b7a7 |
| 1d0a3e92c8 |
| ab90c777d0 |
| 3902137dfc |
| 0131feac68 |
| 017faf4bd1 |
| c2f7d75e9e |
| 51b94b26cc |
| e24f9b3da7 |
| b58e8e3ffc |
| f45b013321 |
| e547c0ec64 |
| 6871fc46b5 |
| 931b2dbe40 |
| bb35b3eb87 |
| 01d481bc8d |
| 46b9a1d621 |
| 839c720e91 |
| d88c6987a2 |
| 4ee950427a |
| 6c1822f186 |
| c7c3ac872c |
| c4ff280436 |
| 7fddd6e448 |
| 1260782bd2 |
| 333120b111 |
| bb21403c9e |
| ac972948b8 |
| 211392c4a4 |
| 7317de23a3 |
| af916666a2 |
| 7de8930d1d |
| 200bcdcd9f |
| b9a603e3aa |
| 1a825484fb |
| a2a7b7d795 |
| 4122f6c308 |
| 6aeebc4c76 |
| f44d704801 |
| dbd02e54c2 |
| a89642a6f9 |
| c141148616 |
| 469a9aca06 |
| 98310b6612 |
| ef9d6a870f |
| 35f46dac8c |
| efa6339e18 |
| 1baebb423c |
| 462df8395e |
| 35f214d9db |
| af0cbcb6a5 |
| 2e35426d11 |
| bf10a29f06 |
| a7e2d9bc40 |
| a3777496b5 |
| edf6aedc51 |
| 53845d0277 |
| 3d27483658 |
| 4f9bd0cb38 |
| 3f7f7de179 |
| 6461f5aacf |
| 339957b42c |
| 4391dc1a63 |
| 964e0e4e8a |
| 549dbf3390 |
| 70b2e15a25 |
| bb249d612c |
| 17773bdb94 |
| f30293359c |
| 0c85e6149f |
| ec57d7af94 |
| df791f729c |
| c6ff3abecd |
| eac13e3bd3 |
| 46ee3f3d9c |
| 5e1f0c5fbc |
| c4f09b160a |
| 48c97e86dd |
| 416bc845ad |
| 408a78985a |
| 0c965c8115 |
| f65e4b6940 |
| a2d4424f92 |
| 997f839cd6 |
| 556fad50df |
| bb4214b5c2 |
| f17c1f3fe7 |
| d4fe9a8ad4 |
| 2910aa29e4 |
| 89cc073ea8 |
| aa86fdfe71 |
| 48e9ced781 |
| 7b02bd1f02 |
| 417fc2a735 |
| 317128f790 |
| e3dfb09b10 |
| d912654110 |
| 34ab4cf9be |
| d597b80486 |
| 3f5ebe81b9 |
| f52bd9287b |
| f5baeeea1c |
| 3cc7044fb3 |
| 26c7675c28 |
| 8aaed0e29f |
| 5182e3c40c |
| 1e252c7664 |
| 05ef3b6e44 |
| ad04012b63 |
| c93cba4603 |
| 971669016f |
| 6c6f245914 |
| b39eeb328c |
| be94bf1f3c |
| e24a952e98 |
| 89f20d12cf |
| ebeb0f1154 |
| d66fe214d9 |
| 75781503b8 |
| 9aff3ca274 |
| 7e2a08f3a5 |
| a0e13561b1 |
| 7eedfcd274 |
| da779ac77c |
| adfa3226e3 |
| e5e1a272ff |
| d8e8a78368 |
| 7ae3de1fa0 |
| 72898c7211 |
| fc1a14a0e3 |
| f063e4e01c |
| 07372db906 |
| 48d04e8141 |
| 6234267242 |
| 1afbb87e99 |
| d18a74ddb7 |
| 4d3c6d9c7c |
| 10f9724827 |
| 582faa129e |
| 4ec87a01e0 |
| ff98685dd6 |
| 424f3d218a |
| 661623f9f7 |
| 49397b4d7b |
| 0553fd817c |
| 7ad971f720 |
| f485c13035 |
| c30b691164 |
| d088d4493e |
| 770f804325 |
| 37a29073de |
| 17cd145f09 |
| ac539fd5cf |
| 048553ddc3 |
| dfe6b71fd9 |
| 18ee93ca3a |
| cb4bc2d6e9 |
| b0451806ef |
| b514e4c249 |
| 8350dfead3 |
| 34e6edbb13 |
| 27be92903e |
| 9388030182 |
| b7aee3f5a4 |
| 83ff38ab24 |
| 6603a44151 |
| e69d4e7f14 |
| 506f65e880 |
| 41bb52762b |
| 8c98ef3e70 |
| 44d1e73b4f |
| 53794fbaba |
| 556b4043e9 |
| 424c636533 |
| f63709260e |
| 991618dfc1 |
| 1af489b1cd |
| a433c31d6e |
| 5814928e38 |
| 6130a6e1d0 |
| 7872f6a670 |
| f230e418aa |
| 518eb73f88 |
| 5b6d21d7da |
| 410506f448 |
| 3cb44d37c0 |
| f977ed7471 |
| 3f5617b569 |
| fe9c875d32 |
| 22c40a4766 |
| bcf140b3c1 |
| e3692a6a3d |
| e7489383a2 |
| 70246c3f86 |
| 0796c84da5 |
| 718482fb02 |
| a3fb66daa4 |
| da34b80c26 |
| ba5ab21140 |
| 65f41a1e36 |
| 0930c9c059 |
| 1d193a9ab9 |
| 3adc6dca61 |
| 36d9f841d6 |
| 48ad13de00 |
| 42935cce05 |
| e77f1c3b0f |
| 388838aa99 |
| d4d0990072 |
| 4210d17f14 |
| fbd12e78c9 |
| 83d3421e72 |
| 8bcbf73aaa |
| cc5f15885d |
| 20fdf55bf6 |
| 955dcec68b |
| 2b8564b16f |
| 57da3e51cd |
| dede0e9747 |
| 35d2fc1158 |
| c5267335a3 |
| 15c7b589c2 |
| 0ada5e8bf7 |
| 412ac8d1b9 |
| 5df501a281 |
| 3e4c61d020 |
| cc39fe51b3 |
| 89cd24388d |
| d5da0a8093 |
| 88ae1f8871 |
| 50b3d1deaa |
| 3b3def5b8a |
| 4f068a45ff |
| 23a9504a51 |
| d0d4eba477 |
| a3fab0b5a9 |
.bumpversion.cfg

@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 1.7.0a1
+current_version = 1.8.0a1
 parse = (?P<major>[\d]+) # major version number
     \.(?P<minor>[\d]+) # minor version number
     \.(?P<patch>[\d]+) # patch version number
CHANGELOG.md

@@ -3,6 +3,7 @@
 For information on prior major and minor releases, see their changelogs:
 
+* [1.7](https://github.com/dbt-labs/dbt-core/blob/1.7.latest/CHANGELOG.md)
 * [1.6](https://github.com/dbt-labs/dbt-core/blob/1.6.latest/CHANGELOG.md)
 * [1.5](https://github.com/dbt-labs/dbt-core/blob/1.5.latest/CHANGELOG.md)
 * [1.4](https://github.com/dbt-labs/dbt-core/blob/1.4.latest/CHANGELOG.md)
@@ -0,0 +1,6 @@
+kind: Breaking Changes
+body: Remove adapter.get_compiler interface
+time: 2023-11-27T11:47:57.443202-05:00
+custom:
+  Author: michelleark
+  Issue: "9148"

@@ -0,0 +1,6 @@
+kind: Breaking Changes
+body: Move AdapterLogger to adapters folder
+time: 2023-11-28T13:43:56.853925-08:00
+custom:
+  Author: colin-rogers-dbt
+  Issue: "9151"

@@ -0,0 +1,7 @@
+kind: Breaking Changes
+body: move event manager setup back to core, remove ref to global EVENT_MANAGER and
+  clean up event manager functions
+time: 2023-11-30T13:53:48.645192-08:00
+custom:
+  Author: colin-rogers-dbt
+  Issue: "9150"

@@ -1,6 +0,0 @@
-kind: "Dependencies"
-body: "Bump mypy from 1.3.0 to 1.4.0"
-time: 2023-06-21T00:57:52.00000Z
-custom:
-  Author: dependabot[bot]
-  PR: 7912

.changes/unreleased/Dependencies-20231031-131954.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Dependencies
+body: Begin using DSI 0.4.x
+time: 2023-10-31T13:19:54.750009-07:00
+custom:
+  Author: QMalcolm peterallenwebb
+  PR: "8892"

.changes/unreleased/Dependencies-20231106-130051.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Dependencies
+body: Update typing-extensions version to >=4.4
+time: 2023-11-06T13:00:51.062386-08:00
+custom:
+  Author: tlento
+  PR: "9012"

@@ -1,6 +0,0 @@
-kind: Docs
-body: Corrected spelling of "Partiton"
-time: 2023-07-15T20:09:07.057361092+02:00
-custom:
-  Author: pgoslatara
-  Issue: "8100"

@@ -1,6 +0,0 @@
-kind: Docs
-body: Remove static SQL codeblock for metrics
-time: 2023-07-18T19:24:22.155323+02:00
-custom:
-  Author: marcodamore
-  Issue: "436"

.changes/unreleased/Docs-20231106-123157.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Docs
+body: fix get_custom_database docstring
+time: 2023-11-06T12:31:57.525711Z
+custom:
+  Author: LeoTheGriff
+  Issue: "9003"

.changes/unreleased/Features-20230915-123733.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Features
+body: 'Allow adapters to include package logs in dbt standard logging '
+time: 2023-09-15T12:37:33.862862-07:00
+custom:
+  Author: colin-rogers-dbt
+  Issue: "7859"

.changes/unreleased/Features-20231017-143620.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Features
+body: Add drop_schema_named macro
+time: 2023-10-17T14:36:20.612289-07:00
+custom:
+  Author: colin-rogers-dbt
+  Issue: "8025"

.changes/unreleased/Features-20231026-110821.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Features
+body: migrate utils to common and adapters folders
+time: 2023-10-26T11:08:21.458709-07:00
+custom:
+  Author: colin-rogers-dbt
+  Issue: "8924"

.changes/unreleased/Features-20231026-123556.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Features
+body: Move Agate helper client into common
+time: 2023-10-26T12:35:56.538587-07:00
+custom:
+  Author: MichelleArk
+  Issue: "8926"

.changes/unreleased/Features-20231026-123913.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Features
+body: remove usage of dbt.config.PartialProject from dbt/adapters
+time: 2023-10-26T12:39:13.904116-07:00
+custom:
+  Author: MichelleArk
+  Issue: "8928"

.changes/unreleased/Features-20231031-132022.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Features
+body: Add exports to SavedQuery spec
+time: 2023-10-31T13:20:22.448158-07:00
+custom:
+  Author: QMalcolm peterallenwebb
+  Issue: "8892"

.changes/unreleased/Features-20231107-135635.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Features
+body: Remove legacy logger
+time: 2023-11-07T13:56:35.186648-08:00
+custom:
+  Author: colin-rogers-dbt
+  Issue: "8027"

.changes/unreleased/Features-20231110-154255.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Features
+body: Support setting export configs hierarchically via saved query and project configs
+time: 2023-11-10T15:42:55.042317-08:00
+custom:
+  Author: QMalcolm
+  Issue: "8956"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Fixed double-underline
-time: 2023-06-25T14:27:31.231253719+08:00
-custom:
-  Author: lllong33
-  Issue: "5301"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Enable converting deprecation warnings to errors
-time: 2023-07-18T12:55:18.03914-04:00
-custom:
-  Author: michelleark
-  Issue: "8130"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Add status to Parse Inline Error
-time: 2023-07-20T12:27:23.085084-07:00
-custom:
-  Author: ChenyuLInx
-  Issue: "8173"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Ensure `warn_error_options` get serialized in `invocation_args_dict`
-time: 2023-07-20T16:15:13.761813-07:00
-custom:
-  Author: QMalcolm
-  Issue: "7694"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Stop detecting materialization macros based on macro name
-time: 2023-07-20T17:01:12.496238-07:00
-custom:
-  Author: QMalcolm
-  Issue: "6231"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Update `dbt deps` download retry logic to handle `EOFError` exceptions
-time: 2023-07-20T17:24:22.969951-07:00
-custom:
-  Author: QMalcolm
-  Issue: "6653"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Improve handling of CTE injection with ephemeral models
-time: 2023-07-26T10:44:48.888451-04:00
-custom:
-  Author: gshank
-  Issue: "8213"

.changes/unreleased/Fixes-20231013-130943.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Fixes
+body: For packages installed with tarball method, fetch metadata to resolve nested dependencies
+time: 2023-10-13T13:09:43.188308-04:00
+custom:
+  Author: adamlopez
+  Issue: "8621"

.changes/unreleased/Fixes-20231016-163953.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix partial parsing not working for semantic model change
+time: 2023-10-16T16:39:53.05058-07:00
+custom:
+  Author: ChenyuLInx
+  Issue: "8859"

.changes/unreleased/Fixes-20231024-110151.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Fixes
+body: Handle unknown `type_code` for model contracts
+time: 2023-10-24T11:01:51.980781-06:00
+custom:
+  Author: dbeatty10
+  Issue: 8877 8353

.changes/unreleased/Fixes-20231024-145504.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Fixes
+body: Add back contract enforcement for temporary tables on postgres
+time: 2023-10-24T14:55:04.051683-05:00
+custom:
+  Author: emmyoop
+  Issue: "8857"

.changes/unreleased/Fixes-20231024-155400.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Fixes
+body: Rework get_catalog implementation to retain previous adapter interface semantics
+time: 2023-10-24T15:54:00.628086-04:00
+custom:
+  Author: peterallenwebb
+  Issue: "8846"

.changes/unreleased/Fixes-20231026-002536.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Fixes
+body: Add version to fqn when version==0
+time: 2023-10-26T00:25:36.259356-05:00
+custom:
+  Author: aranke
+  Issue: "8836"

.changes/unreleased/Fixes-20231030-093734.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix cased comparison in catalog-retrieval function.
+time: 2023-10-30T09:37:34.258612-04:00
+custom:
+  Author: peterallenwebb
+  Issue: "8939"

.changes/unreleased/Fixes-20231031-005345.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Fixes
+body: Catalog queries now assign the correct type to materialized views
+time: 2023-10-31T00:53:45.486203-04:00
+custom:
+  Author: mikealfare
+  Issue: "8864"

.changes/unreleased/Fixes-20231031-144837.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix compilation exception running empty seed file and support new Integer agate data_type
+time: 2023-10-31T14:48:37.774871-04:00
+custom:
+  Author: gshank
+  Issue: "8895"

.changes/unreleased/Fixes-20231101-155824.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Fixes
+body: Make relation filtering None-tolerant for maximal flexibility across adapters.
+time: 2023-11-01T15:58:24.552054-04:00
+custom:
+  Author: peterallenwebb
+  Issue: "8974"

.changes/unreleased/Fixes-20231106-155933.yaml (new file)

@@ -0,0 +1,7 @@
+kind: Fixes
+body: Update run_results.json from previous versions of dbt to support deferral and
+  rerun from failure
+time: 2023-11-06T15:59:33.677915-05:00
+custom:
+  Author: jtcohen6 peterallenwebb
+  Issue: "9010"

.changes/unreleased/Fixes-20231107-092358.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix git repository with subdirectory for Deps
+time: 2023-11-07T09:23:58.214271-08:00
+custom:
+  Author: ChenyuLInx
+  Issue: "9000"

.changes/unreleased/Fixes-20231107-094130.yaml (new file)

@@ -0,0 +1,7 @@
+kind: Fixes
+body: Use MANIFEST.in to recursively include all jinja templates; fixes issue where
+  some templates were not included in the distribution
+time: 2023-11-07T09:41:30.121733-05:00
+custom:
+  Author: mikealfare
+  Issue: "9016"

.changes/unreleased/Fixes-20231113-114956.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix formatting of tarball information in packages-lock.yml
+time: 2023-11-13T11:49:56.437007-08:00
+custom:
+  Author: ChenyuLInx QMalcolm
+  Issue: "9062"

.changes/unreleased/Fixes-20231127-154310.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Fixes
+body: 'deps: Lock git packages to commit SHA during resolution'
+time: 2023-11-27T15:43:10.122069+01:00
+custom:
+  Author: jtcohen6
+  Issue: "9050"

.changes/unreleased/Fixes-20231127-154347.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Fixes
+body: 'deps: Use PackageRenderer to read package-lock.json'
+time: 2023-11-27T15:43:47.842423+01:00
+custom:
+  Author: jtcohen6
+  Issue: "9127"

.changes/unreleased/Fixes-20231128-155225.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Fixes
+body: 'Get sources working again in dbt docs generate'
+time: 2023-11-28T15:52:25.738256Z
+custom:
+  Author: aranke
+  Issue: "9119"

@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Refactor flaky test pp_versioned_models
-time: 2023-07-19T12:46:11.972481-04:00
-custom:
-  Author: gshank
-  Issue: "7781"

@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: format exception from dbtPlugin.initialize
-time: 2023-07-19T16:33:34.586377-04:00
-custom:
-  Author: michelleark
-  Issue: "8152"

@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: A way to control maxBytes for a single dbt.log file
-time: 2023-07-24T15:06:54.263822-07:00
-custom:
-  Author: ChenyuLInx
-  Issue: "8199"

@@ -1,7 +0,0 @@
-kind: Under the Hood
-body: Ref expressions with version can now be processed by the latest version of the
-  high-performance dbt-extractor library.
-time: 2023-07-25T10:26:09.902878-04:00
-custom:
-  Author: peterallenwebb
-  Issue: "7688"

.changes/unreleased/Under the Hood-20230831-164435.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Added more type annotations.
+time: 2023-08-31T16:44:35.737954-04:00
+custom:
+  Author: peterallenwebb
+  Issue: "8537"

.changes/unreleased/Under the Hood-20231026-184953.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Remove usage of dbt.include.global_project in dbt/adapters
+time: 2023-10-26T18:49:53.36449-04:00
+custom:
+  Author: michelleark
+  Issue: "8925"

.changes/unreleased/Under the Hood-20231027-140048.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Add a no-op runner for Saved Qeury
+time: 2023-10-27T14:00:48.4755-07:00
+custom:
+  Author: ChenyuLInx
+  Issue: "8893"

.changes/unreleased/Under the Hood-20231101-102758.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: remove dbt.flags.MP_CONTEXT usage in dbt/adapters
+time: 2023-11-01T10:27:58.790153-04:00
+custom:
+  Author: michelleark
+  Issue: "8967"

.changes/unreleased/Under the Hood-20231101-173124.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: 'Remove usage of dbt.flags.LOG_CACHE_EVENTS in dbt/adapters'
+time: 2023-11-01T17:31:24.974093-04:00
+custom:
+  Author: michelleark
+  Issue: "8969"

.changes/unreleased/Under the Hood-20231103-195222.yaml (new file)

@@ -0,0 +1,7 @@
+kind: Under the Hood
+body: Move CatalogRelationTypes test case to the shared test suite to be reused by
+  adapter maintainers
+time: 2023-11-03T19:52:22.694394-04:00
+custom:
+  Author: mikealfare
+  Issue: "8952"

.changes/unreleased/Under the Hood-20231106-080422.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Treat SystemExit as an interrupt if raised during node execution.
+time: 2023-11-06T08:04:22.022179-05:00
+custom:
+  Author: benmosher
+  Issue: n/a

.changes/unreleased/Under the Hood-20231106-105730.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Removing unused 'documentable'
+time: 2023-11-06T10:57:30.694056-08:00
+custom:
+  Author: QMalcolm
+  Issue: "8871"

.changes/unreleased/Under the Hood-20231107-135728.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Remove use of dbt/core exceptions in dbt/adapter
+time: 2023-11-07T13:57:28.683727-08:00
+custom:
+  Author: colin-rogers-dbt MichelleArk
+  Issue: "8920"

.changes/unreleased/Under the Hood-20231107-191546.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Cache dbt plugin modules to improve integration test performance
+time: 2023-11-07T19:15:46.170151-05:00
+custom:
+  Author: peterallenwebb
+  Issue: "9029"

.changes/unreleased/Under the Hood-20231111-175350.yaml (new file)

@@ -0,0 +1,7 @@
+kind: Under the Hood
+body: Fix test_current_timestamp_matches_utc test; allow for MacOS runner system clock
+  variance
+time: 2023-11-11T17:53:50.098843-05:00
+custom:
+  Author: mikealfare
+  Issue: "9057"

.changes/unreleased/Under the Hood-20231116-174251.yaml (new file)

@@ -0,0 +1,7 @@
+kind: Under the Hood
+body: Remove usage of dbt.deprecations in dbt/adapters, enable core & adapter-specific
+  event types and protos
+time: 2023-11-16T17:42:51.005023-05:00
+custom:
+  Author: michelleark
+  Issue: 8927 8918

.changes/unreleased/Under the Hood-20231120-134735.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Clean up unused adaptor folders
+time: 2023-11-20T13:47:35.923794-08:00
+custom:
+  Author: ChenyuLInx
+  Issue: "9123"

.changes/unreleased/Under the Hood-20231120-183214.yaml (new file)

@@ -0,0 +1,7 @@
+kind: Under the Hood
+body: Move column constraints into common/contracts, removing another dependency of
+  adapters on core.
+time: 2023-11-20T18:32:14.859503-05:00
+custom:
+  Author: peterallenwebb
+  Issue: "9024"

.changes/unreleased/Under the Hood-20231128-170732.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Move dbt.semver to dbt.common.semver and update references.
+time: 2023-11-28T17:07:32.172421-08:00
+custom:
+  Author: versusfacit
+  Issue: "9039"

.changes/unreleased/Under the Hood-20231130-135432.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Move lowercase utils method to common
+time: 2023-11-30T13:54:32.561673-08:00
+custom:
+  Author: colin-rogers-dbt
+  Issue: "9180"

.changes/unreleased/Under the Hood-20231205-093544.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Remove usages of dbt.clients.jinja in dbt/adapters
+time: 2023-12-05T09:35:44.845352+09:00
+custom:
+  Author: michelleark
+  Issue: "9205"

.changes/unreleased/Under the Hood-20231205-120559.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Remove usage of dbt.contracts in dbt/adapters
+time: 2023-12-05T12:05:59.936775+09:00
+custom:
+  Author: michelleark
+  Issue: "9208"

.changes/unreleased/Under the Hood-20231205-170725.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Introduce RelationConfig Protocol, consolidate Relation.create_from
+time: 2023-12-05T17:07:25.33861+09:00
+custom:
+  Author: michelleark
+  Issue: "9215"

.changes/unreleased/Under the Hood-20231205-235830.yaml (new file)

@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: remove manifest from adapter.set_relations_cache signature
+time: 2023-12-05T23:58:30.920144+09:00
+custom:
+  Author: michelleark
+  Issue: "9217"
.flake8

@@ -10,3 +10,5 @@ ignore =
     E741
     E501 # long line checking is done in black
 exclude = test/
+per-file-ignores =
+    */__init__.py: F401
.gitattributes

@@ -1,4 +1,4 @@
-core/dbt/include/index.html binary
+core/dbt/task/docs/index.html binary
 tests/functional/artifacts/data/state/*/manifest.json binary
 core/dbt/docs/build/html/searchindex.js binary
 core/dbt/docs/build/html/index.html binary
.github/CODEOWNERS

@@ -13,23 +13,6 @@
 # the core team as a whole will be assigned
 * @dbt-labs/core-team
 
-### OSS Tooling Guild
-
-/.github/ @dbt-labs/guild-oss-tooling
-.bumpversion.cfg @dbt-labs/guild-oss-tooling
-
-.changie.yaml @dbt-labs/guild-oss-tooling
-
-pre-commit-config.yaml @dbt-labs/guild-oss-tooling
-pytest.ini @dbt-labs/guild-oss-tooling
-tox.ini @dbt-labs/guild-oss-tooling
-
-pyproject.toml @dbt-labs/guild-oss-tooling
-requirements.txt @dbt-labs/guild-oss-tooling
-dev_requirements.txt @dbt-labs/guild-oss-tooling
-/core/setup.py @dbt-labs/guild-oss-tooling
-/core/MANIFEST.in @dbt-labs/guild-oss-tooling
-
 ### ADAPTERS
 
 # Adapter interface ("base" + "sql" adapter defaults, cache)

@@ -40,7 +23,7 @@ dev_requirements.txt @dbt-labs/guild-oss-tooling
 
 # Postgres plugin
 /plugins/ @dbt-labs/core-adapters
-/plugins/postgres/setup.py @dbt-labs/core-adapters @dbt-labs/guild-oss-tooling
+/plugins/postgres/setup.py @dbt-labs/core-adapters
 
 # Functional tests for adapter plugins
 /tests/adapter @dbt-labs/core-adapters
.github/ISSUE_TEMPLATE/implementation-ticket.yml

@@ -1,7 +1,7 @@
 name: 🛠️ Implementation
 description: This is an implementation ticket intended for use by the maintainers of dbt-core
 title: "[<project>] <title>"
-labels: ["user_docs"]
+labels: ["user docs"]
 body:
   - type: markdown
     attributes:

@@ -11,7 +11,7 @@ body:
       label: Housekeeping
      description: >
         A couple friendly reminders:
-        1. Remove the `user_docs` label if the scope of this work does not require changes to https://docs.getdbt.com/docs: no end-user interface (e.g. yml spec, CLI, error messages, etc) or functional changes
+        1. Remove the `user docs` label if the scope of this work does not require changes to https://docs.getdbt.com/docs: no end-user interface (e.g. yml spec, CLI, error messages, etc) or functional changes
         2. Link any blocking issues in the "Blocked on" field under the "Core devs & maintainers" project.
       options:
         - label: I am a maintainer of dbt-core

@@ -25,11 +25,29 @@ body:
       required: true
   - type: textarea
     attributes:
-      label: Acceptance critera
+      label: Acceptance criteria
       description: |
         What is the definition of done for this ticket? Include any relevant edge cases and/or test cases
     validations:
       required: true
+  - type: textarea
+    attributes:
+      label: Impact to Other Teams
+      description: |
+        Will this change impact other teams? Include details of the kinds of changes required (new tests, code changes, related tickets) and _add the relevant `Impact:[team]` label_.
+      placeholder: |
+        Example: This change impacts `dbt-redshift` because the tests will need to be modified. The `Impact:[Adapter]` label has been added.
+    validations:
+      required: true
+  - type: textarea
+    attributes:
+      label: Will backports be required?
+      description: |
+        Will this change need to be backported to previous versions? Add details, possible blockers to backporting and _add the relevant backport labels `backport 1.x.latest`_
+      placeholder: |
+        Example: Backport to 1.6.latest, 1.5.latest and 1.4.latest. Since 1.4 isn't using click, the backport may be complicated. The `backport 1.6.latest`, `backport 1.5.latest` and `backport 1.4.latest` labels have been added.
+    validations:
+      required: true
   - type: textarea
     attributes:
       label: Context
.github/dependabot.yml

@@ -28,3 +28,10 @@ updates:
     schedule:
       interval: "weekly"
     rebase-strategy: "disabled"
+
+  # github dependencies
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "weekly"
+    rebase-strategy: "disabled"
.github/pull_request_template.md

@@ -1,15 +1,12 @@
 resolves #
-[docs](https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose) dbt-labs/docs.getdbt.com/#
 
 <!---
   Include the number of the issue addressed by this PR above if applicable.
   PRs for code changes without an associated issue *will not be merged*.
   See CONTRIBUTING.md for more information.
 
-  Include the number of the docs issue that was opened for this PR. If
-  this change has no user-facing implications, "N/A" suffices instead. New
-  docs tickets can be created by clicking the link above or by going to
-  https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose.
+  Add the `user docs` label to this PR if it will need docs changes. An
+  issue will get opened in docs.getdbt.com upon successful merge of this PR.
 -->
 
 ### Problem

@@ -33,3 +30,4 @@ resolves #
 - [ ] I have run this code in development and it appears to resolve the stated issue
 - [ ] This PR includes tests, or tests are not required/relevant for this PR
 - [ ] This PR has no interface changes (e.g. macros, cli, logs, json artifacts, config files, adapter interface, etc) or this PR has already received feedback and approval from Product or DX
+- [ ] This PR includes [type annotations](https://docs.python.org/3/library/typing.html) for new and modified functions
.github/workflows/changelog-existence.yml

@@ -2,10 +2,8 @@
 # Checks that a file has been committed under the /.changes directory
 # as a new CHANGELOG entry. Cannot check for a specific filename as
 # it is dynamically generated by change type and timestamp.
-# This workflow should not require any secrets since it runs for PRs
-# from forked repos.
-# By default, secrets are not passed to workflows running from
-# a forked repo.
+# This workflow runs on pull_request_target because it requires
+# secrets to post comments.
 
 # **why?**
 # Ensure code change gets reflected in the CHANGELOG.

@@ -19,7 +17,7 @@
 name: Check Changelog Entry
 
 on:
-  pull_request:
+  pull_request_target:
     types: [opened, reopened, labeled, unlabeled, synchronize]
   workflow_dispatch:
.github/workflows/docs-issue.yml (new file)

@@ -0,0 +1,43 @@
+# **what?**
+# Open an issue in docs.getdbt.com when a PR is labeled `user docs`
+
+# **why?**
+# To reduce barriers for keeping docs up to date
+
+# **when?**
+# When a PR is labeled `user docs` and is merged. Runs on pull_request_target to run off the workflow already merged,
+# not the workflow that existed on the PR branch. This allows old PRs to get comments.
+
+
+name: Open issues in docs.getdbt.com repo when a PR is labeled
+run-name: "Open an issue in docs.getdbt.com for PR #${{ github.event.pull_request.number }}"
+
+on:
+  pull_request_target:
+    types: [labeled, closed]
+
+defaults:
+  run:
+    shell: bash
+
+permissions:
+  issues: write # opens new issues
+  pull-requests: write # comments on PRs
+
+
+jobs:
+  open_issues:
+    # we only want to run this when the PR has been merged or the label in the labeled event is `user docs`. Otherwise it runs the
+    # risk of duplicaton of issues being created due to merge and label both triggering this workflow to run and neither having
+    # generating the comment before the other runs. This lives here instead of the shared workflow because this is where we
+    # decide if it should run or not.
+    if: |
+      (github.event.pull_request.merged == true) &&
+      ((github.event.action == 'closed' && contains( github.event.pull_request.labels.*.name, 'user docs')) ||
+      (github.event.action == 'labeled' && github.event.label.name == 'user docs'))
+    uses: dbt-labs/actions/.github/workflows/open-issue-in-repo.yml@main
+    with:
+      issue_repository: "dbt-labs/docs.getdbt.com"
+      issue_title: "Docs Changes Needed from ${{ github.event.repository.name }} PR #${{ github.event.pull_request.number }}"
+      issue_body: "At a minimum, update body to include a link to the page on docs.getdbt.com requiring updates and what part(s) of the page you would like to see updated."
+    secrets: inherit
.github/workflows/main.yml

@@ -36,7 +36,7 @@ defaults:
 # top-level adjustments can be made here
 env:
   # number of parallel processes to spawn for python integration testing
-  PYTHON_INTEGRATION_TEST_WORKERS: ${{ vars.PYTHON_INTEGRATION_TEST_WORKERS }}
+  PYTHON_INTEGRATION_TEST_WORKERS: 5
 
 jobs:
   code-quality:

@@ -108,8 +108,9 @@
       - name: Upload Unit Test Coverage to Codecov
         if: ${{ matrix.python-version == '3.11' }}
         uses: codecov/codecov-action@v3
+        env:
+          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
         with:
-          token: ${{ secrets.CODECOV_TOKEN }}
           flags: unit
 
   integration-metadata:
     name: integration test metadata generation

@@ -221,17 +222,26 @@
       - name: Upload Integration Test Coverage to Codecov
         if: ${{ matrix.python-version == '3.11' }}
         uses: codecov/codecov-action@v3
+        env:
+          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
         with:
-          token: ${{ secrets.CODECOV_TOKEN }}
           flags: integration
 
   integration-report:
-    name: integration test suite
     if: ${{ always() }}
+    name: Integration Test Suite
     runs-on: ubuntu-latest
     needs: integration
     steps:
-      - name: "[Notification] Integration test suite passes"
-        run: |
-          echo "::notice title="Integration test suite passes""
+      - name: "Integration Tests Failed"
+        if: ${{ contains(needs.integration.result, 'failure') || contains(needs.integration.result, 'cancelled') }}
+        # when this is true the next step won't execute
+        run: |
+          echo "::notice title='Integration test suite failed'"
+          exit 1
+
+      - name: "Integration Tests Passed"
+        run: |
+          echo "::notice title='Integration test suite passed'"
 
   build:
     name: build packages
.github/workflows/release-docker.yml

@@ -83,7 +83,7 @@ jobs:
           password: ${{ secrets.GITHUB_TOKEN }}
 
       - name: Build and push MAJOR.MINOR.PATCH tag
-        uses: docker/build-push-action@v4
+        uses: docker/build-push-action@v5
         with:
           file: docker/Dockerfile
           push: True

@@ -94,7 +94,7 @@
             ghcr.io/dbt-labs/${{ github.event.inputs.package }}:${{ github.event.inputs.version_number }}
 
       - name: Build and push MINOR.latest tag
-        uses: docker/build-push-action@v4
+        uses: docker/build-push-action@v5
         if: ${{ needs.get_version_meta.outputs.minor_latest == 'True' }}
         with:
           file: docker/Dockerfile

@@ -106,7 +106,7 @@
             ghcr.io/dbt-labs/${{ github.event.inputs.package }}:${{ needs.get_version_meta.outputs.major }}.${{ needs.get_version_meta.outputs.minor }}.latest
 
       - name: Build and push latest tag
-        uses: docker/build-push-action@v4
+        uses: docker/build-push-action@v5
         if: ${{ needs.get_version_meta.outputs.latest == 'True' }}
         with:
           file: docker/Dockerfile
.github/workflows/repository-cleanup.yml (new file)

@@ -0,0 +1,30 @@
+# **what?**
+# Cleanup branches left over from automation and testing. Also cleanup
+# draft releases from release testing.
+
+# **why?**
+# The automations are leaving behind branches and releases that clutter
+# the repository. Sometimes we need them to debug processes so we don't
+# want them immediately deleted. Running on Saturday to avoid running
+# at the same time as an actual release to prevent breaking a release
+# mid-release.
+
+# **when?**
+# Mainly on a schedule of 12:00 Saturday.
+# Manual trigger can also run on demand
+
+name: Repository Cleanup
+
+on:
+  schedule:
+    - cron: '0 12 * * SAT' # At 12:00 on Saturday - details in `why` above
+
+  workflow_dispatch: # for manual triggering
+
+permissions:
+  contents: write
+
+jobs:
+  cleanup-repo:
+    uses: dbt-labs/actions/.github/workflows/repository-cleanup.yml@main
+    secrets: inherit
@@ -21,7 +21,7 @@ permissions: read-all
 # top-level adjustments can be made here
 env:
   # number of parallel processes to spawn for python testing
-  PYTHON_INTEGRATION_TEST_WORKERS: ${{ vars.PYTHON_INTEGRATION_TEST_WORKERS }}
+  PYTHON_INTEGRATION_TEST_WORKERS: 5
 
 jobs:
   integration-metadata:
.pre-commit-config.yaml

@@ -1,7 +1,7 @@
 # Configuration for pre-commit hooks (see https://pre-commit.com/).
 # Eventually the hooks described here will be run as tests before merging each PR.
 
-exclude: ^(core/dbt/docs/build/|core/dbt/events/types_pb2.py)
+exclude: ^(core/dbt/docs/build/|core/dbt/common/events/types_pb2.py|core/dbt/events/core_types_pb2.py|core/dbt/adapters/events/adapter_types_pb2.py)
 
 # Force all unspecified python hooks to run python 3.8
 default_language_version:

@@ -37,7 +37,7 @@ repos:
     alias: flake8-check
     stages: [manual]
 - repo: https://github.com/pre-commit/mirrors-mypy
-  rev: v1.4.0
+  rev: v1.4.1
   hooks:
   - id: mypy
     # N.B.: Mypy is... a bit fragile.
@@ -26,7 +26,7 @@ Legacy tests are found in the 'test' directory:
 
 The "tasks" map to top-level dbt commands. So `dbt run` => task.run.RunTask, etc. Some are more like abstract base classes (GraphRunnableTask, for example) but all the concrete types outside of task should map to tasks. Currently one executes at a time. The tasks kick off their “Runners” and those do execute in parallel. The parallelism is managed via a thread pool, in GraphRunnableTask.
 
-core/dbt/include/index.html
+core/dbt/task/docs/index.html
 This is the docs website code. It comes from the dbt-docs repository, and is generated when a release is packaged.
 
 ## Adapters
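As context for the hunk above: a minimal sketch of the task/runner split that paragraph describes. The `ToyRunner` and `ToyGraphRunnableTask` names are hypothetical stand-ins, not dbt's actual classes; the point is only the shape, one task at a time fanning its runners out over a thread pool.

```python
from concurrent.futures import ThreadPoolExecutor, as_completed
from typing import List


class ToyRunner:
    """Stand-in for a dbt runner: executes a single node."""

    def __init__(self, node: str) -> None:
        self.node = node

    def run(self) -> str:
        return f"ran {self.node}"


class ToyGraphRunnableTask:
    """Stand-in for GraphRunnableTask: one task, many parallel runners."""

    def __init__(self, nodes: List[str], threads: int = 4) -> None:
        self.nodes = nodes
        self.threads = threads

    def execute_nodes(self) -> List[str]:
        # The task itself runs alone; its runners execute in parallel
        # on a thread pool, as the paragraph above describes.
        with ThreadPoolExecutor(max_workers=self.threads) as pool:
            futures = [pool.submit(ToyRunner(n).run) for n in self.nodes]
            return [f.result() for f in as_completed(futures)]


print(ToyGraphRunnableTask(["model_a", "model_b", "model_c"]).execute_nodes())
```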
@@ -10,6 +10,7 @@
 For information on prior major and minor releases, see their changelogs:
 
+* [1.7](https://github.com/dbt-labs/dbt-core/blob/1.7.latest/CHANGELOG.md)
 * [1.6](https://github.com/dbt-labs/dbt-core/blob/1.6.latest/CHANGELOG.md)
 * [1.5](https://github.com/dbt-labs/dbt-core/blob/1.5.latest/CHANGELOG.md)
 * [1.4](https://github.com/dbt-labs/dbt-core/blob/1.4.latest/CHANGELOG.md)
Makefile

@@ -40,7 +40,16 @@ dev: dev_req ## Installs dbt-* packages in develop mode along with development d
 
 .PHONY: proto_types
 proto_types: ## generates google protobuf python file from types.proto
-	protoc -I=./core/dbt/events --python_out=./core/dbt/events ./core/dbt/events/types.proto
+	protoc -I=./core/dbt/common/events --python_out=./core/dbt/common/events ./core/dbt/common/events/types.proto
+
+.PHONY: core_proto_types
+core_proto_types: ## generates google protobuf python file from core_types.proto
+	protoc -I=./core/dbt/events --python_out=./core/dbt/events ./core/dbt/events/core_types.proto
+
+.PHONY: adapter_proto_types
+adapter_proto_types: ## generates google protobuf python file from core_types.proto
+	protoc -I=./core/dbt/adapters/events --python_out=./core/dbt/adapters/events ./core/dbt/adapters/events/adapter_types.proto
 
 .PHONY: mypy
 mypy: .env ## Runs mypy against staged changes for static type checking.
codecov.yml (new file)

@@ -0,0 +1,13 @@
+ignore:
+  - ".github"
+  - ".changes"
+coverage:
+  status:
+    project:
+      default:
+        target: auto
+        threshold: 0.1% # Reduce noise by ignoring rounding errors in coverage drops
+    patch:
+      default:
+        target: auto
+        threshold: 80%
@@ -1,7 +1,7 @@
 # these are all just exports, #noqa them so flake8 will be happy
 
 # TODO: Should we still include this in the `adapters` namespace?
-from dbt.contracts.connection import Credentials  # noqa: F401
+from dbt.adapters.contracts.connection import Credentials  # noqa: F401
 from dbt.adapters.base.meta import available  # noqa: F401
 from dbt.adapters.base.connections import BaseConnectionManager  # noqa: F401
 from dbt.adapters.base.relation import (  # noqa: F401
@@ -2,17 +2,17 @@ from dataclasses import dataclass
 import re
 from typing import Dict, ClassVar, Any, Optional
 
-from dbt.exceptions import DbtRuntimeError
+from dbt.common.exceptions import DbtRuntimeError
 
 
 @dataclass
 class Column:
     # Note: This is automatically used by contract code
+    # No-op conversions (INTEGER => INT) have been removed.
+    # Any adapter that wants to take advantage of "translate_type"
+    # should create a ClassVar with the appropriate conversions.
     TYPE_LABELS: ClassVar[Dict[str, str]] = {
         "STRING": "TEXT",
-        "TIMESTAMP": "TIMESTAMP",
-        "FLOAT": "FLOAT",
-        "INTEGER": "INT",
-        "BOOLEAN": "BOOLEAN",
     }
     column: str
     dtype: str
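The comment added in that hunk says adapters opt in to "translate_type" by declaring their own conversions. A minimal, self-contained sketch of that pattern follows; `DemoColumn` and its `translate_type` are illustrative only, not dbt's actual implementation.

```python
from dataclasses import dataclass
from typing import ClassVar, Dict


@dataclass
class DemoColumn:
    # An adapter opts in by declaring its own conversion table here.
    TYPE_LABELS: ClassVar[Dict[str, str]] = {
        "STRING": "TEXT",
    }
    column: str
    dtype: str

    @classmethod
    def translate_type(cls, dtype: str) -> str:
        # Fall back to the original name when no conversion is registered.
        return cls.TYPE_LABELS.get(dtype.upper(), dtype)


print(DemoColumn.translate_type("string"))   # TEXT
print(DemoColumn.translate_type("varchar"))  # varchar (unchanged)
```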
core/dbt/adapters/base/connections.py

@@ -6,6 +6,7 @@ import traceback
 
 # multiprocessing.RLock is a function returning this type
 from multiprocessing.synchronize import RLock
+from multiprocessing.context import SpawnContext
 from threading import get_ident
 from typing import (
     Any,

@@ -23,8 +24,9 @@ from typing import (
 
 import agate
 
-import dbt.exceptions
-from dbt.contracts.connection import (
+import dbt.adapters.exceptions
+import dbt.common.exceptions.base
+from dbt.adapters.contracts.connection import (
     Connection,
     Identifier,
     ConnectionState,

@@ -36,9 +38,9 @@ from dbt.contracts.graph.manifest import Manifest
 from dbt.adapters.base.query_headers import (
     MacroQueryStringSetter,
 )
-from dbt.events import AdapterLogger
-from dbt.events.functions import fire_event
-from dbt.events.types import (
+from dbt.adapters.events.logging import AdapterLogger
+from dbt.common.events.functions import fire_event
+from dbt.adapters.events.types import (
     NewConnection,
     ConnectionReused,
     ConnectionLeftOpenInCleanup,

@@ -48,9 +50,8 @@ from dbt.events.types import (
     Rollback,
     RollbackFailed,
 )
-from dbt.events.contextvars import get_node_info
-from dbt import flags
-from dbt.utils import cast_to_str
+from dbt.common.events.contextvars import get_node_info
+from dbt.common.utils import cast_to_str
 
 SleepTime = Union[int, float]  # As taken by time.sleep.
 AdapterHandle = Any  # Adapter connection handle objects can be any class.

@@ -72,10 +73,10 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
 
     TYPE: str = NotImplemented
 
-    def __init__(self, profile: AdapterRequiredConfig):
+    def __init__(self, profile: AdapterRequiredConfig, mp_context: SpawnContext) -> None:
         self.profile = profile
         self.thread_connections: Dict[Hashable, Connection] = {}
-        self.lock: RLock = flags.MP_CONTEXT.RLock()
+        self.lock: RLock = mp_context.RLock()
         self.query_header: Optional[MacroQueryStringSetter] = None
 
     def set_query_header(self, manifest: Manifest) -> None:

@@ -91,13 +92,15 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
         key = self.get_thread_identifier()
         with self.lock:
             if key not in self.thread_connections:
-                raise dbt.exceptions.InvalidConnectionError(key, list(self.thread_connections))
+                raise dbt.adapters.exceptions.InvalidConnectionError(
+                    key, list(self.thread_connections)
+                )
             return self.thread_connections[key]
 
     def set_thread_connection(self, conn: Connection) -> None:
         key = self.get_thread_identifier()
         if key in self.thread_connections:
-            raise dbt.exceptions.DbtInternalError(
+            raise dbt.common.exceptions.DbtInternalError(
                 "In set_thread_connection, existing connection exists for {}"
             )
         self.thread_connections[key] = conn

@@ -137,7 +140,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
         :return: A context manager that handles exceptions raised by the
             underlying database.
         """
-        raise dbt.exceptions.NotImplementedError(
+        raise dbt.common.exceptions.base.NotImplementedError(
             "`exception_handler` is not implemented for this adapter!"
         )

@@ -220,14 +223,14 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
         :param int _attempts: Parameter used to keep track of the number of attempts in calling the
             connect function across recursive calls. Passed as an argument to retry_timeout if it
             is a Callable. This parameter should not be set by the initial caller.
-        :raises dbt.exceptions.FailedToConnectError: Upon exhausting all retry attempts without
+        :raises dbt.adapters.exceptions.FailedToConnectError: Upon exhausting all retry attempts without
             successfully acquiring a handle.
         :return: The given connection with its appropriate state and handle attributes set
             depending on whether we successfully acquired a handle or not.
         """
         timeout = retry_timeout(_attempts) if callable(retry_timeout) else retry_timeout
         if timeout < 0:
-            raise dbt.exceptions.FailedToConnectError(
+            raise dbt.adapters.exceptions.FailedToConnectError(
                 "retry_timeout cannot be negative or return a negative time."
             )

@@ -235,7 +238,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
             # This guard is not perfect others may add to the recursion limit (e.g. built-ins).
             connection.handle = None
             connection.state = ConnectionState.FAIL
-            raise dbt.exceptions.FailedToConnectError("retry_limit cannot be negative")
+            raise dbt.adapters.exceptions.FailedToConnectError("retry_limit cannot be negative")
 
         try:
             connection.handle = connect()

@@ -246,7 +249,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
             if retry_limit <= 0:
                 connection.handle = None
                 connection.state = ConnectionState.FAIL
-                raise dbt.exceptions.FailedToConnectError(str(e))
+                raise dbt.adapters.exceptions.FailedToConnectError(str(e))
 
             logger.debug(
                 f"Got a retryable error when attempting to open a {cls.TYPE} connection.\n"

@@ -268,12 +271,12 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
         except Exception as e:
             connection.handle = None
             connection.state = ConnectionState.FAIL
-            raise dbt.exceptions.FailedToConnectError(str(e))
+            raise dbt.adapters.exceptions.FailedToConnectError(str(e))
 
     @abc.abstractmethod
     def cancel_open(self) -> Optional[List[str]]:
         """Cancel all open connections on the adapter. (passable)"""
-        raise dbt.exceptions.NotImplementedError(
+        raise dbt.common.exceptions.base.NotImplementedError(
             "`cancel_open` is not implemented for this adapter!"
         )

@@ -288,7 +291,9 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
         This should be thread-safe, or hold the lock if necessary. The given
         connection should not be in either in_use or available.
         """
-        raise dbt.exceptions.NotImplementedError("`open` is not implemented for this adapter!")
+        raise dbt.common.exceptions.base.NotImplementedError(
+            "`open` is not implemented for this adapter!"
+        )
 
     def release(self) -> None:
         with self.lock:

@@ -320,12 +325,16 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
     @abc.abstractmethod
     def begin(self) -> None:
         """Begin a transaction. (passable)"""
-        raise dbt.exceptions.NotImplementedError("`begin` is not implemented for this adapter!")
+        raise dbt.common.exceptions.base.NotImplementedError(
+            "`begin` is not implemented for this adapter!"
+        )
 
     @abc.abstractmethod
     def commit(self) -> None:
         """Commit a transaction. (passable)"""
-        raise dbt.exceptions.NotImplementedError("`commit` is not implemented for this adapter!")
+        raise dbt.common.exceptions.base.NotImplementedError(
+            "`commit` is not implemented for this adapter!"
+        )
 
     @classmethod
     def _rollback_handle(cls, connection: Connection) -> None:

@@ -361,7 +370,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
     def _rollback(cls, connection: Connection) -> None:
         """Roll back the given connection."""
         if connection.transaction_open is False:
-            raise dbt.exceptions.DbtInternalError(
+            raise dbt.common.exceptions.DbtInternalError(
                 f"Tried to rollback transaction on connection "
                 f'"{connection.name}", but it does not have one open!'
             )

@@ -400,7 +409,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
 
     @abc.abstractmethod
     def execute(
-        self, sql: str, auto_begin: bool = False, fetch: bool = False
+        self, sql: str, auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None
     ) -> Tuple[AdapterResponse, agate.Table]:
         """Execute the given SQL.

@@ -408,7 +417,30 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
         :param bool auto_begin: If set, and dbt is not currently inside a
             transaction, automatically begin one.
         :param bool fetch: If set, fetch results.
+        :param int limit: If set, limits the result set
         :return: A tuple of the query status and results (empty if fetch=False).
         :rtype: Tuple[AdapterResponse, agate.Table]
         """
-        raise dbt.exceptions.NotImplementedError("`execute` is not implemented for this adapter!")
+        raise dbt.common.exceptions.base.NotImplementedError(
+            "`execute` is not implemented for this adapter!"
+        )
+
+    def add_select_query(self, sql: str) -> Tuple[Connection, Any]:
+        """
+        This was added here because base.impl.BaseAdapter.get_column_schema_from_query expects it to be here.
+        That method wouldn't work unless the adapter used sql.impl.SQLAdapter, sql.connections.SQLConnectionManager
+        or defined this method on <Adapter>ConnectionManager before passing it in to <Adapter>Adapter.
+
+        See https://github.com/dbt-labs/dbt-core/issues/8396 for more information.
+        """
+        raise dbt.common.exceptions.base.NotImplementedError(
+            "`add_select_query` is not implemented for this adapter!"
+        )
+
+    @classmethod
+    def data_type_code_to_name(cls, type_code: Union[int, str]) -> str:
+        """Get the string representation of the data type from the type_code."""
+        # https://peps.python.org/pep-0249/#type-objects
+        raise dbt.common.exceptions.base.NotImplementedError(
+            "`data_type_code_to_name` is not implemented for this adapter!"
+        )
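The `execute` hunk above adds an optional `limit` parameter whose docstring says it "limits the result set". How a concrete connection manager honors it is adapter-specific; the following is a rough, hypothetical sketch of the intent only (`execute_sketch` and its fake cursor rows are illustrative, not dbt's implementation):

```python
from typing import List, Optional, Tuple


def execute_sketch(
    sql: str, fetch: bool = False, limit: Optional[int] = None
) -> Tuple[str, List[tuple]]:
    # Pretend cursor output; a real adapter would actually run `sql`.
    rows = [("row", i) for i in range(10)]
    if not fetch:
        return "OK", []
    # The new limit semantics: cap the result set when a limit is provided.
    return "OK", rows[:limit] if limit is not None else rows


print(execute_sketch("select 1", fetch=True, limit=3))
```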
@@ -17,39 +17,55 @@ from typing import (
     Set,
     Tuple,
     Type,
+    TypedDict,
     Union,
 )
+from multiprocessing.context import SpawnContext

-from dbt.contracts.graph.nodes import ColumnLevelConstraint, ConstraintType, ModelLevelConstraint
+from dbt.adapters.capability import Capability, CapabilityDict
+from dbt.common.contracts.constraints import (
+    ColumnLevelConstraint,
+    ConstraintType,
+    ModelLevelConstraint,
+)

 import agate
 import pytz

-from dbt.exceptions import (
-    DbtInternalError,
-    DbtRuntimeError,
-    DbtValidationError,
-    MacroArgTypeError,
-    MacroResultError,
-    NotImplementedError,
-    NullRelationCacheAttemptedError,
-    NullRelationDropAttemptedError,
-    QuoteConfigTypeError,
-    RelationReturnedMultipleResultsError,
-    RenameToNoneAttemptedError,
-    SnapshotTargetIncompleteError,
-    SnapshotTargetNotSnapshotTableError,
-    UnexpectedNonTimestampError,
-    UnexpectedNullError,
-)
+from dbt.adapters.exceptions import (
+    SnapshotTargetIncompleteError,
+    SnapshotTargetNotSnapshotTableError,
+    NullRelationDropAttemptedError,
+    NullRelationCacheAttemptedError,
+    RelationReturnedMultipleResultsError,
+    UnexpectedNonTimestampError,
+    RenameToNoneAttemptedError,
+    QuoteConfigTypeError,
+)
+from dbt.common.exceptions import (
+    NotImplementedError,
+    DbtInternalError,
+    DbtRuntimeError,
+    DbtValidationError,
+    UnexpectedNullError,
+    MacroArgTypeError,
+    MacroResultError,
+)

-from dbt.adapters.protocol import AdapterConfig, ConnectionManagerProtocol
-from dbt.clients.agate_helper import empty_table, merge_tables, table_from_rows
-from dbt.clients.jinja import MacroGenerator
+from dbt.adapters.protocol import AdapterConfig
+from dbt.common.clients.agate_helper import (
+    empty_table,
+    get_column_value_uncased,
+    merge_tables,
+    table_from_rows,
+    Integer,
+)
+from dbt.common.clients.jinja import CallableMacroGenerator
 from dbt.contracts.graph.manifest import Manifest, MacroManifest
 from dbt.contracts.graph.nodes import ResultNode
-from dbt.events.functions import fire_event, warn_or_error
-from dbt.events.types import (
+from dbt.common.events.functions import fire_event, warn_or_error
+from dbt.adapters.events.types import (
     CacheMiss,
     ListRelations,
     CodeExecution,
@@ -58,9 +74,10 @@ from dbt.events.types import (
     ConstraintNotSupported,
     ConstraintNotEnforced,
 )
-from dbt.utils import filter_null_values, executor, cast_to_str, AttrDict
+from dbt.common.utils import filter_null_values, executor, cast_to_str, AttrDict

-from dbt.adapters.base.connections import Connection, AdapterResponse
+from dbt.adapters.contracts.relation import RelationConfig
+from dbt.adapters.base.connections import Connection, AdapterResponse, BaseConnectionManager
 from dbt.adapters.base.meta import AdapterMeta, available
 from dbt.adapters.base.relation import (
     ComponentName,
@@ -71,10 +88,13 @@ from dbt.adapters.base.relation import (
 from dbt.adapters.base import Column as BaseColumn
 from dbt.adapters.base import Credentials
 from dbt.adapters.cache import RelationsCache, _make_ref_key_dict
-from dbt import deprecations
+from dbt.adapters.events.types import CollectFreshnessReturnSignature


 GET_CATALOG_MACRO_NAME = "get_catalog"
+GET_CATALOG_RELATIONS_MACRO_NAME = "get_catalog_relations"
 FRESHNESS_MACRO_NAME = "collect_freshness"
+GET_RELATION_LAST_MODIFIED_MACRO_NAME = "get_relation_last_modified"


 class ConstraintSupport(str, Enum):
@@ -109,7 +129,7 @@ def _catalog_filter_schemas(manifest: Manifest) -> Callable[[agate.Row], bool]:
     return test


-def _utc(dt: Optional[datetime], source: BaseRelation, field_name: str) -> datetime:
+def _utc(dt: Optional[datetime], source: Optional[BaseRelation], field_name: str) -> datetime:
     """If dt has a timezone, return a new datetime that's in UTC. Otherwise,
     assume the datetime is already for UTC and add the timezone.
     """
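The hunk above only loosens `source` to `Optional[BaseRelation]`; the UTC handling itself is unchanged. A small standalone sketch of that normalization (assuming `pytz`, as in the imports above; `to_utc` is a simplified stand-in for `_utc`, which additionally raises on `None`):

```python
from datetime import datetime

import pytz


def to_utc(dt: datetime) -> datetime:
    # Naive datetimes are assumed to already be UTC; aware ones are converted.
    if dt.tzinfo is None:
        return dt.replace(tzinfo=pytz.UTC)
    return dt.astimezone(pytz.UTC)


print(to_utc(datetime(2023, 12, 1, 8, 30)).isoformat())  # 2023-12-01T08:30:00+00:00
```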
@@ -161,6 +181,12 @@ class PythonJobHelper:
         raise NotImplementedError("PythonJobHelper submit function is not implemented yet")


+class FreshnessResponse(TypedDict):
+    max_loaded_at: datetime
+    snapshotted_at: datetime
+    age: float  # age in seconds
+
+
 class BaseAdapter(metaclass=AdapterMeta):
     """The BaseAdapter provides an abstract base class for adapters.
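A quick sketch of what the new `FreshnessResponse` TypedDict holds. The values here are fabricated for illustration; `age` mirrors the `total_seconds()` computation used later in `calculate_freshness`:

```python
from datetime import datetime, timedelta, timezone
from typing import TypedDict


class FreshnessResponse(TypedDict):
    max_loaded_at: datetime
    snapshotted_at: datetime
    age: float  # age in seconds


snapshotted_at = datetime(2023, 12, 1, 12, 0, tzinfo=timezone.utc)
max_loaded_at = snapshotted_at - timedelta(hours=2)
freshness: FreshnessResponse = {
    "max_loaded_at": max_loaded_at,
    "snapshotted_at": snapshotted_at,
    "age": (snapshotted_at - max_loaded_at).total_seconds(),  # 7200.0
}
```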
@@ -208,7 +234,7 @@ class BaseAdapter(metaclass=AdapterMeta):

     Relation: Type[BaseRelation] = BaseRelation
     Column: Type[BaseColumn] = BaseColumn
-    ConnectionManager: Type[ConnectionManagerProtocol]
+    ConnectionManager: Type[BaseConnectionManager]

     # A set of clobber config fields accepted by this adapter
     # for use in materializations
@@ -222,10 +248,14 @@ class BaseAdapter(metaclass=AdapterMeta):
         ConstraintType.foreign_key: ConstraintSupport.ENFORCED,
     }

-    def __init__(self, config):
+    # This static member variable can be overriden in concrete adapter
+    # implementations to indicate adapter support for optional capabilities.
+    _capabilities = CapabilityDict({})
+
+    def __init__(self, config, mp_context: SpawnContext) -> None:
         self.config = config
-        self.cache = RelationsCache()
-        self.connections = self.ConnectionManager(config)
+        self.cache = RelationsCache(log_cache_events=config.log_cache_events)
+        self.connections = self.ConnectionManager(config, mp_context)
         self._macro_manifest_lazy: Optional[MacroManifest] = None

     ###
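Because `__init__` now takes a `SpawnContext`, anything constructing an adapter must pass one explicitly. A hedged sketch of the call site; the adapter class and config object are placeholders, not dbt APIs:

```python
from multiprocessing import get_context

mp_context = get_context("spawn")  # a SpawnContext, matching the new signature
# adapter = SomeAdapter(config, mp_context)  # hypothetical adapter and config
```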
@@ -315,14 +345,21 @@ class BaseAdapter(metaclass=AdapterMeta):

     @available.parse(lambda *a, **k: ("", empty_table()))
     def get_partitions_metadata(self, table: str) -> Tuple[agate.Table]:
-        """Obtain partitions metadata for a BigQuery partitioned table.
+        """
+        TODO: Can we move this to dbt-bigquery?
+        Obtain partitions metadata for a BigQuery partitioned table.

-        :param str table_id: a partitioned table id, in standard SQL format.
+        :param str table: a partitioned table id, in standard SQL format.
         :return: a partition metadata tuple, as described in
           https://cloud.google.com/bigquery/docs/creating-partitioned-tables#getting_partition_metadata_using_meta_tables.
         :rtype: agate.Table
         """
-        return self.connections.get_partitions_metadata(table=table)
+        if hasattr(self.connections, "get_partitions_metadata"):
+            return self.connections.get_partitions_metadata(table=table)
+        else:
+            raise NotImplementedError(
+                "`get_partitions_metadata` is not implemented for this adapter!"
+            )

     ###
     # Methods that should never be overridden
@@ -387,15 +424,13 @@ class BaseAdapter(metaclass=AdapterMeta):
         else:
             return True

-    def _get_cache_schemas(self, manifest: Manifest) -> Set[BaseRelation]:
+    def _get_cache_schemas(self, relation_configs: Iterable[RelationConfig]) -> Set[BaseRelation]:
         """Get the set of schema relations that the cache logic needs to
-        populate. This means only executable nodes are included.
+        populate.
         """
-        # the cache only cares about executable nodes
         return {
-            self.Relation.create_from(self.config, node).without_identifier()
-            for node in manifest.nodes.values()
-            if (node.is_relational and not node.is_ephemeral_model and not node.is_external_node)
+            self.Relation.create_from(quoting=self.config, config=relation_config)
+            for relation_config in relation_configs
         }

     def _get_catalog_schemas(self, manifest: Manifest) -> SchemaSearchMap:
@@ -408,16 +443,8 @@ class BaseAdapter(metaclass=AdapterMeta):
         lowercase strings.
         """
         info_schema_name_map = SchemaSearchMap()
-        nodes: Iterator[ResultNode] = chain(
-            [
-                node
-                for node in manifest.nodes.values()
-                if (node.is_relational and not node.is_ephemeral_model)
-            ],
-            manifest.sources.values(),
-        )
-        for node in nodes:
-            relation = self.Relation.create_from(self.config, node)
+        relations = self._get_catalog_relations(manifest)
+        for relation in relations:
             info_schema_name_map.add(relation)
         # result is a map whose keys are information_schema Relations without
         # identifiers that have appropriate database prefixes, and whose values
@@ -425,14 +452,42 @@ class BaseAdapter(metaclass=AdapterMeta):
         # databases
         return info_schema_name_map

+    def _get_catalog_relations_by_info_schema(
+        self, relations
+    ) -> Dict[InformationSchema, List[BaseRelation]]:
+        relations_by_info_schema: Dict[InformationSchema, List[BaseRelation]] = dict()
+        for relation in relations:
+            info_schema = relation.information_schema_only()
+            if info_schema not in relations_by_info_schema:
+                relations_by_info_schema[info_schema] = []
+            relations_by_info_schema[info_schema].append(relation)
+
+        return relations_by_info_schema
+
+    def _get_catalog_relations(self, manifest: Manifest) -> List[BaseRelation]:
+
+        nodes = chain(
+            [
+                node
+                for node in manifest.nodes.values()
+                if (node.is_relational and not node.is_ephemeral_model)
+            ],
+            manifest.sources.values(),
+        )
+
+        relations = [self.Relation.create_from(self.config, n) for n in nodes]  # type: ignore[arg-type]
+        return relations
+
     def _relations_cache_for_schemas(
-        self, manifest: Manifest, cache_schemas: Optional[Set[BaseRelation]] = None
+        self,
+        relation_configs: Iterable[RelationConfig],
+        cache_schemas: Optional[Set[BaseRelation]] = None,
     ) -> None:
         """Populate the relations cache for the given schemas. Returns an
         iterable of the schemas populated, as strings.
         """
         if not cache_schemas:
-            cache_schemas = self._get_cache_schemas(manifest)
+            cache_schemas = self._get_cache_schemas(relation_configs)
         with executor(self.config) as tpe:
             futures: List[Future[List[BaseRelation]]] = []
             for cache_schema in cache_schemas:
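`_get_catalog_relations_by_info_schema` above is plain bucketing. The same shape with `collections.defaultdict`, using tuples as stand-ins for relation objects (illustrative only, not dbt code):

```python
from collections import defaultdict

relations = [("db1", "public", "a"), ("db1", "public", "b"), ("db2", "main", "c")]
by_info_schema = defaultdict(list)
for rel in relations:
    info_schema = rel[:2]  # stand-in for relation.information_schema_only()
    by_info_schema[info_schema].append(rel)

print(dict(by_info_schema))
# {('db1', 'public'): [('db1', 'public', 'a'), ('db1', 'public', 'b')],
#  ('db2', 'main'): [('db2', 'main', 'c')]}
```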
@@ -453,14 +508,15 @@ class BaseAdapter(metaclass=AdapterMeta):
         # it's possible that there were no relations in some schemas. We want
         # to insert the schemas we query into the cache's `.schemas` attribute
         # so we can check it later
-        cache_update: Set[Tuple[Optional[str], Optional[str]]] = set()
+        cache_update: Set[Tuple[Optional[str], str]] = set()
         for relation in cache_schemas:
-            cache_update.add((relation.database, relation.schema))
+            if relation.schema:
+                cache_update.add((relation.database, relation.schema))
         self.cache.update_schemas(cache_update)

     def set_relations_cache(
         self,
-        manifest: Manifest,
+        relation_configs: Iterable[RelationConfig],
         clear: bool = False,
         required_schemas: Optional[Set[BaseRelation]] = None,
     ) -> None:
@@ -470,7 +526,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         with self.cache.lock:
             if clear:
                 self.cache.clear()
-            self._relations_cache_for_schemas(manifest, required_schemas)
+            self._relations_cache_for_schemas(relation_configs, required_schemas)

     @available
     def cache_added(self, relation: Optional[BaseRelation]) -> str:
@@ -917,6 +973,17 @@ class BaseAdapter(metaclass=AdapterMeta):
         """
         raise NotImplementedError("`convert_number_type` is not implemented for this adapter!")

+    @classmethod
+    def convert_integer_type(cls, agate_table: agate.Table, col_idx: int) -> str:
+        """Return the type in the database that best maps to the agate.Number
+        type for the given agate table and column index.
+
+        :param agate_table: The table
+        :param col_idx: The index into the agate table for the column.
+        :return: The name of the type in the database
+        """
+        return "integer"
+
     @classmethod
     @abc.abstractmethod
     def convert_boolean_type(cls, agate_table: agate.Table, col_idx: int) -> str:
@@ -974,6 +1041,7 @@ class BaseAdapter(metaclass=AdapterMeta):
     def convert_agate_type(cls, agate_table: agate.Table, col_idx: int) -> Optional[str]:
         agate_type: Type = agate_table.column_types[col_idx]
         conversions: List[Tuple[Type, Callable[..., str]]] = [
+            (Integer, cls.convert_integer_type),
             (agate.Text, cls.convert_text_type),
             (agate.Number, cls.convert_number_type),
             (agate.Boolean, cls.convert_boolean_type),
@@ -1048,7 +1116,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         )
         macro_context.update(context_override)

-        macro_function = MacroGenerator(macro, macro_context)
+        macro_function = CallableMacroGenerator(macro, macro_context)

         with self.connections.exception_handler(f"macro {macro_name}"):
             result = macro_function(**kwargs)
@@ -1085,25 +1153,108 @@ class BaseAdapter(metaclass=AdapterMeta):
         results = self._catalog_filter_table(table, manifest)  # type: ignore[arg-type]
         return results

-    def get_catalog(self, manifest: Manifest) -> Tuple[agate.Table, List[Exception]]:
-        schema_map = self._get_catalog_schemas(manifest)
+    def _get_one_catalog_by_relations(
+        self,
+        information_schema: InformationSchema,
+        relations: List[BaseRelation],
+        manifest: Manifest,
+    ) -> agate.Table:
+
+        kwargs = {
+            "information_schema": information_schema,
+            "relations": relations,
+        }
+        table = self.execute_macro(
+            GET_CATALOG_RELATIONS_MACRO_NAME,
+            kwargs=kwargs,
+            # pass in the full manifest, so we get any local project
+            # overrides
+            manifest=manifest,
+        )
+
+        results = self._catalog_filter_table(table, manifest)  # type: ignore[arg-type]
+        return results
+
+    def get_filtered_catalog(
+        self, manifest: Manifest, relations: Optional[Set[BaseRelation]] = None
+    ):
+        catalogs: agate.Table
+        if (
+            relations is None
+            or len(relations) > 100
+            or not self.supports(Capability.SchemaMetadataByRelations)
+        ):
+            # Do it the traditional way. We get the full catalog.
+            catalogs, exceptions = self.get_catalog(manifest)
+        else:
+            # Do it the new way. We try to save time by selecting information
+            # only for the exact set of relations we are interested in.
+            catalogs, exceptions = self.get_catalog_by_relations(manifest, relations)
+
+        if relations and catalogs:
+            relation_map = {
+                (
+                    r.database.casefold() if r.database else None,
+                    r.schema.casefold() if r.schema else None,
+                    r.identifier.casefold() if r.identifier else None,
+                )
+                for r in relations
+            }
+
+            def in_map(row: agate.Row):
+                d = _expect_row_value("table_database", row)
+                s = _expect_row_value("table_schema", row)
+                i = _expect_row_value("table_name", row)
+                d = d.casefold() if d is not None else None
+                s = s.casefold() if s is not None else None
+                i = i.casefold() if i is not None else None
+                return (d, s, i) in relation_map
+
+            catalogs = catalogs.where(in_map)
+
+        return catalogs, exceptions
+
+    def row_matches_relation(self, row: agate.Row, relations: Set[BaseRelation]):
+        pass
+
+    def get_catalog(self, manifest: Manifest) -> Tuple[agate.Table, List[Exception]]:
         with executor(self.config) as tpe:
             futures: List[Future[agate.Table]] = []
+            schema_map: SchemaSearchMap = self._get_catalog_schemas(manifest)
             for info, schemas in schema_map.items():
                 if len(schemas) == 0:
                     continue
                 name = ".".join([str(info.database), "information_schema"])
-
                 fut = tpe.submit_connected(
                     self, name, self._get_one_catalog, info, schemas, manifest
                 )
                 futures.append(fut)

-            catalogs, exceptions = catch_as_completed(futures)
-
+        catalogs, exceptions = catch_as_completed(futures)
         return catalogs, exceptions
+
+    def get_catalog_by_relations(
+        self, manifest: Manifest, relations: Set[BaseRelation]
+    ) -> Tuple[agate.Table, List[Exception]]:
+        with executor(self.config) as tpe:
+            futures: List[Future[agate.Table]] = []
+            relations_by_schema = self._get_catalog_relations_by_info_schema(relations)
+            for info_schema in relations_by_schema:
+                name = ".".join([str(info_schema.database), "information_schema"])
+                relations = set(relations_by_schema[info_schema])
+                fut = tpe.submit_connected(
+                    self,
+                    name,
+                    self._get_one_catalog_by_relations,
+                    info_schema,
+                    relations,
+                    manifest,
+                )
+                futures.append(fut)
+
+            catalogs, exceptions = catch_as_completed(futures)
+        return catalogs, exceptions

     def cancel_open_connections(self):
         """Cancel all open connections."""
         return self.connections.cancel_open()
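The relation filter in `get_filtered_catalog` keys everything by `str.casefold()` so catalog rows match regardless of case. A standalone illustration with plain tuples (no dbt objects involved):

```python
relation_map = {("db", "public", "orders")}

row = ("DB", "Public", "ORDERS")  # as a catalog row might report it
key = tuple(part.casefold() if part is not None else None for part in row)
print(key in relation_map)  # True: the match is case-insensitive
```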
@@ -1114,7 +1265,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         loaded_at_field: str,
         filter: Optional[str],
         manifest: Optional[Manifest] = None,
-    ) -> Tuple[Optional[AdapterResponse], Dict[str, Any]]:
+    ) -> Tuple[Optional[AdapterResponse], FreshnessResponse]:
         """Calculate the freshness of sources in dbt, and return it"""
         kwargs: Dict[str, Any] = {
             "source": source,
@@ -1131,7 +1282,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         ]
         result = self.execute_macro(FRESHNESS_MACRO_NAME, kwargs=kwargs, manifest=manifest)
         if isinstance(result, agate.Table):
-            deprecations.warn("collect-freshness-return-signature")
+            warn_or_error(CollectFreshnessReturnSignature())
             adapter_response = None
             table = result
         else:
@@ -1149,13 +1300,52 @@ class BaseAdapter(metaclass=AdapterMeta):

         snapshotted_at = _utc(table[0][1], source, loaded_at_field)
         age = (snapshotted_at - max_loaded_at).total_seconds()
-        freshness = {
+        freshness: FreshnessResponse = {
             "max_loaded_at": max_loaded_at,
             "snapshotted_at": snapshotted_at,
             "age": age,
         }
         return adapter_response, freshness

+    def calculate_freshness_from_metadata(
+        self,
+        source: BaseRelation,
+        manifest: Optional[Manifest] = None,
+    ) -> Tuple[Optional[AdapterResponse], FreshnessResponse]:
+        kwargs: Dict[str, Any] = {
+            "information_schema": source.information_schema_only(),
+            "relations": [source],
+        }
+        result = self.execute_macro(
+            GET_RELATION_LAST_MODIFIED_MACRO_NAME, kwargs=kwargs, manifest=manifest
+        )
+        adapter_response, table = result.response, result.table  # type: ignore[attr-defined]
+
+        try:
+            row = table[0]
+            last_modified_val = get_column_value_uncased("last_modified", row)
+            snapshotted_at_val = get_column_value_uncased("snapshotted_at", row)
+        except Exception:
+            raise MacroResultError(GET_RELATION_LAST_MODIFIED_MACRO_NAME, table)
+
+        if last_modified_val is None:
+            # Interpret missing value as "infinitely long ago"
+            max_loaded_at = datetime(1, 1, 1, 0, 0, 0, tzinfo=pytz.UTC)
+        else:
+            max_loaded_at = _utc(last_modified_val, None, "last_modified")
+
+        snapshotted_at = _utc(snapshotted_at_val, None, "snapshotted_at")
+
+        age = (snapshotted_at - max_loaded_at).total_seconds()
+
+        freshness: FreshnessResponse = {
+            "max_loaded_at": max_loaded_at,
+            "snapshotted_at": snapshotted_at,
+            "age": age,
+        }
+
+        return adapter_response, freshness
+
     def pre_model_hook(self, config: Mapping[str, Any]) -> Any:
         """A hook for running some operation before the model materialization
         runs. The hook can assume it has a connection available.
@@ -1181,11 +1371,6 @@ class BaseAdapter(metaclass=AdapterMeta):
         """
         pass

-    def get_compiler(self):
-        from dbt.compilation import Compiler
-
-        return Compiler(self.config)
-
     # Methods used in adapter tests
     def update_column_sql(
         self,
@@ -1305,7 +1490,7 @@ class BaseAdapter(metaclass=AdapterMeta):

         strategy = strategy.replace("+", "_")
         macro_name = f"get_incremental_{strategy}_sql"
-        # The model_context should have MacroGenerator callable objects for all macros
+        # The model_context should have callable objects for all macros
         if macro_name not in model_context:
             raise DbtRuntimeError(
                 'dbt could not find an incremental strategy macro with the name "{}" in {}'.format(
@@ -1429,6 +1614,14 @@ class BaseAdapter(metaclass=AdapterMeta):
         else:
             return None

+    @classmethod
+    def capabilities(cls) -> CapabilityDict:
+        return cls._capabilities
+
+    @classmethod
+    def supports(cls, capability: Capability) -> bool:
+        return bool(cls.capabilities()[capability])
+

 COLUMNS_EQUAL_SQL = """
 with diff_count as (
@@ -1,9 +1,8 @@
 import abc
 from functools import wraps
 from typing import Callable, Optional, Any, FrozenSet, Dict, Set

-from dbt.deprecations import warn, renamed_method
-
+from dbt.common.events.functions import warn_or_error
+from dbt.adapters.events.types import AdapterDeprecationWarning

 Decorator = Callable[[Any], Callable]

@@ -62,11 +61,12 @@ class _Available:

         def wrapper(func):
             func_name = func.__name__
-            renamed_method(func_name, supported_name)

             @wraps(func)
             def inner(*args, **kwargs):
-                warn("adapter:{}".format(func_name))
+                warn_or_error(
+                    AdapterDeprecationWarning(old_name=func_name, new_name=supported_name)
+                )
                 return func(*args, **kwargs)

             if parse_replacement:

@@ -93,7 +93,7 @@ class AdapterMeta(abc.ABCMeta):
     _available_: FrozenSet[str]
     _parse_replacements_: Dict[str, Callable]

-    def __new__(mcls, name, bases, namespace, **kwargs):
+    def __new__(mcls, name, bases, namespace, **kwargs) -> "AdapterMeta":
         # mypy does not like the `**kwargs`. But `ABCMeta` itself takes
         # `**kwargs` in its argspec here (and passes them to `type.__new__`.
         # I'm not sure there is any benefit to it after poking around a bit,
@@ -1,20 +1,10 @@
 from typing import List, Optional, Type
+from pathlib import Path

 from dbt.adapters.base import Credentials
-from dbt.exceptions import CompilationError
 from dbt.adapters.protocol import AdapterProtocol


-def project_name_from_path(include_path: str) -> str:
-    # avoid an import cycle
-    from dbt.config.project import PartialProject
-
-    partial = PartialProject.from_project_root(include_path)
-    if partial.project_name is None:
-        raise CompilationError(f"Invalid project at {include_path}: name not set!")
-    return partial.project_name
-
-
 class AdapterPlugin:
     """Defines the basic requirements for a dbt adapter plugin.

@@ -29,12 +19,13 @@ class AdapterPlugin:
         credentials: Type[Credentials],
         include_path: str,
         dependencies: Optional[List[str]] = None,
-    ):
+        project_name: Optional[str] = None,
+    ) -> None:

         self.adapter: Type[AdapterProtocol] = adapter
         self.credentials: Type[Credentials] = credentials
         self.include_path: str = include_path
-        self.project_name: str = project_name_from_path(include_path)
+        self.project_name: str = project_name or f"dbt_{Path(include_path).name}"
         self.dependencies: List[str]
         if dependencies is None:
             self.dependencies = []
@@ -1,17 +1,17 @@
 from threading import local
 from typing import Optional, Callable, Dict, Any

-from dbt.clients.jinja import QueryStringGenerator
+from dbt.adapters.clients.jinja import QueryStringGenerator
 from dbt.context.manifest import generate_query_header_context
-from dbt.contracts.connection import AdapterRequiredConfig, QueryComment
+from dbt.adapters.contracts.connection import AdapterRequiredConfig, QueryComment
 from dbt.contracts.graph.nodes import ResultNode
 from dbt.contracts.graph.manifest import Manifest
-from dbt.exceptions import DbtRuntimeError
+from dbt.common.exceptions import DbtRuntimeError


 class NodeWrapper:
-    def __init__(self, node):
+    def __init__(self, node) -> None:
         self._inner_node = node

     def __getattr__(self, name):

@@ -25,9 +25,9 @@ class _QueryComment(local):
     - a source_name indicating what set the current thread's query comment
     """

-    def __init__(self, initial):
+    def __init__(self, initial) -> None:
         self.query_comment: Optional[str] = initial
-        self.append = False
+        self.append: bool = False

     def add(self, sql: str) -> str:
         if not self.query_comment:

@@ -57,7 +57,7 @@ QueryStringFunc = Callable[[str, Optional[NodeWrapper]], str]


 class MacroQueryStringSetter:
-    def __init__(self, config: AdapterRequiredConfig, manifest: Manifest):
+    def __init__(self, config: AdapterRequiredConfig, manifest: Manifest) -> None:
         self.manifest = manifest
         self.config = config
@@ -1,9 +1,9 @@
 from collections.abc import Hashable
 from dataclasses import dataclass, field
-from typing import Optional, TypeVar, Any, Type, Dict, Iterator, Tuple, Set
+from typing import Optional, TypeVar, Any, Type, Dict, Iterator, Tuple, Set, Union, FrozenSet

-from dbt.contracts.graph.nodes import SourceDefinition, ManifestNode, ResultNode, ParsedNode
-from dbt.contracts.relation import (
+from dbt.adapters.contracts.relation import (
+    RelationConfig,
     RelationType,
     ComponentName,
     HasQuoting,

@@ -11,18 +11,15 @@ from dbt.contracts.relation import (
     Policy,
     Path,
 )
-from dbt.exceptions import (
-    ApproximateMatchError,
-    DbtInternalError,
-    MultipleDatabasesNotAllowedError,
-)
-from dbt.node_types import NodeType
-from dbt.utils import filter_null_values, deep_merge, classproperty
+from dbt.adapters.exceptions import MultipleDatabasesNotAllowedError, ApproximateMatchError
+from dbt.common.utils import filter_null_values, deep_merge
+from dbt.adapters.utils import classproperty

-import dbt.exceptions
+import dbt.common.exceptions


 Self = TypeVar("Self", bound="BaseRelation")
+SerializableIterable = Union[Tuple, FrozenSet]


 @dataclass(frozen=True, eq=False, repr=False)
@@ -36,6 +33,18 @@ class BaseRelation(FakeAPIObject, Hashable):
     quote_policy: Policy = field(default_factory=lambda: Policy())
     dbt_created: bool = False

+    # register relation types that can be renamed for the purpose of replacing relations using stages and backups
+    # adding a relation type here also requires defining the associated rename macro
+    # e.g. adding RelationType.View in dbt-postgres requires that you define:
+    #     include/postgres/macros/relations/view/rename.sql::postgres__get_rename_view_sql()
+    renameable_relations: SerializableIterable = ()
+
+    # register relation types that are atomically replaceable, e.g. they have "create or replace" syntax
+    # adding a relation type here also requires defining the associated replace macro
+    # e.g. adding RelationType.View in dbt-postgres requires that you define:
+    #     include/postgres/macros/relations/view/replace.sql::postgres__get_replace_view_sql()
+    replaceable_relations: SerializableIterable = ()
+
     def _is_exactish_match(self, field: ComponentName, value: str) -> bool:
         if self.dbt_created and self.quote_policy.get_part(field) is False:
             return self.path.get_lowered_part(field) == value.lower()

@@ -87,7 +96,7 @@ class BaseRelation(FakeAPIObject, Hashable):

         if not search:
             # nothing was passed in
-            raise dbt.exceptions.DbtRuntimeError(
+            raise dbt.common.exceptions.DbtRuntimeError(
                 "Tried to match relation, but no search path was passed!"
             )
@@ -169,7 +178,6 @@ class BaseRelation(FakeAPIObject, Hashable):
         return self.include(identifier=False).replace_path(identifier=None)

     def _render_iterator(self) -> Iterator[Tuple[Optional[ComponentName], Optional[str]]]:
-
         for key in ComponentName:
             path_part: Optional[str] = None
             if self.include_policy.get_part(key):

@@ -188,33 +196,14 @@ class BaseRelation(FakeAPIObject, Hashable):
             identifier=identifier,
         )

-    @classmethod
-    def create_from_source(cls: Type[Self], source: SourceDefinition, **kwargs: Any) -> Self:
-        source_quoting = source.quoting.to_dict(omit_none=True)
-        source_quoting.pop("column", None)
-        quote_policy = deep_merge(
-            cls.get_default_quote_policy().to_dict(omit_none=True),
-            source_quoting,
-            kwargs.get("quote_policy", {}),
-        )
-
-        return cls.create(
-            database=source.database,
-            schema=source.schema,
-            identifier=source.identifier,
-            quote_policy=quote_policy,
-            **kwargs,
-        )
-
     @staticmethod
     def add_ephemeral_prefix(name: str):
         return f"__dbt__cte__{name}"

     @classmethod
-    def create_ephemeral_from_node(
+    def create_ephemeral_from(
         cls: Type[Self],
         config: HasQuoting,
-        node: ManifestNode,
+        node: RelationConfig,
     ) -> Self:
         # Note that ephemeral models are based on the name.
         identifier = cls.add_ephemeral_prefix(node.name)
@@ -224,47 +213,33 @@ class BaseRelation(FakeAPIObject, Hashable):
         ).quote(identifier=False)

     @classmethod
-    def create_from_node(
+    def create_from(
         cls: Type[Self],
-        config: HasQuoting,
-        node,
-        quote_policy: Optional[Dict[str, bool]] = None,
+        quoting: HasQuoting,
+        config: RelationConfig,
         **kwargs: Any,
     ) -> Self:
-        if quote_policy is None:
-            quote_policy = {}
-
-        quote_policy = dbt.utils.merge(config.quoting, quote_policy)
+        quote_policy = kwargs.pop("quote_policy", {})
+
+        config_quoting = config.quoting_dict
+        config_quoting.pop("column", None)
+
+        # precedence: kwargs quoting > config quoting > base quoting > default quoting
+        quote_policy = deep_merge(
+            cls.get_default_quote_policy().to_dict(omit_none=True),
+            quoting.quoting,
+            config_quoting,
+            quote_policy,
+        )

         return cls.create(
-            database=node.database,
-            schema=node.schema,
-            identifier=node.alias,
+            database=config.database,
+            schema=config.schema,
+            identifier=config.identifier,
             quote_policy=quote_policy,
             **kwargs,
         )

-    @classmethod
-    def create_from(
-        cls: Type[Self],
-        config: HasQuoting,
-        node: ResultNode,
-        **kwargs: Any,
-    ) -> Self:
-        if node.resource_type == NodeType.Source:
-            if not isinstance(node, SourceDefinition):
-                raise DbtInternalError(
-                    "type mismatch, expected SourceDefinition but got {}".format(type(node))
-                )
-            return cls.create_from_source(node, **kwargs)
-        else:
-            # Can't use ManifestNode here because of parameterized generics
-            if not isinstance(node, (ParsedNode)):
-                raise DbtInternalError(
-                    f"type mismatch, expected ManifestNode but got {type(node)}"
-                )
-            return cls.create_from_node(config, node, **kwargs)
-
     @classmethod
     def create(
         cls: Type[Self],
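The precedence comment in the rewritten `create_from` ("kwargs quoting > config quoting > base quoting > default quoting") is worth seeing concretely. A sketch with a shallow stand-in for dbt's `deep_merge` (the real helper lives in `dbt.common.utils`):

```python
def merge(*dicts):
    out = {}
    for d in dicts:
        out.update(d)  # later arguments win, matching deep_merge's precedence
    return out


quote_policy = merge(
    {"database": True, "schema": True, "identifier": True},  # defaults
    {"schema": False},      # base quoting (quoting.quoting)
    {"identifier": False},  # config quoting (config.quoting_dict)
    {"identifier": True},   # kwargs quote_policy, which wins
)
print(quote_policy)  # {'database': True, 'schema': False, 'identifier': True}
```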
@@ -286,6 +261,14 @@ class BaseRelation(FakeAPIObject, Hashable):
         )
         return cls.from_dict(kwargs)

+    @property
+    def can_be_renamed(self) -> bool:
+        return self.type in self.renameable_relations
+
+    @property
+    def can_be_replaced(self) -> bool:
+        return self.type in self.replaceable_relations
+
     def __repr__(self) -> str:
         return "<{} {}>".format(self.__class__.__name__, self.render())
@@ -366,7 +349,7 @@ class InformationSchema(BaseRelation):

     def __post_init__(self):
         if not isinstance(self.information_schema_view, (type(None), str)):
-            raise dbt.exceptions.CompilationError(
+            raise dbt.common.exceptions.CompilationError(
                 "Got an invalid name: {}".format(self.information_schema_view)
             )
@@ -439,11 +422,11 @@ class SchemaSearchMap(Dict[InformationSchema, Set[Optional[str]]]):
         self[key].add(schema)

     def search(self) -> Iterator[Tuple[InformationSchema, Optional[str]]]:
-        for information_schema_name, schemas in self.items():
+        for information_schema, schemas in self.items():
             for schema in schemas:
-                yield information_schema_name, schema
+                yield information_schema, schema

-    def flatten(self, allow_multiple_databases: bool = False):
+    def flatten(self, allow_multiple_databases: bool = False) -> "SchemaSearchMap":
         new = self.__class__()

         # make sure we don't have multiple databases if allow_multiple_databases is set to False
@@ -7,17 +7,16 @@ from dbt.adapters.reference_keys import (
     _make_ref_key_dict,
     _ReferenceKey,
 )
-from dbt.exceptions import (
-    DependentLinkNotCachedError,
+from dbt.common.exceptions.cache import (
     NewNameAlreadyInCacheError,
-    NoneRelationFoundError,
     ReferencedLinkNotCachedError,
+    DependentLinkNotCachedError,
     TruncatedModelNameCausedCollisionError,
+    NoneRelationFoundError,
 )
-from dbt.events.functions import fire_event, fire_event_if
-from dbt.events.types import CacheAction, CacheDumpGraph
-from dbt.flags import get_flags
-from dbt.utils import lowercase
+from dbt.common.events.functions import fire_event, fire_event_if
+from dbt.adapters.events.types import CacheAction, CacheDumpGraph
+from dbt.common.utils.formatting import lowercase


 def dot_separated(key: _ReferenceKey) -> str:

@@ -38,8 +37,8 @@ class _CachedRelation:
     :attr BaseRelation inner: The underlying dbt relation.
     """

-    def __init__(self, inner):
-        self.referenced_by = {}
+    def __init__(self, inner) -> None:
+        self.referenced_by: Dict[_ReferenceKey, _CachedRelation] = {}
         self.inner = inner

     def __str__(self) -> str:
@@ -165,10 +164,11 @@ class RelationsCache:
     :attr Set[str] schemas: The set of known/cached schemas, all lowercased.
     """

-    def __init__(self) -> None:
+    def __init__(self, log_cache_events: bool = False) -> None:
         self.relations: Dict[_ReferenceKey, _CachedRelation] = {}
         self.lock = threading.RLock()
         self.schemas: Set[Tuple[Optional[str], Optional[str]]] = set()
+        self.log_cache_events = log_cache_events

     def add_schema(
         self,

@@ -318,10 +318,9 @@ class RelationsCache:

         :param BaseRelation relation: The underlying relation.
         """
-        flags = get_flags()
         cached = _CachedRelation(relation)
         fire_event_if(
-            flags.LOG_CACHE_EVENTS,
+            self.log_cache_events,
             lambda: CacheDumpGraph(before_after="before", action="adding", dump=self.dump_graph()),
         )
         fire_event(CacheAction(action="add_relation", ref_key=_make_ref_key_dict(cached)))

@@ -329,7 +328,7 @@ class RelationsCache:
         with self.lock:
             self._setdefault(cached)
         fire_event_if(
-            flags.LOG_CACHE_EVENTS,
+            self.log_cache_events,
             lambda: CacheDumpGraph(before_after="after", action="adding", dump=self.dump_graph()),
         )

@@ -454,9 +453,8 @@ class RelationsCache:
                     ref_key_2=new_key._asdict(),
                 )
             )
-        flags = get_flags()
         fire_event_if(
-            flags.LOG_CACHE_EVENTS,
+            self.log_cache_events,
             lambda: CacheDumpGraph(before_after="before", action="rename", dump=self.dump_graph()),
        )

@@ -467,7 +465,7 @@ class RelationsCache:
         self._setdefault(_CachedRelation(new))

         fire_event_if(
-            flags.LOG_CACHE_EVENTS,
+            self.log_cache_events,
             lambda: CacheDumpGraph(before_after="after", action="rename", dump=self.dump_graph()),
         )
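The switch from the global `LOG_CACHE_EVENTS` flag to the instance's `log_cache_events` keeps the same laziness trick: the event is built inside a lambda so the expensive `dump_graph()` never runs unless the flag is on. A toy stand-in for orientation (not dbt's implementation):

```python
def fire_event_if(condition, event_factory):
    # Stand-in for dbt.common.events.functions.fire_event_if: the factory is
    # only invoked when the condition holds.
    if condition:
        print(event_factory())


fire_event_if(False, lambda: "expensive cache dump (never built)")
fire_event_if(True, lambda: "cache dump emitted")
```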
core/dbt/adapters/capability.py (new file, 52 lines)
@@ -0,0 +1,52 @@
from dataclasses import dataclass
from enum import Enum
from typing import Optional, DefaultDict, Mapping


class Capability(str, Enum):
    """Enumeration of optional adapter features which can be probed using BaseAdapter.capabilities()"""

    SchemaMetadataByRelations = "SchemaMetadataByRelations"
    """Indicates efficient support for retrieving schema metadata for a list of relations, rather than always
    retrieving all the relations in a schema."""

    TableLastModifiedMetadata = "TableLastModifiedMetadata"
    """Indicates support for determining the time of the last table modification by querying database metadata."""


class Support(str, Enum):
    Unknown = "Unknown"
    """The adapter has not declared whether this capability is a feature of the underlying DBMS."""

    Unsupported = "Unsupported"
    """This capability is not possible with the underlying DBMS, so the adapter does not implement related macros."""

    NotImplemented = "NotImplemented"
    """This capability is available in the underlying DBMS, but support has not yet been implemented in the adapter."""

    Versioned = "Versioned"
    """Some versions of the DBMS supported by the adapter support this capability and the adapter has implemented any
    macros needed to use it."""

    Full = "Full"
    """All versions of the DBMS supported by the adapter support this capability and the adapter has implemented any
    macros needed to use it."""


@dataclass
class CapabilitySupport:
    support: Support
    first_version: Optional[str] = None

    def __bool__(self):
        return self.support == Support.Versioned or self.support == Support.Full


class CapabilityDict(DefaultDict[Capability, CapabilitySupport]):
    def __init__(self, vals: Mapping[Capability, CapabilitySupport]):
        super().__init__(self._default)
        self.update(vals)

    @staticmethod
    def _default():
        return CapabilitySupport(support=Support.Unknown)
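Sketch only: how a concrete adapter could advertise support, pairing this module with the `_capabilities` / `supports()` hooks added to `BaseAdapter` earlier in this diff. The version string is invented, and the import assumes the module above is on the path:

```python
from dbt.adapters.capability import (
    Capability,
    CapabilityDict,
    CapabilitySupport,
    Support,
)

_capabilities = CapabilityDict(
    {
        Capability.SchemaMetadataByRelations: CapabilitySupport(support=Support.Full),
        Capability.TableLastModifiedMetadata: CapabilitySupport(
            support=Support.Versioned, first_version="1.7.0"  # illustrative
        ),
    }
)

# Unlisted capabilities default to Support.Unknown, which is falsy.
print(bool(_capabilities[Capability.SchemaMetadataByRelations]))  # True
```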
core/dbt/adapters/clients/jinja.py (new file, 23 lines)
@@ -0,0 +1,23 @@
from typing import Dict, Any
from dbt.common.clients.jinja import BaseMacroGenerator, get_environment


class QueryStringGenerator(BaseMacroGenerator):
    def __init__(self, template_str: str, context: Dict[str, Any]) -> None:
        super().__init__(context)
        self.template_str: str = template_str
        env = get_environment()
        self.template = env.from_string(
            self.template_str,
            globals=self.context,
        )

    def get_name(self) -> str:
        return "query_comment_macro"

    def get_template(self):
        """Don't use the template cache, we don't have a node"""
        return self.template

    def __call__(self, connection_name: str, node) -> str:
        return str(self.call_macro(connection_name, node))
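`QueryStringGenerator` is a thin wrapper over a Jinja template compiled with the context as its globals. The same core idea shown with plain `jinja2` (not dbt code, just orientation):

```python
from jinja2 import Environment

env = Environment()
template = env.from_string("/* {{ app }}, target: {{ target_name }} */")
print(template.render(app="dbt", target_name="dev"))
# /* dbt, target: dev */
```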
@@ -11,31 +11,32 @@ from typing import (
     List,
     Callable,
 )
-from dbt.exceptions import DbtInternalError
-from dbt.utils import translate_aliases, md5
-from dbt.events.functions import fire_event
-from dbt.events.types import NewConnectionOpening
-from dbt.events.contextvars import get_node_info
-from typing_extensions import Protocol
-from dbt.dataclass_schema import (
+from typing_extensions import Protocol, Annotated
+
+from mashumaro.jsonschema.annotations import Pattern
+
+from dbt.adapters.utils import translate_aliases
+from dbt.common.exceptions import DbtInternalError
+from dbt.common.dataclass_schema import (
     dbtClassMixin,
     StrEnum,
     ExtensibleDbtClassMixin,
-    HyphenatedDbtClassMixin,
     ValidatedStringMixin,
-    register_pattern,
 )
-from dbt.contracts.util import Replaceable
+from dbt.common.contracts.util import Replaceable
+from dbt.common.utils import md5
+
+from dbt.common.events.functions import fire_event
+from dbt.adapters.events.types import NewConnectionOpening
+
+# TODO: this is a very bad dependency - shared global state
+from dbt.common.events.contextvars import get_node_info


 class Identifier(ValidatedStringMixin):
     ValidationRegex = r"^[A-Za-z_][A-Za-z0-9_]+$"


-# we need register_pattern for jsonschema validation
-register_pattern(Identifier, r"^[A-Za-z_][A-Za-z0-9_]+$")
-

 @dataclass
 class AdapterResponse(dbtClassMixin):
     _message: str
@@ -55,7 +56,8 @@ class ConnectionState(StrEnum):

 @dataclass(init=False)
 class Connection(ExtensibleDbtClassMixin, Replaceable):
-    type: Identifier
+    # Annotated is used by mashumaro for jsonschema generation
+    type: Annotated[Identifier, Pattern(r"^[A-Za-z_][A-Za-z0-9_]+$")]
     name: Optional[str] = None
     state: ConnectionState = ConnectionState.INIT
     transaction_open: bool = False
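The `Annotated[..., Pattern(...)]` form replaces the old `register_pattern` call because mashumaro can derive the JSON-schema constraint directly from the annotation. A hedged sketch, assuming mashumaro 3.x where `build_json_schema` lives in `mashumaro.jsonschema` (exact output shape may vary by version):

```python
from typing import Annotated

from mashumaro.jsonschema import build_json_schema
from mashumaro.jsonschema.annotations import Pattern

IdentifierLike = Annotated[str, Pattern(r"^[A-Za-z_][A-Za-z0-9_]+$")]
print(build_json_schema(IdentifierLike).to_dict())
# e.g. {'type': 'string', 'pattern': '^[A-Za-z_][A-Za-z0-9_]+$'}
```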
@@ -108,7 +110,7 @@ class LazyHandle:
     connection, updating the handle on the Connection.
     """

-    def __init__(self, opener: Callable[[Connection], Connection]):
+    def __init__(self, opener: Callable[[Connection], Connection]) -> None:
         self.opener = opener

     def resolve(self, connection: Connection) -> Connection:

@@ -161,6 +163,7 @@ class Credentials(ExtensibleDbtClassMixin, Replaceable, metaclass=abc.ABCMeta):
     @classmethod
     def __pre_deserialize__(cls, data):
         data = super().__pre_deserialize__(data)
+        # Need to fixup dbname => database, pass => password
         data = cls.translate_aliases(data)
         return data
@@ -220,10 +223,10 @@ DEFAULT_QUERY_COMMENT = """


 @dataclass
-class QueryComment(HyphenatedDbtClassMixin):
+class QueryComment(dbtClassMixin):
     comment: str = DEFAULT_QUERY_COMMENT
     append: bool = False
-    job_label: bool = False
+    job_label: bool = field(default=False, metadata={"alias": "job-label"})


 class AdapterRequiredConfig(HasCredentials, Protocol):

@@ -231,3 +234,4 @@ class AdapterRequiredConfig(HasCredentials, Protocol):
     query_comment: QueryComment
     cli_vars: Dict[str, Any]
     target_path: str
+    log_cache_events: bool
@@ -6,11 +6,11 @@ from typing import (
 )
 from typing_extensions import Protocol

-from dbt.dataclass_schema import dbtClassMixin, StrEnum
+from dbt.common.dataclass_schema import dbtClassMixin, StrEnum

-from dbt.contracts.util import Replaceable
-from dbt.exceptions import CompilationError, DataclassNotDictError
-from dbt.utils import deep_merge
+from dbt.common.contracts.util import Replaceable
+from dbt.common.exceptions import CompilationError, DataclassNotDictError
+from dbt.common.utils import deep_merge


 class RelationType(StrEnum):

@@ -19,6 +19,15 @@ class RelationType(StrEnum):
     CTE = "cte"
     MaterializedView = "materialized_view"
     External = "external"
+    Ephemeral = "ephemeral"
+
+
+class RelationConfig(Protocol):
+    name: str
+    database: str
+    schema: str
+    identifier: str
+    quoting_dict: Dict[str, bool]


 class ComponentName(StrEnum):
core/dbt/adapters/events/README.md (new file, 57 lines)
@@ -0,0 +1,57 @@
# Events Module

The Events module is responsible for communicating internal dbt structures into a consumable interface. Because the "event" classes are based entirely on protobuf definitions, the interface is really clearly defined, whether or not protobufs are used to consume it. We use Betterproto for compiling the protobuf message definitions into Python classes.

# Using the Events Module

The event module provides types that represent what is happening in dbt in `events.types`. These types are intended to represent an exhaustive list of all things happening within dbt that will need to be logged, streamed, or printed. To fire an event, `common.events.functions::fire_event` is the entry point to the module from everywhere in dbt.

# Logging

When events are processed via `fire_event`, nearly everything is logged. Whether or not the user has enabled the debug flag, all debug messages are still logged to the file. However, some events are particularly time consuming to construct because they return a huge amount of data. Today, the only messages in this category are cache events, and they are only logged if the `--log-cache-events` flag is on. This is important because these messages should not be created unless they are going to be logged, since they cause a noticeable performance degradation. These events use the `fire_event_if` function.

# Adding a New Event

* Add a new message in types.proto, and a second message with the same name + "Msg". The "Msg" message should have two fields, an "info" field of EventInfo, and a "data" field referring to the message name without "Msg"
* run the protoc compiler to update adapter_types_pb2.py: make adapter_proto_types
* Add a wrapping class in core/dbt/adapters/event/types.py with a Level superclass plus code and message methods

We have switched from using betterproto to using google protobuf, because of a lack of support for Struct fields in betterproto.

The google protobuf interface is janky and very much non-Pythonic. The "generated" classes in types_pb2.py do not resemble regular Python classes. They do not have normal constructors; they can only be constructed empty. They can be "filled" by setting fields individually or using a json_format method like ParseDict. We have wrapped the logging events with a class (in types.py) which allows using a constructor -- keywords only, no positional parameters.

## Required for Every Event

- a method `code`, that's unique across events
- assign a log level by using the Level mixin: `DebugLevel`, `InfoLevel`, `WarnLevel`, or `ErrorLevel`
- a message()

Example

```
class PartialParsingDeletedExposure(DebugLevel):
    def code(self):
        return "I049"

    def message(self) -> str:
        return f"Partial parsing: deleted exposure {self.unique_id}"
```

# Adapter Maintainers

To integrate existing log messages from adapters, you likely have a line of code like this in your adapter already:

```python
from dbt.logger import GLOBAL_LOGGER as logger
```

Simply change it to these two lines with your adapter's database name, and all your existing call sites will now use the new system for v1.0:

```python
from dbt.adapter.events.logging import AdapterLogger

logger = AdapterLogger("<database name>")
# e.g. AdapterLogger("Snowflake")
```

## Compiling types.proto

After adding a new message in `adapter_types.proto`, either:
- In the repository root directory: `make adapter_proto_types`
- In the `core/dbt/adapters/events` directory: `protoc -I=. --python_out=. types.proto`
core/dbt/adapters/events/__init__.py (new, empty file)

Some files were not shown because too many files have changed in this diff.