mirror of https://github.com/dbt-labs/dbt-core
synced 2025-12-23 00:01:28 +00:00

Compare commits: arky/add-p... ... macro-reso... (268 commits)
Commit SHAs:

60f87411d5, eb96e3deec, f68af070f3, 7ad1accf2b, ed8f5d38e4, 7ad6aa18da, 6796edd66e, e01eb30884,
ba53f053fd, b8de881ed3, d7d5e2335c, 160d0db238, 2cee8652a6, 7f777f8a42, 00f49206e9, 1bca662883,
41ac915949, 373125ecb8, 294ad82e50, 12bd1e87fb, 8bad75c65b, c836b7585e, 220f56d8d2, 32fde75504,
615ad1fe2d, 81236a3dca, 6d834a18ed, 2ab0f7b26b, 9bb970e6ef, e56a5dae8b, 1c9cec1787, 4d02ef637b,
19f027b7a7, 1d0a3e92c8, ab90c777d0, 3902137dfc, 0131feac68, 017faf4bd1, c2f7d75e9e, 51b94b26cc,
e24f9b3da7, b58e8e3ffc, f45b013321, e547c0ec64, 6871fc46b5, 931b2dbe40, bb35b3eb87, 01d481bc8d,
46b9a1d621, 839c720e91, d88c6987a2, 4ee950427a, 6c1822f186, c7c3ac872c, c4ff280436, 7fddd6e448,
1260782bd2, 333120b111, bb21403c9e, ac972948b8, 211392c4a4, 7317de23a3, af916666a2, 7de8930d1d,
200bcdcd9f, b9a603e3aa, 1a825484fb, a2a7b7d795, 4122f6c308, 6aeebc4c76, f44d704801, dbd02e54c2,
a89642a6f9, c141148616, 469a9aca06, 98310b6612, ef9d6a870f, 35f46dac8c, efa6339e18, 1baebb423c,
462df8395e, 35f214d9db, af0cbcb6a5, 2e35426d11, bf10a29f06, a7e2d9bc40, a3777496b5, edf6aedc51,
53845d0277, 3d27483658, 4f9bd0cb38, 3f7f7de179, 6461f5aacf, 339957b42c, 4391dc1a63, 964e0e4e8a,
549dbf3390, 70b2e15a25, bb249d612c, 17773bdb94, f30293359c, 0c85e6149f, ec57d7af94, df791f729c,
c6ff3abecd, eac13e3bd3, 46ee3f3d9c, 5e1f0c5fbc, c4f09b160a, 48c97e86dd, 416bc845ad, 408a78985a,
0c965c8115, f65e4b6940, a2d4424f92, 997f839cd6, 556fad50df, bb4214b5c2, f17c1f3fe7, d4fe9a8ad4,
2910aa29e4, 89cc073ea8, aa86fdfe71, 48e9ced781, 7b02bd1f02, 417fc2a735, 317128f790, e3dfb09b10,
d912654110, 34ab4cf9be, d597b80486, 3f5ebe81b9, f52bd9287b, f5baeeea1c, 3cc7044fb3, 26c7675c28,
8aaed0e29f, 5182e3c40c, 1e252c7664, 05ef3b6e44, ad04012b63, c93cba4603, 971669016f, 6c6f245914,
b39eeb328c, be94bf1f3c, e24a952e98, 89f20d12cf, ebeb0f1154, d66fe214d9, 75781503b8, 9aff3ca274,
7e2a08f3a5, a0e13561b1, 7eedfcd274, da779ac77c, adfa3226e3, e5e1a272ff, d8e8a78368, 7ae3de1fa0,
72898c7211, fc1a14a0e3, f063e4e01c, 07372db906, 48d04e8141, 6234267242, 1afbb87e99, d18a74ddb7,
4d3c6d9c7c, 10f9724827, 582faa129e, 4ec87a01e0, ff98685dd6, 424f3d218a, 661623f9f7, 49397b4d7b,
0553fd817c, 7ad971f720, f485c13035, c30b691164, d088d4493e, 770f804325, 37a29073de, 17cd145f09,
ac539fd5cf, 048553ddc3, dfe6b71fd9, 18ee93ca3a, cb4bc2d6e9, b0451806ef, b514e4c249, 8350dfead3,
34e6edbb13, 27be92903e, 9388030182, b7aee3f5a4, 83ff38ab24, 6603a44151, e69d4e7f14, 506f65e880,
41bb52762b, 8c98ef3e70, 44d1e73b4f, 53794fbaba, 556b4043e9, 424c636533, f63709260e, 991618dfc1,
1af489b1cd, a433c31d6e, 5814928e38, 6130a6e1d0, 7872f6a670, f230e418aa, 518eb73f88, 5b6d21d7da,
410506f448, 3cb44d37c0, f977ed7471, 3f5617b569, fe9c875d32, 22c40a4766, bcf140b3c1, e3692a6a3d,
e7489383a2, 70246c3f86, 0796c84da5, 718482fb02, a3fb66daa4, da34b80c26, ba5ab21140, 65f41a1e36,
0930c9c059, 1d193a9ab9, 3adc6dca61, 36d9f841d6, 48ad13de00, 42935cce05, e77f1c3b0f, 388838aa99,
d4d0990072, 4210d17f14, fbd12e78c9, 83d3421e72, 8bcbf73aaa, cc5f15885d, 20fdf55bf6, 955dcec68b,
2b8564b16f, 57da3e51cd, dede0e9747, 35d2fc1158, c5267335a3, 15c7b589c2, 0ada5e8bf7, 412ac8d1b9,
5df501a281, 3e4c61d020, cc39fe51b3, 89cd24388d, d5da0a8093, 88ae1f8871, 50b3d1deaa, 3b3def5b8a,
4f068a45ff, 23a9504a51, d0d4eba477, a3fab0b5a9
.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 1.7.0a1
+current_version = 1.8.0a1
 parse = (?P<major>[\d]+) # major version number
 	\.(?P<minor>[\d]+) # minor version number
 	\.(?P<patch>[\d]+) # patch version number
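The bump takes the pre-release from 1.7.0a1 to 1.8.0a1; the `parse` pattern above is what bumpversion uses to split a version string into named parts. Below is a minimal sketch of how such a pattern matches. The `prerelease` group is an illustrative assumption, since that part of the config is not shown in this hunk:

```python
import re

# Sketch of the version-parsing pattern from .bumpversion.cfg. Only the
# major/minor/patch groups appear in the hunk above; the `prerelease`
# group is an assumption added so a string like "1.8.0a1" fully matches.
VERSION_RE = re.compile(
    r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?P<prerelease>[a-z]+\d+)?"
)

match = VERSION_RE.match("1.8.0a1")
assert match is not None
print(match.groupdict())
# {'major': '1', 'minor': '8', 'patch': '0', 'prerelease': 'a1'}
```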
@@ -3,6 +3,7 @@
 For information on prior major and minor releases, see their changelogs:
 
+* [1.7](https://github.com/dbt-labs/dbt-core/blob/1.7.latest/CHANGELOG.md)
 * [1.6](https://github.com/dbt-labs/dbt-core/blob/1.6.latest/CHANGELOG.md)
 * [1.5](https://github.com/dbt-labs/dbt-core/blob/1.5.latest/CHANGELOG.md)
 * [1.4](https://github.com/dbt-labs/dbt-core/blob/1.4.latest/CHANGELOG.md)
@@ -0,0 +1,6 @@
+kind: Breaking Changes
+body: Remove adapter.get_compiler interface
+time: 2023-11-27T11:47:57.443202-05:00
+custom:
+  Author: michelleark
+  Issue: "9148"
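The `.changes/unreleased/*.yaml` files added and deleted throughout this diff all follow the changie-style schema shown above: a `kind`, a `body`, a `time`, and a `custom` block carrying `Author` plus an `Issue` or `PR` number. A minimal sketch of reading one such entry with PyYAML; the dataclass and loader below are illustrative, not dbt's own changelog tooling:

```python
from dataclasses import dataclass
from typing import Optional

import yaml  # PyYAML, assumed available

# Mirrors the fields visible in the entries in this diff.
@dataclass
class ChangelogEntry:
    kind: str
    body: str
    time: str
    author: str
    issue: Optional[str] = None
    pr: Optional[str] = None

def load_entry(text: str) -> ChangelogEntry:
    raw = yaml.safe_load(text)
    custom = raw.get("custom", {})
    return ChangelogEntry(
        kind=raw["kind"],
        body=raw["body"],
        time=str(raw["time"]),  # safe_load may parse this as a datetime
        author=custom.get("Author", ""),
        issue=str(custom["Issue"]) if "Issue" in custom else None,
        pr=str(custom["PR"]) if "PR" in custom else None,
    )

entry = load_entry("""\
kind: Breaking Changes
body: Remove adapter.get_compiler interface
time: 2023-11-27T11:47:57.443202-05:00
custom:
  Author: michelleark
  Issue: "9148"
""")
print(entry.kind, "-", entry.body)
```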
@@ -0,0 +1,6 @@
+kind: Breaking Changes
+body: Move AdapterLogger to adapters folder
+time: 2023-11-28T13:43:56.853925-08:00
+custom:
+  Author: colin-rogers-dbt
+  Issue: "9151"

@@ -0,0 +1,7 @@
+kind: Breaking Changes
+body: move event manager setup back to core, remove ref to global EVENT_MANAGER and
+  clean up event manager functions
+time: 2023-11-30T13:53:48.645192-08:00
+custom:
+  Author: colin-rogers-dbt
+  Issue: "9150"

@@ -1,6 +0,0 @@
-kind: "Dependencies"
-body: "Bump mypy from 1.3.0 to 1.4.0"
-time: 2023-06-21T00:57:52.00000Z
-custom:
-  Author: dependabot[bot]
-  PR: 7912
.changes/unreleased/Dependencies-20231031-131954.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Dependencies
+body: Begin using DSI 0.4.x
+time: 2023-10-31T13:19:54.750009-07:00
+custom:
+  Author: QMalcolm peterallenwebb
+  PR: "8892"

.changes/unreleased/Dependencies-20231106-130051.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Dependencies
+body: Update typing-extensions version to >=4.4
+time: 2023-11-06T13:00:51.062386-08:00
+custom:
+  Author: tlento
+  PR: "9012"
@@ -1,6 +0,0 @@
-kind: Docs
-body: Corrected spelling of "Partiton"
-time: 2023-07-15T20:09:07.057361092+02:00
-custom:
-  Author: pgoslatara
-  Issue: "8100"

@@ -1,6 +0,0 @@
-kind: Docs
-body: Remove static SQL codeblock for metrics
-time: 2023-07-18T19:24:22.155323+02:00
-custom:
-  Author: marcodamore
-  Issue: "436"
.changes/unreleased/Docs-20231106-123157.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Docs
+body: fix get_custom_database docstring
+time: 2023-11-06T12:31:57.525711Z
+custom:
+  Author: LeoTheGriff
+  Issue: "9003"
.changes/unreleased/Features-20230915-123733.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Features
+body: 'Allow adapters to include package logs in dbt standard logging '
+time: 2023-09-15T12:37:33.862862-07:00
+custom:
+  Author: colin-rogers-dbt
+  Issue: "7859"

.changes/unreleased/Features-20231017-143620.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Features
+body: Add drop_schema_named macro
+time: 2023-10-17T14:36:20.612289-07:00
+custom:
+  Author: colin-rogers-dbt
+  Issue: "8025"

.changes/unreleased/Features-20231026-110821.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Features
+body: migrate utils to common and adapters folders
+time: 2023-10-26T11:08:21.458709-07:00
+custom:
+  Author: colin-rogers-dbt
+  Issue: "8924"

.changes/unreleased/Features-20231026-123556.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Features
+body: Move Agate helper client into common
+time: 2023-10-26T12:35:56.538587-07:00
+custom:
+  Author: MichelleArk
+  Issue: "8926"

.changes/unreleased/Features-20231026-123913.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Features
+body: remove usage of dbt.config.PartialProject from dbt/adapters
+time: 2023-10-26T12:39:13.904116-07:00
+custom:
+  Author: MichelleArk
+  Issue: "8928"

.changes/unreleased/Features-20231031-132022.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Features
+body: Add exports to SavedQuery spec
+time: 2023-10-31T13:20:22.448158-07:00
+custom:
+  Author: QMalcolm peterallenwebb
+  Issue: "8892"

.changes/unreleased/Features-20231107-135635.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Features
+body: Remove legacy logger
+time: 2023-11-07T13:56:35.186648-08:00
+custom:
+  Author: colin-rogers-dbt
+  Issue: "8027"

.changes/unreleased/Features-20231110-154255.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Features
+body: Support setting export configs hierarchically via saved query and project configs
+time: 2023-11-10T15:42:55.042317-08:00
+custom:
+  Author: QMalcolm
+  Issue: "8956"
@@ -1,6 +0,0 @@
-kind: Fixes
-body: Fixed double-underline
-time: 2023-06-25T14:27:31.231253719+08:00
-custom:
-  Author: lllong33
-  Issue: "5301"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Enable converting deprecation warnings to errors
-time: 2023-07-18T12:55:18.03914-04:00
-custom:
-  Author: michelleark
-  Issue: "8130"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Add status to Parse Inline Error
-time: 2023-07-20T12:27:23.085084-07:00
-custom:
-  Author: ChenyuLInx
-  Issue: "8173"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Ensure `warn_error_options` get serialized in `invocation_args_dict`
-time: 2023-07-20T16:15:13.761813-07:00
-custom:
-  Author: QMalcolm
-  Issue: "7694"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Stop detecting materialization macros based on macro name
-time: 2023-07-20T17:01:12.496238-07:00
-custom:
-  Author: QMalcolm
-  Issue: "6231"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Update `dbt deps` download retry logic to handle `EOFError` exceptions
-time: 2023-07-20T17:24:22.969951-07:00
-custom:
-  Author: QMalcolm
-  Issue: "6653"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Improve handling of CTE injection with ephemeral models
-time: 2023-07-26T10:44:48.888451-04:00
-custom:
-  Author: gshank
-  Issue: "8213"
.changes/unreleased/Fixes-20231013-130943.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: For packages installed with tarball method, fetch metadata to resolve nested dependencies
+time: 2023-10-13T13:09:43.188308-04:00
+custom:
+  Author: adamlopez
+  Issue: "8621"

.changes/unreleased/Fixes-20231016-163953.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix partial parsing not working for semantic model change
+time: 2023-10-16T16:39:53.05058-07:00
+custom:
+  Author: ChenyuLInx
+  Issue: "8859"

.changes/unreleased/Fixes-20231024-110151.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Handle unknown `type_code` for model contracts
+time: 2023-10-24T11:01:51.980781-06:00
+custom:
+  Author: dbeatty10
+  Issue: 8877 8353

.changes/unreleased/Fixes-20231024-145504.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Add back contract enforcement for temporary tables on postgres
+time: 2023-10-24T14:55:04.051683-05:00
+custom:
+  Author: emmyoop
+  Issue: "8857"

.changes/unreleased/Fixes-20231024-155400.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Rework get_catalog implementation to retain previous adapter interface semantics
+time: 2023-10-24T15:54:00.628086-04:00
+custom:
+  Author: peterallenwebb
+  Issue: "8846"

.changes/unreleased/Fixes-20231026-002536.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Add version to fqn when version==0
+time: 2023-10-26T00:25:36.259356-05:00
+custom:
+  Author: aranke
+  Issue: "8836"

.changes/unreleased/Fixes-20231030-093734.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix cased comparison in catalog-retrieval function.
+time: 2023-10-30T09:37:34.258612-04:00
+custom:
+  Author: peterallenwebb
+  Issue: "8939"

.changes/unreleased/Fixes-20231031-005345.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Catalog queries now assign the correct type to materialized views
+time: 2023-10-31T00:53:45.486203-04:00
+custom:
+  Author: mikealfare
+  Issue: "8864"

.changes/unreleased/Fixes-20231031-144837.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix compilation exception running empty seed file and support new Integer agate data_type
+time: 2023-10-31T14:48:37.774871-04:00
+custom:
+  Author: gshank
+  Issue: "8895"

.changes/unreleased/Fixes-20231101-155824.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Make relation filtering None-tolerant for maximal flexibility across adapters.
+time: 2023-11-01T15:58:24.552054-04:00
+custom:
+  Author: peterallenwebb
+  Issue: "8974"

.changes/unreleased/Fixes-20231106-155933.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
+kind: Fixes
+body: Update run_results.json from previous versions of dbt to support deferral and
+  rerun from failure
+time: 2023-11-06T15:59:33.677915-05:00
+custom:
+  Author: jtcohen6 peterallenwebb
+  Issue: "9010"

.changes/unreleased/Fixes-20231107-092358.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix git repository with subdirectory for Deps
+time: 2023-11-07T09:23:58.214271-08:00
+custom:
+  Author: ChenyuLInx
+  Issue: "9000"

.changes/unreleased/Fixes-20231107-094130.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
+kind: Fixes
+body: Use MANIFEST.in to recursively include all jinja templates; fixes issue where
+  some templates were not included in the distribution
+time: 2023-11-07T09:41:30.121733-05:00
+custom:
+  Author: mikealfare
+  Issue: "9016"

.changes/unreleased/Fixes-20231113-114956.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix formatting of tarball information in packages-lock.yml
+time: 2023-11-13T11:49:56.437007-08:00
+custom:
+  Author: ChenyuLInx QMalcolm
+  Issue: "9062"

.changes/unreleased/Fixes-20231127-154310.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: 'deps: Lock git packages to commit SHA during resolution'
+time: 2023-11-27T15:43:10.122069+01:00
+custom:
+  Author: jtcohen6
+  Issue: "9050"

.changes/unreleased/Fixes-20231127-154347.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: 'deps: Use PackageRenderer to read package-lock.json'
+time: 2023-11-27T15:43:47.842423+01:00
+custom:
+  Author: jtcohen6
+  Issue: "9127"

.changes/unreleased/Fixes-20231128-155225.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: 'Get sources working again in dbt docs generate'
+time: 2023-11-28T15:52:25.738256Z
+custom:
+  Author: aranke
+  Issue: "9119"
@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Refactor flaky test pp_versioned_models
-time: 2023-07-19T12:46:11.972481-04:00
-custom:
-  Author: gshank
-  Issue: "7781"

@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: format exception from dbtPlugin.initialize
-time: 2023-07-19T16:33:34.586377-04:00
-custom:
-  Author: michelleark
-  Issue: "8152"

@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: A way to control maxBytes for a single dbt.log file
-time: 2023-07-24T15:06:54.263822-07:00
-custom:
-  Author: ChenyuLInx
-  Issue: "8199"

@@ -1,7 +0,0 @@
-kind: Under the Hood
-body: Ref expressions with version can now be processed by the latest version of the
-  high-performance dbt-extractor library.
-time: 2023-07-25T10:26:09.902878-04:00
-custom:
-  Author: peterallenwebb
-  Issue: "7688"
.changes/unreleased/Under the Hood-20230831-164435.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Added more type annotations.
+time: 2023-08-31T16:44:35.737954-04:00
+custom:
+  Author: peterallenwebb
+  Issue: "8537"

.changes/unreleased/Under the Hood-20231026-184953.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Remove usage of dbt.include.global_project in dbt/adapters
+time: 2023-10-26T18:49:53.36449-04:00
+custom:
+  Author: michelleark
+  Issue: "8925"

.changes/unreleased/Under the Hood-20231027-140048.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Add a no-op runner for Saved Qeury
+time: 2023-10-27T14:00:48.4755-07:00
+custom:
+  Author: ChenyuLInx
+  Issue: "8893"

.changes/unreleased/Under the Hood-20231101-102758.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: remove dbt.flags.MP_CONTEXT usage in dbt/adapters
+time: 2023-11-01T10:27:58.790153-04:00
+custom:
+  Author: michelleark
+  Issue: "8967"

.changes/unreleased/Under the Hood-20231101-173124.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: 'Remove usage of dbt.flags.LOG_CACHE_EVENTS in dbt/adapters'
+time: 2023-11-01T17:31:24.974093-04:00
+custom:
+  Author: michelleark
+  Issue: "8969"

.changes/unreleased/Under the Hood-20231103-195222.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
+kind: Under the Hood
+body: Move CatalogRelationTypes test case to the shared test suite to be reused by
+  adapter maintainers
+time: 2023-11-03T19:52:22.694394-04:00
+custom:
+  Author: mikealfare
+  Issue: "8952"

.changes/unreleased/Under the Hood-20231106-080422.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Treat SystemExit as an interrupt if raised during node execution.
+time: 2023-11-06T08:04:22.022179-05:00
+custom:
+  Author: benmosher
+  Issue: n/a

.changes/unreleased/Under the Hood-20231106-105730.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Removing unused 'documentable'
+time: 2023-11-06T10:57:30.694056-08:00
+custom:
+  Author: QMalcolm
+  Issue: "8871"

.changes/unreleased/Under the Hood-20231107-135728.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Remove use of dbt/core exceptions in dbt/adapter
+time: 2023-11-07T13:57:28.683727-08:00
+custom:
+  Author: colin-rogers-dbt MichelleArk
+  Issue: "8920"

.changes/unreleased/Under the Hood-20231107-191546.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Cache dbt plugin modules to improve integration test performance
+time: 2023-11-07T19:15:46.170151-05:00
+custom:
+  Author: peterallenwebb
+  Issue: "9029"

.changes/unreleased/Under the Hood-20231111-175350.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
+kind: Under the Hood
+body: Fix test_current_timestamp_matches_utc test; allow for MacOS runner system clock
+  variance
+time: 2023-11-11T17:53:50.098843-05:00
+custom:
+  Author: mikealfare
+  Issue: "9057"

.changes/unreleased/Under the Hood-20231116-174251.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
+kind: Under the Hood
+body: Remove usage of dbt.deprecations in dbt/adapters, enable core & adapter-specific
+  event types and protos
+time: 2023-11-16T17:42:51.005023-05:00
+custom:
+  Author: michelleark
+  Issue: 8927 8918

.changes/unreleased/Under the Hood-20231120-134735.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Clean up unused adaptor folders
+time: 2023-11-20T13:47:35.923794-08:00
+custom:
+  Author: ChenyuLInx
+  Issue: "9123"

.changes/unreleased/Under the Hood-20231120-183214.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
+kind: Under the Hood
+body: Move column constraints into common/contracts, removing another dependency of
+  adapters on core.
+time: 2023-11-20T18:32:14.859503-05:00
+custom:
+  Author: peterallenwebb
+  Issue: "9024"

.changes/unreleased/Under the Hood-20231128-170732.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Move dbt.semver to dbt.common.semver and update references.
+time: 2023-11-28T17:07:32.172421-08:00
+custom:
+  Author: versusfacit
+  Issue: "9039"

.changes/unreleased/Under the Hood-20231130-135432.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Move lowercase utils method to common
+time: 2023-11-30T13:54:32.561673-08:00
+custom:
+  Author: colin-rogers-dbt
+  Issue: "9180"

.changes/unreleased/Under the Hood-20231205-093544.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Remove usages of dbt.clients.jinja in dbt/adapters
+time: 2023-12-05T09:35:44.845352+09:00
+custom:
+  Author: michelleark
+  Issue: "9205"

.changes/unreleased/Under the Hood-20231205-120559.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Remove usage of dbt.contracts in dbt/adapters
+time: 2023-12-05T12:05:59.936775+09:00
+custom:
+  Author: michelleark
+  Issue: "9208"

.changes/unreleased/Under the Hood-20231205-165812.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Remove usage of dbt.contracts.graph.nodes.ResultNode in dbt/adapters
+time: 2023-12-05T16:58:12.932172+09:00
+custom:
+  Author: michelleark
+  Issue: "9214"
.changes/unreleased/Under the Hood-20231205-170725.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Introduce RelationConfig Protocol, consolidate Relation.create_from
+time: 2023-12-05T17:07:25.33861+09:00
+custom:
+  Author: michelleark
+  Issue: "9215"
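The entry above introduces a `RelationConfig` Protocol and consolidates `Relation.create_from`. A `typing.Protocol` defines a structural interface: any object with the right attributes satisfies it, without inheriting from it. A hedged sketch of the pattern follows; the attribute names below are hypothetical, not dbt's actual RelationConfig fields:

```python
from typing import Optional, Protocol

# Illustrative structural interface in the style the entry above describes.
# These attribute names are guesses for demonstration only.
class RelationConfigLike(Protocol):
    database: Optional[str]
    schema: str
    identifier: str

def describe(config: RelationConfigLike) -> str:
    # Any object exposing these attributes can be passed here; no
    # inheritance from RelationConfigLike is required.
    return f"{config.database}.{config.schema}.{config.identifier}"
```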
.flake8 (2 changes)
@@ -10,3 +10,5 @@ ignore =
     E741
     E501 # long line checking is done in black
 exclude = test/
+per-file-ignores =
+    */__init__.py: F401
.gitattributes (2 changes)
@@ -1,4 +1,4 @@
-core/dbt/include/index.html binary
+core/dbt/task/docs/index.html binary
 tests/functional/artifacts/data/state/*/manifest.json binary
 core/dbt/docs/build/html/searchindex.js binary
 core/dbt/docs/build/html/index.html binary
.github/CODEOWNERS (19 changes)
@@ -13,23 +13,6 @@
 # the core team as a whole will be assigned
 * @dbt-labs/core-team
 
-### OSS Tooling Guild
-
-/.github/ @dbt-labs/guild-oss-tooling
-.bumpversion.cfg @dbt-labs/guild-oss-tooling
-
-.changie.yaml @dbt-labs/guild-oss-tooling
-
-pre-commit-config.yaml @dbt-labs/guild-oss-tooling
-pytest.ini @dbt-labs/guild-oss-tooling
-tox.ini @dbt-labs/guild-oss-tooling
-
-pyproject.toml @dbt-labs/guild-oss-tooling
-requirements.txt @dbt-labs/guild-oss-tooling
-dev_requirements.txt @dbt-labs/guild-oss-tooling
-/core/setup.py @dbt-labs/guild-oss-tooling
-/core/MANIFEST.in @dbt-labs/guild-oss-tooling
-
 ### ADAPTERS
 
 # Adapter interface ("base" + "sql" adapter defaults, cache)
@@ -40,7 +23,7 @@ dev_requirements.txt @dbt-labs/guild-oss-tooling
 
 # Postgres plugin
 /plugins/ @dbt-labs/core-adapters
-/plugins/postgres/setup.py @dbt-labs/core-adapters @dbt-labs/guild-oss-tooling
+/plugins/postgres/setup.py @dbt-labs/core-adapters
 
 # Functional tests for adapter plugins
 /tests/adapter @dbt-labs/core-adapters
.github/ISSUE_TEMPLATE/implementation-ticket.yml (24 changes)
@@ -1,7 +1,7 @@
 name: 🛠️ Implementation
 description: This is an implementation ticket intended for use by the maintainers of dbt-core
 title: "[<project>] <title>"
-labels: ["user_docs"]
+labels: ["user docs"]
 body:
   - type: markdown
     attributes:
@@ -11,7 +11,7 @@ body:
       label: Housekeeping
      description: >
         A couple friendly reminders:
-        1. Remove the `user_docs` label if the scope of this work does not require changes to https://docs.getdbt.com/docs: no end-user interface (e.g. yml spec, CLI, error messages, etc) or functional changes
+        1. Remove the `user docs` label if the scope of this work does not require changes to https://docs.getdbt.com/docs: no end-user interface (e.g. yml spec, CLI, error messages, etc) or functional changes
         2. Link any blocking issues in the "Blocked on" field under the "Core devs & maintainers" project.
       options:
         - label: I am a maintainer of dbt-core
@@ -25,11 +25,29 @@ body:
       required: true
   - type: textarea
     attributes:
-      label: Acceptance critera
+      label: Acceptance criteria
       description: |
         What is the definition of done for this ticket? Include any relevant edge cases and/or test cases
     validations:
       required: true
+  - type: textarea
+    attributes:
+      label: Impact to Other Teams
+      description: |
+        Will this change impact other teams? Include details of the kinds of changes required (new tests, code changes, related tickets) and _add the relevant `Impact:[team]` label_.
+      placeholder: |
+        Example: This change impacts `dbt-redshift` because the tests will need to be modified. The `Impact:[Adapter]` label has been added.
+    validations:
+      required: true
+  - type: textarea
+    attributes:
+      label: Will backports be required?
+      description: |
+        Will this change need to be backported to previous versions? Add details, possible blockers to backporting and _add the relevant backport labels `backport 1.x.latest`_
+      placeholder: |
+        Example: Backport to 1.6.latest, 1.5.latest and 1.4.latest. Since 1.4 isn't using click, the backport may be complicated. The `backport 1.6.latest`, `backport 1.5.latest` and `backport 1.4.latest` labels have been added.
+    validations:
+      required: true
   - type: textarea
     attributes:
       label: Context
.github/dependabot.yml (7 changes)
@@ -28,3 +28,10 @@ updates:
     schedule:
       interval: "weekly"
     rebase-strategy: "disabled"
+
+  # github dependencies
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "weekly"
+    rebase-strategy: "disabled"
.github/pull_request_template.md (8 changes)
@@ -1,15 +1,12 @@
 resolves #
-[docs](https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose) dbt-labs/docs.getdbt.com/#
 
 <!---
   Include the number of the issue addressed by this PR above if applicable.
   PRs for code changes without an associated issue *will not be merged*.
   See CONTRIBUTING.md for more information.
 
-  Include the number of the docs issue that was opened for this PR. If
-  this change has no user-facing implications, "N/A" suffices instead. New
-  docs tickets can be created by clicking the link above or by going to
-  https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose.
+  Add the `user docs` label to this PR if it will need docs changes. An
+  issue will get opened in docs.getdbt.com upon successful merge of this PR.
 -->
 
 ### Problem
@@ -33,3 +30,4 @@ resolves #
 - [ ] I have run this code in development and it appears to resolve the stated issue
 - [ ] This PR includes tests, or tests are not required/relevant for this PR
 - [ ] This PR has no interface changes (e.g. macros, cli, logs, json artifacts, config files, adapter interface, etc) or this PR has already received feedback and approval from Product or DX
+- [ ] This PR includes [type annotations](https://docs.python.org/3/library/typing.html) for new and modified functions
.github/workflows/changelog-existence.yml (8 changes)
@@ -2,10 +2,8 @@
 # Checks that a file has been committed under the /.changes directory
 # as a new CHANGELOG entry. Cannot check for a specific filename as
 # it is dynamically generated by change type and timestamp.
-# This workflow should not require any secrets since it runs for PRs
-# from forked repos.
-# By default, secrets are not passed to workflows running from
-# a forked repo.
+# This workflow runs on pull_request_target because it requires
+# secrets to post comments.
 
 # **why?**
 # Ensure code change gets reflected in the CHANGELOG.
@@ -19,7 +17,7 @@
 name: Check Changelog Entry
 
 on:
-  pull_request:
+  pull_request_target:
     types: [opened, reopened, labeled, unlabeled, synchronize]
   workflow_dispatch:
.github/workflows/docs-issue.yml (new file, 43 lines)
@@ -0,0 +1,43 @@
+# **what?**
+# Open an issue in docs.getdbt.com when a PR is labeled `user docs`
+
+# **why?**
+# To reduce barriers for keeping docs up to date
+
+# **when?**
+# When a PR is labeled `user docs` and is merged. Runs on pull_request_target to run off the workflow already merged,
+# not the workflow that existed on the PR branch. This allows old PRs to get comments.
+
+
+name: Open issues in docs.getdbt.com repo when a PR is labeled
+run-name: "Open an issue in docs.getdbt.com for PR #${{ github.event.pull_request.number }}"
+
+on:
+  pull_request_target:
+    types: [labeled, closed]
+
+defaults:
+  run:
+    shell: bash
+
+permissions:
+  issues: write # opens new issues
+  pull-requests: write # comments on PRs
+
+
+jobs:
+  open_issues:
+    # we only want to run this when the PR has been merged or the label in the labeled event is `user docs`. Otherwise it runs the
+    # risk of duplicaton of issues being created due to merge and label both triggering this workflow to run and neither having
+    # generating the comment before the other runs. This lives here instead of the shared workflow because this is where we
+    # decide if it should run or not.
+    if: |
+      (github.event.pull_request.merged == true) &&
+      ((github.event.action == 'closed' && contains( github.event.pull_request.labels.*.name, 'user docs')) ||
+      (github.event.action == 'labeled' && github.event.label.name == 'user docs'))
+    uses: dbt-labs/actions/.github/workflows/open-issue-in-repo.yml@main
+    with:
+      issue_repository: "dbt-labs/docs.getdbt.com"
+      issue_title: "Docs Changes Needed from ${{ github.event.repository.name }} PR #${{ github.event.pull_request.number }}"
+      issue_body: "At a minimum, update body to include a link to the page on docs.getdbt.com requiring updates and what part(s) of the page you would like to see updated."
+    secrets: inherit
.github/workflows/main.yml (26 changes)
@@ -36,7 +36,7 @@ defaults:
 # top-level adjustments can be made here
 env:
   # number of parallel processes to spawn for python integration testing
-  PYTHON_INTEGRATION_TEST_WORKERS: ${{ vars.PYTHON_INTEGRATION_TEST_WORKERS }}
+  PYTHON_INTEGRATION_TEST_WORKERS: 5
 
 jobs:
   code-quality:
@@ -108,8 +108,9 @@ jobs:
       - name: Upload Unit Test Coverage to Codecov
         if: ${{ matrix.python-version == '3.11' }}
         uses: codecov/codecov-action@v3
-        env:
-          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          flags: unit
 
   integration-metadata:
     name: integration test metadata generation
@@ -221,17 +222,26 @@ jobs:
      - name: Upload Integration Test Coverage to Codecov
         if: ${{ matrix.python-version == '3.11' }}
         uses: codecov/codecov-action@v3
-        env:
-          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          flags: integration
 
   integration-report:
-    name: integration test suite
+    if: ${{ always() }}
+    name: Integration Test Suite
     runs-on: ubuntu-latest
     needs: integration
     steps:
-      - name: "[Notification] Integration test suite passes"
+      - name: "Integration Tests Failed"
+        if: ${{ contains(needs.integration.result, 'failure') || contains(needs.integration.result, 'cancelled') }}
+        # when this is true the next step won't execute
         run: |
-          echo "::notice title="Integration test suite passes""
+          echo "::notice title='Integration test suite failed'"
+          exit 1
+
+      - name: "Integration Tests Passed"
+        run: |
+          echo "::notice title='Integration test suite passed'"
 
   build:
     name: build packages
.github/workflows/release-docker.yml (6 changes)
@@ -83,7 +83,7 @@ jobs:
           password: ${{ secrets.GITHUB_TOKEN }}
 
       - name: Build and push MAJOR.MINOR.PATCH tag
-        uses: docker/build-push-action@v4
+        uses: docker/build-push-action@v5
         with:
           file: docker/Dockerfile
           push: True
@@ -94,7 +94,7 @@ jobs:
             ghcr.io/dbt-labs/${{ github.event.inputs.package }}:${{ github.event.inputs.version_number }}
 
       - name: Build and push MINOR.latest tag
-        uses: docker/build-push-action@v4
+        uses: docker/build-push-action@v5
         if: ${{ needs.get_version_meta.outputs.minor_latest == 'True' }}
         with:
           file: docker/Dockerfile
@@ -106,7 +106,7 @@ jobs:
             ghcr.io/dbt-labs/${{ github.event.inputs.package }}:${{ needs.get_version_meta.outputs.major }}.${{ needs.get_version_meta.outputs.minor }}.latest
 
       - name: Build and push latest tag
-        uses: docker/build-push-action@v4
+        uses: docker/build-push-action@v5
         if: ${{ needs.get_version_meta.outputs.latest == 'True' }}
         with:
           file: docker/Dockerfile
.github/workflows/repository-cleanup.yml (new file, 30 lines)
@@ -0,0 +1,30 @@
+# **what?**
+# Cleanup branches left over from automation and testing. Also cleanup
+# draft releases from release testing.
+
+# **why?**
+# The automations are leaving behind branches and releases that clutter
+# the repository. Sometimes we need them to debug processes so we don't
+# want them immediately deleted. Running on Saturday to avoid running
+# at the same time as an actual release to prevent breaking a release
+# mid-release.
+
+# **when?**
+# Mainly on a schedule of 12:00 Saturday.
+# Manual trigger can also run on demand
+
+name: Repository Cleanup
+
+on:
+  schedule:
+    - cron: '0 12 * * SAT' # At 12:00 on Saturday - details in `why` above
+
+  workflow_dispatch: # for manual triggering
+
+permissions:
+  contents: write
+
+jobs:
+  cleanup-repo:
+    uses: dbt-labs/actions/.github/workflows/repository-cleanup.yml@main
+    secrets: inherit
@@ -21,7 +21,7 @@ permissions: read-all
 # top-level adjustments can be made here
 env:
   # number of parallel processes to spawn for python testing
-  PYTHON_INTEGRATION_TEST_WORKERS: ${{ vars.PYTHON_INTEGRATION_TEST_WORKERS }}
+  PYTHON_INTEGRATION_TEST_WORKERS: 5
 
 jobs:
   integration-metadata:
.pre-commit-config.yaml
@@ -1,7 +1,7 @@
 # Configuration for pre-commit hooks (see https://pre-commit.com/).
 # Eventually the hooks described here will be run as tests before merging each PR.
 
-exclude: ^(core/dbt/docs/build/|core/dbt/events/types_pb2.py)
+exclude: ^(core/dbt/docs/build/|core/dbt/common/events/types_pb2.py|core/dbt/events/core_types_pb2.py|core/dbt/adapters/events/adapter_types_pb2.py)
 
 # Force all unspecified python hooks to run python 3.8
 default_language_version:
@@ -37,7 +37,7 @@ repos:
     alias: flake8-check
     stages: [manual]
 - repo: https://github.com/pre-commit/mirrors-mypy
-  rev: v1.4.0
+  rev: v1.4.1
   hooks:
   - id: mypy
     # N.B.: Mypy is... a bit fragile.
@@ -26,7 +26,7 @@ Legacy tests are found in the 'test' directory:
 
 The "tasks" map to top-level dbt commands. So `dbt run` => task.run.RunTask, etc. Some are more like abstract base classes (GraphRunnableTask, for example) but all the concrete types outside of task should map to tasks. Currently one executes at a time. The tasks kick off their “Runners” and those do execute in parallel. The parallelism is managed via a thread pool, in GraphRunnableTask.
 
-core/dbt/include/index.html
+core/dbt/task/docs/index.html
 This is the docs website code. It comes from the dbt-docs repository, and is generated when a release is packaged.
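The paragraph above describes the task/runner split: one task executes at a time, and its runners fan out over a thread pool. A minimal sketch of that shape using `concurrent.futures`; the class and function names here are illustrative, not GraphRunnableTask itself:

```python
from concurrent.futures import ThreadPoolExecutor, as_completed

# Sketch of the pattern described above: a single "task" fans its node
# "runners" out onto a thread pool and collects results as they finish.
class NodeRunner:
    def __init__(self, node_name: str) -> None:
        self.node_name = node_name

    def run(self) -> str:
        # Real runners compile and execute a node against the warehouse.
        return f"ran {self.node_name}"

def run_task(node_names: list, threads: int = 4) -> list:
    runners = [NodeRunner(name) for name in node_names]
    results = []
    with ThreadPoolExecutor(max_workers=threads) as pool:
        futures = [pool.submit(runner.run) for runner in runners]
        for future in as_completed(futures):
            results.append(future.result())
    return results

print(run_task(["model_a", "model_b", "model_c"]))
```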
@@ -10,6 +10,7 @@
 For information on prior major and minor releases, see their changelogs:
 
+* [1.7](https://github.com/dbt-labs/dbt-core/blob/1.7.latest/CHANGELOG.md)
 * [1.6](https://github.com/dbt-labs/dbt-core/blob/1.6.latest/CHANGELOG.md)
 * [1.5](https://github.com/dbt-labs/dbt-core/blob/1.5.latest/CHANGELOG.md)
 * [1.4](https://github.com/dbt-labs/dbt-core/blob/1.4.latest/CHANGELOG.md)
Makefile (11 changes)
@@ -40,7 +40,16 @@ dev: dev_req ## Installs dbt-* packages in develop mode along with development d
 
 .PHONY: proto_types
 proto_types: ## generates google protobuf python file from types.proto
-	protoc -I=./core/dbt/events --python_out=./core/dbt/events ./core/dbt/events/types.proto
+	protoc -I=./core/dbt/common/events --python_out=./core/dbt/common/events ./core/dbt/common/events/types.proto
+
+.PHONY: core_proto_types
+core_proto_types: ## generates google protobuf python file from core_types.proto
+	protoc -I=./core/dbt/events --python_out=./core/dbt/events ./core/dbt/events/core_types.proto
+
+.PHONY: adapter_proto_types
+adapter_proto_types: ## generates google protobuf python file from core_types.proto
+	protoc -I=./core/dbt/adapters/events --python_out=./core/dbt/adapters/events ./core/dbt/adapters/events/adapter_types.proto
+
 
 .PHONY: mypy
 mypy: .env ## Runs mypy against staged changes for static type checking.
codecov.yml (new file, 13 lines)
@@ -0,0 +1,13 @@
+ignore:
+  - ".github"
+  - ".changes"
+coverage:
+  status:
+    project:
+      default:
+        target: auto
+        threshold: 0.1% # Reduce noise by ignoring rounding errors in coverage drops
+    patch:
+      default:
+        target: auto
+        threshold: 80%
@@ -1,7 +1,7 @@
 # these are all just exports, #noqa them so flake8 will be happy
 
 # TODO: Should we still include this in the `adapters` namespace?
-from dbt.contracts.connection import Credentials  # noqa: F401
+from dbt.adapters.contracts.connection import Credentials  # noqa: F401
 from dbt.adapters.base.meta import available  # noqa: F401
 from dbt.adapters.base.connections import BaseConnectionManager  # noqa: F401
 from dbt.adapters.base.relation import (  # noqa: F401
@@ -2,17 +2,17 @@ from dataclasses import dataclass
 import re
 from typing import Dict, ClassVar, Any, Optional
 
-from dbt.exceptions import DbtRuntimeError
+from dbt.common.exceptions import DbtRuntimeError
 
 
 @dataclass
 class Column:
+    # Note: This is automatically used by contract code
+    # No-op conversions (INTEGER => INT) have been removed.
+    # Any adapter that wants to take advantage of "translate_type"
+    # should create a ClassVar with the appropriate conversions.
     TYPE_LABELS: ClassVar[Dict[str, str]] = {
         "STRING": "TEXT",
-        "TIMESTAMP": "TIMESTAMP",
-        "FLOAT": "FLOAT",
-        "INTEGER": "INT",
-        "BOOLEAN": "BOOLEAN",
     }
     column: str
     dtype: str
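The new comments explain the intent: `Column.TYPE_LABELS` no longer carries no-op entries, and an adapter that wants `translate_type` conversions supplies its own ClassVar. A sketch of that pattern; the `translate_type` lookup shown here is an assumption about its behavior, not the method's actual body:

```python
from dataclasses import dataclass
from typing import ClassVar, Dict

# Sketch of the pattern the comments above describe: subclasses override
# the TYPE_LABELS ClassVar with their own conversions.
@dataclass
class Column:
    TYPE_LABELS: ClassVar[Dict[str, str]] = {
        "STRING": "TEXT",
    }
    column: str
    dtype: str

    @classmethod
    def translate_type(cls, dtype: str) -> str:
        # Assumed behavior: a simple lookup that passes unknown types through.
        return cls.TYPE_LABELS.get(dtype.upper(), dtype)

class MyAdapterColumn(Column):
    # Hypothetical adapter-specific conversions; no-op entries are gone
    # from the base class, so each adapter declares only what it needs.
    TYPE_LABELS: ClassVar[Dict[str, str]] = {
        "STRING": "VARCHAR",
        "INTEGER": "INT64",
    }

print(MyAdapterColumn.translate_type("integer"))  # INT64
print(Column.translate_type("integer"))           # integer (passed through)
```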
@@ -6,6 +6,7 @@ import traceback
 
 # multiprocessing.RLock is a function returning this type
 from multiprocessing.synchronize import RLock
+from multiprocessing.context import SpawnContext
 from threading import get_ident
 from typing import (
     Any,
@@ -23,8 +24,9 @@ from typing import (
 
 import agate
 
-import dbt.exceptions
-from dbt.contracts.connection import (
+import dbt.adapters.exceptions
+import dbt.common.exceptions.base
+from dbt.adapters.contracts.connection import (
     Connection,
     Identifier,
     ConnectionState,
@@ -36,9 +38,9 @@ from dbt.contracts.graph.manifest import Manifest
 from dbt.adapters.base.query_headers import (
     MacroQueryStringSetter,
 )
-from dbt.events import AdapterLogger
-from dbt.events.functions import fire_event
-from dbt.events.types import (
+from dbt.adapters.events.logging import AdapterLogger
+from dbt.common.events.functions import fire_event
+from dbt.adapters.events.types import (
     NewConnection,
     ConnectionReused,
     ConnectionLeftOpenInCleanup,
@@ -48,9 +50,8 @@ from dbt.events.types import (
     Rollback,
     RollbackFailed,
 )
-from dbt.events.contextvars import get_node_info
-from dbt import flags
-from dbt.utils import cast_to_str
+from dbt.common.events.contextvars import get_node_info
+from dbt.common.utils import cast_to_str
 
 SleepTime = Union[int, float]  # As taken by time.sleep.
 AdapterHandle = Any  # Adapter connection handle objects can be any class.
@@ -72,10 +73,10 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
 
     TYPE: str = NotImplemented
 
-    def __init__(self, profile: AdapterRequiredConfig):
+    def __init__(self, profile: AdapterRequiredConfig, mp_context: SpawnContext) -> None:
         self.profile = profile
         self.thread_connections: Dict[Hashable, Connection] = {}
-        self.lock: RLock = flags.MP_CONTEXT.RLock()
+        self.lock: RLock = mp_context.RLock()
         self.query_header: Optional[MacroQueryStringSetter] = None
 
     def set_query_header(self, manifest: Manifest) -> None:
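The constructor change above replaces the global `flags.MP_CONTEXT` with an injected spawn context. A runnable sketch of the same dependency-injection shape, without dbt imports (`ConnectionManager` here is a stand-in, not the real class):

```python
from multiprocessing import get_context
from multiprocessing.context import SpawnContext


class ConnectionManager:
    # Mirrors the new signature: the caller owns the multiprocessing
    # context and passes it in, rather than the manager reading a global.
    def __init__(self, profile: dict, mp_context: SpawnContext) -> None:
        self.profile = profile
        self.lock = mp_context.RLock()


manager = ConnectionManager({"threads": 4}, get_context("spawn"))
with manager.lock:
    print("lock acquired via the injected spawn context")
```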
@@ -91,13 +92,15 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
         key = self.get_thread_identifier()
         with self.lock:
             if key not in self.thread_connections:
-                raise dbt.exceptions.InvalidConnectionError(key, list(self.thread_connections))
+                raise dbt.adapters.exceptions.InvalidConnectionError(
+                    key, list(self.thread_connections)
+                )
             return self.thread_connections[key]
 
     def set_thread_connection(self, conn: Connection) -> None:
         key = self.get_thread_identifier()
         if key in self.thread_connections:
-            raise dbt.exceptions.DbtInternalError(
+            raise dbt.common.exceptions.DbtInternalError(
                 "In set_thread_connection, existing connection exists for {}"
             )
         self.thread_connections[key] = conn
@@ -137,13 +140,13 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
         :return: A context manager that handles exceptions raised by the
             underlying database.
         """
-        raise dbt.exceptions.NotImplementedError(
+        raise dbt.common.exceptions.base.NotImplementedError(
             "`exception_handler` is not implemented for this adapter!"
         )
 
     def set_connection_name(self, name: Optional[str] = None) -> Connection:
         """Called by 'acquire_connection' in BaseAdapter, which is called by
-        'connection_named', called by 'connection_for(node)'.
+        'connection_named'.
         Creates a connection for this thread if one doesn't already
         exist, and will rename an existing connection."""
 
@@ -220,14 +223,14 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
         :param int _attempts: Parameter used to keep track of the number of attempts in calling the
             connect function across recursive calls. Passed as an argument to retry_timeout if it
             is a Callable. This parameter should not be set by the initial caller.
-        :raises dbt.exceptions.FailedToConnectError: Upon exhausting all retry attempts without
+        :raises dbt.adapters.exceptions.FailedToConnectError: Upon exhausting all retry attempts without
             successfully acquiring a handle.
         :return: The given connection with its appropriate state and handle attributes set
             depending on whether we successfully acquired a handle or not.
         """
         timeout = retry_timeout(_attempts) if callable(retry_timeout) else retry_timeout
         if timeout < 0:
-            raise dbt.exceptions.FailedToConnectError(
+            raise dbt.adapters.exceptions.FailedToConnectError(
                 "retry_timeout cannot be negative or return a negative time."
             )
 
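Because `retry_timeout` may be a callable that receives the attempt count, backoff policies stay one-liners. A sketch of a policy that would fit this contract (the 30-second cap is an arbitrary choice for the example):

```python
def exponential_backoff(attempt: int) -> float:
    # Passed as retry_timeout=...; called with the current attempt number
    # before each retry, so the wait doubles until it hits the cap.
    return float(min(2**attempt, 30))


print([exponential_backoff(n) for n in range(6)])  # [1.0, 2.0, 4.0, 8.0, 16.0, 30.0]
```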
@@ -235,7 +238,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
             # This guard is not perfect others may add to the recursion limit (e.g. built-ins).
             connection.handle = None
             connection.state = ConnectionState.FAIL
-            raise dbt.exceptions.FailedToConnectError("retry_limit cannot be negative")
+            raise dbt.adapters.exceptions.FailedToConnectError("retry_limit cannot be negative")
 
         try:
             connection.handle = connect()
@@ -246,7 +249,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
             if retry_limit <= 0:
                 connection.handle = None
                 connection.state = ConnectionState.FAIL
-                raise dbt.exceptions.FailedToConnectError(str(e))
+                raise dbt.adapters.exceptions.FailedToConnectError(str(e))
 
             logger.debug(
                 f"Got a retryable error when attempting to open a {cls.TYPE} connection.\n"
@@ -268,12 +271,12 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
         except Exception as e:
             connection.handle = None
             connection.state = ConnectionState.FAIL
-            raise dbt.exceptions.FailedToConnectError(str(e))
+            raise dbt.adapters.exceptions.FailedToConnectError(str(e))
 
     @abc.abstractmethod
     def cancel_open(self) -> Optional[List[str]]:
         """Cancel all open connections on the adapter. (passable)"""
-        raise dbt.exceptions.NotImplementedError(
+        raise dbt.common.exceptions.base.NotImplementedError(
             "`cancel_open` is not implemented for this adapter!"
         )
 
@@ -288,7 +291,9 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
         This should be thread-safe, or hold the lock if necessary. The given
         connection should not be in either in_use or available.
         """
-        raise dbt.exceptions.NotImplementedError("`open` is not implemented for this adapter!")
+        raise dbt.common.exceptions.base.NotImplementedError(
+            "`open` is not implemented for this adapter!"
+        )
 
     def release(self) -> None:
         with self.lock:
@@ -320,12 +325,16 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
     @abc.abstractmethod
     def begin(self) -> None:
         """Begin a transaction. (passable)"""
-        raise dbt.exceptions.NotImplementedError("`begin` is not implemented for this adapter!")
+        raise dbt.common.exceptions.base.NotImplementedError(
+            "`begin` is not implemented for this adapter!"
+        )
 
     @abc.abstractmethod
     def commit(self) -> None:
         """Commit a transaction. (passable)"""
-        raise dbt.exceptions.NotImplementedError("`commit` is not implemented for this adapter!")
+        raise dbt.common.exceptions.base.NotImplementedError(
+            "`commit` is not implemented for this adapter!"
+        )
 
     @classmethod
     def _rollback_handle(cls, connection: Connection) -> None:
@@ -361,7 +370,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
     def _rollback(cls, connection: Connection) -> None:
         """Roll back the given connection."""
         if connection.transaction_open is False:
-            raise dbt.exceptions.DbtInternalError(
+            raise dbt.common.exceptions.DbtInternalError(
                 f"Tried to rollback transaction on connection "
                 f'"{connection.name}", but it does not have one open!'
             )
@@ -400,7 +409,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
 
     @abc.abstractmethod
     def execute(
-        self, sql: str, auto_begin: bool = False, fetch: bool = False
+        self, sql: str, auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None
     ) -> Tuple[AdapterResponse, agate.Table]:
         """Execute the given SQL.
 
@@ -408,7 +417,30 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
         :param bool auto_begin: If set, and dbt is not currently inside a
             transaction, automatically begin one.
         :param bool fetch: If set, fetch results.
+        :param int limit: If set, limits the result set
         :return: A tuple of the query status and results (empty if fetch=False).
         :rtype: Tuple[AdapterResponse, agate.Table]
         """
-        raise dbt.exceptions.NotImplementedError("`execute` is not implemented for this adapter!")
+        raise dbt.common.exceptions.base.NotImplementedError(
+            "`execute` is not implemented for this adapter!"
+        )
 
+    def add_select_query(self, sql: str) -> Tuple[Connection, Any]:
+        """
+        This was added here because base.impl.BaseAdapter.get_column_schema_from_query expects it to be here.
+        That method wouldn't work unless the adapter used sql.impl.SQLAdapter, sql.connections.SQLConnectionManager
+        or defined this method on <Adapter>ConnectionManager before passing it in to <Adapter>Adapter.
+
+        See https://github.com/dbt-labs/dbt-core/issues/8396 for more information.
+        """
+        raise dbt.common.exceptions.base.NotImplementedError(
+            "`add_select_query` is not implemented for this adapter!"
+        )
+
+    @classmethod
+    def data_type_code_to_name(cls, type_code: Union[int, str]) -> str:
+        """Get the string representation of the data type from the type_code."""
+        # https://peps.python.org/pep-0249/#type-objects
+        raise dbt.common.exceptions.base.NotImplementedError(
+            "`data_type_code_to_name` is not implemented for this adapter!"
+        )
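`data_type_code_to_name` is the hook adapters override to turn PEP 249 cursor type codes into readable names. An illustrative override (the numeric codes below are sample values in the style of one driver, not something this diff prescribes):

```python
from typing import Union

# Hypothetical lookup table for one driver's cursor.description type codes.
_TYPE_CODE_NAMES = {16: "BOOLEAN", 23: "INTEGER", 25: "TEXT", 1114: "TIMESTAMP"}


def data_type_code_to_name(type_code: Union[int, str]) -> str:
    # Some drivers already hand back a name; pass those through.
    if isinstance(type_code, str):
        return type_code
    return _TYPE_CODE_NAMES.get(type_code, f"UNKNOWN({type_code})")


print(data_type_code_to_name(23))      # INTEGER
print(data_type_code_to_name("JSON"))  # JSON
```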
@@ -9,7 +9,6 @@ from typing import (
     Any,
     Callable,
     Dict,
-    Iterable,
     Iterator,
     List,
     Mapping,
@@ -17,39 +16,55 @@ from typing import (
     Set,
     Tuple,
     Type,
+    TypedDict,
     Union,
 )
+from multiprocessing.context import SpawnContext
 
-from dbt.contracts.graph.nodes import ColumnLevelConstraint, ConstraintType, ModelLevelConstraint
+from dbt.adapters.capability import Capability, CapabilityDict
+from dbt.common.contracts.constraints import (
+    ColumnLevelConstraint,
+    ConstraintType,
+    ModelLevelConstraint,
+)
+from dbt.adapters.contracts.macros import MacroResolver
 
 import agate
 import pytz
 
-from dbt.exceptions import (
+from dbt.adapters.exceptions import (
+    SnapshotTargetIncompleteError,
+    SnapshotTargetNotSnapshotTableError,
+    NullRelationDropAttemptedError,
+    NullRelationCacheAttemptedError,
+    RelationReturnedMultipleResultsError,
+    UnexpectedNonTimestampError,
+    RenameToNoneAttemptedError,
+    QuoteConfigTypeError,
+)
+
+from dbt.common.exceptions import (
+    NotImplementedError,
     DbtInternalError,
     DbtRuntimeError,
     DbtValidationError,
+    UnexpectedNullError,
     MacroArgTypeError,
     MacroResultError,
-    NotImplementedError,
-    NullRelationCacheAttemptedError,
-    NullRelationDropAttemptedError,
-    QuoteConfigTypeError,
-    RelationReturnedMultipleResultsError,
-    RenameToNoneAttemptedError,
-    SnapshotTargetIncompleteError,
-    SnapshotTargetNotSnapshotTableError,
-    UnexpectedNonTimestampError,
-    UnexpectedNullError,
 )
 
-from dbt.adapters.protocol import AdapterConfig, ConnectionManagerProtocol
-from dbt.clients.agate_helper import empty_table, merge_tables, table_from_rows
-from dbt.clients.jinja import MacroGenerator
-from dbt.contracts.graph.manifest import Manifest, MacroManifest
-from dbt.contracts.graph.nodes import ResultNode
-from dbt.events.functions import fire_event, warn_or_error
-from dbt.events.types import (
+from dbt.adapters.protocol import AdapterConfig
+from dbt.common.clients.agate_helper import (
+    empty_table,
+    get_column_value_uncased,
+    merge_tables,
+    table_from_rows,
+    Integer,
+)
+from dbt.common.clients.jinja import CallableMacroGenerator
+from dbt.contracts.graph.manifest import Manifest
+from dbt.common.events.functions import fire_event, warn_or_error
+from dbt.adapters.events.types import (
     CacheMiss,
     ListRelations,
     CodeExecution,
@@ -58,9 +73,9 @@ from dbt.events.types import (
     ConstraintNotSupported,
     ConstraintNotEnforced,
 )
-from dbt.utils import filter_null_values, executor, cast_to_str, AttrDict
+from dbt.common.utils import filter_null_values, executor, cast_to_str, AttrDict
 
-from dbt.adapters.base.connections import Connection, AdapterResponse
+from dbt.adapters.base.connections import Connection, AdapterResponse, BaseConnectionManager
 from dbt.adapters.base.meta import AdapterMeta, available
 from dbt.adapters.base.relation import (
     ComponentName,
@@ -71,10 +86,13 @@ from dbt.adapters.base.relation import (
 from dbt.adapters.base import Column as BaseColumn
 from dbt.adapters.base import Credentials
 from dbt.adapters.cache import RelationsCache, _make_ref_key_dict
-from dbt import deprecations
+from dbt.adapters.events.types import CollectFreshnessReturnSignature
 
 
 GET_CATALOG_MACRO_NAME = "get_catalog"
+GET_CATALOG_RELATIONS_MACRO_NAME = "get_catalog_relations"
 FRESHNESS_MACRO_NAME = "collect_freshness"
+GET_RELATION_LAST_MODIFIED_MACRO_NAME = "get_relation_last_modified"
 
 
 class ConstraintSupport(str, Enum):
@@ -109,7 +127,7 @@ def _catalog_filter_schemas(manifest: Manifest) -> Callable[[agate.Row], bool]:
     return test
 
 
-def _utc(dt: Optional[datetime], source: BaseRelation, field_name: str) -> datetime:
+def _utc(dt: Optional[datetime], source: Optional[BaseRelation], field_name: str) -> datetime:
     """If dt has a timezone, return a new datetime that's in UTC. Otherwise,
     assume the datetime is already for UTC and add the timezone.
     """
@@ -161,6 +179,12 @@ class PythonJobHelper:
         raise NotImplementedError("PythonJobHelper submit function is not implemented yet")
 
 
+class FreshnessResponse(TypedDict):
+    max_loaded_at: datetime
+    snapshotted_at: datetime
+    age: float  # age in seconds
+
+
 class BaseAdapter(metaclass=AdapterMeta):
     """The BaseAdapter provides an abstract base class for adapters.
 
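`FreshnessResponse` gives the previously untyped freshness dict a checkable shape. A quick runnable illustration of a value that satisfies it:

```python
from datetime import datetime, timedelta, timezone
from typing import TypedDict


class FreshnessResponse(TypedDict):
    max_loaded_at: datetime
    snapshotted_at: datetime
    age: float  # age in seconds


now = datetime.now(timezone.utc)
loaded = now - timedelta(minutes=90)
freshness: FreshnessResponse = {
    "max_loaded_at": loaded,
    "snapshotted_at": now,
    "age": (now - loaded).total_seconds(),
}
print(freshness["age"])  # 5400.0
```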
@@ -208,7 +232,7 @@ class BaseAdapter(metaclass=AdapterMeta):
 
     Relation: Type[BaseRelation] = BaseRelation
     Column: Type[BaseColumn] = BaseColumn
-    ConnectionManager: Type[ConnectionManagerProtocol]
+    ConnectionManager: Type[BaseConnectionManager]
 
     # A set of clobber config fields accepted by this adapter
     # for use in materializations
@@ -222,11 +246,28 @@ class BaseAdapter(metaclass=AdapterMeta):
         ConstraintType.foreign_key: ConstraintSupport.ENFORCED,
     }
 
-    def __init__(self, config):
+    # This static member variable can be overriden in concrete adapter
+    # implementations to indicate adapter support for optional capabilities.
+    _capabilities = CapabilityDict({})
+
+    def __init__(self, config, mp_context: SpawnContext) -> None:
         self.config = config
-        self.cache = RelationsCache()
-        self.connections = self.ConnectionManager(config)
-        self._macro_manifest_lazy: Optional[MacroManifest] = None
+        self.cache = RelationsCache(log_cache_events=config.log_cache_events)
+        self.connections = self.ConnectionManager(config, mp_context)
+        self._macro_resolver: Optional[MacroResolver] = None
+
+    ###
+    # Methods to set / access a macro resolver
+    ###
+    def set_macro_resolver(self, macro_resolver: MacroResolver) -> None:
+        self._macro_resolver = macro_resolver
+
+    def get_macro_resolver(self) -> Optional[MacroResolver]:
+        return self._macro_resolver
+
+    def clear_macro_resolver(self) -> None:
+        if self._macro_resolver is not None:
+            self._macro_resolver = None
 
     ###
     # Methods that pass through to the connection manager
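With this change the adapter no longer builds its own `MacroManifest` (the old lazy-loading path through `dbt.parser.manifest` is deleted further down); whoever drives the adapter injects a resolver and clears it when done. A dependency-free sketch of that lifecycle (class names here are stand-ins):

```python
from typing import Optional


class StubResolver:
    """Stands in for anything satisfying the MacroResolver protocol."""


class Adapter:
    def __init__(self) -> None:
        self._macro_resolver: Optional[StubResolver] = None

    def set_macro_resolver(self, macro_resolver: StubResolver) -> None:
        self._macro_resolver = macro_resolver

    def get_macro_resolver(self) -> Optional[StubResolver]:
        return self._macro_resolver

    def clear_macro_resolver(self) -> None:
        self._macro_resolver = None


adapter = Adapter()
adapter.set_macro_resolver(StubResolver())  # e.g. once the manifest is parsed
assert adapter.get_macro_resolver() is not None
adapter.clear_macro_resolver()              # e.g. at the end of the invocation
assert adapter.get_macro_resolver() is None
```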
@@ -256,10 +297,10 @@ class BaseAdapter(metaclass=AdapterMeta):
         return conn.name
 
     @contextmanager
-    def connection_named(self, name: str, node: Optional[ResultNode] = None) -> Iterator[None]:
+    def connection_named(self, name: str, query_header_context: Any = None) -> Iterator[None]:
         try:
             if self.connections.query_header is not None:
-                self.connections.query_header.set(name, node)
+                self.connections.query_header.set(name, query_header_context)
             self.acquire_connection(name)
             yield
         finally:
@@ -267,11 +308,6 @@ class BaseAdapter(metaclass=AdapterMeta):
             if self.connections.query_header is not None:
                 self.connections.query_header.reset()
 
-    @contextmanager
-    def connection_for(self, node: ResultNode) -> Iterator[None]:
-        with self.connection_named(node.unique_id, node):
-            yield
-
     @available.parse(lambda *a, **k: ("", empty_table()))
     def execute(
         self, sql: str, auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None
@@ -315,14 +351,21 @@ class BaseAdapter(metaclass=AdapterMeta):
 
     @available.parse(lambda *a, **k: ("", empty_table()))
     def get_partitions_metadata(self, table: str) -> Tuple[agate.Table]:
-        """Obtain partitions metadata for a BigQuery partitioned table.
+        """
+        TODO: Can we move this to dbt-bigquery?
+        Obtain partitions metadata for a BigQuery partitioned table.
 
-        :param str table_id: a partitioned table id, in standard SQL format.
+        :param str table: a partitioned table id, in standard SQL format.
         :return: a partition metadata tuple, as described in
             https://cloud.google.com/bigquery/docs/creating-partitioned-tables#getting_partition_metadata_using_meta_tables.
         :rtype: agate.Table
         """
-        return self.connections.get_partitions_metadata(table=table)
+        if hasattr(self.connections, "get_partitions_metadata"):
+            return self.connections.get_partitions_metadata(table=table)
+        else:
+            raise NotImplementedError(
+                "`get_partitions_metadata` is not implemented for this adapter!"
+            )
 
     ###
     # Methods that should never be overridden
@@ -337,39 +380,6 @@ class BaseAdapter(metaclass=AdapterMeta):
         """
         return cls.ConnectionManager.TYPE
 
-    @property
-    def _macro_manifest(self) -> MacroManifest:
-        if self._macro_manifest_lazy is None:
-            return self.load_macro_manifest()
-        return self._macro_manifest_lazy
-
-    def check_macro_manifest(self) -> Optional[MacroManifest]:
-        """Return the internal manifest (used for executing macros) if it's
-        been initialized, otherwise return None.
-        """
-        return self._macro_manifest_lazy
-
-    def load_macro_manifest(self, base_macros_only=False) -> MacroManifest:
-        # base_macros_only is for the test framework
-        if self._macro_manifest_lazy is None:
-            # avoid a circular import
-            from dbt.parser.manifest import ManifestLoader
-
-            manifest = ManifestLoader.load_macros(
-                self.config,
-                self.connections.set_query_header,
-                base_macros_only=base_macros_only,
-            )
-            # TODO CT-211
-            self._macro_manifest_lazy = manifest  # type: ignore[assignment]
-            # TODO CT-211
-            return self._macro_manifest_lazy  # type: ignore[return-value]
-
-    def clear_macro_manifest(self):
-        if self._macro_manifest_lazy is not None:
-            self._macro_manifest_lazy = None
-
     ###
     # Caching methods
     ###
     def _schema_is_cached(self, database: Optional[str], schema: str) -> bool:
@@ -393,7 +403,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         """
         # the cache only cares about executable nodes
         return {
-            self.Relation.create_from(self.config, node).without_identifier()
+            self.Relation.create_from(self.config, node).without_identifier()  # type: ignore[arg-type]
             for node in manifest.nodes.values()
             if (node.is_relational and not node.is_ephemeral_model and not node.is_external_node)
         }
@@ -408,7 +418,30 @@ class BaseAdapter(metaclass=AdapterMeta):
         lowercase strings.
         """
         info_schema_name_map = SchemaSearchMap()
-        nodes: Iterator[ResultNode] = chain(
+        relations = self._get_catalog_relations(manifest)
+        for relation in relations:
+            info_schema_name_map.add(relation)
+        # result is a map whose keys are information_schema Relations without
+        # identifiers that have appropriate database prefixes, and whose values
+        # are sets of lowercase schema names that are valid members of those
+        # databases
+        return info_schema_name_map
+
+    def _get_catalog_relations_by_info_schema(
+        self, relations
+    ) -> Dict[InformationSchema, List[BaseRelation]]:
+        relations_by_info_schema: Dict[InformationSchema, List[BaseRelation]] = dict()
+        for relation in relations:
+            info_schema = relation.information_schema_only()
+            if info_schema not in relations_by_info_schema:
+                relations_by_info_schema[info_schema] = []
+            relations_by_info_schema[info_schema].append(relation)
+
+        return relations_by_info_schema
+
+    def _get_catalog_relations(self, manifest: Manifest) -> List[BaseRelation]:
+
+        nodes = chain(
             [
                 node
                 for node in manifest.nodes.values()
@@ -416,14 +449,9 @@ class BaseAdapter(metaclass=AdapterMeta):
             ],
             manifest.sources.values(),
         )
-        for node in nodes:
-            relation = self.Relation.create_from(self.config, node)
-            info_schema_name_map.add(relation)
-        # result is a map whose keys are information_schema Relations without
-        # identifiers that have appropriate database prefixes, and whose values
-        # are sets of lowercase schema names that are valid members of those
-        # databases
-        return info_schema_name_map
+        relations = [self.Relation.create_from(self.config, n) for n in nodes]  # type: ignore[arg-type]
+        return relations
 
     def _relations_cache_for_schemas(
         self, manifest: Manifest, cache_schemas: Optional[Set[BaseRelation]] = None
@@ -453,8 +481,9 @@ class BaseAdapter(metaclass=AdapterMeta):
         # it's possible that there were no relations in some schemas. We want
         # to insert the schemas we query into the cache's `.schemas` attribute
         # so we can check it later
-        cache_update: Set[Tuple[Optional[str], Optional[str]]] = set()
+        cache_update: Set[Tuple[Optional[str], str]] = set()
         for relation in cache_schemas:
-            cache_update.add((relation.database, relation.schema))
+            if relation.schema:
+                cache_update.add((relation.database, relation.schema))
         self.cache.update_schemas(cache_update)
 
@@ -917,6 +946,17 @@ class BaseAdapter(metaclass=AdapterMeta):
         """
         raise NotImplementedError("`convert_number_type` is not implemented for this adapter!")
 
+    @classmethod
+    def convert_integer_type(cls, agate_table: agate.Table, col_idx: int) -> str:
+        """Return the type in the database that best maps to the agate.Number
+        type for the given agate table and column index.
+
+        :param agate_table: The table
+        :param col_idx: The index into the agate table for the column.
+        :return: The name of the type in the database
+        """
+        return "integer"
+
     @classmethod
     @abc.abstractmethod
     def convert_boolean_type(cls, agate_table: agate.Table, col_idx: int) -> str:
@@ -974,6 +1014,7 @@ class BaseAdapter(metaclass=AdapterMeta):
     def convert_agate_type(cls, agate_table: agate.Table, col_idx: int) -> Optional[str]:
         agate_type: Type = agate_table.column_types[col_idx]
         conversions: List[Tuple[Type, Callable[..., str]]] = [
+            (Integer, cls.convert_integer_type),
             (agate.Text, cls.convert_text_type),
             (agate.Number, cls.convert_number_type),
             (agate.Boolean, cls.convert_boolean_type),
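Ordering matters in that conversion list: the first `isinstance` hit wins, so the more specific `Integer` entry has to sit ahead of the generic number handler. A dependency-free sketch of the same dispatch (the stub classes stand in for the agate and dbt column types):

```python
from typing import Callable, List, Tuple, Type


class Number: ...
class Integer(Number): ...  # stand-in for the specific integer column type
class Boolean: ...


conversions: List[Tuple[Type, Callable[[], str]]] = [
    (Integer, lambda: "integer"),  # must precede Number to ever match
    (Number, lambda: "float8"),
    (Boolean, lambda: "boolean"),
]


def convert(col_type: object) -> str:
    for agate_type, func in conversions:
        if isinstance(col_type, agate_type):
            return func()
    return "text"  # fallback


print(convert(Integer()))  # integer
print(convert(Number()))   # float8
```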
@@ -993,11 +1034,10 @@ class BaseAdapter(metaclass=AdapterMeta):
     def execute_macro(
         self,
         macro_name: str,
-        manifest: Optional[Manifest] = None,
+        macro_resolver: Optional[MacroResolver] = None,
         project: Optional[str] = None,
         context_override: Optional[Dict[str, Any]] = None,
         kwargs: Optional[Dict[str, Any]] = None,
-        text_only_columns: Optional[Iterable[str]] = None,
     ) -> AttrDict:
         """Look macro_name up in the manifest and execute its results.
 
@@ -1017,13 +1057,11 @@ class BaseAdapter(metaclass=AdapterMeta):
         if context_override is None:
             context_override = {}
 
-        if manifest is None:
-            # TODO CT-211
-            manifest = self._macro_manifest  # type: ignore[assignment]
-        # TODO CT-211
-        macro = manifest.find_macro_by_name(  # type: ignore[union-attr]
-            macro_name, self.config.project_name, project
-        )
+        resolver = macro_resolver or self._macro_resolver
+        if resolver is None:
+            raise DbtInternalError("macro resolver was None when calling execute_macro!")
+
+        macro = resolver.find_macro_by_name(macro_name, self.config.project_name, project)
         if macro is None:
             if project is None:
                 package_name = "any package"
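The resolver lookup above is a small but breaking contract change: `execute_macro` now refuses to run without an injected resolver instead of lazily loading one. The fallback precedence is easy to test in isolation:

```python
from typing import Optional


class DbtInternalError(Exception):
    pass


def pick_resolver(macro_resolver: Optional[str], stored: Optional[str]) -> str:
    # Same precedence as the hunk above: the per-call argument wins,
    # then the resolver set on the adapter; None is an internal error.
    resolver = macro_resolver or stored
    if resolver is None:
        raise DbtInternalError("macro resolver was None when calling execute_macro!")
    return resolver


assert pick_resolver("per-call", "stored") == "per-call"
assert pick_resolver(None, "stored") == "stored"
```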
@@ -1043,12 +1081,12 @@ class BaseAdapter(metaclass=AdapterMeta):
             # TODO CT-211
             macro=macro,
             config=self.config,
-            manifest=manifest,  # type: ignore[arg-type]
+            manifest=resolver,  # type: ignore[arg-type]
             package_name=project,
         )
         macro_context.update(context_override)
 
-        macro_function = MacroGenerator(macro, macro_context)
+        macro_function = CallableMacroGenerator(macro, macro_context)
 
         with self.connections.exception_handler(f"macro {macro_name}"):
             result = macro_function(**kwargs)
@@ -1079,29 +1117,112 @@ class BaseAdapter(metaclass=AdapterMeta):
             kwargs=kwargs,
             # pass in the full manifest so we get any local project
             # overrides
-            manifest=manifest,
+            macro_resolver=manifest,
         )
 
         results = self._catalog_filter_table(table, manifest)  # type: ignore[arg-type]
         return results
 
-    def get_catalog(self, manifest: Manifest) -> Tuple[agate.Table, List[Exception]]:
-        schema_map = self._get_catalog_schemas(manifest)
+    def _get_one_catalog_by_relations(
+        self,
+        information_schema: InformationSchema,
+        relations: List[BaseRelation],
+        manifest: Manifest,
+    ) -> agate.Table:
+
+        kwargs = {
+            "information_schema": information_schema,
+            "relations": relations,
+        }
+        table = self.execute_macro(
+            GET_CATALOG_RELATIONS_MACRO_NAME,
+            kwargs=kwargs,
+            # pass in the full manifest, so we get any local project
+            # overrides
+            macro_resolver=manifest,
+        )
+
+        results = self._catalog_filter_table(table, manifest)  # type: ignore[arg-type]
+        return results
+
+    def get_filtered_catalog(
+        self, manifest: Manifest, relations: Optional[Set[BaseRelation]] = None
+    ):
+        catalogs: agate.Table
+        if (
+            relations is None
+            or len(relations) > 100
+            or not self.supports(Capability.SchemaMetadataByRelations)
+        ):
+            # Do it the traditional way. We get the full catalog.
+            catalogs, exceptions = self.get_catalog(manifest)
+        else:
+            # Do it the new way. We try to save time by selecting information
+            # only for the exact set of relations we are interested in.
+            catalogs, exceptions = self.get_catalog_by_relations(manifest, relations)
+
+        if relations and catalogs:
+            relation_map = {
+                (
+                    r.database.casefold() if r.database else None,
+                    r.schema.casefold() if r.schema else None,
+                    r.identifier.casefold() if r.identifier else None,
+                )
+                for r in relations
+            }
+
+            def in_map(row: agate.Row):
+                d = _expect_row_value("table_database", row)
+                s = _expect_row_value("table_schema", row)
+                i = _expect_row_value("table_name", row)
+                d = d.casefold() if d is not None else None
+                s = s.casefold() if s is not None else None
+                i = i.casefold() if i is not None else None
+                return (d, s, i) in relation_map
+
+            catalogs = catalogs.where(in_map)
+
+        return catalogs, exceptions
+
+    def row_matches_relation(self, row: agate.Row, relations: Set[BaseRelation]):
+        pass
+
+    def get_catalog(self, manifest: Manifest) -> Tuple[agate.Table, List[Exception]]:
         with executor(self.config) as tpe:
             futures: List[Future[agate.Table]] = []
+            schema_map: SchemaSearchMap = self._get_catalog_schemas(manifest)
             for info, schemas in schema_map.items():
                 if len(schemas) == 0:
                     continue
                 name = ".".join([str(info.database), "information_schema"])
 
                 fut = tpe.submit_connected(
                     self, name, self._get_one_catalog, info, schemas, manifest
                 )
                 futures.append(fut)
 
         catalogs, exceptions = catch_as_completed(futures)
+        return catalogs, exceptions
+
+    def get_catalog_by_relations(
+        self, manifest: Manifest, relations: Set[BaseRelation]
+    ) -> Tuple[agate.Table, List[Exception]]:
+        with executor(self.config) as tpe:
+            futures: List[Future[agate.Table]] = []
+            relations_by_schema = self._get_catalog_relations_by_info_schema(relations)
+            for info_schema in relations_by_schema:
+                name = ".".join([str(info_schema.database), "information_schema"])
+                relations = set(relations_by_schema[info_schema])
+                fut = tpe.submit_connected(
+                    self,
+                    name,
+                    self._get_one_catalog_by_relations,
+                    info_schema,
+                    relations,
+                    manifest,
+                )
+                futures.append(fut)
+
+            catalogs, exceptions = catch_as_completed(futures)
         return catalogs, exceptions
 
     def cancel_open_connections(self):
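`get_filtered_catalog` is the interesting decision point in this hunk: relation-level catalog queries are only attempted when a relation set exists, is reasonably small, and the adapter has declared the `SchemaMetadataByRelations` capability. The guard is simple enough to lift out and check directly:

```python
from typing import Optional, Set


def uses_relation_filtering(
    relations: Optional[Set[str]], supports_by_relation: bool, threshold: int = 100
) -> bool:
    # Mirrors the guard in get_filtered_catalog: anything that fails a
    # condition falls back to the traditional full-catalog path.
    return (
        relations is not None
        and len(relations) <= threshold
        and supports_by_relation
    )


print(uses_relation_filtering({"db.schema.table"}, True))   # True
print(uses_relation_filtering(None, True))                  # False: no relation set
print(uses_relation_filtering({"db.schema.table"}, False))  # False: capability missing
```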
@@ -1114,7 +1235,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         loaded_at_field: str,
         filter: Optional[str],
         manifest: Optional[Manifest] = None,
-    ) -> Tuple[Optional[AdapterResponse], Dict[str, Any]]:
+    ) -> Tuple[Optional[AdapterResponse], FreshnessResponse]:
         """Calculate the freshness of sources in dbt, and return it"""
         kwargs: Dict[str, Any] = {
             "source": source,
@@ -1129,9 +1250,9 @@ class BaseAdapter(metaclass=AdapterMeta):
             AttrDict,  # current: contains AdapterResponse + agate.Table
             agate.Table,  # previous: just table
         ]
-        result = self.execute_macro(FRESHNESS_MACRO_NAME, kwargs=kwargs, manifest=manifest)
+        result = self.execute_macro(FRESHNESS_MACRO_NAME, kwargs=kwargs, macro_resolver=manifest)
         if isinstance(result, agate.Table):
-            deprecations.warn("collect-freshness-return-signature")
+            warn_or_error(CollectFreshnessReturnSignature())
             adapter_response = None
             table = result
         else:
@@ -1149,13 +1270,52 @@ class BaseAdapter(metaclass=AdapterMeta):
 
         snapshotted_at = _utc(table[0][1], source, loaded_at_field)
         age = (snapshotted_at - max_loaded_at).total_seconds()
-        freshness = {
+        freshness: FreshnessResponse = {
             "max_loaded_at": max_loaded_at,
             "snapshotted_at": snapshotted_at,
             "age": age,
         }
         return adapter_response, freshness
 
+    def calculate_freshness_from_metadata(
+        self,
+        source: BaseRelation,
+        manifest: Optional[Manifest] = None,
+    ) -> Tuple[Optional[AdapterResponse], FreshnessResponse]:
+        kwargs: Dict[str, Any] = {
+            "information_schema": source.information_schema_only(),
+            "relations": [source],
+        }
+        result = self.execute_macro(
+            GET_RELATION_LAST_MODIFIED_MACRO_NAME, kwargs=kwargs, macro_resolver=manifest
+        )
+        adapter_response, table = result.response, result.table  # type: ignore[attr-defined]
+
+        try:
+            row = table[0]
+            last_modified_val = get_column_value_uncased("last_modified", row)
+            snapshotted_at_val = get_column_value_uncased("snapshotted_at", row)
+        except Exception:
+            raise MacroResultError(GET_RELATION_LAST_MODIFIED_MACRO_NAME, table)
+
+        if last_modified_val is None:
+            # Interpret missing value as "infinitely long ago"
+            max_loaded_at = datetime(1, 1, 1, 0, 0, 0, tzinfo=pytz.UTC)
+        else:
+            max_loaded_at = _utc(last_modified_val, None, "last_modified")
+
+        snapshotted_at = _utc(snapshotted_at_val, None, "snapshotted_at")
+
+        age = (snapshotted_at - max_loaded_at).total_seconds()
+
+        freshness: FreshnessResponse = {
+            "max_loaded_at": max_loaded_at,
+            "snapshotted_at": snapshotted_at,
+            "age": age,
+        }
+
+        return adapter_response, freshness
+
     def pre_model_hook(self, config: Mapping[str, Any]) -> Any:
         """A hook for running some operation before the model materialization
         runs. The hook can assume it has a connection available.
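The metadata-based path reuses the same age arithmetic as the query-based one, with `datetime(1, 1, 1)` as a "never loaded" sentinel. A runnable check of what that sentinel does to the age (pytz assumed installed, as in the imports above):

```python
from datetime import datetime, timezone

import pytz

snapshotted_at = datetime.now(timezone.utc)
max_loaded_at = datetime(1, 1, 1, 0, 0, 0, tzinfo=pytz.UTC)  # the sentinel
age = (snapshotted_at - max_loaded_at).total_seconds()

# A missing last_modified therefore reads as maximally stale:
print(age > 100 * 365 * 24 * 60 * 60)  # True: far more than a century of seconds
```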
@@ -1181,11 +1341,6 @@ class BaseAdapter(metaclass=AdapterMeta):
         """
         pass
 
-    def get_compiler(self):
-        from dbt.compilation import Compiler
-
-        return Compiler(self.config)
-
     # Methods used in adapter tests
     def update_column_sql(
         self,
@@ -1305,7 +1460,7 @@ class BaseAdapter(metaclass=AdapterMeta):
 
         strategy = strategy.replace("+", "_")
         macro_name = f"get_incremental_{strategy}_sql"
-        # The model_context should have MacroGenerator callable objects for all macros
+        # The model_context should have callable objects for all macros
         if macro_name not in model_context:
             raise DbtRuntimeError(
                 'dbt could not find an incremental strategy macro with the name "{}" in {}'.format(
@@ -1429,6 +1584,14 @@ class BaseAdapter(metaclass=AdapterMeta):
         else:
             return None
 
+    @classmethod
+    def capabilities(cls) -> CapabilityDict:
+        return cls._capabilities
+
+    @classmethod
+    def supports(cls, capability: Capability) -> bool:
+        return bool(cls.capabilities()[capability])
+
 
 COLUMNS_EQUAL_SQL = """
 with diff_count as (
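Together with the `_capabilities` class attribute added earlier, this gives callers a uniform feature probe. A hedged sketch of the declaration a concrete adapter would make, assuming the `dbt.adapters.capability` module this branch imports behaves as its names suggest:

```python
from dbt.adapters.capability import Capability, CapabilityDict, CapabilitySupport, Support

# Declared the way a concrete adapter would set _capabilities:
caps = CapabilityDict(
    {Capability.SchemaMetadataByRelations: CapabilitySupport(support=Support.Full)}
)

# supports() reduces to a truthiness check on the entry:
print(bool(caps[Capability.SchemaMetadataByRelations]))  # True
```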
@@ -1,9 +1,8 @@
 import abc
 from functools import wraps
 from typing import Callable, Optional, Any, FrozenSet, Dict, Set
-from dbt.deprecations import warn, renamed_method
+from dbt.common.events.functions import warn_or_error
+from dbt.adapters.events.types import AdapterDeprecationWarning
 
 
 Decorator = Callable[[Any], Callable]
 
@@ -62,11 +61,12 @@ class _Available:
 
         def wrapper(func):
             func_name = func.__name__
-            renamed_method(func_name, supported_name)
 
             @wraps(func)
             def inner(*args, **kwargs):
-                warn("adapter:{}".format(func_name))
+                warn_or_error(
+                    AdapterDeprecationWarning(old_name=func_name, new_name=supported_name)
+                )
                 return func(*args, **kwargs)
 
             if parse_replacement:
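The deprecation path now emits a structured event instead of the old string-keyed `dbt.deprecations` registry. The wrapper mechanics themselves are plain `functools.wraps`, sketched standalone here (the print stands in for the structured warning):

```python
from functools import wraps


def deprecated(supported_name: str):
    def wrapper(func):
        @wraps(func)
        def inner(*args, **kwargs):
            # Stand-in for warn_or_error(AdapterDeprecationWarning(...)):
            print(f"deprecation: use {supported_name} instead of {func.__name__}")
            return func(*args, **kwargs)

        return inner

    return wrapper


@deprecated("list_relations_without_caching")
def list_relations(schema: str) -> list:
    return []


list_relations("analytics")  # warns once per call, then delegates
```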
@@ -93,7 +93,7 @@ class AdapterMeta(abc.ABCMeta):
     _available_: FrozenSet[str]
     _parse_replacements_: Dict[str, Callable]
 
-    def __new__(mcls, name, bases, namespace, **kwargs):
+    def __new__(mcls, name, bases, namespace, **kwargs) -> "AdapterMeta":
         # mypy does not like the `**kwargs`. But `ABCMeta` itself takes
         # `**kwargs` in its argspec here (and passes them to `type.__new__`.
         # I'm not sure there is any benefit to it after poking around a bit,
@@ -1,20 +1,10 @@
 from typing import List, Optional, Type
+from pathlib import Path
 
 from dbt.adapters.base import Credentials
-from dbt.exceptions import CompilationError
 from dbt.adapters.protocol import AdapterProtocol
 
 
-def project_name_from_path(include_path: str) -> str:
-    # avoid an import cycle
-    from dbt.config.project import PartialProject
-
-    partial = PartialProject.from_project_root(include_path)
-    if partial.project_name is None:
-        raise CompilationError(f"Invalid project at {include_path}: name not set!")
-    return partial.project_name
-
-
 class AdapterPlugin:
     """Defines the basic requirements for a dbt adapter plugin.
 
@@ -29,12 +19,13 @@ class AdapterPlugin:
         credentials: Type[Credentials],
         include_path: str,
         dependencies: Optional[List[str]] = None,
-    ):
+        project_name: Optional[str] = None,
+    ) -> None:
 
         self.adapter: Type[AdapterProtocol] = adapter
         self.credentials: Type[Credentials] = credentials
         self.include_path: str = include_path
-        self.project_name: str = project_name_from_path(include_path)
+        self.project_name: str = project_name or f"dbt_{Path(include_path).name}"
         self.dependencies: List[str]
         if dependencies is None:
             self.dependencies = []
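Dropping `project_name_from_path` removes the plugin's import-time dependency on dbt's project parser; the name is now either passed explicitly or derived from the include path. The fallback is a one-liner worth seeing evaluated:

```python
from pathlib import Path

include_path = "/site-packages/dbt/include/postgres"  # illustrative path
project_name = None  # i.e. the plugin did not pass one

# Same expression as in the hunk above:
resolved = project_name or f"dbt_{Path(include_path).name}"
print(resolved)  # dbt_postgres
```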
@@ -1,21 +1,20 @@
 from threading import local
 from typing import Optional, Callable, Dict, Any
 
-from dbt.clients.jinja import QueryStringGenerator
+from dbt.adapters.clients.jinja import QueryStringGenerator
 
 from dbt.context.manifest import generate_query_header_context
-from dbt.contracts.connection import AdapterRequiredConfig, QueryComment
-from dbt.contracts.graph.nodes import ResultNode
+from dbt.adapters.contracts.connection import AdapterRequiredConfig, QueryComment
 from dbt.contracts.graph.manifest import Manifest
-from dbt.exceptions import DbtRuntimeError
+from dbt.common.exceptions import DbtRuntimeError
 
 
-class NodeWrapper:
-    def __init__(self, node):
-        self._inner_node = node
+class QueryHeaderContextWrapper:
+    def __init__(self, context) -> None:
+        self._inner_context = context
 
     def __getattr__(self, name):
-        return getattr(self._inner_node, name, "")
+        return getattr(self._inner_context, name, "")
 
 
 class _QueryComment(local):
@@ -25,9 +24,9 @@ class _QueryComment(local):
     - a source_name indicating what set the current thread's query comment
     """
 
-    def __init__(self, initial):
+    def __init__(self, initial) -> None:
         self.query_comment: Optional[str] = initial
-        self.append = False
+        self.append: bool = False
 
     def add(self, sql: str) -> str:
         if not self.query_comment:
@@ -53,11 +52,11 @@ class _QueryComment(local):
         self.append = append
 
 
-QueryStringFunc = Callable[[str, Optional[NodeWrapper]], str]
+QueryStringFunc = Callable[[str, Optional[QueryHeaderContextWrapper]], str]
 
 
 class MacroQueryStringSetter:
-    def __init__(self, config: AdapterRequiredConfig, manifest: Manifest):
+    def __init__(self, config: AdapterRequiredConfig, manifest: Manifest) -> None:
         self.manifest = manifest
         self.config = config
 
@@ -90,10 +89,10 @@ class MacroQueryStringSetter:
     def reset(self):
         self.set("master", None)
 
-    def set(self, name: str, node: Optional[ResultNode]):
-        wrapped: Optional[NodeWrapper] = None
-        if node is not None:
-            wrapped = NodeWrapper(node)
+    def set(self, name: str, query_header_context: Any):
+        wrapped: Optional[QueryHeaderContextWrapper] = None
+        if query_header_context is not None:
+            wrapped = QueryHeaderContextWrapper(query_header_context)
         comment_str = self.generator(name, wrapped)
 
         append = False
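The rename widens query headers from "a node" to "any context object", and the wrapper's `__getattr__` keeps comment templates total: attributes that are absent render as empty strings rather than raising. A self-contained check of that behavior:

```python
class QueryHeaderContextWrapper:
    # Same shape as the class above.
    def __init__(self, context) -> None:
        self._inner_context = context

    def __getattr__(self, name):
        return getattr(self._inner_context, name, "")


class FakeContext:
    name = "my_model"


wrapped = QueryHeaderContextWrapper(FakeContext())
print(wrapped.name)             # my_model
print(repr(wrapped.unique_id))  # '' -- missing attribute, no AttributeError
```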
@@ -1,9 +1,9 @@
 from collections.abc import Hashable
 from dataclasses import dataclass, field
-from typing import Optional, TypeVar, Any, Type, Dict, Iterator, Tuple, Set
+from typing import Optional, TypeVar, Any, Type, Dict, Iterator, Tuple, Set, Union, FrozenSet
 
-from dbt.contracts.graph.nodes import SourceDefinition, ManifestNode, ResultNode, ParsedNode
-from dbt.contracts.relation import (
+from dbt.adapters.contracts.relation import (
+    RelationConfig,
     RelationType,
     ComponentName,
     HasQuoting,
@@ -11,18 +11,15 @@ from dbt.contracts.relation import (
     Policy,
     Path,
 )
-from dbt.exceptions import (
-    ApproximateMatchError,
-    DbtInternalError,
-    MultipleDatabasesNotAllowedError,
-)
-from dbt.node_types import NodeType
-from dbt.utils import filter_null_values, deep_merge, classproperty
+from dbt.adapters.exceptions import MultipleDatabasesNotAllowedError, ApproximateMatchError
+from dbt.common.utils import filter_null_values, deep_merge
+from dbt.adapters.utils import classproperty
 
-import dbt.exceptions
+import dbt.common.exceptions
 
 
 Self = TypeVar("Self", bound="BaseRelation")
+SerializableIterable = Union[Tuple, FrozenSet]
 
 
 @dataclass(frozen=True, eq=False, repr=False)
@@ -36,6 +33,18 @@ class BaseRelation(FakeAPIObject, Hashable):
     quote_policy: Policy = field(default_factory=lambda: Policy())
     dbt_created: bool = False
 
+    # register relation types that can be renamed for the purpose of replacing relations using stages and backups
+    # adding a relation type here also requires defining the associated rename macro
+    # e.g. adding RelationType.View in dbt-postgres requires that you define:
+    # include/postgres/macros/relations/view/rename.sql::postgres__get_rename_view_sql()
+    renameable_relations: SerializableIterable = ()
+
+    # register relation types that are atomically replaceable, e.g. they have "create or replace" syntax
+    # adding a relation type here also requires defining the associated replace macro
+    # e.g. adding RelationType.View in dbt-postgres requires that you define:
+    # include/postgres/macros/relations/view/replace.sql::postgres__get_replace_view_sql()
+    replaceable_relations: SerializableIterable = ()
+
     def _is_exactish_match(self, field: ComponentName, value: str) -> bool:
         if self.dbt_created and self.quote_policy.get_part(field) is False:
             return self.path.get_lowered_part(field) == value.lower()
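Adapters opt relation types into rename/replace strategies by listing them in these class attributes; the tuple-or-frozenset constraint keeps the frozen dataclass hashable and serializable. A dependency-free sketch of the registration pattern (the class below is a stand-in, not dbt's BaseRelation):

```python
from dataclasses import dataclass, field
from typing import FrozenSet


@dataclass(frozen=True)
class PostgresStyleRelation:
    # A frozenset default keeps the frozen dataclass hashable.
    renameable_relations: FrozenSet[str] = field(
        default_factory=lambda: frozenset({"table", "view"})
    )

    def can_be_renamed(self, relation_type: str) -> bool:
        # An adapter listing "view" here is also promising that a
        # corresponding rename macro exists, per the comments above.
        return relation_type in self.renameable_relations


rel = PostgresStyleRelation()
print(rel.can_be_renamed("view"))               # True
print(rel.can_be_renamed("materialized_view"))  # False until a macro is added
```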
@@ -87,7 +96,7 @@ class BaseRelation(FakeAPIObject, Hashable):
 
         if not search:
             # nothing was passed in
-            raise dbt.exceptions.DbtRuntimeError(
+            raise dbt.common.exceptions.DbtRuntimeError(
                 "Tried to match relation, but no search path was passed!"
             )
 
@@ -169,7 +178,6 @@ class BaseRelation(FakeAPIObject, Hashable):
         return self.include(identifier=False).replace_path(identifier=None)
 
     def _render_iterator(self) -> Iterator[Tuple[Optional[ComponentName], Optional[str]]]:
-
         for key in ComponentName:
             path_part: Optional[str] = None
             if self.include_policy.get_part(key):
@@ -188,83 +196,50 @@ class BaseRelation(FakeAPIObject, Hashable):
             identifier=identifier,
         )
 
-    @classmethod
-    def create_from_source(cls: Type[Self], source: SourceDefinition, **kwargs: Any) -> Self:
-        source_quoting = source.quoting.to_dict(omit_none=True)
-        source_quoting.pop("column", None)
-        quote_policy = deep_merge(
-            cls.get_default_quote_policy().to_dict(omit_none=True),
-            source_quoting,
-            kwargs.get("quote_policy", {}),
-        )
-
-        return cls.create(
-            database=source.database,
-            schema=source.schema,
-            identifier=source.identifier,
-            quote_policy=quote_policy,
-            **kwargs,
-        )
-
     @staticmethod
     def add_ephemeral_prefix(name: str):
         return f"__dbt__cte__{name}"
 
     @classmethod
-    def create_ephemeral_from_node(
+    def create_ephemeral_from(
         cls: Type[Self],
-        config: HasQuoting,
-        node: ManifestNode,
+        relation_config: RelationConfig,
     ) -> Self:
         # Note that ephemeral models are based on the name.
-        identifier = cls.add_ephemeral_prefix(node.name)
+        identifier = cls.add_ephemeral_prefix(relation_config.name)
         return cls.create(
             type=cls.CTE,
             identifier=identifier,
         ).quote(identifier=False)
 
     @classmethod
-    def create_from_node(
+    def create_from(
         cls: Type[Self],
-        config: HasQuoting,
-        node,
-        quote_policy: Optional[Dict[str, bool]] = None,
+        quoting: HasQuoting,
+        relation_config: RelationConfig,
         **kwargs: Any,
     ) -> Self:
-        if quote_policy is None:
-            quote_policy = {}
-
-        quote_policy = dbt.utils.merge(config.quoting, quote_policy)
+        quote_policy = kwargs.pop("quote_policy", {})
+
+        config_quoting = relation_config.quoting_dict
+        config_quoting.pop("column", None)
+
+        # precedence: kwargs quoting > relation config quoting > base quoting > default quoting
+        quote_policy = deep_merge(
+            cls.get_default_quote_policy().to_dict(omit_none=True),
+            quoting.quoting,
+            config_quoting,
+            quote_policy,
+        )
 
         return cls.create(
-            database=node.database,
-            schema=node.schema,
-            identifier=node.alias,
+            database=relation_config.database,
+            schema=relation_config.schema,
+            identifier=relation_config.identifier,
            quote_policy=quote_policy,
             **kwargs,
         )
 
-    @classmethod
-    def create_from(
-        cls: Type[Self],
-        config: HasQuoting,
-        node: ResultNode,
-        **kwargs: Any,
-    ) -> Self:
-        if node.resource_type == NodeType.Source:
-            if not isinstance(node, SourceDefinition):
-                raise DbtInternalError(
-                    "type mismatch, expected SourceDefinition but got {}".format(type(node))
-                )
-            return cls.create_from_source(node, **kwargs)
-        else:
-            # Can't use ManifestNode here because of parameterized generics
-            if not isinstance(node, (ParsedNode)):
-                raise DbtInternalError(
-                    f"type mismatch, expected ManifestNode but got {type(node)}"
-                )
-            return cls.create_from_node(config, node, **kwargs)
-
     @classmethod
     def create(
         cls: Type[Self],
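The precedence comment is the behavioral heart of the new create_from; here is a self-contained sketch of how a later-wins deep merge produces that precedence. This deep_merge stand-in and the sample dicts are assumptions for illustration (dbt's own helper lives in dbt.common.utils):

def deep_merge(*dicts):
    # later dicts win on key conflicts; nested dicts merge recursively
    out = {}
    for d in dicts:
        for k, v in d.items():
            if isinstance(v, dict) and isinstance(out.get(k), dict):
                out[k] = deep_merge(out[k], v)
            else:
                out[k] = v
    return out


default_quoting = {"database": True, "schema": True, "identifier": True}
base_quoting = {"schema": False}        # quoting.quoting in the diff
config_quoting = {"identifier": False}  # relation_config.quoting_dict
kwargs_quoting = {"identifier": True}   # quote_policy popped from kwargs

# kwargs > relation config > base > default, exactly as the comment states
print(deep_merge(default_quoting, base_quoting, config_quoting, kwargs_quoting))
# {'database': True, 'schema': False, 'identifier': True}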
@@ -286,6 +261,14 @@ class BaseRelation(FakeAPIObject, Hashable):
         )
         return cls.from_dict(kwargs)
 
+    @property
+    def can_be_renamed(self) -> bool:
+        return self.type in self.renameable_relations
+
+    @property
+    def can_be_replaced(self) -> bool:
+        return self.type in self.replaceable_relations
+
     def __repr__(self) -> str:
         return "<{} {}>".format(self.__class__.__name__, self.render())
 
@@ -366,7 +349,7 @@ class InformationSchema(BaseRelation):
 
     def __post_init__(self):
         if not isinstance(self.information_schema_view, (type(None), str)):
-            raise dbt.exceptions.CompilationError(
+            raise dbt.common.exceptions.CompilationError(
                 "Got an invalid name: {}".format(self.information_schema_view)
             )
 
@@ -439,11 +422,11 @@ class SchemaSearchMap(Dict[InformationSchema, Set[Optional[str]]]):
         self[key].add(schema)
 
     def search(self) -> Iterator[Tuple[InformationSchema, Optional[str]]]:
-        for information_schema_name, schemas in self.items():
+        for information_schema, schemas in self.items():
             for schema in schemas:
-                yield information_schema_name, schema
+                yield information_schema, schema
 
-    def flatten(self, allow_multiple_databases: bool = False):
+    def flatten(self, allow_multiple_databases: bool = False) -> "SchemaSearchMap":
         new = self.__class__()
 
         # make sure we don't have multiple databases if allow_multiple_databases is set to False
@@ -7,17 +7,16 @@ from dbt.adapters.reference_keys import (
     _make_ref_key_dict,
     _ReferenceKey,
 )
-from dbt.exceptions import (
-    DependentLinkNotCachedError,
+from dbt.common.exceptions.cache import (
     NewNameAlreadyInCacheError,
-    NoneRelationFoundError,
     ReferencedLinkNotCachedError,
+    DependentLinkNotCachedError,
     TruncatedModelNameCausedCollisionError,
+    NoneRelationFoundError,
 )
-from dbt.events.functions import fire_event, fire_event_if
-from dbt.events.types import CacheAction, CacheDumpGraph
-from dbt.flags import get_flags
-from dbt.utils import lowercase
+from dbt.common.events.functions import fire_event, fire_event_if
+from dbt.adapters.events.types import CacheAction, CacheDumpGraph
+from dbt.common.utils.formatting import lowercase
 
 
 def dot_separated(key: _ReferenceKey) -> str:
@@ -38,8 +37,8 @@ class _CachedRelation:
     :attr BaseRelation inner: The underlying dbt relation.
     """
 
-    def __init__(self, inner):
-        self.referenced_by = {}
+    def __init__(self, inner) -> None:
+        self.referenced_by: Dict[_ReferenceKey, _CachedRelation] = {}
         self.inner = inner
 
     def __str__(self) -> str:
@@ -165,10 +164,11 @@ class RelationsCache:
     :attr Set[str] schemas: The set of known/cached schemas, all lowercased.
     """
 
-    def __init__(self) -> None:
+    def __init__(self, log_cache_events: bool = False) -> None:
         self.relations: Dict[_ReferenceKey, _CachedRelation] = {}
         self.lock = threading.RLock()
         self.schemas: Set[Tuple[Optional[str], Optional[str]]] = set()
+        self.log_cache_events = log_cache_events
 
     def add_schema(
         self,
@@ -318,10 +318,9 @@ class RelationsCache:
 
         :param BaseRelation relation: The underlying relation.
         """
-        flags = get_flags()
         cached = _CachedRelation(relation)
         fire_event_if(
-            flags.LOG_CACHE_EVENTS,
+            self.log_cache_events,
             lambda: CacheDumpGraph(before_after="before", action="adding", dump=self.dump_graph()),
         )
         fire_event(CacheAction(action="add_relation", ref_key=_make_ref_key_dict(cached)))
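This change replaces call-time global flag lookups with state injected at construction. A sketch of the lazy gating pattern that fire_event_if enables; the function body and the class here are illustrative stand-ins, not dbt's implementations:

from typing import Callable


def fire_event_if(condition: bool, lazy_event: Callable[[], str]) -> None:
    # the lambda defers building the (potentially expensive) graph dump
    # unless cache-event logging is actually enabled
    if condition:
        print(lazy_event())


class RelationsCacheSketch:
    def __init__(self, log_cache_events: bool = False) -> None:
        self.log_cache_events = log_cache_events

    def dump_graph(self) -> str:
        return "...expensive graph dump..."

    def add(self, relation: str) -> None:
        fire_event_if(self.log_cache_events, lambda: f"before adding: {self.dump_graph()}")
        # ... the real implementation mutates the cache under a lock here ...
        fire_event_if(self.log_cache_events, lambda: f"after adding: {self.dump_graph()}")


RelationsCacheSketch(log_cache_events=True).add("my_table")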
@@ -329,7 +328,7 @@ class RelationsCache:
         with self.lock:
             self._setdefault(cached)
         fire_event_if(
-            flags.LOG_CACHE_EVENTS,
+            self.log_cache_events,
             lambda: CacheDumpGraph(before_after="after", action="adding", dump=self.dump_graph()),
         )
 
@@ -454,9 +453,8 @@ class RelationsCache:
                 ref_key_2=new_key._asdict(),
             )
         )
-        flags = get_flags()
         fire_event_if(
-            flags.LOG_CACHE_EVENTS,
+            self.log_cache_events,
             lambda: CacheDumpGraph(before_after="before", action="rename", dump=self.dump_graph()),
         )
 
@@ -467,7 +465,7 @@ class RelationsCache:
         self._setdefault(_CachedRelation(new))
 
         fire_event_if(
-            flags.LOG_CACHE_EVENTS,
+            self.log_cache_events,
             lambda: CacheDumpGraph(before_after="after", action="rename", dump=self.dump_graph()),
         )
 
core/dbt/adapters/capability.py (new file, 52 lines)
@@ -0,0 +1,52 @@
+from dataclasses import dataclass
+from enum import Enum
+from typing import Optional, DefaultDict, Mapping
+
+
+class Capability(str, Enum):
+    """Enumeration of optional adapter features which can be probed using BaseAdapter.capabilities()"""
+
+    SchemaMetadataByRelations = "SchemaMetadataByRelations"
+    """Indicates efficient support for retrieving schema metadata for a list of relations, rather than always retrieving
+    all the relations in a schema."""
+
+    TableLastModifiedMetadata = "TableLastModifiedMetadata"
+    """Indicates support for determining the time of the last table modification by querying database metadata."""
+
+
+class Support(str, Enum):
+    Unknown = "Unknown"
+    """The adapter has not declared whether this capability is a feature of the underlying DBMS."""
+
+    Unsupported = "Unsupported"
+    """This capability is not possible with the underlying DBMS, so the adapter does not implement related macros."""
+
+    NotImplemented = "NotImplemented"
+    """This capability is available in the underlying DBMS, but support has not yet been implemented in the adapter."""
+
+    Versioned = "Versioned"
+    """Some versions of the DBMS supported by the adapter support this capability and the adapter has implemented any
+    macros needed to use it."""
+
+    Full = "Full"
+    """All versions of the DBMS supported by the adapter support this capability and the adapter has implemented any
+    macros needed to use it."""
+
+
+@dataclass
+class CapabilitySupport:
+    support: Support
+    first_version: Optional[str] = None
+
+    def __bool__(self):
+        return self.support == Support.Versioned or self.support == Support.Full
+
+
+class CapabilityDict(DefaultDict[Capability, CapabilitySupport]):
+    def __init__(self, vals: Mapping[Capability, CapabilitySupport]):
+        super().__init__(self._default)
+        self.update(vals)
+
+    @staticmethod
+    def _default():
+        return CapabilitySupport(support=Support.Unknown)
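A sketch of how an adapter might declare and probe these capabilities, assuming the classes defined in capability.py above are importable; the specific support levels and the version string are illustrative assumptions:

capabilities = CapabilityDict(
    {
        Capability.SchemaMetadataByRelations: CapabilitySupport(support=Support.Full),
        Capability.TableLastModifiedMetadata: CapabilitySupport(
            support=Support.Versioned, first_version="1.7.0"
        ),
    }
)

# CapabilitySupport.__bool__ makes probing read naturally
if capabilities[Capability.TableLastModifiedMetadata]:
    print("can query last-modified metadata from the warehouse")

# an undeclared capability falls back to CapabilitySupport(Support.Unknown),
# which is falsy, courtesy of the DefaultDict base and _default()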
core/dbt/adapters/clients/jinja.py (new file, 23 lines)
@@ -0,0 +1,23 @@
+from typing import Dict, Any
+from dbt.common.clients.jinja import BaseMacroGenerator, get_environment
+
+
+class QueryStringGenerator(BaseMacroGenerator):
+    def __init__(self, template_str: str, context: Dict[str, Any]) -> None:
+        super().__init__(context)
+        self.template_str: str = template_str
+        env = get_environment()
+        self.template = env.from_string(
+            self.template_str,
+            globals=self.context,
+        )
+
+    def get_name(self) -> str:
+        return "query_comment_macro"
+
+    def get_template(self):
+        """Don't use the template cache, we don't have a node"""
+        return self.template
+
+    def __call__(self, connection_name: str, node) -> str:
+        return str(self.call_macro(connection_name, node))
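The class wires a user-supplied Jinja template to a rendering callable. A self-contained approximation using plain jinja2; this stand-in skips BaseMacroGenerator, whose internals are not shown in this diff, and the template string is an assumption:

from jinja2 import Environment


class QueryStringGeneratorSketch:
    def __init__(self, template_str: str, context: dict) -> None:
        # globals make the context available to the template, mirroring
        # env.from_string(self.template_str, globals=self.context) above
        self.template = Environment().from_string(template_str, globals=context)

    def __call__(self, connection_name: str, node) -> str:
        return str(self.template.render(connection_name=connection_name, node=node))


gen = QueryStringGeneratorSketch("-- run by {{ user }} on {{ connection_name }}", {"user": "dbt"})
print(gen("master", node=None))  # -- run by dbt on master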
core/dbt/adapters/contracts/__init__.py (new file, empty)
@@ -11,31 +11,32 @@ from typing import (
     List,
     Callable,
 )
-from dbt.exceptions import DbtInternalError
-from dbt.utils import translate_aliases, md5
-from dbt.events.functions import fire_event
-from dbt.events.types import NewConnectionOpening
-from dbt.events.contextvars import get_node_info
-from typing_extensions import Protocol
-from dbt.dataclass_schema import (
+from typing_extensions import Protocol, Annotated
+
+from mashumaro.jsonschema.annotations import Pattern
+
+from dbt.adapters.utils import translate_aliases
+from dbt.common.exceptions import DbtInternalError
+from dbt.common.dataclass_schema import (
     dbtClassMixin,
     StrEnum,
     ExtensibleDbtClassMixin,
-    HyphenatedDbtClassMixin,
     ValidatedStringMixin,
-    register_pattern,
 )
-from dbt.contracts.util import Replaceable
+from dbt.common.contracts.util import Replaceable
+from dbt.common.utils import md5
+
+from dbt.common.events.functions import fire_event
+from dbt.adapters.events.types import NewConnectionOpening
+
+# TODO: this is a very bad dependency - shared global state
+from dbt.common.events.contextvars import get_node_info
 
 
 class Identifier(ValidatedStringMixin):
     ValidationRegex = r"^[A-Za-z_][A-Za-z0-9_]+$"
 
 
-# we need register_pattern for jsonschema validation
-register_pattern(Identifier, r"^[A-Za-z_][A-Za-z0-9_]+$")
-
-
 @dataclass
 class AdapterResponse(dbtClassMixin):
     _message: str
@@ -55,7 +56,8 @@ class ConnectionState(StrEnum):
 
 @dataclass(init=False)
 class Connection(ExtensibleDbtClassMixin, Replaceable):
-    type: Identifier
+    # Annotated is used by mashumaro for jsonschema generation
+    type: Annotated[Identifier, Pattern(r"^[A-Za-z_][A-Za-z0-9_]+$")]
     name: Optional[str] = None
     state: ConnectionState = ConnectionState.INIT
     transaction_open: bool = False
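The Annotated type feeds mashumaro's jsonschema generator, replacing the old register_pattern call. A sketch of how the Pattern annotation surfaces in a generated schema; the build_json_schema flow shown here is my assumption of the intended usage, not code from this diff:

from typing import Annotated
from mashumaro.jsonschema import build_json_schema
from mashumaro.jsonschema.annotations import Pattern

IdentifierSketch = Annotated[str, Pattern(r"^[A-Za-z_][A-Za-z0-9_]+$")]

# the pattern constraint lands in the JSON Schema output
print(build_json_schema(IdentifierSketch).to_dict())
# expected shape: {'type': 'string', 'pattern': '^[A-Za-z_][A-Za-z0-9_]+$'}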
@@ -108,7 +110,7 @@ class LazyHandle:
     connection, updating the handle on the Connection.
     """
 
-    def __init__(self, opener: Callable[[Connection], Connection]):
+    def __init__(self, opener: Callable[[Connection], Connection]) -> None:
         self.opener = opener
 
     def resolve(self, connection: Connection) -> Connection:
@@ -161,6 +163,7 @@ class Credentials(ExtensibleDbtClassMixin, Replaceable, metaclass=abc.ABCMeta):
     @classmethod
     def __pre_deserialize__(cls, data):
         data = super().__pre_deserialize__(data)
+        # Need to fixup dbname => database, pass => password
        data = cls.translate_aliases(data)
         return data
 
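The new comment documents what translate_aliases does for credentials. A self-contained stand-in showing the dbname => database, pass => password fixup; the function body and alias map here are illustrative, dbt's own helper now lives in dbt.adapters.utils:

def translate_aliases(kwargs: dict, aliases: dict) -> dict:
    # rewrite alias keys to their canonical names, rejecting collisions
    result = {}
    for key, value in kwargs.items():
        canonical = aliases.get(key, key)
        if canonical in result:
            raise ValueError(f"duplicate key after aliasing: {canonical}")
        result[canonical] = value
    return result


print(translate_aliases({"dbname": "analytics", "pass": "secret"},
                        {"dbname": "database", "pass": "password"}))
# {'database': 'analytics', 'password': 'secret'}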
@@ -220,10 +223,10 @@ DEFAULT_QUERY_COMMENT = """
 
 
 @dataclass
-class QueryComment(HyphenatedDbtClassMixin):
+class QueryComment(dbtClassMixin):
     comment: str = DEFAULT_QUERY_COMMENT
     append: bool = False
-    job_label: bool = False
+    job_label: bool = field(default=False, metadata={"alias": "job-label"})
 
 
 class AdapterRequiredConfig(HasCredentials, Protocol):
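Dropping HyphenatedDbtClassMixin means hyphenated keys are now handled per field via metadata aliases. A sketch of the pattern using plain mashumaro, which dbtClassMixin builds on; this assumes mashumaro's default behavior of reading field aliases during deserialization:

from dataclasses import dataclass, field
from mashumaro import DataClassDictMixin


@dataclass
class QueryCommentSketch(DataClassDictMixin):
    comment: str = ""
    append: bool = False
    # YAML/JSON that spells the key "job-label" still lands on job_label
    job_label: bool = field(default=False, metadata={"alias": "job-label"})


print(QueryCommentSketch.from_dict({"job-label": True}).job_label)  # True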
@@ -231,3 +234,4 @@ class AdapterRequiredConfig(HasCredentials, Protocol):
     query_comment: QueryComment
     cli_vars: Dict[str, Any]
     target_path: str
+    log_cache_events: bool
core/dbt/adapters/contracts/macros.py (new file, 11 lines)
@@ -0,0 +1,11 @@
+from typing import Optional
+from typing_extensions import Protocol
+
+from dbt.common.clients.jinja import MacroProtocol
+
+
+class MacroResolver(Protocol):
+    def find_macro_by_name(
+        self, name: str, root_project_name: str, package: Optional[str]
+    ) -> Optional[MacroProtocol]:
+        raise NotImplementedError("find_macro_by_name not implemented")
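Because MacroResolver is a Protocol, the adapter can look up macros without importing the manifest: any object with a matching find_macro_by_name satisfies it structurally. A minimal in-memory implementation sketch; the dict-backed resolver is an assumption for illustration:

from typing import Dict, Optional


class DictMacroResolver:
    def __init__(self, macros: Dict[str, object]) -> None:
        self.macros = macros

    def find_macro_by_name(self, name: str, root_project_name: str, package: Optional[str]):
        # a real resolver would respect package and project search order;
        # this stand-in ignores both and does a flat name lookup
        return self.macros.get(name)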
@@ -6,11 +6,11 @@ from typing import (
 )
 from typing_extensions import Protocol
 
-from dbt.dataclass_schema import dbtClassMixin, StrEnum
+from dbt.common.dataclass_schema import dbtClassMixin, StrEnum
 
-from dbt.contracts.util import Replaceable
-from dbt.exceptions import CompilationError, DataclassNotDictError
-from dbt.utils import deep_merge
+from dbt.common.contracts.util import Replaceable
+from dbt.common.exceptions import CompilationError, DataclassNotDictError
+from dbt.common.utils import deep_merge
 
 
 class RelationType(StrEnum):
@@ -19,6 +19,15 @@ class RelationType(StrEnum):
     CTE = "cte"
     MaterializedView = "materialized_view"
     External = "external"
+    Ephemeral = "ephemeral"
+
+
+class RelationConfig(Protocol):
+    name: str
+    database: str
+    schema: str
+    identifier: str
+    quoting_dict: Dict[str, bool]
 
 
 class ComponentName(StrEnum):
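RelationConfig is the shape that BaseRelation.create_from now consumes instead of a manifest node, and since it is a Protocol, any object exposing these attributes satisfies it structurally with no dbt import required at runtime. A hedged sketch of a minimal conforming config object; the field values are illustrative assumptions:

from dataclasses import dataclass, field
from typing import Dict


@dataclass
class MinimalRelationConfig:
    # attribute names deliberately match the RelationConfig protocol above
    name: str = "my_model"
    database: str = "analytics"
    schema: str = "dbt_prod"
    identifier: str = "my_model"
    quoting_dict: Dict[str, bool] = field(default_factory=dict)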
Some files were not shown because too many files have changed in this diff.