forked from repo-mirrors/dbt-core
Compare commits
283 Commits
arky/add-p ... moveAdapte
| Author | SHA1 | Date |
|---|---|---|
| | f113d380e2 | |
| | ba1a911c42 | |
| | 3ddfcdd8ef | |
| | fe82ef2c46 | |
| | a68e427b74 | |
| | 870bc37270 | |
| | 977a8421d2 | |
| | c7b9b1a209 | |
| | e88138a838 | |
| | 4d16524f5a | |
| | 26ddaaf51f | |
| | f1c2f06d1e | |
| | 2811ccd460 | |
| | c2734c503a | |
| | 45b35701d4 | |
| | c03823f0e1 | |
| | eb96e3deec | |
| | ed8f5d38e4 | |
| | 0ab954e1af | |
| | e01eb30884 | |
| | 4da67bf176 | |
| | 00f49206e9 | |
| | 1bca662883 | |
| | 41ac915949 | |
| | 373125ecb8 | |
| | 5488dfb992 | |
| | 09355701f6 | |
| | 294ad82e50 | |
| | 12bd1e87fb | |
| | 3187deda43 | |
| | 8bad75c65b | |
| | c836b7585e | |
| | 220f56d8d2 | |
| | 32fde75504 | |
| | bf079b1777 | |
| | 615ad1fe2d | |
| | 81236a3dca | |
| | 6d834a18ed | |
| | 2ab0f7b26b | |
| | 9bb970e6ef | |
| | e56a5dae8b | |
| | 1c9cec1787 | |
| | 4d02ef637b | |
| | 19f027b7a7 | |
| | 1d0a3e92c8 | |
| | ab90c777d0 | |
| | 3902137dfc | |
| | 0131feac68 | |
| | 017faf4bd1 | |
| | c2f7d75e9e | |
| | 51b94b26cc | |
| | e24f9b3da7 | |
| | b58e8e3ffc | |
| | f45b013321 | |
| | e547c0ec64 | |
| | 6871fc46b5 | |
| | 931b2dbe40 | |
| | bb35b3eb87 | |
| | 01d481bc8d | |
| | 46b9a1d621 | |
| | 839c720e91 | |
| | d88c6987a2 | |
| | 4ee950427a | |
| | 6c1822f186 | |
| | c7c3ac872c | |
| | ff9d519510 | |
| | c4ff280436 | |
| | 7fddd6e448 | |
| | 1260782bd2 | |
| | 333120b111 | |
| | bb21403c9e | |
| | ac972948b8 | |
| | 211392c4a4 | |
| | 7317de23a3 | |
| | 252e3e31b5 | |
| | af916666a2 | |
| | 7de8930d1d | |
| | 200bcdcd9f | |
| | b9a603e3aa | |
| | 1a825484fb | |
| | a2a7b7d795 | |
| | 4122f6c308 | |
| | 6aeebc4c76 | |
| | 07743b7740 | |
| | f44d704801 | |
| | dbd02e54c2 | |
| | 2474722870 | |
| | a89642a6f9 | |
| | c141148616 | |
| | 469a9aca06 | |
| | 98310b6612 | |
| | ef9d6a870f | |
| | 35f46dac8c | |
| | efa6339e18 | |
| | 1baebb423c | |
| | 462df8395e | |
| | 35f214d9db | |
| | af0cbcb6a5 | |
| | 2e35426d11 | |
| | bf10a29f06 | |
| | a7e2d9bc40 | |
| | a3777496b5 | |
| | edf6aedc51 | |
| | 53845d0277 | |
| | 3d27483658 | |
| | 4f9bd0cb38 | |
| | 3f7f7de179 | |
| | 6461f5aacf | |
| | 339957b42c | |
| | 4391dc1a63 | |
| | 964e0e4e8a | |
| | 549dbf3390 | |
| | 70b2e15a25 | |
| | bb249d612c | |
| | 17773bdb94 | |
| | f30293359c | |
| | 0c85e6149f | |
| | ec57d7af94 | |
| | df791f729c | |
| | c6ff3abecd | |
| | eac13e3bd3 | |
| | 46ee3f3d9c | |
| | 5e1f0c5fbc | |
| | c4f09b160a | |
| | 48c97e86dd | |
| | 416bc845ad | |
| | 408a78985a | |
| | 0c965c8115 | |
| | f65e4b6940 | |
| | a2d4424f92 | |
| | 997f839cd6 | |
| | 556fad50df | |
| | bb4214b5c2 | |
| | f17c1f3fe7 | |
| | d4fe9a8ad4 | |
| | 2910aa29e4 | |
| | 89cc073ea8 | |
| | aa86fdfe71 | |
| | 48e9ced781 | |
| | 7b02bd1f02 | |
| | 417fc2a735 | |
| | 317128f790 | |
| | e3dfb09b10 | |
| | d912654110 | |
| | 34ab4cf9be | |
| | d597b80486 | |
| | 3f5ebe81b9 | |
| | f52bd9287b | |
| | f5baeeea1c | |
| | 3cc7044fb3 | |
| | 26c7675c28 | |
| | 8aaed0e29f | |
| | 5182e3c40c | |
| | 1e252c7664 | |
| | 05ef3b6e44 | |
| | ad04012b63 | |
| | c93cba4603 | |
| | 971669016f | |
| | 6c6f245914 | |
| | b39eeb328c | |
| | be94bf1f3c | |
| | e24a952e98 | |
| | 89f20d12cf | |
| | ebeb0f1154 | |
| | d66fe214d9 | |
| | 75781503b8 | |
| | 9aff3ca274 | |
| | 7e2a08f3a5 | |
| | a0e13561b1 | |
| | 7eedfcd274 | |
| | da779ac77c | |
| | adfa3226e3 | |
| | e5e1a272ff | |
| | d8e8a78368 | |
| | 7ae3de1fa0 | |
| | 72898c7211 | |
| | fc1a14a0e3 | |
| | f063e4e01c | |
| | 07372db906 | |
| | 48d04e8141 | |
| | 6234267242 | |
| | 1afbb87e99 | |
| | d18a74ddb7 | |
| | 4d3c6d9c7c | |
| | 10f9724827 | |
| | 582faa129e | |
| | 4ec87a01e0 | |
| | ff98685dd6 | |
| | 424f3d218a | |
| | 661623f9f7 | |
| | 49397b4d7b | |
| | 0553fd817c | |
| | 7ad971f720 | |
| | f485c13035 | |
| | c30b691164 | |
| | d088d4493e | |
| | 770f804325 | |
| | 37a29073de | |
| | 17cd145f09 | |
| | ac539fd5cf | |
| | 048553ddc3 | |
| | dfe6b71fd9 | |
| | 18ee93ca3a | |
| | cb4bc2d6e9 | |
| | b0451806ef | |
| | b514e4c249 | |
| | 8350dfead3 | |
| | 34e6edbb13 | |
| | 27be92903e | |
| | 9388030182 | |
| | b7aee3f5a4 | |
| | 83ff38ab24 | |
| | 6603a44151 | |
| | e69d4e7f14 | |
| | 506f65e880 | |
| | 41bb52762b | |
| | 8c98ef3e70 | |
| | 44d1e73b4f | |
| | 53794fbaba | |
| | 556b4043e9 | |
| | 424c636533 | |
| | f63709260e | |
| | 991618dfc1 | |
| | 1af489b1cd | |
| | a433c31d6e | |
| | 5814928e38 | |
| | 6130a6e1d0 | |
| | 7872f6a670 | |
| | f230e418aa | |
| | 518eb73f88 | |
| | 5b6d21d7da | |
| | 410506f448 | |
| | 3cb44d37c0 | |
| | f977ed7471 | |
| | 3f5617b569 | |
| | fe9c875d32 | |
| | 22c40a4766 | |
| | bcf140b3c1 | |
| | e3692a6a3d | |
| | e7489383a2 | |
| | 70246c3f86 | |
| | 0796c84da5 | |
| | 718482fb02 | |
| | a3fb66daa4 | |
| | da34b80c26 | |
| | ba5ab21140 | |
| | 65f41a1e36 | |
| | 0930c9c059 | |
| | 1d193a9ab9 | |
| | 3adc6dca61 | |
| | 36d9f841d6 | |
| | 48ad13de00 | |
| | 42935cce05 | |
| | e77f1c3b0f | |
| | 388838aa99 | |
| | d4d0990072 | |
| | 4210d17f14 | |
| | fbd12e78c9 | |
| | 83d3421e72 | |
| | 8bcbf73aaa | |
| | cc5f15885d | |
| | 20fdf55bf6 | |
| | 955dcec68b | |
| | 2b8564b16f | |
| | 57da3e51cd | |
| | dede0e9747 | |
| | 35d2fc1158 | |
| | c5267335a3 | |
| | 15c7b589c2 | |
| | 0ada5e8bf7 | |
| | 412ac8d1b9 | |
| | 5df501a281 | |
| | 3e4c61d020 | |
| | cc39fe51b3 | |
| | 89cd24388d | |
| | d5da0a8093 | |
| | 88ae1f8871 | |
| | 50b3d1deaa | |
| | 3b3def5b8a | |
| | 4f068a45ff | |
| | 23a9504a51 | |
| | d0d4eba477 | |
| | a3fab0b5a9 | |
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 1.7.0a1
+current_version = 1.8.0a1
 parse = (?P<major>[\d]+) # major version number
     \.(?P<minor>[\d]+) # minor version number
     \.(?P<patch>[\d]+) # patch version number
@@ -3,6 +3,7 @@
 For information on prior major and minor releases, see their changelogs:
 
+* [1.7](https://github.com/dbt-labs/dbt-core/blob/1.7.latest/CHANGELOG.md)
 * [1.6](https://github.com/dbt-labs/dbt-core/blob/1.6.latest/CHANGELOG.md)
 * [1.5](https://github.com/dbt-labs/dbt-core/blob/1.5.latest/CHANGELOG.md)
 * [1.4](https://github.com/dbt-labs/dbt-core/blob/1.4.latest/CHANGELOG.md)
@@ -0,0 +1,6 @@
+kind: Breaking Changes
+body: Remove adapter.get_compiler interface
+time: 2023-11-27T11:47:57.443202-05:00
+custom:
+  Author: michelleark
+  Issue: "9148"

@@ -0,0 +1,6 @@
+kind: Breaking Changes
+body: Move AdapterLogger to adapters folder
+time: 2023-11-28T13:43:56.853925-08:00
+custom:
+  Author: colin-rogers-dbt
+  Issue: "9151"

@@ -0,0 +1,7 @@
+kind: Breaking Changes
+body: move event manager setup back to core, remove ref to global EVENT_MANAGER and
+  clean up event manager functions
+time: 2023-11-30T13:53:48.645192-08:00
+custom:
+  Author: colin-rogers-dbt
+  Issue: "9150"

@@ -1,6 +0,0 @@
-kind: "Dependencies"
-body: "Bump mypy from 1.3.0 to 1.4.0"
-time: 2023-06-21T00:57:52.00000Z
-custom:
-  Author: dependabot[bot]
-  PR: 7912
.changes/unreleased/Dependencies-20231031-131954.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Dependencies
+body: Begin using DSI 0.4.x
+time: 2023-10-31T13:19:54.750009-07:00
+custom:
+  Author: QMalcolm peterallenwebb
+  PR: "8892"

.changes/unreleased/Dependencies-20231106-130051.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Dependencies
+body: Update typing-extensions version to >=4.4
+time: 2023-11-06T13:00:51.062386-08:00
+custom:
+  Author: tlento
+  PR: "9012"
@@ -1,6 +0,0 @@
-kind: Docs
-body: Corrected spelling of "Partiton"
-time: 2023-07-15T20:09:07.057361092+02:00
-custom:
-  Author: pgoslatara
-  Issue: "8100"

@@ -1,6 +0,0 @@
-kind: Docs
-body: Remove static SQL codeblock for metrics
-time: 2023-07-18T19:24:22.155323+02:00
-custom:
-  Author: marcodamore
-  Issue: "436"

.changes/unreleased/Docs-20231106-123157.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Docs
+body: fix get_custom_database docstring
+time: 2023-11-06T12:31:57.525711Z
+custom:
+  Author: LeoTheGriff
+  Issue: "9003"
.changes/unreleased/Features-20230915-123733.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Features
+body: 'Allow adapters to include package logs in dbt standard logging '
+time: 2023-09-15T12:37:33.862862-07:00
+custom:
+  Author: colin-rogers-dbt
+  Issue: "7859"

.changes/unreleased/Features-20231017-143620.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Features
+body: Add drop_schema_named macro
+time: 2023-10-17T14:36:20.612289-07:00
+custom:
+  Author: colin-rogers-dbt
+  Issue: "8025"

.changes/unreleased/Features-20231026-110821.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Features
+body: migrate utils to common and adapters folders
+time: 2023-10-26T11:08:21.458709-07:00
+custom:
+  Author: colin-rogers-dbt
+  Issue: "8924"

.changes/unreleased/Features-20231026-123556.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Features
+body: Move Agate helper client into common
+time: 2023-10-26T12:35:56.538587-07:00
+custom:
+  Author: MichelleArk
+  Issue: "8926"

.changes/unreleased/Features-20231026-123913.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Features
+body: remove usage of dbt.config.PartialProject from dbt/adapters
+time: 2023-10-26T12:39:13.904116-07:00
+custom:
+  Author: MichelleArk
+  Issue: "8928"

.changes/unreleased/Features-20231031-132022.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Features
+body: Add exports to SavedQuery spec
+time: 2023-10-31T13:20:22.448158-07:00
+custom:
+  Author: QMalcolm peterallenwebb
+  Issue: "8892"

.changes/unreleased/Features-20231107-135635.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Features
+body: Remove legacy logger
+time: 2023-11-07T13:56:35.186648-08:00
+custom:
+  Author: colin-rogers-dbt
+  Issue: "8027"

.changes/unreleased/Features-20231110-154255.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Features
+body: Support setting export configs hierarchically via saved query and project configs
+time: 2023-11-10T15:42:55.042317-08:00
+custom:
+  Author: QMalcolm
+  Issue: "8956"

.changes/unreleased/Features-20231116-234049.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Features
+body: Support --empty flag for schema-only dry runs
+time: 2023-11-16T23:40:49.96651-05:00
+custom:
+  Author: michelleark
+  Issue: "8971"

.changes/unreleased/Features-20231206-181458.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Features
+body: Adds support for parsing conversion metric related properties for the semantic
+  layer.
+time: 2023-12-06T18:14:58.688221-05:00
+custom:
+  Author: WilliamDee
+  Issue: "9203"
@@ -1,6 +0,0 @@
-kind: Fixes
-body: Fixed double-underline
-time: 2023-06-25T14:27:31.231253719+08:00
-custom:
-  Author: lllong33
-  Issue: "5301"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Enable converting deprecation warnings to errors
-time: 2023-07-18T12:55:18.03914-04:00
-custom:
-  Author: michelleark
-  Issue: "8130"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Add status to Parse Inline Error
-time: 2023-07-20T12:27:23.085084-07:00
-custom:
-  Author: ChenyuLInx
-  Issue: "8173"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Ensure `warn_error_options` get serialized in `invocation_args_dict`
-time: 2023-07-20T16:15:13.761813-07:00
-custom:
-  Author: QMalcolm
-  Issue: "7694"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Stop detecting materialization macros based on macro name
-time: 2023-07-20T17:01:12.496238-07:00
-custom:
-  Author: QMalcolm
-  Issue: "6231"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Update `dbt deps` download retry logic to handle `EOFError` exceptions
-time: 2023-07-20T17:24:22.969951-07:00
-custom:
-  Author: QMalcolm
-  Issue: "6653"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Improve handling of CTE injection with ephemeral models
-time: 2023-07-26T10:44:48.888451-04:00
-custom:
-  Author: gshank
-  Issue: "8213"
.changes/unreleased/Fixes-20231013-130943.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: For packages installed with tarball method, fetch metadata to resolve nested dependencies
+time: 2023-10-13T13:09:43.188308-04:00
+custom:
+  Author: adamlopez
+  Issue: "8621"

.changes/unreleased/Fixes-20231016-163953.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix partial parsing not working for semantic model change
+time: 2023-10-16T16:39:53.05058-07:00
+custom:
+  Author: ChenyuLInx
+  Issue: "8859"

.changes/unreleased/Fixes-20231024-110151.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Handle unknown `type_code` for model contracts
+time: 2023-10-24T11:01:51.980781-06:00
+custom:
+  Author: dbeatty10
+  Issue: 8877 8353

.changes/unreleased/Fixes-20231024-145504.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Add back contract enforcement for temporary tables on postgres
+time: 2023-10-24T14:55:04.051683-05:00
+custom:
+  Author: emmyoop
+  Issue: "8857"

.changes/unreleased/Fixes-20231024-155400.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Rework get_catalog implementation to retain previous adapter interface semantics
+time: 2023-10-24T15:54:00.628086-04:00
+custom:
+  Author: peterallenwebb
+  Issue: "8846"

.changes/unreleased/Fixes-20231026-002536.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Add version to fqn when version==0
+time: 2023-10-26T00:25:36.259356-05:00
+custom:
+  Author: aranke
+  Issue: "8836"

.changes/unreleased/Fixes-20231030-093734.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix cased comparison in catalog-retrieval function.
+time: 2023-10-30T09:37:34.258612-04:00
+custom:
+  Author: peterallenwebb
+  Issue: "8939"

.changes/unreleased/Fixes-20231031-005345.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Catalog queries now assign the correct type to materialized views
+time: 2023-10-31T00:53:45.486203-04:00
+custom:
+  Author: mikealfare
+  Issue: "8864"

.changes/unreleased/Fixes-20231031-144837.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix compilation exception running empty seed file and support new Integer agate data_type
+time: 2023-10-31T14:48:37.774871-04:00
+custom:
+  Author: gshank
+  Issue: "8895"

.changes/unreleased/Fixes-20231101-155824.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Make relation filtering None-tolerant for maximal flexibility across adapters.
+time: 2023-11-01T15:58:24.552054-04:00
+custom:
+  Author: peterallenwebb
+  Issue: "8974"

.changes/unreleased/Fixes-20231106-155933.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Fixes
+body: Update run_results.json from previous versions of dbt to support deferral and
+  rerun from failure
+time: 2023-11-06T15:59:33.677915-05:00
+custom:
+  Author: jtcohen6 peterallenwebb
+  Issue: "9010"

.changes/unreleased/Fixes-20231107-092358.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix git repository with subdirectory for Deps
+time: 2023-11-07T09:23:58.214271-08:00
+custom:
+  Author: ChenyuLInx
+  Issue: "9000"

.changes/unreleased/Fixes-20231107-094130.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Fixes
+body: Use MANIFEST.in to recursively include all jinja templates; fixes issue where
+  some templates were not included in the distribution
+time: 2023-11-07T09:41:30.121733-05:00
+custom:
+  Author: mikealfare
+  Issue: "9016"

.changes/unreleased/Fixes-20231113-114956.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix formatting of tarball information in packages-lock.yml
+time: 2023-11-13T11:49:56.437007-08:00
+custom:
+  Author: ChenyuLInx QMalcolm
+  Issue: "9062"

.changes/unreleased/Fixes-20231127-154310.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: 'deps: Lock git packages to commit SHA during resolution'
+time: 2023-11-27T15:43:10.122069+01:00
+custom:
+  Author: jtcohen6
+  Issue: "9050"

.changes/unreleased/Fixes-20231127-154347.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: 'deps: Use PackageRenderer to read package-lock.json'
+time: 2023-11-27T15:43:47.842423+01:00
+custom:
+  Author: jtcohen6
+  Issue: "9127"

.changes/unreleased/Fixes-20231127-165244.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Fixes
+body: Ensure we produce valid jsonschema schemas for manifest, catalog, run-results,
+  and sources
+time: 2023-11-27T16:52:44.590313-08:00
+custom:
+  Author: QMalcolm
+  Issue: "8991"

.changes/unreleased/Fixes-20231128-155225.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: 'Get sources working again in dbt docs generate'
+time: 2023-11-28T15:52:25.738256Z
+custom:
+  Author: aranke
+  Issue: "9119"
@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Refactor flaky test pp_versioned_models
-time: 2023-07-19T12:46:11.972481-04:00
-custom:
-  Author: gshank
-  Issue: "7781"

@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: format exception from dbtPlugin.initialize
-time: 2023-07-19T16:33:34.586377-04:00
-custom:
-  Author: michelleark
-  Issue: "8152"

@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: A way to control maxBytes for a single dbt.log file
-time: 2023-07-24T15:06:54.263822-07:00
-custom:
-  Author: ChenyuLInx
-  Issue: "8199"

@@ -1,7 +0,0 @@
-kind: Under the Hood
-body: Ref expressions with version can now be processed by the latest version of the
-  high-performance dbt-extractor library.
-time: 2023-07-25T10:26:09.902878-04:00
-custom:
-  Author: peterallenwebb
-  Issue: "7688"
.changes/unreleased/Under the Hood-20230831-164435.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Added more type annotations.
+time: 2023-08-31T16:44:35.737954-04:00
+custom:
+  Author: peterallenwebb
+  Issue: "8537"

.changes/unreleased/Under the Hood-20231026-184953.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Remove usage of dbt.include.global_project in dbt/adapters
+time: 2023-10-26T18:49:53.36449-04:00
+custom:
+  Author: michelleark
+  Issue: "8925"

.changes/unreleased/Under the Hood-20231027-140048.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Add a no-op runner for Saved Query
+time: 2023-10-27T14:00:48.4755-07:00
+custom:
+  Author: ChenyuLInx
+  Issue: "8893"

.changes/unreleased/Under the Hood-20231101-102758.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: remove dbt.flags.MP_CONTEXT usage in dbt/adapters
+time: 2023-11-01T10:27:58.790153-04:00
+custom:
+  Author: michelleark
+  Issue: "8967"

.changes/unreleased/Under the Hood-20231101-173124.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: 'Remove usage of dbt.flags.LOG_CACHE_EVENTS in dbt/adapters'
+time: 2023-11-01T17:31:24.974093-04:00
+custom:
+  Author: michelleark
+  Issue: "8969"

.changes/unreleased/Under the Hood-20231103-195222.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Under the Hood
+body: Move CatalogRelationTypes test case to the shared test suite to be reused by
+  adapter maintainers
+time: 2023-11-03T19:52:22.694394-04:00
+custom:
+  Author: mikealfare
+  Issue: "8952"

.changes/unreleased/Under the Hood-20231106-080422.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Treat SystemExit as an interrupt if raised during node execution.
+time: 2023-11-06T08:04:22.022179-05:00
+custom:
+  Author: benmosher
+  Issue: n/a

.changes/unreleased/Under the Hood-20231106-105730.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Removing unused 'documentable'
+time: 2023-11-06T10:57:30.694056-08:00
+custom:
+  Author: QMalcolm
+  Issue: "8871"

.changes/unreleased/Under the Hood-20231107-135728.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Remove use of dbt/core exceptions in dbt/adapter
+time: 2023-11-07T13:57:28.683727-08:00
+custom:
+  Author: colin-rogers-dbt MichelleArk
+  Issue: "8920"

.changes/unreleased/Under the Hood-20231107-191546.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Cache dbt plugin modules to improve integration test performance
+time: 2023-11-07T19:15:46.170151-05:00
+custom:
+  Author: peterallenwebb
+  Issue: "9029"

.changes/unreleased/Under the Hood-20231111-175350.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Under the Hood
+body: Fix test_current_timestamp_matches_utc test; allow for MacOS runner system clock
+  variance
+time: 2023-11-11T17:53:50.098843-05:00
+custom:
+  Author: mikealfare
+  Issue: "9057"

.changes/unreleased/Under the Hood-20231116-174251.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Under the Hood
+body: Remove usage of dbt.deprecations in dbt/adapters, enable core & adapter-specific
+  event types and protos
+time: 2023-11-16T17:42:51.005023-05:00
+custom:
+  Author: michelleark
+  Issue: 8927 8918

.changes/unreleased/Under the Hood-20231120-134735.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Clean up unused adapter folders
+time: 2023-11-20T13:47:35.923794-08:00
+custom:
+  Author: ChenyuLInx
+  Issue: "9123"

.changes/unreleased/Under the Hood-20231120-183214.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Under the Hood
+body: Move column constraints into common/contracts, removing another dependency of
+  adapters on core.
+time: 2023-11-20T18:32:14.859503-05:00
+custom:
+  Author: peterallenwebb
+  Issue: "9024"

.changes/unreleased/Under the Hood-20231128-170732.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Move dbt.semver to dbt.common.semver and update references.
+time: 2023-11-28T17:07:32.172421-08:00
+custom:
+  Author: versusfacit
+  Issue: "9039"

.changes/unreleased/Under the Hood-20231130-135432.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Move lowercase utils method to common
+time: 2023-11-30T13:54:32.561673-08:00
+custom:
+  Author: colin-rogers-dbt
+  Issue: "9180"

.changes/unreleased/Under the Hood-20231205-093544.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Remove usages of dbt.clients.jinja in dbt/adapters
+time: 2023-12-05T09:35:44.845352+09:00
+custom:
+  Author: michelleark
+  Issue: "9205"

.changes/unreleased/Under the Hood-20231205-120559.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Remove usage of dbt.contracts in dbt/adapters
+time: 2023-12-05T12:05:59.936775+09:00
+custom:
+  Author: michelleark
+  Issue: "9208"

.changes/unreleased/Under the Hood-20231205-165812.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Remove usage of dbt.contracts.graph.nodes.ResultNode in dbt/adapters
+time: 2023-12-05T16:58:12.932172+09:00
+custom:
+  Author: michelleark
+  Issue: "9214"

.changes/unreleased/Under the Hood-20231205-170725.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Introduce RelationConfig Protocol, consolidate Relation.create_from
+time: 2023-12-05T17:07:25.33861+09:00
+custom:
+  Author: michelleark
+  Issue: "9215"

.changes/unreleased/Under the Hood-20231205-185022.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Under the Hood
+body: Move BaseConfig, Metadata and various other contract classes from model_config
+  to common/contracts/config
+time: 2023-12-05T18:50:22.321229-08:00
+custom:
+  Author: colin-rogers-dbt
+  Issue: "8919"

.changes/unreleased/Under the Hood-20231205-235830.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: remove manifest from adapter.set_relations_cache signature
+time: 2023-12-05T23:58:30.920144+09:00
+custom:
+  Author: michelleark
+  Issue: "9217"

.changes/unreleased/Under the Hood-20231206-000343.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: 'remove manifest from adapter catalog method signatures'
+time: 2023-12-06T00:03:43.824252+09:00
+custom:
+  Author: michelleark
+  Issue: "9218"

.changes/unreleased/Under the Hood-20231207-111554.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Add MacroResolverProtocol, remove lazy loading of manifest in adapter.execute_macro
+time: 2023-12-07T11:15:54.427818+09:00
+custom:
+  Author: michelleark
+  Issue: "9244"

.changes/unreleased/Under the Hood-20231207-224139.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: pass query header context to MacroQueryStringSetter
+time: 2023-12-07T22:41:39.498024+09:00
+custom:
+  Author: michelleark
+  Issue: 9249 9250

.changes/unreleased/Under the Hood-20231208-004854.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: add macro_context_generator on adapter
+time: 2023-12-08T00:48:54.506911+09:00
+custom:
+  Author: michelleark
+  Issue: "9247"
.flake8
@@ -10,3 +10,5 @@ ignore =
     E741
     E501 # long line checking is done in black
 exclude = test/
+per-file-ignores =
+    */__init__.py: F401
.gitattributes
@@ -1,4 +1,4 @@
-core/dbt/include/index.html binary
+core/dbt/task/docs/index.html binary
 tests/functional/artifacts/data/state/*/manifest.json binary
 core/dbt/docs/build/html/searchindex.js binary
 core/dbt/docs/build/html/index.html binary
.github/CODEOWNERS
@@ -13,23 +13,6 @@
 # the core team as a whole will be assigned
 * @dbt-labs/core-team
 
-### OSS Tooling Guild
-
-/.github/ @dbt-labs/guild-oss-tooling
-.bumpversion.cfg @dbt-labs/guild-oss-tooling
-
-.changie.yaml @dbt-labs/guild-oss-tooling
-
-pre-commit-config.yaml @dbt-labs/guild-oss-tooling
-pytest.ini @dbt-labs/guild-oss-tooling
-tox.ini @dbt-labs/guild-oss-tooling
-
-pyproject.toml @dbt-labs/guild-oss-tooling
-requirements.txt @dbt-labs/guild-oss-tooling
-dev_requirements.txt @dbt-labs/guild-oss-tooling
-/core/setup.py @dbt-labs/guild-oss-tooling
-/core/MANIFEST.in @dbt-labs/guild-oss-tooling
-
 ### ADAPTERS
 
 # Adapter interface ("base" + "sql" adapter defaults, cache)
@@ -40,7 +23,7 @@ dev_requirements.txt @dbt-labs/guild-oss-tooling
 
 # Postgres plugin
 /plugins/ @dbt-labs/core-adapters
-/plugins/postgres/setup.py @dbt-labs/core-adapters @dbt-labs/guild-oss-tooling
+/plugins/postgres/setup.py @dbt-labs/core-adapters
 
 # Functional tests for adapter plugins
 /tests/adapter @dbt-labs/core-adapters
.github/ISSUE_TEMPLATE/implementation-ticket.yml
@@ -1,7 +1,7 @@
 name: 🛠️ Implementation
 description: This is an implementation ticket intended for use by the maintainers of dbt-core
 title: "[<project>] <title>"
-labels: ["user_docs"]
+labels: ["user docs"]
 body:
   - type: markdown
     attributes:
@@ -11,7 +11,7 @@ body:
       label: Housekeeping
      description: >
         A couple friendly reminders:
-        1. Remove the `user_docs` label if the scope of this work does not require changes to https://docs.getdbt.com/docs: no end-user interface (e.g. yml spec, CLI, error messages, etc) or functional changes
+        1. Remove the `user docs` label if the scope of this work does not require changes to https://docs.getdbt.com/docs: no end-user interface (e.g. yml spec, CLI, error messages, etc) or functional changes
         2. Link any blocking issues in the "Blocked on" field under the "Core devs & maintainers" project.
       options:
         - label: I am a maintainer of dbt-core
@@ -25,11 +25,29 @@ body:
       required: true
   - type: textarea
     attributes:
-      label: Acceptance critera
+      label: Acceptance criteria
       description: |
         What is the definition of done for this ticket? Include any relevant edge cases and/or test cases
     validations:
       required: true
+  - type: textarea
+    attributes:
+      label: Impact to Other Teams
+      description: |
+        Will this change impact other teams? Include details of the kinds of changes required (new tests, code changes, related tickets) and _add the relevant `Impact:[team]` label_.
+      placeholder: |
+        Example: This change impacts `dbt-redshift` because the tests will need to be modified. The `Impact:[Adapter]` label has been added.
+    validations:
+      required: true
+  - type: textarea
+    attributes:
+      label: Will backports be required?
+      description: |
+        Will this change need to be backported to previous versions? Add details, possible blockers to backporting and _add the relevant backport labels `backport 1.x.latest`_
+      placeholder: |
+        Example: Backport to 1.6.latest, 1.5.latest and 1.4.latest. Since 1.4 isn't using click, the backport may be complicated. The `backport 1.6.latest`, `backport 1.5.latest` and `backport 1.4.latest` labels have been added.
+    validations:
+      required: true
   - type: textarea
     attributes:
       label: Context
.github/dependabot.yml
@@ -28,3 +28,10 @@ updates:
     schedule:
       interval: "weekly"
     rebase-strategy: "disabled"
+
+  # github dependencies
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "weekly"
+    rebase-strategy: "disabled"
.github/pull_request_template.md
@@ -1,15 +1,12 @@
-resolves #
-[docs](https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose) dbt-labs/docs.getdbt.com/#
+resolves #
 
 <!---
 Include the number of the issue addressed by this PR above if applicable.
 PRs for code changes without an associated issue *will not be merged*.
 See CONTRIBUTING.md for more information.
-
-Include the number of the docs issue that was opened for this PR. If
-this change has no user-facing implications, "N/A" suffices instead. New
-docs tickets can be created by clicking the link above or by going to
-https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose.
+Add the `user docs` label to this PR if it will need docs changes. An
+issue will get opened in docs.getdbt.com upon successful merge of this PR.
 -->
 
 ### Problem
@@ -33,3 +30,4 @@ resolves #
 - [ ] I have run this code in development and it appears to resolve the stated issue
 - [ ] This PR includes tests, or tests are not required/relevant for this PR
 - [ ] This PR has no interface changes (e.g. macros, cli, logs, json artifacts, config files, adapter interface, etc) or this PR has already received feedback and approval from Product or DX
+- [ ] This PR includes [type annotations](https://docs.python.org/3/library/typing.html) for new and modified functions
.github/workflows/changelog-existence.yml
@@ -2,10 +2,8 @@
 # Checks that a file has been committed under the /.changes directory
 # as a new CHANGELOG entry. Cannot check for a specific filename as
 # it is dynamically generated by change type and timestamp.
-# This workflow should not require any secrets since it runs for PRs
-# from forked repos.
-# By default, secrets are not passed to workflows running from
-# a forked repo.
+# This workflow runs on pull_request_target because it requires
+# secrets to post comments.
 
 # **why?**
 # Ensure code change gets reflected in the CHANGELOG.
@@ -19,7 +17,7 @@
 name: Check Changelog Entry
 
 on:
-  pull_request:
+  pull_request_target:
     types: [opened, reopened, labeled, unlabeled, synchronize]
   workflow_dispatch:
.github/workflows/docs-issue.yml (new file)
@@ -0,0 +1,43 @@
+# **what?**
+# Open an issue in docs.getdbt.com when a PR is labeled `user docs`
+
+# **why?**
+# To reduce barriers for keeping docs up to date
+
+# **when?**
+# When a PR is labeled `user docs` and is merged. Runs on pull_request_target to run off the workflow already merged,
+# not the workflow that existed on the PR branch. This allows old PRs to get comments.
+
+
+name: Open issues in docs.getdbt.com repo when a PR is labeled
+run-name: "Open an issue in docs.getdbt.com for PR #${{ github.event.pull_request.number }}"
+
+on:
+  pull_request_target:
+    types: [labeled, closed]
+
+defaults:
+  run:
+    shell: bash
+
+permissions:
+  issues: write # opens new issues
+  pull-requests: write # comments on PRs
+
+
+jobs:
+  open_issues:
+    # we only want to run this when the PR has been merged or the label in the labeled event is `user docs`. Otherwise it runs the
+    # risk of duplication of issues being created due to merge and label both triggering this workflow to run and neither having
+    # generated the comment before the other runs. This lives here instead of the shared workflow because this is where we
+    # decide if it should run or not.
+    if: |
+      (github.event.pull_request.merged == true) &&
+      ((github.event.action == 'closed' && contains( github.event.pull_request.labels.*.name, 'user docs')) ||
+      (github.event.action == 'labeled' && github.event.label.name == 'user docs'))
+    uses: dbt-labs/actions/.github/workflows/open-issue-in-repo.yml@main
+    with:
+      issue_repository: "dbt-labs/docs.getdbt.com"
+      issue_title: "Docs Changes Needed from ${{ github.event.repository.name }} PR #${{ github.event.pull_request.number }}"
+      issue_body: "At a minimum, update body to include a link to the page on docs.getdbt.com requiring updates and what part(s) of the page you would like to see updated."
+    secrets: inherit
.github/workflows/main.yml
@@ -36,7 +36,7 @@ defaults:
 # top-level adjustments can be made here
 env:
   # number of parallel processes to spawn for python integration testing
-  PYTHON_INTEGRATION_TEST_WORKERS: ${{ vars.PYTHON_INTEGRATION_TEST_WORKERS }}
+  PYTHON_INTEGRATION_TEST_WORKERS: 5
 
 jobs:
   code-quality:
@@ -108,8 +108,9 @@ jobs:
       - name: Upload Unit Test Coverage to Codecov
         if: ${{ matrix.python-version == '3.11' }}
         uses: codecov/codecov-action@v3
+        env:
+          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
         with:
-          token: ${{ secrets.CODECOV_TOKEN }}
           flags: unit
 
   integration-metadata:
     name: integration test metadata generation
@@ -221,17 +222,26 @@
       - name: Upload Integration Test Coverage to Codecov
         if: ${{ matrix.python-version == '3.11' }}
         uses: codecov/codecov-action@v3
+        env:
+          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
         with:
-          token: ${{ secrets.CODECOV_TOKEN }}
           flags: integration
 
   integration-report:
-    name: integration test suite
     if: ${{ always() }}
+    name: Integration Test Suite
     runs-on: ubuntu-latest
     needs: integration
     steps:
-      - name: "[Notification] Integration test suite passes"
+      - name: "Integration Tests Failed"
+        if: ${{ contains(needs.integration.result, 'failure') || contains(needs.integration.result, 'cancelled') }}
+        # when this is true the next step won't execute
         run: |
-          echo "::notice title="Integration test suite passes""
+          echo "::notice title='Integration test suite failed'"
+          exit 1
+
+      - name: "Integration Tests Passed"
+        run: |
+          echo "::notice title='Integration test suite passed'"
 
   build:
     name: build packages
.github/workflows/release-docker.yml
@@ -83,7 +83,7 @@ jobs:
           password: ${{ secrets.GITHUB_TOKEN }}
 
       - name: Build and push MAJOR.MINOR.PATCH tag
-        uses: docker/build-push-action@v4
+        uses: docker/build-push-action@v5
         with:
           file: docker/Dockerfile
           push: True
@@ -94,7 +94,7 @@ jobs:
             ghcr.io/dbt-labs/${{ github.event.inputs.package }}:${{ github.event.inputs.version_number }}
 
       - name: Build and push MINOR.latest tag
-        uses: docker/build-push-action@v4
+        uses: docker/build-push-action@v5
         if: ${{ needs.get_version_meta.outputs.minor_latest == 'True' }}
         with:
           file: docker/Dockerfile
@@ -106,7 +106,7 @@ jobs:
             ghcr.io/dbt-labs/${{ github.event.inputs.package }}:${{ needs.get_version_meta.outputs.major }}.${{ needs.get_version_meta.outputs.minor }}.latest
 
       - name: Build and push latest tag
-        uses: docker/build-push-action@v4
+        uses: docker/build-push-action@v5
         if: ${{ needs.get_version_meta.outputs.latest == 'True' }}
         with:
           file: docker/Dockerfile
.github/workflows/repository-cleanup.yml (new file)
@@ -0,0 +1,30 @@
+# **what?**
+# Cleanup branches left over from automation and testing. Also cleanup
+# draft releases from release testing.
+
+# **why?**
+# The automations are leaving behind branches and releases that clutter
+# the repository. Sometimes we need them to debug processes so we don't
+# want them immediately deleted. Running on Saturday to avoid running
+# at the same time as an actual release to prevent breaking a release
+# mid-release.
+
+# **when?**
+# Mainly on a schedule of 12:00 Saturday.
+# Manual trigger can also run on demand
+
+name: Repository Cleanup
+
+on:
+  schedule:
+    - cron: '0 12 * * SAT' # At 12:00 on Saturday - details in `why` above
+
+  workflow_dispatch: # for manual triggering
+
+permissions:
+  contents: write
+
+jobs:
+  cleanup-repo:
+    uses: dbt-labs/actions/.github/workflows/repository-cleanup.yml@main
+    secrets: inherit
@@ -21,7 +21,7 @@ permissions: read-all
 # top-level adjustments can be made here
 env:
   # number of parallel processes to spawn for python testing
-  PYTHON_INTEGRATION_TEST_WORKERS: ${{ vars.PYTHON_INTEGRATION_TEST_WORKERS }}
+  PYTHON_INTEGRATION_TEST_WORKERS: 5
 
 jobs:
   integration-metadata:
@@ -1,7 +1,7 @@
 # Configuration for pre-commit hooks (see https://pre-commit.com/).
 # Eventually the hooks described here will be run as tests before merging each PR.
 
-exclude: ^(core/dbt/docs/build/|core/dbt/events/types_pb2.py)
+exclude: ^(core/dbt/docs/build/|core/dbt/common/events/types_pb2.py|core/dbt/events/core_types_pb2.py|core/dbt/adapters/events/adapter_types_pb2.py)
 
 # Force all unspecified python hooks to run python 3.8
 default_language_version:
@@ -37,7 +37,7 @@ repos:
     alias: flake8-check
     stages: [manual]
 - repo: https://github.com/pre-commit/mirrors-mypy
-  rev: v1.4.0
+  rev: v1.4.1
   hooks:
   - id: mypy
     # N.B.: Mypy is... a bit fragile.
@@ -26,7 +26,7 @@ Legacy tests are found in the 'test' directory:
 
 The "tasks" map to top-level dbt commands. So `dbt run` => task.run.RunTask, etc. Some are more like abstract base classes (GraphRunnableTask, for example) but all the concrete types outside of task should map to tasks. Currently one executes at a time. The tasks kick off their "Runners" and those do execute in parallel. The parallelism is managed via a thread pool, in GraphRunnableTask.
 
-core/dbt/include/index.html
+core/dbt/task/docs/index.html
 This is the docs website code. It comes from the dbt-docs repository, and is generated when a release is packaged.
 
 ## Adapters
@@ -10,6 +10,7 @@
 For information on prior major and minor releases, see their changelogs:
 
+* [1.7](https://github.com/dbt-labs/dbt-core/blob/1.7.latest/CHANGELOG.md)
 * [1.6](https://github.com/dbt-labs/dbt-core/blob/1.6.latest/CHANGELOG.md)
 * [1.5](https://github.com/dbt-labs/dbt-core/blob/1.5.latest/CHANGELOG.md)
 * [1.4](https://github.com/dbt-labs/dbt-core/blob/1.4.latest/CHANGELOG.md)
Makefile
@@ -40,7 +40,16 @@ dev: dev_req ## Installs dbt-* packages in develop mode along with development d
 
 .PHONY: proto_types
 proto_types: ## generates google protobuf python file from types.proto
-	protoc -I=./core/dbt/events --python_out=./core/dbt/events ./core/dbt/events/types.proto
+	protoc -I=./core/dbt/common/events --python_out=./core/dbt/common/events ./core/dbt/common/events/types.proto
+
+.PHONY: core_proto_types
+core_proto_types: ## generates google protobuf python file from core_types.proto
+	protoc -I=./core/dbt/events --python_out=./core/dbt/events ./core/dbt/events/core_types.proto
+
+.PHONY: adapter_proto_types
+adapter_proto_types: ## generates google protobuf python file from adapter_types.proto
+	protoc -I=./core/dbt/adapters/events --python_out=./core/dbt/adapters/events ./core/dbt/adapters/events/adapter_types.proto
 
 .PHONY: mypy
 mypy: .env ## Runs mypy against staged changes for static type checking.
codecov.yml (new file)
@@ -0,0 +1,13 @@
+ignore:
+  - ".github"
+  - ".changes"
+coverage:
+  status:
+    project:
+      default:
+        target: auto
+        threshold: 0.1% # Reduce noise by ignoring rounding errors in coverage drops
+    patch:
+      default:
+        target: auto
+        threshold: 80%
@@ -1,7 +1,7 @@
 # these are all just exports, #noqa them so flake8 will be happy
 
 # TODO: Should we still include this in the `adapters` namespace?
-from dbt.contracts.connection import Credentials  # noqa: F401
+from dbt.adapters.contracts.connection import Credentials  # noqa: F401
 from dbt.adapters.base.meta import available  # noqa: F401
 from dbt.adapters.base.connections import BaseConnectionManager  # noqa: F401
 from dbt.adapters.base.relation import (  # noqa: F401
||||
@@ -2,17 +2,17 @@ from dataclasses import dataclass
|
||||
import re
|
||||
from typing import Dict, ClassVar, Any, Optional
|
||||
|
||||
from dbt.exceptions import DbtRuntimeError
|
||||
from dbt.common.exceptions import DbtRuntimeError
|
||||
|
||||
|
||||
@dataclass
|
||||
class Column:
|
||||
# Note: This is automatically used by contract code
|
||||
# No-op conversions (INTEGER => INT) have been removed.
|
||||
# Any adapter that wants to take advantage of "translate_type"
|
||||
# should create a ClassVar with the appropriate conversions.
|
||||
TYPE_LABELS: ClassVar[Dict[str, str]] = {
|
||||
"STRING": "TEXT",
|
||||
"TIMESTAMP": "TIMESTAMP",
|
||||
"FLOAT": "FLOAT",
|
||||
"INTEGER": "INT",
|
||||
"BOOLEAN": "BOOLEAN",
|
||||
}
|
||||
column: str
|
||||
dtype: str
|
||||
|
||||
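The `TYPE_LABELS` comment above describes the extension point: an adapter subclass overrides the ClassVar with its own mapping, and any type not listed passes through unchanged. A minimal self-contained sketch of that pattern (`DemoColumn` and its `translate_type` body are illustrative stand-ins, not dbt's actual class):

```python
from dataclasses import dataclass
from typing import ClassVar, Dict


@dataclass
class DemoColumn:
    # Illustrative stand-in for the Column pattern shown in the hunk above.
    TYPE_LABELS: ClassVar[Dict[str, str]] = {
        "STRING": "TEXT",
        "INTEGER": "INT",
    }
    column: str
    dtype: str

    @classmethod
    def translate_type(cls, dtype: str) -> str:
        # Listed labels are rewritten; anything else is a no-op passthrough.
        return cls.TYPE_LABELS.get(dtype.upper(), dtype)


print(DemoColumn.translate_type("string"))   # -> TEXT
print(DemoColumn.translate_type("varchar"))  # -> varchar (unchanged)
```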
@@ -6,6 +6,7 @@ import traceback
 
 # multiprocessing.RLock is a function returning this type
 from multiprocessing.synchronize import RLock
+from multiprocessing.context import SpawnContext
 from threading import get_ident
 from typing import (
     Any,
@@ -23,8 +24,9 @@ from typing import (
 
 import agate
 
-import dbt.exceptions
-from dbt.contracts.connection import (
+import dbt.adapters.exceptions
+import dbt.common.exceptions.base
+from dbt.adapters.contracts.connection import (
     Connection,
     Identifier,
     ConnectionState,
@@ -32,13 +34,12 @@ from dbt.contracts.connection import (
     LazyHandle,
     AdapterResponse,
 )
-from dbt.contracts.graph.manifest import Manifest
 from dbt.adapters.base.query_headers import (
     MacroQueryStringSetter,
 )
-from dbt.events import AdapterLogger
-from dbt.events.functions import fire_event
-from dbt.events.types import (
+from dbt.adapters.events.logging import AdapterLogger
+from dbt.common.events.functions import fire_event
+from dbt.adapters.events.types import (
     NewConnection,
     ConnectionReused,
     ConnectionLeftOpenInCleanup,
@@ -48,9 +49,8 @@ from dbt.events.types import (
     Rollback,
     RollbackFailed,
 )
-from dbt.events.contextvars import get_node_info
-from dbt import flags
-from dbt.utils import cast_to_str
+from dbt.common.events.contextvars import get_node_info
+from dbt.common.utils import cast_to_str
 
 SleepTime = Union[int, float]  # As taken by time.sleep.
 AdapterHandle = Any  # Adapter connection handle objects can be any class.
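For downstream code, the net effect of these import moves is a split between `dbt.common` (shared events, exceptions, utilities) and `dbt.adapters` (adapter-facing contracts, events, exceptions). A summary of the relocated names, using only paths that appear in the hunks above (this assumes a dbt-core checkout of this branch, not a released package):

```python
# Paths taken directly from the hunks above; illustrative, branch-specific.
from dbt.adapters.contracts.connection import Connection, AdapterResponse
from dbt.adapters.exceptions import InvalidConnectionError, FailedToConnectError
from dbt.adapters.events.logging import AdapterLogger
from dbt.common.events.functions import fire_event
from dbt.common.exceptions import DbtRuntimeError, DbtInternalError
```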
@@ -72,14 +72,14 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
 
     TYPE: str = NotImplemented
 
-    def __init__(self, profile: AdapterRequiredConfig):
+    def __init__(self, profile: AdapterRequiredConfig, mp_context: SpawnContext) -> None:
         self.profile = profile
         self.thread_connections: Dict[Hashable, Connection] = {}
-        self.lock: RLock = flags.MP_CONTEXT.RLock()
+        self.lock: RLock = mp_context.RLock()
         self.query_header: Optional[MacroQueryStringSetter] = None
 
-    def set_query_header(self, manifest: Manifest) -> None:
-        self.query_header = MacroQueryStringSetter(self.profile, manifest)
+    def set_query_header(self, query_header_context: Dict[str, Any]) -> None:
+        self.query_header = MacroQueryStringSetter(self.profile, query_header_context)
 
     @staticmethod
     def get_thread_identifier() -> Hashable:
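This hunk makes two dependency cuts at once: the constructor now receives its multiprocessing context from the caller instead of reaching into the global `dbt.flags.MP_CONTEXT`, and `set_query_header` takes a plain mapping rather than a full `Manifest`. A runnable toy illustrating the injected-context idea (a stand-in class, not dbt's real `BaseConnectionManager`):

```python
from multiprocessing import get_context
from multiprocessing.context import SpawnContext
from typing import Any, Dict


class DemoConnectionManager:
    # Toy stand-in mirroring the new constructor shape shown above.
    def __init__(self, profile: Dict[str, Any], mp_context: SpawnContext) -> None:
        self.profile = profile
        self.lock = mp_context.RLock()  # was: flags.MP_CONTEXT.RLock()


mgr = DemoConnectionManager({"threads": 4}, get_context("spawn"))
with mgr.lock:
    print("lock acquired via injected spawn context")
```

Passing the context in explicitly keeps the adapter layer free of core's global flags, which is the stated goal of the MP_CONTEXT changelog entry earlier in this diff.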
@@ -91,13 +91,15 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
         key = self.get_thread_identifier()
         with self.lock:
             if key not in self.thread_connections:
-                raise dbt.exceptions.InvalidConnectionError(key, list(self.thread_connections))
+                raise dbt.adapters.exceptions.InvalidConnectionError(
+                    key, list(self.thread_connections)
+                )
             return self.thread_connections[key]
 
     def set_thread_connection(self, conn: Connection) -> None:
         key = self.get_thread_identifier()
         if key in self.thread_connections:
-            raise dbt.exceptions.DbtInternalError(
+            raise dbt.common.exceptions.DbtInternalError(
                 "In set_thread_connection, existing connection exists for {}"
             )
         self.thread_connections[key] = conn
@@ -137,13 +139,13 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
         :return: A context manager that handles exceptions raised by the
             underlying database.
         """
-        raise dbt.exceptions.NotImplementedError(
+        raise dbt.common.exceptions.base.NotImplementedError(
             "`exception_handler` is not implemented for this adapter!"
         )
 
     def set_connection_name(self, name: Optional[str] = None) -> Connection:
         """Called by 'acquire_connection' in BaseAdapter, which is called by
-        'connection_named', called by 'connection_for(node)'.
+        'connection_named'.
         Creates a connection for this thread if one doesn't already
         exist, and will rename an existing connection."""
@@ -220,14 +222,14 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
         :param int _attempts: Parameter used to keep track of the number of attempts in calling the
             connect function across recursive calls. Passed as an argument to retry_timeout if it
             is a Callable. This parameter should not be set by the initial caller.
-        :raises dbt.exceptions.FailedToConnectError: Upon exhausting all retry attempts without
+        :raises dbt.adapters.exceptions.FailedToConnectError: Upon exhausting all retry attempts without
             successfully acquiring a handle.
         :return: The given connection with its appropriate state and handle attributes set
             depending on whether we successfully acquired a handle or not.
         """
         timeout = retry_timeout(_attempts) if callable(retry_timeout) else retry_timeout
         if timeout < 0:
-            raise dbt.exceptions.FailedToConnectError(
+            raise dbt.adapters.exceptions.FailedToConnectError(
                 "retry_timeout cannot be negative or return a negative time."
             )
@@ -235,7 +237,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
             # This guard is not perfect others may add to the recursion limit (e.g. built-ins).
             connection.handle = None
             connection.state = ConnectionState.FAIL
-            raise dbt.exceptions.FailedToConnectError("retry_limit cannot be negative")
+            raise dbt.adapters.exceptions.FailedToConnectError("retry_limit cannot be negative")
 
         try:
             connection.handle = connect()
@@ -246,7 +248,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
             if retry_limit <= 0:
                 connection.handle = None
                 connection.state = ConnectionState.FAIL
-                raise dbt.exceptions.FailedToConnectError(str(e))
+                raise dbt.adapters.exceptions.FailedToConnectError(str(e))
 
             logger.debug(
                 f"Got a retryable error when attempting to open a {cls.TYPE} connection.\n"
@@ -268,12 +270,12 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
         except Exception as e:
             connection.handle = None
             connection.state = ConnectionState.FAIL
-            raise dbt.exceptions.FailedToConnectError(str(e))
+            raise dbt.adapters.exceptions.FailedToConnectError(str(e))
 
     @abc.abstractmethod
     def cancel_open(self) -> Optional[List[str]]:
         """Cancel all open connections on the adapter. (passable)"""
-        raise dbt.exceptions.NotImplementedError(
+        raise dbt.common.exceptions.base.NotImplementedError(
             "`cancel_open` is not implemented for this adapter!"
         )
@@ -288,7 +290,9 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
         This should be thread-safe, or hold the lock if necessary. The given
         connection should not be in either in_use or available.
         """
-        raise dbt.exceptions.NotImplementedError("`open` is not implemented for this adapter!")
+        raise dbt.common.exceptions.base.NotImplementedError(
+            "`open` is not implemented for this adapter!"
+        )
 
     def release(self) -> None:
         with self.lock:
@@ -320,12 +324,16 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
||||
@abc.abstractmethod
|
||||
def begin(self) -> None:
|
||||
"""Begin a transaction. (passable)"""
|
||||
raise dbt.exceptions.NotImplementedError("`begin` is not implemented for this adapter!")
|
||||
raise dbt.common.exceptions.base.NotImplementedError(
|
||||
"`begin` is not implemented for this adapter!"
|
||||
)
|
||||
|
||||
@abc.abstractmethod
|
||||
def commit(self) -> None:
|
||||
"""Commit a transaction. (passable)"""
|
||||
raise dbt.exceptions.NotImplementedError("`commit` is not implemented for this adapter!")
|
||||
raise dbt.common.exceptions.base.NotImplementedError(
|
||||
"`commit` is not implemented for this adapter!"
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def _rollback_handle(cls, connection: Connection) -> None:
|
||||
@@ -361,7 +369,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
||||
def _rollback(cls, connection: Connection) -> None:
|
||||
"""Roll back the given connection."""
|
||||
if connection.transaction_open is False:
|
||||
raise dbt.exceptions.DbtInternalError(
|
||||
raise dbt.common.exceptions.DbtInternalError(
|
||||
f"Tried to rollback transaction on connection "
|
||||
f'"{connection.name}", but it does not have one open!'
|
||||
)
|
||||
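For orientation, a rough sketch of the overrides a concrete adapter supplies on top of `BaseConnectionManager`. This is not from the diff: import paths are assumed to match this branch, `MyConnectionManager` is hypothetical, and the abstract `open`/`execute` methods are omitted for brevity:

```python
from contextlib import contextmanager

from dbt.adapters.base.connections import BaseConnectionManager
from dbt.common.exceptions import DbtRuntimeError


class MyConnectionManager(BaseConnectionManager):
    TYPE = "myadapter"

    @contextmanager
    def exception_handler(self, sql: str):
        # Wrap driver errors so callers always see dbt's runtime error type.
        try:
            yield
        except Exception as e:
            self.release()
            raise DbtRuntimeError(str(e))

    def cancel_open(self):
        return None  # nothing to cancel in this toy example

    def begin(self):
        pass  # e.g. an autocommit-only warehouse: transactions are no-ops

    def commit(self):
        pass
```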
@@ -400,7 +408,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):

    @abc.abstractmethod
    def execute(
-       self, sql: str, auto_begin: bool = False, fetch: bool = False
+       self, sql: str, auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None
    ) -> Tuple[AdapterResponse, agate.Table]:
        """Execute the given SQL.

@@ -408,7 +416,30 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
        :param bool auto_begin: If set, and dbt is not currently inside a
            transaction, automatically begin one.
        :param bool fetch: If set, fetch results.
+       :param int limit: If set, limits the result set
        :return: A tuple of the query status and results (empty if fetch=False).
        :rtype: Tuple[AdapterResponse, agate.Table]
        """
-       raise dbt.exceptions.NotImplementedError("`execute` is not implemented for this adapter!")
+       raise dbt.common.exceptions.base.NotImplementedError(
+           "`execute` is not implemented for this adapter!"
+       )
+
+   def add_select_query(self, sql: str) -> Tuple[Connection, Any]:
+       """
+       This was added here because base.impl.BaseAdapter.get_column_schema_from_query expects it to be here.
+       That method wouldn't work unless the adapter used sql.impl.SQLAdapter, sql.connections.SQLConnectionManager
+       or defined this method on <Adapter>ConnectionManager before passing it in to <Adapter>Adapter.
+
+       See https://github.com/dbt-labs/dbt-core/issues/8396 for more information.
+       """
+       raise dbt.common.exceptions.base.NotImplementedError(
+           "`add_select_query` is not implemented for this adapter!"
+       )
+
+   @classmethod
+   def data_type_code_to_name(cls, type_code: Union[int, str]) -> str:
+       """Get the string representation of the data type from the type_code."""
+       # https://peps.python.org/pep-0249/#type-objects
+       raise dbt.common.exceptions.base.NotImplementedError(
+           "`data_type_code_to_name` is not implemented for this adapter!"
+       )

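A hypothetical call site for the widened `execute` signature: with the new `limit` parameter, adapters can push "show top N rows" down into the generated query instead of fetching everything. `adapter` and the table name here are placeholders:

```python
response, table = adapter.execute(
    "select * from analytics.orders",
    fetch=True,
    limit=500,  # an adapter may render this as LIMIT 500 / TOP 500
)
print(response)         # AdapterResponse: status string, rows affected, ...
print(len(table.rows))  # at most 500
```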
@@ -4,12 +4,10 @@ from contextlib import contextmanager
 from datetime import datetime
 from enum import Enum
 import time
-from itertools import chain
 from typing import (
     Any,
     Callable,
     Dict,
-    Iterable,
     Iterator,
     List,
     Mapping,
@@ -17,39 +15,59 @@ from typing import (
     Set,
     Tuple,
     Type,
+    TypedDict,
     Union,
+    FrozenSet,
+    Iterable,
 )
+from multiprocessing.context import SpawnContext

-from dbt.contracts.graph.nodes import ColumnLevelConstraint, ConstraintType, ModelLevelConstraint
+from dbt.adapters.capability import Capability, CapabilityDict
+from dbt.common.contracts.constraints import (
+    ColumnLevelConstraint,
+    ConstraintType,
+    ModelLevelConstraint,
+)
+from dbt.adapters.contracts.macros import MacroResolverProtocol

 import agate
 import pytz

-from dbt.exceptions import (
-    NotImplementedError,
-    NullRelationCacheAttemptedError,
-    NullRelationDropAttemptedError,
-    QuoteConfigTypeError,
-    RelationReturnedMultipleResultsError,
-    RenameToNoneAttemptedError,
-    SnapshotTargetIncompleteError,
-    SnapshotTargetNotSnapshotTableError,
-    UnexpectedNonTimestampError,
-    UnexpectedNullError,
-)
+from dbt.adapters.exceptions import (
+    SnapshotTargetIncompleteError,
+    SnapshotTargetNotSnapshotTableError,
+    NullRelationDropAttemptedError,
+    NullRelationCacheAttemptedError,
+    RelationReturnedMultipleResultsError,
+    UnexpectedNonTimestampError,
+    RenameToNoneAttemptedError,
+    QuoteConfigTypeError,
+)
+
+from dbt.common.exceptions import (
+    NotImplementedError,
+    DbtInternalError,
+    DbtRuntimeError,
+    DbtValidationError,
+    UnexpectedNullError,
+    MacroArgTypeError,
+    MacroResultError,
+)

-from dbt.adapters.protocol import AdapterConfig, ConnectionManagerProtocol
-from dbt.clients.agate_helper import empty_table, merge_tables, table_from_rows
-from dbt.clients.jinja import MacroGenerator
-from dbt.contracts.graph.manifest import Manifest, MacroManifest
-from dbt.contracts.graph.nodes import ResultNode
-from dbt.events.functions import fire_event, warn_or_error
-from dbt.events.types import (
+from dbt.adapters.protocol import (
+    AdapterConfig,
+    MacroContextGeneratorCallable,
+)
+from dbt.common.clients.agate_helper import (
+    empty_table,
+    get_column_value_uncased,
+    merge_tables,
+    table_from_rows,
+    Integer,
+)
+from dbt.common.clients.jinja import CallableMacroGenerator
+from dbt.common.events.functions import fire_event, warn_or_error
+from dbt.adapters.events.types import (
     CacheMiss,
     ListRelations,
     CodeExecution,
@@ -58,9 +76,14 @@ from dbt.events.types import (
     ConstraintNotSupported,
     ConstraintNotEnforced,
 )
-from dbt.utils import filter_null_values, executor, cast_to_str, AttrDict
+from dbt.common.utils import filter_null_values, executor, cast_to_str, AttrDict

-from dbt.adapters.base.connections import Connection, AdapterResponse
+from dbt.adapters.contracts.relation import RelationConfig
+from dbt.adapters.base.connections import (
+    Connection,
+    AdapterResponse,
+    BaseConnectionManager,
+)
 from dbt.adapters.base.meta import AdapterMeta, available
 from dbt.adapters.base.relation import (
     ComponentName,
@@ -71,10 +94,13 @@ from dbt.adapters.base.relation import (
 from dbt.adapters.base import Column as BaseColumn
 from dbt.adapters.base import Credentials
 from dbt.adapters.cache import RelationsCache, _make_ref_key_dict
-from dbt import deprecations
+from dbt.adapters.events.types import CollectFreshnessReturnSignature


 GET_CATALOG_MACRO_NAME = "get_catalog"
+GET_CATALOG_RELATIONS_MACRO_NAME = "get_catalog_relations"
 FRESHNESS_MACRO_NAME = "collect_freshness"
+GET_RELATION_LAST_MODIFIED_MACRO_NAME = "get_relation_last_modified"


 class ConstraintSupport(str, Enum):
@@ -91,11 +117,13 @@ def _expect_row_value(key: str, row: agate.Row):
     return row[key]


-def _catalog_filter_schemas(manifest: Manifest) -> Callable[[agate.Row], bool]:
+def _catalog_filter_schemas(
+    used_schemas: FrozenSet[Tuple[str, str]]
+) -> Callable[[agate.Row], bool]:
     """Return a function that takes a row and decides if the row should be
     included in the catalog output.
     """
-    schemas = frozenset((d.lower(), s.lower()) for d, s in manifest.get_used_schemas())
+    schemas = frozenset((d.lower(), s.lower()) for d, s in used_schemas)

     def test(row: agate.Row) -> bool:
         table_database = _expect_row_value("table_database", row)
@@ -109,7 +137,7 @@ def _catalog_filter_schemas(manifest: Manifest) -> Callable[[agate.Row], bool]:
     return test


-def _utc(dt: Optional[datetime], source: BaseRelation, field_name: str) -> datetime:
+def _utc(dt: Optional[datetime], source: Optional[BaseRelation], field_name: str) -> datetime:
     """If dt has a timezone, return a new datetime that's in UTC. Otherwise,
     assume the datetime is already for UTC and add the timezone.
     """
@@ -161,6 +189,12 @@ class PythonJobHelper:
         raise NotImplementedError("PythonJobHelper submit function is not implemented yet")


+class FreshnessResponse(TypedDict):
+    max_loaded_at: datetime
+    snapshotted_at: datetime
+    age: float  # age in seconds
+

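The pattern in the hunks above repeats throughout this diff: helpers that used to take a full `Manifest` now take plain `(database, schema)` pairs, so this module no longer imports the manifest at all. A sketch of the new calling convention, assuming a `manifest` object on the dbt-core side and a `catalog_table` agate table:

```python
# The caller computes the schema pairs itself and hands them over.
used_schemas = frozenset(manifest.get_used_schemas())
keep_row = _catalog_filter_schemas(used_schemas)
filtered = catalog_table.where(keep_row)  # rows only from used schemas
```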
 class BaseAdapter(metaclass=AdapterMeta):
     """The BaseAdapter provides an abstract base class for adapters.

@@ -208,7 +242,7 @@ class BaseAdapter(metaclass=AdapterMeta):

     Relation: Type[BaseRelation] = BaseRelation
     Column: Type[BaseColumn] = BaseColumn
-    ConnectionManager: Type[ConnectionManagerProtocol]
+    ConnectionManager: Type[BaseConnectionManager]

     # A set of clobber config fields accepted by this adapter
     # for use in materializations
@@ -222,11 +256,35 @@ class BaseAdapter(metaclass=AdapterMeta):
         ConstraintType.foreign_key: ConstraintSupport.ENFORCED,
     }

-    def __init__(self, config):
+    # This static member variable can be overriden in concrete adapter
+    # implementations to indicate adapter support for optional capabilities.
+    _capabilities = CapabilityDict({})
+
+    def __init__(self, config, mp_context: SpawnContext) -> None:
         self.config = config
-        self.cache = RelationsCache()
-        self.connections = self.ConnectionManager(config)
-        self._macro_manifest_lazy: Optional[MacroManifest] = None
+        self.cache = RelationsCache(log_cache_events=config.log_cache_events)
+        self.connections = self.ConnectionManager(config, mp_context)
+        self._macro_resolver: Optional[MacroResolverProtocol] = None
+        self._macro_context_generator: Optional[MacroContextGeneratorCallable] = None
+
+    ###
+    # Methods to set / access a macro resolver
+    ###
+    def set_macro_resolver(self, macro_resolver: MacroResolverProtocol) -> None:
+        self._macro_resolver = macro_resolver
+
+    def get_macro_resolver(self) -> Optional[MacroResolverProtocol]:
+        return self._macro_resolver
+
+    def clear_macro_resolver(self) -> None:
+        if self._macro_resolver is not None:
+            self._macro_resolver = None
+
+    def set_macro_context_generator(
+        self,
+        macro_context_generator: MacroContextGeneratorCallable,
+    ) -> None:
+        self._macro_context_generator = macro_context_generator

     ###
     # Methods that pass through to the connection manager
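Rough wiring sketch for the new constructor: the caller supplies the spawn context and then injects a macro resolver and context generator instead of the adapter lazily loading a macro manifest. `MyAdapter`, `config`, `manifest`, and `generate_runtime_macro_context` stand in for whatever the embedding application provides:

```python
from multiprocessing import get_context

adapter = MyAdapter(config, get_context("spawn"))
adapter.set_macro_resolver(manifest)  # any object satisfying MacroResolverProtocol
adapter.set_macro_context_generator(generate_runtime_macro_context)
```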
@@ -256,10 +314,10 @@ class BaseAdapter(metaclass=AdapterMeta):
         return conn.name

     @contextmanager
-    def connection_named(self, name: str, node: Optional[ResultNode] = None) -> Iterator[None]:
+    def connection_named(self, name: str, query_header_context: Any = None) -> Iterator[None]:
         try:
             if self.connections.query_header is not None:
-                self.connections.query_header.set(name, node)
+                self.connections.query_header.set(name, query_header_context)
             self.acquire_connection(name)
             yield
         finally:
@@ -267,11 +325,6 @@ class BaseAdapter(metaclass=AdapterMeta):
             if self.connections.query_header is not None:
                 self.connections.query_header.reset()

-    @contextmanager
-    def connection_for(self, node: ResultNode) -> Iterator[None]:
-        with self.connection_named(node.unique_id, node):
-            yield
-
     @available.parse(lambda *a, **k: ("", empty_table()))
     def execute(
         self, sql: str, auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None
@@ -315,14 +368,21 @@ class BaseAdapter(metaclass=AdapterMeta):

     @available.parse(lambda *a, **k: ("", empty_table()))
     def get_partitions_metadata(self, table: str) -> Tuple[agate.Table]:
-        """Obtain partitions metadata for a BigQuery partitioned table.
+        """
+        TODO: Can we move this to dbt-bigquery?
+        Obtain partitions metadata for a BigQuery partitioned table.

-        :param str table_id: a partitioned table id, in standard SQL format.
+        :param str table: a partitioned table id, in standard SQL format.
         :return: a partition metadata tuple, as described in
            https://cloud.google.com/bigquery/docs/creating-partitioned-tables#getting_partition_metadata_using_meta_tables.
         :rtype: agate.Table
         """
-        return self.connections.get_partitions_metadata(table=table)
+        if hasattr(self.connections, "get_partitions_metadata"):
+            return self.connections.get_partitions_metadata(table=table)
+        else:
+            raise NotImplementedError(
+                "`get_partitions_metadata` is not implemented for this adapter!"
+            )

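With `connection_for(node)` removed, callers use `connection_named` directly and pass the node (or any object exposing the attributes the query-header macro reads) as the `query_header_context`. A usage sketch, with `adapter` and `node` assumed from the calling side:

```python
with adapter.connection_named(node.unique_id, node):
    response, table = adapter.execute("select 1 as id", fetch=True)
```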
     ###
     # Methods that should never be overridden
@@ -337,39 +397,6 @@ class BaseAdapter(metaclass=AdapterMeta):
         """
         return cls.ConnectionManager.TYPE

-    @property
-    def _macro_manifest(self) -> MacroManifest:
-        if self._macro_manifest_lazy is None:
-            return self.load_macro_manifest()
-        return self._macro_manifest_lazy
-
-    def check_macro_manifest(self) -> Optional[MacroManifest]:
-        """Return the internal manifest (used for executing macros) if it's
-        been initialized, otherwise return None.
-        """
-        return self._macro_manifest_lazy
-
-    def load_macro_manifest(self, base_macros_only=False) -> MacroManifest:
-        # base_macros_only is for the test framework
-        if self._macro_manifest_lazy is None:
-            # avoid a circular import
-            from dbt.parser.manifest import ManifestLoader
-
-            manifest = ManifestLoader.load_macros(
-                self.config,
-                self.connections.set_query_header,
-                base_macros_only=base_macros_only,
-            )
-            # TODO CT-211
-            self._macro_manifest_lazy = manifest  # type: ignore[assignment]
-        # TODO CT-211
-        return self._macro_manifest_lazy  # type: ignore[return-value]
-
-    def clear_macro_manifest(self):
-        if self._macro_manifest_lazy is not None:
-            self._macro_manifest_lazy = None
-
     ###
     # Caching methods
     ###
     def _schema_is_cached(self, database: Optional[str], schema: str) -> bool:
@@ -387,18 +414,16 @@ class BaseAdapter(metaclass=AdapterMeta):
         else:
             return True

-    def _get_cache_schemas(self, manifest: Manifest) -> Set[BaseRelation]:
+    def _get_cache_schemas(self, relation_configs: Iterable[RelationConfig]) -> Set[BaseRelation]:
         """Get the set of schema relations that the cache logic needs to
-        populate. This means only executable nodes are included.
+        populate.
         """
-        # the cache only cares about executable nodes
         return {
-            self.Relation.create_from(self.config, node).without_identifier()
-            for node in manifest.nodes.values()
-            if (node.is_relational and not node.is_ephemeral_model and not node.is_external_node)
+            self.Relation.create_from(quoting=self.config, relation_config=relation_config)
+            for relation_config in relation_configs
        }

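The node filtering that `_get_cache_schemas` used to perform against the `Manifest` now happens in the caller, which passes plain `RelationConfig` objects into methods like `set_relations_cache` below. A sketch of the caller side, assuming `manifest` and `adapter`:

```python
relation_configs = [
    node
    for node in manifest.nodes.values()
    if node.is_relational and not node.is_ephemeral_model and not node.is_external_node
]
adapter.set_relations_cache(relation_configs)
```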
-    def _get_catalog_schemas(self, manifest: Manifest) -> SchemaSearchMap:
+    def _get_catalog_schemas(self, relation_configs: Iterable[RelationConfig]) -> SchemaSearchMap:
         """Get a mapping of each node's "information_schema" relations to a
         set of all schemas expected in that information_schema.

@@ -408,16 +433,8 @@ class BaseAdapter(metaclass=AdapterMeta):
            lowercase strings.
         """
         info_schema_name_map = SchemaSearchMap()
-        nodes: Iterator[ResultNode] = chain(
-            [
-                node
-                for node in manifest.nodes.values()
-                if (node.is_relational and not node.is_ephemeral_model)
-            ],
-            manifest.sources.values(),
-        )
-        for node in nodes:
-            relation = self.Relation.create_from(self.config, node)
+        relations = self._get_catalog_relations(relation_configs)
+        for relation in relations:
             info_schema_name_map.add(relation)
         # result is a map whose keys are information_schema Relations without
         # identifiers that have appropriate database prefixes, and whose values
@@ -425,14 +442,37 @@ class BaseAdapter(metaclass=AdapterMeta):
         # databases
         return info_schema_name_map

+    def _get_catalog_relations_by_info_schema(
+        self, relations
+    ) -> Dict[InformationSchema, List[BaseRelation]]:
+        relations_by_info_schema: Dict[InformationSchema, List[BaseRelation]] = dict()
+        for relation in relations:
+            info_schema = relation.information_schema_only()
+            if info_schema not in relations_by_info_schema:
+                relations_by_info_schema[info_schema] = []
+            relations_by_info_schema[info_schema].append(relation)
+
+        return relations_by_info_schema
+
+    def _get_catalog_relations(
+        self, relation_configs: Iterable[RelationConfig]
+    ) -> List[BaseRelation]:
+        relations = [
+            self.Relation.create_from(quoting=self.config, relation_config=relation_config)
+            for relation_config in relation_configs
+        ]
+        return relations
+
     def _relations_cache_for_schemas(
-        self, manifest: Manifest, cache_schemas: Optional[Set[BaseRelation]] = None
+        self,
+        relation_configs: Iterable[RelationConfig],
+        cache_schemas: Optional[Set[BaseRelation]] = None,
     ) -> None:
         """Populate the relations cache for the given schemas. Returns an
         iterable of the schemas populated, as strings.
         """
         if not cache_schemas:
-            cache_schemas = self._get_cache_schemas(manifest)
+            cache_schemas = self._get_cache_schemas(relation_configs)
         with executor(self.config) as tpe:
             futures: List[Future[List[BaseRelation]]] = []
             for cache_schema in cache_schemas:
@@ -453,14 +493,15 @@ class BaseAdapter(metaclass=AdapterMeta):
         # it's possible that there were no relations in some schemas. We want
         # to insert the schemas we query into the cache's `.schemas` attribute
         # so we can check it later
-        cache_update: Set[Tuple[Optional[str], Optional[str]]] = set()
+        cache_update: Set[Tuple[Optional[str], str]] = set()
         for relation in cache_schemas:
-            cache_update.add((relation.database, relation.schema))
+            if relation.schema:
+                cache_update.add((relation.database, relation.schema))
         self.cache.update_schemas(cache_update)

     def set_relations_cache(
         self,
-        manifest: Manifest,
+        relation_configs: Iterable[RelationConfig],
         clear: bool = False,
         required_schemas: Optional[Set[BaseRelation]] = None,
     ) -> None:
@@ -470,7 +511,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         with self.cache.lock:
             if clear:
                 self.cache.clear()
-            self._relations_cache_for_schemas(manifest, required_schemas)
+            self._relations_cache_for_schemas(relation_configs, required_schemas)

     @available
     def cache_added(self, relation: Optional[BaseRelation]) -> str:
@@ -917,6 +958,17 @@ class BaseAdapter(metaclass=AdapterMeta):
         """
         raise NotImplementedError("`convert_number_type` is not implemented for this adapter!")

+    @classmethod
+    def convert_integer_type(cls, agate_table: agate.Table, col_idx: int) -> str:
+        """Return the type in the database that best maps to the agate.Number
+        type for the given agate table and column index.
+
+        :param agate_table: The table
+        :param col_idx: The index into the agate table for the column.
+        :return: The name of the type in the database
+        """
+        return "integer"
+
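Because `convert_integer_type` is a plain classmethod with a default, adapters only override it when their preferred whole-number type differs. A sketch of such an override inside a hypothetical adapter:

```python
import agate

from dbt.adapters.base.impl import BaseAdapter


class MyAdapter(BaseAdapter):
    @classmethod
    def convert_integer_type(cls, agate_table: agate.Table, col_idx: int) -> str:
        return "bigint"  # map whole-number columns to a 64-bit type
```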
     @classmethod
     @abc.abstractmethod
     def convert_boolean_type(cls, agate_table: agate.Table, col_idx: int) -> str:
@@ -974,6 +1026,7 @@ class BaseAdapter(metaclass=AdapterMeta):
     def convert_agate_type(cls, agate_table: agate.Table, col_idx: int) -> Optional[str]:
         agate_type: Type = agate_table.column_types[col_idx]
         conversions: List[Tuple[Type, Callable[..., str]]] = [
+            (Integer, cls.convert_integer_type),
             (agate.Text, cls.convert_text_type),
             (agate.Number, cls.convert_number_type),
             (agate.Boolean, cls.convert_boolean_type),
@@ -993,11 +1046,10 @@ class BaseAdapter(metaclass=AdapterMeta):
     def execute_macro(
         self,
         macro_name: str,
-        manifest: Optional[Manifest] = None,
+        macro_resolver: Optional[MacroResolverProtocol] = None,
         project: Optional[str] = None,
         context_override: Optional[Dict[str, Any]] = None,
         kwargs: Optional[Dict[str, Any]] = None,
-        text_only_columns: Optional[Iterable[str]] = None,
     ) -> AttrDict:
         """Look macro_name up in the manifest and execute its results.

@@ -1017,13 +1069,14 @@ class BaseAdapter(metaclass=AdapterMeta):
         if context_override is None:
             context_override = {}

-        if manifest is None:
-            # TODO CT-211
-            manifest = self._macro_manifest  # type: ignore[assignment]
-        # TODO CT-211
-        macro = manifest.find_macro_by_name(  # type: ignore[union-attr]
-            macro_name, self.config.project_name, project
-        )
+        resolver = macro_resolver or self._macro_resolver
+        if resolver is None:
+            raise DbtInternalError("Macro resolver was None when calling execute_macro!")
+
+        if self._macro_context_generator is None:
+            raise DbtInternalError("Macro context generator was None when calling execute_macro!")
+
+        macro = resolver.find_macro_by_name(macro_name, self.config.project_name, project)
         if macro is None:
             if project is None:
                 package_name = "any package"
@@ -1035,27 +1088,20 @@ class BaseAdapter(metaclass=AdapterMeta):
                     macro_name, package_name
                 )
             )
-        # This causes a reference cycle, as generate_runtime_macro_context()
-        # ends up calling get_adapter, so the import has to be here.
-        from dbt.context.providers import generate_runtime_macro_context
-
-        macro_context = generate_runtime_macro_context(
-            # TODO CT-211
-            macro=macro,
-            config=self.config,
-            manifest=manifest,  # type: ignore[arg-type]
-            package_name=project,
-        )
+        macro_context = self._macro_context_generator(macro, self.config, resolver, project)
         macro_context.update(context_override)

-        macro_function = MacroGenerator(macro, macro_context)
+        macro_function = CallableMacroGenerator(macro, macro_context)

         with self.connections.exception_handler(f"macro {macro_name}"):
             result = macro_function(**kwargs)
             return result

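A hypothetical call after this change: the `manifest=` keyword becomes `macro_resolver=`, and a macro context generator must have been injected beforehand (see `set_macro_context_generator` earlier in this diff). `manifest`, `info_schema`, and `generate_runtime_macro_context` are assumed from the caller's side:

```python
adapter.set_macro_context_generator(generate_runtime_macro_context)
result = adapter.execute_macro(
    "get_catalog",
    macro_resolver=manifest,  # anything satisfying MacroResolverProtocol
    kwargs={"information_schema": info_schema, "schemas": {"analytics"}},
)
```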
     @classmethod
-    def _catalog_filter_table(cls, table: agate.Table, manifest: Manifest) -> agate.Table:
+    def _catalog_filter_table(
+        cls, table: agate.Table, used_schemas: FrozenSet[Tuple[str, str]]
+    ) -> agate.Table:
         """Filter the table as appropriate for catalog entries. Subclasses can
         override this to change filtering rules on a per-adapter basis.
         """
@@ -1065,44 +1111,122 @@ class BaseAdapter(metaclass=AdapterMeta):
             table.column_names,
             text_only_columns=["table_database", "table_schema", "table_name"],
         )
-        return table.where(_catalog_filter_schemas(manifest))
+        return table.where(_catalog_filter_schemas(used_schemas))

     def _get_one_catalog(
         self,
         information_schema: InformationSchema,
         schemas: Set[str],
-        manifest: Manifest,
+        used_schemas: FrozenSet[Tuple[str, str]],
     ) -> agate.Table:
         kwargs = {"information_schema": information_schema, "schemas": schemas}
-        table = self.execute_macro(
-            GET_CATALOG_MACRO_NAME,
-            kwargs=kwargs,
-            # pass in the full manifest so we get any local project
-            # overrides
-            manifest=manifest,
-        )
+        table = self.execute_macro(GET_CATALOG_MACRO_NAME, kwargs=kwargs)

-        results = self._catalog_filter_table(table, manifest)  # type: ignore[arg-type]
+        results = self._catalog_filter_table(table, used_schemas)  # type: ignore[arg-type]
         return results

-    def get_catalog(self, manifest: Manifest) -> Tuple[agate.Table, List[Exception]]:
-        schema_map = self._get_catalog_schemas(manifest)
+    def _get_one_catalog_by_relations(
+        self,
+        information_schema: InformationSchema,
+        relations: List[BaseRelation],
+        used_schemas: FrozenSet[Tuple[str, str]],
+    ) -> agate.Table:
+
+        kwargs = {
+            "information_schema": information_schema,
+            "relations": relations,
+        }
+        table = self.execute_macro(GET_CATALOG_RELATIONS_MACRO_NAME, kwargs=kwargs)
+
+        results = self._catalog_filter_table(table, used_schemas)  # type: ignore[arg-type]
+        return results
+
+    def get_filtered_catalog(
+        self,
+        relation_configs: Iterable[RelationConfig],
+        used_schemas: FrozenSet[Tuple[str, str]],
+        relations: Optional[Set[BaseRelation]] = None,
+    ):
+        catalogs: agate.Table
+        if (
+            relations is None
+            or len(relations) > 100
+            or not self.supports(Capability.SchemaMetadataByRelations)
+        ):
+            # Do it the traditional way. We get the full catalog.
+            catalogs, exceptions = self.get_catalog(relation_configs, used_schemas)
+        else:
+            # Do it the new way. We try to save time by selecting information
+            # only for the exact set of relations we are interested in.
+            catalogs, exceptions = self.get_catalog_by_relations(used_schemas, relations)
+
+        if relations and catalogs:
+            relation_map = {
+                (
+                    r.database.casefold() if r.database else None,
+                    r.schema.casefold() if r.schema else None,
+                    r.identifier.casefold() if r.identifier else None,
+                )
+                for r in relations
+            }
+
+            def in_map(row: agate.Row):
+                d = _expect_row_value("table_database", row)
+                s = _expect_row_value("table_schema", row)
+                i = _expect_row_value("table_name", row)
+                d = d.casefold() if d is not None else None
+                s = s.casefold() if s is not None else None
+                i = i.casefold() if i is not None else None
+                return (d, s, i) in relation_map
+
+            catalogs = catalogs.where(in_map)
+
+        return catalogs, exceptions
+
+    def row_matches_relation(self, row: agate.Row, relations: Set[BaseRelation]):
+        pass
+
+    def get_catalog(
+        self,
+        relation_configs: Iterable[RelationConfig],
+        used_schemas: FrozenSet[Tuple[str, str]],
+    ) -> Tuple[agate.Table, List[Exception]]:
         with executor(self.config) as tpe:
             futures: List[Future[agate.Table]] = []
+            schema_map: SchemaSearchMap = self._get_catalog_schemas(relation_configs)
             for info, schemas in schema_map.items():
                 if len(schemas) == 0:
                     continue
                 name = ".".join([str(info.database), "information_schema"])

                 fut = tpe.submit_connected(
-                    self, name, self._get_one_catalog, info, schemas, manifest
+                    self, name, self._get_one_catalog, info, schemas, used_schemas
                 )
                 futures.append(fut)

             catalogs, exceptions = catch_as_completed(futures)
         return catalogs, exceptions
+
+    def get_catalog_by_relations(
+        self, used_schemas: FrozenSet[Tuple[str, str]], relations: Set[BaseRelation]
+    ) -> Tuple[agate.Table, List[Exception]]:
+        with executor(self.config) as tpe:
+            futures: List[Future[agate.Table]] = []
+            relations_by_schema = self._get_catalog_relations_by_info_schema(relations)
+            for info_schema in relations_by_schema:
+                name = ".".join([str(info_schema.database), "information_schema"])
+                relations = set(relations_by_schema[info_schema])
+                fut = tpe.submit_connected(
+                    self,
+                    name,
+                    self._get_one_catalog_by_relations,
+                    info_schema,
+                    relations,
+                    used_schemas,
+                )
+                futures.append(fut)
+
+            catalogs, exceptions = catch_as_completed(futures)
+
+            return catalogs, exceptions

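A usage sketch for the new catalog entry point: callers hand over relation configs plus the used `(database, schema)` pairs, and when few relations are selected and the adapter declares `SchemaMetadataByRelations`, the per-relation path is taken automatically. `manifest`, `relation_configs`, and `selected_relations` are assumed from the calling side:

```python
used_schemas = frozenset(manifest.get_used_schemas())
catalog, load_errors = adapter.get_filtered_catalog(
    relation_configs,
    used_schemas,
    relations=selected_relations,  # Optional[Set[BaseRelation]]
)
```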
     def cancel_open_connections(self):
         """Cancel all open connections."""
@@ -1113,8 +1237,8 @@ class BaseAdapter(metaclass=AdapterMeta):
         source: BaseRelation,
         loaded_at_field: str,
         filter: Optional[str],
-        manifest: Optional[Manifest] = None,
-    ) -> Tuple[Optional[AdapterResponse], Dict[str, Any]]:
+        macro_resolver: Optional[MacroResolverProtocol] = None,
+    ) -> Tuple[Optional[AdapterResponse], FreshnessResponse]:
         """Calculate the freshness of sources in dbt, and return it"""
         kwargs: Dict[str, Any] = {
             "source": source,
@@ -1129,9 +1253,11 @@ class BaseAdapter(metaclass=AdapterMeta):
             AttrDict,  # current: contains AdapterResponse + agate.Table
             agate.Table,  # previous: just table
         ]
-        result = self.execute_macro(FRESHNESS_MACRO_NAME, kwargs=kwargs, manifest=manifest)
+        result = self.execute_macro(
+            FRESHNESS_MACRO_NAME, kwargs=kwargs, macro_resolver=macro_resolver
+        )
         if isinstance(result, agate.Table):
-            deprecations.warn("collect-freshness-return-signature")
+            warn_or_error(CollectFreshnessReturnSignature())
             adapter_response = None
             table = result
         else:
@@ -1149,13 +1275,52 @@ class BaseAdapter(metaclass=AdapterMeta):

         snapshotted_at = _utc(table[0][1], source, loaded_at_field)
         age = (snapshotted_at - max_loaded_at).total_seconds()
-        freshness = {
+        freshness: FreshnessResponse = {
             "max_loaded_at": max_loaded_at,
             "snapshotted_at": snapshotted_at,
             "age": age,
         }
         return adapter_response, freshness

+    def calculate_freshness_from_metadata(
+        self,
+        source: BaseRelation,
+        macro_resolver: Optional[MacroResolverProtocol] = None,
+    ) -> Tuple[Optional[AdapterResponse], FreshnessResponse]:
+        kwargs: Dict[str, Any] = {
+            "information_schema": source.information_schema_only(),
+            "relations": [source],
+        }
+        result = self.execute_macro(
+            GET_RELATION_LAST_MODIFIED_MACRO_NAME, kwargs=kwargs, macro_resolver=macro_resolver
+        )
+        adapter_response, table = result.response, result.table  # type: ignore[attr-defined]
+
+        try:
+            row = table[0]
+            last_modified_val = get_column_value_uncased("last_modified", row)
+            snapshotted_at_val = get_column_value_uncased("snapshotted_at", row)
+        except Exception:
+            raise MacroResultError(GET_RELATION_LAST_MODIFIED_MACRO_NAME, table)
+
+        if last_modified_val is None:
+            # Interpret missing value as "infinitely long ago"
+            max_loaded_at = datetime(1, 1, 1, 0, 0, 0, tzinfo=pytz.UTC)
+        else:
+            max_loaded_at = _utc(last_modified_val, None, "last_modified")
+
+        snapshotted_at = _utc(snapshotted_at_val, None, "snapshotted_at")
+
+        age = (snapshotted_at - max_loaded_at).total_seconds()
+
+        freshness: FreshnessResponse = {
+            "max_loaded_at": max_loaded_at,
+            "snapshotted_at": snapshotted_at,
+            "age": age,
+        }
+
+        return adapter_response, freshness

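Usage sketch for the new method: metadata-based freshness needs no `loaded_at_field`, because the age is derived from the warehouse's own last-modified metadata. `adapter` and `source_relation` are assumed:

```python
response, freshness = adapter.calculate_freshness_from_metadata(source_relation)
print(freshness["max_loaded_at"], freshness["snapshotted_at"])
print(freshness["age"])  # seconds between the two timestamps
```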
     def pre_model_hook(self, config: Mapping[str, Any]) -> Any:
         """A hook for running some operation before the model materialization
         runs. The hook can assume it has a connection available.
@@ -1181,11 +1346,6 @@ class BaseAdapter(metaclass=AdapterMeta):
         """
         pass

-    def get_compiler(self):
-        from dbt.compilation import Compiler
-
-        return Compiler(self.config)
-
     # Methods used in adapter tests
     def update_column_sql(
         self,
@@ -1305,7 +1465,7 @@ class BaseAdapter(metaclass=AdapterMeta):

         strategy = strategy.replace("+", "_")
         macro_name = f"get_incremental_{strategy}_sql"
-        # The model_context should have MacroGenerator callable objects for all macros
+        # The model_context should have callable objects for all macros
         if macro_name not in model_context:
             raise DbtRuntimeError(
                 'dbt could not find an incremental strategy macro with the name "{}" in {}'.format(
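A worked example of the naming rule in the hunk above: built-in and custom incremental strategies all resolve to a macro called `get_incremental_<strategy>_sql`, with `+` normalized to `_`:

```python
strategy = "delete+insert".replace("+", "_")    # -> "delete_insert"
macro_name = f"get_incremental_{strategy}_sql"  # -> "get_incremental_delete_insert_sql"
```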
@@ -1429,6 +1589,14 @@ class BaseAdapter(metaclass=AdapterMeta):
         else:
             return None

+    @classmethod
+    def capabilities(cls) -> CapabilityDict:
+        return cls._capabilities
+
+    @classmethod
+    def supports(cls, capability: Capability) -> bool:
+        return bool(cls.capabilities()[capability])
+

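A sketch of declaring and querying a capability. `Capability` and `CapabilityDict` are imported in this diff; `CapabilitySupport` and `Support` are assumed to live alongside them in `dbt.adapters.capability`:

```python
from dbt.adapters.capability import Capability, CapabilityDict, CapabilitySupport, Support
from dbt.adapters.base.impl import BaseAdapter


class MyAdapter(BaseAdapter):
    _capabilities = CapabilityDict(
        {Capability.SchemaMetadataByRelations: CapabilitySupport(support=Support.Full)}
    )


assert MyAdapter.supports(Capability.SchemaMetadataByRelations)
```

This is what lets `get_filtered_catalog` above pick the cheaper per-relation catalog path only on adapters that opted in.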

 COLUMNS_EQUAL_SQL = """
 with diff_count as (

@@ -1,9 +1,8 @@
 import abc
 from functools import wraps
 from typing import Callable, Optional, Any, FrozenSet, Dict, Set

-from dbt.deprecations import warn, renamed_method
-
+from dbt.common.events.functions import warn_or_error
+from dbt.adapters.events.types import AdapterDeprecationWarning

 Decorator = Callable[[Any], Callable]

@@ -62,11 +61,12 @@ class _Available:

     def wrapper(func):
         func_name = func.__name__
-        renamed_method(func_name, supported_name)

         @wraps(func)
         def inner(*args, **kwargs):
-            warn("adapter:{}".format(func_name))
+            warn_or_error(
+                AdapterDeprecationWarning(old_name=func_name, new_name=supported_name)
+            )
             return func(*args, **kwargs)

         if parse_replacement:
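For context, a hedged sketch of the decorator this warning services: renaming an adapter method while keeping the old name callable for projects that still use it. The method names are hypothetical, and the second argument is the parse-time replacement:

```python
from dbt.adapters.base.meta import available


class MyAdapterMethods:
    @available
    def list_relations_without_caching(self, schema_relation):
        ...

    @available.deprecated("list_relations_without_caching", lambda *a, **k: [])
    def list_relations(self, schema_relation):
        # Emits AdapterDeprecationWarning(old_name=..., new_name=...) and delegates.
        return self.list_relations_without_caching(schema_relation)
```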
@@ -93,7 +93,7 @@ class AdapterMeta(abc.ABCMeta):
     _available_: FrozenSet[str]
     _parse_replacements_: Dict[str, Callable]

-    def __new__(mcls, name, bases, namespace, **kwargs):
+    def __new__(mcls, name, bases, namespace, **kwargs) -> "AdapterMeta":
         # mypy does not like the `**kwargs`. But `ABCMeta` itself takes
         # `**kwargs` in its argspec here (and passes them to `type.__new__`.
         # I'm not sure there is any benefit to it after poking around a bit,
@@ -1,20 +1,10 @@
 from typing import List, Optional, Type
+from pathlib import Path

 from dbt.adapters.base import Credentials
-from dbt.exceptions import CompilationError
 from dbt.adapters.protocol import AdapterProtocol


-def project_name_from_path(include_path: str) -> str:
-    # avoid an import cycle
-    from dbt.config.project import PartialProject
-
-    partial = PartialProject.from_project_root(include_path)
-    if partial.project_name is None:
-        raise CompilationError(f"Invalid project at {include_path}: name not set!")
-    return partial.project_name
-
-
 class AdapterPlugin:
     """Defines the basic requirements for a dbt adapter plugin.

@@ -29,12 +19,13 @@ class AdapterPlugin:
         credentials: Type[Credentials],
         include_path: str,
         dependencies: Optional[List[str]] = None,
-    ):
+        project_name: Optional[str] = None,
+    ) -> None:

         self.adapter: Type[AdapterProtocol] = adapter
         self.credentials: Type[Credentials] = credentials
         self.include_path: str = include_path
-        self.project_name: str = project_name_from_path(include_path)
+        self.project_name: str = project_name or f"dbt_{Path(include_path).name}"
         self.dependencies: List[str]
         if dependencies is None:
             self.dependencies = []

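Construction sketch: `project_name` is now passed explicitly (or derived from the include path's directory name) instead of being parsed out of the bundled `dbt_project.yml` at import time, which removed the `PartialProject` import cycle. `MyAdapter`, `MyCredentials`, and `PACKAGE_PATH` are placeholders for what a real plugin exports:

```python
plugin = AdapterPlugin(
    adapter=MyAdapter,
    credentials=MyCredentials,
    include_path=PACKAGE_PATH,
    project_name="dbt_myadapter",
)
```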
@@ -1,21 +1,17 @@
 from threading import local
 from typing import Optional, Callable, Dict, Any

-from dbt.clients.jinja import QueryStringGenerator
-
-from dbt.context.manifest import generate_query_header_context
-from dbt.contracts.connection import AdapterRequiredConfig, QueryComment
-from dbt.contracts.graph.nodes import ResultNode
-from dbt.contracts.graph.manifest import Manifest
-from dbt.exceptions import DbtRuntimeError
+from dbt.adapters.clients.jinja import QueryStringGenerator
+from dbt.adapters.contracts.connection import AdapterRequiredConfig, QueryComment
+from dbt.common.exceptions import DbtRuntimeError


-class NodeWrapper:
-    def __init__(self, node):
-        self._inner_node = node
+class QueryHeaderContextWrapper:
+    def __init__(self, context) -> None:
+        self._inner_context = context

     def __getattr__(self, name):
-        return getattr(self._inner_node, name, "")
+        return getattr(self._inner_context, name, "")


 class _QueryComment(local):
@@ -25,9 +21,9 @@ class _QueryComment(local):
     - a source_name indicating what set the current thread's query comment
     """

-    def __init__(self, initial):
+    def __init__(self, initial) -> None:
         self.query_comment: Optional[str] = initial
-        self.append = False
+        self.append: bool = False

     def add(self, sql: str) -> str:
         if not self.query_comment:
@@ -53,13 +49,15 @@ class _QueryComment(local):
         self.append = append


-QueryStringFunc = Callable[[str, Optional[NodeWrapper]], str]
+QueryStringFunc = Callable[[str, Optional[QueryHeaderContextWrapper]], str]


 class MacroQueryStringSetter:
-    def __init__(self, config: AdapterRequiredConfig, manifest: Manifest):
-        self.manifest = manifest
+    def __init__(
+        self, config: AdapterRequiredConfig, query_header_context: Dict[str, Any]
+    ) -> None:
         self.config = config
+        self._query_header_context = query_header_context

         comment_macro = self._get_comment_macro()
         self.generator: QueryStringFunc = lambda name, model: ""
@@ -82,7 +80,7 @@ class MacroQueryStringSetter:
         return self.config.query_comment.comment

     def _get_context(self) -> Dict[str, Any]:
-        return generate_query_header_context(self.config, self.manifest)
+        return self._query_header_context

     def add(self, sql: str) -> str:
         return self.comment.add(sql)
@@ -90,10 +88,10 @@ class MacroQueryStringSetter:
     def reset(self):
         self.set("master", None)

-    def set(self, name: str, node: Optional[ResultNode]):
-        wrapped: Optional[NodeWrapper] = None
-        if node is not None:
-            wrapped = NodeWrapper(node)
+    def set(self, name: str, query_header_context: Any):
+        wrapped: Optional[QueryHeaderContextWrapper] = None
+        if query_header_context is not None:
+            wrapped = QueryHeaderContextWrapper(query_header_context)
         comment_str = self.generator(name, wrapped)

         append = False
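Usage sketch for the reworked query-header plumbing: the setter now receives a prebuilt context dict instead of a `Manifest`, and `set` accepts any context object, not just a `ResultNode`. `config`, `query_header_context`, and `node` are assumed from the caller:

```python
setter = MacroQueryStringSetter(config, query_header_context)
setter.set("model.my_project.my_model", node)  # node is exposed to the comment macro
sql_with_comment = setter.add("select 1")
setter.reset()
```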
Some files were not shown because too many files have changed in this diff.