forked from repo-mirrors/dbt-core
Compare commits
125 Commits
jerco/hack...before_cli
| Author | SHA1 | Date |
|---|---|---|
|  | 83c5a8c24b |  |
|  | 6d78e5e640 |  |
|  | f54a876f65 |  |
|  | 8b2c9bf39d |  |
|  | 298bf8a1d4 |  |
|  | abbece8876 |  |
|  | 3ad40372e6 |  |
|  | df64511feb |  |
|  | ccb4fa26cd |  |
|  | 4c63b630de |  |
|  | b2ea2b8b25 |  |
|  | 2245d8d710 |  |
|  | d9424cc710 |  |
|  | 1a6e4a00c7 |  |
|  | 42b7caae19 |  |
|  | 622e5fd71d |  |
|  | d2f3cdd6de |  |
|  | 92d1ef8482 |  |
|  | a8abc49632 |  |
|  | c653330911 |  |
|  | 82d9b2fa87 |  |
|  | 3f96fad4f9 |  |
|  | c2c4757a2b |  |
|  | c65ba11ae6 |  |
|  | b0651b13b5 |  |
|  | a34521ec07 |  |
|  | da47b90503 |  |
|  | db99e2f68d |  |
|  | cbb9117ab9 |  |
|  | e2ccf011d9 |  |
|  | 17014bfad3 |  |
|  | 7b464b8a49 |  |
|  | 5c765bf3e2 |  |
|  | 93619a9a37 |  |
|  | a181cee6ae |  |
|  | 3aeab73740 |  |
|  | 9801eebc58 |  |
|  | 6954c4df1b |  |
|  | f841a7ca76 |  |
|  | 07a004b301 |  |
|  | b05582de39 |  |
|  | fa7c4d19f0 |  |
|  | 066346faa2 |  |
|  | 0a03355ceb |  |
|  | 43e24c5ae6 |  |
|  | 89d111a5f6 |  |
|  | e1b5e68904 |  |
|  | 065ab2ebc2 |  |
|  | 20c95a4993 |  |
|  | c40b488cb4 |  |
|  | 585e7c59e8 |  |
|  | 7077c47551 |  |
|  | f789b2535a |  |
|  | 2bfc6917e2 |  |
|  | d74ae19523 |  |
|  | 1c7c23ac73 |  |
|  | 86e8722cd8 |  |
|  | 7a61602738 |  |
|  | dd4b47d8b1 |  |
|  | eb200b4687 |  |
|  | 0fc080d222 |  |
|  | 5da63602b3 |  |
|  | 457ff3ef48 |  |
|  | 0dbdecef10 |  |
|  | b13b0e9492 |  |
|  | b9fdfd9e36 |  |
|  | 4d6352db14 |  |
|  | 9eb82c6497 |  |
|  | 89cc89dfdf |  |
|  | 2b0f6597a4 |  |
|  | 294def205f |  |
|  | 34fa703466 |  |
|  | ab3f8dcbfd |  |
|  | 02c20477b9 |  |
|  | d9a4ee126a |  |
|  | 94d6d19fb4 |  |
|  | d43c070007 |  |
|  | 9ef236601b |  |
|  | 9d6f961d2b |  |
|  | 5453840950 |  |
|  | d453964546 |  |
|  | 748a932811 |  |
|  | 8217ad4722 |  |
|  | 6ef3fbbf76 |  |
|  | 76fd12c7cd |  |
|  | 9ecb6e50e4 |  |
|  | ce9d0afb8a |  |
|  | c39ea807e8 |  |
|  | 1e35339389 |  |
|  | 304797b099 |  |
|  | b9bdb775ab |  |
|  | df93858b4b |  |
|  | e8da84fb9e |  |
|  | 7e90e067af |  |
|  | 5e4e917de5 |  |
|  | 05dc0212e7 |  |
|  | c00052cbfb |  |
|  | 3d54a83822 |  |
|  | fafd5edbda |  |
|  | 8478262580 |  |
|  | 83b1fee062 |  |
|  | 0fbbc896b2 |  |
|  | 0544b08543 |  |
|  | bef6edb942 |  |
|  | 99f27de934 |  |
|  | 9c91f3a7bd |  |
|  | 1b6fed2ffd |  |
|  | 0721f2c1b7 |  |
|  | b9a35da118 |  |
|  | 60f80056b1 |  |
|  | 540c3b79aa |  |
|  | 16f529e1d4 |  |
|  | ebfcf2a9ef |  |
|  | 67a8138b65 |  |
|  | 85d0b5afc7 |  |
|  | 1fbcaa4484 |  |
|  | 481235a943 |  |
|  | 2289e45571 |  |
|  | b5d303f12a |  |
|  | c3be975783 |  |
|  | 47c2edb42a |  |
|  | b3440417ad |  |
|  | 020f639c7a |  |
|  | 55db15aba8 |  |
|  | bce0e7c096 |  |
@@ -1,13 +1,21 @@
 [bumpversion]
-current_version = 1.4.0a1
+current_version = 1.5.0a1

-parse = (?P<major>\d+)
-    \.(?P<minor>\d+)
-    \.(?P<patch>\d+)
-    ((?P<prekind>a|b|rc)
-    (?P<pre>\d+) # pre-release version num
+# `parse` allows parsing the version into the parts we need to check. There are some
+# unnamed groups and that's okay because they do not need to be audited. If any part
+# of the version passed and does not match the regex, it will fail.
+# expected matches: `1.5.0`, `1.5.0a1`, `1.5.0a1.dev123457+nightly`
+# excepted failures: `1`, `1.5`, `1.5.2-a1`, `text1.5.0`
+parse = (?P<major>[\d]+) # major version number
+    \.(?P<minor>[\d]+) # minor version number
+    \.(?P<patch>[\d]+) # patch version number
+    (((?P<prekind>a|b|rc) # optional pre-release type
+    ?(?P<num>[\d]+?)) # optional pre-release version number
+    \.?(?P<nightly>[a-z0-9]+\+[a-z]+)? # optional nightly release indicator
+    )?
 serialize =
-    {major}.{minor}.{patch}{prekind}{pre}
+    {major}.{minor}.{patch}{prekind}{num}.{nightly}
+    {major}.{minor}.{patch}{prekind}{num}
     {major}.{minor}.{patch}
 commit = False
 tag = False
@@ -21,9 +29,11 @@ values =
     rc
     final

-[bumpversion:part:pre]
+[bumpversion:part:num]
 first_value = 1

+[bumpversion:part:nightly]
+
 [bumpversion:file:core/setup.py]

 [bumpversion:file:core/dbt/version.py]
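The expected matches and failures listed in the new `parse` comments can be sanity-checked directly. A minimal sketch in Python, collapsing the multi-line config value into one pattern (the inline comments and line breaks from the config are dropped; this is illustrative, not how bumpversion itself compiles it):

```python
import re

# One-line equivalent of the new `parse` value from .bumpversion.cfg
PARSE = re.compile(
    r"(?P<major>[\d]+)\.(?P<minor>[\d]+)\.(?P<patch>[\d]+)"
    r"(((?P<prekind>a|b|rc)?(?P<num>[\d]+?))"
    r"\.?(?P<nightly>[a-z0-9]+\+[a-z]+)?)?"
)

for version in ["1.5.0", "1.5.0a1", "1.5.0a1.dev123457+nightly"]:
    assert PARSE.fullmatch(version), version  # expected matches

for version in ["1", "1.5", "1.5.2-a1", "text1.5.0"]:
    assert not PARSE.fullmatch(version), version  # expected failures
```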
@@ -3,6 +3,7 @@
 For information on prior major and minor releases, see their changelogs:

+* [1.4](https://github.com/dbt-labs/dbt-core/blob/1.4.latest/CHANGELOG.md)
 * [1.3](https://github.com/dbt-labs/dbt-core/blob/1.3.latest/CHANGELOG.md)
 * [1.2](https://github.com/dbt-labs/dbt-core/blob/1.2.latest/CHANGELOG.md)
 * [1.1](https://github.com/dbt-labs/dbt-core/blob/1.1.latest/CHANGELOG.md)
6  .changes/unreleased/Dependencies-20230206-000926.yaml  Normal file
@@ -0,0 +1,6 @@
+kind: "Dependencies"
+body: "Bump ubuntu from 22.04 to 23.04"
+time: 2023-02-06T00:09:26.00000Z
+custom:
+  Author: dependabot[bot]
+  PR: 6865
@@ -1,7 +0,0 @@
-kind: "Dependency"
-body: "Update pathspec requirement from ~=0.9.0 to >=0.9,<0.11 in /core"
-time: 2022-09-23T00:06:46.00000Z
-custom:
-  Author: dependabot[bot]
-  Issue: 4904
-  PR: 5917
@@ -1,7 +0,0 @@
-kind: "Dependency"
-body: "Bump black from 22.8.0 to 22.10.0"
-time: 2022-10-07T00:08:48.00000Z
-custom:
-  Author: dependabot[bot]
-  Issue: 4904
-  PR: 6019
@@ -1,7 +0,0 @@
-kind: "Dependency"
-body: "Update colorama requirement from <0.4.6,>=0.3.9 to >=0.3.9,<0.4.7 in /core"
-time: 2022-10-26T00:09:10.00000Z
-custom:
-  Author: dependabot[bot]
-  Issue: 4904
-  PR: 6144
@@ -1,7 +0,0 @@
-kind: Docs
-body: minor doc correction
-time: 2022-09-08T15:41:57.689162-04:00
-custom:
-  Author: andy-clapson
-  Issue: "5791"
-  PR: "5684"
@@ -1,7 +0,0 @@
-kind: Docs
-body: Generate API docs for new CLI interface
-time: 2022-10-07T09:06:56.446078-05:00
-custom:
-  Author: stu-k
-  Issue: "5528"
-  PR: "6022"
@@ -1,6 +0,0 @@
-kind: Docs
-time: 2022-10-17T17:14:11.715348-05:00
-custom:
-  Author: paulbenschmidt
-  Issue: "5880"
-  PR: "324"
@@ -1,7 +0,0 @@
-kind: Docs
-body: Fix rendering of sample code for metrics
-time: 2022-11-16T15:57:43.204201+01:00
-custom:
-  Author: jtcohen6
-  Issue: "323"
-  PR: "346"
6  .changes/unreleased/Docs-20230207-123807.yaml  Normal file
@@ -0,0 +1,6 @@
+kind: Docs
+body: update link to installation instructions
+time: 2023-02-07T12:38:07.336783-05:00
+custom:
+  Author: ryancharris
+  Issue: None
6  .changes/unreleased/Docs-20230209-082901.yaml  Normal file
@@ -0,0 +1,6 @@
+kind: Docs
+body: Fix JSON path to overview docs
+time: 2023-02-09T08:29:01.432616-07:00
+custom:
+  Author: halvorlu
+  Issue: "366"
@@ -1,8 +0,0 @@
-kind: Features
-body: Added favor-state flag to optionally favor state nodes even if unselected node
-  exists
-time: 2022-04-08T16:54:59.696564+01:00
-custom:
-  Author: daniel-murray josephberni
-  Issue: "2968"
-  PR: "5859"
@@ -1,7 +0,0 @@
-kind: Features
-body: Proto logging messages
-time: 2022-08-17T15:48:57.225267-04:00
-custom:
-  Author: gshank
-  Issue: "5610"
-  PR: "5643"
@@ -1,7 +0,0 @@
-kind: Features
-body: Friendlier error messages when packages.yml is malformed
-time: 2022-09-12T12:59:35.121188+01:00
-custom:
-  Author: jared-rimmer
-  Issue: "5486"
-  PR: "5812"
@@ -1,7 +0,0 @@
-kind: Features
-body: Migrate dbt-utils current_timestamp macros into core + adapters
-time: 2022-09-14T09:56:25.97818-07:00
-custom:
-  Author: colin-rogers-dbt
-  Issue: "5521"
-  PR: "5838"
@@ -1,7 +0,0 @@
-kind: Features
-body: Allow partitions in external tables to be supplied as a list
-time: 2022-09-25T21:16:51.051239654+02:00
-custom:
-  Author: pgoslatara
-  Issue: "5929"
-  PR: "5930"
@@ -1,7 +0,0 @@
-kind: Features
-body: extend -f flag shorthand for seed command
-time: 2022-10-03T11:07:05.381632-05:00
-custom:
-  Author: dave-connors-3
-  Issue: "5990"
-  PR: "5991"
@@ -1,8 +0,0 @@
-kind: Features
-body: This pulls the profile name from args when constructing a RuntimeConfig in lib.py,
-  enabling the dbt-server to override the value that's in the dbt_project.yml
-time: 2022-11-02T15:00:03.000805-05:00
-custom:
-  Author: racheldaniel
-  Issue: "6201"
-  PR: "6202"
6  .changes/unreleased/Features-20230107-003157.yaml  Normal file
@@ -0,0 +1,6 @@
+kind: Features
+body: Have dbt debug spit out structured json logs with flags enabled.
+time: 2023-01-07T00:31:57.516063-08:00
+custom:
+  Author: versusfacit
+  Issue: "5353"
6  .changes/unreleased/Features-20230118-233801.yaml  Normal file
@@ -0,0 +1,6 @@
+kind: Features
+body: add adapter_response to dbt test and freshness result
+time: 2023-01-18T23:38:01.857342+08:00
+custom:
+  Author: aezomz
+  Issue: "2964"
6  .changes/unreleased/Features-20230120-112921.yaml  Normal file
@@ -0,0 +1,6 @@
+kind: Features
+body: Improve error message for packages missing `dbt_project.yml`
+time: 2023-01-20T11:29:21.509967-07:00
+custom:
+  Author: dbeatty10
+  Issue: "6663"
6  .changes/unreleased/Features-20230126-154716.yaml  Normal file
@@ -0,0 +1,6 @@
+kind: Features
+body: Adjust makefile to have clearer instructions for CI env var changes.
+time: 2023-01-26T15:47:16.887327-08:00
+custom:
+  Author: versusfacit
+  Issue: "6689"
6  .changes/unreleased/Features-20230127-162812.yaml  Normal file
@@ -0,0 +1,6 @@
+kind: Features
+body: Stand-alone Python module for PostgresColumn
+time: 2023-01-27T16:28:12.212427-08:00
+custom:
+  Author: nssalian
+  Issue: "6772"
@@ -1,7 +0,0 @@
-kind: Fixes
-body: Account for disabled flags on models in schema files more completely
-time: 2022-09-16T10:48:54.162273-05:00
-custom:
-  Author: emmyoop
-  Issue: "3992"
-  PR: "5868"
@@ -1,7 +0,0 @@
-kind: Fixes
-body: Add validation of enabled config for metrics, exposures and sources
-time: 2022-10-10T11:32:18.752322-05:00
-custom:
-  Author: emmyoop
-  Issue: "6030"
-  PR: "6038"
@@ -1,7 +0,0 @@
-kind: Fixes
-body: check length of args of python model function before accessing it
-time: 2022-10-11T16:07:15.464093-04:00
-custom:
-  Author: chamini2
-  Issue: "6041"
-  PR: "6042"
@@ -1,8 +0,0 @@
-kind: Fixes
-body: Add functors to ensure event types with str-type attributes are initialized
-  to spec, even when provided non-str type params.
-time: 2022-10-16T17:37:42.846683-07:00
-custom:
-  Author: versusfacit
-  Issue: "5436"
-  PR: "5874"
@@ -1,7 +0,0 @@
-kind: Fixes
-body: Allow hooks to fail without halting execution flow
-time: 2022-11-07T09:53:14.340257-06:00
-custom:
-  Author: ChenyuLInx
-  Issue: "5625"
-  PR: "6059"
6  .changes/unreleased/Fixes-20230116-123645.yaml  Normal file
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Respect quoting config for dbt.ref(), dbt.source(), and dbt.this() in dbt-py models
+time: 2023-01-16T12:36:45.63092+01:00
+custom:
+  Author: jtcohen6
+  Issue: 6103 6619
6  .changes/unreleased/Fixes-20230117-101342.yaml  Normal file
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Provide backward compatibility for `get_merge_sql` arguments
+time: 2023-01-17T10:13:42.118336-06:00
+custom:
+  Author: dave-connors-3
+  Issue: "6625"
6  .changes/unreleased/Fixes-20230123-132814.yaml  Normal file
@@ -0,0 +1,6 @@
+kind: Fixes
+body: add merge_exclude_columns adapter tests
+time: 2023-01-23T13:28:14.808748-06:00
+custom:
+  Author: dave-connors-3
+  Issue: "6699"
6  .changes/unreleased/Fixes-20230124-115837.yaml  Normal file
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Include adapter_response in NodeFinished run_result log event
+time: 2023-01-24T11:58:37.74179-05:00
+custom:
+  Author: gshank
+  Issue: "6703"
6  .changes/unreleased/Fixes-20230124-141943.yaml  Normal file
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Sort cli vars before hashing for partial parsing
+time: 2023-01-24T14:19:43.333628-05:00
+custom:
+  Author: gshank
+  Issue: "6710"
6  .changes/unreleased/Fixes-20230125-191739.yaml  Normal file
@@ -0,0 +1,6 @@
+kind: Fixes
+body: '[Regression] exposure_content referenced incorrectly'
+time: 2023-01-25T19:17:39.942081-05:00
+custom:
+  Author: Mathyoub
+  Issue: "6738"
6  .changes/unreleased/Fixes-20230201-154418.yaml  Normal file
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Remove pin on packaging and stop using it for prerelease comparisons
+time: 2023-02-01T15:44:18.279158-05:00
+custom:
+  Author: gshank
+  Issue: "6834"
6  .changes/unreleased/Fixes-20230203-135557.yaml  Normal file
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Readd depends_on.macros to SeedNode, to support seeds with hooks calling macros
+time: 2023-02-03T13:55:57.853715+01:00
+custom:
+  Author: jtcohen6
+  Issue: "6806"
6  .changes/unreleased/Fixes-20230207-143544.yaml  Normal file
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix regression of --quiet cli parameter behavior
+time: 2023-02-07T14:35:44.160163-05:00
+custom:
+  Author: peterallenwebb
+  Issue: "6749"
6  .changes/unreleased/Fixes-20230208-110551.yaml  Normal file
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Ensure results from hooks contain nodes when processing them
+time: 2023-02-08T11:05:51.952494-06:00
+custom:
+  Author: emmyoop
+  Issue: "6796"
6  .changes/unreleased/Fixes-20230208-154935.yaml  Normal file
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Always flush stdout after logging
+time: 2023-02-08T15:49:35.175874-05:00
+custom:
+  Author: peterallenwebb
+  Issue: "6901"
@@ -1,7 +0,0 @@
-kind: Under the Hood
-body: Put black config in explicit config
-time: 2022-09-27T19:42:59.241433-07:00
-custom:
-  Author: max-sixty
-  Issue: "5946"
-  PR: "5947"
@@ -1,7 +0,0 @@
-kind: Under the Hood
-body: Added flat_graph attribute the Manifest class's deepcopy() coverage
-time: 2022-09-29T13:44:06.275941-04:00
-custom:
-  Author: peterallenwebb
-  Issue: "5809"
-  PR: "5975"
@@ -1,7 +0,0 @@
-kind: Under the Hood
-body: Add mypy configs so `mypy` passes from CLI
-time: 2022-10-05T12:03:10.061263-07:00
-custom:
-  Author: max-sixty
-  Issue: "5983"
-  PR: "5983"
@@ -1,7 +0,0 @@
-kind: Under the Hood
-body: Exception message cleanup.
-time: 2022-10-07T09:46:27.682872-05:00
-custom:
-  Author: emmyoop
-  Issue: "6023"
-  PR: "6024"
@@ -1,7 +0,0 @@
-kind: Under the Hood
-body: Add dmypy cache to gitignore
-time: 2022-10-07T14:00:44.227644-07:00
-custom:
-  Author: max-sixty
-  Issue: "6028"
-  PR: "5978"
@@ -1,7 +0,0 @@
-kind: Under the Hood
-body: Provide useful errors when the value of 'materialized' is invalid
-time: 2022-10-13T18:19:12.167548-04:00
-custom:
-  Author: peterallenwebb
-  Issue: "5229"
-  PR: "6025"
@@ -1,7 +0,0 @@
-kind: Under the Hood
-body: Fixed extra whitespace in strings introduced by black.
-time: 2022-10-17T15:15:11.499246-05:00
-custom:
-  Author: luke-bassett
-  Issue: "1350"
-  PR: "6086"
@@ -1,7 +0,0 @@
-kind: Under the Hood
-body: Clean up string formatting
-time: 2022-10-17T15:58:44.676549-04:00
-custom:
-  Author: eve-johns
-  Issue: "6068"
-  PR: "6082"
@@ -1,7 +0,0 @@
-kind: Under the Hood
-body: Remove the 'root_path' field from most nodes
-time: 2022-10-28T10:48:37.687886-04:00
-custom:
-  Author: gshank
-  Issue: "6171"
-  PR: "6172"
@@ -1,7 +0,0 @@
-kind: Under the Hood
-body: Combine certain logging events with different levels
-time: 2022-10-28T11:03:44.887836-04:00
-custom:
-  Author: gshank
-  Issue: "6173"
-  PR: "6174"
@@ -1,7 +0,0 @@
-kind: Under the Hood
-body: Convert threading tests to pytest
-time: 2022-11-08T07:45:50.589147-06:00
-custom:
-  Author: stu-k
-  Issue: "5942"
-  PR: "6226"
@@ -1,7 +0,0 @@
-kind: Under the Hood
-body: Convert postgres index tests to pytest
-time: 2022-11-08T11:56:33.743042-06:00
-custom:
-  Author: stu-k
-  Issue: "5770"
-  PR: "6228"
@@ -1,7 +0,0 @@
-kind: Under the Hood
-body: Convert use color tests to pytest
-time: 2022-11-08T13:31:04.788547-06:00
-custom:
-  Author: stu-k
-  Issue: "5771"
-  PR: "6230"
6  .changes/unreleased/Under the Hood-20230113-132513.yaml  Normal file
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Fix use of ConnectionReused logging event
+time: 2023-01-13T13:25:13.023168-05:00
+custom:
+  Author: gshank
+  Issue: "6168"
6  .changes/unreleased/Under the Hood-20230113-150700.yaml  Normal file
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Port docs tests to pytest
+time: 2023-01-13T15:07:00.477038-05:00
+custom:
+  Author: peterallenwebb
+  Issue: "6573"
6  .changes/unreleased/Under the Hood-20230117-111737.yaml  Normal file
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Update deprecated github action command
+time: 2023-01-17T11:17:37.046095-06:00
+custom:
+  Author: davidbloss
+  Issue: "6153"
7  .changes/unreleased/Under the Hood-20230120-172254.yaml  Normal file
@@ -0,0 +1,7 @@
+kind: Under the Hood
+body: Replaced the EmptyLine event with a more general Formatting event, and added
+  a Note event.
+time: 2023-01-20T17:22:54.45828-05:00
+custom:
+  Author: peterallenwebb
+  Issue: "6481"
6  .changes/unreleased/Under the Hood-20230122-215235.yaml  Normal file
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Small optimization on manifest parsing benefitting large DAGs
+time: 2023-01-22T21:52:35.549814+01:00
+custom:
+  Author: boxysean
+  Issue: "6697"
6  .changes/unreleased/Under the Hood-20230124-153553.yaml  Normal file
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Revised and simplified various structured logging events
+time: 2023-01-24T15:35:53.065356-05:00
+custom:
+  Author: peterallenwebb
+  Issue: 6664 6665 6666
6  .changes/unreleased/Under the Hood-20230126-135939.yaml  Normal file
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: ' Optimized GraphQueue to remove graph analysis bottleneck in large dags.'
+time: 2023-01-26T13:59:39.518345-05:00
+custom:
+  Author: peterallenwebb
+  Issue: "6759"
6  .changes/unreleased/Under the Hood-20230126-164741.yaml  Normal file
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: '[CT-1841] Convert custom target test to Pytest'
+time: 2023-01-26T16:47:41.198714-08:00
+custom:
+  Author: aranke
+  Issue: "6638"
6  .changes/unreleased/Under the Hood-20230203-143551.yaml  Normal file
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Moving simple_seed to adapter zone to help adapter test conversions
+time: 2023-02-03T14:35:51.481856-08:00
+custom:
+  Author: nssalian
+  Issue: CT-1959
113  .changie.yaml
@@ -6,19 +6,67 @@ changelogPath: CHANGELOG.md
 versionExt: md
 versionFormat: '## dbt-core {{.Version}} - {{.Time.Format "January 02, 2006"}}'
 kindFormat: '### {{.Kind}}'
-changeFormat: '- {{.Body}} ([#{{.Custom.Issue}}](https://github.com/dbt-labs/dbt-core/issues/{{.Custom.Issue}}), [#{{.Custom.PR}}](https://github.com/dbt-labs/dbt-core/pull/{{.Custom.PR}}))'
+changeFormat: |-
+  {{- $IssueList := list }}
+  {{- $changes := splitList " " $.Custom.Issue }}
+  {{- range $issueNbr := $changes }}
+    {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/issues/nbr)" | replace "nbr" $issueNbr }}
+    {{- $IssueList = append $IssueList $changeLink }}
+  {{- end -}}
+  - {{.Body}} ({{ range $index, $element := $IssueList }}{{if $index}}, {{end}}{{$element}}{{end}})

 kinds:
 - label: Breaking Changes
 - label: Features
 - label: Fixes
 - label: Docs
-  changeFormat: '- {{.Body}} ([dbt-docs/#{{.Custom.Issue}}](https://github.com/dbt-labs/dbt-docs/issues/{{.Custom.Issue}}), [dbt-docs/#{{.Custom.PR}}](https://github.com/dbt-labs/dbt-docs/pull/{{.Custom.PR}}))'
+  changeFormat: |-
+    {{- $IssueList := list }}
+    {{- $changes := splitList " " $.Custom.Issue }}
+    {{- range $issueNbr := $changes }}
+      {{- $changeLink := "[dbt-docs/#nbr](https://github.com/dbt-labs/dbt-docs/issues/nbr)" | replace "nbr" $issueNbr }}
+      {{- $IssueList = append $IssueList $changeLink }}
+    {{- end -}}
+    - {{.Body}} ({{ range $index, $element := $IssueList }}{{if $index}}, {{end}}{{$element}}{{end}})
 - label: Under the Hood
 - label: Dependencies
-  changeFormat: '- {{.Body}} ({{if ne .Custom.Issue ""}}[#{{.Custom.Issue}}](https://github.com/dbt-labs/dbt-core/issues/{{.Custom.Issue}}), {{end}}[#{{.Custom.PR}}](https://github.com/dbt-labs/dbt-core/pull/{{.Custom.PR}}))'
+  changeFormat: |-
+    {{- $PRList := list }}
+    {{- $changes := splitList " " $.Custom.PR }}
+    {{- range $pullrequest := $changes }}
+      {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $pullrequest }}
+      {{- $PRList = append $PRList $changeLink }}
+    {{- end -}}
+    - {{.Body}} ({{ range $index, $element := $PRList }}{{if $index}}, {{end}}{{$element}}{{end}})
+  skipGlobalChoices: true
+  additionalChoices:
+    - key: Author
+      label: GitHub Username(s) (separated by a single space if multiple)
+      type: string
+      minLength: 3
+    - key: PR
+      label: GitHub Pull Request Number (separated by a single space if multiple)
+      type: string
+      minLength: 1
 - label: Security
-  changeFormat: '- {{.Body}} ({{if ne .Custom.Issue ""}}[#{{.Custom.Issue}}](https://github.com/dbt-labs/dbt-core/issues/{{.Custom.Issue}}), {{end}}[#{{.Custom.PR}}](https://github.com/dbt-labs/dbt-core/pull/{{.Custom.PR}}))'
+  changeFormat: |-
+    {{- $PRList := list }}
+    {{- $changes := splitList " " $.Custom.PR }}
+    {{- range $pullrequest := $changes }}
+      {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $pullrequest }}
+      {{- $PRList = append $PRList $changeLink }}
+    {{- end -}}
+    - {{.Body}} ({{ range $index, $element := $PRList }}{{if $index}}, {{end}}{{$element}}{{end}})
+  skipGlobalChoices: true
+  additionalChoices:
+    - key: Author
+      label: GitHub Username(s) (separated by a single space if multiple)
+      type: string
+      minLength: 3
+    - key: PR
+      label: GitHub Pull Request Number (separated by a single space if multiple)
+      type: string
+      minLength: 1

 newlines:
   afterChangelogHeader: 1
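The new `changeFormat` templates split a space-separated `Custom.Issue` (or `Custom.PR`) string and render one link per number, so a single changelog entry can reference several issues. A rough Python equivalent of the link-building loop (the function name is illustrative, not part of changie):

```python
def render_change(body: str, issues: str) -> str:
    """Mimic the new changeFormat: one link per space-separated issue number."""
    links = [
        f"[#{nbr}](https://github.com/dbt-labs/dbt-core/issues/{nbr})"
        for nbr in issues.split(" ")
    ]
    return f"- {body} ({', '.join(links)})"

# e.g. Issue "6103 6619" renders both links on one changelog line
print(render_change("Respect quoting config for dbt.ref()", "6103 6619"))
```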
@@ -33,42 +81,47 @@ custom:
     type: string
     minLength: 3
   - key: Issue
-    label: GitHub Issue Number
-    type: int
-    minInt: 1
-  - key: PR
-    label: GitHub Pull Request Number
-    type: int
-    minInt: 1
+    label: GitHub Issue Number (separated by a single space if multiple)
+    type: string
+    minLength: 1

 footerFormat: |
   {{- $contributorDict := dict }}
   {{- /* any names added to this list should be all lowercase for later matching purposes */}}
-  {{- $core_team := list "michelleark" "peterallenwebb" "emmyoop" "nathaniel-may" "gshank" "leahwicz" "chenyulinx" "stu-k" "iknox-fa" "versusfacit" "mcknight-42" "jtcohen6" "dependabot[bot]" "snyk-bot" "colin-rogers-dbt" }}
+  {{- $core_team := list "michelleark" "peterallenwebb" "emmyoop" "nathaniel-may" "gshank" "leahwicz" "chenyulinx" "stu-k" "iknox-fa" "versusfacit" "mcknight-42" "jtcohen6" "aranke" "dependabot[bot]" "snyk-bot" "colin-rogers-dbt" }}
   {{- range $change := .Changes }}
    {{- $authorList := splitList " " $change.Custom.Author }}
-   {{- /* loop through all authors for a PR */}}
+   {{- /* loop through all authors for a single changelog */}}
   {{- range $author := $authorList }}
    {{- $authorLower := lower $author }}
    {{- /* we only want to include non-core team contributors */}}
    {{- if not (has $authorLower $core_team)}}
-    {{- /* Docs kind link back to dbt-docs instead of dbt-core PRs */}}
-    {{- $prLink := $change.Kind }}
-    {{- if eq $change.Kind "Docs" }}
-     {{- $prLink = "[dbt-docs/#pr](https://github.com/dbt-labs/dbt-docs/pull/pr)" | replace "pr" $change.Custom.PR }}
-    {{- else }}
-     {{- $prLink = "[#pr](https://github.com/dbt-labs/dbt-core/pull/pr)" | replace "pr" $change.Custom.PR }}
-    {{- end }}
-    {{- /* check if this contributor has other PRs associated with them already */}}
-    {{- if hasKey $contributorDict $author }}
-     {{- $prList := get $contributorDict $author }}
-     {{- $prList = append $prList $prLink }}
-     {{- $contributorDict := set $contributorDict $author $prList }}
-    {{- else }}
-     {{- $prList := list $prLink }}
-     {{- $contributorDict := set $contributorDict $author $prList }}
-    {{- end }}
-   {{- end}}
+    {{- $changeList := splitList " " $change.Custom.Author }}
+    {{- $IssueList := list }}
+    {{- $changeLink := $change.Kind }}
+    {{- if or (eq $change.Kind "Dependencies") (eq $change.Kind "Security") }}
+     {{- $changes := splitList " " $change.Custom.PR }}
+     {{- range $issueNbr := $changes }}
+      {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $issueNbr }}
+      {{- $IssueList = append $IssueList $changeLink }}
+     {{- end -}}
+    {{- else }}
+     {{- $changes := splitList " " $change.Custom.Issue }}
+     {{- range $issueNbr := $changes }}
+      {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/issues/nbr)" | replace "nbr" $issueNbr }}
+      {{- $IssueList = append $IssueList $changeLink }}
+     {{- end -}}
+    {{- end }}
+    {{- /* check if this contributor has other changes associated with them already */}}
+    {{- if hasKey $contributorDict $author }}
+     {{- $contributionList := get $contributorDict $author }}
+     {{- $contributionList = concat $contributionList $IssueList }}
+     {{- $contributorDict := set $contributorDict $author $contributionList }}
+    {{- else }}
+     {{- $contributionList := $IssueList }}
+     {{- $contributorDict := set $contributorDict $author $contributionList }}
+    {{- end }}
+   {{- end}}
  {{- end}}
 {{- end }}
 {{- /* no indentation here for formatting so the final markdown doesn't have unneeded indentations */}}
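The rewritten `footerFormat` builds a map from each non-core-team author to all of their links, using PR links for Dependencies and Security entries and issue links otherwise. A condensed Python sketch of that grouping (illustrative names; the core-team set is abbreviated here):

```python
from collections import defaultdict

CORE_TEAM = {"gshank", "jtcohen6", "aranke", "dependabot[bot]"}  # abbreviated

def group_contributions(changes: list[dict]) -> dict[str, list[str]]:
    """Group links per external author, as the footerFormat template does."""
    grouped: dict[str, list[str]] = defaultdict(list)
    for change in changes:
        if change["kind"] in ("Dependencies", "Security"):
            numbers, path = change["custom"]["PR"].split(" "), "pull"
        else:
            numbers, path = change["custom"]["Issue"].split(" "), "issues"
        links = [
            f"[#{n}](https://github.com/dbt-labs/dbt-core/{path}/{n})"
            for n in numbers
        ]
        for author in change["custom"]["Author"].split(" "):
            if author.lower() not in CORE_TEAM:  # only external contributors
                grouped[author].extend(links)
    return grouped
```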
2  .flake8
@@ -9,4 +9,4 @@ ignore =
     E203 # makes Flake8 work like black
     E741
     E501 # long line checking is done in black
-exclude = test
+exclude = test/
2  .gitattributes  vendored  Normal file
@@ -0,0 +1,2 @@
+core/dbt/include/index.html binary
+tests/functional/artifacts/data/state/*/manifest.json binary
20  .github/_README.md  vendored
@@ -63,12 +63,12 @@ permissions:
   contents: read
   pull-requests: write
 ```

 ### Secrets
 - When to use a [Personal Access Token (PAT)](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token) vs the [GITHUB_TOKEN](https://docs.github.com/en/actions/security-guides/automatic-token-authentication) generated for the action?

   The `GITHUB_TOKEN` is used by default. In most cases it is sufficient for what you need.

   If you expect the workflow to result in a commit that should retrigger workflows, you will need to use a Personal Access Token for the bot to commit the file. When using the GITHUB_TOKEN, the resulting commit will not trigger another GitHub Actions Workflow run. This is due to limitations set by GitHub. See [the docs](https://docs.github.com/en/actions/security-guides/automatic-token-authentication#using-the-github_token-in-a-workflow) for a more detailed explanation.

   For example, we must use a PAT in our workflow to commit a new changelog yaml file for bot PRs. Once the file has been committed to the branch, it should retrigger the check to validate that a changelog exists on the PR. Otherwise, it would stay in a failed state since the check would never retrigger.
@@ -105,7 +105,7 @@ Some triggers of note that we use:

 ```
 # **what?**
-# Describe what the action does.
+# Describe what the action does.

 # **why?**
 # Why does this action exist?
@@ -138,7 +138,7 @@ Some triggers of note that we use:
       id: fp
       run: |
         FILEPATH=.changes/unreleased/Dependencies-${{ steps.filename_time.outputs.time }}.yaml
-        echo "::set-output name=FILEPATH::$FILEPATH"
+        echo "FILEPATH=$FILEPATH" >> $GITHUB_OUTPUT
 ```

 - Print out all variables you will reference as the first step of a job. This allows for easier debugging. The first job should log all inputs. Subsequent jobs should reference outputs of other jobs, if present.
@@ -158,14 +158,14 @@ Some triggers of note that we use:
           echo "The build_script_path: ${{ inputs.build_script_path }}"
           echo "The s3_bucket_name: ${{ inputs.s3_bucket_name }}"
           echo "The package_test_command: ${{ inputs.package_test_command }}"

       # collect all the variables that need to be used in subsequent jobs
       - name: Set Variables
         id: variables
         run: |
-          echo "::set-output name=important_path::'performance/runner/Cargo.toml'"
-          echo "::set-output name=release_id::${{github.event.inputs.release_id}}"
-          echo "::set-output name=open_prs::${{github.event.inputs.open_prs}}"
+          echo "important_path='performance/runner/Cargo.toml'" >> $GITHUB_OUTPUT
+          echo "release_id=${{github.event.inputs.release_id}}" >> $GITHUB_OUTPUT
+          echo "open_prs=${{github.event.inputs.open_prs}}" >> $GITHUB_OUTPUT

   job2:
     needs: [job1]
@@ -190,7 +190,7 @@ ___
 ### Actions from the Marketplace
 - Don’t use external actions for things that can easily be accomplished manually.
 - Always read through what an external action does before using it! Often an action in the GitHub Actions Marketplace can be replaced with a few lines in bash. This is much more maintainable (and won’t change under us) and clear as to what’s actually happening. It also prevents any
-- Pin actions _we don't control_ to tags.
+- Pin actions _we don't control_ to tags.

 ### Connecting to AWS
 - Authenticate with the aws managed workflow
@@ -208,7 +208,7 @@ ___

 ```yaml
 - name: Copy Artifacts from S3 via CLI
-  run: aws s3 cp ${{ env.s3_bucket }} . --recursive
+  run: aws s3 cp ${{ env.s3_bucket }} . --recursive
 ```

 ### Testing
17  .github/actions/latest-wrangler/main.py  vendored
@@ -28,11 +28,12 @@ if __name__ == "__main__":
     if package_request.status_code == 404:
         if halt_on_missing:
             sys.exit(1)
-        else:
-            # everything is the latest if the package doesn't exist
-            print(f"::set-output name=latest::{True}")
-            print(f"::set-output name=minor_latest::{True}")
-            sys.exit(0)
+        # everything is the latest if the package doesn't exist
+        github_output = os.environ.get("GITHUB_OUTPUT")
+        with open(github_output, "at", encoding="utf-8") as gh_output:
+            gh_output.write("latest=True")
+            gh_output.write("minor_latest=True")
+        sys.exit(0)

     # TODO: verify package meta is "correct"
     # https://github.com/dbt-labs/dbt-core/issues/4640
@@ -91,5 +92,7 @@ if __name__ == "__main__":
     latest = is_latest(pre_rel, new_version, current_latest)
     minor_latest = is_latest(pre_rel, new_version, current_minor_latest)

-    print(f"::set-output name=latest::{latest}")
-    print(f"::set-output name=minor_latest::{minor_latest}")
+    github_output = os.environ.get("GITHUB_OUTPUT")
+    with open(github_output, "at", encoding="utf-8") as gh_output:
+        gh_output.write(f"latest={latest}")
+        gh_output.write(f"minor_latest={minor_latest}")
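Both hunks replace the deprecated `::set-output` workflow command with writes to the file named by `GITHUB_OUTPUT`, where each output is expected as its own `name=value` line. A minimal helper in the same spirit (a sketch; `set_output` is not part of the patched script, and it adds an explicit newline per pair):

```python
import os

def set_output(name: str, value: str) -> None:
    """Append one name=value pair, newline-terminated, to GITHUB_OUTPUT."""
    with open(os.environ["GITHUB_OUTPUT"], "at", encoding="utf-8") as gh_output:
        gh_output.write(f"{name}={value}\n")
```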
4  .github/workflows/bot-changelog.yml  vendored
@@ -40,7 +40,7 @@ jobs:
       matrix:
         include:
           - label: "dependencies"
-            changie_kind: "Dependency"
+            changie_kind: "Dependencies"
           - label: "snyk"
             changie_kind: "Security"
     runs-on: ubuntu-latest
@@ -58,4 +58,4 @@ jobs:
       commit_message: "Add automated changelog yaml from template for bot PR"
       changie_kind: ${{ matrix.changie_kind }}
       label: ${{ matrix.label }}
-      custom_changelog_string: "custom:\n Author: ${{ github.event.pull_request.user.login }}\n Issue: 4904\n PR: ${{ github.event.pull_request.number }}"
+      custom_changelog_string: "custom:\n Author: ${{ github.event.pull_request.user.login }}\n PR: ${{ github.event.pull_request.number }}"
165  .github/workflows/generate-cli-api-docs.yml  vendored  Normal file
@@ -0,0 +1,165 @@
+# **what?**
+# On push, if anything in core/dbt/docs or core/dbt/cli has been
+# created or modified, regenerate the CLI API docs using sphinx.
+
+# **why?**
+# We watch for changes in core/dbt/cli because the CLI API docs rely on click
+# and all supporting flags/params to be generated. We watch for changes in
+# core/dbt/docs since any changes to sphinx configuration or any of the
+# .rst files there could result in a differently build final index.html file.
+
+# **when?**
+# Whenever a change has been pushed to a branch, and only if there is a diff
+# between the PR branch and main's core/dbt/cli and or core/dbt/docs dirs.
+
+# TODO: add bot comment to PR informing contributor that the docs have been committed
+# TODO: figure out why github action triggered pushes cause github to fail to report
+# the status of jobs
+
+name: Generate CLI API docs
+
+on:
+  pull_request:
+
+permissions:
+  contents: write
+  pull-requests: write
+
+env:
+  CLI_DIR: ${{ github.workspace }}/core/dbt/cli
+  DOCS_DIR: ${{ github.workspace }}/core/dbt/docs
+  DOCS_BUILD_DIR: ${{ github.workspace }}/core/dbt/docs/build
+
+jobs:
+  check_gen:
+    name: check if generation needed
+    runs-on: ubuntu-latest
+    if: ${{ github.event.pull_request.head.repo.fork == false }}
+    outputs:
+      cli_dir_changed: ${{ steps.check_cli.outputs.cli_dir_changed }}
+      docs_dir_changed: ${{ steps.check_docs.outputs.docs_dir_changed }}
+
+    steps:
+      - name: "[DEBUG] print variables"
+        run: |
+          echo "env.CLI_DIR: ${{ env.CLI_DIR }}"
+          echo "env.DOCS_BUILD_DIR: ${{ env.DOCS_BUILD_DIR }}"
+          echo "env.DOCS_DIR: ${{ env.DOCS_DIR }}"
+
+      - name: git checkout
+        uses: actions/checkout@v3
+        with:
+          fetch-depth: 0
+          ref: ${{ github.head_ref }}
+
+      - name: set shas
+        id: set_shas
+        run: |
+          THIS_SHA=$(git rev-parse @)
+          LAST_SHA=$(git rev-parse @~1)
+
+          echo "this sha: $THIS_SHA"
+          echo "last sha: $LAST_SHA"
+
+          echo "this_sha=$THIS_SHA" >> $GITHUB_OUTPUT
+          echo "last_sha=$LAST_SHA" >> $GITHUB_OUTPUT
+
+      - name: check for changes in core/dbt/cli
+        id: check_cli
+        run: |
+          CLI_DIR_CHANGES=$(git diff \
+            ${{ steps.set_shas.outputs.last_sha }} \
+            ${{ steps.set_shas.outputs.this_sha }} \
+            -- ${{ env.CLI_DIR }})
+
+          if [ -n "$CLI_DIR_CHANGES" ]; then
+            echo "changes found"
+            echo $CLI_DIR_CHANGES
+            echo "cli_dir_changed=true" >> $GITHUB_OUTPUT
+            exit 0
+          fi
+          echo "cli_dir_changed=false" >> $GITHUB_OUTPUT
+          echo "no changes found"
+
+      - name: check for changes in core/dbt/docs
+        id: check_docs
+        if: steps.check_cli.outputs.cli_dir_changed == 'false'
+        run: |
+          DOCS_DIR_CHANGES=$(git diff --name-only \
+            ${{ steps.set_shas.outputs.last_sha }} \
+            ${{ steps.set_shas.outputs.this_sha }} \
+            -- ${{ env.DOCS_DIR }} ':!${{ env.DOCS_BUILD_DIR }}')
+
+          DOCS_BUILD_DIR_CHANGES=$(git diff --name-only \
+            ${{ steps.set_shas.outputs.last_sha }} \
+            ${{ steps.set_shas.outputs.this_sha }} \
+            -- ${{ env.DOCS_BUILD_DIR }})
+
+          if [ -n "$DOCS_DIR_CHANGES" ] && [ -z "$DOCS_BUILD_DIR_CHANGES" ]; then
+            echo "changes found"
+            echo $DOCS_DIR_CHANGES
+            echo "docs_dir_changed=true" >> $GITHUB_OUTPUT
+            exit 0
+          fi
+          echo "docs_dir_changed=false" >> $GITHUB_OUTPUT
+          echo "no changes found"
+
+  gen_docs:
+    name: generate docs
+    runs-on: ubuntu-latest
+    needs: [check_gen]
+    if: |
+      needs.check_gen.outputs.cli_dir_changed == 'true'
+      || needs.check_gen.outputs.docs_dir_changed == 'true'
+
+    steps:
+      - name: "[DEBUG] print variables"
+        run: |
+          echo "env.DOCS_DIR: ${{ env.DOCS_DIR }}"
+          echo "github head_ref: ${{ github.head_ref }}"
+
+      - name: git checkout
+        uses: actions/checkout@v3
+        with:
+          ref: ${{ github.head_ref }}
+
+      - name: install python
+        uses: actions/setup-python@v4.3.0
+        with:
+          python-version: 3.8
+
+      - name: install dev requirements
+        run: |
+          python3 -m venv env
+          source env/bin/activate
+          python -m pip install --upgrade pip
+          pip install -r requirements.txt -r dev-requirements.txt
+
+      - name: generate docs
+        run: |
+          source env/bin/activate
+          cd ${{ env.DOCS_DIR }}
+
+          echo "cleaning existing docs"
+          make clean
+
+          echo "creating docs"
+          make html
+
+      - name: debug
+        run: |
+          echo ">>>>> status"
+          git status
+          echo ">>>>> remotes"
+          git remote -v
+          echo ">>>>> branch"
+          git branch -v
+          echo ">>>>> log"
+          git log --pretty=oneline | head -5
+
+      - name: commit docs
+        run: |
+          git config user.name 'Github Build Bot'
+          git config user.email 'buildbot@fishtownanalytics.com'
+          git commit -am "Add generated CLI API docs"
+          git push -u origin ${{ github.head_ref }}
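The `check_gen` job boils down to "did anything under a watched directory change between the last two commits". The same check can be sketched outside of Actions with plain git (a hypothetical helper, not part of the workflow):

```python
import subprocess

def dir_changed(path: str, old: str = "@~1", new: str = "@") -> bool:
    """True if `git diff` reports changed files under `path` between two commits."""
    result = subprocess.run(
        ["git", "diff", "--name-only", old, new, "--", path],
        capture_output=True, text=True, check=True,
    )
    return bool(result.stdout.strip())
```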
14  .github/workflows/main.yml  vendored
@@ -73,7 +73,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.7", "3.8", "3.9", "3.10"]
+        python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"]

     env:
       TOXENV: "unit"
@@ -101,7 +101,9 @@ jobs:
       - name: Get current date
         if: always()
         id: date
-        run: echo "::set-output name=date::$(date +'%Y-%m-%dT%H_%M_%S')" #no colons allowed for artifacts
+        run: |
+          CURRENT_DATE=$(date +'%Y-%m-%dT%H_%M_%S') # no colons allowed for artifacts
+          echo "date=$CURRENT_DATE" >> $GITHUB_OUTPUT

       - uses: actions/upload-artifact@v2
         if: always()
@@ -118,8 +120,8 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.7", "3.8", "3.9", "3.10"]
-        os: [ubuntu-latest]
+        python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"]
+        os: [ubuntu-20.04]
         include:
           - python-version: 3.8
             os: windows-latest
@@ -168,7 +170,9 @@ jobs:
      - name: Get current date
        if: always()
        id: date
-       run: echo "::set-output name=date::$(date +'%Y_%m_%dT%H_%M_%S')" #no colons allowed for artifacts
+       run: |
+         CURRENT_DATE=$(date +'%Y-%m-%dT%H_%M_%S') # no colons allowed for artifacts
+         echo "date=$CURRENT_DATE" >> $GITHUB_OUTPUT

      - uses: actions/upload-artifact@v2
        if: always()
109  .github/workflows/nightly-release.yml  vendored  Normal file
@@ -0,0 +1,109 @@
+# **what?**
+# Nightly releases to GitHub and PyPI. This workflow produces the following outcome:
+# - generate and validate data for night release (commit SHA, version number, release branch);
+# - pass data to release workflow;
+# - night release will be pushed to GitHub as a draft release;
+# - night build will be pushed to test PyPI;
+#
+# **why?**
+# Ensure an automated and tested release process for nightly builds
+#
+# **when?**
+# This workflow runs on schedule or can be run manually on demand.
+
+name: Nightly Test Release to GitHub and PyPI
+
+on:
+  workflow_dispatch: # for manual triggering
+  schedule:
+    - cron: 0 9 * * *
+
+permissions:
+  contents: write # this is the permission that allows creating a new release
+
+defaults:
+  run:
+    shell: bash
+
+env:
+  RELEASE_BRANCH: "main"
+
+jobs:
+  aggregate-release-data:
+    runs-on: ubuntu-latest
+
+    outputs:
+      commit_sha: ${{ steps.resolve-commit-sha.outputs.release_commit }}
+      version_number: ${{ steps.nightly-release-version.outputs.number }}
+      release_branch: ${{ steps.release-branch.outputs.name }}
+
+    steps:
+      - name: "Checkout ${{ github.repository }} Branch ${{ env.RELEASE_BRANCH }}"
+        uses: actions/checkout@v3
+        with:
+          ref: ${{ env.RELEASE_BRANCH }}
+
+      - name: "Resolve Commit To Release"
+        id: resolve-commit-sha
+        run: |
+          commit_sha=$(git rev-parse HEAD)
+          echo "release_commit=$commit_sha" >> $GITHUB_OUTPUT
+
+      - name: "Get Current Version Number"
+        id: version-number-sources
+        run: |
+          current_version=`awk -F"current_version = " '{print $2}' .bumpversion.cfg | tr '\n' ' '`
+          echo "current_version=$current_version" >> $GITHUB_OUTPUT
+
+      - name: "Audit Version And Parse Into Parts"
+        id: semver
+        uses: dbt-labs/actions/parse-semver@v1.1.0
+        with:
+          version: ${{ steps.version-number-sources.outputs.current_version }}
+
+      - name: "Get Current Date"
+        id: current-date
+        run: echo "date=$(date +'%m%d%Y')" >> $GITHUB_OUTPUT
+
+      - name: "Generate Nightly Release Version Number"
+        id: nightly-release-version
+        run: |
+          number="${{ steps.semver.outputs.version }}.dev${{ steps.current-date.outputs.date }}+nightly"
+          echo "number=$number" >> $GITHUB_OUTPUT
+
+      - name: "Audit Nightly Release Version And Parse Into Parts"
+        uses: dbt-labs/actions/parse-semver@v1.1.0
+        with:
+          version: ${{ steps.nightly-release-version.outputs.number }}
+
+      - name: "Set Release Branch"
+        id: release-branch
+        run: |
+          echo "name=${{ env.RELEASE_BRANCH }}" >> $GITHUB_OUTPUT

+  log-outputs-aggregate-release-data:
+    runs-on: ubuntu-latest
+    needs: [aggregate-release-data]
+
+    steps:
+      - name: "[DEBUG] Log Outputs"
+        run: |
+          echo commit_sha    : ${{ needs.aggregate-release-data.outputs.commit_sha }}
+          echo version_number: ${{ needs.aggregate-release-data.outputs.version_number }}
+          echo release_branch: ${{ needs.aggregate-release-data.outputs.release_branch }}
+
+  release-github-pypi:
+    needs: [aggregate-release-data]
+
+    uses: ./.github/workflows/release.yml
+    with:
+      sha: ${{ needs.aggregate-release-data.outputs.commit_sha }}
+      target_branch: ${{ needs.aggregate-release-data.outputs.release-branch }}
+      version_number: ${{ needs.aggregate-release-data.outputs.version_number }}
+      build_script_path: "scripts/build-dist.sh"
+      env_setup_script_path: "scripts/env-setup.sh"
+      s3_bucket_name: "core-team-artifacts"
+      package_test_command: "dbt --version"
+      test_run: true
+      nightly_release: true
+    secrets: inherit
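The nightly version number is just the current semver read from `.bumpversion.cfg` with a `.dev<MMDDYYYY>+nightly` suffix appended, which is exactly the shape the new `parse` regex in `.bumpversion.cfg` accepts. A sketch of the same construction (illustrative; the default argument is an example value):

```python
from datetime import date

def nightly_version(current: str = "1.5.0a1") -> str:
    """Append the dev/date/nightly suffix the workflow builds in bash."""
    return f"{current}.dev{date.today().strftime('%m%d%Y')}+nightly"

# e.g. "1.5.0a1.dev02092023+nightly"
```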
30  .github/workflows/release-branch-tests.yml  vendored
@@ -28,7 +28,33 @@ on:
 permissions: read-all

 jobs:
+  fetch-latest-branches:
+    runs-on: ubuntu-latest
+
+    outputs:
+      latest-branches: ${{ steps.get-latest-branches.outputs.repo-branches }}
+
+    steps:
+      - name: "Fetch dbt-core Latest Branches"
+        uses: dbt-labs/actions/fetch-repo-branches@v1.1.1
+        id: get-latest-branches
+        with:
+          repo_name: ${{ github.event.repository.name }}
+          organization: "dbt-labs"
+          pat: ${{ secrets.GITHUB_TOKEN }}
+          fetch_protected_branches_only: true
+          regex: "^1.[0-9]+.latest$"
+          perform_match_method: "match"
+          retries: 3
+
+      - name: "[ANNOTATION] ${{ github.event.repository.name }} - branches to test"
+        run: |
+          title="${{ github.event.repository.name }} - branches to test"
+          message="The workflow will run tests for the following branches of the ${{ github.event.repository.name }} repo: ${{ steps.get-latest-branches.outputs.repo-branches }}"
+          echo "::notice $title::$message"
+
   kick-off-ci:
+    needs: [fetch-latest-branches]
     name: Kick-off CI
     runs-on: ubuntu-latest

@@ -39,7 +65,9 @@ jobs:
       max-parallel: 1
       fail-fast: false
       matrix:
-        branch: [1.0.latest, 1.1.latest, 1.2.latest, 1.3.latest, main]
+        branch: ${{ fromJSON(needs.fetch-latest-branches.outputs.latest-branches) }}
+        include:
+          - branch: 'main'

     steps:
       - name: Call CI workflow for ${{ matrix.branch }} branch
12  .github/workflows/release-docker.yml  vendored
@@ -41,9 +41,9 @@ jobs:
         id: version
         run: |
           IFS="." read -r MAJOR MINOR PATCH <<< ${{ github.event.inputs.version_number }}
-          echo "::set-output name=major::$MAJOR"
-          echo "::set-output name=minor::$MINOR"
-          echo "::set-output name=patch::$PATCH"
+          echo "major=$MAJOR" >> $GITHUB_OUTPUT
+          echo "minor=$MINOR" >> $GITHUB_OUTPUT
+          echo "patch=$PATCH" >> $GITHUB_OUTPUT

       - name: Is pkg 'latest'
         id: latest
@@ -70,8 +70,10 @@ jobs:
       - name: Get docker build arg
         id: build_arg
         run: |
-          echo "::set-output name=build_arg_name::"$(echo ${{ github.event.inputs.package }} | sed 's/\-/_/g')
-          echo "::set-output name=build_arg_value::"$(echo ${{ github.event.inputs.package }} | sed 's/postgres/core/g')
+          BUILD_ARG_NAME=$(echo ${{ github.event.inputs.package }} | sed 's/\-/_/g')
+          BUILD_ARG_VALUE=$(echo ${{ github.event.inputs.package }} | sed 's/postgres/core/g')
+          echo "build_arg_name=$BUILD_ARG_NAME" >> $GITHUB_OUTPUT
+          echo "build_arg_value=$BUILD_ARG_VALUE" >> $GITHUB_OUTPUT

       - name: Log in to the GHCR
         uses: docker/login-action@v1
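The docker release step splits the version into major/minor/patch with bash's `IFS="." read`. The equivalent split, as a sketch assuming a plain `X.Y.Z` input (the helper name is illustrative):

```python
def split_semver(version_number: str) -> tuple[str, str, str]:
    """Split "X.Y.Z" the way `IFS="." read -r MAJOR MINOR PATCH` does."""
    major, minor, patch = version_number.split(".", 2)
    return major, minor, patch
```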
340
.github/workflows/release.yml
vendored
340
.github/workflows/release.yml
vendored
@@ -1,24 +1,110 @@
|
||||
# **what?**
|
||||
# Take the given commit, run unit tests specifically on that sha, build and
|
||||
# package it, and then release to GitHub and PyPi with that specific build
|
||||
|
||||
# Release workflow provides the following steps:
|
||||
# - checkout the given commit;
|
||||
# - validate version in sources and changelog file for given version;
|
||||
# - bump the version and generate a changelog if needed;
|
||||
# - merge all changes to the target branch if needed;
|
||||
# - run unit and integration tests against given commit;
|
||||
# - build and package that SHA;
|
||||
# - release it to GitHub and PyPI with that specific build;
|
||||
#
|
||||
# **why?**
|
||||
# Ensure an automated and tested release process
|
||||
|
||||
#
|
||||
# **when?**
|
||||
# This will only run manually with a given sha and version
|
||||
# This workflow can be run manually on demand or can be called by other workflows
|
||||
|
||||
name: Release to GitHub and PyPi
|
||||
name: Release to GitHub and PyPI
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
sha:
|
||||
description: 'The last commit sha in the release'
|
||||
required: true
|
||||
description: "The last commit sha in the release"
|
||||
type: string
|
||||
required: true
|
||||
target_branch:
|
||||
description: "The branch to release from"
|
||||
type: string
|
||||
required: true
|
||||
version_number:
|
||||
description: 'The release version number (i.e. 1.0.0b1)'
|
||||
required: true
|
||||
description: "The release version number (i.e. 1.0.0b1)"
|
||||
type: string
|
||||
required: true
|
||||
build_script_path:
|
||||
description: "Build script path"
|
||||
type: string
|
||||
default: "scripts/build-dist.sh"
|
||||
required: true
|
||||
env_setup_script_path:
|
||||
description: "Environment setup script path"
|
||||
type: string
|
||||
default: "scripts/env-setup.sh"
|
||||
required: false
|
||||
s3_bucket_name:
|
||||
description: "AWS S3 bucket name"
|
||||
type: string
|
||||
default: "core-team-artifacts"
|
||||
required: true
|
||||
package_test_command:
|
||||
description: "Package test command"
|
||||
type: string
|
||||
default: "dbt --version"
|
||||
required: true
|
||||
test_run:
|
||||
description: "Test run (Publish release as draft)"
|
||||
type: boolean
|
||||
default: true
|
||||
required: false
|
||||
nightly_release:
|
||||
description: "Nightly release to dev environment"
|
||||
type: boolean
|
||||
default: false
|
||||
required: false
|
||||
workflow_call:
|
||||
inputs:
|
||||
sha:
|
||||
description: "The last commit sha in the release"
|
||||
type: string
|
||||
required: true
|
||||
target_branch:
|
||||
description: "The branch to release from"
|
||||
type: string
|
||||
required: true
|
||||
version_number:
|
||||
description: "The release version number (i.e. 1.0.0b1)"
|
||||
type: string
|
||||
required: true
|
||||
build_script_path:
|
||||
description: "Build script path"
|
||||
type: string
|
||||
default: "scripts/build-dist.sh"
|
||||
required: true
|
||||
env_setup_script_path:
|
||||
description: "Environment setup script path"
|
||||
type: string
|
||||
default: "scripts/env-setup.sh"
|
||||
required: false
|
||||
s3_bucket_name:
|
||||
description: "AWS S3 bucket name"
|
||||
type: string
|
||||
default: "core-team-artifacts"
|
||||
required: true
|
||||
package_test_command:
|
||||
description: "Package test command"
|
||||
type: string
|
||||
default: "dbt --version"
|
||||
required: true
|
||||
test_run:
|
||||
description: "Test run (Publish release as draft)"
|
||||
type: boolean
|
||||
default: true
|
||||
required: false
|
||||
nightly_release:
|
||||
description: "Nightly release to dev environment"
|
||||
type: boolean
|
||||
default: false
|
||||
required: false
|
||||
|
||||
permissions:
|
||||
contents: write # this is the permission that allows creating a new release
|
||||
@@ -28,175 +114,117 @@ defaults:
|
||||
shell: bash
|
||||
|
||||
jobs:
|
||||
unit:
|
||||
name: Unit test
|
||||
|
||||
log-inputs:
|
||||
name: Log Inputs
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
env:
|
||||
TOXENV: "unit"
|
||||
|
||||
steps:
|
||||
- name: Check out the repository
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
persist-credentials: false
|
||||
ref: ${{ github.event.inputs.sha }}
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: 3.8
|
||||
|
||||
- name: Install python dependencies
|
||||
- name: "[DEBUG] Print Variables"
|
||||
run: |
|
||||
pip install --user --upgrade pip
|
||||
pip install tox
|
||||
pip --version
|
||||
tox --version
|
||||
echo The last commit sha in the release: ${{ inputs.sha }}
|
||||
echo The branch to release from: ${{ inputs.target_branch }}
|
||||
echo The release version number: ${{ inputs.version_number }}
|
||||
echo Build script path: ${{ inputs.build_script_path }}
|
||||
echo Environment setup script path: ${{ inputs.env_setup_script_path }}
|
||||
echo AWS S3 bucket name: ${{ inputs.s3_bucket_name }}
|
||||
echo Package test command: ${{ inputs.package_test_command }}
|
||||
echo Test run: ${{ inputs.test_run }}
|
||||
echo Nightly release: ${{ inputs.nightly_release }}
|
||||
|
||||
- name: Run tox
|
||||
run: tox
|
||||
bump-version-generate-changelog:
|
||||
name: Bump package version, Generate changelog
|
||||
|
||||
build:
|
||||
name: build packages
|
||||
uses: dbt-labs/dbt-release/.github/workflows/release-prep.yml@main
|
||||
|
||||
with:
|
||||
sha: ${{ inputs.sha }}
|
||||
version_number: ${{ inputs.version_number }}
|
||||
target_branch: ${{ inputs.target_branch }}
|
||||
env_setup_script_path: ${{ inputs.env_setup_script_path }}
|
||||
test_run: ${{ inputs.test_run }}
|
||||
nightly_release: ${{ inputs.nightly_release }}
|
||||
|
||||
secrets:
|
||||
FISHTOWN_BOT_PAT: ${{ secrets.FISHTOWN_BOT_PAT }}
|
||||
|
||||
  log-outputs-bump-version-generate-changelog:
    name: "[Log output] Bump package version, Generate changelog"
    if: ${{ !failure() && !cancelled() }}

    needs: [bump-version-generate-changelog]

    runs-on: ubuntu-latest

    steps:
      - name: Check out the repository
        uses: actions/checkout@v2
        with:
          persist-credentials: false
          ref: ${{ github.event.inputs.sha }}

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8

      - name: Install python dependencies
      - name: Print variables
        run: |
          pip install --user --upgrade pip
          pip install --upgrade setuptools wheel twine check-wheel-contents
          pip --version
          echo Final SHA : ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
          echo Changelog path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}

      - name: Build distributions
        run: ./scripts/build-dist.sh
  build-test-package:
    name: Build, Test, Package
    if: ${{ !failure() && !cancelled() }}
    needs: [bump-version-generate-changelog]

      - name: Show distributions
        run: ls -lh dist/

    uses: dbt-labs/dbt-release/.github/workflows/build.yml@main

      - name: Check distribution descriptions
        run: |
          twine check dist/*

    with:
      sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
      version_number: ${{ inputs.version_number }}
      changelog_path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}
      build_script_path: ${{ inputs.build_script_path }}
      s3_bucket_name: ${{ inputs.s3_bucket_name }}
      package_test_command: ${{ inputs.package_test_command }}
      test_run: ${{ inputs.test_run }}
      nightly_release: ${{ inputs.nightly_release }}

      - name: Check wheel contents
        run: |
          check-wheel-contents dist/*.whl --ignore W007,W008

      - uses: actions/upload-artifact@v2
        with:
          name: dist
          path: |
            dist/
            !dist/dbt-${{github.event.inputs.version_number}}.tar.gz
  test-build:
    name: verify packages

    needs: [build, unit]

    runs-on: ubuntu-latest

    steps:
      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8

      - name: Install python dependencies
        run: |
          pip install --user --upgrade pip
          pip install --upgrade wheel
          pip --version

      - uses: actions/download-artifact@v2
        with:
          name: dist
          path: dist/

      - name: Show distributions
        run: ls -lh dist/

      - name: Install wheel distributions
        run: |
          find ./dist/*.whl -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/

      - name: Check wheel distributions
        run: |
          dbt --version

      - name: Install source distributions
        run: |
          find ./dist/*.gz -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/

      - name: Check source distributions
        run: |
          dbt --version

    secrets:
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
  github-release:
    name: GitHub Release
    if: ${{ !failure() && !cancelled() }}

    needs: test-build
    needs: [bump-version-generate-changelog, build-test-package]

    runs-on: ubuntu-latest
    uses: dbt-labs/dbt-release/.github/workflows/github-release.yml@main

    steps:
      - uses: actions/download-artifact@v2
        with:
          name: dist
          path: '.'

      # Need to set an output variable because env variables can't be taken as input
      # This is needed for the next step with releasing to GitHub
      - name: Find release type
        id: release_type
        env:
          IS_PRERELEASE: ${{ contains(github.event.inputs.version_number, 'rc') || contains(github.event.inputs.version_number, 'b') }}
        run: |
          echo ::set-output name=isPrerelease::$IS_PRERELEASE

      - name: Creating GitHub Release
        uses: softprops/action-gh-release@v1
        with:
          name: dbt-core v${{github.event.inputs.version_number}}
          tag_name: v${{github.event.inputs.version_number}}
          prerelease: ${{ steps.release_type.outputs.isPrerelease }}
          target_commitish: ${{github.event.inputs.sha}}
          body: |
            [Release notes](https://github.com/dbt-labs/dbt-core/blob/main/CHANGELOG.md)
          files: |
            dbt_postgres-${{github.event.inputs.version_number}}-py3-none-any.whl
            dbt_core-${{github.event.inputs.version_number}}-py3-none-any.whl
            dbt-postgres-${{github.event.inputs.version_number}}.tar.gz
            dbt-core-${{github.event.inputs.version_number}}.tar.gz

    with:
      sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
      version_number: ${{ inputs.version_number }}
      changelog_path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}
      test_run: ${{ inputs.test_run }}
  pypi-release:
    name: Pypi release
    name: PyPI Release

    runs-on: ubuntu-latest
    needs: [github-release]

    needs: github-release
    uses: dbt-labs/dbt-release/.github/workflows/pypi-release.yml@main

    environment: PypiProd
    steps:
      - uses: actions/download-artifact@v2
        with:
          name: dist
          path: 'dist'

    with:
      version_number: ${{ inputs.version_number }}
      test_run: ${{ inputs.test_run }}

      - name: Publish distribution to PyPI
        uses: pypa/gh-action-pypi-publish@v1.4.2
        with:
          password: ${{ secrets.PYPI_API_TOKEN }}

    secrets:
      PYPI_API_TOKEN: ${{ secrets.PYPI_API_TOKEN }}
      TEST_PYPI_API_TOKEN: ${{ secrets.TEST_PYPI_API_TOKEN }}
  slack-notification:
    name: Slack Notification
    if: ${{ failure() && (!inputs.test_run || inputs.nightly_release) }}

    needs:
      [
        bump-version-generate-changelog,
        build-test-package,
        github-release,
        pypi-release,
      ]

    uses: dbt-labs/dbt-release/.github/workflows/slack-post-notification.yml@main
    with:
      status: "failure"

    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_DEV_CORE_ALERTS }}
11 .github/workflows/stale.yml vendored
@@ -9,13 +9,4 @@ permissions:

jobs:
  stale:
    runs-on: ubuntu-latest
    steps:
      # pinned at v4 (https://github.com/actions/stale/releases/tag/v4.0.0)
      - uses: actions/stale@cdf15f641adb27a71842045a94023bef6945e3aa
        with:
          stale-issue-message: "This issue has been marked as Stale because it has been open for 180 days with no activity. If you would like the issue to remain open, please remove the stale label or comment on the issue, or it will be closed in 7 days."
          stale-pr-message: "This PR has been marked as Stale because it has been open for 180 days with no activity. If you would like the PR to remain open, please remove the stale label or comment on the PR, or it will be closed in 7 days."
          close-issue-message: "Although we are closing this issue as stale, it's not gone forever. Issues can be reopened if there is renewed community interest; add a comment to notify the maintainers."
          # mark issues/PRs stale when they haven't seen activity in 180 days
          days-before-stale: 180
    uses: dbt-labs/actions/.github/workflows/stale-bot-matrix.yml@main
@@ -22,7 +22,7 @@ jobs:
  # run the performance measurements on the current or default branch
  test-schema:
    name: Test Log Schema
    runs-on: ubuntu-latest
    runs-on: ubuntu-20.04
    env:
      # turns warnings into errors
      RUSTFLAGS: "-D warnings"
2 .github/workflows/version-bump.yml vendored
@@ -65,7 +65,7 @@ jobs:
      - name: Set branch value
        id: variables
        run: |
          echo "::set-output name=BRANCH_NAME::prep-release/${{ github.event.inputs.version_number }}_$GITHUB_RUN_ID"
          echo "BRANCH_NAME=prep-release/${{ github.event.inputs.version_number }}_$GITHUB_RUN_ID" >> $GITHUB_OUTPUT

      - name: Create PR branch
        run: |
2 .gitignore vendored
@@ -11,6 +11,7 @@ __pycache__/
env*/
dbt_env/
build/
!core/dbt/docs/build
develop-eggs/
dist/
downloads/
@@ -50,6 +51,7 @@ coverage.xml
*,cover
.hypothesis/
test.env
makefile.test.env
*.pytest_cache/
@@ -2,7 +2,7 @@
# Eventually the hooks described here will be run as tests before merging each PR.

# TODO: remove global exclusion of tests when testing overhaul is complete
exclude: ^test/
exclude: ^(test/|core/dbt/docs/build/)

# Force all unspecified python hooks to run python 3.8
default_language_version:
@@ -5,12 +5,12 @@
- "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
- Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)

## Previous Releases

For information on prior major and minor releases, see their changelogs:

* [1.4](https://github.com/dbt-labs/dbt-core/blob/1.4.latest/CHANGELOG.md)
* [1.3](https://github.com/dbt-labs/dbt-core/blob/1.3.latest/CHANGELOG.md)
* [1.2](https://github.com/dbt-labs/dbt-core/blob/1.2.latest/CHANGELOG.md)
* [1.1](https://github.com/dbt-labs/dbt-core/blob/1.1.latest/CHANGELOG.md)
@@ -56,7 +56,7 @@ There are some tools that will be helpful to you in developing locally. While th

These are the tools used in `dbt-core` development and testing:

- [`tox`](https://tox.readthedocs.io/en/latest/) to manage virtualenvs across python versions. We currently target the latest patch releases for Python 3.7, 3.8, 3.9, and 3.10
- [`tox`](https://tox.readthedocs.io/en/latest/) to manage virtualenvs across python versions. We currently target the latest patch releases for Python 3.7, 3.8, 3.9, 3.10, and 3.11
- [`pytest`](https://docs.pytest.org/en/latest/) to define, discover, and run tests
- [`flake8`](https://flake8.pycqa.org/en/latest/) for code linting
- [`black`](https://github.com/psf/black) for code formatting
@@ -96,12 +96,15 @@ brew install postgresql

### Installation

First make sure that you set up your `virtualenv` as described in [Setting up an environment](#setting-up-an-environment). Also ensure you have the latest version of pip installed with `pip install --upgrade pip`. Next, install `dbt-core` (and its dependencies) with:
First make sure that you set up your `virtualenv` as described in [Setting up an environment](#setting-up-an-environment). Also ensure you have the latest version of pip installed with `pip install --upgrade pip`. Next, install `dbt-core` (and its dependencies):

```sh
make dev
# or
```
or, alternatively:
```sh
pip install -r dev-requirements.txt -r editable-requirements.txt
pre-commit install
```

When installed in this way, any changes you make to your local copy of the source code will be reflected immediately in your next `dbt` run.
@@ -160,7 +163,7 @@ suites.

#### `tox`

[`tox`](https://tox.readthedocs.io/en/latest/) takes care of managing virtualenvs and installing dependencies in order to run tests. You can also run tests in parallel; for example, you can run unit tests for Python 3.7, Python 3.8, Python 3.9, and Python 3.10 in parallel with `tox -p`. Also, you can run unit tests for specific python versions with `tox -e py37`. The configuration for these tests is located in `tox.ini`.
[`tox`](https://tox.readthedocs.io/en/latest/) takes care of managing virtualenvs and installing dependencies in order to run tests. You can also run tests in parallel; for example, you can run unit tests for Python 3.7, Python 3.8, Python 3.9, Python 3.10, and Python 3.11 in parallel with `tox -p`. Also, you can run unit tests for specific python versions with `tox -e py37`. The configuration for these tests is located in `tox.ini`.
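In practice, the invocations described above look like this (a minimal sketch; the exact environment names are defined in `tox.ini`):

```sh
tox -p                  # run unit tests for all configured Python versions in parallel
tox -e py37             # run unit tests against a single Python version
tox -e py-integration   # integration test env, as used by the Makefile targets further down
```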
#### `pytest`

@@ -201,13 +204,21 @@ Here are some general rules for adding tests:
* Sometimes flake8 complains about lines that are actually fine, in which case you can put a comment on the line such as: # noqa or # noqa: ANNN, where ANNN is the error code that flake8 issues.
* To collect output for `CProfile`, run dbt with the `-r` option and the name of an output file, e.g. `dbt -r dbt.cprof run`. If you just want to profile parsing, you can do: `dbt -r dbt.cprof parse`. Install `snakeviz` with pip to view the output: run `snakeviz dbt.cprof` and the output will be rendered in a browser window.
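Putting the profiling steps above together, a typical session might look like this (a sketch; `dbt.cprof` is just an arbitrary output filename):

```sh
dbt -r dbt.cprof parse   # profile parsing only; -r writes CProfile output to the named file
pip install snakeviz     # viewer for CProfile output
snakeviz dbt.cprof       # renders the profile in a browser window
```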
## Adding a CHANGELOG Entry
## Adding or modifying a CHANGELOG Entry

We use [changie](https://changie.dev) to generate `CHANGELOG` entries. **Note:** Do not edit the `CHANGELOG.md` directly. Your modifications will be lost.

Follow the steps to [install `changie`](https://changie.dev/guide/installation/) for your system.

Once changie is installed and your PR is created, simply run `changie new` and changie will walk you through the process of creating a changelog entry. Commit the file that's created and your changelog entry is complete!
Once changie is installed and your PR is created for a new feature, simply run the following command and changie will walk you through the process of creating a changelog entry:

```shell
changie new
```

Commit the file that's created and your changelog entry is complete!

If you are contributing to a feature already in progress, you will modify the changie yaml file in dbt/.changes/unreleased/ related to your change. If you need help finding this file, please ask within the discussion for the pull request!

You don't need to worry about which `dbt-core` version your change will go into. Just create the changelog entry with `changie`, and open your PR against the `main` branch. All merged changes will be included in the next minor version of `dbt-core`. The Core maintainers _may_ choose to "backport" specific changes in order to patch older minor versions. In that case, a maintainer will take care of that backport after merging your PR, before releasing the new version of `dbt-core`.
@@ -3,7 +3,7 @@
# See `/docker` for a generic and production-ready docker file
##

FROM ubuntu:22.04
FROM ubuntu:23.04

ENV DEBIAN_FRONTEND noninteractive

@@ -49,6 +49,9 @@ RUN apt-get update \
    python3.10 \
    python3.10-dev \
    python3.10-venv \
    python3.11 \
    python3.11-dev \
    python3.11-venv \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
43 Makefile
@@ -6,24 +6,37 @@ ifeq ($(USE_DOCKER),true)
  DOCKER_CMD := docker-compose run --rm test
endif

LOGS_DIR := ./logs
#
# To override CI_FLAGS, create a file at this repo's root dir named `makefile.test.env`. Fill it
# with any ENV_VAR overrides required by your test environment, e.g.
# DBT_TEST_USER_1=user
# LOG_DIR="dir with a space in it"
#
# Warning: Restrict each line to one variable only.
#
ifeq (./makefile.test.env,$(wildcard ./makefile.test.env))
  include ./makefile.test.env
endif
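For example, a local override file could look like this (a hypothetical sketch; the user name is invented, and `make integration` is one of the targets defined later in this Makefile):

```sh
# makefile.test.env is included automatically by the ifeq/include block above when it exists
cat > makefile.test.env <<'EOF'
DBT_TEST_USER_1=my_local_user
LOG_DIR="dir with a space in it"
EOF
make integration   # the $(if ...) defaults in CI_FLAGS below now defer to these values
```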
# Optional flag to invoke tests using our CI env.
# But we always want these active for structured
# log testing.
CI_FLAGS =\
  DBT_TEST_USER_1=dbt_test_user_1\
  DBT_TEST_USER_2=dbt_test_user_2\
  DBT_TEST_USER_3=dbt_test_user_3\
  RUSTFLAGS="-D warnings"\
  LOG_DIR=./logs\
  DBT_LOG_FORMAT=json
  DBT_TEST_USER_1=$(if $(DBT_TEST_USER_1),$(DBT_TEST_USER_1),dbt_test_user_1)\
  DBT_TEST_USER_2=$(if $(DBT_TEST_USER_2),$(DBT_TEST_USER_2),dbt_test_user_2)\
  DBT_TEST_USER_3=$(if $(DBT_TEST_USER_3),$(DBT_TEST_USER_3),dbt_test_user_3)\
  RUSTFLAGS=$(if $(RUSTFLAGS),$(RUSTFLAGS),"-D warnings")\
  LOG_DIR=$(if $(LOG_DIR),$(LOG_DIR),./logs)\
  DBT_LOG_FORMAT=$(if $(DBT_LOG_FORMAT),$(DBT_LOG_FORMAT),json)

.PHONY: dev
dev: ## Installs dbt-* packages in develop mode along with development dependencies.

.PHONY: dev_req
dev_req: ## Installs dbt-* packages in develop mode along with only development dependencies.
	@\
	pip install -r dev-requirements.txt -r editable-requirements.txt

.PHONY: dev
dev: dev_req ## Installs dbt-* packages in develop mode along with development dependencies and pre-commit.
	@\
	pre-commit install

.PHONY: mypy
mypy: .env ## Runs mypy against staged changes for static type checking.
	@\
@@ -61,7 +74,7 @@ test: .env ## Runs unit tests with py and code checks against staged changes.
.PHONY: integration
integration: .env ## Runs postgres integration tests with py-integration
	@\
	$(if $(USE_CI_FLAGS), $(CI_FLAGS)) $(DOCKER_CMD) tox -e py-integration -- -nauto
	$(CI_FLAGS) $(DOCKER_CMD) tox -e py-integration -- -nauto

.PHONY: integration-fail-fast
integration-fail-fast: .env ## Runs postgres integration tests with py-integration in "fail fast" mode.
@@ -71,9 +84,9 @@ integration-fail-fast: .env ## Runs postgres integration tests with py-integrati
.PHONY: interop
interop: clean
	@\
	mkdir $(LOGS_DIR) && \
	mkdir $(LOG_DIR) && \
	$(CI_FLAGS) $(DOCKER_CMD) tox -e py-integration -- -nauto && \
	LOG_DIR=$(LOGS_DIR) cargo run --manifest-path test/interop/log_parsing/Cargo.toml
	LOG_DIR=$(LOG_DIR) cargo run --manifest-path test/interop/log_parsing/Cargo.toml

.PHONY: setup-db
setup-db: ## Setup Postgres database with docker-compose for system testing.
@@ -21,7 +21,7 @@ These select statements, or "models", form a dbt project. Models frequently buil

## Getting started

- [Install dbt](https://docs.getdbt.com/docs/installation)
- [Install dbt](https://docs.getdbt.com/docs/get-started/installation)
- Read the [introduction](https://docs.getdbt.com/docs/introduction/) and [viewpoint](https://docs.getdbt.com/docs/about/viewpoint/)

## Join the dbt Community
@@ -2,50 +2,59 @@

## The following are individual files in this directory.

### deprecations.py

### flags.py

### main.py

### tracking.py

### version.py

### lib.py

### node_types.py

### helper_types.py

### links.py

### semver.py

### ui.py

### compilation.py

### constants.py

### dataclass_schema.py

### deprecations.py

### exceptions.py

### flags.py

### helper_types.py

### hooks.py

### lib.py

### links.py

### logger.py

### main.py

### node_types.py

### profiler.py

### selected_resources.py

### semver.py

### tracking.py

### ui.py

### utils.py

### version.py

## The subdirectories will be documented in a README in the subdirectory
* config
* include
* adapters
* context
* deps
* graph
* task
* cli
* clients
* config
* context
* contracts
* deps
* docs
* events
* graph
* include
* parser
* task
* tests
@@ -2,7 +2,7 @@ from dataclasses import dataclass
import re
from typing import Dict, ClassVar, Any, Optional

from dbt.exceptions import RuntimeException
from dbt.exceptions import DbtRuntimeError


@dataclass
@@ -85,7 +85,7 @@ class Column:

    def string_size(self) -> int:
        if not self.is_string():
            raise RuntimeException("Called string_size() on non-string field!")
            raise DbtRuntimeError("Called string_size() on non-string field!")

        if self.dtype == "text" or self.char_size is None:
            # char_size should never be None. Handle it reasonably just in case
@@ -124,7 +124,7 @@ class Column:
    def from_description(cls, name: str, raw_data_type: str) -> "Column":
        match = re.match(r"([^(]+)(\([^)]+\))?", raw_data_type)
        if match is None:
            raise RuntimeException(f'Could not interpret data type "{raw_data_type}"')
            raise DbtRuntimeError(f'Could not interpret data type "{raw_data_type}"')
        data_type, size_info = match.groups()
        char_size = None
        numeric_precision = None
@@ -137,7 +137,7 @@ class Column:
                try:
                    char_size = int(parts[0])
                except ValueError:
                    raise RuntimeException(
                    raise DbtRuntimeError(
                        f'Could not interpret data_type "{raw_data_type}": '
                        f'could not convert "{parts[0]}" to an integer'
                    )
@@ -145,14 +145,14 @@ class Column:
                try:
                    numeric_precision = int(parts[0])
                except ValueError:
                    raise RuntimeException(
                    raise DbtRuntimeError(
                        f'Could not interpret data_type "{raw_data_type}": '
                        f'could not convert "{parts[0]}" to an integer'
                    )
                try:
                    numeric_scale = int(parts[1])
                except ValueError:
                    raise RuntimeException(
                    raise DbtRuntimeError(
                        f'Could not interpret data_type "{raw_data_type}": '
                        f'could not convert "{parts[1]}" to an integer'
                    )
@@ -48,6 +48,7 @@ from dbt.events.types import (
    Rollback,
    RollbackFailed,
)
from dbt.events.contextvars import get_node_info
from dbt import flags
from dbt.utils import cast_to_str
@@ -90,13 +91,13 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
        key = self.get_thread_identifier()
        with self.lock:
            if key not in self.thread_connections:
                raise dbt.exceptions.InvalidConnectionException(key, list(self.thread_connections))
                raise dbt.exceptions.InvalidConnectionError(key, list(self.thread_connections))
            return self.thread_connections[key]

    def set_thread_connection(self, conn: Connection) -> None:
        key = self.get_thread_identifier()
        if key in self.thread_connections:
            raise dbt.exceptions.InternalException(
            raise dbt.exceptions.DbtInternalError(
                "In set_thread_connection, existing connection exists for {}"
            )
        self.thread_connections[key] = conn
@@ -136,47 +137,49 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
        :return: A context manager that handles exceptions raised by the
            underlying database.
        """
        raise dbt.exceptions.NotImplementedException(
        raise dbt.exceptions.NotImplementedError(
            "`exception_handler` is not implemented for this adapter!"
        )

    def set_connection_name(self, name: Optional[str] = None) -> Connection:
        conn_name: str
        if name is None:
            # if a name isn't specified, we'll re-use a single handle
            # named 'master'
            conn_name = "master"
        else:
            if not isinstance(name, str):
                raise dbt.exceptions.CompilerException(
                    f"For connection name, got {name} - not a string!"
                )
            assert isinstance(name, str)
            conn_name = name
        """Called by 'acquire_connection' in BaseAdapter, which is called by
        'connection_named', called by 'connection_for(node)'.
        Creates a connection for this thread if one doesn't already
        exist, and will rename an existing connection."""

        conn_name: str = "master" if name is None else name

        # Get a connection for this thread
        conn = self.get_if_exists()

        if conn and conn.name == conn_name and conn.state == "open":
            # Found a connection and nothing to do, so just return it
            return conn

        if conn is None:
            # Create a new connection
            conn = Connection(
                type=Identifier(self.TYPE),
                name=None,
                name=conn_name,
                state=ConnectionState.INIT,
                transaction_open=False,
                handle=None,
                credentials=self.profile.credentials,
            )
            self.set_thread_connection(conn)

        if conn.name == conn_name and conn.state == "open":
            return conn

        fire_event(NewConnection(conn_name=conn_name, conn_type=self.TYPE))

        if conn.state == "open":
            fire_event(ConnectionReused(conn_name=conn_name))
        else:
            conn.handle = LazyHandle(self.open)
            # Add the connection to thread_connections for this thread
            self.set_thread_connection(conn)
            fire_event(
                NewConnection(conn_name=conn_name, conn_type=self.TYPE, node_info=get_node_info())
            )
        else:  # existing connection either wasn't open or didn't have the right name
            if conn.state != "open":
                conn.handle = LazyHandle(self.open)
            if conn.name != conn_name:
                orig_conn_name: str = conn.name or ""
                conn.name = conn_name
                fire_event(ConnectionReused(orig_conn_name=orig_conn_name, conn_name=conn_name))

        conn.name = conn_name
        return conn
    @classmethod
@@ -208,7 +211,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
            connect should trigger a retry.
        :type retryable_exceptions: Iterable[Type[Exception]]
        :param int retry_limit: How many times to retry the call to connect. If this limit
            is exceeded before a successful call, a FailedToConnectException will be raised.
            is exceeded before a successful call, a FailedToConnectError will be raised.
            Must be non-negative.
        :param retry_timeout: Time to wait between attempts to connect. Can also take a
            Callable that takes the number of attempts so far, beginning at 0, and returns an int
@@ -217,14 +220,14 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
        :param int _attempts: Parameter used to keep track of the number of attempts in calling the
            connect function across recursive calls. Passed as an argument to retry_timeout if it
            is a Callable. This parameter should not be set by the initial caller.
        :raises dbt.exceptions.FailedToConnectException: Upon exhausting all retry attempts without
        :raises dbt.exceptions.FailedToConnectError: Upon exhausting all retry attempts without
            successfully acquiring a handle.
        :return: The given connection with its appropriate state and handle attributes set
            depending on whether we successfully acquired a handle or not.
        """
        timeout = retry_timeout(_attempts) if callable(retry_timeout) else retry_timeout
        if timeout < 0:
            raise dbt.exceptions.FailedToConnectException(
            raise dbt.exceptions.FailedToConnectError(
                "retry_timeout cannot be negative or return a negative time."
            )

@@ -232,7 +235,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
            # This guard is not perfect; others may add to the recursion limit (e.g. built-ins).
            connection.handle = None
            connection.state = ConnectionState.FAIL
            raise dbt.exceptions.FailedToConnectException("retry_limit cannot be negative")
            raise dbt.exceptions.FailedToConnectError("retry_limit cannot be negative")

        try:
            connection.handle = connect()
@@ -243,7 +246,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
            if retry_limit <= 0:
                connection.handle = None
                connection.state = ConnectionState.FAIL
                raise dbt.exceptions.FailedToConnectException(str(e))
                raise dbt.exceptions.FailedToConnectError(str(e))

        logger.debug(
            f"Got a retryable error when attempting to open a {cls.TYPE} connection.\n"
@@ -265,12 +268,12 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
        except Exception as e:
            connection.handle = None
            connection.state = ConnectionState.FAIL
            raise dbt.exceptions.FailedToConnectException(str(e))
            raise dbt.exceptions.FailedToConnectError(str(e))
    @abc.abstractmethod
    def cancel_open(self) -> Optional[List[str]]:
        """Cancel all open connections on the adapter. (passable)"""
        raise dbt.exceptions.NotImplementedException(
        raise dbt.exceptions.NotImplementedError(
            "`cancel_open` is not implemented for this adapter!"
        )

@@ -285,7 +288,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
        This should be thread-safe, or hold the lock if necessary. The given
        connection should not be in either in_use or available.
        """
        raise dbt.exceptions.NotImplementedException("`open` is not implemented for this adapter!")
        raise dbt.exceptions.NotImplementedError("`open` is not implemented for this adapter!")

    def release(self) -> None:
        with self.lock:
@@ -317,16 +320,12 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
    @abc.abstractmethod
    def begin(self) -> None:
        """Begin a transaction. (passable)"""
        raise dbt.exceptions.NotImplementedException(
            "`begin` is not implemented for this adapter!"
        )
        raise dbt.exceptions.NotImplementedError("`begin` is not implemented for this adapter!")

    @abc.abstractmethod
    def commit(self) -> None:
        """Commit a transaction. (passable)"""
        raise dbt.exceptions.NotImplementedException(
            "`commit` is not implemented for this adapter!"
        )
        raise dbt.exceptions.NotImplementedError("`commit` is not implemented for this adapter!")

    @classmethod
    def _rollback_handle(cls, connection: Connection) -> None:
@@ -336,7 +335,9 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
        except Exception:
            fire_event(
                RollbackFailed(
                    conn_name=cast_to_str(connection.name), exc_info=traceback.format_exc()
                    conn_name=cast_to_str(connection.name),
                    exc_info=traceback.format_exc(),
                    node_info=get_node_info(),
                )
            )
@@ -345,21 +346,27 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
        """Perform the actual close operation."""
        # On windows, sometimes connection handles don't have a close() attr.
        if hasattr(connection.handle, "close"):
            fire_event(ConnectionClosed(conn_name=cast_to_str(connection.name)))
            fire_event(
                ConnectionClosed(conn_name=cast_to_str(connection.name), node_info=get_node_info())
            )
            connection.handle.close()
        else:
            fire_event(ConnectionLeftOpen(conn_name=cast_to_str(connection.name)))
            fire_event(
                ConnectionLeftOpen(
                    conn_name=cast_to_str(connection.name), node_info=get_node_info()
                )
            )

    @classmethod
    def _rollback(cls, connection: Connection) -> None:
        """Roll back the given connection."""
        if connection.transaction_open is False:
            raise dbt.exceptions.InternalException(
            raise dbt.exceptions.DbtInternalError(
                f"Tried to rollback transaction on connection "
                f'"{connection.name}", but it does not have one open!'
            )

        fire_event(Rollback(conn_name=cast_to_str(connection.name)))
        fire_event(Rollback(conn_name=cast_to_str(connection.name), node_info=get_node_info()))
        cls._rollback_handle(connection)

        connection.transaction_open = False
@@ -371,7 +378,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
            return connection

        if connection.transaction_open and connection.handle:
            fire_event(Rollback(conn_name=cast_to_str(connection.name)))
            fire_event(Rollback(conn_name=cast_to_str(connection.name), node_info=get_node_info()))
            cls._rollback_handle(connection)
        connection.transaction_open = False

@@ -404,6 +411,4 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
        :return: A tuple of the query status and results (empty if fetch=False).
        :rtype: Tuple[AdapterResponse, agate.Table]
        """
        raise dbt.exceptions.NotImplementedException(
            "`execute` is not implemented for this adapter!"
        )
        raise dbt.exceptions.NotImplementedError("`execute` is not implemented for this adapter!")
@@ -15,21 +15,26 @@ from typing import (
    List,
    Mapping,
    Iterator,
    Union,
    Set,
)

import agate
import pytz

from dbt.exceptions import (
    raise_database_error,
    raise_compiler_error,
    invalid_type_error,
    get_relation_returned_multiple_results,
    InternalException,
    NotImplementedException,
    RuntimeException,
    DbtInternalError,
    MacroArgTypeError,
    MacroResultError,
    QuoteConfigTypeError,
    NotImplementedError,
    NullRelationCacheAttemptedError,
    NullRelationDropAttemptedError,
    RelationReturnedMultipleResultsError,
    RenameToNoneAttemptedError,
    DbtRuntimeError,
    SnapshotTargetIncompleteError,
    SnapshotTargetNotSnapshotTableError,
    UnexpectedNullError,
    UnexpectedNonTimestampError,
)

from dbt.adapters.protocol import (
@@ -38,9 +43,8 @@ from dbt.adapters.protocol import (
)
from dbt.clients.agate_helper import empty_table, merge_tables, table_from_rows
from dbt.clients.jinja import MacroGenerator
from dbt.contracts.graph.compiled import CompileResultNode, CompiledSeedNode
from dbt.contracts.graph.manifest import Manifest, MacroManifest
from dbt.contracts.graph.parsed import ParsedSeedNode
from dbt.contracts.graph.nodes import ResultNode
from dbt.events.functions import fire_event, warn_or_error
from dbt.events.types import (
    CacheMiss,
@@ -49,7 +53,7 @@ from dbt.events.types import (
    CodeExecutionStatus,
    CatalogGenerationError,
)
from dbt.utils import filter_null_values, executor, cast_to_str
from dbt.utils import filter_null_values, executor, cast_to_str, AttrDict

from dbt.adapters.base.connections import Connection, AdapterResponse
from dbt.adapters.base.meta import AdapterMeta, available
@@ -64,16 +68,13 @@ from dbt.adapters.base import Credentials
from dbt.adapters.cache import RelationsCache, _make_ref_key_msg


SeedModel = Union[ParsedSeedNode, CompiledSeedNode]


GET_CATALOG_MACRO_NAME = "get_catalog"
FRESHNESS_MACRO_NAME = "collect_freshness"


def _expect_row_value(key: str, row: agate.Row):
    if key not in row.keys():
        raise InternalException(
        raise DbtInternalError(
            'Got a row without "{}" column, columns: {}'.format(key, row.keys())
        )
    return row[key]
@@ -102,18 +103,10 @@ def _utc(dt: Optional[datetime], source: BaseRelation, field_name: str) -> datet
    assume the datetime is already for UTC and add the timezone.
    """
    if dt is None:
        raise raise_database_error(
            "Expected a non-null value when querying field '{}' of table "
            " {} but received value 'null' instead".format(field_name, source)
        )
        raise UnexpectedNullError(field_name, source)

    elif not hasattr(dt, "tzinfo"):
        raise raise_database_error(
            "Expected a timestamp value when querying field '{}' of table "
            "{} but received value of type '{}' instead".format(
                field_name, source, type(dt).__name__
            )
        )
        raise UnexpectedNonTimestampError(field_name, source, dt)

    elif dt.tzinfo:
        return dt.astimezone(pytz.UTC)
@@ -243,9 +236,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        return conn.name

    @contextmanager
    def connection_named(
        self, name: str, node: Optional[CompileResultNode] = None
    ) -> Iterator[None]:
    def connection_named(self, name: str, node: Optional[ResultNode] = None) -> Iterator[None]:
        try:
            if self.connections.query_header is not None:
                self.connections.query_header.set(name, node)
@@ -257,7 +248,7 @@ class BaseAdapter(metaclass=AdapterMeta):
            self.connections.query_header.reset()

    @contextmanager
    def connection_for(self, node: CompileResultNode) -> Iterator[None]:
    def connection_for(self, node: ResultNode) -> Iterator[None]:
        with self.connection_named(node.unique_id, node):
            yield
@@ -372,7 +363,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        lowercase strings.
        """
        info_schema_name_map = SchemaSearchMap()
        nodes: Iterator[CompileResultNode] = chain(
        nodes: Iterator[ResultNode] = chain(
            [
                node
                for node in manifest.nodes.values()
@@ -441,7 +432,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        """Cache a new relation in dbt. It will show up in `list relations`."""
        if relation is None:
            name = self.nice_connection_name()
            raise_compiler_error("Attempted to cache a null relation for {}".format(name))
            raise NullRelationCacheAttemptedError(name)
        self.cache.add(relation)
        # so jinja doesn't render things
        return ""
@@ -453,7 +444,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        """
        if relation is None:
            name = self.nice_connection_name()
            raise_compiler_error("Attempted to drop a null relation for {}".format(name))
            raise NullRelationDropAttemptedError(name)
        self.cache.drop(relation)
        return ""
@@ -470,9 +461,7 @@ class BaseAdapter(metaclass=AdapterMeta):
            name = self.nice_connection_name()
            src_name = _relation_name(from_relation)
            dst_name = _relation_name(to_relation)
            raise_compiler_error(
                "Attempted to rename {} to {} for {}".format(src_name, dst_name, name)
            )
            raise RenameToNoneAttemptedError(src_name, dst_name, name)

        self.cache.rename(from_relation, to_relation)
        return ""
@@ -484,12 +473,12 @@ class BaseAdapter(metaclass=AdapterMeta):
    @abc.abstractmethod
    def date_function(cls) -> str:
        """Get the date function used by this adapter's database."""
        raise NotImplementedException("`date_function` is not implemented for this adapter!")
        raise NotImplementedError("`date_function` is not implemented for this adapter!")

    @classmethod
    @abc.abstractmethod
    def is_cancelable(cls) -> bool:
        raise NotImplementedException("`is_cancelable` is not implemented for this adapter!")
        raise NotImplementedError("`is_cancelable` is not implemented for this adapter!")
    ###
    # Abstract methods about schemas
@@ -497,7 +486,7 @@ class BaseAdapter(metaclass=AdapterMeta):
    @abc.abstractmethod
    def list_schemas(self, database: str) -> List[str]:
        """Get a list of existing schemas in database"""
        raise NotImplementedException("`list_schemas` is not implemented for this adapter!")
        raise NotImplementedError("`list_schemas` is not implemented for this adapter!")

    @available.parse(lambda *a, **k: False)
    def check_schema_exists(self, database: str, schema: str) -> bool:
@@ -520,13 +509,13 @@ class BaseAdapter(metaclass=AdapterMeta):

        *Implementors must call self.cache.drop() to preserve cache state!*
        """
        raise NotImplementedException("`drop_relation` is not implemented for this adapter!")
        raise NotImplementedError("`drop_relation` is not implemented for this adapter!")

    @abc.abstractmethod
    @available.parse_none
    def truncate_relation(self, relation: BaseRelation) -> None:
        """Truncate the given relation."""
        raise NotImplementedException("`truncate_relation` is not implemented for this adapter!")
        raise NotImplementedError("`truncate_relation` is not implemented for this adapter!")

    @abc.abstractmethod
    @available.parse_none
@@ -535,15 +524,13 @@ class BaseAdapter(metaclass=AdapterMeta):

        Implementors must call self.cache.rename() to preserve cache state.
        """
        raise NotImplementedException("`rename_relation` is not implemented for this adapter!")
        raise NotImplementedError("`rename_relation` is not implemented for this adapter!")

    @abc.abstractmethod
    @available.parse_list
    def get_columns_in_relation(self, relation: BaseRelation) -> List[BaseColumn]:
        """Get a list of the columns in the given Relation."""
        raise NotImplementedException(
            "`get_columns_in_relation` is not implemented for this adapter!"
        )
        raise NotImplementedError("`get_columns_in_relation` is not implemented for this adapter!")

    @available.deprecated("get_columns_in_relation", lambda *a, **k: [])
    def get_columns_in_table(self, schema: str, identifier: str) -> List[BaseColumn]:
@@ -565,7 +552,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        :param self.Relation current: A relation that currently exists in the
            database with columns of unspecified types.
        """
        raise NotImplementedException(
        raise NotImplementedError(
            "`expand_target_column_types` is not implemented for this adapter!"
        )

@@ -580,7 +567,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        :return: The relations in schema
        :rtype: List[self.Relation]
        """
        raise NotImplementedException(
        raise NotImplementedError(
            "`list_relations_without_caching` is not implemented for this adapter!"
        )
@@ -622,7 +609,7 @@ class BaseAdapter(metaclass=AdapterMeta):
            to_relation.
        """
        if not isinstance(from_relation, self.Relation):
            invalid_type_error(
            raise MacroArgTypeError(
                method_name="get_missing_columns",
                arg_name="from_relation",
                got_value=from_relation,
@@ -630,7 +617,7 @@ class BaseAdapter(metaclass=AdapterMeta):
            )

        if not isinstance(to_relation, self.Relation):
            invalid_type_error(
            raise MacroArgTypeError(
                method_name="get_missing_columns",
                arg_name="to_relation",
                got_value=to_relation,
@@ -651,11 +638,11 @@ class BaseAdapter(metaclass=AdapterMeta):
            expected columns.

        :param Relation relation: The relation to check
        :raises CompilationException: If the columns are
        :raises InvalidMacroArgType: If the columns are
            incorrect.
        """
        if not isinstance(relation, self.Relation):
            invalid_type_error(
            raise MacroArgTypeError(
                method_name="valid_snapshot_target",
                arg_name="relation",
                got_value=relation,
@@ -676,24 +663,16 @@ class BaseAdapter(metaclass=AdapterMeta):

        if missing:
            if extra:
                msg = (
                    'Snapshot target has ("{}") but not ("{}") - is it an '
                    "unmigrated previous version archive?".format(
                        '", "'.join(extra), '", "'.join(missing)
                    )
                )
                raise SnapshotTargetIncompleteError(extra, missing)
            else:
                msg = 'Snapshot target is not a snapshot table (missing "{}")'.format(
                    '", "'.join(missing)
                )
                raise_compiler_error(msg)
                raise SnapshotTargetNotSnapshotTableError(missing)

    @available.parse_none
    def expand_target_column_types(
        self, from_relation: BaseRelation, to_relation: BaseRelation
    ) -> None:
        if not isinstance(from_relation, self.Relation):
            invalid_type_error(
            raise MacroArgTypeError(
                method_name="expand_target_column_types",
                arg_name="from_relation",
                got_value=from_relation,
@@ -701,7 +680,7 @@ class BaseAdapter(metaclass=AdapterMeta):
            )

        if not isinstance(to_relation, self.Relation):
            invalid_type_error(
            raise MacroArgTypeError(
                method_name="expand_target_column_types",
                arg_name="to_relation",
                got_value=to_relation,
@@ -783,7 +762,7 @@ class BaseAdapter(metaclass=AdapterMeta):
                "schema": schema,
                "database": database,
            }
            get_relation_returned_multiple_results(kwargs, matches)
            raise RelationReturnedMultipleResultsError(kwargs, matches)

        elif matches:
            return matches[0]
@@ -805,20 +784,20 @@ class BaseAdapter(metaclass=AdapterMeta):
    @available.parse_none
    def create_schema(self, relation: BaseRelation):
        """Create the given schema if it does not exist."""
        raise NotImplementedException("`create_schema` is not implemented for this adapter!")
        raise NotImplementedError("`create_schema` is not implemented for this adapter!")

    @abc.abstractmethod
    @available.parse_none
    def drop_schema(self, relation: BaseRelation):
        """Drop the given schema (and everything in it) if it exists."""
        raise NotImplementedException("`drop_schema` is not implemented for this adapter!")
        raise NotImplementedError("`drop_schema` is not implemented for this adapter!")

    @available
    @classmethod
    @abc.abstractmethod
    def quote(cls, identifier: str) -> str:
        """Quote the given identifier, as appropriate for the database."""
        raise NotImplementedException("`quote` is not implemented for this adapter!")
        raise NotImplementedError("`quote` is not implemented for this adapter!")

    @available
    def quote_as_configured(self, identifier: str, quote_key: str) -> str:
@@ -847,10 +826,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        elif quote_config is None:
            pass
        else:
            raise_compiler_error(
                f'The seed configuration value of "quote_columns" has an '
                f"invalid type {type(quote_config)}"
            )
            raise QuoteConfigTypeError(quote_config)

        if quote_columns:
            return self.quote(column)
@@ -871,7 +847,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        :param col_idx: The index into the agate table for the column.
        :return: The name of the type in the database
        """
        raise NotImplementedException("`convert_text_type` is not implemented for this adapter!")
        raise NotImplementedError("`convert_text_type` is not implemented for this adapter!")

    @classmethod
    @abc.abstractmethod
@@ -883,7 +859,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        :param col_idx: The index into the agate table for the column.
        :return: The name of the type in the database
        """
        raise NotImplementedException("`convert_number_type` is not implemented for this adapter!")
        raise NotImplementedError("`convert_number_type` is not implemented for this adapter!")

    @classmethod
    @abc.abstractmethod
@@ -895,9 +871,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        :param col_idx: The index into the agate table for the column.
        :return: The name of the type in the database
        """
        raise NotImplementedException(
            "`convert_boolean_type` is not implemented for this adapter!"
        )
        raise NotImplementedError("`convert_boolean_type` is not implemented for this adapter!")

    @classmethod
    @abc.abstractmethod
@@ -909,9 +883,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        :param col_idx: The index into the agate table for the column.
        :return: The name of the type in the database
        """
        raise NotImplementedException(
            "`convert_datetime_type` is not implemented for this adapter!"
        )
        raise NotImplementedError("`convert_datetime_type` is not implemented for this adapter!")

    @classmethod
    @abc.abstractmethod
@@ -923,7 +895,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        :param col_idx: The index into the agate table for the column.
        :return: The name of the type in the database
        """
        raise NotImplementedException("`convert_date_type` is not implemented for this adapter!")
        raise NotImplementedError("`convert_date_type` is not implemented for this adapter!")

    @classmethod
    @abc.abstractmethod
@@ -935,7 +907,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        :param col_idx: The index into the agate table for the column.
        :return: The name of the type in the database
        """
        raise NotImplementedException("`convert_time_type` is not implemented for this adapter!")
        raise NotImplementedError("`convert_time_type` is not implemented for this adapter!")
    @available
    @classmethod
@@ -970,7 +942,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        context_override: Optional[Dict[str, Any]] = None,
        kwargs: Dict[str, Any] = None,
        text_only_columns: Optional[Iterable[str]] = None,
    ) -> agate.Table:
    ) -> AttrDict:
        """Look macro_name up in the manifest and execute its results.

        :param macro_name: The name of the macro to execute.
@@ -1002,7 +974,7 @@ class BaseAdapter(metaclass=AdapterMeta):
            else:
                package_name = 'the "{}" package'.format(project)

            raise RuntimeException(
            raise DbtRuntimeError(
                'dbt could not find a macro with the name "{}" in {}'.format(
                    macro_name, package_name
                )
@@ -1055,7 +1027,7 @@ class BaseAdapter(metaclass=AdapterMeta):
            manifest=manifest,
        )

        results = self._catalog_filter_table(table, manifest)
        results = self._catalog_filter_table(table, manifest)  # type: ignore[arg-type]
        return results
    def get_catalog(self, manifest: Manifest) -> Tuple[agate.Table, List[Exception]]:
@@ -1087,7 +1059,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        loaded_at_field: str,
        filter: Optional[str],
        manifest: Optional[Manifest] = None,
    ) -> Dict[str, Any]:
    ) -> Tuple[AdapterResponse, Dict[str, Any]]:
        """Calculate the freshness of sources in dbt, and return it"""
        kwargs: Dict[str, Any] = {
            "source": source,
@@ -1096,15 +1068,12 @@ class BaseAdapter(metaclass=AdapterMeta):
        }

        # run the macro
        table = self.execute_macro(FRESHNESS_MACRO_NAME, kwargs=kwargs, manifest=manifest)
        result = self.execute_macro(FRESHNESS_MACRO_NAME, kwargs=kwargs, manifest=manifest)
        adapter_response, table = result.response, result.table  # type: ignore[attr-defined]
        # now we have a 1-row table of the maximum `loaded_at_field` value and
        # the current time according to the db.
        if len(table) != 1 or len(table[0]) != 2:
            raise_compiler_error(
                'Got an invalid result from "{}" macro: {}'.format(
                    FRESHNESS_MACRO_NAME, [tuple(r) for r in table]
                )
            )
            raise MacroResultError(FRESHNESS_MACRO_NAME, table)
        if table[0][0] is None:
            # no records in the table, so really the max_loaded_at was
            # infinitely long ago. Just call it 0:00 January 1 year UTC
@@ -1114,11 +1083,12 @@ class BaseAdapter(metaclass=AdapterMeta):

        snapshotted_at = _utc(table[0][1], source, loaded_at_field)
        age = (snapshotted_at - max_loaded_at).total_seconds()
        return {
        freshness = {
            "max_loaded_at": max_loaded_at,
            "snapshotted_at": snapshotted_at,
            "age": age,
        }
        return adapter_response, freshness
    def pre_model_hook(self, config: Mapping[str, Any]) -> Any:
        """A hook for running some operation before the model materialization
@@ -1181,7 +1151,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        elif location == "prepend":
            return f"'{value}' || {add_to}"
        else:
            raise RuntimeException(f'Got an unexpected location value of "{location}"')
            raise DbtRuntimeError(f'Got an unexpected location value of "{location}"')

    def get_rows_different_sql(
        self,
@@ -1239,7 +1209,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        return self.generate_python_submission_response(submission_result)

    def generate_python_submission_response(self, submission_result: Any) -> AdapterResponse:
        raise NotImplementedException(
        raise NotImplementedError(
            "Your adapter needs to implement generate_python_submission_response"
        )

@@ -1263,7 +1233,7 @@ class BaseAdapter(metaclass=AdapterMeta):
            valid_strategies.append("default")
        builtin_strategies = self.builtin_incremental_strategies()
        if strategy in builtin_strategies and strategy not in valid_strategies:
            raise RuntimeException(
            raise DbtRuntimeError(
                f"The incremental strategy '{strategy}' is not valid for this adapter"
            )

@@ -1271,7 +1241,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        macro_name = f"get_incremental_{strategy}_sql"
        # The model_context should have MacroGenerator callable objects for all macros
        if macro_name not in model_context:
            raise RuntimeException(
            raise DbtRuntimeError(
                'dbt could not find an incremental strategy macro with the name "{}" in {}'.format(
                    macro_name, self.config.project_name
                )
@@ -1,7 +1,7 @@
from typing import List, Optional, Type

from dbt.adapters.base import Credentials
from dbt.exceptions import CompilationException
from dbt.exceptions import CompilationError
from dbt.adapters.protocol import AdapterProtocol


@@ -11,7 +11,7 @@ def project_name_from_path(include_path: str) -> str:

    partial = Project.partial_load(include_path)
    if partial.project_name is None:
        raise CompilationException(f"Invalid project at {include_path}: name not set!")
        raise CompilationError(f"Invalid project at {include_path}: name not set!")
    return partial.project_name
@@ -5,9 +5,9 @@ from dbt.clients.jinja import QueryStringGenerator

from dbt.context.manifest import generate_query_header_context
from dbt.contracts.connection import AdapterRequiredConfig, QueryComment
from dbt.contracts.graph.compiled import CompileResultNode
from dbt.contracts.graph.nodes import ResultNode
from dbt.contracts.graph.manifest import Manifest
from dbt.exceptions import RuntimeException
from dbt.exceptions import DbtRuntimeError


class NodeWrapper:
@@ -48,7 +48,7 @@ class _QueryComment(local):
        if isinstance(comment, str) and "*/" in comment:
            # tell the user "no" so they don't hurt themselves by writing
            # garbage
            raise RuntimeException(f'query comment contains illegal value "*/": {comment}')
            raise DbtRuntimeError(f'query comment contains illegal value "*/": {comment}')
        self.query_comment = comment
        self.append = append

@@ -90,7 +90,7 @@ class MacroQueryStringSetter:
    def reset(self):
        self.set("master", None)

    def set(self, name: str, node: Optional[CompileResultNode]):
    def set(self, name: str, node: Optional[ResultNode]):
        wrapped: Optional[NodeWrapper] = None
        if node is not None:
            wrapped = NodeWrapper(node)
@@ -1,9 +1,8 @@
 from collections.abc import Hashable
-from dataclasses import dataclass
-from typing import Optional, TypeVar, Any, Type, Dict, Union, Iterator, Tuple, Set
+from dataclasses import dataclass, field
+from typing import Optional, TypeVar, Any, Type, Dict, Iterator, Tuple, Set

-from dbt.contracts.graph.compiled import CompiledNode
-from dbt.contracts.graph.parsed import ParsedSourceDefinition, ParsedNode
+from dbt.contracts.graph.nodes import SourceDefinition, ManifestNode, ResultNode, ParsedNode
 from dbt.contracts.relation import (
     RelationType,
     ComponentName,
@@ -12,7 +11,11 @@ from dbt.contracts.relation import (
     Policy,
     Path,
 )
-from dbt.exceptions import InternalException
+from dbt.exceptions import (
+    ApproximateMatchError,
+    DbtInternalError,
+    MultipleDatabasesNotAllowedError,
+)
 from dbt.node_types import NodeType
 from dbt.utils import filter_null_values, deep_merge, classproperty
@@ -27,8 +30,10 @@ class BaseRelation(FakeAPIObject, Hashable):
     path: Path
     type: Optional[RelationType] = None
     quote_character: str = '"'
-    include_policy: Policy = Policy()
-    quote_policy: Policy = Policy()
+    # Python 3.11 requires that these use default_factory instead of simple default
+    # ValueError: mutable default <class 'dbt.contracts.relation.Policy'> for field include_policy is not allowed: use default_factory
+    include_policy: Policy = field(default_factory=lambda: Policy())
+    quote_policy: Policy = field(default_factory=lambda: Policy())
     dbt_created: bool = False

     def _is_exactish_match(self, field: ComponentName, value: str) -> bool:
@@ -39,9 +44,9 @@ class BaseRelation(FakeAPIObject, Hashable):

     @classmethod
     def _get_field_named(cls, field_name):
-        for field, _ in cls._get_fields():
-            if field.name == field_name:
-                return field
+        for f, _ in cls._get_fields():
+            if f.name == field_name:
+                return f
         # this should be unreachable
         raise ValueError(f"BaseRelation has no {field_name} field!")
@@ -52,11 +57,11 @@ class BaseRelation(FakeAPIObject, Hashable):
     @classmethod
     def get_default_quote_policy(cls) -> Policy:
-        return cls._get_field_named("quote_policy").default
+        return cls._get_field_named("quote_policy").default_factory()

     @classmethod
     def get_default_include_policy(cls) -> Policy:
-        return cls._get_field_named("include_policy").default
+        return cls._get_field_named("include_policy").default_factory()

     def get(self, key, default=None):
         """Override `.get` to return a metadata object so we don't break
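The default_factory change above is forced by a Python 3.11 tightening of dataclass rules: any unhashable default (which includes instances of ordinary dataclasses like Policy) is rejected at class-definition time, and with a factory there is no .default on the Field object, which is why the two accessors switch to calling .default_factory(). A self-contained sketch of that behavior; Policy here is a stand-in dataclass, not dbt's:

# Sketch of the Python 3.11 behavior motivating this hunk.
from dataclasses import dataclass, field, fields

@dataclass
class Policy:  # illustrative stand-in
    database: bool = True
    schema: bool = True

# On Python 3.11+, writing `include_policy: Policy = Policy()` raises:
#   ValueError: mutable default <class 'Policy'> for field include_policy
#   is not allowed: use default_factory
@dataclass
class Relation:
    include_policy: Policy = field(default_factory=Policy)

# With a factory, Field.default is MISSING; the default must be produced
# by calling the factory, exactly as get_default_quote_policy() now does.
default_policy = fields(Relation)[0].default_factory()
print(default_policy)  # Policy(database=True, schema=True)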
@@ -82,7 +87,7 @@ class BaseRelation(FakeAPIObject, Hashable):

         if not search:
             # nothing was passed in
-            raise dbt.exceptions.RuntimeException(
+            raise dbt.exceptions.DbtRuntimeError(
                 "Tried to match relation, but no search path was passed!"
             )
@@ -99,7 +104,7 @@ class BaseRelation(FakeAPIObject, Hashable):

         if approximate_match and not exact_match:
             target = self.create(database=database, schema=schema, identifier=identifier)
-            dbt.exceptions.approximate_relation_match(target, self)
+            raise ApproximateMatchError(target, self)

         return exact_match
@@ -184,7 +189,7 @@ class BaseRelation(FakeAPIObject, Hashable):
         )

     @classmethod
-    def create_from_source(cls: Type[Self], source: ParsedSourceDefinition, **kwargs: Any) -> Self:
+    def create_from_source(cls: Type[Self], source: SourceDefinition, **kwargs: Any) -> Self:
         source_quoting = source.quoting.to_dict(omit_none=True)
         source_quoting.pop("column", None)
         quote_policy = deep_merge(
@@ -209,7 +214,7 @@ class BaseRelation(FakeAPIObject, Hashable):
     def create_ephemeral_from_node(
         cls: Type[Self],
         config: HasQuoting,
-        node: Union[ParsedNode, CompiledNode],
+        node: ManifestNode,
     ) -> Self:
         # Note that ephemeral models are based on the name.
         identifier = cls.add_ephemeral_prefix(node.name)
@@ -222,7 +227,7 @@ class BaseRelation(FakeAPIObject, Hashable):
     def create_from_node(
         cls: Type[Self],
         config: HasQuoting,
-        node: Union[ParsedNode, CompiledNode],
+        node: ManifestNode,
         quote_policy: Optional[Dict[str, bool]] = None,
         **kwargs: Any,
     ) -> Self:
@@ -243,20 +248,20 @@ class BaseRelation(FakeAPIObject, Hashable):
     def create_from(
         cls: Type[Self],
         config: HasQuoting,
-        node: Union[CompiledNode, ParsedNode, ParsedSourceDefinition],
+        node: ResultNode,
         **kwargs: Any,
     ) -> Self:
         if node.resource_type == NodeType.Source:
-            if not isinstance(node, ParsedSourceDefinition):
-                raise InternalException(
-                    "type mismatch, expected ParsedSourceDefinition but got {}".format(type(node))
+            if not isinstance(node, SourceDefinition):
+                raise DbtInternalError(
+                    "type mismatch, expected SourceDefinition but got {}".format(type(node))
                 )
             return cls.create_from_source(node, **kwargs)
         else:
-            if not isinstance(node, (ParsedNode, CompiledNode)):
-                raise InternalException(
-                    "type mismatch, expected ParsedNode or CompiledNode but "
-                    "got {}".format(type(node))
+            # Can't use ManifestNode here because of parameterized generics
+            if not isinstance(node, (ParsedNode)):
+                raise DbtInternalError(
+                    f"type mismatch, expected ManifestNode but got {type(node)}"
                 )
             return cls.create_from_node(config, node, **kwargs)
@@ -353,7 +358,7 @@ class InformationSchema(BaseRelation):

     def __post_init__(self):
         if not isinstance(self.information_schema_view, (type(None), str)):
-            raise dbt.exceptions.CompilationException(
+            raise dbt.exceptions.CompilationError(
                 "Got an invalid name: {}".format(self.information_schema_view)
             )
@@ -437,7 +442,7 @@ class SchemaSearchMap(Dict[InformationSchema, Set[Optional[str]]]):
         if not allow_multiple_databases:
             seen = {r.database.lower() for r in self if r.database}
             if len(seen) > 1:
-                dbt.exceptions.raise_compiler_error(str(seen))
+                raise MultipleDatabasesNotAllowedError(seen)

         for information_schema_name, schema in self.search():
             path = {"database": information_schema_name.database, "schema": schema}
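The "Can't use ManifestNode here because of parameterized generics" comment in create_from above refers to a general Python restriction: isinstance() rejects subscripted typing constructs, so a type alias that mentions any parameterized generic cannot be used as the second argument. A minimal demonstration of the restriction itself (the alias contents are illustrative, not dbt's):

# Why the isinstance check falls back to ParsedNode in the hunk above.
from typing import List

try:
    isinstance([], List[int])
except TypeError as err:
    print(err)  # Subscripted generics cannot be used with class and instance checks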
@@ -1,4 +1,3 @@
-import re
 import threading
 from copy import deepcopy
 from typing import Any, Dict, Iterable, List, Optional, Set, Tuple
@@ -9,23 +8,15 @@ from dbt.adapters.reference_keys import (
     _make_msg_from_ref_key,
     _ReferenceKey,
 )
-import dbt.exceptions
-from dbt.events.functions import fire_event, fire_event_if
-from dbt.events.types import (
-    AddLink,
-    AddRelation,
-    DropCascade,
-    DropMissingRelation,
-    DropRelation,
-    DumpAfterAddGraph,
-    DumpAfterRenameSchema,
-    DumpBeforeAddGraph,
-    DumpBeforeRenameSchema,
-    RenameSchema,
-    TemporaryRelation,
-    UncachedRelation,
-    UpdateReference,
+from dbt.exceptions import (
+    DependentLinkNotCachedError,
+    NewNameAlreadyInCacheError,
+    NoneRelationFoundError,
+    ReferencedLinkNotCachedError,
+    TruncatedModelNameCausedCollisionError,
 )
+from dbt.events.functions import fire_event, fire_event_if
+from dbt.events.types import CacheAction, CacheDumpGraph
 import dbt.flags as flags
 from dbt.utils import lowercase
@@ -150,11 +141,7 @@ class _CachedRelation:
         :raises InternalError: If the new key already exists.
         """
         if new_key in self.referenced_by:
-            dbt.exceptions.raise_cache_inconsistent(
-                'in rename of "{}" -> "{}", new name is in the cache already'.format(
-                    old_key, new_key
-                )
-            )
+            raise NewNameAlreadyInCacheError(old_key, new_key)

         if old_key not in self.referenced_by:
             return
@@ -270,21 +257,17 @@ class RelationsCache:
         if referenced is None:
             return
         if referenced is None:
-            dbt.exceptions.raise_cache_inconsistent(
-                "in add_link, referenced link key {} not in cache!".format(referenced_key)
-            )
+            raise ReferencedLinkNotCachedError(referenced_key)

         dependent = self.relations.get(dependent_key)
         if dependent is None:
-            dbt.exceptions.raise_cache_inconsistent(
-                "in add_link, dependent link key {} not in cache!".format(dependent_key)
-            )
+            raise DependentLinkNotCachedError(dependent_key)

         assert dependent is not None  # we just raised!

         referenced.add_reference(dependent)

-    # TODO: Is this dead code?  I can't seem to find it grepping the codebase.
+    # This is called in plugins/postgres/dbt/adapters/postgres/impl.py
     def add_link(self, referenced, dependent):
         """Add a link between two relations to the database. If either relation
         does not exist, it will be added as an "external" relation.
@@ -306,9 +289,9 @@ class RelationsCache:
             # referring to a table outside our control. There's no need to make
             # a link - we will never drop the referenced relation during a run.
             fire_event(
-                UncachedRelation(
-                    dep_key=_make_msg_from_ref_key(dep_key),
+                CacheAction(
                     ref_key=_make_msg_from_ref_key(ref_key),
+                    ref_key_2=_make_msg_from_ref_key(dep_key),
                 )
             )
             return
@@ -321,8 +304,10 @@ class RelationsCache:
         dependent = dependent.replace(type=referenced.External)
         self.add(dependent)
         fire_event(
-            AddLink(
-                dep_key=_make_msg_from_ref_key(dep_key), ref_key=_make_msg_from_ref_key(ref_key)
+            CacheAction(
+                action="add_link",
+                ref_key=_make_msg_from_ref_key(dep_key),
+                ref_key_2=_make_msg_from_ref_key(ref_key),
             )
         )
         with self.lock:
@@ -335,12 +320,18 @@ class RelationsCache:
         :param BaseRelation relation: The underlying relation.
         """
         cached = _CachedRelation(relation)
-        fire_event(AddRelation(relation=_make_ref_key_msg(cached)))
-        fire_event_if(flags.LOG_CACHE_EVENTS, lambda: DumpBeforeAddGraph(dump=self.dump_graph()))
+        fire_event_if(
+            flags.LOG_CACHE_EVENTS,
+            lambda: CacheDumpGraph(before_after="before", action="adding", dump=self.dump_graph()),
+        )
+        fire_event(CacheAction(action="add_relation", ref_key=_make_ref_key_msg(cached)))

         with self.lock:
             self._setdefault(cached)
-        fire_event_if(flags.LOG_CACHE_EVENTS, lambda: DumpAfterAddGraph(dump=self.dump_graph()))
+        fire_event_if(
+            flags.LOG_CACHE_EVENTS,
+            lambda: CacheDumpGraph(before_after="after", action="adding", dump=self.dump_graph()),
+        )

     def _remove_refs(self, keys):
         """Removes all references to all entries in keys. This does not
@@ -368,16 +359,19 @@ class RelationsCache:
         """
         dropped_key = _make_ref_key(relation)
         dropped_key_msg = _make_ref_key_msg(relation)
-        fire_event(DropRelation(dropped=dropped_key_msg))
+        fire_event(CacheAction(action="drop_relation", ref_key=dropped_key_msg))
         with self.lock:
             if dropped_key not in self.relations:
-                fire_event(DropMissingRelation(relation=dropped_key_msg))
+                fire_event(CacheAction(action="drop_missing_relation", ref_key=dropped_key_msg))
                 return
             consequences = self.relations[dropped_key].collect_consequences()
             # convert from a list of _ReferenceKeys to a list of ReferenceKeyMsgs
             consequence_msgs = [_make_msg_from_ref_key(key) for key in consequences]

-            fire_event(DropCascade(dropped=dropped_key_msg, consequences=consequence_msgs))
+            fire_event(
+                CacheAction(
+                    action="drop_cascade", ref_key=dropped_key_msg, ref_list=consequence_msgs
+                )
+            )
             self._remove_refs(consequences)

     def _rename_relation(self, old_key, new_relation):
@@ -400,12 +394,14 @@ class RelationsCache:
         for cached in self.relations.values():
             if cached.is_referenced_by(old_key):
                 fire_event(
-                    UpdateReference(
-                        old_key=_make_ref_key_msg(old_key),
-                        new_key=_make_ref_key_msg(new_key),
-                        cached_key=_make_ref_key_msg(cached.key()),
+                    CacheAction(
+                        action="update_reference",
+                        ref_key=_make_ref_key_msg(old_key),
+                        ref_key_2=_make_ref_key_msg(new_key),
+                        ref_key_3=_make_ref_key_msg(cached.key()),
                     )
                 )

                 cached.rename_key(old_key, new_key)

         self.relations[new_key] = relation
@@ -430,27 +426,12 @@ class RelationsCache:
         if new_key in self.relations:
             # Tell user when collision caused by model names truncated during
             # materialization.
-            match = re.search("__dbt_backup|__dbt_tmp$", new_key.identifier)
-            if match:
-                truncated_model_name_prefix = new_key.identifier[: match.start()]
-                message_addendum = (
-                    "\n\nName collisions can occur when the length of two "
-                    "models' names approach your database's builtin limit. "
-                    "Try restructuring your project such that no two models "
-                    "share the prefix '{}'.".format(truncated_model_name_prefix)
-                    + " Then, clean your warehouse of any removed models."
-                )
-            else:
-                message_addendum = ""
-
-            dbt.exceptions.raise_cache_inconsistent(
-                "in rename, new key {} already in cache: {}{}".format(
-                    new_key, list(self.relations.keys()), message_addendum
-                )
-            )
+            raise TruncatedModelNameCausedCollisionError(new_key, self.relations)

         if old_key not in self.relations:
-            fire_event(TemporaryRelation(key=_make_msg_from_ref_key(old_key)))
+            fire_event(
+                CacheAction(action="temporary_relation", ref_key=_make_msg_from_ref_key(old_key))
+            )
             return False
         return True
@@ -469,13 +450,16 @@ class RelationsCache:
         old_key = _make_ref_key(old)
         new_key = _make_ref_key(new)
         fire_event(
-            RenameSchema(
-                old_key=_make_msg_from_ref_key(old_key), new_key=_make_msg_from_ref_key(new)
+            CacheAction(
+                action="rename_relation",
+                ref_key=_make_msg_from_ref_key(old_key),
+                ref_key_2=_make_msg_from_ref_key(new),
             )
         )

         fire_event_if(
-            flags.LOG_CACHE_EVENTS, lambda: DumpBeforeRenameSchema(dump=self.dump_graph())
+            flags.LOG_CACHE_EVENTS,
+            lambda: CacheDumpGraph(before_after="before", action="rename", dump=self.dump_graph()),
        )

         with self.lock:
@@ -485,7 +469,8 @@ class RelationsCache:
             self._setdefault(_CachedRelation(new))

         fire_event_if(
-            flags.LOG_CACHE_EVENTS, lambda: DumpAfterRenameSchema(dump=self.dump_graph())
+            flags.LOG_CACHE_EVENTS,
+            lambda: CacheDumpGraph(before_after="after", action="rename", dump=self.dump_graph()),
         )

     def get_relations(self, database: Optional[str], schema: Optional[str]) -> List[Any]:
@@ -505,9 +490,7 @@ class RelationsCache:
         ]

         if None in results:
-            dbt.exceptions.raise_cache_inconsistent(
-                "in get_relations, a None relation was found in the cache!"
-            )
+            raise NoneRelationFoundError()
         return results

     def clear(self):
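Every cache.py hunk above follows one consolidation pattern: a dozen bespoke event classes (AddLink, DropRelation, RenameSchema, and so on) collapse into two generic ones, CacheAction with an action discriminator plus positional ref_key fields, and CacheDumpGraph for the before/after graph dumps. A minimal sketch of the consolidated shape, inferred from the call sites above; dbt's real event classes are generated from protobuf definitions, so these dataclasses are illustration only:

# Field names come from the call sites in this diff; defaults are assumed.
from dataclasses import dataclass, field
from typing import Any, Dict, List, Optional

@dataclass
class CacheAction:
    action: str = ""                       # e.g. "add_link", "drop_relation"
    ref_key: Optional[Any] = None          # primary relation key
    ref_key_2: Optional[Any] = None        # secondary key (links, renames)
    ref_key_3: Optional[Any] = None        # tertiary key (update_reference)
    ref_list: List[Any] = field(default_factory=list)  # drop_cascade consequences

@dataclass
class CacheDumpGraph:
    before_after: str = ""                 # "before" | "after"
    action: str = ""                       # e.g. "adding", "rename"
    dump: Dict[str, Any] = field(default_factory=dict)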
@@ -10,7 +10,7 @@ from dbt.adapters.protocol import AdapterConfig, AdapterProtocol, RelationProtoc
 from dbt.contracts.connection import AdapterRequiredConfig, Credentials
 from dbt.events.functions import fire_event
 from dbt.events.types import AdapterImportError, PluginLoadError
-from dbt.exceptions import InternalException, RuntimeException
+from dbt.exceptions import DbtInternalError, DbtRuntimeError
 from dbt.include.global_project import PACKAGE_PATH as GLOBAL_PROJECT_PATH
 from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME
@@ -34,7 +34,7 @@ class AdapterContainer:
             names = ", ".join(self.plugins.keys())

             message = f"Invalid adapter type {name}! Must be one of {names}"
-            raise RuntimeException(message)
+            raise DbtRuntimeError(message)

     def get_adapter_class_by_name(self, name: str) -> Type[Adapter]:
         plugin = self.get_plugin_by_name(name)
@@ -60,7 +60,7 @@ class AdapterContainer:
             # the user about it via a runtime error
             if exc.name == "dbt.adapters." + name:
                 fire_event(AdapterImportError(exc=str(exc)))
-                raise RuntimeException(f"Could not find adapter type {name}!")
+                raise DbtRuntimeError(f"Could not find adapter type {name}!")
             # otherwise, the error had to have come from some underlying
             # library. Log the stack trace.
@@ -70,7 +70,7 @@ class AdapterContainer:
         plugin_type = plugin.adapter.type()

         if plugin_type != name:
-            raise RuntimeException(
+            raise DbtRuntimeError(
                 f"Expected to find adapter with type named {name}, got "
                 f"adapter with type {plugin_type}"
             )
@@ -132,7 +132,7 @@ class AdapterContainer:
             try:
                 plugin = self.plugins[plugin_name]
             except KeyError:
-                raise InternalException(f"No plugin found for {plugin_name}") from None
+                raise DbtInternalError(f"No plugin found for {plugin_name}") from None
             plugins.append(plugin)
             seen.add(plugin_name)
             for dep in plugin.dependencies:
@@ -151,7 +151,7 @@ class AdapterContainer:
             try:
                 path = self.packages[package_name]
             except KeyError:
-                raise InternalException(f"No internal package listing found for {package_name}")
+                raise DbtInternalError(f"No internal package listing found for {package_name}")
             paths.append(path)
         return paths
@@ -8,7 +8,6 @@ from typing import (
     Generic,
     TypeVar,
     Tuple,
-    Union,
     Dict,
     Any,
 )
@@ -17,8 +16,7 @@ from typing_extensions import Protocol
 import agate

 from dbt.contracts.connection import Connection, AdapterRequiredConfig, AdapterResponse
-from dbt.contracts.graph.compiled import CompiledNode, ManifestNode, NonSourceCompiledNode
-from dbt.contracts.graph.parsed import ParsedNode, ParsedSourceDefinition
+from dbt.contracts.graph.nodes import ResultNode, ManifestNode
 from dbt.contracts.graph.model_config import BaseConfig
 from dbt.contracts.graph.manifest import Manifest
 from dbt.contracts.relation import Policy, HasQuoting
@@ -48,11 +46,7 @@ class RelationProtocol(Protocol):
         ...

     @classmethod
-    def create_from(
-        cls: Type[Self],
-        config: HasQuoting,
-        node: Union[CompiledNode, ParsedNode, ParsedSourceDefinition],
-    ) -> Self:
+    def create_from(cls: Type[Self], config: HasQuoting, node: ResultNode) -> Self:
         ...
@@ -65,7 +59,7 @@ class CompilerProtocol(Protocol):
         node: ManifestNode,
         manifest: Manifest,
         extra_context: Optional[Dict[str, Any]] = None,
-    ) -> NonSourceCompiledNode:
+    ) -> ManifestNode:
         ...
@@ -10,6 +10,7 @@ from dbt.adapters.base import BaseConnectionManager
 from dbt.contracts.connection import Connection, ConnectionState, AdapterResponse
 from dbt.events.functions import fire_event
 from dbt.events.types import ConnectionUsed, SQLQuery, SQLCommit, SQLQueryStatus
+from dbt.events.contextvars import get_node_info
 from dbt.utils import cast_to_str
@@ -26,9 +27,7 @@ class SQLConnectionManager(BaseConnectionManager):
     @abc.abstractmethod
     def cancel(self, connection: Connection):
         """Cancel the given connection."""
-        raise dbt.exceptions.NotImplementedException(
-            "`cancel` is not implemented for this adapter!"
-        )
+        raise dbt.exceptions.NotImplementedError("`cancel` is not implemented for this adapter!")

     def cancel_open(self) -> List[str]:
         names = []
@@ -56,7 +55,13 @@ class SQLConnectionManager(BaseConnectionManager):
         connection = self.get_thread_connection()
         if auto_begin and connection.transaction_open is False:
             self.begin()
-        fire_event(ConnectionUsed(conn_type=self.TYPE, conn_name=cast_to_str(connection.name)))
+        fire_event(
+            ConnectionUsed(
+                conn_type=self.TYPE,
+                conn_name=cast_to_str(connection.name),
+                node_info=get_node_info(),
+            )
+        )

         with self.exception_handler(sql):
             if abridge_sql_log:
@@ -64,7 +69,11 @@ class SQLConnectionManager(BaseConnectionManager):
             else:
                 log_sql = sql

-            fire_event(SQLQuery(conn_name=cast_to_str(connection.name), sql=log_sql))
+            fire_event(
+                SQLQuery(
+                    conn_name=cast_to_str(connection.name), sql=log_sql, node_info=get_node_info()
+                )
+            )
             pre = time.time()

             cursor = connection.handle.cursor()
@@ -72,7 +81,9 @@ class SQLConnectionManager(BaseConnectionManager):

             fire_event(
                 SQLQueryStatus(
-                    status=str(self.get_response(cursor)), elapsed=round((time.time() - pre), 2)
+                    status=str(self.get_response(cursor)),
+                    elapsed=round((time.time() - pre)),
+                    node_info=get_node_info(),
                 )
             )
@@ -82,7 +93,7 @@ class SQLConnectionManager(BaseConnectionManager):
     @abc.abstractmethod
     def get_response(cls, cursor: Any) -> AdapterResponse:
         """Get the status of the cursor."""
-        raise dbt.exceptions.NotImplementedException(
+        raise dbt.exceptions.NotImplementedError(
             "`get_response` is not implemented for this adapter!"
         )
@@ -138,7 +149,7 @@ class SQLConnectionManager(BaseConnectionManager):
     def begin(self):
         connection = self.get_thread_connection()
         if connection.transaction_open is True:
-            raise dbt.exceptions.InternalException(
+            raise dbt.exceptions.DbtInternalError(
                 'Tried to begin a new transaction on connection "{}", but '
                 "it already had one open!".format(connection.name)
             )
@@ -151,12 +162,12 @@ class SQLConnectionManager(BaseConnectionManager):
     def commit(self):
         connection = self.get_thread_connection()
         if connection.transaction_open is False:
-            raise dbt.exceptions.InternalException(
+            raise dbt.exceptions.DbtInternalError(
                 'Tried to commit transaction on connection "{}", but '
                 "it does not have one open!".format(connection.name)
             )

-        fire_event(SQLCommit(conn_name=connection.name))
+        fire_event(SQLCommit(conn_name=connection.name, node_info=get_node_info()))
         self.add_commit_query()

         connection.transaction_open = False
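The node_info=get_node_info() threaded through these events attaches the currently executing node to every connection and query log line, without passing it down the call stack. A minimal sketch of how a contextvars-backed lookup like this can work; only get_node_info() and its module path appear in the diff, every other name here is illustrative:

# Sketch only: dbt.events.contextvars may be implemented differently.
import contextvars
from typing import Any, Dict, Optional

_NODE_INFO: contextvars.ContextVar[Optional[Dict[str, Any]]] = contextvars.ContextVar(
    "node_info", default=None
)

def get_node_info() -> Dict[str, Any]:
    # empty dict when no node is currently executing
    return _NODE_INFO.get() or {}

def set_node_info(info: Dict[str, Any]) -> contextvars.Token:
    # hypothetically called by the task runner when it starts a node
    return _NODE_INFO.set(info)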
@@ -1,9 +1,8 @@
 import agate
 from typing import Any, Optional, Tuple, Type, List

 import dbt.clients.agate_helper
 from dbt.contracts.connection import Connection
-import dbt.exceptions
+from dbt.exceptions import RelationTypeNullError
 from dbt.adapters.base import BaseAdapter, available
 from dbt.adapters.cache import _make_ref_key_msg
 from dbt.adapters.sql import SQLConnectionManager
@@ -132,9 +131,7 @@ class SQLAdapter(BaseAdapter):

     def drop_relation(self, relation):
         if relation.type is None:
-            dbt.exceptions.raise_compiler_error(
-                "Tried to drop relation {}, but its type is null.".format(relation)
-            )
+            raise RelationTypeNullError(relation)

         self.cache_dropped(relation)
         self.execute_macro(DROP_RELATION_MACRO_NAME, kwargs={"relation": relation})
@@ -31,7 +31,6 @@ def cli_runner():
 @p.cache_selected_only
 @p.debug
 @p.enable_legacy_logger
-@p.event_buffer_size
 @p.fail_fast
 @p.log_cache_events
 @p.log_format
@@ -47,6 +46,7 @@ def cli_runner():
 @p.version
 @p.version_check
 @p.warn_error
+@p.warn_error_options
 @p.write_json
 def cli(ctx, **kwargs):
     """An ELT tool for managing your SQL transformations and data models.
@@ -1,6 +1,8 @@
 from click import ParamType
 import yaml

+from dbt.helper_types import WarnErrorOptions
+

 class YAML(ParamType):
     """The Click YAML type. Converts YAML strings into objects."""
@@ -17,6 +19,19 @@ class YAML(ParamType):
             self.fail(f"String '{value}' is not valid YAML", param, ctx)


+class WarnErrorOptionsType(YAML):
+    """The Click WarnErrorOptions type. Converts YAML strings into objects."""
+
+    name = "WarnErrorOptionsType"
+
+    def convert(self, value, param, ctx):
+        include_exclude = super().convert(value, param, ctx)
+
+        return WarnErrorOptions(
+            include=include_exclude.get("include", []), exclude=include_exclude.get("exclude", [])
+        )
+
+
 class Truthy(ParamType):
     """The Click Truthy type. Converts strings into a "truthy" type"""
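WarnErrorOptionsType reuses the YAML type's parsing, then narrows the parsed dict to the include/exclude shape. A rough usage sketch, driving the converter by hand outside a real CLI; passing None for param and ctx is a convenience for direct ParamType calls, and WarnErrorOptions itself comes from dbt.helper_types:

# Illustrative only: exercising the converter added in the hunk above.
from dbt.cli.option_types import WarnErrorOptionsType

converter = WarnErrorOptionsType()
opts = converter.convert(
    '{"include": "all", "exclude": ["NoNodesForSelectionCriteria"]}', None, None
)
print(opts)  # a WarnErrorOptions with include="all" and one excluded event name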
@@ -1,7 +1,7 @@
 from pathlib import Path, PurePath

 import click
-from dbt.cli.option_types import YAML
+from dbt.cli.option_types import YAML, WarnErrorOptionsType
 from dbt.cli.resolvers import default_project_dir, default_profiles_dir
@@ -80,14 +80,6 @@ enable_legacy_logger = click.option(
     hidden=True,
 )

-event_buffer_size = click.option(
-    "--event-buffer-size",
-    envvar="DBT_EVENT_BUFFER_SIZE",
-    help="Sets the max number of events to buffer in EVENT_HISTORY.",
-    default=100000,
-    type=click.INT,
-)
-
 exclude = click.option("--exclude", envvar=None, help="Specify the nodes to exclude.")

 fail_fast = click.option(
@@ -278,7 +270,7 @@ show = click.option(
 )

 skip_profile_setup = click.option(
-    "--skip-profile-setup", "-s", envvar=None, help="Skip interative profile setup.", is_flag=True
+    "--skip-profile-setup", "-s", envvar=None, help="Skip interactive profile setup.", is_flag=True
 )

 # TODO: The env var and name (reflected in flags) are corrections!
@@ -366,9 +358,20 @@ version_check = click.option(
 )

 warn_error = click.option(
-    "--warn-error/--no-warn-error",
+    "--warn-error",
     envvar="DBT_WARN_ERROR",
-    help="If dbt would normally warn, instead raise an exception. Examples include --models that selects nothing, deprecations, configurations with no associated models, invalid test configurations, and missing sources/refs in tests.",
+    help="If dbt would normally warn, instead raise an exception. Examples include --select that selects nothing, deprecations, configurations with no associated models, invalid test configurations, and missing sources/refs in tests.",
+    default=None,
+    flag_value=True,
 )

+warn_error_options = click.option(
+    "--warn-error-options",
+    envvar="DBT_WARN_ERROR_OPTIONS",
+    default=None,
+    help="""If dbt would normally warn, instead raise an exception based on include/exclude configuration. Examples include --select that selects nothing, deprecations, configurations with no associated models, invalid test configurations,
+    and missing sources/refs in tests. This argument should be a YAML string, with keys 'include' or 'exclude'. eg. '{"include": "all", "exclude": ["NoNodesForSelectionCriteria"]}'""",
+    type=WarnErrorOptionsType(),
+)
+
 write_json = click.option(
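Switching warn_error from a paired --warn-error/--no-warn-error boolean to a bare flag with default=None and flag_value=True makes the option tri-state: None when unset, True when passed, so downstream config can tell "user said nothing" apart from "user opted in". A self-contained sketch of that click behavior; the command name is made up:

# Demonstrates the default=None / flag_value=True pattern used above.
import click

@click.command()
@click.option("--warn-error", default=None, flag_value=True)
def demo(warn_error):
    # prints None when the flag is absent, True when --warn-error is passed
    click.echo(repr(warn_error))

if __name__ == "__main__":
    demo()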
@@ -1,7 +1,15 @@
 import re
 from collections import namedtuple

-import dbt.exceptions
+from dbt.exceptions import (
+    BlockDefinitionNotAtTopError,
+    DbtInternalError,
+    MissingCloseTagError,
+    MissingControlFlowStartTagError,
+    NestedTagsError,
+    UnexpectedControlFlowEndTagError,
+    UnexpectedMacroEOFError,
+)


 def regex(pat):
@@ -139,10 +147,7 @@ class TagIterator:
     def _expect_match(self, expected_name, *patterns, **kwargs):
         match = self._first_match(*patterns, **kwargs)
         if match is None:
-            msg = 'unexpected EOF, expected {}, got "{}"'.format(
-                expected_name, self.data[self.pos :]
-            )
-            dbt.exceptions.raise_compiler_error(msg)
+            raise UnexpectedMacroEOFError(expected_name, self.data[self.pos :])
         return match

     def handle_expr(self, match):
@@ -256,7 +261,7 @@ class TagIterator:
         elif block_type_name is not None:
             yield self.handle_tag(match)
         else:
-            raise dbt.exceptions.InternalException(
+            raise DbtInternalError(
                 "Invalid regex match in next_block, expected block start, "
                 "expr start, or comment start"
             )
@@ -265,13 +270,6 @@ class TagIterator:
         return self.find_tags()


-duplicate_tags = (
-    "Got nested tags: {outer.block_type_name} (started at {outer.start}) did "
-    "not have a matching {{% end{outer.block_type_name} %}} before a "
-    "subsequent {inner.block_type_name} was found (started at {inner.start})"
-)
-
-
 _CONTROL_FLOW_TAGS = {
     "if": "endif",
     "for": "endfor",
@@ -319,33 +317,16 @@ class BlockIterator:
                     found = self.stack.pop()
                 else:
                     expected = _CONTROL_FLOW_END_TAGS[tag.block_type_name]
-                    dbt.exceptions.raise_compiler_error(
-                        (
-                            "Got an unexpected control flow end tag, got {} but "
-                            "never saw a preceeding {} (@ {})"
-                        ).format(tag.block_type_name, expected, self.tag_parser.linepos(tag.start))
-                    )
+                    raise UnexpectedControlFlowEndTagError(tag, expected, self.tag_parser)
                 expected = _CONTROL_FLOW_TAGS[found]
                 if expected != tag.block_type_name:
-                    dbt.exceptions.raise_compiler_error(
-                        (
-                            "Got an unexpected control flow end tag, got {} but "
-                            "expected {} next (@ {})"
-                        ).format(tag.block_type_name, expected, self.tag_parser.linepos(tag.start))
-                    )
+                    raise MissingControlFlowStartTagError(tag, expected, self.tag_parser)

             if tag.block_type_name in allowed_blocks:
                 if self.stack:
-                    dbt.exceptions.raise_compiler_error(
-                        (
-                            "Got a block definition inside control flow at {}. "
-                            "All dbt block definitions must be at the top level"
-                        ).format(self.tag_parser.linepos(tag.start))
-                    )
+                    raise BlockDefinitionNotAtTopError(self.tag_parser, tag.start)
                 if self.current is not None:
-                    dbt.exceptions.raise_compiler_error(
-                        duplicate_tags.format(outer=self.current, inner=tag)
-                    )
+                    raise NestedTagsError(outer=self.current, inner=tag)
             if collect_raw_data:
                 raw_data = self.data[self.last_position : tag.start]
                 self.last_position = tag.start
@@ -366,11 +347,7 @@ class BlockIterator:

         if self.current:
             linecount = self.data[: self.current.end].count("\n") + 1
-            dbt.exceptions.raise_compiler_error(
-                ("Reached EOF without finding a close tag for {} (searched from line {})").format(
-                    self.current.block_type_name, linecount
-                )
-            )
+            raise MissingCloseTagError(self.current.block_type_name, linecount)

         if collect_raw_data:
             raw_data = self.data[self.last_position :]
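Across every file in this diff the refactor follows the same pattern: ad-hoc helpers like raise_compiler_error and raise_cache_inconsistent, which formatted a message inline at the call site, are replaced by dedicated exception classes that take the raw arguments and own their message. A minimal sketch of what one such class could look like; the base-class name and the message wording mirror the removed code above, but the exact class body in dbt may differ:

# Sketch of the typed-exception pattern; illustrative, not dbt's implementation.
class CompilationError(Exception):
    pass

class UnexpectedMacroEOFError(CompilationError):
    def __init__(self, expected_name: str, actual_data: str):
        self.expected_name = expected_name
        self.actual_data = actual_data
        super().__init__(self._message())

    def _message(self) -> str:
        # same text the removed inline formatting produced
        return f'unexpected EOF, expected {self.expected_name}, got "{self.actual_data}"'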
Some files were not shown because too many files have changed in this diff.