Mirror of https://github.com/dbt-labs/dbt-core (synced 2025-12-17 19:31:34 +00:00)

Compare commits: v0.20.1...add-except (396 commits)
The commit table identifies the 396 commits in this range by SHA only (from d342165f6d through 8fdba17ac6); the author, date, and message columns are empty in this mirror and are omitted here. The file changes in the comparison follow.
Bumpversion configuration (modified)

```diff
@@ -1,23 +1,27 @@
 [bumpversion]
-current_version = 0.20.0rc1
+current_version = 1.0.0b2
 parse = (?P<major>\d+)
     \.(?P<minor>\d+)
     \.(?P<patch>\d+)
-    ((?P<prerelease>[a-z]+)(?P<num>\d+))?
+    ((?P<prekind>a|b|rc)
+    (?P<pre>\d+) # pre-release version num
+    )?
 serialize =
-    {major}.{minor}.{patch}{prerelease}{num}
+    {major}.{minor}.{patch}{prekind}{pre}
     {major}.{minor}.{patch}
 commit = False
 tag = False
 
-[bumpversion:part:prerelease]
+[bumpversion:part:prekind]
 first_value = a
 optional_value = final
 values =
     a
     b
     rc
     final
 
-[bumpversion:part:num]
+[bumpversion:part:pre]
 first_value = 1
 
 [bumpversion:file:setup.py]
@@ -26,19 +30,8 @@ first_value = 1
 
 [bumpversion:file:core/dbt/version.py]
 
 [bumpversion:file:core/scripts/create_adapter_plugins.py]
 
 [bumpversion:file:plugins/postgres/setup.py]
 
-[bumpversion:file:plugins/redshift/setup.py]
-
-[bumpversion:file:plugins/snowflake/setup.py]
-
-[bumpversion:file:plugins/bigquery/setup.py]
-
 [bumpversion:file:plugins/postgres/dbt/adapters/postgres/__version__.py]
-
-[bumpversion:file:plugins/redshift/dbt/adapters/redshift/__version__.py]
-
-[bumpversion:file:plugins/snowflake/dbt/adapters/snowflake/__version__.py]
-
-[bumpversion:file:plugins/bigquery/dbt/adapters/bigquery/__version__.py]
```
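To make the new versioning scheme concrete, the sketch below (not code from the repository) rewrites the `parse` regex and `serialize` templates from the config as a small Python round-trip for a pre-release string such as `1.0.0b2`; the regex and `serialize` helper are illustrative assumptions, not bumpversion internals.

```python
import re

# Illustrative only: mirrors the parse/serialize scheme from the config above.
PARSE = re.compile(
    r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"
    r"((?P<prekind>a|b|rc)(?P<pre>\d+))?"
)

def serialize(parts: dict) -> str:
    # "{major}.{minor}.{patch}{prekind}{pre}" when a pre-release is present,
    # otherwise "{major}.{minor}.{patch}"
    base = "{major}.{minor}.{patch}".format(**parts)
    if parts.get("prekind") and parts.get("pre"):
        return base + parts["prekind"] + parts["pre"]
    return base

parts = PARSE.match("1.0.0b2").groupdict()
print(parts)             # {'major': '1', 'minor': '0', 'patch': '0', 'prekind': 'b', 'pre': '2'}
print(serialize(parts))  # 1.0.0b2
```

Under the old scheme, the same string would have matched the single `prerelease`/`num` pair instead of the split `prekind`/`pre` parts.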
CircleCI configuration (deleted, 123 lines)

```yaml
version: 2.1
jobs:
  unit:
    docker: &test_only
      - image: fishtownanalytics/test-container:12
        environment:
          DBT_INVOCATION_ENV: circle
          DOCKER_TEST_DATABASE_HOST: "database"
          TOX_PARALLEL_NO_SPINNER: 1
    steps:
      - checkout
      - run: tox -p -e py36,py37,py38
  lint:
    docker: *test_only
    steps:
      - checkout
      - run: tox -e mypy,flake8 -- -v
  build-wheels:
    docker: *test_only
    steps:
      - checkout
      - run:
          name: Build wheels
          command: |
            python3.8 -m venv "${PYTHON_ENV}"
            export PYTHON_BIN="${PYTHON_ENV}/bin/python"
            $PYTHON_BIN -m pip install -U pip setuptools
            $PYTHON_BIN -m pip install -r requirements.txt
            $PYTHON_BIN -m pip install -r dev-requirements.txt
            /bin/bash ./scripts/build-wheels.sh
            $PYTHON_BIN ./scripts/collect-dbt-contexts.py > ./dist/context_metadata.json
            $PYTHON_BIN ./scripts/collect-artifact-schema.py > ./dist/artifact_schemas.json
          environment:
            PYTHON_ENV: /home/tox/build_venv/
      - store_artifacts:
          path: ./dist
          destination: dist
  integration-postgres:
    docker:
      - image: fishtownanalytics/test-container:12
        environment:
          DBT_INVOCATION_ENV: circle
          DOCKER_TEST_DATABASE_HOST: "database"
          TOX_PARALLEL_NO_SPINNER: 1
      - image: postgres
        name: database
        environment:
          POSTGRES_USER: "root"
          POSTGRES_PASSWORD: "password"
          POSTGRES_DB: "dbt"
    steps:
      - checkout
      - run:
          name: Setup postgres
          command: bash test/setup_db.sh
          environment:
            PGHOST: database
            PGUSER: root
            PGPASSWORD: password
            PGDATABASE: postgres
      - run:
          name: Postgres integration tests
          command: tox -p -e py36-postgres,py38-postgres -- -v -n4
          no_output_timeout: 30m
      - store_artifacts:
          path: ./logs
  integration-snowflake:
    docker: *test_only
    steps:
      - checkout
      - run:
          name: Snowflake integration tests
          command: tox -p -e py36-snowflake,py38-snowflake -- -v -n4
          no_output_timeout: 30m
      - store_artifacts:
          path: ./logs
  integration-redshift:
    docker: *test_only
    steps:
      - checkout
      - run:
          name: Redshift integration tests
          command: tox -p -e py36-redshift,py38-redshift -- -v -n4
          no_output_timeout: 30m
      - store_artifacts:
          path: ./logs
  integration-bigquery:
    docker: *test_only
    steps:
      - checkout
      - run:
          name: Bigquery integration test
          command: tox -p -e py36-bigquery,py38-bigquery -- -v -n4
          no_output_timeout: 30m
      - store_artifacts:
          path: ./logs

workflows:
  version: 2
  test-everything:
    jobs:
      - lint
      - unit
      - integration-postgres:
          requires:
            - unit
      - integration-redshift:
          requires:
            - unit
      - integration-bigquery:
          requires:
            - unit
      - integration-snowflake:
          requires:
            - unit
      - build-wheels:
          requires:
            - lint
            - unit
            - integration-postgres
            - integration-redshift
            - integration-bigquery
            - integration-snowflake
```
.github/ISSUE_TEMPLATE/bug-report.yml (new file, 85 lines)

```yaml
name: 🐞 Bug
description: Report a bug or an issue you've found with dbt
title: "[Bug] <title>"
labels: ["bug", "triage"]
body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to fill out this bug report!
  - type: checkboxes
    attributes:
      label: Is there an existing issue for this?
      description: Please search to see if an issue already exists for the bug you encountered.
      options:
        - label: I have searched the existing issues
          required: true
  - type: textarea
    attributes:
      label: Current Behavior
      description: A concise description of what you're experiencing.
    validations:
      required: false
  - type: textarea
    attributes:
      label: Expected Behavior
      description: A concise description of what you expected to happen.
    validations:
      required: false
  - type: textarea
    attributes:
      label: Steps To Reproduce
      description: Steps to reproduce the behavior.
      placeholder: |
        1. In this environment...
        2. With this config...
        3. Run '...'
        4. See error...
    validations:
      required: false
  - type: textarea
    id: logs
    attributes:
      label: Relevant log output
      description: |
        If applicable, log output to help explain your problem.
      render: shell
    validations:
      required: false
  - type: textarea
    attributes:
      label: Environment
      description: |
        examples:
          - **OS**: Ubuntu 20.04
          - **Python**: 3.7.2 (`python --version`)
          - **dbt**: 0.21.0 (`dbt --version`)
      value: |
        - OS:
        - Python:
        - dbt:
      render: markdown
    validations:
      required: false
  - type: dropdown
    id: database
    attributes:
      label: What database are you using dbt with?
      multiple: true
      options:
        - postgres
        - redshift
        - snowflake
        - bigquery
        - other (mention it in "Additional Context")
    validations:
      required: false
  - type: textarea
    attributes:
      label: Additional Context
      description: |
        Links? References? Anything that will give us more context about the issue you are encountering!

        Tip: You can attach images or log files by clicking this area to highlight it and then dragging files in.
    validations:
      required: false
```
.github/ISSUE_TEMPLATE/bug_report.md (deleted, 41 lines)

````markdown
---
name: Bug report
about: Report a bug or an issue you've found with dbt
title: ''
labels: bug, triage
assignees: ''

---

### Describe the bug
A clear and concise description of what the bug is. What command did you run? What happened?

### Steps To Reproduce
In as much detail as possible, please provide steps to reproduce the issue. Sample data that triggers the issue, example model code, etc is all very helpful here.

### Expected behavior
A clear and concise description of what you expected to happen.

### Screenshots and log output
If applicable, add screenshots or log output to help explain your problem.

### System information
**Which database are you using dbt with?**
- [ ] postgres
- [ ] redshift
- [ ] bigquery
- [ ] snowflake
- [ ] other (specify: ____________)

**The output of `dbt --version`:**
```
<output goes here>
```

**The operating system you're using:**

**The output of `python --version`:**

### Additional context
Add any other context about the problem here.
````
.github/ISSUE_TEMPLATE/config.yml (new file, 16 lines)

```yaml
contact_links:
  - name: Create an issue for dbt-redshift
    url: https://github.com/dbt-labs/dbt-redshift/issues/new/choose
    about: Report a bug or request a feature for dbt-redshift
  - name: Create an issue for dbt-bigquery
    url: https://github.com/dbt-labs/dbt-bigquery/issues/new/choose
    about: Report a bug or request a feature for dbt-bigquery
  - name: Create an issue for dbt-snowflake
    url: https://github.com/dbt-labs/dbt-snowflake/issues/new/choose
    about: Report a bug or request a feature for dbt-snowflake
  - name: Ask a question or get support
    url: https://docs.getdbt.com/docs/guides/getting-help
    about: Ask a question or request support
  - name: Questions on Stack Overflow
    url: https://stackoverflow.com/questions/tagged/dbt
    about: Look at questions/answers at Stack Overflow
```
.github/ISSUE_TEMPLATE/feature-request.yml (new file, 49 lines)

```yaml
name: ✨ Feature
description: Suggest an idea for dbt
title: "[Feature] <title>"
labels: ["enhancement", "triage"]
body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to fill out this feature requests!
  - type: checkboxes
    attributes:
      label: Is there an existing feature request for this?
      description: Please search to see if an issue already exists for the feature you would like.
      options:
        - label: I have searched the existing issues
          required: true
  - type: textarea
    attributes:
      label: Describe the Feature
      description: A clear and concise description of what you want to happen.
    validations:
      required: true
  - type: textarea
    attributes:
      label: Describe alternatives you've considered
      description: |
        A clear and concise description of any alternative solutions or features you've considered.
    validations:
      required: false
  - type: textarea
    attributes:
      label: Who will this benefit?
      description: |
        What kind of use case will this feature be useful for? Please be specific and provide examples, this will help us prioritize properly.
    validations:
      required: false
  - type: input
    attributes:
      label: Are you interested in contributing this feature?
      description: Let us know if you want to write some code, and how we can help.
    validations:
      required: false
  - type: textarea
    attributes:
      label: Anything else?
      description: |
        Links? References? Anything that will give us more context about the feature you are suggesting!
    validations:
      required: false
```
.github/ISSUE_TEMPLATE/feature_request.md (deleted, 23 lines)

```markdown
---
name: Feature request
about: Suggest an idea for dbt
title: ''
labels: enhancement, triage
assignees: ''

---

### Describe the feature
A clear and concise description of what you want to happen.

### Describe alternatives you've considered
A clear and concise description of any alternative solutions or features you've considered.

### Additional context
Is this feature database-specific? Which database(s) is/are relevant? Please include any other relevant context here.

### Who will this benefit?
What kind of use case will this feature be useful for? Please be specific and provide examples, this will help us prioritize properly.

### Are you interested in contributing this feature?
Let us know if you want to write some code, and how we can help.
```
.github/actions/setup-postgres-linux/action.yml (new file, 10 lines)

```yaml
name: "Set up postgres (linux)"
description: "Set up postgres service on linux vm for dbt integration tests"
runs:
  using: "composite"
  steps:
    - shell: bash
      run: |
        sudo systemctl start postgresql.service
        pg_isready
        sudo -u postgres bash ${{ github.action_path }}/setup_db.sh
```
.github/actions/setup-postgres-linux/setup_db.sh (new symbolic link → ../../../test/setup_db.sh)
.github/actions/setup-postgres-macos/action.yml (new file, 24 lines)

```yaml
name: "Set up postgres (macos)"
description: "Set up postgres service on macos vm for dbt integration tests"
runs:
  using: "composite"
  steps:
    - shell: bash
      run: |
        brew services start postgresql
        echo "Check PostgreSQL service is running"
        i=10
        COMMAND='pg_isready'
        while [ $i -gt -1 ]; do
          if [ $i == 0 ]; then
            echo "PostgreSQL service not ready, all attempts exhausted"
            exit 1
          fi
          echo "Check PostgreSQL service status"
          eval $COMMAND && break
          echo "PostgreSQL service not ready, wait 10 more sec, attempts left: $i"
          sleep 10
          ((i--))
        done
        createuser -s postgres
        bash ${{ github.action_path }}/setup_db.sh
```
.github/actions/setup-postgres-macos/setup_db.sh (new symbolic link → ../../../test/setup_db.sh)
.github/actions/setup-postgres-windows/action.yml (new file, 12 lines)

```yaml
name: "Set up postgres (windows)"
description: "Set up postgres service on windows vm for dbt integration tests"
runs:
  using: "composite"
  steps:
    - shell: pwsh
      run: |
        $pgService = Get-Service -Name postgresql*
        Set-Service -InputObject $pgService -Status running -StartupType automatic
        Start-Process -FilePath "$env:PGBIN\pg_isready" -Wait -PassThru
        $env:Path += ";$env:PGBIN"
        bash ${{ github.action_path }}/setup_db.sh
```
.github/actions/setup-postgres-windows/setup_db.sh (new symbolic link → ../../../test/setup_db.sh)
.github/dependabot.yml (modified)

```diff
@@ -11,26 +11,11 @@ updates:
     schedule:
       interval: "daily"
     rebase-strategy: "disabled"
-  - package-ecosystem: "pip"
-    directory: "/plugins/bigquery"
-    schedule:
-      interval: "daily"
-    rebase-strategy: "disabled"
   - package-ecosystem: "pip"
     directory: "/plugins/postgres"
     schedule:
       interval: "daily"
     rebase-strategy: "disabled"
-  - package-ecosystem: "pip"
-    directory: "/plugins/redshift"
-    schedule:
-      interval: "daily"
-    rebase-strategy: "disabled"
-  - package-ecosystem: "pip"
-    directory: "/plugins/snowflake"
-    schedule:
-      interval: "daily"
-    rebase-strategy: "disabled"
 
   # docker dependencies
   - package-ecosystem: "docker"
```
.github/pull_request_template.md (modified)

```diff
@@ -4,19 +4,18 @@ resolves #
 Include the number of the issue addressed by this PR above if applicable.
 PRs for code changes without an associated issue *will not be merged*.
 See CONTRIBUTING.md for more information.
 
 Example:
   resolves #1234
 -->
 
 ### Description
 
-<!--- Describe the Pull Request here -->
+<!---
+  Describe the Pull Request here. Add any references and info to help reviewers
+  understand your changes. Include any tradeoffs you considered.
+-->
 
 ### Checklist
-- [ ] I have signed the [CLA](https://docs.getdbt.com/docs/contributor-license-agreements)
-- [ ] I have run this code in development and it appears to resolve the stated issue
-- [ ] This PR includes tests, or tests are not required/relevant for this PR
-- [ ] I have updated the `CHANGELOG.md` and added information about my change to the "dbt next" section.
+- [ ] I have signed the [CLA](https://docs.getdbt.com/docs/contributor-license-agreements)
+- [ ] I have run this code in development and it appears to resolve the stated issue
+- [ ] This PR includes tests, or tests are not required/relevant for this PR
+- [ ] I have updated the `CHANGELOG.md` and added information about my change
```
.github/scripts/integration-test-matrix.js (new file, 95 lines)

```javascript
module.exports = ({ context }) => {
  const defaultPythonVersion = "3.8";
  const supportedPythonVersions = ["3.6", "3.7", "3.8", "3.9"];
  const supportedAdapters = ["postgres"];

  // if PR, generate matrix based on files changed and PR labels
  if (context.eventName.includes("pull_request")) {
    // `changes` is a list of adapter names that have related
    // file changes in the PR
    // ex: ['postgres', 'snowflake']
    const changes = JSON.parse(process.env.CHANGES);
    const labels = context.payload.pull_request.labels.map(({ name }) => name);
    console.log("labels", labels);
    console.log("changes", changes);
    const testAllLabel = labels.includes("test all");
    const include = [];

    for (const adapter of supportedAdapters) {
      if (
        changes.includes(adapter) ||
        testAllLabel ||
        labels.includes(`test ${adapter}`)
      ) {
        for (const pythonVersion of supportedPythonVersions) {
          if (
            pythonVersion === defaultPythonVersion ||
            labels.includes(`test python${pythonVersion}`) ||
            testAllLabel
          ) {
            // always run tests on ubuntu by default
            include.push({
              os: "ubuntu-latest",
              adapter,
              "python-version": pythonVersion,
            });

            if (labels.includes("test windows") || testAllLabel) {
              include.push({
                os: "windows-latest",
                adapter,
                "python-version": pythonVersion,
              });
            }

            if (labels.includes("test macos") || testAllLabel) {
              include.push({
                os: "macos-latest",
                adapter,
                "python-version": pythonVersion,
              });
            }
          }
        }
      }
    }

    console.log("matrix", { include });

    return {
      include,
    };
  }
  // if not PR, generate matrix of python version, adapter, and operating
  // system to run integration tests on

  const include = [];
  // run for all adapters and python versions on ubuntu
  for (const adapter of supportedAdapters) {
    for (const pythonVersion of supportedPythonVersions) {
      include.push({
        os: 'ubuntu-latest',
        adapter: adapter,
        "python-version": pythonVersion,
      });
    }
  }

  // additionally include runs for all adapters, on macos and windows,
  // but only for the default python version
  for (const adapter of supportedAdapters) {
    for (const operatingSystem of ["windows-latest", "macos-latest"]) {
      include.push({
        os: operatingSystem,
        adapter: adapter,
        "python-version": defaultPythonVersion,
      });
    }
  }

  console.log("matrix", { include });

  return {
    include,
  };
};
```
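For orientation, and purely as an illustration rather than output captured from CI, here is a sketch of the matrix shape this script returns for a push (non-PR) run given the adapter and Python lists above: all four Python versions on Ubuntu, plus the default Python on Windows and macOS. The Python literal below is an assumption derived from reading the script, not a file in the repository.

```python
# Illustrative only: the matrix the script above would emit for a non-PR run,
# with supportedAdapters = ["postgres"] and Python 3.6-3.9.
expected_matrix = {
    "include": [
        {"os": "ubuntu-latest", "adapter": "postgres", "python-version": v}
        for v in ("3.6", "3.7", "3.8", "3.9")
    ]
    + [
        {"os": runner_os, "adapter": "postgres", "python-version": "3.8"}
        for runner_os in ("windows-latest", "macos-latest")
    ]
}
print(len(expected_matrix["include"]))  # 6 combinations
```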
.github/workflows/integration.yml (new file, 222 lines)

```yaml
# **what?**
# This workflow runs all integration tests for supported OS
# and python versions and core adapters. If triggered by PR,
# the workflow will only run tests for adapters related
# to code changes. Use the `test all` and `test ${adapter}`
# label to run all or additional tests. Use `ok to test`
# label to mark PRs from forked repositories that are safe
# to run integration tests for. Requires secrets to run
# against different warehouses.

# **why?**
# This checks the functionality of dbt from a user's perspective
# and attempts to catch functional regressions.

# **when?**
# This workflow will run on every push to a protected branch
# and when manually triggered. It will also run for all PRs, including
# PRs from forks. The workflow will be skipped until there is a label
# to mark the PR as safe to run.

name: Adapter Integration Tests

on:
  # pushes to release branches
  push:
    branches:
      - "main"
      - "develop"
      - "*.latest"
      - "releases/*"
  # all PRs, important to note that `pull_request_target` workflows
  # will run in the context of the target branch of a PR
  pull_request_target:
  # manual tigger
  workflow_dispatch:

# explicitly turn off permissions for `GITHUB_TOKEN`
permissions: read-all

# will cancel previous workflows triggered by the same event and for the same ref for PRs or same SHA otherwise
concurrency:
  group: ${{ github.workflow }}-${{ github.event_name }}-${{ contains(github.event_name, 'pull_request') && github.event.pull_request.head.ref || github.sha }}
  cancel-in-progress: true

# sets default shell to bash, for all operating systems
defaults:
  run:
    shell: bash

jobs:
  # generate test metadata about what files changed and the testing matrix to use
  test-metadata:
    # run if not a PR from a forked repository or has a label to mark as safe to test
    if: >-
      github.event_name != 'pull_request_target' ||
      github.event.pull_request.head.repo.full_name == github.repository ||
      contains(github.event.pull_request.labels.*.name, 'ok to test')

    runs-on: ubuntu-latest

    outputs:
      matrix: ${{ steps.generate-matrix.outputs.result }}

    steps:
      - name: Check out the repository (non-PR)
        if: github.event_name != 'pull_request_target'
        uses: actions/checkout@v2
        with:
          persist-credentials: false

      - name: Check out the repository (PR)
        if: github.event_name == 'pull_request_target'
        uses: actions/checkout@v2
        with:
          persist-credentials: false
          ref: ${{ github.event.pull_request.head.sha }}

      - name: Check if relevant files changed
        # https://github.com/marketplace/actions/paths-changes-filter
        # For each filter, it sets output variable named by the filter to the text:
        #  'true' - if any of changed files matches any of filter rules
        #  'false' - if none of changed files matches any of filter rules
        # also, returns:
        #  `changes` - JSON array with names of all filters matching any of the changed files
        uses: dorny/paths-filter@v2
        id: get-changes
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          filters: |
            postgres:
              - 'core/**'
              - 'plugins/postgres/**'
              - 'dev-requirements.txt'

      - name: Generate integration test matrix
        id: generate-matrix
        uses: actions/github-script@v4
        env:
          CHANGES: ${{ steps.get-changes.outputs.changes }}
        with:
          script: |
            const script = require('./.github/scripts/integration-test-matrix.js')
            const matrix = script({ context })
            console.log(matrix)
            return matrix

  test:
    name: ${{ matrix.adapter }} / python ${{ matrix.python-version }} / ${{ matrix.os }}

    # run if not a PR from a forked repository or has a label to mark as safe to test
    # also checks that the matrix generated is not empty
    if: >-
      needs.test-metadata.outputs.matrix &&
      fromJSON( needs.test-metadata.outputs.matrix ).include[0] &&
      (
        github.event_name != 'pull_request_target' ||
        github.event.pull_request.head.repo.full_name == github.repository ||
        contains(github.event.pull_request.labels.*.name, 'ok to test')
      )

    runs-on: ${{ matrix.os }}

    needs: test-metadata

    strategy:
      fail-fast: false
      matrix: ${{ fromJSON(needs.test-metadata.outputs.matrix) }}

    env:
      TOXENV: integration-${{ matrix.adapter }}
      PYTEST_ADDOPTS: "-v --color=yes -n4 --csv integration_results.csv"
      DBT_INVOCATION_ENV: github-actions

    steps:
      - name: Check out the repository
        if: github.event_name != 'pull_request_target'
        uses: actions/checkout@v2
        with:
          persist-credentials: false

      # explicity checkout the branch for the PR,
      # this is necessary for the `pull_request_target` event
      - name: Check out the repository (PR)
        if: github.event_name == 'pull_request_target'
        uses: actions/checkout@v2
        with:
          persist-credentials: false
          ref: ${{ github.event.pull_request.head.sha }}

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}

      - name: Set up postgres (linux)
        if: |
          matrix.adapter == 'postgres' &&
          runner.os == 'Linux'
        uses: ./.github/actions/setup-postgres-linux

      - name: Set up postgres (macos)
        if: |
          matrix.adapter == 'postgres' &&
          runner.os == 'macOS'
        uses: ./.github/actions/setup-postgres-macos

      - name: Set up postgres (windows)
        if: |
          matrix.adapter == 'postgres' &&
          runner.os == 'Windows'
        uses: ./.github/actions/setup-postgres-windows

      - name: Install python dependencies
        run: |
          pip install --user --upgrade pip
          pip install tox
          pip --version
          tox --version

      - name: Run tox (postgres)
        if: matrix.adapter == 'postgres'
        run: tox

      - uses: actions/upload-artifact@v2
        if: always()
        with:
          name: logs
          path: ./logs

      - name: Get current date
        if: always()
        id: date
        run: echo "::set-output name=date::$(date +'%Y-%m-%dT%H_%M_%S')" #no colons allowed for artifacts

      - uses: actions/upload-artifact@v2
        if: always()
        with:
          name: integration_results_${{ matrix.python-version }}_${{ matrix.os }}_${{ matrix.adapter }}-${{ steps.date.outputs.date }}.csv
          path: integration_results.csv

  require-label-comment:
    runs-on: ubuntu-latest

    needs: test

    permissions:
      pull-requests: write

    steps:
      - name: Needs permission PR comment
        if: >-
          needs.test.result == 'skipped' &&
          github.event_name == 'pull_request_target' &&
          github.event.pull_request.head.repo.full_name != github.repository
        uses: unsplash/comment-on-pr@master
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          msg: |
            "You do not have permissions to run integration tests, @dbt-labs/core "\
            "needs to label this PR with `ok to test` in order to run integration tests!"
          check_for_duplicate_msg: true
```
.github/workflows/main.yml (new file, 206 lines)

```yaml
# **what?**
# Runs code quality checks, unit tests, and verifies python build on
# all code commited to the repository. This workflow should not
# require any secrets since it runs for PRs from forked repos.
# By default, secrets are not passed to workflows running from
# a forked repo.

# **why?**
# Ensure code for dbt meets a certain quality standard.

# **when?**
# This will run for all PRs, when code is pushed to a release
# branch, and when manually triggered.

name: Tests and Code Checks

on:
  push:
    branches:
      - "main"
      - "develop"
      - "*.latest"
      - "releases/*"
  pull_request:
  workflow_dispatch:

permissions: read-all

# will cancel previous workflows triggered by the same event and for the same ref for PRs or same SHA otherwise
concurrency:
  group: ${{ github.workflow }}-${{ github.event_name }}-${{ contains(github.event_name, 'pull_request') && github.event.pull_request.head.ref || github.sha }}
  cancel-in-progress: true

defaults:
  run:
    shell: bash

jobs:
  code-quality:
    name: ${{ matrix.toxenv }}

    runs-on: ubuntu-latest

    strategy:
      fail-fast: false
      matrix:
        toxenv: [flake8, mypy]

    env:
      TOXENV: ${{ matrix.toxenv }}
      PYTEST_ADDOPTS: "-v --color=yes"

    steps:
      - name: Check out the repository
        uses: actions/checkout@v2
        with:
          persist-credentials: false

      - name: Set up Python
        uses: actions/setup-python@v2

      - name: Install python dependencies
        run: |
          pip install --user --upgrade pip
          pip install tox
          pip --version
          tox --version

      - name: Run tox
        run: tox

  unit:
    name: unit test / python ${{ matrix.python-version }}

    runs-on: ubuntu-latest

    strategy:
      fail-fast: false
      matrix:
        python-version: [3.6, 3.7, 3.8] # TODO: support unit testing for python 3.9 (https://github.com/dbt-labs/dbt/issues/3689)

    env:
      TOXENV: "unit"
      PYTEST_ADDOPTS: "-v --color=yes --csv unit_results.csv"

    steps:
      - name: Check out the repository
        uses: actions/checkout@v2
        with:
          persist-credentials: false

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}

      - name: Install python dependencies
        run: |
          pip install --user --upgrade pip
          pip install tox
          pip --version
          tox --version

      - name: Run tox
        run: tox

      - name: Get current date
        if: always()
        id: date
        run: echo "::set-output name=date::$(date +'%Y-%m-%dT%H_%M_%S')" #no colons allowed for artifacts

      - uses: actions/upload-artifact@v2
        if: always()
        with:
          name: unit_results_${{ matrix.python-version }}-${{ steps.date.outputs.date }}.csv
          path: unit_results.csv

  build:
    name: build packages

    runs-on: ubuntu-latest

    steps:
      - name: Check out the repository
        uses: actions/checkout@v2
        with:
          persist-credentials: false

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8

      - name: Install python dependencies
        run: |
          pip install --user --upgrade pip
          pip install --upgrade setuptools wheel twine check-wheel-contents
          pip --version

      - name: Build distributions
        run: ./scripts/build-dist.sh

      - name: Show distributions
        run: ls -lh dist/

      - name: Check distribution descriptions
        run: |
          twine check dist/*

      - name: Check wheel contents
        run: |
          check-wheel-contents dist/*.whl --ignore W007,W008

      - uses: actions/upload-artifact@v2
        with:
          name: dist
          path: dist/

  test-build:
    name: verify packages / python ${{ matrix.python-version }} / ${{ matrix.os }}

    needs: build

    runs-on: ${{ matrix.os }}

    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]
        python-version: [3.6, 3.7, 3.8, 3.9]

    steps:
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}

      - name: Install python dependencies
        run: |
          pip install --user --upgrade pip
          pip install --upgrade wheel
          pip --version

      - uses: actions/download-artifact@v2
        with:
          name: dist
          path: dist/

      - name: Show distributions
        run: ls -lh dist/

      - name: Install wheel distributions
        run: |
          find ./dist/*.whl -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/

      - name: Check wheel distributions
        run: |
          dbt --version

      - name: Install source distributions
        run: |
          find ./dist/*.gz -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/

      - name: Check source distributions
        run: |
          dbt --version
```
.github/workflows/performance.yml (new file, 176 lines)

```yaml
name: Performance Regression Tests
# Schedule triggers
on:
  # runs twice a day at 10:05am and 10:05pm
  schedule:
    - cron: "5 10,22 * * *"
  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

jobs:
  # checks fmt of runner code
  # purposefully not a dependency of any other job
  # will block merging, but not prevent developing
  fmt:
    name: Cargo fmt
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: stable
          override: true
      - run: rustup component add rustfmt
      - uses: actions-rs/cargo@v1
        with:
          command: fmt
          args: --manifest-path performance/runner/Cargo.toml --all -- --check

  # runs any tests associated with the runner
  # these tests make sure the runner logic is correct
  test-runner:
    name: Test Runner
    runs-on: ubuntu-latest
    env:
      # turns errors into warnings
      RUSTFLAGS: "-D warnings"
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: stable
          override: true
      - uses: actions-rs/cargo@v1
        with:
          command: test
          args: --manifest-path performance/runner/Cargo.toml

  # build an optimized binary to be used as the runner in later steps
  build-runner:
    needs: [test-runner]
    name: Build Runner
    runs-on: ubuntu-latest
    env:
      RUSTFLAGS: "-D warnings"
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: stable
          override: true
      - uses: actions-rs/cargo@v1
        with:
          command: build
          args: --release --manifest-path performance/runner/Cargo.toml
      - uses: actions/upload-artifact@v2
        with:
          name: runner
          path: performance/runner/target/release/runner

  # run the performance measurements on the current or default branch
  measure-dev:
    needs: [build-runner]
    name: Measure Dev Branch
    runs-on: ubuntu-latest
    steps:
      - name: checkout dev
        uses: actions/checkout@v2
      - name: Setup Python
        uses: actions/setup-python@v2.2.2
        with:
          python-version: "3.8"
      - name: install dbt
        run: pip install -r dev-requirements.txt -r editable-requirements.txt
      - name: install hyperfine
        run: wget https://github.com/sharkdp/hyperfine/releases/download/v1.11.0/hyperfine_1.11.0_amd64.deb && sudo dpkg -i hyperfine_1.11.0_amd64.deb
      - uses: actions/download-artifact@v2
        with:
          name: runner
      - name: change permissions
        run: chmod +x ./runner
      - name: run
        run: ./runner measure -b dev -p ${{ github.workspace }}/performance/projects/
      - uses: actions/upload-artifact@v2
        with:
          name: dev-results
          path: performance/results/

  # run the performance measurements on the release branch which we use
  # as a performance baseline. This part takes by far the longest, so
  # we do everything we can first so the job fails fast.
  # -----
  # we need to checkout dbt twice in this job: once for the baseline dbt
  # version, and once to get the latest regression testing projects,
  # metrics, and runner code from the develop or current branch so that
  # the calculations match for both versions of dbt we are comparing.
  measure-baseline:
    needs: [build-runner]
    name: Measure Baseline Branch
    runs-on: ubuntu-latest
    steps:
      - name: checkout latest
        uses: actions/checkout@v2
        with:
          ref: "0.20.latest"
      - name: Setup Python
        uses: actions/setup-python@v2.2.2
        with:
          python-version: "3.8"
      - name: move repo up a level
        run: mkdir ${{ github.workspace }}/../baseline/ && cp -r ${{ github.workspace }} ${{ github.workspace }}/../baseline
      - name: "[debug] ls new dbt location"
        run: ls ${{ github.workspace }}/../baseline/dbt/
      # installation creates egg-links so we have to preserve source
      - name: install dbt from new location
        run: cd ${{ github.workspace }}/../baseline/dbt/ && pip install -r dev-requirements.txt -r editable-requirements.txt
      # checkout the current branch to get all the target projects
      # this deletes the old checked out code which is why we had to copy before
      - name: checkout dev
        uses: actions/checkout@v2
      - name: install hyperfine
        run: wget https://github.com/sharkdp/hyperfine/releases/download/v1.11.0/hyperfine_1.11.0_amd64.deb && sudo dpkg -i hyperfine_1.11.0_amd64.deb
      - uses: actions/download-artifact@v2
        with:
          name: runner
      - name: change permissions
        run: chmod +x ./runner
      - name: run runner
        run: ./runner measure -b baseline -p ${{ github.workspace }}/performance/projects/
      - uses: actions/upload-artifact@v2
        with:
          name: baseline-results
          path: performance/results/

  # detect regressions on the output generated from measuring
  # the two branches. Exits with non-zero code if a regression is detected.
  calculate-regressions:
    needs: [measure-dev, measure-baseline]
    name: Compare Results
    runs-on: ubuntu-latest
    steps:
      - uses: actions/download-artifact@v2
        with:
          name: dev-results
      - uses: actions/download-artifact@v2
        with:
          name: baseline-results
      - name: "[debug] ls result files"
        run: ls
      - uses: actions/download-artifact@v2
        with:
          name: runner
      - name: change permissions
        run: chmod +x ./runner
      - name: make results directory
        run: mkdir ./final-output/
      - name: run calculation
        run: ./runner calculate -r ./ -o ./final-output/
      # always attempt to upload the results even if there were regressions found
      - uses: actions/upload-artifact@v2
        if: ${{ always() }}
        with:
          name: final-calculations
          path: ./final-output/*
```
.github/workflows/schema-check.yml (new file, 87 lines)

```yaml
# **what?**
# Compares the schema of the dbt version of the given ref vs
# the latest official schema releases found in schemas.getdbt.com.
# If there are differences, the workflow will fail and upload the
# diff as an artifact. The metadata team should be alerted to the change.
#
# **why?**
# Reaction work may need to be done if artifact schema changes
# occur so we want to proactively alert to it.
#
# **when?**
# On pushes to `develop` and release branches. Manual runs are also enabled.
name: Artifact Schema Check

on:
  workflow_dispatch:
  pull_request: #TODO: remove before merging
  push:
    branches:
      - "develop"
      - "*.latest"
      - "releases/*"

env:
  LATEST_SCHEMA_PATH: ${{ github.workspace }}/new_schemas
  SCHEMA_DIFF_ARTIFACT: ${{ github.workspace }}//schema_schanges.txt
  DBT_REPO_DIRECTORY: ${{ github.workspace }}/dbt
  SCHEMA_REPO_DIRECTORY: ${{ github.workspace }}/schemas.getdbt.com

jobs:
  checking-schemas:
    name: "Checking schemas"
    runs-on: ubuntu-latest

    steps:
      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8

      - name: Checkout dbt repo
        uses: actions/checkout@v2.3.4
        with:
          path: ${{ env.DBT_REPO_DIRECTORY }}

      - name: Checkout schemas.getdbt.com repo
        uses: actions/checkout@v2.3.4
        with:
          repository: dbt-labs/schemas.getdbt.com
          ref: 'main'
          ssh-key: ${{ secrets.SCHEMA_SSH_PRIVATE_KEY }}
          path: ${{ env.SCHEMA_REPO_DIRECTORY }}

      - name: Generate current schema
        run: |
          cd ${{ env.DBT_REPO_DIRECTORY }}
          python3 -m venv env
          source env/bin/activate
          pip install --upgrade pip
          pip install -r dev-requirements.txt -r editable-requirements.txt
          python scripts/collect-artifact-schema.py --path ${{ env.LATEST_SCHEMA_PATH }}

      # Copy generated schema files into the schemas.getdbt.com repo
      # Do a git diff to find any changes
      # Ignore any date or version changes though
      - name: Compare schemas
        run: |
          cp -r ${{ env.LATEST_SCHEMA_PATH }}/dbt ${{ env.SCHEMA_REPO_DIRECTORY }}
          cd ${{ env.SCHEMA_REPO_DIRECTORY }}
          diff_results=$(git diff -I='*[0-9]{4}-(0[1-9]|1[0-2])-(0[1-9]|[1-2][0-9]|3[0-1])T' \
          -I='*[0-9]{1}.[0-9]{2}.[0-9]{1}(rc[0-9]|b[0-9]| )' --compact-summary)
          if [[ $(echo diff_results) ]]; then
            echo $diff_results
            echo "Schema changes detected!"
            git diff -I='*[0-9]{4}-(0[1-9]|1[0-2])-(0[1-9]|[1-2][0-9]|3[0-1])T' \
            -I='*[0-9]{1}.[0-9]{2}.[0-9]{1}(rc[0-9]|b[0-9]| )' > ${{ env.SCHEMA_DIFF_ARTIFACT }}
            exit 1
          else
            echo "No schema changes detected"
          fi

      - name: Upload schema diff
        uses: actions/upload-artifact@v2.2.4
        if: ${{ failure() }}
        with:
          name: 'schema_schanges.txt'
          path: '${{ env.SCHEMA_DIFF_ARTIFACT }}'
```
.github/workflows/stale.yml (new file, 18 lines)

```yaml
name: "Close stale issues and PRs"
on:
  schedule:
    - cron: "30 1 * * *"

jobs:
  stale:
    runs-on: ubuntu-latest
    steps:
      # pinned at v4 (https://github.com/actions/stale/releases/tag/v4.0.0)
      - uses: actions/stale@cdf15f641adb27a71842045a94023bef6945e3aa
        with:
          stale-issue-message: "This issue has been marked as Stale because it has been open for 180 days with no activity. If you would like the issue to remain open, please remove the stale label or comment on the issue, or it will be closed in 7 days."
          stale-pr-message: "This PR has been marked as Stale because it has been open for 180 days with no activity. If you would like the PR to remain open, please remove the stale label or comment on the PR, or it will be closed in 7 days."
          # mark issues/PRs stale when they haven't seen activity in 180 days
          days-before-stale: 180
          # ignore checking issues with the following labels
          exempt-issue-labels: "epic,discussion"
```
178
.github/workflows/tests.yml
vendored
178
.github/workflows/tests.yml
vendored
@@ -1,178 +0,0 @@
# This is a workflow to run our unit and integration tests for windows and mac

name: dbt Tests

# Triggers
on:
  # Triggers the workflow on push or pull request events and also adds a manual trigger
  push:
    branches:
      - 'develop'
      - '*.latest'
      - 'releases/*'
  pull_request_target:
    branches:
      - 'develop'
      - '*.latest'
      - 'pr/*'
      - 'releases/*'
  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

jobs:
  Linting:
    runs-on: ubuntu-latest #no need to run on every OS
    steps:
      - uses: actions/checkout@v2
      - name: Setup Python
        uses: actions/setup-python@v2.2.2
        with:
          python-version: '3.8'
          architecture: 'x64'

      - name: 'Install dependencies'
        run: python -m pip install --upgrade pip && pip install tox

      - name: 'Linting'
        run: tox -e mypy,flake8 -- -v

  UnitTest:
    strategy:
      matrix:
        os: [windows-latest, ubuntu-latest, macos-latest]
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v2
      - name: Setup Python
        uses: actions/setup-python@v2.2.2
        with:
          python-version: '3.8'
          architecture: 'x64'

      - name: 'Install dependencies'
        run: python -m pip install --upgrade pip && pip install tox

      - name: 'Run unit tests'
        run: python -m tox -e py -- -v

  PostgresIntegrationTest:
    runs-on: 'windows-latest' #TODO: Add Mac support
    environment: 'Postgres'
    needs: UnitTest
    steps:
      - uses: actions/checkout@v2
      - name: 'Install postgresql and set up database'
        shell: pwsh
        run: |
          $serviceName = Get-Service -Name postgresql*
          Set-Service -InputObject $serviceName -StartupType Automatic
          Start-Service -InputObject $serviceName
          & $env:PGBIN\createdb.exe -U postgres dbt
          & $env:PGBIN\psql.exe -U postgres -c "CREATE ROLE root WITH PASSWORD '$env:ROOT_PASSWORD';"
          & $env:PGBIN\psql.exe -U postgres -c "ALTER ROLE root WITH LOGIN;"
          & $env:PGBIN\psql.exe -U postgres -c "GRANT CREATE, CONNECT ON DATABASE dbt TO root WITH GRANT OPTION;"
          & $env:PGBIN\psql.exe -U postgres -c "CREATE ROLE noaccess WITH PASSWORD '$env:NOACCESS_PASSWORD' NOSUPERUSER;"
          & $env:PGBIN\psql.exe -U postgres -c "ALTER ROLE noaccess WITH LOGIN;"
          & $env:PGBIN\psql.exe -U postgres -c "GRANT CONNECT ON DATABASE dbt TO noaccess;"
        env:
          ROOT_PASSWORD: ${{ secrets.ROOT_PASSWORD }}
          NOACCESS_PASSWORD: ${{ secrets.NOACCESS_PASSWORD }}

      - name: Setup Python
        uses: actions/setup-python@v2.2.2
        with:
          python-version: '3.7'
          architecture: 'x64'

      - name: 'Install dependencies'
        run: python -m pip install --upgrade pip && pip install tox

      - name: 'Run integration tests'
        run: python -m tox -e py-postgres -- -v -n4

  # These three are all similar except secure environment variables, which MUST be passed along to their tasks,
  # but there's probably a better way to do this!
  SnowflakeIntegrationTest:
    strategy:
      matrix:
        os: [windows-latest, macos-latest]
    runs-on: ${{ matrix.os }}
    environment: 'Snowflake'
    needs: UnitTest
    steps:
      - uses: actions/checkout@v2
      - name: Setup Python
        uses: actions/setup-python@v2.2.2
        with:
          python-version: '3.7'
          architecture: 'x64'

      - name: 'Install dependencies'
        run: python -m pip install --upgrade pip && pip install tox

      - name: 'Run integration tests'
        run: python -m tox -e py-snowflake -- -v -n4
        env:
          SNOWFLAKE_TEST_ACCOUNT: ${{ secrets.SNOWFLAKE_TEST_ACCOUNT }}
          SNOWFLAKE_TEST_PASSWORD: ${{ secrets.SNOWFLAKE_TEST_PASSWORD }}
          SNOWFLAKE_TEST_USER: ${{ secrets.SNOWFLAKE_TEST_USER }}
          SNOWFLAKE_TEST_WAREHOUSE: ${{ secrets.SNOWFLAKE_TEST_WAREHOUSE }}
          SNOWFLAKE_TEST_OAUTH_REFRESH_TOKEN: ${{ secrets.SNOWFLAKE_TEST_OAUTH_REFRESH_TOKEN }}
          SNOWFLAKE_TEST_OAUTH_CLIENT_ID: ${{ secrets.SNOWFLAKE_TEST_OAUTH_CLIENT_ID }}
          SNOWFLAKE_TEST_OAUTH_CLIENT_SECRET: ${{ secrets.SNOWFLAKE_TEST_OAUTH_CLIENT_SECRET }}
          SNOWFLAKE_TEST_ALT_DATABASE: ${{ secrets.SNOWFLAKE_TEST_ALT_DATABASE }}
          SNOWFLAKE_TEST_ALT_WAREHOUSE: ${{ secrets.SNOWFLAKE_TEST_ALT_WAREHOUSE }}
          SNOWFLAKE_TEST_DATABASE: ${{ secrets.SNOWFLAKE_TEST_DATABASE }}
          SNOWFLAKE_TEST_QUOTED_DATABASE: ${{ secrets.SNOWFLAKE_TEST_QUOTED_DATABASE }}
          SNOWFLAKE_TEST_ROLE: ${{ secrets.SNOWFLAKE_TEST_ROLE }}

  BigQueryIntegrationTest:
    strategy:
      matrix:
        os: [windows-latest, macos-latest]
    runs-on: ${{ matrix.os }}
    environment: 'Bigquery'
    needs: UnitTest
    steps:
      - uses: actions/checkout@v2
      - name: Setup Python
        uses: actions/setup-python@v2.2.2
        with:
          python-version: '3.7'
          architecture: 'x64'

      - name: 'Install dependencies'
        run: python -m pip install --upgrade pip && pip install tox

      - name: 'Run integration tests'
        run: python -m tox -e py-bigquery -- -v -n4
        env:
          BIGQUERY_SERVICE_ACCOUNT_JSON: ${{ secrets.BIGQUERY_SERVICE_ACCOUNT_JSON }}
          BIGQUERY_TEST_ALT_DATABASE: ${{ secrets.BIGQUERY_TEST_ALT_DATABASE }}

  RedshiftIntegrationTest:
    strategy:
      matrix:
        os: [windows-latest, macos-latest]
    runs-on: ${{ matrix.os }}
    environment: 'Redshift'
    needs: UnitTest
    steps:
      - uses: actions/checkout@v2
      - name: Setup Python
        uses: actions/setup-python@v2.2.2
        with:
          python-version: '3.7'
          architecture: 'x64'

      - name: 'Install dependencies'
        run: python -m pip install --upgrade pip && pip install tox

      - name: 'Run integration tests'
        run: python -m tox -e py-redshift -- -v -n4
        env:
          REDSHIFT_TEST_DBNAME: ${{ secrets.REDSHIFT_TEST_DBNAME }}
          REDSHIFT_TEST_PASS: ${{ secrets.REDSHIFT_TEST_PASS }}
          REDSHIFT_TEST_USER: ${{ secrets.REDSHIFT_TEST_USER }}
          REDSHIFT_TEST_PORT: ${{ secrets.REDSHIFT_TEST_PORT }}
          REDSHIFT_TEST_HOST: ${{ secrets.REDSHIFT_TEST_HOST }}
109
.github/workflows/version-bump.yml
vendored
Normal file
@@ -0,0 +1,109 @@
# **what?**
# This workflow will take a version number and a dry run flag. With that
# it will run versionbump to update the version number everywhere in the
# code base and then generate an update Docker requirements file. If this
# is a dry run, a draft PR will open with the changes. If this isn't a dry
# run, the changes will be committed to the branch this is run on.

# **why?**
# This is to aid in releasing dbt and making sure we have updated
# the versions and Docker requirements in all places.

# **when?**
# This is triggered either manually OR
# from the repository_dispatch event "version-bump" which is sent from
# the dbt-release repo Action

name: Version Bump

on:
  workflow_dispatch:
    inputs:
      version_number:
        description: 'The version number to bump to'
        required: true
      is_dry_run:
        description: 'Creates a draft PR to allow testing instead of committing to a branch'
        required: true
        default: 'true'
  repository_dispatch:
    types: [version-bump]

jobs:
  bump:
    runs-on: ubuntu-latest
    steps:
      - name: Check out the repository
        uses: actions/checkout@v2

      - name: Set version and dry run values
        id: variables
        env:
          VERSION_NUMBER: "${{ github.event.client_payload.version_number == '' && github.event.inputs.version_number || github.event.client_payload.version_number }}"
          IS_DRY_RUN: "${{ github.event.client_payload.is_dry_run == '' && github.event.inputs.is_dry_run || github.event.client_payload.is_dry_run }}"
        run: |
          echo Repository dispatch event version: ${{ github.event.client_payload.version_number }}
          echo Repository dispatch event dry run: ${{ github.event.client_payload.is_dry_run }}
          echo Workflow dispatch event version: ${{ github.event.inputs.version_number }}
          echo Workflow dispatch event dry run: ${{ github.event.inputs.is_dry_run }}
          echo ::set-output name=VERSION_NUMBER::$VERSION_NUMBER
          echo ::set-output name=IS_DRY_RUN::$IS_DRY_RUN

      - uses: actions/setup-python@v2
        with:
          python-version: "3.8"

      - name: Install python dependencies
        run: |
          python3 -m venv env
          source env/bin/activate
          pip install --upgrade pip

      - name: Create PR branch
        if: ${{ steps.variables.outputs.IS_DRY_RUN == 'true' }}
        run: |
          git checkout -b bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_$GITHUB_RUN_ID
          git push origin bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_$GITHUB_RUN_ID
          git branch --set-upstream-to=origin/bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_$GITHUB_RUN_ID bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_$GITHUB_RUN_ID

      - name: Generate Docker requirements
        run: |
          source env/bin/activate
          pip install -r requirements.txt
          pip freeze -l > docker/requirements/requirements.txt
          git status

      - name: Bump version
        run: |
          source env/bin/activate
          pip install -r dev-requirements.txt
          env/bin/bumpversion --allow-dirty --new-version ${{steps.variables.outputs.VERSION_NUMBER}} major
          git status

      - name: Commit version bump directly
        uses: EndBug/add-and-commit@v7
        if: ${{ steps.variables.outputs.IS_DRY_RUN == 'false' }}
        with:
          author_name: 'Github Build Bot'
          author_email: 'buildbot@fishtownanalytics.com'
          message: 'Bumping version to ${{steps.variables.outputs.VERSION_NUMBER}}'

      - name: Commit version bump to branch
        uses: EndBug/add-and-commit@v7
        if: ${{ steps.variables.outputs.IS_DRY_RUN == 'true' }}
        with:
          author_name: 'Github Build Bot'
          author_email: 'buildbot@fishtownanalytics.com'
          message: 'Bumping version to ${{steps.variables.outputs.VERSION_NUMBER}}'
          branch: 'bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_${{GITHUB.RUN_ID}}'
          push: 'origin origin/bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_${{GITHUB.RUN_ID}}'

      - name: Create Pull Request
        uses: peter-evans/create-pull-request@v3
        if: ${{ steps.variables.outputs.IS_DRY_RUN == 'true' }}
        with:
          author: 'Github Build Bot <buildbot@fishtownanalytics.com>'
          draft: true
          base: ${{github.ref}}
          title: 'Bumping version to ${{steps.variables.outputs.VERSION_NUMBER}}'
          branch: 'bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_${{GITHUB.RUN_ID}}'
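The "Set version and dry run values" step above coalesces its inputs from whichever trigger fired: a `repository_dispatch` payload or a manual `workflow_dispatch` form. A minimal Python sketch of that fallback rule (illustrative only; the function and variable names here are hypothetical, not part of the workflow):

```python
# Hypothetical sketch of the input-coalescing rule used by the
# "Set version and dry run values" step: prefer the repository_dispatch
# payload, fall back to the manual workflow_dispatch input.
def coalesce_inputs(client_payload: dict, workflow_inputs: dict) -> dict:
    def pick(key: str) -> str:
        payload_value = client_payload.get(key, "")
        # An empty payload value means the workflow was started manually.
        return payload_value if payload_value != "" else workflow_inputs.get(key, "")

    return {
        "version_number": pick("version_number"),
        "is_dry_run": pick("is_dry_run"),
    }


# Example: a manual run supplies the values, so the dispatch payload is empty.
print(coalesce_inputs({}, {"version_number": "0.21.0", "is_dry_run": "true"}))
```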
@@ -1,8 +1,9 @@
The core function of dbt is SQL compilation and execution. Users create projects of dbt resources (models, tests, seeds, snapshots, ...), defined in SQL and YAML files, and they invoke dbt to create, update, or query associated views and tables. Today, dbt makes heavy use of Jinja2 to enable the templating of SQL, and to construct a DAG (Directed Acyclic Graph) from all of the resources in a project. Users can also extend their projects by installing resources (including Jinja macros) from other projects, called "packages."
The core function of dbt is SQL compilation and execution. Users create projects of dbt resources (models, tests, seeds, snapshots, ...), defined in SQL and YAML files, and they invoke dbt to create, update, or query associated views and tables. Today, dbt makes heavy use of Jinja2 to enable the templating of SQL, and to construct a DAG (Directed Acyclic Graph) from all of the resources in a project. Users can also extend their projects by installing resources (including Jinja macros) from other projects, called "packages."

## dbt-core

Most of the python code in the repository is within the `core/dbt` directory. Currently the main subdirectories are:

- [`adapters`](core/dbt/adapters): Define base classes for behavior that is likely to differ across databases
- [`clients`](core/dbt/clients): Interface with dependencies (agate, jinja) or across operating systems
- [`config`](core/dbt/config): Reconcile user-supplied configuration from connection profiles, project files, and Jinja macros
@@ -12,23 +13,20 @@ Most of the python code in the repository is within the `core/dbt` directory. Cu
- [`graph`](core/dbt/graph): Produce a `networkx` DAG of project resources, and selecting those resources given user-supplied criteria
- [`include`](core/dbt/include): The dbt "global project," which defines default implementations of Jinja2 macros
- [`parser`](core/dbt/parser): Read project files, validate, construct python objects
- [`rpc`](core/dbt/rpc): Provide remote procedure call server for invoking dbt, following JSON-RPC 2.0 spec
- [`task`](core/dbt/task): Set forth the actions that dbt can perform when invoked

### Invoking dbt

There are two supported ways of invoking dbt: from the command line and using an RPC server.

The "tasks" map to top-level dbt commands. So `dbt run` => task.run.RunTask, etc. Some are more like abstract base classes (GraphRunnableTask, for example) but all the concrete types outside of task/rpc should map to tasks. Currently one executes at a time. The tasks kick off their “Runners” and those do execute in parallel. The parallelism is managed via a thread pool, in GraphRunnableTask.
The "tasks" map to top-level dbt commands. So `dbt run` => task.run.RunTask, etc. Some are more like abstract base classes (GraphRunnableTask, for example) but all the concrete types outside of task should map to tasks. Currently one executes at a time. The tasks kick off their “Runners” and those do execute in parallel. The parallelism is managed via a thread pool, in GraphRunnableTask.
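As described above, each task fans its runners out over a thread pool. A hedged sketch of that pattern using only the standard library (illustrative; this is not dbt-core's actual GraphRunnableTask code):

```python
# Hedged sketch of the "task kicks off runners over a thread pool" pattern
# described above (illustrative only; not dbt-core's GraphRunnableTask).
from concurrent.futures import ThreadPoolExecutor, as_completed
from typing import List


def run_node(node_name: str) -> str:
    # A real runner would compile and execute the node's SQL here.
    return f"ran {node_name}"


def run_task(nodes: List[str], threads: int = 4) -> List[str]:
    results = []
    with ThreadPoolExecutor(max_workers=threads) as pool:
        futures = {pool.submit(run_node, node): node for node in nodes}
        for future in as_completed(futures):
            results.append(future.result())
    return results


if __name__ == "__main__":
    print(run_task(["model_a", "model_b", "model_c"]))
```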
core/dbt/include/index.html
This is the docs website code. It comes from the dbt-docs repository, and is generated when a release is packaged.

## Adapters

dbt uses an adapter-plugin pattern to extend support to different databases, warehouses, query engines, etc. The four core adapters that are in the main repository, contained within the [`plugins`](plugins) subdirectory, are: Postgres, Redshift, Snowflake, and BigQuery. Other warehouses use adapter plugins defined in separate repositories (e.g. [dbt-spark](https://github.com/fishtown-analytics/dbt-spark), [dbt-presto](https://github.com/fishtown-analytics/dbt-presto)).
dbt uses an adapter-plugin pattern to extend support to different databases, warehouses, query engines, etc. For testing and development purposes, the dbt-postgres plugin lives alongside the dbt-core codebase, in the [`plugins`](plugins) subdirectory. Like other adapter plugins, it is a self-contained codebase and package that builds on top of dbt-core.

Each adapter is a mix of python, Jinja2, and SQL. The adapter code also makes heavy use of Jinja2 to wrap modular chunks of SQL functionality, define default implementations, and allow plugins to override it.
Each adapter is a mix of python, Jinja2, and SQL. The adapter code also makes heavy use of Jinja2 to wrap modular chunks of SQL functionality, define default implementations, and allow plugins to override it.

Each adapter plugin is a standalone python package that includes:
2809
CHANGELOG.md
File diff suppressed because it is too large
@@ -14,6 +14,10 @@ This document is a guide intended for folks interested in contributing to `dbt`.
If you're new to python development or contributing to open-source software, we encourage you to read this document from start to finish. If you get stuck, drop us a line in the `#dbt-core-development` channel on [slack](https://community.getdbt.com).

#### Adapters

If you have an issue or code change suggestion related to a specific database [adapter](https://docs.getdbt.com/docs/available-adapters), please refer to that supported database's separate repo for those contributions.

### Signing the CLA

Please note that all contributors to `dbt` must sign the [Contributor License Agreement](https://docs.getdbt.com/docs/contributor-license-agreements) to have their Pull Request merged into the `dbt` codebase. If you are unable to sign the CLA, then the `dbt` maintainers will unfortunately be unable to merge your Pull Request. You are, however, welcome to open issues and comment on existing ones.
@@ -24,7 +28,7 @@ Please note that all contributors to `dbt` must sign the [Contributor License Ag

### Defining the problem

If you have an idea for a new feature or if you've discovered a bug in `dbt`, the first step is to open an issue. Please check the list of [open issues](https://github.com/fishtown-analytics/dbt/issues) before creating a new one. If you find a relevant issue, please add a comment to the open issue instead of creating a new one. There are hundreds of open issues in this repository and it can be hard to know where to look for a relevant open issue. **The `dbt` maintainers are always happy to point contributors in the right direction**, so please err on the side of documenting your idea in a new issue if you are unsure where a problem statement belongs.
If you have an idea for a new feature or if you've discovered a bug in `dbt`, the first step is to open an issue. Please check the list of [open issues](https://github.com/dbt-labs/dbt-core/issues) before creating a new one. If you find a relevant issue, please add a comment to the open issue instead of creating a new one. There are hundreds of open issues in this repository and it can be hard to know where to look for a relevant open issue. **The `dbt` maintainers are always happy to point contributors in the right direction**, so please err on the side of documenting your idea in a new issue if you are unsure where a problem statement belongs.

> **Note:** All community-contributed Pull Requests _must_ be associated with an open issue. If you submit a Pull Request that does not pertain to an open issue, you will be asked to create an issue describing the problem before the Pull Request can be reviewed.

@@ -36,7 +40,7 @@ After you open an issue, a `dbt` maintainer will follow up by commenting on your

If an issue is appropriately well scoped and describes a beneficial change to the `dbt` codebase, then anyone may submit a Pull Request to implement the functionality described in the issue. See the sections below on how to do this.

The `dbt` maintainers will add a `good first issue` label if an issue is suitable for a first-time contributor. This label often means that the required code change is small, limited to one database adapter, or a net-new addition that does not impact existing functionality. You can see the list of currently open issues on the [Contribute](https://github.com/fishtown-analytics/dbt/contribute) page.
The `dbt` maintainers will add a `good first issue` label if an issue is suitable for a first-time contributor. This label often means that the required code change is small, limited to one database adapter, or a net-new addition that does not impact existing functionality. You can see the list of currently open issues on the [Contribute](https://github.com/dbt-labs/dbt-core/contribute) page.

Here's a good workflow:
- Comment on the open issue, expressing your interest in contributing the required code change
@@ -52,15 +56,15 @@ The `dbt` maintainers use labels to categorize open issues. Some labels indicate

| tag | description |
| --- | ----------- |
| [triage](https://github.com/fishtown-analytics/dbt/labels/triage) | This is a new issue which has not yet been reviewed by a `dbt` maintainer. This label is removed when a maintainer reviews and responds to the issue. |
| [bug](https://github.com/fishtown-analytics/dbt/labels/bug) | This issue represents a defect or regression in `dbt` |
| [enhancement](https://github.com/fishtown-analytics/dbt/labels/enhancement) | This issue represents net-new functionality in `dbt` |
| [good first issue](https://github.com/fishtown-analytics/dbt/labels/good%20first%20issue) | This issue does not require deep knowledge of the `dbt` codebase to implement. This issue is appropriate for a first-time contributor. |
| [help wanted](https://github.com/fishtown-analytics/`dbt`/labels/help%20wanted) / [discussion](https://github.com/fishtown-analytics/dbt/labels/discussion) | Conversation around this issue is ongoing, and there isn't yet a clear path forward. Input from community members is most welcome. |
| [duplicate](https://github.com/fishtown-analytics/dbt/issues/duplicate) | This issue is functionally identical to another open issue. The `dbt` maintainers will close this issue and encourage community members to focus conversation on the other one. |
| [snoozed](https://github.com/fishtown-analytics/dbt/labels/snoozed) | This issue describes a good idea, but one which will probably not be addressed in a six-month time horizon. The `dbt` maintainers will revisit these issues periodically and re-prioritize them accordingly. |
| [stale](https://github.com/fishtown-analytics/dbt/labels/stale) | This is an old issue which has not recently been updated. Stale issues will periodically be closed by `dbt` maintainers, but they can be re-opened if the discussion is restarted. |
| [wontfix](https://github.com/fishtown-analytics/dbt/labels/wontfix) | This issue does not require a code change in the `dbt` repository, or the maintainers are unwilling/unable to merge a Pull Request which implements the behavior described in the issue. |
| [triage](https://github.com/dbt-labs/dbt-core/labels/triage) | This is a new issue which has not yet been reviewed by a `dbt` maintainer. This label is removed when a maintainer reviews and responds to the issue. |
| [bug](https://github.com/dbt-labs/dbt-core/labels/bug) | This issue represents a defect or regression in `dbt` |
| [enhancement](https://github.com/dbt-labs/dbt-core/labels/enhancement) | This issue represents net-new functionality in `dbt` |
| [good first issue](https://github.com/dbt-labs/dbt-core/labels/good%20first%20issue) | This issue does not require deep knowledge of the `dbt` codebase to implement. This issue is appropriate for a first-time contributor. |
| [help wanted](https://github.com/dbt-labs/dbt-core/labels/help%20wanted) / [discussion](https://github.com/dbt-labs/dbt-core/labels/discussion) | Conversation around this issue is ongoing, and there isn't yet a clear path forward. Input from community members is most welcome. |
| [duplicate](https://github.com/dbt-labs/dbt-core/issues/duplicate) | This issue is functionally identical to another open issue. The `dbt` maintainers will close this issue and encourage community members to focus conversation on the other one. |
| [snoozed](https://github.com/dbt-labs/dbt-core/labels/snoozed) | This issue describes a good idea, but one which will probably not be addressed in a six-month time horizon. The `dbt` maintainers will revisit these issues periodically and re-prioritize them accordingly. |
| [stale](https://github.com/dbt-labs/dbt-core/labels/stale) | This is an old issue which has not recently been updated. Stale issues will periodically be closed by `dbt` maintainers, but they can be re-opened if the discussion is restarted. |
| [wontfix](https://github.com/dbt-labs/dbt-core/labels/wontfix) | This issue does not require a code change in the `dbt` repository, or the maintainers are unwilling/unable to merge a Pull Request which implements the behavior described in the issue. |

#### Branching Strategy

@@ -68,7 +72,7 @@ The `dbt` maintainers use labels to categorize open issues. Some labels indicate

- **Trunks** are where active development of the next release takes place. There is one trunk named `develop` at the time of writing this, and will be the default branch of the repository.
- **Release Branches** track a specific, not yet complete release of `dbt`. Each minor version release has a corresponding release branch. For example, the `0.11.x` series of releases has a branch called `0.11.latest`. This allows us to release new patch versions under `0.11` without necessarily needing to pull them into the latest version of `dbt`.
- **Feature Branches** track individual features and fixes. On completion they should be merged into the trunk brnach or a specific release branch.
- **Feature Branches** track individual features and fixes. On completion they should be merged into the trunk branch or a specific release branch.

## Getting the code

@@ -78,17 +82,17 @@ You will need `git` in order to download and modify the `dbt` source code. On ma

### External contributors

If you are not a member of the `fishtown-analytics` GitHub organization, you can contribute to `dbt` by forking the `dbt` repository. For a detailed overview on forking, check out the [GitHub docs on forking](https://help.github.com/en/articles/fork-a-repo). In short, you will need to:
If you are not a member of the `dbt-labs` GitHub organization, you can contribute to `dbt` by forking the `dbt` repository. For a detailed overview on forking, check out the [GitHub docs on forking](https://help.github.com/en/articles/fork-a-repo). In short, you will need to:

1. fork the `dbt` repository
2. clone your fork locally
3. check out a new branch for your proposed changes
4. push changes to your fork
5. open a pull request against `fishtown-analytics/dbt` from your forked repository
5. open a pull request against `dbt-labs/dbt` from your forked repository

### Core contributors

If you are a member of the `fishtown-analytics` GitHub organization, you will have push access to the `dbt` repo. Rather than forking `dbt` to make your changes, just clone the repository, check out a new branch, and push directly to that branch.
If you are a member of the `dbt-labs` GitHub organization, you will have push access to the `dbt` repo. Rather than forking `dbt` to make your changes, just clone the repository, check out a new branch, and push directly to that branch.

## Setting up an environment

@@ -103,7 +107,7 @@ A short list of tools used in `dbt` testing that will be helpful to your underst
- [`make`](https://users.cs.duke.edu/~ola/courses/programming/Makefiles/Makefiles.html) - but don't worry too much, nobody _really_ understands how make works and our Makefile is super simple
- [`flake8`](https://flake8.pycqa.org/en/latest/) for code linting
- [`mypy`](https://mypy.readthedocs.io/en/stable/) for static type checking
- [CircleCI](https://circleci.com/product/) and [Azure Pipelines](https://azure.microsoft.com/en-us/services/devops/pipelines/)
- [Github Actions](https://github.com/features/actions)

A deep understanding of these tools is not required to effectively contribute to `dbt`, but we recommend checking out the attached documentation if you're interested in learning more about them.

@@ -135,7 +139,7 @@ brew install postgresql

### Installation

First make sure that you set up your `virtualenv` as described in [Setting up an environment](#setting-up-an-environment). Next, install `dbt` (and its dependencies) with:
First make sure that you set up your `virtualenv` as described in [Setting up an environment](#setting-up-an-environment). Also ensure you have the latest version of pip installed with `pip install --upgrade pip`. Next, install `dbt` (and its dependencies) with:

```sh
make dev
@@ -155,7 +159,7 @@ Configure your [profile](https://docs.getdbt.com/docs/configure-your-profile) as

Getting the `dbt` integration tests set up in your local environment will be very helpful as you start to make changes to your local version of `dbt`. The section that follows outlines some helpful tips for setting up the test environment.

Since `dbt` works with a number of different databases, you will need to supply credentials for one or more of these databases in your test environment. Most organizations don't have access to each of a BigQuery, Redshift, Snowflake, and Postgres database, so it's likely that you will be unable to run every integration test locally. Fortunately, Fishtown Analytics provides a CI environment with access to sandboxed Redshift, Snowflake, BigQuery, and Postgres databases. See the section on [_Submitting a Pull Request_](#submitting-a-pull-request) below for more information on this CI setup.
Although `dbt` works with a number of different databases, you won't need to supply credentials for every one of these databases in your test environment. Instead you can test all dbt-core code changes with Python and Postgres.

### Initial setup

@@ -224,7 +228,7 @@ python -m pytest test/unit/test_graph.py::GraphTest::test__dependency_list
> is a list of useful command-line options for `pytest` to use while developing.
## Submitting a Pull Request

Fishtown Analytics provides a sandboxed Redshift, Snowflake, and BigQuery database for use in a CI environment. When pull requests are submitted to the `fishtown-analytics/dbt` repo, GitHub will trigger automated tests in CircleCI and Azure Pipelines.
dbt Labs provides a CI environment to test changes to specific adapters, and periodic maintenance checks of `dbt-core` through Github Actions. For example, if you submit a pull request to the `dbt-redshift` repo, GitHub will trigger automated code checks and tests against Redshift.

A `dbt` maintainer will review your PR. They may suggest code revision for style or clarity, or request that you add unit or integration test(s). These are good things! We believe that, with a little bit of help, anyone can contribute high-quality code.
@@ -1,4 +1,4 @@
FROM ubuntu:18.04
FROM ubuntu:20.04

ENV DEBIAN_FRONTEND noninteractive

@@ -27,7 +27,7 @@ RUN apt-get update \
&& apt-get install -y \
python \
python-dev \
python-pip \
python3-pip \
python3.6 \
python3.6-dev \
python3-pip \
@@ -186,7 +186,7 @@
same "printed page" as the copyright notice for easier
identification within third-party archives.

Copyright {yyyy} {name of copyright owner}
Copyright 2021 dbt Labs, Inc.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
24
Makefile
@@ -44,30 +44,6 @@ integration-postgres: .env ## Runs postgres integration tests with py38.
integration-postgres-fail-fast: .env ## Runs postgres integration tests with py38 in "fail fast" mode.
$(DOCKER_CMD) tox -e py38-postgres -- -x -nauto

.PHONY: integration-redshift
integration-redshift: .env ## Runs redshift integration tests with py38.
$(DOCKER_CMD) tox -e py38-redshift -- -nauto

.PHONY: integration-redshift-fail-fast
integration-redshift-fail-fast: .env ## Runs redshift integration tests with py38 in "fail fast" mode.
$(DOCKER_CMD) tox -e py38-redshift -- -x -nauto

.PHONY: integration-snowflake
integration-snowflake: .env ## Runs snowflake integration tests with py38.
$(DOCKER_CMD) tox -e py38-snowflake -- -nauto

.PHONY: integration-snowflake-fail-fast
integration-snowflake-fail-fast: .env ## Runs snowflake integration tests with py38 in "fail fast" mode.
$(DOCKER_CMD) tox -e py38-snowflake -- -x -nauto

.PHONY: integration-bigquery
integration-bigquery: .env ## Runs bigquery integration tests with py38.
$(DOCKER_CMD) tox -e py38-bigquery -- -nauto

.PHONY: integration-bigquery-fail-fast
integration-bigquery-fail-fast: .env ## Runs bigquery integration tests with py38 in "fail fast" mode.
$(DOCKER_CMD) tox -e py38-bigquery -- -x -nauto

.PHONY: setup-db
setup-db: ## Setup Postgres database with docker-compose for system testing.
docker-compose up -d database
46
README.md
@@ -1,28 +1,18 @@
<p align="center">
<img src="https://raw.githubusercontent.com/fishtown-analytics/dbt/6c6649f9129d5d108aa3b0526f634cd8f3a9d1ed/etc/dbt-logo-full.svg" alt="dbt logo" width="500"/>
<img src="https://raw.githubusercontent.com/dbt-labs/dbt-core/fa1ea14ddfb1d5ae319d5141844910dd53ab2834/etc/dbt-core.svg" alt="dbt logo" width="750"/>
</p>
<p align="center">
<a href="https://codeclimate.com/github/fishtown-analytics/dbt">
<img src="https://codeclimate.com/github/fishtown-analytics/dbt/badges/gpa.svg" alt="Code Climate"/>
<a href="https://github.com/dbt-labs/dbt-core/actions/workflows/main.yml">
<img src="https://github.com/dbt-labs/dbt-core/actions/workflows/main.yml/badge.svg?event=push" alt="Unit Tests Badge"/>
</a>
<a href="https://circleci.com/gh/fishtown-analytics/dbt/tree/master">
<img src="https://circleci.com/gh/fishtown-analytics/dbt/tree/master.svg?style=svg" alt="CircleCI" />
</a>
<a href="https://ci.appveyor.com/project/DrewBanin/dbt/branch/development">
<img src="https://ci.appveyor.com/api/projects/status/v01rwd3q91jnwp9m/branch/development?svg=true" alt="AppVeyor" />
</a>
<a href="https://community.getdbt.com">
<img src="https://community.getdbt.com/badge.svg" alt="Slack" />
<a href="https://github.com/dbt-labs/dbt-core/actions/workflows/integration.yml">
<img src="https://github.com/dbt-labs/dbt-core/actions/workflows/integration.yml/badge.svg?event=push" alt="Integration Tests Badge"/>
</a>
</p>

**[dbt](https://www.getdbt.com/)** (data build tool) enables data analysts and engineers to transform their data using the same practices that software engineers use to build applications.
**[dbt](https://www.getdbt.com/)** enables data analysts and engineers to transform their data using the same practices that software engineers use to build applications.

dbt is the T in ELT. Organize, cleanse, denormalize, filter, rename, and pre-aggregate the raw data in your warehouse so that it's ready for analysis.



dbt can be used to [aggregate pageviews into sessions](https://github.com/fishtown-analytics/snowplow), calculate [ad spend ROI](https://github.com/fishtown-analytics/facebook-ads), or report on [email campaign performance](https://github.com/fishtown-analytics/mailchimp).


## Understanding dbt

@@ -30,28 +20,22 @@ Analysts using dbt can transform their data by simply writing select statements,

These select statements, or "models", form a dbt project. Models frequently build on top of one another – dbt makes it easy to [manage relationships](https://docs.getdbt.com/docs/ref) between models, and [visualize these relationships](https://docs.getdbt.com/docs/documentation), as well as assure the quality of your transformations through [testing](https://docs.getdbt.com/docs/testing).




## Getting started

- [Install dbt](https://docs.getdbt.com/docs/installation)
- Read the [documentation](https://docs.getdbt.com/).
- Productionize your dbt project with [dbt Cloud](https://www.getdbt.com)
- [Install dbt](https://docs.getdbt.com/docs/installation)
- Read the [introduction](https://docs.getdbt.com/docs/introduction/) and [viewpoint](https://docs.getdbt.com/docs/about/viewpoint/)

## Find out more
## Join the dbt Community

- Check out the [Introduction to dbt](https://docs.getdbt.com/docs/introduction/).
- Read the [dbt Viewpoint](https://docs.getdbt.com/docs/about/viewpoint/).

## Join thousands of analysts in the dbt community

- Join the [chat](http://community.getdbt.com/) on Slack.
- Find community posts on [dbt Discourse](https://discourse.getdbt.com).
- Be part of the conversation in the [dbt Community Slack](http://community.getdbt.com/)
- Read more on the [dbt Community Discourse](https://discourse.getdbt.com)

## Reporting bugs and contributing code

- Want to report a bug or request a feature? Let us know on [Slack](http://community.getdbt.com/), or open [an issue](https://github.com/fishtown-analytics/dbt/issues/new).
- Want to help us build dbt? Check out the [Contributing Getting Started Guide](https://github.com/fishtown-analytics/dbt/blob/HEAD/CONTRIBUTING.md)
- Want to report a bug or request a feature? Let us know on [Slack](http://community.getdbt.com/), or open [an issue](https://github.com/dbt-labs/dbt-core/issues/new)
- Want to help us build dbt? Check out the [Contributing Guide](https://github.com/dbt-labs/dbt-core/blob/HEAD/CONTRIBUTING.md)

## Code of Conduct
@@ -1,154 +0,0 @@
# Python package
# Create and test a Python package on multiple Python versions.
# Add steps that analyze code, save the dist with the build record, publish to a PyPI-compatible index, and more:
# https://docs.microsoft.com/azure/devops/pipelines/languages/python

trigger:
  branches:
    include:
      - develop
      - '*.latest'
      - pr/*

jobs:
- job: UnitTest
  pool:
    vmImage: 'vs2017-win2016'
  steps:
    - task: UsePythonVersion@0
      inputs:
        versionSpec: '3.7'
        architecture: 'x64'

    - script: python -m pip install --upgrade pip && pip install tox
      displayName: 'Install dependencies'

    - script: python -m tox -e py -- -v
      displayName: Run unit tests

- job: PostgresIntegrationTest
  pool:
    vmImage: 'vs2017-win2016'
  dependsOn: UnitTest

  steps:
    - pwsh: |
        $serviceName = Get-Service -Name postgresql*
        Set-Service -InputObject $serviceName -StartupType Automatic
        Start-Service -InputObject $serviceName

        & $env:PGBIN\createdb.exe -U postgres dbt
        & $env:PGBIN\psql.exe -U postgres -c "CREATE ROLE root WITH PASSWORD 'password';"
        & $env:PGBIN\psql.exe -U postgres -c "ALTER ROLE root WITH LOGIN;"
        & $env:PGBIN\psql.exe -U postgres -c "GRANT CREATE, CONNECT ON DATABASE dbt TO root WITH GRANT OPTION;"
        & $env:PGBIN\psql.exe -U postgres -c "CREATE ROLE noaccess WITH PASSWORD 'password' NOSUPERUSER;"
        & $env:PGBIN\psql.exe -U postgres -c "ALTER ROLE noaccess WITH LOGIN;"
        & $env:PGBIN\psql.exe -U postgres -c "GRANT CONNECT ON DATABASE dbt TO noaccess;"
      displayName: Install postgresql and set up database

    - task: UsePythonVersion@0
      inputs:
        versionSpec: '3.7'
        architecture: 'x64'

    - script: python -m pip install --upgrade pip && pip install tox
      displayName: 'Install dependencies'

    - script: python -m tox -e py-postgres -- -v -n4
      displayName: Run integration tests

# These three are all similar except secure environment variables, which MUST be passed along to their tasks,
# but there's probably a better way to do this!
- job: SnowflakeIntegrationTest
  pool:
    vmImage: 'vs2017-win2016'
  dependsOn: UnitTest
  condition: succeeded()
  steps:
    - task: UsePythonVersion@0
      inputs:
        versionSpec: '3.7'
        architecture: 'x64'

    - script: python -m pip install --upgrade pip && pip install tox
      displayName: 'Install dependencies'

    - script: python -m tox -e py-snowflake -- -v -n4
      env:
        SNOWFLAKE_TEST_ACCOUNT: $(SNOWFLAKE_TEST_ACCOUNT)
        SNOWFLAKE_TEST_PASSWORD: $(SNOWFLAKE_TEST_PASSWORD)
        SNOWFLAKE_TEST_USER: $(SNOWFLAKE_TEST_USER)
        SNOWFLAKE_TEST_WAREHOUSE: $(SNOWFLAKE_TEST_WAREHOUSE)
        SNOWFLAKE_TEST_OAUTH_REFRESH_TOKEN: $(SNOWFLAKE_TEST_OAUTH_REFRESH_TOKEN)
        SNOWFLAKE_TEST_OAUTH_CLIENT_ID: $(SNOWFLAKE_TEST_OAUTH_CLIENT_ID)
        SNOWFLAKE_TEST_OAUTH_CLIENT_SECRET: $(SNOWFLAKE_TEST_OAUTH_CLIENT_SECRET)
      displayName: Run integration tests

- job: BigQueryIntegrationTest
  pool:
    vmImage: 'vs2017-win2016'
  dependsOn: UnitTest
  condition: succeeded()
  steps:
    - task: UsePythonVersion@0
      inputs:
        versionSpec: '3.7'
        architecture: 'x64'
    - script: python -m pip install --upgrade pip && pip install tox
      displayName: 'Install dependencies'
    - script: python -m tox -e py-bigquery -- -v -n4
      env:
        BIGQUERY_SERVICE_ACCOUNT_JSON: $(BIGQUERY_SERVICE_ACCOUNT_JSON)
      displayName: Run integration tests

- job: RedshiftIntegrationTest
  pool:
    vmImage: 'vs2017-win2016'
  dependsOn: UnitTest
  condition: succeeded()
  steps:
    - task: UsePythonVersion@0
      inputs:
        versionSpec: '3.7'
        architecture: 'x64'

    - script: python -m pip install --upgrade pip && pip install tox
      displayName: 'Install dependencies'

    - script: python -m tox -e py-redshift -- -v -n4
      env:
        REDSHIFT_TEST_DBNAME: $(REDSHIFT_TEST_DBNAME)
        REDSHIFT_TEST_PASS: $(REDSHIFT_TEST_PASS)
        REDSHIFT_TEST_USER: $(REDSHIFT_TEST_USER)
        REDSHIFT_TEST_PORT: $(REDSHIFT_TEST_PORT)
        REDSHIFT_TEST_HOST: $(REDSHIFT_TEST_HOST)
      displayName: Run integration tests

- job: BuildWheel
  pool:
    vmImage: 'vs2017-win2016'
  dependsOn:
    - UnitTest
    - PostgresIntegrationTest
    - RedshiftIntegrationTest
    - SnowflakeIntegrationTest
    - BigQueryIntegrationTest
  condition: succeeded()
  steps:
    - task: UsePythonVersion@0
      inputs:
        versionSpec: '3.7'
        architecture: 'x64'
    - script: python -m pip install --upgrade pip setuptools && python -m pip install -r requirements.txt && python -m pip install -r dev-requirements.txt
      displayName: Install dependencies
    - task: ShellScript@2
      inputs:
        scriptPath: scripts/build-wheels.sh
    - task: CopyFiles@2
      inputs:
        contents: 'dist\?(*.whl|*.tar.gz)'
        TargetFolder: '$(Build.ArtifactStagingDirectory)'
    - task: PublishBuildArtifacts@1
      inputs:
        pathtoPublish: '$(Build.ArtifactStagingDirectory)'
        artifactName: dists
73
converter.py
@@ -1,73 +0,0 @@
#!/usr/bin/env python
import json
import yaml
import sys
import argparse
from datetime import datetime, timezone
import dbt.clients.registry as registry


def yaml_type(fname):
    with open(fname) as f:
        return yaml.load(f)


def parse_args():
    parser = argparse.ArgumentParser()
    parser.add_argument("--project", type=yaml_type, default="dbt_project.yml")
    parser.add_argument("--namespace", required=True)
    return parser.parse_args()


def get_full_name(args):
    return "{}/{}".format(args.namespace, args.project["name"])


def init_project_in_packages(args, packages):
    full_name = get_full_name(args)
    if full_name not in packages:
        packages[full_name] = {
            "name": args.project["name"],
            "namespace": args.namespace,
            "latest": args.project["version"],
            "assets": {},
            "versions": {},
        }
    return packages[full_name]


def add_version_to_package(args, project_json):
    project_json["versions"][args.project["version"]] = {
        "id": "{}/{}".format(get_full_name(args), args.project["version"]),
        "name": args.project["name"],
        "version": args.project["version"],
        "description": "",
        "published_at": datetime.now(timezone.utc).astimezone().isoformat(),
        "packages": args.project.get("packages") or [],
        "works_with": [],
        "_source": {
            "type": "github",
            "url": "",
            "readme": "",
        },
        "downloads": {
            "tarball": "",
            "format": "tgz",
            "sha1": "",
        },
    }


def main():
    args = parse_args()
    packages = registry.packages()
    project_json = init_project_in_packages(args, packages)
    if args.project["version"] in project_json["versions"]:
        raise Exception("Version {} already in packages JSON"
                        .format(args.project["version"]),
                        file=sys.stderr)
    add_version_to_package(args, project_json)
    print(json.dumps(packages, indent=2))

if __name__ == "__main__":
    main()
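For reference, the per-version registry entry assembled by `add_version_to_package` above has roughly the following shape. The values below are made-up examples; only the field names come from the script:

```python
# Hypothetical example of the per-version registry entry built above;
# the field names come from the script, the values are illustrative.
from datetime import datetime, timezone

example_entry = {
    "id": "my_namespace/my_project/0.1.0",  # "<namespace>/<name>/<version>"
    "name": "my_project",
    "version": "0.1.0",
    "description": "",
    "published_at": datetime.now(timezone.utc).astimezone().isoformat(),
    "packages": [],
    "works_with": [],
    "_source": {"type": "github", "url": "", "readme": ""},
    "downloads": {"tarball": "", "format": "tgz", "sha1": ""},
}
print(example_entry["id"])
```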
@@ -1 +1 @@
recursive-include dbt/include *.py *.sql *.yml *.html *.md
recursive-include dbt/include *.py *.sql *.yml *.html *.md .gitkeep .gitignore
@@ -238,12 +238,6 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
    @classmethod
    def _rollback(cls, connection: Connection) -> None:
        """Roll back the given connection."""
        if flags.STRICT_MODE:
            if not isinstance(connection, Connection):
                raise dbt.exceptions.CompilerException(
                    f'In _rollback, got {connection} - not a Connection!'
                )

        if connection.transaction_open is False:
            raise dbt.exceptions.InternalException(
                f'Tried to rollback transaction on connection '
@@ -257,12 +251,6 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):

    @classmethod
    def close(cls, connection: Connection) -> Connection:
        if flags.STRICT_MODE:
            if not isinstance(connection, Connection):
                raise dbt.exceptions.CompilerException(
                    f'In close, got {connection} - not a Connection!'
                )

        # if the connection is in closed or init, there's nothing to do
        if connection.state in {ConnectionState.CLOSED, ConnectionState.INIT}:
            return connection
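The hunks above drop the `flags.STRICT_MODE` isinstance guards, so only the state and transaction checks remain. A hedged, self-contained sketch of the resulting `close` flow (simplified stand-ins, not the dbt-core classes):

```python
# Hedged sketch of the simplified close() flow described above
# (stand-in classes; names mirror, but do not reproduce, dbt-core).
from enum import Enum


class ConnectionState(Enum):
    INIT = "init"
    OPEN = "open"
    CLOSED = "closed"


class Connection:
    def __init__(self) -> None:
        self.state = ConnectionState.OPEN
        self.transaction_open = False


def close(connection: Connection) -> Connection:
    # If the connection is in closed or init, there's nothing to do.
    if connection.state in (ConnectionState.CLOSED, ConnectionState.INIT):
        return connection
    # A real implementation would roll back any open transaction and
    # close the underlying handle here.
    connection.state = ConnectionState.CLOSED
    return connection
```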
@@ -16,9 +16,7 @@ from dbt.exceptions import (
    get_relation_returned_multiple_results,
    InternalException, NotImplementedException, RuntimeException,
)
from dbt import flags

from dbt import deprecations
from dbt.adapters.protocol import (
    AdapterConfig,
    ConnectionManagerProtocol,
@@ -31,7 +29,6 @@ from dbt.contracts.graph.compiled import (
from dbt.contracts.graph.manifest import Manifest, MacroManifest
from dbt.contracts.graph.parsed import ParsedSeedNode
from dbt.exceptions import warn_or_error
from dbt.node_types import NodeType
from dbt.logger import GLOBAL_LOGGER as logger
from dbt.utils import filter_null_values, executor

@@ -290,9 +287,7 @@ class BaseAdapter(metaclass=AdapterMeta):
    def _schema_is_cached(self, database: Optional[str], schema: str) -> bool:
        """Check if the schema is cached, and by default logs if it is not."""

        if flags.USE_CACHE is False:
            return False
        elif (database, schema) not in self.cache:
        if (database, schema) not in self.cache:
            logger.debug(
                'On "{}": cache miss for schema "{}.{}", this is inefficient'
                .format(self.nice_connection_name(), database, schema)
@@ -310,8 +305,7 @@ class BaseAdapter(metaclass=AdapterMeta):
            self.Relation.create_from(self.config, node).without_identifier()
            for node in manifest.nodes.values()
            if (
                node.resource_type in NodeType.executable() and
                not node.is_ephemeral_model
                node.is_relational and not node.is_ephemeral_model
            )
        }

@@ -326,7 +320,9 @@ class BaseAdapter(metaclass=AdapterMeta):
        """
        info_schema_name_map = SchemaSearchMap()
        nodes: Iterator[CompileResultNode] = chain(
            manifest.nodes.values(),
            [node for node in manifest.nodes.values() if (
                node.is_relational and not node.is_ephemeral_model
            )],
            manifest.sources.values(),
        )
        for node in nodes:
@@ -342,9 +338,6 @@ class BaseAdapter(metaclass=AdapterMeta):
        """Populate the relations cache for the given schemas. Returns an
        iterable of the schemas populated, as strings.
        """
        if not flags.USE_CACHE:
            return

        cache_schemas = self._get_cache_schemas(manifest)
        with executor(self.config) as tpe:
            futures: List[Future[List[BaseRelation]]] = []
@@ -377,9 +370,6 @@ class BaseAdapter(metaclass=AdapterMeta):
        """Run a query that gets a populated cache of the relations in the
        database and set the cache on this adapter.
        """
        if not flags.USE_CACHE:
            return

        with self.cache.lock:
            if clear:
                self.cache.clear()
@@ -393,8 +383,7 @@ class BaseAdapter(metaclass=AdapterMeta):
            raise_compiler_error(
                'Attempted to cache a null relation for {}'.format(name)
            )
        if flags.USE_CACHE:
            self.cache.add(relation)
        self.cache.add(relation)
        # so jinja doesn't render things
        return ''

@@ -408,8 +397,7 @@ class BaseAdapter(metaclass=AdapterMeta):
            raise_compiler_error(
                'Attempted to drop a null relation for {}'.format(name)
            )
        if flags.USE_CACHE:
            self.cache.drop(relation)
        self.cache.drop(relation)
        return ''

    @available
@@ -430,8 +418,7 @@ class BaseAdapter(metaclass=AdapterMeta):
                .format(src_name, dst_name, name)
            )

        if flags.USE_CACHE:
            self.cache.rename(from_relation, to_relation)
        self.cache.rename(from_relation, to_relation)
        return ''

    ###
@@ -513,7 +500,7 @@ class BaseAdapter(metaclass=AdapterMeta):
    def get_columns_in_relation(
        self, relation: BaseRelation
    ) -> List[BaseColumn]:
        """Get a list of the columns in the given Relation."""
        """Get a list of the columns in the given Relation. """
        raise NotImplementedException(
            '`get_columns_in_relation` is not implemented for this adapter!'
        )
@@ -809,12 +796,11 @@ class BaseAdapter(metaclass=AdapterMeta):
    def quote_seed_column(
        self, column: str, quote_config: Optional[bool]
    ) -> str:
        # this is the default for now
        quote_columns: bool = False
        quote_columns: bool = True
        if isinstance(quote_config, bool):
            quote_columns = quote_config
        elif quote_config is None:
            deprecations.warn('column-quoting-unset')
            pass
        else:
            raise_compiler_error(
                f'The seed configuration value of "quote_columns" has an '
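The change above flips the default so seed columns are quoted unless the project opts out. A hedged sketch of that resolution logic (illustrative; `resolve_quote_columns` is a hypothetical helper, not dbt-core's method):

```python
# Hedged sketch of resolving a seed "quote_columns" config with the new
# default of True (resolve_quote_columns is a hypothetical helper).
from typing import Optional


def resolve_quote_columns(quote_config: Optional[bool]) -> bool:
    quote_columns = True  # new default: quote seed columns unless configured otherwise
    if isinstance(quote_config, bool):
        quote_columns = quote_config
    elif quote_config is None:
        pass  # keep the default; the old code warned about an unset config here
    else:
        raise ValueError('The seed configuration value of "quote_columns" must be a boolean')
    return quote_columns


assert resolve_quote_columns(None) is True
assert resolve_quote_columns(False) is False
```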
@@ -946,7 +932,6 @@ class BaseAdapter(metaclass=AdapterMeta):
        project: Optional[str] = None,
        context_override: Optional[Dict[str, Any]] = None,
        kwargs: Dict[str, Any] = None,
        release: bool = False,
        text_only_columns: Optional[Iterable[str]] = None,
    ) -> agate.Table:
        """Look macro_name up in the manifest and execute its results.
@@ -960,10 +945,8 @@
            execution context.
        :param kwargs: An optional dict of keyword args used to pass to the
            macro.
        :param release: Ignored.
        """
        if release is not False:
            deprecations.warn('execute-macro-release')

        if kwargs is None:
            kwargs = {}
        if context_override is None:
@@ -433,13 +433,14 @@ class SchemaSearchMap(Dict[InformationSchema, Set[Optional[str]]]):
            for schema in schemas:
                yield information_schema_name, schema

    def flatten(self):
    def flatten(self, allow_multiple_databases: bool = False):
        new = self.__class__()

        # make sure we don't have duplicates
        seen = {r.database.lower() for r in self if r.database}
        if len(seen) > 1:
            dbt.exceptions.raise_compiler_error(str(seen))
        # make sure we don't have multiple databases if allow_multiple_databases is set to False
        if not allow_multiple_databases:
            seen = {r.database.lower() for r in self if r.database}
            if len(seen) > 1:
                dbt.exceptions.raise_compiler_error(str(seen))

        for information_schema_name, schema in self.search():
            path = {
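The new `allow_multiple_databases` parameter only bypasses the single-database guard; with the default `False` the old behavior is unchanged. A hedged, self-contained sketch of that guard (simplified to plain strings; the real method inspects relation objects):

```python
# Hedged sketch of the multiple-database guard added to flatten() above,
# simplified to plain database-name strings.
from typing import List


def check_single_database(databases: List[str], allow_multiple_databases: bool = False) -> None:
    if not allow_multiple_databases:
        seen = {db.lower() for db in databases if db}
        if len(seen) > 1:
            raise ValueError(f"cross-database operation not allowed: {sorted(seen)}")


check_single_database(["analytics", "ANALYTICS"])  # ok: one database, two spellings
check_single_database(["analytics", "raw"], allow_multiple_databases=True)  # ok: guard bypassed
```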
@@ -11,7 +11,6 @@ from dbt.contracts.connection import (
    Connection, ConnectionState, AdapterResponse
)
from dbt.logger import GLOBAL_LOGGER as logger
from dbt import flags


class SQLConnectionManager(BaseConnectionManager):
@@ -144,13 +143,6 @@ class SQLConnectionManager(BaseConnectionManager):

    def begin(self):
        connection = self.get_thread_connection()

        if flags.STRICT_MODE:
            if not isinstance(connection, Connection):
                raise dbt.exceptions.CompilerException(
                    f'In begin, got {connection} - not a Connection!'
                )

        if connection.transaction_open is True:
            raise dbt.exceptions.InternalException(
                'Tried to begin a new transaction on connection "{}", but '
@@ -163,12 +155,6 @@ class SQLConnectionManager(BaseConnectionManager):

    def commit(self):
        connection = self.get_thread_connection()
        if flags.STRICT_MODE:
            if not isinstance(connection, Connection):
                raise dbt.exceptions.CompilerException(
                    f'In commit, got {connection} - not a Connection!'
                )

        if connection.transaction_open is False:
            raise dbt.exceptions.InternalException(
                'Tried to commit transaction on connection "{}", but '
@@ -35,7 +35,11 @@ class ISODateTime(agate.data_types.DateTime):
        )


def build_type_tester(text_columns: Iterable[str]) -> agate.TypeTester:
def build_type_tester(
    text_columns: Iterable[str],
    string_null_values: Optional[Iterable[str]] = ('null', '')
) -> agate.TypeTester:

    types = [
        agate.data_types.Number(null_values=('null', '')),
        agate.data_types.Date(null_values=('null', ''),
@@ -46,10 +50,10 @@ def build_type_tester(text_columns: Iterable[str]) -> agate.TypeTester:
        agate.data_types.Boolean(true_values=('true',),
                                 false_values=('false',),
                                 null_values=('null', '')),
        agate.data_types.Text(null_values=('null', ''))
        agate.data_types.Text(null_values=string_null_values)
    ]
    force = {
        k: agate.data_types.Text(null_values=('null', ''))
        k: agate.data_types.Text(null_values=string_null_values)
        for k in text_columns
    }
    return agate.TypeTester(force=force, types=types)
@@ -66,7 +70,13 @@ def table_from_rows(
    if text_only_columns is None:
        column_types = DEFAULT_TYPE_TESTER
    else:
        column_types = build_type_tester(text_only_columns)
        # If text_only_columns are present, prevent coercing empty string or
        # literal 'null' strings to a None representation.
        column_types = build_type_tester(
            text_only_columns,
            string_null_values=()
        )

    return agate.Table(rows, column_names, column_types=column_types)


@@ -86,19 +96,34 @@ def table_from_data(data, column_names: Iterable[str]) -> agate.Table:


def table_from_data_flat(data, column_names: Iterable[str]) -> agate.Table:
    "Convert list of dictionaries into an Agate table"
    """
    Convert a list of dictionaries into an Agate table. This method does not
    coerce string values into more specific types (eg. '005' will not be
    coerced to '5'). Additionally, this method does not coerce values to
    None (eg. '' or 'null' will retain their string literal representations).
    """

    rows = []
    text_only_columns = set()
    for _row in data:
        row = []
        for value in list(_row.values()):
        for col_name in column_names:
            value = _row[col_name]
            if isinstance(value, (dict, list, tuple)):
                row.append(json.dumps(value, cls=dbt.utils.JSONEncoder))
            else:
                row.append(value)
                # Represent container types as json strings
                value = json.dumps(value, cls=dbt.utils.JSONEncoder)
                text_only_columns.add(col_name)
            elif isinstance(value, str):
                text_only_columns.add(col_name)
            row.append(value)

        rows.append(row)

    return table_from_rows(rows=rows, column_names=column_names)
    return table_from_rows(
        rows=rows,
        column_names=column_names,
        text_only_columns=text_only_columns
    )


def empty_table():
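As the new docstring notes, `table_from_data_flat` now keeps string literals intact instead of coercing them. A hedged sketch of the same idea using agate directly (assumes the `agate` package is installed; this is not dbt-core code):

```python
# Hedged sketch: force selected columns to Text with no null coercion,
# mirroring the string_null_values=() behavior introduced above.
import agate

rows = [["005", ""], ["null", "7"]]
column_names = ["code", "value"]

# Forcing both columns to Text with an empty null list keeps '005', '' and
# 'null' as literal strings instead of coercing them to numbers or None.
force = {name: agate.Text(null_values=()) for name in column_names}
table = agate.Table(rows, column_names, column_types=agate.TypeTester(force=force))

print([tuple(row) for row in table.rows])  # [('005', ''), ('null', '7')]
```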
@@ -1,26 +0,0 @@
from dbt.logger import GLOBAL_LOGGER as logger
import dbt.exceptions
from dbt.clients.system import run_cmd

NOT_INSTALLED_MSG = """
dbt requires the gcloud SDK to be installed to authenticate with BigQuery.
Please download and install the SDK, or use a Service Account instead.

https://cloud.google.com/sdk/
"""


def gcloud_installed():
    try:
        run_cmd('.', ['gcloud', '--version'])
        return True
    except OSError as e:
        logger.debug(e)
        return False


def setup_default_credentials():
    if gcloud_installed():
        run_cmd('.', ["gcloud", "auth", "application-default", "login"])
    else:
        raise dbt.exceptions.RuntimeException(NOT_INSTALLED_MSG)
@@ -25,8 +25,8 @@ from dbt.utils import (
)

from dbt.clients._jinja_blocks import BlockIterator, BlockData, BlockTag
from dbt.contracts.graph.compiled import CompiledSchemaTestNode
from dbt.contracts.graph.parsed import ParsedSchemaTestNode
from dbt.contracts.graph.compiled import CompiledGenericTestNode
from dbt.contracts.graph.parsed import ParsedGenericTestNode
from dbt.exceptions import (
    InternalException, raise_compiler_error, CompilationException,
    invalid_materialization_argument, MacroReturn, JinjaRenderingException,
@@ -627,12 +627,12 @@ def extract_toplevel_blocks(
    )


SCHEMA_TEST_KWARGS_NAME = '_dbt_schema_test_kwargs'
GENERIC_TEST_KWARGS_NAME = '_dbt_generic_test_kwargs'


def add_rendered_test_kwargs(
    context: Dict[str, Any],
    node: Union[ParsedSchemaTestNode, CompiledSchemaTestNode],
    node: Union[ParsedGenericTestNode, CompiledGenericTestNode],
    capture_macros: bool = False,
) -> None:
    """Render each of the test kwargs in the given context using the native
@@ -662,4 +662,4 @@ def add_rendered_test_kwargs(
        return value

    kwargs = deep_map(_convert_function, node.test_metadata.kwargs)
    context[SCHEMA_TEST_KWARGS_NAME] = kwargs
    context[GENERIC_TEST_KWARGS_NAME] = kwargs
@@ -96,7 +96,6 @@ def statically_parse_adapter_dispatch(func_call, ctx, db_wrapper):
         possible_macro_calls.append(func_name)

     # packages positional argument
-    packages = None
     macro_namespace = None
     packages_arg = None
     packages_arg_type = None
@@ -109,13 +108,7 @@ def statically_parse_adapter_dispatch(func_call, ctx, db_wrapper):
     # keyword arguments
     if func_call.kwargs:
         for kwarg in func_call.kwargs:
-            if kwarg.key == 'packages':
-                # The packages keyword will be deprecated and
-                # eventually removed
-                packages_arg = kwarg.value
-                # This can be a List or a Call
-                packages_arg_type = type(kwarg.value).__name__
-            elif kwarg.key == 'macro_name':
+            if kwarg.key == 'macro_name':
                 # This will remain to enable static resolution
                 if type(kwarg.value).__name__ == 'Const':
                     func_name = kwarg.value.value
@@ -142,63 +135,10 @@ def statically_parse_adapter_dispatch(func_call, ctx, db_wrapper):
     elif packages_arg_type == 'Const':
         # This will remain to enable static resolution
         macro_namespace = packages_arg.value
-    elif packages_arg_type == 'Call':
-        # This is deprecated and should be removed eventually.
-        # It is here to support (hackily) common ways of providing
-        # a packages list to adapter.dispatch
-        if (hasattr(packages_arg, 'node') and
-                hasattr(packages_arg.node, 'node') and
-                hasattr(packages_arg.node.node, 'name') and
-                hasattr(packages_arg.node, 'attr')):
-            package_name = packages_arg.node.node.name
-            macro_name = packages_arg.node.attr
-            if (macro_name.startswith('_get') and 'namespaces' in macro_name):
-                # noqa: https://github.com/fishtown-analytics/dbt-utils/blob/9e9407b/macros/cross_db_utils/_get_utils_namespaces.sql
-                var_name = f'{package_name}_dispatch_list'
-                # hard code compatibility for fivetran_utils, just a teensy bit different
-                # noqa: https://github.com/fivetran/dbt_fivetran_utils/blob/0978ba2/macros/_get_utils_namespaces.sql
-                if package_name == 'fivetran_utils':
-                    default_packages = ['dbt_utils', 'fivetran_utils']
-                else:
-                    default_packages = [package_name]
-
-                namespace_names = get_dispatch_list(ctx, var_name, default_packages)
-                packages = []
-                if namespace_names:
-                    packages.extend(namespace_names)
-        else:
-            msg = (
-                f"As of v0.19.2, custom macros, such as '{macro_name}', are no longer "
-                "supported in the 'packages' argument of 'adapter.dispatch()'.\n"
-                f"See https://docs.getdbt.com/reference/dbt-jinja-functions/dispatch "
-                "for details."
-            ).strip()
-            raise_compiler_error(msg)
-    elif packages_arg_type == 'Add':
-        # This logic is for when there is a variable and an addition of a list,
-        # like: packages = (var('local_utils_dispatch_list', []) + ['local_utils2'])
-        # This is deprecated and should be removed eventually.
-        namespace_var = None
-        default_namespaces = []
-        # This might be a single call or it might be the 'left' piece in an addition
-        for var_call in packages_arg.find_all(jinja2.nodes.Call):
-            if (hasattr(var_call, 'node') and
-                    var_call.node.name == 'var' and
-                    hasattr(var_call, 'args')):
-                namespace_var = var_call.args[0].value
-        if hasattr(packages_arg, 'right'):  # we have a default list of namespaces
-            for item in packages_arg.right.items:
-                default_namespaces.append(item.value)
-        if namespace_var:
-            namespace_names = get_dispatch_list(ctx, namespace_var, default_namespaces)
-            packages = []
-            if namespace_names:
-                packages.extend(namespace_names)

     if db_wrapper:
         macro = db_wrapper.dispatch(
             func_name,
-            packages=packages,
             macro_namespace=macro_namespace
         ).macro
         func_name = f'{macro.package_name}.{macro.name}'
@@ -206,20 +146,9 @@ def statically_parse_adapter_dispatch(func_call, ctx, db_wrapper):
     else:  # this is only for test/unit/test_macro_calls.py
         if macro_namespace:
             packages = [macro_namespace]
-        if packages is None:
+        else:
             packages = []
         for package_name in packages:
            possible_macro_calls.append(f'{package_name}.{func_name}')

     return possible_macro_calls
-
-
-def get_dispatch_list(ctx, var_name, default_packages):
-    namespace_list = None
-    try:
-        # match the logic currently used in package _get_namespaces() macro
-        namespace_list = ctx['var'](var_name) + default_packages
-    except Exception:
-        pass
-    namespace_list = namespace_list if namespace_list else default_packages
-    return namespace_list
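For reference, a hedged sketch of what the slimmed-down resolution yields on the unit-test path (no db_wrapper); the variable names mirror the diff, the inputs are invented:

# Inputs as the static parser would extract them from
# {{ adapter.dispatch('concat', macro_namespace='dbt_utils')(...) }}
func_name = 'concat'
macro_namespace = 'dbt_utils'
possible_macro_calls = [func_name]

# Mirrors the new else-branch: a string namespace becomes the only package
# searched; without one, no package-qualified candidates are added.
packages = [macro_namespace] if macro_namespace else []
for package_name in packages:
    possible_macro_calls.append(f'{package_name}.{func_name}')

print(possible_macro_calls)  # ['concat', 'dbt_utils.concat']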
@@ -1,10 +1,9 @@
-from functools import wraps
+import functools
 import requests
-from dbt.exceptions import RegistryException
-from dbt.utils import memoized
+from dbt.utils import memoized, _connection_exception_retry as connection_exception_retry
 from dbt.logger import GLOBAL_LOGGER as logger
+from dbt import deprecations
 import os
-import time

 if os.getenv('DBT_PACKAGE_HUB_URL'):
     DEFAULT_REGISTRY_BASE_URL = os.getenv('DBT_PACKAGE_HUB_URL')
@@ -19,26 +18,11 @@ def _get_url(url, registry_base_url=None):
     return '{}{}'.format(registry_base_url, url)


-def _wrap_exceptions(fn):
-    @wraps(fn)
-    def wrapper(*args, **kwargs):
-        max_attempts = 5
-        attempt = 0
-        while True:
-            attempt += 1
-            try:
-                return fn(*args, **kwargs)
-            except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as exc:
-                if attempt < max_attempts:
-                    time.sleep(1)
-                    continue
-                raise RegistryException(
-                    'Unable to connect to registry hub'
-                ) from exc
-    return wrapper
+def _get_with_retries(path, registry_base_url=None):
+    get_fn = functools.partial(_get, path, registry_base_url)
+    return connection_exception_retry(get_fn, 5)


-@_wrap_exceptions
 def _get(path, registry_base_url=None):
     url = _get_url(path, registry_base_url)
     logger.debug('Making package registry request: GET {}'.format(url))
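The retry helper being swapped in lives in dbt.utils; a rough, non-authoritative equivalent (signature assumed from the call sites in this diff, exception list and backoff are guesses) looks like this:

import functools
import time

import requests

def connection_exception_retry(fn, max_attempts, attempt=0):
    # Retry transient connection failures with a short sleep, then re-raise.
    # This approximates dbt.utils._connection_exception_retry; it is a sketch,
    # not the library's actual implementation.
    try:
        return fn()
    except (requests.exceptions.ConnectionError, requests.exceptions.Timeout):
        if attempt + 1 >= max_attempts:
            raise
        time.sleep(1)
        return connection_exception_retry(fn, max_attempts, attempt + 1)

# Usage mirrors _get_with_retries above: bind the arguments first, then retry
# the zero-argument callable up to five times.
get_fn = functools.partial(requests.get, 'https://hub.getdbt.com/api/v1/index.json')
response = connection_exception_retry(get_fn, 5)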
@@ -50,22 +34,44 @@ def _get(path, registry_base_url=None):


 def index(registry_base_url=None):
-    return _get('api/v1/index.json', registry_base_url)
+    return _get_with_retries('api/v1/index.json', registry_base_url)


 index_cached = memoized(index)


 def packages(registry_base_url=None):
-    return _get('api/v1/packages.json', registry_base_url)
+    return _get_with_retries('api/v1/packages.json', registry_base_url)


 def package(name, registry_base_url=None):
-    return _get('api/v1/{}.json'.format(name), registry_base_url)
+    response = _get_with_retries('api/v1/{}.json'.format(name), registry_base_url)
+
+    # Either redirectnamespace or redirectname in the JSON response indicate a redirect
+    # redirectnamespace redirects based on package ownership
+    # redirectname redirects based on package name
+    # Both can be present at the same time, or neither. Fails gracefully to old name
+
+    if ('redirectnamespace' in response) or ('redirectname' in response):
+
+        if ('redirectnamespace' in response) and response['redirectnamespace'] is not None:
+            use_namespace = response['redirectnamespace']
+        else:
+            use_namespace = response['namespace']
+
+        if ('redirectname' in response) and response['redirectname'] is not None:
+            use_name = response['redirectname']
+        else:
+            use_name = response['name']
+
+        new_nwo = use_namespace + "/" + use_name
+        deprecations.warn('package-redirect', old_name=name, new_name=new_nwo)
+
+    return response


 def package_version(name, version, registry_base_url=None):
-    return _get('api/v1/{}/{}.json'.format(name, version), registry_base_url)
+    return _get_with_retries('api/v1/{}/{}.json'.format(name, version), registry_base_url)


 def get_available_versions(name):
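A small, self-contained illustration of the redirect rules above (the response dict is hypothetical):

# A renamed package: ownership moved, name unchanged. redirectname is None,
# so the original name is kept; redirectnamespace wins over namespace.
response = {
    'namespace': 'fishtown-analytics',
    'name': 'dbt_utils',
    'redirectnamespace': 'dbt-labs',
    'redirectname': None,
}

use_namespace = (
    response['redirectnamespace']
    if response.get('redirectnamespace') is not None
    else response['namespace']
)
use_name = (
    response['redirectname']
    if response.get('redirectname') is not None
    else response['name']
)
print(use_namespace + '/' + use_name)  # dbt-labs/dbt_utils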
@@ -1,4 +1,5 @@
 import errno
+import functools
 import fnmatch
 import json
 import os
@@ -15,9 +16,8 @@ from typing import (
 )

 import dbt.exceptions
-import dbt.utils
-
 from dbt.logger import GLOBAL_LOGGER as logger
+from dbt.utils import _connection_exception_retry as connection_exception_retry

 if sys.platform == 'win32':
     from ctypes import WinDLL, c_bool
@@ -30,7 +30,7 @@ def find_matching(
     root_path: str,
     relative_paths_to_search: List[str],
     file_pattern: str,
-) -> List[Dict[str, str]]:
+) -> List[Dict[str, Any]]:
     """
     Given an absolute `root_path`, a list of relative paths to that
     absolute root path (`relative_paths_to_search`), and a `file_pattern`
@@ -61,11 +61,19 @@ def find_matching(
             relative_path = os.path.relpath(
                 absolute_path, absolute_path_to_search
             )
+            modification_time = 0.0
+            try:
+                modification_time = os.path.getmtime(absolute_path)
+            except OSError:
+                logger.exception(
+                    f"Error retrieving modification time for file {absolute_path}"
+                )
             if reobj.match(local_file):
                 matching.append({
                     'searched_path': relative_path_to_search,
                     'absolute_path': absolute_path,
                     'relative_path': relative_path,
+                    'modification_time': modification_time,
                 })

     return matching
@@ -441,6 +449,13 @@ def run_cmd(
     return out, err


+def download_with_retries(
+    url: str, path: str, timeout: Optional[Union[float, tuple]] = None
+) -> None:
+    download_fn = functools.partial(download, url, path, timeout)
+    connection_exception_retry(download_fn, 5)
+
+
 def download(
     url: str, path: str, timeout: Optional[Union[float, tuple]] = None
 ) -> None:
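With the modification_time addition, each entry in the list find_matching returns now mixes strings with a float, which is why the return annotation widens from Dict[str, str] to Dict[str, Any]. A representative entry (paths invented):

{
    'searched_path': 'models',
    'absolute_path': '/home/user/jaffle_shop/models/customers.sql',
    'relative_path': 'customers.sql',
    'modification_time': 1632337231.894,  # os.path.getmtime(); 0.0 on OSError
}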
@@ -1,7 +1,6 @@
-import dbt.exceptions
-
+from typing import Any, Dict, Optional
 import yaml
 import yaml.scanner

 # the C version is faster, but it doesn't always exist
 try:
@@ -56,7 +55,7 @@ def contextualized_yaml_error(raw_contents, error):
         raw_error=error)


-def safe_load(contents):
+def safe_load(contents) -> Optional[Dict[str, Any]]:
     return yaml.load(contents, Loader=SafeLoader)
@@ -10,10 +10,10 @@ from dbt.adapters.factory import get_adapter
 from dbt.clients import jinja
 from dbt.clients.system import make_directory
 from dbt.context.providers import generate_runtime_model
-from dbt.contracts.graph.manifest import Manifest
+from dbt.contracts.graph.manifest import Manifest, UniqueID
 from dbt.contracts.graph.compiled import (
     COMPILED_TYPES,
-    CompiledSchemaTestNode,
+    CompiledGenericTestNode,
     GraphMemberNode,
     InjectedCTE,
     ManifestNode,
@@ -107,6 +107,19 @@ def _extend_prepended_ctes(prepended_ctes, new_prepended_ctes):
         _add_prepended_cte(prepended_ctes, new_cte)


+def _get_tests_for_node(manifest: Manifest, unique_id: UniqueID) -> List[UniqueID]:
+    """ Get a list of tests that depend on the node with the
+    provided unique id """
+
+    tests = []
+    if unique_id in manifest.child_map:
+        for child_unique_id in manifest.child_map[unique_id]:
+            if child_unique_id.startswith('test.'):
+                tests.append(child_unique_id)
+
+    return tests
+
+
 class Linker:
     def __init__(self, data=None):
         if data is None:
@@ -142,7 +155,7 @@ class Linker:
         include all nodes in their corresponding graph entries.
         """
         out_graph = self.graph.copy()
-        for node_id in self.graph.nodes():
+        for node_id in self.graph:
             data = manifest.expect(node_id).to_dict(omit_none=True)
             out_graph.add_node(node_id, **data)
         nx.write_gpickle(out_graph, outfile)
@@ -154,7 +167,7 @@ class Compiler:

     def initialize(self):
         make_directory(self.config.target_path)
-        make_directory(self.config.modules_path)
+        make_directory(self.config.packages_install_path)

     # creates a ModelContext which is converted to
     # a dict for jinja rendering of SQL
@@ -169,7 +182,7 @@ class Compiler:
             node, self.config, manifest
         )
         context.update(extra_context)
-        if isinstance(node, CompiledSchemaTestNode):
+        if isinstance(node, CompiledGenericTestNode):
             # for test nodes, add a special keyword args value to the context
             jinja.add_rendered_test_kwargs(context, node)

@@ -412,13 +425,82 @@ class Compiler:
             self.link_node(linker, node, manifest)
         for exposure in manifest.exposures.values():
             self.link_node(linker, exposure, manifest)
-            # linker.add_node(exposure.unique_id)

         cycle = linker.find_cycles()

         if cycle:
             raise RuntimeError("Found a cycle: {}".format(cycle))

+        manifest.build_parent_and_child_maps()
+
+        self.resolve_graph(linker, manifest)
+
+    def resolve_graph(self, linker: Linker, manifest: Manifest) -> None:
+        """ This method adds additional edges to the DAG. For a given non-test
+        executable node, add an edge from an upstream test to the given node if
+        the set of nodes the test depends on is a proper/strict subset of the
+        upstream nodes for the given node. """
+
+        # Given a graph:
+        # model1 --> model2 --> model3
+        #   |          |
+        #   |          \/
+        #   \/       test 2
+        #  test1
+        #
+        # Produce the following graph:
+        # model1 --> model2 --> model3
+        #   |          |  /\     /\
+        #   |          \/ |      |
+        #   \/       test2 ------|
+        #  test1 ----------------|
+
+        for node_id in linker.graph:
+            # If node is executable (in manifest.nodes) and does _not_
+            # represent a test, continue.
+            if (
+                node_id in manifest.nodes and
+                manifest.nodes[node_id].resource_type != NodeType.Test
+            ):
+                # Get *everything* upstream of the node
+                all_upstream_nodes = nx.traversal.bfs_tree(
+                    linker.graph, node_id, reverse=True
+                )
+                # Get the set of upstream nodes not including the current node.
+                upstream_nodes = set([
+                    n for n in all_upstream_nodes if n != node_id
+                ])
+
+                # Get all tests that depend on any upstream nodes.
+                upstream_tests = []
+                for upstream_node in upstream_nodes:
+                    upstream_tests += _get_tests_for_node(
+                        manifest,
+                        upstream_node
+                    )
+
+                for upstream_test in upstream_tests:
+                    # Get the set of all nodes that the test depends on
+                    # including the upstream_node itself. This is necessary
+                    # because tests can depend on multiple nodes (ex:
+                    # relationship tests). Test nodes do not distinguish
+                    # between what node the test is "testing" and what
+                    # node(s) it depends on.
+                    test_depends_on = set(
+                        manifest.nodes[upstream_test].depends_on_nodes
+                    )
+
+                    # If the set of nodes that an upstream test depends on
+                    # is a proper (or strict) subset of all upstream nodes of
+                    # the current node, add an edge from the upstream test
+                    # to the current node. Must be a proper/strict subset to
+                    # avoid adding a circular dependency to the graph.
+                    if (test_depends_on < upstream_nodes):
+                        linker.graph.add_edge(
+                            upstream_test,
+                            node_id
+                        )
+
     def compile(self, manifest: Manifest, write=True) -> Graph:
         self.initialize()
         linker = Linker()
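A compact sketch of the proper-subset rule with networkx (same shape as the diagram in the comment above; node names and test dependencies are made up):

import networkx as nx

# model1 --> model2 --> model3, with test1 on model1 and test2 on model2.
graph = nx.DiGraph([
    ('model1', 'model2'), ('model2', 'model3'),
    ('model1', 'test1'), ('model2', 'test2'),
])
test_depends_on = {'test1': {'model1'}, 'test2': {'model2'}}

node_id = 'model3'
upstream = set(nx.bfs_tree(graph, node_id, reverse=True)) - {node_id}

for test, deps in test_depends_on.items():
    # A proper subset can never include node_id itself, so the new edge
    # cannot close a cycle.
    if deps < upstream:
        graph.add_edge(test, node_id)

print(sorted(graph.predecessors('model3')))  # ['model2', 'test1', 'test2']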
@@ -1,4 +1,4 @@
 # all these are just exports, they need "noqa" so flake8 will not complain.
-from .profile import Profile, PROFILES_DIR, read_user_config  # noqa
+from .profile import Profile, read_user_config  # noqa
 from .project import Project, IsFQNResource  # noqa
 from .runtime import RuntimeConfig, UnsetProfileConfig  # noqa
@@ -4,6 +4,7 @@ import os

 from dbt.dataclass_schema import ValidationError

+from dbt import flags
 from dbt.clients.system import load_file_contents
 from dbt.clients.yaml_helper import load_yaml_text
 from dbt.contracts.connection import Credentials, HasCredentials
@@ -20,10 +21,8 @@ from dbt.utils import coerce_dict_str
 from .renderer import ProfileRenderer

 DEFAULT_THREADS = 1

 DEFAULT_PROFILES_DIR = os.path.join(os.path.expanduser('~'), '.dbt')
-PROFILES_DIR = os.path.expanduser(
-    os.getenv('DBT_PROFILES_DIR', DEFAULT_PROFILES_DIR)
-)

 INVALID_PROFILE_MESSAGE = """
 dbt encountered an error while trying to read your profiles.yml file.
@@ -43,7 +42,7 @@ Here, [profile name] should be replaced with a profile name
 defined in your profiles.yml file. You can find profiles.yml here:

 {profiles_file}/profiles.yml
-""".format(profiles_file=PROFILES_DIR)
+""".format(profiles_file=DEFAULT_PROFILES_DIR)


 def read_profile(profiles_dir: str) -> Dict[str, Any]:
@@ -73,10 +72,10 @@ def read_user_config(directory: str) -> UserConfig:
     try:
         profile = read_profile(directory)
         if profile:
-            user_cfg = coerce_dict_str(profile.get('config', {}))
-            if user_cfg is not None:
-                UserConfig.validate(user_cfg)
-                return UserConfig.from_dict(user_cfg)
+            user_config = coerce_dict_str(profile.get('config', {}))
+            if user_config is not None:
+                UserConfig.validate(user_config)
+                return UserConfig.from_dict(user_config)
     except (RuntimeException, ValidationError):
         pass
     return UserConfig()
@@ -84,14 +83,32 @@ def read_user_config(directory: str) -> UserConfig:

 # The Profile class is included in RuntimeConfig, so any attribute
 # additions must also be set where the RuntimeConfig class is created
-@dataclass
+# `init=False` is a workaround for https://bugs.python.org/issue45081
+@dataclass(init=False)
 class Profile(HasCredentials):
     profile_name: str
     target_name: str
-    config: UserConfig
+    user_config: UserConfig
     threads: int
     credentials: Credentials

+    def __init__(
+        self,
+        profile_name: str,
+        target_name: str,
+        user_config: UserConfig,
+        threads: int,
+        credentials: Credentials
+    ):
+        """Explicitly defining `__init__` to work around bug in Python 3.9.7
+        https://bugs.python.org/issue45081
+        """
+        self.profile_name = profile_name
+        self.target_name = target_name
+        self.user_config = user_config
+        self.threads = threads
+        self.credentials = credentials
+
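The init=False workaround, reduced to a standalone sketch (field names trimmed): Python 3.9.7's generated dataclass __init__ trips over this inheritance pattern (bpo-45081), so the diff suppresses code generation and writes the constructor by hand.

from dataclasses import dataclass

@dataclass(init=False)
class Profile:
    profile_name: str
    threads: int

    def __init__(self, profile_name: str, threads: int):
        # Hand-written to sidestep https://bugs.python.org/issue45081
        self.profile_name = profile_name
        self.threads = threads

print(Profile('jaffle_shop', threads=4))  # repr still comes from @dataclass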
     def to_profile_info(
         self, serialize_credentials: bool = False
     ) -> Dict[str, Any]:
@@ -106,12 +123,12 @@ class Profile(HasCredentials):
         result = {
             'profile_name': self.profile_name,
             'target_name': self.target_name,
-            'config': self.config,
+            'user_config': self.user_config,
             'threads': self.threads,
             'credentials': self.credentials,
         }
         if serialize_credentials:
-            result['config'] = self.config.to_dict(omit_none=True)
+            result['user_config'] = self.user_config.to_dict(omit_none=True)
             result['credentials'] = self.credentials.to_dict(omit_none=True)
         return result

@@ -125,7 +142,7 @@ class Profile(HasCredentials):
             'name': self.target_name,
             'target_name': self.target_name,
             'profile_name': self.profile_name,
-            'config': self.config.to_dict(omit_none=True),
+            'config': self.user_config.to_dict(omit_none=True),
         })
         return target

@@ -220,7 +237,7 @@ class Profile(HasCredentials):
         threads: int,
         profile_name: str,
         target_name: str,
-        user_cfg: Optional[Dict[str, Any]] = None
+        user_config: Optional[Dict[str, Any]] = None
     ) -> 'Profile':
         """Create a profile from an existing set of Credentials and the
         remaining information.
@@ -229,20 +246,20 @@ class Profile(HasCredentials):
         :param threads: The number of threads to use for connections.
         :param profile_name: The profile name used for this profile.
         :param target_name: The target name used for this profile.
-        :param user_cfg: The user-level config block from the
+        :param user_config: The user-level config block from the
             raw profiles, if specified.
         :raises DbtProfileError: If the profile is invalid.
         :returns: The new Profile object.
         """
-        if user_cfg is None:
-            user_cfg = {}
-        UserConfig.validate(user_cfg)
-        config = UserConfig.from_dict(user_cfg)
+        if user_config is None:
+            user_config = {}
+        UserConfig.validate(user_config)
+        user_config_obj: UserConfig = UserConfig.from_dict(user_config)

         profile = cls(
             profile_name=profile_name,
             target_name=target_name,
-            config=config,
+            user_config=user_config_obj,
             threads=threads,
             credentials=credentials
         )
@@ -295,7 +312,7 @@ class Profile(HasCredentials):
         raw_profile: Dict[str, Any],
         profile_name: str,
         renderer: ProfileRenderer,
-        user_cfg: Optional[Dict[str, Any]] = None,
+        user_config: Optional[Dict[str, Any]] = None,
         target_override: Optional[str] = None,
         threads_override: Optional[int] = None,
     ) -> 'Profile':
@@ -307,7 +324,7 @@ class Profile(HasCredentials):
         disk as yaml and its values rendered with jinja.
         :param profile_name: The profile name used.
         :param renderer: The config renderer.
-        :param user_cfg: The global config for the user, if it
+        :param user_config: The global config for the user, if it
             was present.
         :param target_override: The target to use, if provided on
             the command line.
@@ -317,9 +334,9 @@ class Profile(HasCredentials):
             target could not be found
         :returns: The new Profile object.
         """
-        # user_cfg is not rendered.
-        if user_cfg is None:
-            user_cfg = raw_profile.get('config')
+        # user_config is not rendered.
+        if user_config is None:
+            user_config = raw_profile.get('config')
         # TODO: should it be, and the values coerced to bool?
         target_name, profile_data = cls.render_profile(
             raw_profile, profile_name, target_override, renderer
@@ -340,7 +357,7 @@ class Profile(HasCredentials):
             profile_name=profile_name,
             target_name=target_name,
             threads=threads,
-            user_cfg=user_cfg
+            user_config=user_config
         )

     @classmethod
@@ -383,13 +400,13 @@ class Profile(HasCredentials):
                     error_string=msg
                 )
             )
-        user_cfg = raw_profiles.get('config')
+        user_config = raw_profiles.get('config')

         return cls.from_raw_profile_info(
             raw_profile=raw_profile,
             profile_name=profile_name,
             renderer=renderer,
-            user_cfg=user_cfg,
+            user_config=user_config,
             target_override=target_override,
             threads_override=threads_override,
         )
@@ -417,7 +434,7 @@ class Profile(HasCredentials):
         """
         threads_override = getattr(args, 'threads', None)
         target_override = getattr(args, 'target', None)
-        raw_profiles = read_profile(args.profiles_dir)
+        raw_profiles = read_profile(flags.PROFILES_DIR)
         profile_name = cls.pick_profile_name(getattr(args, 'profile', None),
                                              project_profile_name)
         return cls.from_raw_profiles(
@@ -9,6 +9,7 @@ from typing_extensions import Protocol, runtime_checkable
 import hashlib
 import os

+from dbt import deprecations
 from dbt.clients.system import resolve_path_from_base
 from dbt.clients.system import path_exists
 from dbt.clients.system import load_file_contents
@@ -123,13 +124,13 @@ def _parse_versions(versions: Union[List[str], str]) -> List[VersionSpecifier]:


 def _all_source_paths(
-    source_paths: List[str],
-    data_paths: List[str],
+    model_paths: List[str],
+    seed_paths: List[str],
     snapshot_paths: List[str],
     analysis_paths: List[str],
     macro_paths: List[str],
 ) -> List[str]:
-    return list(chain(source_paths, data_paths, snapshot_paths, analysis_paths,
+    return list(chain(model_paths, seed_paths, snapshot_paths, analysis_paths,
                       macro_paths))
@@ -292,6 +293,21 @@ class PartialProject(RenderComponents):
             exc.path = os.path.join(self.project_root, 'dbt_project.yml')
             raise

+    def check_config_path(self, project_dict, deprecated_path, exp_path):
+        if deprecated_path in project_dict:
+            if exp_path in project_dict:
+                msg = (
+                    '{deprecated_path} and {exp_path} cannot both be defined. The '
+                    '`{deprecated_path}` config has been deprecated in favor of `{exp_path}`. '
+                    'Please update your `dbt_project.yml` configuration to reflect this '
+                    'change.'
+                )
+                raise DbtProjectError(msg.format(deprecated_path=deprecated_path,
+                                                 exp_path=exp_path))
+            deprecations.warn('project_config_path',
+                              deprecated_path=deprecated_path,
+                              exp_path=exp_path)
+
     def create_project(self, rendered: RenderComponents) -> 'Project':
         unrendered = RenderComponents(
             project_dict=self.project_dict,
@@ -303,6 +319,9 @@ class PartialProject(RenderComponents):
             verify_version=self.verify_version,
         )

+        self.check_config_path(rendered.project_dict, 'source-paths', 'model-paths')
+        self.check_config_path(rendered.project_dict, 'data-paths', 'seed-paths')
+
         try:
             ProjectContract.validate(rendered.project_dict)
             cfg = ProjectContract.from_dict(
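Reduced to a standalone sketch, the rule check_config_path enforces (exception type and warning mechanism simplified, not dbt's actual ones):

def check_config_path(project_dict, deprecated_path, exp_path):
    if deprecated_path in project_dict:
        if exp_path in project_dict:
            # Both the deprecated and the replacement key: hard error.
            raise ValueError(
                f'{deprecated_path} and {exp_path} cannot both be defined.'
            )
        # Only the deprecated key: accept it, but warn.
        print(f'WARNING: {deprecated_path} is deprecated; use {exp_path}.')

check_config_path({'data-paths': ['data']}, 'data-paths', 'seed-paths')   # warns
check_config_path({'seed-paths': ['seeds']}, 'data-paths', 'seed-paths')  # fine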
@@ -324,15 +343,24 @@ class PartialProject(RenderComponents):
         # to have been a cli argument.
         profile_name = cfg.profile
         # these are all the defaults
-        source_paths: List[str] = value_or(cfg.source_paths, ['models'])
+
+        # `source_paths` is deprecated but still allowed. Copy it into
+        # `model_paths` to simplify logic throughout the rest of the system.
+        model_paths: List[str] = value_or(cfg.model_paths
+                                          if 'model-paths' in rendered.project_dict
+                                          else cfg.source_paths, ['models'])
         macro_paths: List[str] = value_or(cfg.macro_paths, ['macros'])
-        data_paths: List[str] = value_or(cfg.data_paths, ['data'])
-        test_paths: List[str] = value_or(cfg.test_paths, ['test'])
-        analysis_paths: List[str] = value_or(cfg.analysis_paths, [])
+        # `data_paths` is deprecated but still allowed. Copy it into
+        # `seed_paths` to simplify logic throughout the rest of the system.
+        seed_paths: List[str] = value_or(cfg.seed_paths
+                                         if 'seed-paths' in rendered.project_dict
+                                         else cfg.data_paths, ['seeds'])
+        test_paths: List[str] = value_or(cfg.test_paths, ['tests'])
+        analysis_paths: List[str] = value_or(cfg.analysis_paths, ['analyses'])
         snapshot_paths: List[str] = value_or(cfg.snapshot_paths, ['snapshots'])

         all_source_paths: List[str] = _all_source_paths(
-            source_paths, data_paths, snapshot_paths, analysis_paths,
+            model_paths, seed_paths, snapshot_paths, analysis_paths,
             macro_paths
         )
@@ -341,7 +369,7 @@ class PartialProject(RenderComponents):
         target_path: str = value_or(cfg.target_path, 'target')
         clean_targets: List[str] = value_or(cfg.clean_targets, [target_path])
         log_path: str = value_or(cfg.log_path, 'logs')
-        modules_path: str = value_or(cfg.modules_path, 'dbt_modules')
+        packages_install_path: str = value_or(cfg.packages_install_path, 'dbt_packages')
         # in the default case we'll populate this once we know the adapter type
         # It would be nice to just pass along a Quoting here, but that would
         # break many things
@@ -382,15 +410,14 @@ class PartialProject(RenderComponents):
         # of dicts.
         manifest_selectors = SelectorDict.parse_from_selectors_list(
             rendered.selectors_dict['selectors'])
-
         project = Project(
             project_name=name,
             version=version,
             project_root=project_root,
             profile_name=profile_name,
-            source_paths=source_paths,
+            model_paths=model_paths,
             macro_paths=macro_paths,
-            data_paths=data_paths,
+            seed_paths=seed_paths,
             test_paths=test_paths,
             analysis_paths=analysis_paths,
             docs_paths=docs_paths,
@@ -399,7 +426,7 @@ class PartialProject(RenderComponents):
             snapshot_paths=snapshot_paths,
             clean_targets=clean_targets,
             log_path=log_path,
-            modules_path=modules_path,
+            packages_install_path=packages_install_path,
             quoting=quoting,
             models=models,
             on_run_start=on_run_start,
@@ -500,9 +527,9 @@ class Project:
     version: Union[SemverString, float]
     project_root: str
     profile_name: Optional[str]
-    source_paths: List[str]
+    model_paths: List[str]
     macro_paths: List[str]
-    data_paths: List[str]
+    seed_paths: List[str]
     test_paths: List[str]
     analysis_paths: List[str]
     docs_paths: List[str]
@@ -511,7 +538,7 @@ class Project:
     snapshot_paths: List[str]
     clean_targets: List[str]
     log_path: str
-    modules_path: str
+    packages_install_path: str
     quoting: Dict[str, Any]
     models: Dict[str, Any]
     on_run_start: List[str]
@@ -533,7 +560,7 @@ class Project:
     @property
     def all_source_paths(self) -> List[str]:
         return _all_source_paths(
-            self.source_paths, self.data_paths, self.snapshot_paths,
+            self.model_paths, self.seed_paths, self.snapshot_paths,
             self.analysis_paths, self.macro_paths
         )

@@ -561,9 +588,9 @@ class Project:
             'version': self.version,
             'project-root': self.project_root,
             'profile': self.profile_name,
-            'source-paths': self.source_paths,
+            'model-paths': self.model_paths,
             'macro-paths': self.macro_paths,
-            'data-paths': self.data_paths,
+            'seed-paths': self.seed_paths,
             'test-paths': self.test_paths,
             'analysis-paths': self.analysis_paths,
             'docs-paths': self.docs_paths,
@@ -645,13 +672,24 @@ class Project:
     def hashed_name(self):
         return hashlib.md5(self.project_name.encode('utf-8')).hexdigest()

-    def get_selector(self, name: str) -> SelectionSpec:
+    def get_selector(self, name: str) -> Union[SelectionSpec, bool]:
         if name not in self.selectors:
             raise RuntimeException(
                 f'Could not find selector named {name}, expected one of '
                 f'{list(self.selectors)}'
             )
-        return self.selectors[name]
+        return self.selectors[name]["definition"]
+
+    def get_default_selector_name(self) -> Union[str, None]:
+        """This function fetches the default selector to use on `dbt run` (if any)
+        :return: either a selector name if a default is set, or None
+        :rtype: Union[str, None]
+        """
+        for selector_name, selector in self.selectors.items():
+            if selector["default"] is True:
+                return selector_name
+
+        return None

     def get_macro_search_order(self, macro_namespace: str):
         for dispatch_entry in self.dispatch:
@@ -147,7 +147,7 @@ class DbtProjectYamlRenderer(BaseRenderer):

         if first in {'seeds', 'models', 'snapshots', 'tests'}:
             keypath_parts = {
-                (k.lstrip('+') if isinstance(k, str) else k)
+                (k.lstrip('+ ') if isinstance(k, str) else k)
                 for k in keypath
             }
             # model-level hooks
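Note that lstrip('+ ') strips any leading run of either character, which is the point of the one-character change above:

print('+my_model'.lstrip('+ '))   # 'my_model'
print('+ my_model'.lstrip('+ '))  # 'my_model' ('+' alone would leave ' my_model')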
@@ -12,6 +12,7 @@ from .profile import Profile
 from .project import Project
 from .renderer import DbtProjectYamlRenderer, ProfileRenderer
 from .utils import parse_cli_vars
+from dbt import flags
 from dbt import tracking
 from dbt.adapters.factory import get_relation_class_by_name, get_include_paths
 from dbt.helper_types import FQNPath, PathSet
@@ -86,9 +87,9 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
             project_name=project.project_name,
             version=project.version,
             project_root=project.project_root,
-            source_paths=project.source_paths,
+            model_paths=project.model_paths,
             macro_paths=project.macro_paths,
-            data_paths=project.data_paths,
+            seed_paths=project.seed_paths,
             test_paths=project.test_paths,
             analysis_paths=project.analysis_paths,
             docs_paths=project.docs_paths,
@@ -97,7 +98,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
             snapshot_paths=project.snapshot_paths,
             clean_targets=project.clean_targets,
             log_path=project.log_path,
-            modules_path=project.modules_path,
+            packages_install_path=project.packages_install_path,
             quoting=quoting,
             models=project.models,
             on_run_start=project.on_run_start,
@@ -117,7 +118,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
             unrendered=project.unrendered,
             profile_name=profile.profile_name,
             target_name=profile.target_name,
-            config=profile.config,
+            user_config=profile.user_config,
             threads=profile.threads,
             credentials=profile.credentials,
             args=args,
@@ -144,7 +145,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
         project = Project.from_project_root(
             project_root,
             renderer,
-            verify_version=getattr(self.args, 'version_check', False),
+            verify_version=bool(flags.VERSION_CHECK),
         )

         cfg = self.from_parts(
@@ -197,7 +198,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
     ) -> Tuple[Project, Profile]:
         # profile_name from the project
         project_root = args.project_dir if args.project_dir else os.getcwd()
-        version_check = getattr(args, 'version_check', False)
+        version_check = bool(flags.VERSION_CHECK)
         partial = Project.partial_load(
             project_root,
             verify_version=version_check
@@ -337,7 +338,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
                 f'dbt found {count_packages_specified} package(s) '
                 f'specified in packages.yml, but only '
                 f'{count_packages_installed} package(s) installed '
-                f'in {self.modules_path}. Run "dbt deps" to '
+                f'in {self.packages_install_path}. Run "dbt deps" to '
                 f'install package dependencies.'
             )
         project_paths = itertools.chain(
@@ -375,7 +376,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
         yield project.project_name, project

     def _get_project_directories(self) -> Iterator[Path]:
-        root = Path(self.project_root) / self.modules_path
+        root = Path(self.project_root) / self.packages_install_path

         if root.exists():
             for path in root.iterdir():
@@ -391,6 +392,10 @@ class UnsetCredentials(Credentials):
     def type(self):
         return None

+    @property
+    def unique_field(self):
+        return None
+
     def connection_info(self, *args, **kwargs):
         return {}

@@ -412,7 +417,7 @@ class UnsetConfig(UserConfig):
 class UnsetProfile(Profile):
     def __init__(self):
         self.credentials = UnsetCredentials()
-        self.config = UnsetConfig()
+        self.user_config = UnsetConfig()
         self.profile_name = ''
         self.target_name = ''
         self.threads = -1
@@ -478,9 +483,9 @@ class UnsetProfileConfig(RuntimeConfig):
             project_name=project.project_name,
             version=project.version,
             project_root=project.project_root,
-            source_paths=project.source_paths,
+            model_paths=project.model_paths,
             macro_paths=project.macro_paths,
-            data_paths=project.data_paths,
+            seed_paths=project.seed_paths,
             test_paths=project.test_paths,
             analysis_paths=project.analysis_paths,
             docs_paths=project.docs_paths,
@@ -489,7 +494,7 @@ class UnsetProfileConfig(RuntimeConfig):
             snapshot_paths=project.snapshot_paths,
             clean_targets=project.clean_targets,
             log_path=project.log_path,
-            modules_path=project.modules_path,
+            packages_install_path=project.packages_install_path,
             quoting=project.quoting,  # we never use this anyway.
             models=project.models,
             on_run_start=project.on_run_start,
@@ -509,7 +514,7 @@ class UnsetProfileConfig(RuntimeConfig):
             unrendered=project.unrendered,
             profile_name='',
             target_name='',
-            config=UnsetConfig(),
+            user_config=UnsetConfig(),
             threads=getattr(args, 'threads', 1),
             credentials=UnsetCredentials(),
             args=args,
@@ -1,5 +1,5 @@
 from pathlib import Path
-from typing import Dict, Any
+from typing import Dict, Any, Union
 from dbt.clients.yaml_helper import (  # noqa: F401
     yaml, Loader, Dumper, load_yaml_text
 )
@@ -29,13 +29,14 @@ Validator Error:
 """


-class SelectorConfig(Dict[str, SelectionSpec]):
+class SelectorConfig(Dict[str, Dict[str, Union[SelectionSpec, bool]]]):

     @classmethod
     def selectors_from_dict(cls, data: Dict[str, Any]) -> 'SelectorConfig':
         try:
             SelectorFile.validate(data)
             selector_file = SelectorFile.from_dict(data)
+            validate_selector_default(selector_file)
             selectors = parse_from_selectors_definition(selector_file)
         except ValidationError as exc:
             yaml_sel_cfg = yaml.dump(exc.instance)
@@ -118,6 +119,24 @@ def selector_config_from_data(
     return selectors


+def validate_selector_default(selector_file: SelectorFile) -> None:
+    """Check if a selector.yml file has more than 1 default key set to true"""
+    default_set: bool = False
+    default_selector_name: Union[str, None] = None
+
+    for selector in selector_file.selectors:
+        if selector.default is True and default_set is False:
+            default_set = True
+            default_selector_name = selector.name
+            continue
+        if selector.default is True and default_set is True:
+            raise DbtSelectorsError(
+                "Error when parsing the selector file. "
+                "Found multiple selectors with `default: true`: "
+                f"{default_selector_name} and {selector.name}"
+            )
+
+
 # These are utilities to clean up the dictionary created from
 # selectors.yml by turning the cli-string format entries into
 # normalized dictionary entries. It parallels the flow in
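The duplicate-default rule, as a standalone sketch (the selector dicts are invented):

selectors = [
    {'name': 'nightly', 'default': True},
    {'name': 'hourly', 'default': True},
]

default_name = None
for selector in selectors:
    if selector['default'] and default_name is not None:
        # Two selectors both claim to be the default: fail loudly.
        raise ValueError(
            'Found multiple selectors with `default: true`: '
            f"{default_name} and {selector['name']}"
        )
    if selector['default']:
        default_name = selector['name']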
@@ -12,7 +12,8 @@ from dbt.clients.yaml_helper import (  # noqa: F401
 )
 from dbt.contracts.graph.compiled import CompiledResource
 from dbt.exceptions import raise_compiler_error, MacroReturn
-from dbt.logger import GLOBAL_LOGGER as logger
+from dbt.events.functions import fire_event
+from dbt.events.types import MacroEventInfo, MacroEventDebug
 from dbt.version import __version__ as dbt_version

 # These modules are added to the context. Consider alternative
@@ -443,9 +444,9 @@ class BaseContext(metaclass=ContextMeta):
         {% endmacro %}"
         """
         if info:
-            logger.info(msg)
+            fire_event(MacroEventInfo(msg))
         else:
-            logger.debug(msg)
+            fire_event(MacroEventDebug(msg))
         return ''

     @contextproperty
@@ -524,14 +525,8 @@ class BaseContext(metaclass=ContextMeta):
            -- no-op
         {% endif %}

-        The list of valid flags are:
-
-        - `flags.STRICT_MODE`: True if `--strict` (or `-S`) was provided on the
-          command line
-        - `flags.FULL_REFRESH`: True if `--full-refresh` was provided on the
-          command line
-        - `flags.NON_DESTRUCTIVE`: True if `--non-destructive` was provided on
-          the command line
+        This supports all flags defined in flags submodule (core/dbt/flags.py)
+        TODO: Replace with object that provides read-only access to flag values
         """
         return flags
@@ -97,7 +97,7 @@ class BaseContextConfigGenerator(Generic[T]):
         result = {}
         for key, value in level_config.items():
             if key.startswith('+'):
-                result[key[1:]] = deepcopy(value)
+                result[key[1:].strip()] = deepcopy(value)
             elif not isinstance(value, dict):
                 result[key] = deepcopy(value)

@@ -120,11 +120,12 @@ class BaseContextConfigGenerator(Generic[T]):

     def calculate_node_config(
         self,
-        config_calls: List[Dict[str, Any]],
+        config_call_dict: Dict[str, Any],
         fqn: List[str],
         resource_type: NodeType,
         project_name: str,
         base: bool,
+        patch_config_dict: Dict[str, Any] = None
     ) -> BaseConfig:
         own_config = self.get_node_project(project_name)

@@ -134,8 +135,15 @@ class BaseContextConfigGenerator(Generic[T]):
         for fqn_config in project_configs:
             result = self._update_from_config(result, fqn_config)

-        for config_call in config_calls:
-            result = self._update_from_config(result, config_call)
+        # When schema files patch config, it has lower precedence than
+        # config in the models (config_call_dict), so we add the patch_config_dict
+        # before the config_call_dict
+        if patch_config_dict:
+            result = self._update_from_config(result, patch_config_dict)
+
+        # config_calls are created in the 'experimental' model parser and
+        # the ParseConfigObject (via add_config_call)
+        result = self._update_from_config(result, config_call_dict)

         if own_config.project_name != self._active_project.project_name:
             for fqn_config in self._active_project_configs(fqn, resource_type):
@@ -147,11 +155,12 @@ class BaseContextConfigGenerator(Generic[T]):
     @abstractmethod
     def calculate_node_config_dict(
         self,
-        config_calls: List[Dict[str, Any]],
+        config_call_dict: Dict[str, Any],
         fqn: List[str],
         resource_type: NodeType,
         project_name: str,
         base: bool,
+        patch_config_dict: Dict[str, Any],
     ) -> Dict[str, Any]:
         ...

@@ -186,18 +195,20 @@ class ContextConfigGenerator(BaseContextConfigGenerator[C]):

     def calculate_node_config_dict(
         self,
-        config_calls: List[Dict[str, Any]],
+        config_call_dict: Dict[str, Any],
         fqn: List[str],
         resource_type: NodeType,
         project_name: str,
         base: bool,
+        patch_config_dict: dict = None
     ) -> Dict[str, Any]:
         config = self.calculate_node_config(
-            config_calls=config_calls,
+            config_call_dict=config_call_dict,
             fqn=fqn,
             resource_type=resource_type,
             project_name=project_name,
             base=base,
+            patch_config_dict=patch_config_dict
         )
         finalized = config.finalize_and_validate()
         return finalized.to_dict(omit_none=True)
@@ -209,18 +220,20 @@ class UnrenderedConfigGenerator(BaseContextConfigGenerator[Dict[str, Any]]):

     def calculate_node_config_dict(
         self,
-        config_calls: List[Dict[str, Any]],
+        config_call_dict: Dict[str, Any],
         fqn: List[str],
         resource_type: NodeType,
         project_name: str,
         base: bool,
+        patch_config_dict: dict = None
     ) -> Dict[str, Any]:
         return self.calculate_node_config(
-            config_calls=config_calls,
+            config_call_dict=config_call_dict,
             fqn=fqn,
             resource_type=resource_type,
             project_name=project_name,
             base=base,
+            patch_config_dict=patch_config_dict
         )

     def initial_result(
@@ -251,20 +264,39 @@ class ContextConfig:
         resource_type: NodeType,
         project_name: str,
     ) -> None:
-        self._config_calls: List[Dict[str, Any]] = []
+        self._config_call_dict: Dict[str, Any] = {}
         self._active_project = active_project
         self._fqn = fqn
         self._resource_type = resource_type
         self._project_name = project_name

-    def update_in_model_config(self, opts: Dict[str, Any]) -> None:
-        self._config_calls.append(opts)
+    def add_config_call(self, opts: Dict[str, Any]) -> None:
+        dct = self._config_call_dict
+        self._add_config_call(dct, opts)
+
+    @classmethod
+    def _add_config_call(cls, config_call_dict, opts: Dict[str, Any]) -> None:
+        for k, v in opts.items():
+            # MergeBehavior for post-hook and pre-hook is to collect all
+            # values, instead of overwriting
+            if k in BaseConfig.mergebehavior['append']:
+                if not isinstance(v, list):
+                    v = [v]
+            if k in BaseConfig.mergebehavior['update'] and not isinstance(v, dict):
+                raise InternalException(f'expected dict, got {v}')
+            if k in config_call_dict and isinstance(config_call_dict[k], list):
+                config_call_dict[k].extend(v)
+            elif k in config_call_dict and isinstance(config_call_dict[k], dict):
+                config_call_dict[k].update(v)
+            else:
+                config_call_dict[k] = v

     def build_config_dict(
         self,
         base: bool = False,
         *,
         rendered: bool = True,
+        patch_config_dict: dict = None
     ) -> Dict[str, Any]:
         if rendered:
             src = ContextConfigGenerator(self._active_project)
@@ -272,9 +304,10 @@ class ContextConfig:
             src = UnrenderedConfigGenerator(self._active_project)

         return src.calculate_node_config_dict(
-            config_calls=self._config_calls,
+            config_call_dict=self._config_call_dict,
             fqn=self._fqn,
             resource_type=self._resource_type,
             project_name=self._project_name,
             base=base,
+            patch_config_dict=patch_config_dict
         )
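The merge behavior _add_config_call implements, reduced to a sketch; the append/update key sets here are assumptions standing in for BaseConfig.mergebehavior:

append_keys = {'pre-hook', 'post-hook', 'tags'}  # assumed membership
update_keys = {'meta', 'quoting'}                # assumed membership

def add_config_call(config_call_dict, opts):
    for k, v in opts.items():
        if k in append_keys and not isinstance(v, list):
            v = [v]
        if k in config_call_dict and isinstance(config_call_dict[k], list):
            config_call_dict[k].extend(v)   # collect hooks/tags across calls
        elif k in config_call_dict and isinstance(config_call_dict[k], dict):
            config_call_dict[k].update(v)   # merge dict-style configs
        else:
            config_call_dict[k] = v         # scalars: last call wins

acc = {}
add_config_call(acc, {'post-hook': 'analyze {{ this }}', 'materialized': 'view'})
add_config_call(acc, {'post-hook': 'grant select on {{ this }}', 'materialized': 'table'})
print(acc['materialized'])  # 'table'
print(acc['post-hook'])     # both hooks, in call order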
@@ -169,6 +169,8 @@ class TestMacroNamespace:

     def recursively_get_depends_on_macros(self, depends_on_macros, dep_macros):
         for macro_unique_id in depends_on_macros:
+            if macro_unique_id in dep_macros:
+                continue
             dep_macros.append(macro_unique_id)
             if macro_unique_id in self.macro_resolver.macros:
                 macro = self.macro_resolver.macros[macro_unique_id]
@@ -6,7 +6,6 @@ from typing import (
 )
 from typing_extensions import Protocol

-from dbt import deprecations
 from dbt.adapters.base.column import Column
 from dbt.adapters.factory import (
     get_adapter, get_adapter_package_names, get_adapter_type_names
@@ -40,6 +39,7 @@ from dbt.exceptions import (
     InternalException,
     ValidationException,
     RuntimeException,
+    macro_invalid_dispatch_arg,
     missing_config,
     raise_compiler_error,
     ref_invalid_args,
@@ -49,7 +49,6 @@ from dbt.exceptions import (
     wrapped_exports,
 )
 from dbt.config import IsFQNResource
-from dbt.logger import GLOBAL_LOGGER as logger  # noqa
 from dbt.node_types import NodeType

 from dbt.utils import (
@@ -120,39 +119,37 @@ class BaseDatabaseWrapper:
         self,
         macro_name: str,
         macro_namespace: Optional[str] = None,
-        packages: Optional[List[str]] = None,
+        packages: Optional[List[str]] = None,  # eventually remove since it's fully deprecated
     ) -> MacroGenerator:
         search_packages: List[Optional[str]]

         if '.' in macro_name:
-            suggest_package, suggest_macro_name = macro_name.split('.', 1)
+            suggest_macro_namespace, suggest_macro_name = macro_name.split('.', 1)
             msg = (
                 f'In adapter.dispatch, got a macro name of "{macro_name}", '
                 f'but "." is not a valid macro name component. Did you mean '
                 f'`adapter.dispatch("{suggest_macro_name}", '
-                f'packages=["{suggest_package}"])`?'
+                f'macro_namespace="{suggest_macro_namespace}")`?'
             )
             raise CompilationException(msg)

         if packages is not None:
-            deprecations.warn('dispatch-packages', macro_name=macro_name)
+            raise macro_invalid_dispatch_arg(macro_name)

-        namespace = packages if packages else macro_namespace
+        namespace = macro_namespace

         if namespace is None:
             search_packages = [None]
         elif isinstance(namespace, str):
             search_packages = self._adapter.config.get_macro_search_order(namespace)
             if not search_packages and namespace in self._adapter.config.dependencies:
-                search_packages = [namespace]
-            if not search_packages:
-                raise CompilationException(
-                    f'In adapter.dispatch, got a string packages argument '
-                    f'("{packages}"), but packages should be None or a list.'
-                )
+                search_packages = [self.config.project_name, namespace]
         else:
-            # Not a string and not None so must be a list
-            search_packages = namespace
+            raise CompilationException(
                f'In adapter.dispatch, got a list macro_namespace argument '
                f'("{macro_namespace}"), but macro_namespace should be None or a string.'
            )

         attempts = []

@@ -164,10 +161,10 @@ class BaseDatabaseWrapper:
                 macro = self._namespace.get_from_package(
                     package_name, search_name
                 )
-            except CompilationException as exc:
-                raise CompilationException(
-                    f'In dispatch: {exc.msg}',
-                ) from exc
+            except CompilationException:
+                # Only raise CompilationException if macro is not found in
+                # any package
+                macro = None

             if package_name is None:
                 attempts.append(search_name)
@@ -279,7 +276,7 @@ class Config(Protocol):
         ...


-# `config` implementations
+# Implementation of "config(..)" calls in models
 class ParseConfigObject(Config):
     def __init__(self, model, context_config: Optional[ContextConfig]):
         self.model = model
@@ -316,7 +313,7 @@ class ParseConfigObject(Config):
             raise RuntimeException(
                 'At parse time, did not receive a context config'
             )
-        self.context_config.update_in_model_config(opts)
+        self.context_config.add_config_call(opts)
         return ''

     def set(self, name, value):
@@ -1151,65 +1148,17 @@ class ProviderContext(ManifestContext):

     @contextmember
     def adapter_macro(self, name: str, *args, **kwargs):
-        """Find the most appropriate macro for the name, considering the
-        adapter type currently in use, and call that with the given arguments.
-
-        If the name has a `.` in it, the first section before the `.` is
-        interpreted as a package name, and the remainder as a macro name.
-
-        If no adapter is found, raise a compiler exception. If an invalid
-        package name is specified, raise a compiler exception.
-
-        Some examples:
-
-            {# dbt will call this macro by name, providing any arguments #}
-            {% macro create_table_as(temporary, relation, sql) -%}
-
-                {# dbt will dispatch the macro call to the relevant macro #}
-                {{ adapter_macro('create_table_as', temporary, relation, sql) }}
-            {%- endmacro %}
-
-            {#
-                If no macro matches the specified adapter, "default" will be
-                used
-            #}
-            {% macro default__create_table_as(temporary, relation, sql) -%}
-                ...
-            {%- endmacro %}
-
-            {# Example which defines special logic for Redshift #}
-            {% macro redshift__create_table_as(temporary, relation, sql) -%}
-                ...
-            {%- endmacro %}
-
-            {# Example which defines special logic for BigQuery #}
-            {% macro bigquery__create_table_as(temporary, relation, sql) -%}
-                ...
-            {%- endmacro %}
+        """This was deprecated in v0.18 in favor of adapter.dispatch
         """
-        deprecations.warn('adapter-macro', macro_name=name)
-        original_name = name
-        package_name = None
-        if '.' in name:
-            package_name, name = name.split('.', 1)
-
-        try:
-            macro = self.db_wrapper.dispatch(
-                macro_name=name, macro_namespace=package_name
-            )
-        except CompilationException as exc:
-            raise CompilationException(
-                f'In adapter_macro: {exc.msg}\n'
-                f"    Original name: '{original_name}'",
-                node=self.model
-            ) from exc
-        return macro(*args, **kwargs)
+        msg = (
+            'The "adapter_macro" macro has been deprecated. Instead, use '
+            'the `adapter.dispatch` method to find a macro and call the '
+            'result. For more information, see: '
+            'https://docs.getdbt.com/reference/dbt-jinja-functions/dispatch)'
+            ' adapter_macro was called for: {macro_name}'
+            .format(macro_name=name)
+        )
+        raise CompilationException(msg)


 class MacroContext(ProviderContext):
@@ -1243,7 +1192,7 @@ class ModelContext(ProviderContext):

     @contextproperty
     def pre_hooks(self) -> List[Dict[str, Any]]:
-        if isinstance(self.model, ParsedSourceDefinition):
+        if self.model.resource_type in [NodeType.Source, NodeType.Test]:
             return []
         return [
             h.to_dict(omit_none=True) for h in self.model.config.pre_hook
@@ -1251,7 +1200,7 @@ class ModelContext(ProviderContext):

     @contextproperty
     def post_hooks(self) -> List[Dict[str, Any]]:
-        if isinstance(self.model, ParsedSourceDefinition):
+        if self.model.resource_type in [NodeType.Source, NodeType.Test]:
             return []
         return [
             h.to_dict(omit_none=True) for h in self.model.config.post_hook
@@ -1,5 +1,6 @@
 import abc
 import itertools
+import hashlib
 from dataclasses import dataclass, field
 from typing import (
     Any, ClassVar, Dict, Tuple, Iterable, Optional, List, Callable,
@@ -127,6 +128,19 @@ class Credentials(
             'type not implemented for base credentials class'
         )

+    @property
+    def unique_field(self) -> str:
+        """Hashed and included in anonymous telemetry to track adapter adoption.
+        Return the field from Credentials that can uniquely identify
+        one team/organization using this adapter
+        """
+        raise NotImplementedError(
+            'unique_field not implemented for base credentials class'
+        )
+
+    def hashed_unique_field(self) -> str:
+        return hashlib.md5(self.unique_field.encode('utf-8')).hexdigest()
+
     def connection_info(
         self, *, with_aliases: bool = False
     ) -> Iterable[Tuple[str, Any]]:
@@ -176,14 +190,11 @@ class UserConfigContract(Protocol):
     partial_parse: Optional[bool] = None
     printer_width: Optional[int] = None

-    def set_values(self, cookie_dir: str) -> None:
-        ...
-

 class HasCredentials(Protocol):
     credentials: Credentials
     profile_name: str
-    config: UserConfigContract
+    user_config: UserConfigContract
     target_name: str
     threads: int
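How hashed_unique_field feeds anonymous telemetry, as a sketch (the field value is invented):

import hashlib

# Each adapter supplies one identifying field (e.g. a host or account name);
# only its md5 digest is reported, mirroring hashed_unique_field above.
unique_field = 'analytics.example.snowflakecomputing.com'
print(hashlib.md5(unique_field.encode('utf-8')).hexdigest())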
@@ -18,7 +18,8 @@ class ParseFileType(StrEnum):
     Model = 'model'
     Snapshot = 'snapshot'
     Analysis = 'analysis'
-    Test = 'test'
+    SingularTest = 'singular_test'
+    GenericTest = 'generic_test'
     Seed = 'seed'
     Documentation = 'docs'
     Schema = 'schema'
@@ -30,7 +31,8 @@ parse_file_type_to_parser = {
     ParseFileType.Model: 'ModelParser',
     ParseFileType.Snapshot: 'SnapshotParser',
     ParseFileType.Analysis: 'AnalysisParser',
-    ParseFileType.Test: 'DataTestParser',
+    ParseFileType.SingularTest: 'SingularTestParser',
+    ParseFileType.GenericTest: 'GenericTestParser',
     ParseFileType.Seed: 'SeedParser',
     ParseFileType.Documentation: 'DocumentationParser',
     ParseFileType.Schema: 'SchemaParser',
@@ -42,6 +44,7 @@ parse_file_type_to_parser = {
 class FilePath(dbtClassMixin):
     searched_path: str
     relative_path: str
+    modification_time: float
     project_root: str

     @property
@@ -132,6 +135,10 @@ class RemoteFile(dbtClassMixin):
|
||||
def original_file_path(self):
|
||||
return 'from remote system'
|
||||
|
||||
@property
|
||||
def modification_time(self):
|
||||
return 'from remote system'
|
||||
|
||||
|
||||
@dataclass
|
||||
class BaseSourceFile(dbtClassMixin, SerializableType):
|
||||
@@ -150,26 +157,15 @@ class BaseSourceFile(dbtClassMixin, SerializableType):
|
||||
def file_id(self):
|
||||
if isinstance(self.path, RemoteFile):
|
||||
return None
|
||||
if self.checksum.name == 'none':
|
||||
return None
|
||||
return f'{self.project_name}://{self.path.original_file_path}'
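
For orientation (this sketch is not part of the diff): a file_id is simply the project name and original file path joined by '://'. The concrete names below are invented; the separator is the same one that Manifest.add_patch later splits patch_path on.

# Illustrative only: project and path names are invented.
project_name = 'my_project'
original_file_path = 'models/staging/schema.yml'
file_id = f'{project_name}://{original_file_path}'
print(file_id)  # my_project://models/staging/schema.yml

# Splitting on '://' recovers the two halves, as add_patch does with
# node.patch_path further down in this diff.
package_name, existing_file_path = file_id.split('://')
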
def _serialize(self):
|
||||
dct = self.to_dict()
|
||||
if 'pp_files' in dct:
|
||||
del dct['pp_files']
|
||||
if 'pp_test_index' in dct:
|
||||
del dct['pp_test_index']
|
||||
return dct
|
||||
|
||||
@classmethod
|
||||
def _deserialize(cls, dct: Dict[str, int]):
|
||||
if dct['parse_file_type'] == 'schema':
|
||||
# TODO: why are these keys even here
|
||||
if 'pp_files' in dct:
|
||||
del dct['pp_files']
|
||||
if 'pp_test_index' in dct:
|
||||
del dct['pp_test_index']
|
||||
sf = SchemaSourceFile.from_dict(dct)
|
||||
else:
|
||||
sf = SourceFile.from_dict(dct)
|
||||
@@ -223,13 +219,13 @@ class SourceFile(BaseSourceFile):
|
||||
class SchemaSourceFile(BaseSourceFile):
|
||||
dfy: Dict[str, Any] = field(default_factory=dict)
|
||||
# these are in the manifest.nodes dictionary
|
||||
tests: List[str] = field(default_factory=list)
|
||||
tests: Dict[str, Any] = field(default_factory=dict)
|
||||
sources: List[str] = field(default_factory=list)
|
||||
exposures: List[str] = field(default_factory=list)
|
||||
# node patches contain models, seeds, snapshots, analyses
|
||||
ndp: List[str] = field(default_factory=list)
|
||||
# any macro patches in this file by macro unique_id.
|
||||
mcp: List[str] = field(default_factory=list)
|
||||
mcp: Dict[str, str] = field(default_factory=dict)
|
||||
# any source patches in this file. The entries are package, name pairs
|
||||
# Patches are only against external sources. Sources can be
|
||||
# created too, but those are in 'sources'
|
||||
@@ -255,14 +251,53 @@ class SchemaSourceFile(BaseSourceFile):
|
||||
|
||||
def __post_serialize__(self, dct):
|
||||
dct = super().__post_serialize__(dct)
|
||||
if 'pp_files' in dct:
|
||||
del dct['pp_files']
|
||||
if 'pp_test_index' in dct:
|
||||
del dct['pp_test_index']
|
||||
# Remove partial parsing specific data
|
||||
for key in ('pp_files', 'pp_test_index', 'pp_dict'):
|
||||
if key in dct:
|
||||
del dct[key]
|
||||
return dct
|
||||
|
||||
def append_patch(self, yaml_key, unique_id):
|
||||
self.node_patches.append(unique_id)
|
||||
|
||||
def add_test(self, node_unique_id, test_from):
|
||||
name = test_from['name']
|
||||
key = test_from['key']
|
||||
if key not in self.tests:
|
||||
self.tests[key] = {}
|
||||
if name not in self.tests[key]:
|
||||
self.tests[key][name] = []
|
||||
self.tests[key][name].append(node_unique_id)
|
||||
|
||||
def remove_tests(self, yaml_key, name):
|
||||
if yaml_key in self.tests:
|
||||
if name in self.tests[yaml_key]:
|
||||
del self.tests[yaml_key][name]
|
||||
|
||||
def get_tests(self, yaml_key, name):
|
||||
if yaml_key in self.tests:
|
||||
if name in self.tests[yaml_key]:
|
||||
return self.tests[yaml_key][name]
|
||||
return []
|
||||
|
||||
def get_key_and_name_for_test(self, test_unique_id):
|
||||
yaml_key = None
|
||||
block_name = None
|
||||
for key in self.tests.keys():
|
||||
for name in self.tests[key]:
|
||||
for unique_id in self.tests[key][name]:
|
||||
if unique_id == test_unique_id:
|
||||
yaml_key = key
|
||||
block_name = name
|
||||
break
|
||||
return (yaml_key, block_name)
|
||||
|
||||
def get_all_test_ids(self):
|
||||
test_ids = []
|
||||
for key in self.tests.keys():
|
||||
for name in self.tests[key]:
|
||||
test_ids.extend(self.tests[key][name])
|
||||
return test_ids
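
A sketch (not part of the diff) of the nested structure that add_test builds up and that the helpers above read back; the yaml key, block name, and unique ids are invented for illustration.

# Shape after add_test calls: {yaml_key: {block_name: [test unique_ids]}}
tests = {
    'models': {
        'stg_customers': [
            'test.my_project.unique_stg_customers_id.abc123',
            'test.my_project.not_null_stg_customers_id.def456',
        ],
    },
}

# get_tests('models', 'stg_customers') returns the two-item list above;
# get_key_and_name_for_test walks the same structure in reverse to recover
# the (yaml_key, block_name) pair for one unique_id; get_all_test_ids
# flattens every inner list into a single list of unique_ids.
all_ids = [
    uid
    for names in tests.values()
    for ids in names.values()
    for uid in ids
]
print(all_ids)
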
AnySourceFile = Union[SchemaSourceFile, SourceFile]
|
||||
|
||||
@@ -2,13 +2,13 @@ from dbt.contracts.graph.parsed import (
|
||||
HasTestMetadata,
|
||||
ParsedNode,
|
||||
ParsedAnalysisNode,
|
||||
ParsedDataTestNode,
|
||||
ParsedSingularTestNode,
|
||||
ParsedHookNode,
|
||||
ParsedModelNode,
|
||||
ParsedExposure,
|
||||
ParsedResource,
|
||||
ParsedRPCNode,
|
||||
ParsedSchemaTestNode,
|
||||
ParsedGenericTestNode,
|
||||
ParsedSeedNode,
|
||||
ParsedSnapshotNode,
|
||||
ParsedSourceDefinition,
|
||||
@@ -107,17 +107,21 @@ class CompiledSnapshotNode(CompiledNode):
|
||||
|
||||
|
||||
@dataclass
|
||||
class CompiledDataTestNode(CompiledNode):
|
||||
class CompiledSingularTestNode(CompiledNode):
|
||||
resource_type: NodeType = field(metadata={'restrict': [NodeType.Test]})
|
||||
config: TestConfig = field(default_factory=TestConfig)
|
||||
# Was not able to make mypy happy and keep the code working. We need to
|
||||
# refactor the various configs.
|
||||
config: TestConfig = field(default_factory=TestConfig) # type:ignore
|
||||
|
||||
|
||||
@dataclass
|
||||
class CompiledSchemaTestNode(CompiledNode, HasTestMetadata):
|
||||
# keep this in sync with ParsedSchemaTestNode!
|
||||
class CompiledGenericTestNode(CompiledNode, HasTestMetadata):
|
||||
# keep this in sync with ParsedGenericTestNode!
|
||||
resource_type: NodeType = field(metadata={'restrict': [NodeType.Test]})
|
||||
column_name: Optional[str] = None
|
||||
config: TestConfig = field(default_factory=TestConfig)
|
||||
# Was not able to make mypy happy and keep the code working. We need to
|
||||
# refactor the various configs.
|
||||
config: TestConfig = field(default_factory=TestConfig) # type:ignore
|
||||
|
||||
def same_contents(self, other) -> bool:
|
||||
if other is None:
|
||||
@@ -130,7 +134,7 @@ class CompiledSchemaTestNode(CompiledNode, HasTestMetadata):
|
||||
)
|
||||
|
||||
|
||||
CompiledTestNode = Union[CompiledDataTestNode, CompiledSchemaTestNode]
|
||||
CompiledTestNode = Union[CompiledSingularTestNode, CompiledGenericTestNode]
|
||||
|
||||
|
||||
PARSED_TYPES: Dict[Type[CompiledNode], Type[ParsedResource]] = {
|
||||
@@ -140,8 +144,8 @@ PARSED_TYPES: Dict[Type[CompiledNode], Type[ParsedResource]] = {
|
||||
CompiledRPCNode: ParsedRPCNode,
|
||||
CompiledSeedNode: ParsedSeedNode,
|
||||
CompiledSnapshotNode: ParsedSnapshotNode,
|
||||
CompiledDataTestNode: ParsedDataTestNode,
|
||||
CompiledSchemaTestNode: ParsedSchemaTestNode,
|
||||
CompiledSingularTestNode: ParsedSingularTestNode,
|
||||
CompiledGenericTestNode: ParsedGenericTestNode,
|
||||
}
|
||||
|
||||
|
||||
@@ -152,8 +156,8 @@ COMPILED_TYPES: Dict[Type[ParsedResource], Type[CompiledNode]] = {
|
||||
ParsedRPCNode: CompiledRPCNode,
|
||||
ParsedSeedNode: CompiledSeedNode,
|
||||
ParsedSnapshotNode: CompiledSnapshotNode,
|
||||
ParsedDataTestNode: CompiledDataTestNode,
|
||||
ParsedSchemaTestNode: CompiledSchemaTestNode,
|
||||
ParsedSingularTestNode: CompiledSingularTestNode,
|
||||
ParsedGenericTestNode: CompiledGenericTestNode,
|
||||
}
|
||||
|
||||
|
||||
@@ -181,22 +185,22 @@ def parsed_instance_for(compiled: CompiledNode) -> ParsedResource:
|
||||
|
||||
NonSourceCompiledNode = Union[
|
||||
CompiledAnalysisNode,
|
||||
CompiledDataTestNode,
|
||||
CompiledSingularTestNode,
|
||||
CompiledModelNode,
|
||||
CompiledHookNode,
|
||||
CompiledRPCNode,
|
||||
CompiledSchemaTestNode,
|
||||
CompiledGenericTestNode,
|
||||
CompiledSeedNode,
|
||||
CompiledSnapshotNode,
|
||||
]
|
||||
|
||||
NonSourceParsedNode = Union[
|
||||
ParsedAnalysisNode,
|
||||
ParsedDataTestNode,
|
||||
ParsedSingularTestNode,
|
||||
ParsedHookNode,
|
||||
ParsedModelNode,
|
||||
ParsedRPCNode,
|
||||
ParsedSchemaTestNode,
|
||||
ParsedGenericTestNode,
|
||||
ParsedSeedNode,
|
||||
ParsedSnapshotNode,
|
||||
]
|
||||
|
||||
@@ -5,7 +5,7 @@ from mashumaro import DataClassMessagePackMixin
|
||||
from multiprocessing.synchronize import Lock
|
||||
from typing import (
|
||||
Dict, List, Optional, Union, Mapping, MutableMapping, Any, Set, Tuple,
|
||||
TypeVar, Callable, Iterable, Generic, cast, AbstractSet, ClassVar
|
||||
TypeVar, Callable, Generic, cast, AbstractSet, ClassVar
|
||||
)
|
||||
from typing_extensions import Protocol
|
||||
from uuid import UUID
|
||||
@@ -14,7 +14,7 @@ from dbt.contracts.graph.compiled import (
|
||||
CompileResultNode, ManifestNode, NonSourceCompiledNode, GraphMemberNode
|
||||
)
|
||||
from dbt.contracts.graph.parsed import (
|
||||
ParsedMacro, ParsedDocumentation, ParsedNodePatch, ParsedMacroPatch,
|
||||
ParsedMacro, ParsedDocumentation,
|
||||
ParsedSourceDefinition, ParsedExposure, HasUniqueID,
|
||||
UnpatchedSourceDefinition, ManifestNodes
|
||||
)
|
||||
@@ -26,9 +26,7 @@ from dbt.contracts.util import (
|
||||
from dbt.dataclass_schema import dbtClassMixin
|
||||
from dbt.exceptions import (
|
||||
CompilationException,
|
||||
raise_duplicate_resource_name, raise_compiler_error, warn_or_error,
|
||||
raise_duplicate_patch_name,
|
||||
raise_duplicate_macro_patch_name, raise_duplicate_source_patch_name,
|
||||
raise_duplicate_resource_name, raise_compiler_error,
|
||||
)
|
||||
from dbt.helper_types import PathSet
|
||||
from dbt.logger import GLOBAL_LOGGER as logger
|
||||
@@ -97,24 +95,23 @@ class DocLookup(dbtClassMixin):
|
||||
|
||||
class SourceLookup(dbtClassMixin):
|
||||
def __init__(self, manifest: 'Manifest'):
|
||||
self.storage: Dict[Tuple[str, str], Dict[PackageName, UniqueID]] = {}
|
||||
self.storage: Dict[str, Dict[PackageName, UniqueID]] = {}
|
||||
self.populate(manifest)
|
||||
|
||||
def get_unique_id(self, key, package: Optional[PackageName]):
|
||||
return find_unique_id_for_package(self.storage, key, package)
|
||||
def get_unique_id(self, search_name, package: Optional[PackageName]):
|
||||
return find_unique_id_for_package(self.storage, search_name, package)
|
||||
|
||||
def find(self, key, package: Optional[PackageName], manifest: 'Manifest'):
|
||||
unique_id = self.get_unique_id(key, package)
|
||||
def find(self, search_name, package: Optional[PackageName], manifest: 'Manifest'):
|
||||
unique_id = self.get_unique_id(search_name, package)
|
||||
if unique_id is not None:
|
||||
return self.perform_lookup(unique_id, manifest)
|
||||
return None
|
||||
|
||||
def add_source(self, source: ParsedSourceDefinition):
|
||||
key = (source.source_name, source.name)
|
||||
if key not in self.storage:
|
||||
self.storage[key] = {}
|
||||
if source.search_name not in self.storage:
|
||||
self.storage[source.search_name] = {}
|
||||
|
||||
self.storage[key][source.package_name] = source.unique_id
|
||||
self.storage[source.search_name][source.package_name] = source.unique_id
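
For reference, a source's search_name appears to be the dotted source_name.table_name pair (that is how resolve_source composes it later in this diff), so the lookup's storage ends up shaped roughly like the sketch below; the concrete names are invented.

# storage: search_name -> package_name -> unique_id
storage = {
    'raw_jaffle_shop.customers': {
        'my_project': 'source.my_project.raw_jaffle_shop.customers',
    },
}
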
def populate(self, manifest):
|
||||
for source in manifest.sources.values():
|
||||
@@ -171,8 +168,45 @@ class RefableLookup(dbtClassMixin):
|
||||
return manifest.nodes[unique_id]
|
||||
|
||||
|
||||
# This handles both models/seeds/snapshots and sources
|
||||
class DisabledLookup(dbtClassMixin):
|
||||
|
||||
def __init__(self, manifest: 'Manifest'):
|
||||
self.storage: Dict[str, Dict[PackageName, List[Any]]] = {}
|
||||
self.populate(manifest)
|
||||
|
||||
def populate(self, manifest):
|
||||
for node in list(chain.from_iterable(manifest.disabled.values())):
|
||||
self.add_node(node)
|
||||
|
||||
def add_node(self, node):
|
||||
if node.search_name not in self.storage:
|
||||
self.storage[node.search_name] = {}
|
||||
if node.package_name not in self.storage[node.search_name]:
|
||||
self.storage[node.search_name][node.package_name] = []
|
||||
self.storage[node.search_name][node.package_name].append(node)
|
||||
|
||||
# This should return a list of disabled nodes. It's different from
|
||||
# the other Lookup functions in that it returns full nodes, not just unique_ids
|
||||
def find(self, search_name, package: Optional[PackageName]):
|
||||
if search_name not in self.storage:
|
||||
return None
|
||||
|
||||
pkg_dct: Mapping[PackageName, List[Any]] = self.storage[search_name]
|
||||
|
||||
if package is None:
|
||||
if not pkg_dct:
|
||||
return None
|
||||
else:
|
||||
return next(iter(pkg_dct.values()))
|
||||
elif package in pkg_dct:
|
||||
return pkg_dct[package]
|
||||
else:
|
||||
return None
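
A rough, standalone sketch (not part of the diff) of the storage layout and of find's fallback rules; plain strings stand in for real parsed nodes here.

# storage maps search_name -> package_name -> list of disabled nodes.
storage = {
    'my_disabled_model': {
        'my_project': ['<ParsedModelNode my_disabled_model>'],
    },
}

def find(search_name, package):
    # Same branching as DisabledLookup.find: with no package, the first
    # package's list wins; otherwise only an exact package match returns.
    if search_name not in storage:
        return None
    pkg_dct = storage[search_name]
    if package is None:
        return next(iter(pkg_dct.values())) if pkg_dct else None
    return pkg_dct.get(package)

print(find('my_disabled_model', None))         # the one-item list
print(find('my_disabled_model', 'other_pkg'))  # None
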
class AnalysisLookup(RefableLookup):
|
||||
_lookup_types: ClassVar[set] = set(NodeType.Analysis)
|
||||
_lookup_types: ClassVar[set] = set([NodeType.Analysis])
|
||||
|
||||
|
||||
def _search_packages(
|
||||
@@ -225,9 +259,7 @@ class ManifestMetadata(BaseArtifactMetadata):
|
||||
self.user_id = tracking.active_user.id
|
||||
|
||||
if self.send_anonymous_usage_stats is None:
|
||||
self.send_anonymous_usage_stats = (
|
||||
not tracking.active_user.do_not_track
|
||||
)
|
||||
self.send_anonymous_usage_stats = flags.SEND_ANONYMOUS_USAGE_STATS
|
||||
|
||||
@classmethod
|
||||
def default(cls):
|
||||
@@ -243,7 +275,7 @@ def _sort_values(dct):
|
||||
return {k: sorted(v) for k, v in dct.items()}
|
||||
|
||||
|
||||
def build_edges(nodes: List[ManifestNode]):
|
||||
def build_node_edges(nodes: List[ManifestNode]):
|
||||
"""Build the forward and backward edges on the given list of ParsedNodes
|
||||
and return them as two separate dictionaries, each mapping unique IDs to
|
||||
lists of edges.
|
||||
@@ -259,6 +291,18 @@ def build_edges(nodes: List[ManifestNode]):
|
||||
return _sort_values(forward_edges), _sort_values(backward_edges)
|
||||
|
||||
|
||||
# Build a map of children of macros and generic tests
|
||||
def build_macro_edges(nodes: List[Any]):
|
||||
forward_edges: Dict[str, List[str]] = {
|
||||
n.unique_id: [] for n in nodes if n.unique_id.startswith('macro') or n.depends_on.macros
|
||||
}
|
||||
for node in nodes:
|
||||
for unique_id in node.depends_on.macros:
|
||||
if unique_id in forward_edges.keys():
|
||||
forward_edges[unique_id].append(node.unique_id)
|
||||
return _sort_values(forward_edges)
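
A self-contained illustration (not part of the diff) of the child map that build_macro_edges produces; the dataclasses below are simplified stand-ins for real parsed nodes, and the unique ids are invented.

from dataclasses import dataclass, field
from typing import List

@dataclass
class _DependsOn:
    macros: List[str] = field(default_factory=list)

@dataclass
class _Node:
    unique_id: str
    depends_on: _DependsOn = field(default_factory=_DependsOn)

nodes = [
    _Node('macro.my_project.cents_to_dollars'),
    _Node('model.my_project.orders',
          _DependsOn(macros=['macro.my_project.cents_to_dollars'])),
]

# Same logic as build_macro_edges above, minus the sorting helper.
forward_edges = {
    n.unique_id: [] for n in nodes
    if n.unique_id.startswith('macro') or n.depends_on.macros
}
for node in nodes:
    for unique_id in node.depends_on.macros:
        if unique_id in forward_edges:
            forward_edges[unique_id].append(node.unique_id)

# {'macro.my_project.cents_to_dollars': ['model.my_project.orders'],
#  'model.my_project.orders': []}
print(forward_edges)
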
def _deepcopy(value):
|
||||
return value.from_dict(value.to_dict(omit_none=True))
|
||||
|
||||
@@ -370,38 +414,6 @@ class Searchable(Protocol):
|
||||
raise NotImplementedError('search_name not implemented')
|
||||
|
||||
|
||||
N = TypeVar('N', bound=Searchable)
|
||||
|
||||
|
||||
@dataclass
|
||||
class NameSearcher(Generic[N]):
|
||||
name: str
|
||||
package: Optional[str]
|
||||
nodetypes: List[NodeType]
|
||||
|
||||
def _matches(self, model: N) -> bool:
|
||||
"""Return True if the model matches the given name, package, and type.
|
||||
|
||||
If package is None, any package is allowed.
|
||||
nodetypes should be a container of NodeTypes that implements the 'in'
|
||||
operator.
|
||||
"""
|
||||
if model.resource_type not in self.nodetypes:
|
||||
return False
|
||||
|
||||
if self.name != model.search_name:
|
||||
return False
|
||||
|
||||
return self.package is None or self.package == model.package_name
|
||||
|
||||
def search(self, haystack: Iterable[N]) -> Optional[N]:
|
||||
"""Find an entry in the given iterable by name."""
|
||||
for model in haystack:
|
||||
if self._matches(model):
|
||||
return model
|
||||
return None
|
||||
|
||||
|
||||
D = TypeVar('D')
|
||||
|
||||
|
||||
@@ -525,6 +537,12 @@ class MacroMethods:
|
||||
return candidates
|
||||
|
||||
|
||||
@dataclass
|
||||
class ParsingInfo:
|
||||
static_analysis_parsed_path_count: int = 0
|
||||
static_analysis_path_count: int = 0
|
||||
|
||||
|
||||
@dataclass
|
||||
class ManifestStateCheck(dbtClassMixin):
|
||||
vars_hash: FileHash = field(default_factory=FileHash.empty)
|
||||
@@ -545,15 +563,13 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
||||
docs: MutableMapping[str, ParsedDocumentation] = field(default_factory=dict)
|
||||
exposures: MutableMapping[str, ParsedExposure] = field(default_factory=dict)
|
||||
selectors: MutableMapping[str, Any] = field(default_factory=dict)
|
||||
disabled: List[CompileResultNode] = field(default_factory=list)
|
||||
files: MutableMapping[str, AnySourceFile] = field(default_factory=dict)
|
||||
metadata: ManifestMetadata = field(default_factory=ManifestMetadata)
|
||||
flat_graph: Dict[str, Any] = field(default_factory=dict)
|
||||
state_check: ManifestStateCheck = field(default_factory=ManifestStateCheck)
|
||||
# Moved from the ParseResult object
|
||||
source_patches: MutableMapping[SourceKey, SourcePatch] = field(default_factory=dict)
|
||||
# following is from ParseResult
|
||||
_disabled: MutableMapping[str, List[CompileResultNode]] = field(default_factory=dict)
|
||||
disabled: MutableMapping[str, List[CompileResultNode]] = field(default_factory=dict)
|
||||
|
||||
_doc_lookup: Optional[DocLookup] = field(
|
||||
default=None, metadata={'serialize': lambda x: None, 'deserialize': lambda x: None}
|
||||
)
|
||||
@@ -563,12 +579,19 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
||||
_ref_lookup: Optional[RefableLookup] = field(
|
||||
default=None, metadata={'serialize': lambda x: None, 'deserialize': lambda x: None}
|
||||
)
|
||||
_disabled_lookup: Optional[DisabledLookup] = field(
|
||||
default=None, metadata={'serialize': lambda x: None, 'deserialize': lambda x: None}
|
||||
)
|
||||
_analysis_lookup: Optional[AnalysisLookup] = field(
|
||||
default=None, metadata={'serialize': lambda x: None, 'deserialize': lambda x: None}
|
||||
)
|
||||
_parsing_info: ParsingInfo = field(
|
||||
default_factory=ParsingInfo,
|
||||
metadata={'serialize': lambda x: None, 'deserialize': lambda x: None}
|
||||
)
|
||||
_lock: Lock = field(
|
||||
default_factory=flags.MP_CONTEXT.Lock,
|
||||
metadata={'serialize': lambda x: None, 'deserialize': lambda x: flags.MP_CONTEXT.Lock}
|
||||
metadata={'serialize': lambda x: None, 'deserialize': lambda x: None}
|
||||
)
|
||||
|
||||
def __pre_serialize__(self):
|
||||
@@ -577,6 +600,11 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
||||
self.source_patches = {}
|
||||
return self
|
||||
|
||||
@classmethod
|
||||
def __post_deserialize__(cls, obj):
|
||||
obj._lock = flags.MP_CONTEXT.Lock()
|
||||
return obj
|
||||
|
||||
def sync_update_node(
|
||||
self, new_node: NonSourceCompiledNode
|
||||
) -> NonSourceCompiledNode:
|
||||
@@ -627,26 +655,12 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
||||
}
|
||||
}
|
||||
|
||||
def find_disabled_by_name(
|
||||
self, name: str, package: Optional[str] = None
|
||||
) -> Optional[ManifestNode]:
|
||||
searcher: NameSearcher = NameSearcher(
|
||||
name, package, NodeType.refable()
|
||||
)
|
||||
result = searcher.search(self.disabled)
|
||||
return result
|
||||
|
||||
def find_disabled_source_by_name(
|
||||
self, source_name: str, table_name: str, package: Optional[str] = None
|
||||
) -> Optional[ParsedSourceDefinition]:
|
||||
search_name = f'{source_name}.{table_name}'
|
||||
searcher: NameSearcher = NameSearcher(
|
||||
search_name, package, [NodeType.Source]
|
||||
)
|
||||
result = searcher.search(self.disabled)
|
||||
if result is not None:
|
||||
assert isinstance(result, ParsedSourceDefinition)
|
||||
return result
|
||||
def build_disabled_by_file_id(self):
|
||||
disabled_by_file_id = {}
|
||||
for node_list in self.disabled.values():
|
||||
for node in node_list:
|
||||
disabled_by_file_id[node.file_id] = node
|
||||
return disabled_by_file_id
|
||||
|
||||
def _materialization_candidates_for(
|
||||
self, project_name: str,
|
||||
@@ -691,60 +705,6 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
||||
resource_fqns[resource_type_plural].add(tuple(resource.fqn))
|
||||
return resource_fqns
|
||||
|
||||
# This is called by 'parse_patch' in the NodePatchParser
|
||||
def add_patch(
|
||||
self, source_file: SchemaSourceFile, patch: ParsedNodePatch,
|
||||
) -> None:
|
||||
if patch.yaml_key in ['models', 'seeds', 'snapshots']:
|
||||
unique_id = self.ref_lookup.get_unique_id(patch.name, None)
|
||||
elif patch.yaml_key == 'analyses':
|
||||
unique_id = self.analysis_lookup.get_unique_id(patch.name, None)
|
||||
else:
|
||||
raise dbt.exceptions.InternalException(
|
||||
f'Unexpected yaml_key {patch.yaml_key} for patch in '
|
||||
f'file {source_file.path.original_file_path}'
|
||||
)
|
||||
if unique_id is None:
|
||||
# This will usually happen when a node is disabled
|
||||
return
|
||||
|
||||
# patches can't be overwritten
|
||||
node = self.nodes.get(unique_id)
|
||||
if node:
|
||||
if node.patch_path:
|
||||
package_name, existing_file_path = node.patch_path.split('://')
|
||||
raise_duplicate_patch_name(patch, existing_file_path)
|
||||
source_file.append_patch(patch.yaml_key, unique_id)
|
||||
node.patch(patch)
|
||||
|
||||
def add_macro_patch(
|
||||
self, source_file: SchemaSourceFile, patch: ParsedMacroPatch,
|
||||
) -> None:
|
||||
# macros are fully namespaced
|
||||
unique_id = f'macro.{patch.package_name}.{patch.name}'
|
||||
macro = self.macros.get(unique_id)
|
||||
if not macro:
|
||||
warn_or_error(
|
||||
f'WARNING: Found documentation for macro "{patch.name}" '
|
||||
f'which was not found'
|
||||
)
|
||||
return
|
||||
if macro.patch_path:
|
||||
package_name, existing_file_path = macro.patch_path.split('://')
|
||||
raise_duplicate_macro_patch_name(patch, existing_file_path)
|
||||
source_file.macro_patches.append(unique_id)
|
||||
macro.patch(patch)
|
||||
|
||||
def add_source_patch(
|
||||
self, source_file: SchemaSourceFile, patch: SourcePatch,
|
||||
) -> None:
|
||||
# source patches must be unique
|
||||
key = (patch.overrides, patch.name)
|
||||
if key in self.source_patches:
|
||||
raise_duplicate_source_patch_name(patch, self.source_patches[key])
|
||||
self.source_patches[key] = patch
|
||||
source_file.source_patches.append(key)
|
||||
|
||||
def get_used_schemas(self, resource_types=None):
|
||||
return frozenset({
|
||||
(node.database, node.schema) for node in
|
||||
@@ -758,7 +718,6 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
||||
chain(self.nodes.values(), self.sources.values())
|
||||
)
|
||||
|
||||
# This is used in dbt.task.rpc.sql_commands 'add_new_refs'
|
||||
def deepcopy(self):
|
||||
return Manifest(
|
||||
nodes={k: _deepcopy(v) for k, v in self.nodes.items()},
|
||||
@@ -768,7 +727,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
||||
exposures={k: _deepcopy(v) for k, v in self.exposures.items()},
|
||||
selectors={k: _deepcopy(v) for k, v in self.selectors.items()},
|
||||
metadata=self.metadata,
|
||||
disabled=[_deepcopy(n) for n in self.disabled],
|
||||
disabled={k: _deepcopy(v) for k, v in self.disabled.items()},
|
||||
files={k: _deepcopy(v) for k, v in self.files.items()},
|
||||
state_check=_deepcopy(self.state_check),
|
||||
)
|
||||
@@ -779,10 +738,18 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
||||
self.sources.values(),
|
||||
self.exposures.values(),
|
||||
))
|
||||
forward_edges, backward_edges = build_edges(edge_members)
|
||||
forward_edges, backward_edges = build_node_edges(edge_members)
|
||||
self.child_map = forward_edges
|
||||
self.parent_map = backward_edges
|
||||
|
||||
def build_macro_child_map(self):
|
||||
edge_members = list(chain(
|
||||
self.nodes.values(),
|
||||
self.macros.values(),
|
||||
))
|
||||
forward_edges = build_macro_edges(edge_members)
|
||||
return forward_edges
|
||||
|
||||
def writable_manifest(self):
|
||||
self.build_parent_and_child_maps()
|
||||
return WritableManifest(
|
||||
@@ -843,6 +810,15 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
||||
def rebuild_ref_lookup(self):
|
||||
self._ref_lookup = RefableLookup(self)
|
||||
|
||||
@property
|
||||
def disabled_lookup(self) -> DisabledLookup:
|
||||
if self._disabled_lookup is None:
|
||||
self._disabled_lookup = DisabledLookup(self)
|
||||
return self._disabled_lookup
|
||||
|
||||
def rebuild_disabled_lookup(self):
|
||||
self._disabled_lookup = DisabledLookup(self)
|
||||
|
||||
@property
|
||||
def analysis_lookup(self) -> AnalysisLookup:
|
||||
if self._analysis_lookup is None:
|
||||
@@ -860,7 +836,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
||||
) -> MaybeNonSource:
|
||||
|
||||
node: Optional[ManifestNode] = None
|
||||
disabled: Optional[ManifestNode] = None
|
||||
disabled: Optional[List[ManifestNode]] = None
|
||||
|
||||
candidates = _search_packages(
|
||||
current_project, node_package, target_model_package
|
||||
@@ -873,12 +849,12 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
||||
|
||||
# it's possible that the node is disabled
|
||||
if disabled is None:
|
||||
disabled = self.find_disabled_by_name(
|
||||
disabled = self.disabled_lookup.find(
|
||||
target_model_name, pkg
|
||||
)
|
||||
|
||||
if disabled is not None:
|
||||
return Disabled(disabled)
|
||||
if disabled:
|
||||
return Disabled(disabled[0])
|
||||
return None
|
||||
|
||||
# Called by dbt.parser.manifest._resolve_sources_for_exposure
|
||||
@@ -890,24 +866,24 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
||||
current_project: str,
|
||||
node_package: str
|
||||
) -> MaybeParsedSource:
|
||||
key = (target_source_name, target_table_name)
|
||||
search_name = f'{target_source_name}.{target_table_name}'
|
||||
candidates = _search_packages(current_project, node_package)
|
||||
|
||||
source: Optional[ParsedSourceDefinition] = None
|
||||
disabled: Optional[ParsedSourceDefinition] = None
|
||||
disabled: Optional[List[ParsedSourceDefinition]] = None
|
||||
|
||||
for pkg in candidates:
|
||||
source = self.source_lookup.find(key, pkg, self)
|
||||
source = self.source_lookup.find(search_name, pkg, self)
|
||||
if source is not None and source.config.enabled:
|
||||
return source
|
||||
|
||||
if disabled is None:
|
||||
disabled = self.find_disabled_source_by_name(
|
||||
target_source_name, target_table_name, pkg
|
||||
disabled = self.disabled_lookup.find(
|
||||
f'{target_source_name}.{target_table_name}', pkg
|
||||
)
|
||||
|
||||
if disabled is not None:
|
||||
return Disabled(disabled)
|
||||
if disabled:
|
||||
return Disabled(disabled[0])
|
||||
return None
|
||||
|
||||
# Called by DocsRuntimeContext.doc
|
||||
@@ -1016,10 +992,11 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
||||
_check_duplicates(node, self.nodes)
|
||||
self.nodes[node.unique_id] = node
|
||||
|
||||
def add_node(self, source_file: AnySourceFile, node: ManifestNodes):
|
||||
def add_node(self, source_file: AnySourceFile, node: ManifestNodes, test_from=None):
|
||||
self.add_node_nofile(node)
|
||||
if isinstance(source_file, SchemaSourceFile):
|
||||
source_file.tests.append(node.unique_id)
|
||||
assert test_from
|
||||
source_file.add_test(node.unique_id, test_from)
|
||||
else:
|
||||
source_file.nodes.append(node.unique_id)
|
||||
|
||||
@@ -1029,15 +1006,17 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
||||
source_file.exposures.append(exposure.unique_id)
|
||||
|
||||
def add_disabled_nofile(self, node: CompileResultNode):
|
||||
if node.unique_id in self._disabled:
|
||||
self._disabled[node.unique_id].append(node)
|
||||
# There can be multiple disabled nodes for the same unique_id
|
||||
if node.unique_id in self.disabled:
|
||||
self.disabled[node.unique_id].append(node)
|
||||
else:
|
||||
self._disabled[node.unique_id] = [node]
|
||||
self.disabled[node.unique_id] = [node]
|
||||
|
||||
def add_disabled(self, source_file: AnySourceFile, node: CompileResultNode):
|
||||
def add_disabled(self, source_file: AnySourceFile, node: CompileResultNode, test_from=None):
|
||||
self.add_disabled_nofile(node)
|
||||
if isinstance(source_file, SchemaSourceFile):
|
||||
source_file.tests.append(node.unique_id)
|
||||
assert test_from
|
||||
source_file.add_test(node.unique_id, test_from)
|
||||
else:
|
||||
source_file.nodes.append(node.unique_id)
|
||||
|
||||
@@ -1063,16 +1042,17 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
||||
self.docs,
|
||||
self.exposures,
|
||||
self.selectors,
|
||||
self.disabled,
|
||||
self.files,
|
||||
self.metadata,
|
||||
self.flat_graph,
|
||||
self.state_check,
|
||||
self.source_patches,
|
||||
self._disabled,
|
||||
self.disabled,
|
||||
self._doc_lookup,
|
||||
self._source_lookup,
|
||||
self._ref_lookup,
|
||||
self._disabled_lookup,
|
||||
self._analysis_lookup,
|
||||
)
|
||||
return self.__class__, args
|
||||
|
||||
@@ -1090,7 +1070,7 @@ AnyManifest = Union[Manifest, MacroManifest]
|
||||
|
||||
|
||||
@dataclass
|
||||
@schema_version('manifest', 2)
|
||||
@schema_version('manifest', 3)
|
||||
class WritableManifest(ArtifactMixin):
|
||||
nodes: Mapping[UniqueID, ManifestNode] = field(
|
||||
metadata=dict(description=(
|
||||
@@ -1122,8 +1102,8 @@ class WritableManifest(ArtifactMixin):
|
||||
'The selectors defined in selectors.yml'
|
||||
))
|
||||
)
|
||||
disabled: Optional[List[CompileResultNode]] = field(metadata=dict(
|
||||
description='A list of the disabled nodes in the target'
|
||||
disabled: Optional[Mapping[UniqueID, List[CompileResultNode]]] = field(metadata=dict(
|
||||
description='A mapping of the disabled nodes in the target'
|
||||
))
|
||||
parent_map: Optional[NodeEdgeMap] = field(metadata=dict(
|
||||
description='A mapping from child nodes to their dependencies',
|
||||
|
||||
@@ -2,13 +2,13 @@ from dataclasses import field, Field, dataclass
|
||||
from enum import Enum
|
||||
from itertools import chain
|
||||
from typing import (
|
||||
Any, List, Optional, Dict, Union, Type, TypeVar
|
||||
Any, List, Optional, Dict, Union, Type, TypeVar, Callable
|
||||
)
|
||||
from dbt.dataclass_schema import (
|
||||
dbtClassMixin, ValidationError, register_pattern,
|
||||
)
|
||||
from dbt.contracts.graph.unparsed import AdditionalPropertiesAllowed
|
||||
from dbt.exceptions import InternalException
|
||||
from dbt.exceptions import InternalException, CompilationException
|
||||
from dbt.contracts.util import Replaceable, list_str
|
||||
from dbt import hooks
|
||||
from dbt.node_types import NodeType
|
||||
@@ -204,6 +204,34 @@ class BaseConfig(
|
||||
else:
|
||||
self._extra[key] = value
|
||||
|
||||
def __delitem__(self, key):
|
||||
if hasattr(self, key):
|
||||
msg = (
|
||||
'Error, tried to delete config key "{}": Cannot delete '
|
||||
'built-in keys'
|
||||
).format(key)
|
||||
raise CompilationException(msg)
|
||||
else:
|
||||
del self._extra[key]
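
Roughly how the deletion rule above behaves, as a standalone mimic (not part of the diff); the real class checks hasattr on the dataclass, which is approximated here with a fixed set of built-in keys.

# Built-in (declared) config keys cannot be deleted; only "extra" keys
# that were merged in from project configs can be removed.
class TinyConfig:
    _builtin = {'enabled', 'materialized', 'tags'}

    def __init__(self, **extra):
        self._extra = dict(extra)

    def __delitem__(self, key):
        if key in self._builtin:
            raise Exception(
                f'Error, tried to delete config key "{key}": '
                'Cannot delete built-in keys'
            )
        del self._extra[key]

cfg = TinyConfig(custom_flag=True)
del cfg['custom_flag']       # fine: an extra key
try:
    del cfg['materialized']  # raises: built-in keys are protected
except Exception as exc:
    print(exc)
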
def _content_iterator(self, include_condition: Callable[[Field], bool]):
|
||||
seen = set()
|
||||
for fld, _ in self._get_fields():
|
||||
seen.add(fld.name)
|
||||
if include_condition(fld):
|
||||
yield fld.name
|
||||
|
||||
for key in self._extra:
|
||||
if key not in seen:
|
||||
seen.add(key)
|
||||
yield key
|
||||
|
||||
def __iter__(self):
|
||||
yield from self._content_iterator(include_condition=lambda f: True)
|
||||
|
||||
def __len__(self):
|
||||
return len(self._get_fields()) + len(self._extra)
|
||||
|
||||
@staticmethod
|
||||
def compare_key(
|
||||
unrendered: Dict[str, Any],
|
||||
@@ -239,8 +267,15 @@ class BaseConfig(
|
||||
return False
|
||||
return True
|
||||
|
||||
# This is used in 'add_config_call' to create the combined config_call_dict.
|
||||
# 'meta' moved here from node
|
||||
mergebehavior = {
|
||||
"append": ['pre-hook', 'pre_hook', 'post-hook', 'post_hook', 'tags'],
|
||||
"update": ['quoting', 'column_types', 'meta'],
|
||||
}
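
A hedged illustration (not part of the diff) of what the two merge behaviors amount to when successive config values are combined; the keys come from the mapping above, the values are invented, and anything not listed falls through to the "clobber" case noted just below.

# 'append' keys accumulate values across config calls; 'update' keys are
# merged like dictionaries; everything else is a plain overwrite.
existing = {'tags': ['nightly'], 'meta': {'owner': 'data-eng'}, 'materialized': 'view'}
incoming = {'tags': ['finance'], 'meta': {'pii': False}, 'materialized': 'table'}

combined = dict(existing)
combined['tags'] = existing['tags'] + incoming['tags']        # append behavior
combined['meta'] = {**existing['meta'], **incoming['meta']}   # update behavior
combined['materialized'] = incoming['materialized']           # clobber

# {'tags': ['nightly', 'finance'],
#  'meta': {'owner': 'data-eng', 'pii': False},
#  'materialized': 'table'}
print(combined)
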
@classmethod
|
||||
def _extract_dict(
|
||||
def _merge_dicts(
|
||||
cls, src: Dict[str, Any], data: Dict[str, Any]
|
||||
) -> Dict[str, Any]:
|
||||
"""Find all the items in data that match a target_field on this class,
|
||||
@@ -286,10 +321,10 @@ class BaseConfig(
|
||||
|
||||
adapter_config_cls = get_config_class_by_name(adapter_type)
|
||||
|
||||
self_merged = self._extract_dict(dct, data)
|
||||
self_merged = self._merge_dicts(dct, data)
|
||||
dct.update(self_merged)
|
||||
|
||||
adapter_merged = adapter_config_cls._extract_dict(dct, data)
|
||||
adapter_merged = adapter_config_cls._merge_dicts(dct, data)
|
||||
dct.update(adapter_merged)
|
||||
|
||||
# any remaining fields must be "clobber"
|
||||
@@ -321,33 +356,8 @@ class SourceConfig(BaseConfig):
|
||||
|
||||
|
||||
@dataclass
|
||||
class NodeConfig(BaseConfig):
|
||||
class NodeAndTestConfig(BaseConfig):
|
||||
enabled: bool = True
|
||||
materialized: str = 'view'
|
||||
persist_docs: Dict[str, Any] = field(default_factory=dict)
|
||||
post_hook: List[Hook] = field(
|
||||
default_factory=list,
|
||||
metadata=MergeBehavior.Append.meta(),
|
||||
)
|
||||
pre_hook: List[Hook] = field(
|
||||
default_factory=list,
|
||||
metadata=MergeBehavior.Append.meta(),
|
||||
)
|
||||
# this only applies for config v1, so it doesn't participate in comparison
|
||||
vars: Dict[str, Any] = field(
|
||||
default_factory=dict,
|
||||
metadata=metas(CompareBehavior.Exclude, MergeBehavior.Update),
|
||||
)
|
||||
quoting: Dict[str, Any] = field(
|
||||
default_factory=dict,
|
||||
metadata=MergeBehavior.Update.meta(),
|
||||
)
|
||||
# This is actually only used by seeds. Should it be available to others?
|
||||
# That would be a breaking change!
|
||||
column_types: Dict[str, Any] = field(
|
||||
default_factory=dict,
|
||||
metadata=MergeBehavior.Update.meta(),
|
||||
)
|
||||
# these fields are included in serialized output, but are not part of
|
||||
# config comparison (they are part of database_representation)
|
||||
alias: Optional[str] = field(
|
||||
@@ -368,7 +378,38 @@ class NodeConfig(BaseConfig):
|
||||
MergeBehavior.Append,
|
||||
CompareBehavior.Exclude),
|
||||
)
|
||||
meta: Dict[str, Any] = field(
|
||||
default_factory=dict,
|
||||
metadata=MergeBehavior.Update.meta(),
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class NodeConfig(NodeAndTestConfig):
|
||||
# Note: if any new fields are added with MergeBehavior, also update the
|
||||
# 'mergebehavior' dictionary
|
||||
materialized: str = 'view'
|
||||
persist_docs: Dict[str, Any] = field(default_factory=dict)
|
||||
post_hook: List[Hook] = field(
|
||||
default_factory=list,
|
||||
metadata=MergeBehavior.Append.meta(),
|
||||
)
|
||||
pre_hook: List[Hook] = field(
|
||||
default_factory=list,
|
||||
metadata=MergeBehavior.Append.meta(),
|
||||
)
|
||||
quoting: Dict[str, Any] = field(
|
||||
default_factory=dict,
|
||||
metadata=MergeBehavior.Update.meta(),
|
||||
)
|
||||
# This is actually only used by seeds. Should it be available to others?
|
||||
# That would be a breaking change!
|
||||
column_types: Dict[str, Any] = field(
|
||||
default_factory=dict,
|
||||
metadata=MergeBehavior.Update.meta(),
|
||||
)
|
||||
full_refresh: Optional[bool] = None
|
||||
on_schema_change: Optional[str] = 'ignore'
|
||||
|
||||
@classmethod
|
||||
def __pre_deserialize__(cls, data):
|
||||
@@ -410,7 +451,8 @@ class SeedConfig(NodeConfig):
|
||||
|
||||
|
||||
@dataclass
|
||||
class TestConfig(NodeConfig):
|
||||
class TestConfig(NodeAndTestConfig):
|
||||
# this is repeated because of a different default
|
||||
schema: Optional[str] = field(
|
||||
default='dbt_test__audit',
|
||||
metadata=CompareBehavior.Exclude.meta(),
|
||||
|
||||
@@ -148,6 +148,7 @@ class ParsedNodeMixins(dbtClassMixin):
|
||||
"""Given a ParsedNodePatch, add the new information to the node."""
|
||||
# explicitly pick out the parts to update so we don't inadvertently
|
||||
# step on the model name or anything
|
||||
# Note: config should already be updated
|
||||
self.patch_path: Optional[str] = patch.file_id
|
||||
# update created_at so process_docs will run in partial parsing
|
||||
self.created_at = int(time.time())
|
||||
@@ -155,20 +156,10 @@ class ParsedNodeMixins(dbtClassMixin):
|
||||
self.columns = patch.columns
|
||||
self.meta = patch.meta
|
||||
self.docs = patch.docs
|
||||
if flags.STRICT_MODE:
|
||||
# It seems odd that an instance can be invalid
|
||||
# Maybe there should be validation or restrictions
|
||||
# elsewhere?
|
||||
assert isinstance(self, dbtClassMixin)
|
||||
dct = self.to_dict(omit_none=False)
|
||||
self.validate(dct)
|
||||
|
||||
def get_materialization(self):
|
||||
return self.config.materialized
|
||||
|
||||
def local_vars(self):
|
||||
return self.config.vars
|
||||
|
||||
|
||||
@dataclass
|
||||
class ParsedNodeMandatory(
|
||||
@@ -191,7 +182,7 @@ class ParsedNodeMandatory(
|
||||
class ParsedNodeDefaults(ParsedNodeMandatory):
|
||||
tags: List[str] = field(default_factory=list)
|
||||
refs: List[List[str]] = field(default_factory=list)
|
||||
sources: List[List[Any]] = field(default_factory=list)
|
||||
sources: List[List[str]] = field(default_factory=list)
|
||||
depends_on: DependsOn = field(default_factory=DependsOn)
|
||||
description: str = field(default='')
|
||||
columns: Dict[str, ColumnInfo] = field(default_factory=dict)
|
||||
@@ -203,6 +194,7 @@ class ParsedNodeDefaults(ParsedNodeMandatory):
|
||||
deferred: bool = False
|
||||
unrendered_config: Dict[str, Any] = field(default_factory=dict)
|
||||
created_at: int = field(default_factory=lambda: int(time.time()))
|
||||
config_call_dict: Dict[str, Any] = field(default_factory=dict)
|
||||
|
||||
def write_node(self, target_path: str, subdirectory: str, payload: str):
|
||||
if (os.path.basename(self.path) ==
|
||||
@@ -229,6 +221,11 @@ class ParsedNode(ParsedNodeDefaults, ParsedNodeMixins, SerializableType):
|
||||
def _serialize(self):
|
||||
return self.to_dict()
|
||||
|
||||
def __post_serialize__(self, dct):
|
||||
if 'config_call_dict' in dct:
|
||||
del dct['config_call_dict']
|
||||
return dct
|
||||
|
||||
@classmethod
|
||||
def _deserialize(cls, dct: Dict[str, int]):
|
||||
# The serialized ParsedNodes do not differ from each other
|
||||
@@ -245,9 +242,9 @@ class ParsedNode(ParsedNodeDefaults, ParsedNodeMixins, SerializableType):
|
||||
return ParsedRPCNode.from_dict(dct)
|
||||
elif resource_type == 'test':
|
||||
if 'test_metadata' in dct:
|
||||
return ParsedSchemaTestNode.from_dict(dct)
|
||||
return ParsedGenericTestNode.from_dict(dct)
|
||||
else:
|
||||
return ParsedDataTestNode.from_dict(dct)
|
||||
return ParsedSingularTestNode.from_dict(dct)
|
||||
elif resource_type == 'operation':
|
||||
return ParsedHookNode.from_dict(dct)
|
||||
elif resource_type == 'seed':
|
||||
@@ -258,10 +255,16 @@ class ParsedNode(ParsedNodeDefaults, ParsedNodeMixins, SerializableType):
|
||||
return cls.from_dict(dct)
|
||||
|
||||
def _persist_column_docs(self) -> bool:
|
||||
return bool(self.config.persist_docs.get('columns'))
|
||||
if hasattr(self.config, 'persist_docs'):
|
||||
assert isinstance(self.config, NodeConfig)
|
||||
return bool(self.config.persist_docs.get('columns'))
|
||||
return False
|
||||
|
||||
def _persist_relation_docs(self) -> bool:
|
||||
return bool(self.config.persist_docs.get('relation'))
|
||||
if hasattr(self.config, 'persist_docs'):
|
||||
assert isinstance(self.config, NodeConfig)
|
||||
return bool(self.config.persist_docs.get('relation'))
|
||||
return False
|
||||
|
||||
def same_body(self: T, other: T) -> bool:
|
||||
return self.raw_sql == other.raw_sql
|
||||
@@ -409,17 +412,21 @@ class HasTestMetadata(dbtClassMixin):
|
||||
|
||||
|
||||
@dataclass
|
||||
class ParsedDataTestNode(ParsedNode):
|
||||
class ParsedSingularTestNode(ParsedNode):
|
||||
resource_type: NodeType = field(metadata={'restrict': [NodeType.Test]})
|
||||
config: TestConfig = field(default_factory=TestConfig)
|
||||
# Was not able to make mypy happy and keep the code working. We need to
|
||||
# refactor the various configs.
|
||||
config: TestConfig = field(default_factory=TestConfig) # type: ignore
|
||||
|
||||
|
||||
@dataclass
|
||||
class ParsedSchemaTestNode(ParsedNode, HasTestMetadata):
|
||||
# keep this in sync with CompiledSchemaTestNode!
|
||||
class ParsedGenericTestNode(ParsedNode, HasTestMetadata):
|
||||
# keep this in sync with CompiledGenericTestNode!
|
||||
resource_type: NodeType = field(metadata={'restrict': [NodeType.Test]})
|
||||
column_name: Optional[str] = None
|
||||
config: TestConfig = field(default_factory=TestConfig)
|
||||
# Was not able to make mypy happy and keep the code working. We need to
|
||||
# refactor the various configs.
|
||||
config: TestConfig = field(default_factory=TestConfig) # type: ignore
|
||||
|
||||
def same_contents(self, other) -> bool:
|
||||
if other is None:
|
||||
@@ -456,6 +463,7 @@ class ParsedPatch(HasYamlMetadata, Replaceable):
|
||||
description: str
|
||||
meta: Dict[str, Any]
|
||||
docs: Docs
|
||||
config: Dict[str, Any]
|
||||
|
||||
|
||||
# The parsed node update is only the 'patch', not the test. The test became a
|
||||
@@ -487,9 +495,6 @@ class ParsedMacro(UnparsedBaseNode, HasUniqueID):
|
||||
arguments: List[MacroArgument] = field(default_factory=list)
|
||||
created_at: int = field(default_factory=lambda: int(time.time()))
|
||||
|
||||
def local_vars(self):
|
||||
return {}
|
||||
|
||||
def patch(self, patch: ParsedMacroPatch):
|
||||
self.patch_path: Optional[str] = patch.file_id
|
||||
self.description = patch.description
|
||||
@@ -497,11 +502,6 @@ class ParsedMacro(UnparsedBaseNode, HasUniqueID):
|
||||
self.meta = patch.meta
|
||||
self.docs = patch.docs
|
||||
self.arguments = patch.arguments
|
||||
if flags.STRICT_MODE:
|
||||
# What does this actually validate?
|
||||
assert isinstance(self, dbtClassMixin)
|
||||
dct = self.to_dict(omit_none=False)
|
||||
self.validate(dct)
|
||||
|
||||
def same_contents(self, other: Optional['ParsedMacro']) -> bool:
|
||||
if other is None:
|
||||
@@ -592,7 +592,8 @@ class ParsedSourceDefinition(
|
||||
UnparsedBaseNode,
|
||||
HasUniqueID,
|
||||
HasRelationMetadata,
|
||||
HasFqn
|
||||
HasFqn,
|
||||
|
||||
):
|
||||
name: str
|
||||
source_name: str
|
||||
@@ -689,6 +690,10 @@ class ParsedSourceDefinition(
|
||||
def depends_on_nodes(self):
|
||||
return []
|
||||
|
||||
@property
|
||||
def depends_on(self):
|
||||
return DependsOn(macros=[], nodes=[])
|
||||
|
||||
@property
|
||||
def refs(self):
|
||||
return []
|
||||
@@ -768,11 +773,11 @@ class ParsedExposure(UnparsedBaseNode, HasUniqueID, HasFqn):
|
||||
|
||||
ManifestNodes = Union[
|
||||
ParsedAnalysisNode,
|
||||
ParsedDataTestNode,
|
||||
ParsedSingularTestNode,
|
||||
ParsedHookNode,
|
||||
ParsedModelNode,
|
||||
ParsedRPCNode,
|
||||
ParsedSchemaTestNode,
|
||||
ParsedGenericTestNode,
|
||||
ParsedSeedNode,
|
||||
ParsedSnapshotNode,
|
||||
]
|
||||
|
||||
@@ -44,6 +44,11 @@ class UnparsedMacro(UnparsedBaseNode, HasSQL):
|
||||
resource_type: NodeType = field(metadata={'restrict': [NodeType.Macro]})
|
||||
|
||||
|
||||
@dataclass
|
||||
class UnparsedGenericTest(UnparsedBaseNode, HasSQL):
|
||||
resource_type: NodeType = field(metadata={'restrict': [NodeType.Macro]})
|
||||
|
||||
|
||||
@dataclass
|
||||
class UnparsedNode(UnparsedBaseNode, HasSQL):
|
||||
name: str
|
||||
@@ -126,12 +131,17 @@ class HasYamlMetadata(dbtClassMixin):
|
||||
|
||||
|
||||
@dataclass
|
||||
class UnparsedAnalysisUpdate(HasColumnDocs, HasDocs, HasYamlMetadata):
|
||||
class HasConfig():
|
||||
config: Dict[str, Any] = field(default_factory=dict)
|
||||
|
||||
|
||||
@dataclass
|
||||
class UnparsedAnalysisUpdate(HasConfig, HasColumnDocs, HasDocs, HasYamlMetadata):
|
||||
pass
|
||||
|
||||
|
||||
@dataclass
|
||||
class UnparsedNodeUpdate(HasColumnTests, HasTests, HasYamlMetadata):
|
||||
class UnparsedNodeUpdate(HasConfig, HasColumnTests, HasTests, HasYamlMetadata):
|
||||
quote_columns: Optional[bool] = None
|
||||
|
||||
|
||||
@@ -143,7 +153,7 @@ class MacroArgument(dbtClassMixin):
|
||||
|
||||
|
||||
@dataclass
|
||||
class UnparsedMacroUpdate(HasDocs, HasYamlMetadata):
|
||||
class UnparsedMacroUpdate(HasConfig, HasDocs, HasYamlMetadata):
|
||||
arguments: List[MacroArgument] = field(default_factory=list)
|
||||
|
||||
|
||||
@@ -261,6 +271,7 @@ class UnparsedSourceDefinition(dbtClassMixin, Replaceable):
|
||||
loaded_at_field: Optional[str] = None
|
||||
tables: List[UnparsedSourceTableDefinition] = field(default_factory=list)
|
||||
tags: List[str] = field(default_factory=list)
|
||||
config: Dict[str, Any] = field(default_factory=dict)
|
||||
|
||||
@property
|
||||
def yaml_key(self) -> 'str':
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
from dbt.contracts.util import Replaceable, Mergeable, list_str
|
||||
from dbt.contracts.connection import UserConfigContract, QueryComment
|
||||
from dbt.contracts.connection import QueryComment, UserConfigContract
|
||||
from dbt.helper_types import NoValue
|
||||
from dbt.logger import GLOBAL_LOGGER as logger # noqa
|
||||
from dbt import tracking
|
||||
from dbt import ui
|
||||
from dbt.dataclass_schema import (
|
||||
dbtClassMixin, ValidationError,
|
||||
HyphenatedDbtClassMixin,
|
||||
@@ -83,6 +81,7 @@ class GitPackage(Package):
|
||||
class RegistryPackage(Package):
|
||||
package: str
|
||||
version: Union[RawVersion, List[RawVersion]]
|
||||
install_prerelease: Optional[bool] = False
|
||||
|
||||
def get_versions(self) -> List[str]:
|
||||
if isinstance(self.version, list):
|
||||
@@ -175,8 +174,10 @@ class Project(HyphenatedDbtClassMixin, Replaceable):
|
||||
config_version: int
|
||||
project_root: Optional[str] = None
|
||||
source_paths: Optional[List[str]] = None
|
||||
model_paths: Optional[List[str]] = None
|
||||
macro_paths: Optional[List[str]] = None
|
||||
data_paths: Optional[List[str]] = None
|
||||
seed_paths: Optional[List[str]] = None
|
||||
test_paths: Optional[List[str]] = None
|
||||
analysis_paths: Optional[List[str]] = None
|
||||
docs_paths: Optional[List[str]] = None
|
||||
@@ -186,7 +187,7 @@ class Project(HyphenatedDbtClassMixin, Replaceable):
|
||||
clean_targets: Optional[List[str]] = None
|
||||
profile: Optional[str] = None
|
||||
log_path: Optional[str] = None
|
||||
modules_path: Optional[str] = None
|
||||
packages_install_path: Optional[str] = None
|
||||
quoting: Optional[Quoting] = None
|
||||
on_run_start: Optional[List[str]] = field(default_factory=list_str)
|
||||
on_run_end: Optional[List[str]] = field(default_factory=list_str)
|
||||
@@ -229,25 +230,21 @@ class UserConfig(ExtensibleDbtClassMixin, Replaceable, UserConfigContract):
|
||||
use_colors: Optional[bool] = None
|
||||
partial_parse: Optional[bool] = None
|
||||
printer_width: Optional[int] = None
|
||||
|
||||
def set_values(self, cookie_dir):
|
||||
if self.send_anonymous_usage_stats:
|
||||
tracking.initialize_tracking(cookie_dir)
|
||||
else:
|
||||
tracking.do_not_track()
|
||||
|
||||
if self.use_colors is not None:
|
||||
ui.use_colors(self.use_colors)
|
||||
|
||||
if self.printer_width:
|
||||
ui.printer_width(self.printer_width)
|
||||
write_json: Optional[bool] = None
|
||||
warn_error: Optional[bool] = None
|
||||
log_format: Optional[bool] = None
|
||||
debug: Optional[bool] = None
|
||||
version_check: Optional[bool] = None
|
||||
fail_fast: Optional[bool] = None
|
||||
use_experimental_parser: Optional[bool] = None
|
||||
static_parser: Optional[bool] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class ProfileConfig(HyphenatedDbtClassMixin, Replaceable):
|
||||
profile_name: str = field(metadata={'preserve_underscore': True})
|
||||
target_name: str = field(metadata={'preserve_underscore': True})
|
||||
config: UserConfig
|
||||
user_config: UserConfig = field(metadata={'preserve_underscore': True})
|
||||
threads: int
|
||||
# TODO: make this a dynamic union of some kind?
|
||||
credentials: Optional[Dict[str, Any]]
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
from collections.abc import Mapping
|
||||
from dataclasses import dataclass, fields
|
||||
from dataclasses import dataclass
|
||||
from typing import (
|
||||
Optional, Dict,
|
||||
)
|
||||
@@ -7,9 +7,8 @@ from typing_extensions import Protocol
|
||||
|
||||
from dbt.dataclass_schema import dbtClassMixin, StrEnum
|
||||
|
||||
from dbt import deprecations
|
||||
from dbt.contracts.util import Replaceable
|
||||
from dbt.exceptions import CompilationException
|
||||
from dbt.exceptions import raise_dataclass_not_dict, CompilationException
|
||||
from dbt.utils import deep_merge
|
||||
|
||||
|
||||
@@ -43,13 +42,10 @@ class FakeAPIObject(dbtClassMixin, Replaceable, Mapping):
|
||||
raise KeyError(key) from None
|
||||
|
||||
def __iter__(self):
|
||||
deprecations.warn('not-a-dictionary', obj=self)
|
||||
for _, name in self._get_fields():
|
||||
yield name
|
||||
raise_dataclass_not_dict(self)
|
||||
|
||||
def __len__(self):
|
||||
deprecations.warn('not-a-dictionary', obj=self)
|
||||
return len(fields(self.__class__))
|
||||
raise_dataclass_not_dict(self)
|
||||
|
||||
def incorporate(self, **kwargs):
|
||||
value = self.to_dict(omit_none=True)
|
||||
|
||||
@@ -78,6 +78,7 @@ class TestStatus(StrEnum):
|
||||
Error = NodeStatus.Error
|
||||
Fail = NodeStatus.Fail
|
||||
Warn = NodeStatus.Warn
|
||||
Skipped = NodeStatus.Skipped
|
||||
|
||||
|
||||
class FreshnessStatus(StrEnum):
|
||||
@@ -184,7 +185,7 @@ class RunExecutionResult(
|
||||
|
||||
|
||||
@dataclass
|
||||
@schema_version('run-results', 2)
|
||||
@schema_version('run-results', 3)
|
||||
class RunResultsArtifact(ExecutionResult, ArtifactMixin):
|
||||
results: Sequence[RunResultOutput]
|
||||
args: Dict[str, Any] = field(default_factory=dict)
|
||||
@@ -284,6 +285,9 @@ class SourceFreshnessOutput(dbtClassMixin):
|
||||
status: FreshnessStatus
|
||||
criteria: FreshnessThreshold
|
||||
adapter_response: Dict[str, Any]
|
||||
timing: List[TimingInfo]
|
||||
thread_id: str
|
||||
execution_time: float
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -332,7 +336,10 @@ def process_freshness_result(
|
||||
max_loaded_at_time_ago_in_s=result.age,
|
||||
status=result.status,
|
||||
criteria=criteria,
|
||||
adapter_response=result.adapter_response
|
||||
adapter_response=result.adapter_response,
|
||||
timing=result.timing,
|
||||
thread_id=result.thread_id,
|
||||
execution_time=result.execution_time,
|
||||
)
|
||||
|
||||
|
||||
@@ -362,7 +369,7 @@ class FreshnessResult(ExecutionResult):
|
||||
|
||||
|
||||
@dataclass
|
||||
@schema_version('sources', 1)
|
||||
@schema_version('sources', 2)
|
||||
class FreshnessExecutionResultArtifact(
|
||||
ArtifactMixin,
|
||||
VersionedSchema,
|
||||
|
||||
@@ -58,6 +58,7 @@ class RPCExecParameters(RPCParameters):
|
||||
class RPCCompileParameters(RPCParameters):
|
||||
threads: Optional[int] = None
|
||||
models: Union[None, str, List[str]] = None
|
||||
select: Union[None, str, List[str]] = None
|
||||
exclude: Union[None, str, List[str]] = None
|
||||
selector: Optional[str] = None
|
||||
state: Optional[str] = None
|
||||
@@ -71,12 +72,14 @@ class RPCListParameters(RPCParameters):
|
||||
select: Union[None, str, List[str]] = None
|
||||
selector: Optional[str] = None
|
||||
output: Optional[str] = 'json'
|
||||
output_keys: Optional[List[str]] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class RPCRunParameters(RPCParameters):
|
||||
threads: Optional[int] = None
|
||||
models: Union[None, str, List[str]] = None
|
||||
select: Union[None, str, List[str]] = None
|
||||
exclude: Union[None, str, List[str]] = None
|
||||
selector: Optional[str] = None
|
||||
state: Optional[str] = None
|
||||
@@ -116,6 +119,17 @@ class RPCDocsGenerateParameters(RPCParameters):
|
||||
state: Optional[str] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class RPCBuildParameters(RPCParameters):
|
||||
resource_types: Optional[List[str]] = None
|
||||
select: Union[None, str, List[str]] = None
|
||||
threads: Optional[int] = None
|
||||
exclude: Union[None, str, List[str]] = None
|
||||
selector: Optional[str] = None
|
||||
state: Optional[str] = None
|
||||
defer: Optional[bool] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class RPCCliParameters(RPCParameters):
|
||||
cli: str
|
||||
@@ -186,6 +200,8 @@ class RPCRunOperationParameters(RPCParameters):
|
||||
class RPCSourceFreshnessParameters(RPCParameters):
|
||||
threads: Optional[int] = None
|
||||
select: Union[None, str, List[str]] = None
|
||||
exclude: Union[None, str, List[str]] = None
|
||||
selector: Optional[str] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
|
||||
@@ -9,6 +9,7 @@ class SelectorDefinition(dbtClassMixin):
|
||||
name: str
|
||||
definition: Union[str, Dict[str, Any]]
|
||||
description: str = ''
|
||||
default: bool = False
|
||||
|
||||
|
||||
@dataclass
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
from pathlib import Path
|
||||
from .graph.manifest import WritableManifest
|
||||
from .results import RunResultsArtifact
|
||||
from typing import Optional
|
||||
from dbt.exceptions import IncompatibleSchemaException
|
||||
|
||||
@@ -8,6 +9,7 @@ class PreviousState:
|
||||
def __init__(self, path: Path):
|
||||
self.path: Path = path
|
||||
self.manifest: Optional[WritableManifest] = None
|
||||
self.results: Optional[RunResultsArtifact] = None
|
||||
|
||||
manifest_path = self.path / 'manifest.json'
|
||||
if manifest_path.exists() and manifest_path.is_file():
|
||||
@@ -16,3 +18,11 @@ class PreviousState:
|
||||
except IncompatibleSchemaException as exc:
|
||||
exc.add_filename(str(manifest_path))
|
||||
raise
|
||||
|
||||
results_path = self.path / 'run_results.json'
|
||||
if results_path.exists() and results_path.is_file():
|
||||
try:
|
||||
self.results = RunResultsArtifact.read(str(results_path))
|
||||
except IncompatibleSchemaException as exc:
|
||||
exc.add_filename(str(results_path))
|
||||
raise
|
||||
|
||||
@@ -170,6 +170,12 @@ class BaseArtifactMetadata(dbtClassMixin):
|
||||
)
|
||||
env: Dict[str, str] = dataclasses.field(default_factory=get_metadata_env)
|
||||
|
||||
def __post_serialize__(self, dct):
|
||||
dct = super().__post_serialize__(dct)
|
||||
if dct['generated_at'] and dct['generated_at'].endswith('+00:00'):
|
||||
dct['generated_at'] = dct['generated_at'].replace('+00:00', '') + "Z"
|
||||
return dct
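
A tiny standalone check (not part of the diff) of the timestamp rewrite above: an ISO-8601 string ending in '+00:00' is rewritten to end in 'Z'. The sample timestamp is arbitrary.

generated_at = '2021-07-01T12:34:56.789000+00:00'
if generated_at.endswith('+00:00'):
    generated_at = generated_at.replace('+00:00', '') + 'Z'
print(generated_at)  # 2021-07-01T12:34:56.789000Z
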
def schema_version(name: str, version: int):
|
||||
def inner(cls: Type[VersionedSchema]):
|
||||
|
||||
@@ -43,91 +43,28 @@ class DBTDeprecation:
|
||||
active_deprecations.add(self.name)
|
||||
|
||||
|
||||
class DispatchPackagesDeprecation(DBTDeprecation):
|
||||
_name = 'dispatch-packages'
|
||||
class PackageRedirectDeprecation(DBTDeprecation):
|
||||
_name = 'package-redirect'
|
||||
_description = '''\
|
||||
The "packages" argument of adapter.dispatch() has been deprecated.
|
||||
Use the "macro_namespace" argument instead.
|
||||
|
||||
Raised during dispatch for: {macro_name}
|
||||
|
||||
For more information, see:
|
||||
|
||||
https://docs.getdbt.com/reference/dbt-jinja-functions/dispatch
|
||||
The `{old_name}` package is deprecated in favor of `{new_name}`. Please update
|
||||
your `packages.yml` configuration to use `{new_name}` instead.
|
||||
'''
|
||||
|
||||
|
||||
class MaterializationReturnDeprecation(DBTDeprecation):
|
||||
_name = 'materialization-return'
|
||||
|
||||
class PackageInstallPathDeprecation(DBTDeprecation):
|
||||
_name = 'install-packages-path'
|
||||
_description = '''\
|
||||
The materialization ("{materialization}") did not explicitly return a list
|
||||
of relations to add to the cache. By default the target relation will be
|
||||
added, but this behavior will be removed in a future version of dbt.
|
||||
|
||||
|
||||
|
||||
For more information, see:
|
||||
|
||||
https://docs.getdbt.com/v0.15/docs/creating-new-materializations#section-6-returning-relations
|
||||
The default package install path has changed from `dbt_modules` to `dbt_packages`.
|
||||
Please update `clean-targets` in `dbt_project.yml` and check `.gitignore` as well.
|
||||
Or, set `packages-install-path: dbt_modules` if you'd like to keep the current value.
|
||||
'''
|
||||
|
||||
|
||||
class NotADictionaryDeprecation(DBTDeprecation):
|
||||
_name = 'not-a-dictionary'
|
||||
|
||||
class ConfigPathDeprecation(DBTDeprecation):
|
||||
_name = 'project_config_path'
|
||||
_description = '''\
|
||||
The object ("{obj}") was used as a dictionary. In a future version of dbt
|
||||
this capability will be removed from objects of this type.
|
||||
'''
|
||||
|
||||
|
||||
class ColumnQuotingDeprecation(DBTDeprecation):
|
||||
_name = 'column-quoting-unset'
|
||||
|
||||
_description = '''\
|
||||
The quote_columns parameter was not set for seeds, so the default value of
|
||||
False was chosen. The default will change to True in a future release.
|
||||
|
||||
|
||||
|
||||
For more information, see:
|
||||
|
||||
https://docs.getdbt.com/v0.15/docs/seeds#section-specify-column-quoting
|
||||
'''
|
||||
|
||||
|
||||
class ModelsKeyNonModelDeprecation(DBTDeprecation):
|
||||
_name = 'models-key-mismatch'
|
||||
|
||||
_description = '''\
|
||||
"{node.name}" is a {node.resource_type} node, but it is specified in
|
||||
the {patch.yaml_key} section of {patch.original_file_path}.
|
||||
|
||||
|
||||
|
||||
To fix this warning, place the `{node.name}` specification under
|
||||
the {expected_key} key instead.
|
||||
|
||||
This warning will become an error in a future release.
|
||||
'''
|
||||
|
||||
|
||||
class ExecuteMacrosReleaseDeprecation(DBTDeprecation):
|
||||
_name = 'execute-macro-release'
|
||||
_description = '''\
|
||||
The "release" argument to execute_macro is now ignored, and will be removed
|
||||
in a future release of dbt. At that time, providing a `release` argument
|
||||
will result in an error.
|
||||
'''
|
||||
|
||||
|
||||
class AdapterMacroDeprecation(DBTDeprecation):
|
||||
_name = 'adapter-macro'
|
||||
_description = '''\
|
||||
The "adapter_macro" macro has been deprecated. Instead, use the
|
||||
`adapter.dispatch` method to find a macro and call the result.
|
||||
adapter_macro was called for: {macro_name}
|
||||
The `{deprecated_path}` config has been deprecated in favor of `{exp_path}`.
|
||||
Please update your `dbt_project.yml` configuration to reflect this change.
|
||||
'''
|
||||
|
||||
|
||||
@@ -169,13 +106,9 @@ def warn(name, *args, **kwargs):
|
||||
active_deprecations: Set[str] = set()
|
||||
|
||||
deprecations_list: List[DBTDeprecation] = [
|
||||
DispatchPackagesDeprecation(),
|
||||
MaterializationReturnDeprecation(),
|
||||
NotADictionaryDeprecation(),
|
||||
ColumnQuotingDeprecation(),
|
||||
ModelsKeyNonModelDeprecation(),
|
||||
ExecuteMacrosReleaseDeprecation(),
|
||||
AdapterMacroDeprecation(),
|
||||
ConfigPathDeprecation(),
|
||||
PackageInstallPathDeprecation(),
|
||||
PackageRedirectDeprecation()
|
||||
]
|
||||
|
||||
deprecations: Dict[str, DBTDeprecation] = {
|
||||
|
||||
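As a hedged illustration of how a registered deprecation is triggered (the package names below are invented), the module-level `warn(name, *args, **kwargs)` helper looks up an entry by its `_name` and formats its `_description`:

from dbt import deprecations

# Hypothetical old/new package names, purely for illustration.
deprecations.warn(
    'package-redirect',
    old_name='fishtown-analytics/dbt_utils',
    new_name='dbt-labs/dbt_utils',
)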
@@ -91,7 +91,7 @@ class PinnedPackage(BasePackage):
|
||||
|
||||
def get_installation_path(self, project, renderer):
|
||||
dest_dirname = self.get_project_name(project, renderer)
|
||||
return os.path.join(project.modules_path, dest_dirname)
|
||||
return os.path.join(project.packages_install_path, dest_dirname)
|
||||
|
||||
def get_subdirectory(self):
|
||||
return None
|
||||
|
||||
@@ -30,9 +30,13 @@ class RegistryPackageMixin:
|
||||
|
||||
|
||||
class RegistryPinnedPackage(RegistryPackageMixin, PinnedPackage):
|
||||
def __init__(self, package: str, version: str) -> None:
|
||||
def __init__(self,
|
||||
package: str,
|
||||
version: str,
|
||||
version_latest: str) -> None:
|
||||
super().__init__(package)
|
||||
self.version = version
|
||||
self.version_latest = version_latest
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
@@ -44,6 +48,9 @@ class RegistryPinnedPackage(RegistryPackageMixin, PinnedPackage):
|
||||
def get_version(self):
|
||||
return self.version
|
||||
|
||||
def get_version_latest(self):
|
||||
return self.version_latest
|
||||
|
||||
def nice_version_name(self):
|
||||
return 'version {}'.format(self.version)
|
||||
|
||||
@@ -61,8 +68,8 @@ class RegistryPinnedPackage(RegistryPackageMixin, PinnedPackage):
|
||||
system.make_directory(os.path.dirname(tar_path))
|
||||
|
||||
download_url = metadata.downloads.tarball
|
||||
system.download(download_url, tar_path)
|
||||
deps_path = project.modules_path
|
||||
system.download_with_retries(download_url, tar_path)
|
||||
deps_path = project.packages_install_path
|
||||
package_name = self.get_project_name(project, renderer)
|
||||
system.untar_package(tar_path, deps_path, package_name)
|
||||
|
||||
@@ -71,10 +78,14 @@ class RegistryUnpinnedPackage(
|
||||
RegistryPackageMixin, UnpinnedPackage[RegistryPinnedPackage]
|
||||
):
|
||||
def __init__(
|
||||
self, package: str, versions: List[semver.VersionSpecifier]
|
||||
self,
|
||||
package: str,
|
||||
versions: List[semver.VersionSpecifier],
|
||||
install_prerelease: bool
|
||||
) -> None:
|
||||
super().__init__(package)
|
||||
self.versions = versions
|
||||
self.install_prerelease = install_prerelease
|
||||
|
||||
def _check_in_index(self):
|
||||
index = registry.index_cached()
|
||||
@@ -91,13 +102,18 @@ class RegistryUnpinnedPackage(
|
||||
semver.VersionSpecifier.from_version_string(v)
|
||||
for v in raw_version
|
||||
]
|
||||
return cls(package=contract.package, versions=versions)
|
||||
return cls(
|
||||
package=contract.package,
|
||||
versions=versions,
|
||||
install_prerelease=contract.install_prerelease
|
||||
)
|
||||
|
||||
def incorporate(
|
||||
self, other: 'RegistryUnpinnedPackage'
|
||||
) -> 'RegistryUnpinnedPackage':
|
||||
return RegistryUnpinnedPackage(
|
||||
package=self.package,
|
||||
install_prerelease=self.install_prerelease,
|
||||
versions=self.versions + other.versions,
|
||||
)
|
||||
|
||||
@@ -111,12 +127,18 @@ class RegistryUnpinnedPackage(
|
||||
raise DependencyException(new_msg) from e
|
||||
|
||||
available = registry.get_available_versions(self.package)
|
||||
installable = semver.filter_installable(
|
||||
available,
|
||||
self.install_prerelease
|
||||
)
|
||||
available_latest = installable[-1]
|
||||
|
||||
# for now, pick a version and then recurse. later on,
|
||||
# we'll probably want to traverse multiple options
|
||||
# so we can match packages. not going to make a difference
|
||||
# right now.
|
||||
target = semver.resolve_to_specific_version(range_, available)
|
||||
target = semver.resolve_to_specific_version(range_, installable)
|
||||
if not target:
|
||||
package_version_not_found(self.package, range_, available)
|
||||
return RegistryPinnedPackage(package=self.package, version=target)
|
||||
package_version_not_found(self.package, range_, installable)
|
||||
return RegistryPinnedPackage(package=self.package, version=target,
|
||||
version_latest=available_latest)
|
||||
|
||||
9  core/dbt/events/README.md  Normal file
@@ -0,0 +1,9 @@
# Events Module

The Events module is the implementation of structured logging. These events represent both a programmatic interface to dbt processes and human-readable messaging in one centralized place. The centralization allows for leveraging mypy to enforce interface invariants across all dbt events, and the distinct type layer allows for decoupling events from libraries such as loggers.

# Using the Events Module
The event module provides types that represent what is happening in dbt in `events.types`. These types are intended to represent an exhaustive list of all things happening within dbt that will need to be logged, streamed, or printed. To fire an event, `events.functions::fire_event` is the entry point to the module from everywhere in dbt.

# Adding a New Event
In `events.types`, add a new class that represents the new event. This may be a simple class with no values, or it may be a dataclass with some values to construct downstream messaging. Only include the data necessary to construct this message within this class. You must extend all destinations (e.g., if your log message belongs on the CLI, extend `CliEventABC`) as well as the log level this event belongs to.
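As a rough sketch of what that looks like in practice (the event name and field below are invented for illustration), a new event and a call site firing it might be:

from dataclasses import dataclass

from dbt.events.functions import fire_event
from dbt.events.types import CliEventABC, InfoLevel


@dataclass
class ManifestWrittenToDisk(InfoLevel, CliEventABC):
    path: str  # only the data needed to build the downstream message

    def cli_msg(self) -> str:
        return f"Wrote manifest to {self.path}"


# fire_event appends to EVENT_HISTORY and routes the message to the logger.
fire_event(ManifestWrittenToDisk(path="target/manifest.json"))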
30  core/dbt/events/functions.py  Normal file
@@ -0,0 +1,30 @@
import dbt.logger as logger  # type: ignore # TODO eventually remove dependency on this logger
from dbt.events.history import EVENT_HISTORY
from dbt.events.types import CliEventABC, Event


# top-level method for accessing the new eventing system
# this is where all the side effects happen branched by event type
# (i.e. - mutating the event history, printing to stdout, logging
# to files, etc.)
def fire_event(e: Event) -> None:
    EVENT_HISTORY.append(e)
    if isinstance(e, CliEventABC):
        if e.level_tag() == 'test':
            # TODO after implementing #3977 send to new test level
            logger.GLOBAL_LOGGER.debug(logger.timestamped_line(e.cli_msg()))
        elif e.level_tag() == 'debug':
            logger.GLOBAL_LOGGER.debug(logger.timestamped_line(e.cli_msg()))
        elif e.level_tag() == 'info':
            logger.GLOBAL_LOGGER.info(logger.timestamped_line(e.cli_msg()))
        elif e.level_tag() == 'warn':
            logger.GLOBAL_LOGGER.warning(logger.timestamped_line(e.cli_msg()))
        elif e.level_tag() == 'error':
            logger.GLOBAL_LOGGER.error(logger.timestamped_line(e.cli_msg()))
        elif e.level_tag() == 'exception':
            logger.GLOBAL_LOGGER.exception(logger.timestamped_line(e.cli_msg()))
        else:
            raise AssertionError(
                f"Event type {type(e).__name__} has unhandled level: {e.level_tag()}"
            )
7  core/dbt/events/history.py  Normal file
@@ -0,0 +1,7 @@
|
||||
from dbt.events.types import Event
|
||||
from typing import List
|
||||
|
||||
|
||||
# the global history of events for this session
|
||||
# TODO this is naive and the memory footprint is likely far too large.
|
||||
EVENT_HISTORY: List[Event] = []
|
||||
147  core/dbt/events/types.py  Normal file
@@ -0,0 +1,147 @@
|
||||
from abc import ABCMeta, abstractmethod
|
||||
from dataclasses import dataclass
|
||||
|
||||
|
||||
# types to represent log levels
|
||||
|
||||
# in preparation for #3977
|
||||
class TestLevel():
|
||||
def level_tag(self) -> str:
|
||||
return "test"
|
||||
|
||||
|
||||
class DebugLevel():
|
||||
def level_tag(self) -> str:
|
||||
return "debug"
|
||||
|
||||
|
||||
class InfoLevel():
|
||||
def level_tag(self) -> str:
|
||||
return "info"
|
||||
|
||||
|
||||
class WarnLevel():
|
||||
def level_tag(self) -> str:
|
||||
return "warn"
|
||||
|
||||
|
||||
class ErrorLevel():
|
||||
def level_tag(self) -> str:
|
||||
return "error"
|
||||
|
||||
|
||||
class ExceptionLevel():
|
||||
def level_tag(self) -> str:
|
||||
return "exception"
|
||||
|
||||
|
||||
# The following classes represent the data necessary to describe a
|
||||
# particular event to both human readable logs, and machine reliable
|
||||
# event streams. classes extend superclasses that indicate what
|
||||
# destinations they are intended for, which mypy uses to enforce
|
||||
# that the necessary methods are defined.
|
||||
|
||||
|
||||
# top-level superclass for all events
|
||||
class Event(metaclass=ABCMeta):
|
||||
# do not define this yourself. inherit it from one of the above level types.
|
||||
@abstractmethod
|
||||
def level_tag(self) -> str:
|
||||
raise Exception("level_tag not implemented for event")
|
||||
|
||||
|
||||
class CliEventABC(Event, metaclass=ABCMeta):
|
||||
# Solely the human readable message. Timestamps and formatting will be added by the logger.
|
||||
@abstractmethod
|
||||
def cli_msg(self) -> str:
|
||||
raise Exception("cli_msg not implemented for cli event")
|
||||
|
||||
|
||||
class ParsingStart(InfoLevel, CliEventABC):
|
||||
def cli_msg(self) -> str:
|
||||
return "Start parsing."
|
||||
|
||||
|
||||
class ParsingCompiling(InfoLevel, CliEventABC):
|
||||
def cli_msg(self) -> str:
|
||||
return "Compiling."
|
||||
|
||||
|
||||
class ParsingWritingManifest(InfoLevel, CliEventABC):
|
||||
def cli_msg(self) -> str:
|
||||
return "Writing manifest."
|
||||
|
||||
|
||||
class ParsingDone(InfoLevel, CliEventABC):
|
||||
def cli_msg(self) -> str:
|
||||
return "Done."
|
||||
|
||||
|
||||
class ManifestDependenciesLoaded(InfoLevel, CliEventABC):
|
||||
def cli_msg(self) -> str:
|
||||
return "Dependencies loaded"
|
||||
|
||||
|
||||
class ManifestLoaderCreated(InfoLevel, CliEventABC):
|
||||
def cli_msg(self) -> str:
|
||||
return "ManifestLoader created"
|
||||
|
||||
|
||||
class ManifestLoaded(InfoLevel, CliEventABC):
|
||||
def cli_msg(self) -> str:
|
||||
return "Manifest loaded"
|
||||
|
||||
|
||||
class ManifestChecked(InfoLevel, CliEventABC):
|
||||
def cli_msg(self) -> str:
|
||||
return "Manifest checked"
|
||||
|
||||
|
||||
class ManifestFlatGraphBuilt(InfoLevel, CliEventABC):
|
||||
def cli_msg(self) -> str:
|
||||
return "Flat graph built"
|
||||
|
||||
|
||||
@dataclass
|
||||
class ReportPerformancePath(InfoLevel, CliEventABC):
|
||||
path: str
|
||||
|
||||
def cli_msg(self) -> str:
|
||||
return f"Performance info: {self.path}"
|
||||
|
||||
|
||||
@dataclass
|
||||
class MacroEventInfo(InfoLevel, CliEventABC):
|
||||
msg: str
|
||||
|
||||
def cli_msg(self) -> str:
|
||||
return self.msg
|
||||
|
||||
|
||||
@dataclass
|
||||
class MacroEventDebug(DebugLevel, CliEventABC):
|
||||
msg: str
|
||||
|
||||
def cli_msg(self) -> str:
|
||||
return self.msg
|
||||
|
||||
|
||||
# since mypy doesn't run on every file we need to suggest to mypy that every
|
||||
# class gets instantiated. But we don't actually want to run this code.
|
||||
# making the conditional `if False` causes mypy to skip it as dead code so
|
||||
# we need to skirt around that by computing something it doesn't check statically.
|
||||
#
|
||||
# TODO remove these lines once we run mypy everywhere.
|
||||
if 1 == 0:
|
||||
ParsingStart()
|
||||
ParsingCompiling()
|
||||
ParsingWritingManifest()
|
||||
ParsingDone()
|
||||
ManifestDependenciesLoaded()
|
||||
ManifestLoaderCreated()
|
||||
ManifestLoaded()
|
||||
ManifestChecked()
|
||||
ManifestFlatGraphBuilt()
|
||||
ReportPerformancePath(path='')
|
||||
MacroEventInfo(msg='')
|
||||
MacroEventDebug(msg='')
|
||||
@@ -5,7 +5,7 @@ from typing import NoReturn, Optional, Mapping, Any
|
||||
from dbt.logger import GLOBAL_LOGGER as logger
|
||||
from dbt.node_types import NodeType
|
||||
from dbt import flags
|
||||
from dbt.ui import line_wrap_message
|
||||
from dbt.ui import line_wrap_message, warning_tag
|
||||
|
||||
import dbt.dataclass_schema
|
||||
|
||||
@@ -466,6 +466,15 @@ def invalid_type_error(method_name, arg_name, got_value, expected_type,
|
||||
got_value=got_value, got_type=got_type))
|
||||
|
||||
|
||||
def invalid_bool_error(got_value, macro_name) -> NoReturn:
|
||||
"""Raise a CompilationException when an macro expects a boolean but gets some
|
||||
other value.
|
||||
"""
|
||||
msg = ("Macro '{macro_name}' returns '{got_value}'. It is not type 'bool' "
|
||||
"and cannot not be converted reliably to a bool.")
|
||||
raise_compiler_error(msg.format(macro_name=macro_name, got_value=got_value))
|
||||
|
||||
|
||||
def ref_invalid_args(model, args) -> NoReturn:
|
||||
raise_compiler_error(
|
||||
"ref() takes at most two arguments ({} given)".format(len(args)),
|
||||
@@ -606,14 +615,6 @@ def source_target_not_found(
|
||||
raise_compiler_error(msg, model)
|
||||
|
||||
|
||||
def ref_disabled_dependency(model, target_model):
|
||||
raise_compiler_error(
|
||||
"Model '{}' depends on model '{}' which is disabled in "
|
||||
"the project config".format(model.unique_id,
|
||||
target_model.unique_id),
|
||||
model)
|
||||
|
||||
|
||||
def dependency_not_found(model, target_model_name):
|
||||
raise_compiler_error(
|
||||
"'{}' depends on '{}' which is not in the graph!"
|
||||
@@ -628,6 +629,20 @@ def macro_not_found(model, target_macro_id):
|
||||
.format(model.unique_id, target_macro_id))
|
||||
|
||||
|
||||
def macro_invalid_dispatch_arg(macro_name) -> NoReturn:
|
||||
msg = '''\
|
||||
The "packages" argument of adapter.dispatch() has been deprecated.
|
||||
Use the "macro_namespace" argument instead.
|
||||
|
||||
Raised during dispatch for: {}
|
||||
|
||||
For more information, see:
|
||||
|
||||
https://docs.getdbt.com/reference/dbt-jinja-functions/dispatch
|
||||
'''
|
||||
raise_compiler_error(msg.format(macro_name))
|
||||
|
||||
|
||||
def materialization_not_available(model, adapter_type):
|
||||
materialization = model.get_materialization()
|
||||
|
||||
@@ -674,6 +689,14 @@ def missing_relation(relation, model=None):
|
||||
model)
|
||||
|
||||
|
||||
def raise_dataclass_not_dict(obj):
|
||||
msg = (
|
||||
'The object ("{obj}") was used as a dictionary. This '
|
||||
'capability has been removed from objects of this type.'
|
||||
)
|
||||
raise_compiler_error(msg.format(obj=obj))
|
||||
|
||||
|
||||
def relation_wrong_type(relation, expected_type, model=None):
|
||||
raise_compiler_error(
|
||||
('Trying to create {expected_type} {relation}, '
|
||||
@@ -710,11 +733,11 @@ def system_error(operation_name):
|
||||
raise_compiler_error(
|
||||
"dbt encountered an error when attempting to {}. "
|
||||
"If this error persists, please create an issue at: \n\n"
|
||||
"https://github.com/fishtown-analytics/dbt"
|
||||
"https://github.com/dbt-labs/dbt-core"
|
||||
.format(operation_name))
|
||||
|
||||
|
||||
class RegistryException(Exception):
|
||||
class ConnectionException(Exception):
|
||||
pass
|
||||
|
||||
|
||||
@@ -916,22 +939,17 @@ def raise_unrecognized_credentials_type(typename, supported_types):
|
||||
)
|
||||
|
||||
|
||||
def raise_invalid_patch(
|
||||
node, patch_section: str, patch_path: str,
|
||||
) -> NoReturn:
|
||||
def warn_invalid_patch(patch, resource_type):
|
||||
msg = line_wrap_message(
|
||||
f'''\
|
||||
'{node.name}' is a {node.resource_type} node, but it is
|
||||
specified in the {patch_section} section of
|
||||
{patch_path}.
|
||||
|
||||
|
||||
|
||||
To fix this error, place the `{node.name}`
|
||||
specification under the {node.resource_type.pluralize()} key instead.
|
||||
'{patch.name}' is a {resource_type} node, but it is
|
||||
specified in the {patch.yaml_key} section of
|
||||
{patch.original_file_path}.
|
||||
To fix this error, place the `{patch.name}`
|
||||
specification under the {resource_type.pluralize()} key instead.
|
||||
'''
|
||||
)
|
||||
raise_compiler_error(msg, node)
|
||||
warn_or_error(msg, log_fmt=warning_tag('{}'))
|
||||
|
||||
|
||||
def raise_not_implemented(msg):
|
||||
@@ -993,6 +1011,7 @@ CONTEXT_EXPORTS = {
|
||||
raise_ambiguous_alias,
|
||||
raise_ambiguous_catalog_match,
|
||||
raise_cache_inconsistent,
|
||||
raise_dataclass_not_dict,
|
||||
raise_compiler_error,
|
||||
raise_database_error,
|
||||
raise_dep_not_found,
|
||||
|
||||
@@ -6,18 +6,52 @@ if os.name != 'nt':
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
# initially all flags are set to None, the on-load call of reset() will set
|
||||
# them for their first time.
|
||||
STRICT_MODE = None
|
||||
FULL_REFRESH = None
|
||||
USE_CACHE = None
|
||||
WARN_ERROR = None
|
||||
TEST_NEW_PARSER = None
|
||||
# PROFILES_DIR must be set before the other flags
|
||||
# It also gets set in main.py and in set_from_args because the rpc server
|
||||
# doesn't go through exactly the same main arg processing.
|
||||
DEFAULT_PROFILES_DIR = os.path.join(os.path.expanduser('~'), '.dbt')
|
||||
PROFILES_DIR = os.path.expanduser(
|
||||
os.getenv('DBT_PROFILES_DIR', DEFAULT_PROFILES_DIR)
|
||||
)
|
||||
|
||||
STRICT_MODE = False # Only here for backwards compatibility
|
||||
FULL_REFRESH = False # subcommand
|
||||
STORE_FAILURES = False # subcommand
|
||||
GREEDY = None # subcommand
|
||||
|
||||
# Global CLI commands
|
||||
USE_EXPERIMENTAL_PARSER = None
|
||||
STATIC_PARSER = None
|
||||
WARN_ERROR = None
|
||||
WRITE_JSON = None
|
||||
PARTIAL_PARSE = None
|
||||
USE_COLORS = None
|
||||
STORE_FAILURES = None
|
||||
DEBUG = None
|
||||
LOG_FORMAT = None
|
||||
VERSION_CHECK = None
|
||||
FAIL_FAST = None
|
||||
SEND_ANONYMOUS_USAGE_STATS = None
|
||||
PRINTER_WIDTH = 80
|
||||
WHICH = None
|
||||
|
||||
# Global CLI defaults. These flags are set from three places:
|
||||
# CLI args, environment variables, and user_config (profiles.yml).
|
||||
# Environment variables use the pattern 'DBT_{flag name}', like DBT_PROFILES_DIR
|
||||
flag_defaults = {
|
||||
"USE_EXPERIMENTAL_PARSER": False,
|
||||
"STATIC_PARSER": True,
|
||||
"WARN_ERROR": False,
|
||||
"WRITE_JSON": True,
|
||||
"PARTIAL_PARSE": True,
|
||||
"USE_COLORS": True,
|
||||
"PROFILES_DIR": DEFAULT_PROFILES_DIR,
|
||||
"DEBUG": False,
|
||||
"LOG_FORMAT": None,
|
||||
"VERSION_CHECK": True,
|
||||
"FAIL_FAST": False,
|
||||
"SEND_ANONYMOUS_USAGE_STATS": True,
|
||||
"PRINTER_WIDTH": 80
|
||||
}
|
||||
|
||||
|
||||
def env_set_truthy(key: str) -> Optional[str]:
|
||||
@@ -30,6 +64,12 @@ def env_set_truthy(key: str) -> Optional[str]:
|
||||
return value
|
||||
|
||||
|
||||
def env_set_bool(env_value):
|
||||
if env_value in ('1', 't', 'true', 'y', 'yes'):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def env_set_path(key: str) -> Optional[Path]:
|
||||
value = os.getenv(key)
|
||||
if value is None:
|
||||
@@ -38,8 +78,6 @@ def env_set_path(key: str) -> Optional[Path]:
|
||||
return Path(value)
|
||||
|
||||
|
||||
SINGLE_THREADED_WEBSERVER = env_set_truthy('DBT_SINGLE_THREADED_WEBSERVER')
|
||||
SINGLE_THREADED_HANDLER = env_set_truthy('DBT_SINGLE_THREADED_HANDLER')
|
||||
MACRO_DEBUGGING = env_set_truthy('DBT_MACRO_DEBUGGING')
|
||||
DEFER_MODE = env_set_truthy('DBT_DEFER_TO_STATE')
|
||||
ARTIFACT_STATE_PATH = env_set_path('DBT_ARTIFACT_STATE_PATH')
|
||||
@@ -50,56 +88,79 @@ def _get_context():
|
||||
return multiprocessing.get_context('spawn')
|
||||
|
||||
|
||||
# This is not a flag, it's a place to store the lock
|
||||
MP_CONTEXT = _get_context()
|
||||
|
||||
|
||||
def reset():
|
||||
global STRICT_MODE, FULL_REFRESH, USE_CACHE, WARN_ERROR, TEST_NEW_PARSER, \
|
||||
USE_EXPERIMENTAL_PARSER, WRITE_JSON, PARTIAL_PARSE, MP_CONTEXT, USE_COLORS, \
|
||||
STORE_FAILURES
|
||||
|
||||
STRICT_MODE = False
|
||||
FULL_REFRESH = False
|
||||
USE_CACHE = True
|
||||
WARN_ERROR = False
|
||||
TEST_NEW_PARSER = False
|
||||
USE_EXPERIMENTAL_PARSER = False
|
||||
WRITE_JSON = True
|
||||
PARTIAL_PARSE = False
|
||||
MP_CONTEXT = _get_context()
|
||||
USE_COLORS = True
|
||||
STORE_FAILURES = False
|
||||
|
||||
|
||||
def set_from_args(args):
|
||||
global STRICT_MODE, FULL_REFRESH, USE_CACHE, WARN_ERROR, TEST_NEW_PARSER, \
|
||||
USE_EXPERIMENTAL_PARSER, WRITE_JSON, PARTIAL_PARSE, MP_CONTEXT, USE_COLORS, \
|
||||
STORE_FAILURES
|
||||
|
||||
USE_CACHE = getattr(args, 'use_cache', USE_CACHE)
|
||||
def set_from_args(args, user_config):
|
||||
global STRICT_MODE, FULL_REFRESH, WARN_ERROR, \
|
||||
USE_EXPERIMENTAL_PARSER, STATIC_PARSER, WRITE_JSON, PARTIAL_PARSE, \
|
||||
USE_COLORS, STORE_FAILURES, PROFILES_DIR, DEBUG, LOG_FORMAT, GREEDY, \
|
||||
VERSION_CHECK, FAIL_FAST, SEND_ANONYMOUS_USAGE_STATS, PRINTER_WIDTH, \
|
||||
WHICH
|
||||
|
||||
STRICT_MODE = False # backwards compatibility
|
||||
# cli args without user_config or env var option
|
||||
FULL_REFRESH = getattr(args, 'full_refresh', FULL_REFRESH)
|
||||
STRICT_MODE = getattr(args, 'strict', STRICT_MODE)
|
||||
WARN_ERROR = (
|
||||
STRICT_MODE or
|
||||
getattr(args, 'warn_error', STRICT_MODE or WARN_ERROR)
|
||||
)
|
||||
|
||||
TEST_NEW_PARSER = getattr(args, 'test_new_parser', TEST_NEW_PARSER)
|
||||
USE_EXPERIMENTAL_PARSER = getattr(args, 'use_experimental_parser', USE_EXPERIMENTAL_PARSER)
|
||||
WRITE_JSON = getattr(args, 'write_json', WRITE_JSON)
|
||||
PARTIAL_PARSE = getattr(args, 'partial_parse', None)
|
||||
MP_CONTEXT = _get_context()
|
||||
|
||||
# The use_colors attribute will always have a value because it is assigned
|
||||
# None by default from the add_mutually_exclusive_group function
|
||||
use_colors_override = getattr(args, 'use_colors')
|
||||
|
||||
if use_colors_override is not None:
|
||||
USE_COLORS = use_colors_override
|
||||
|
||||
STORE_FAILURES = getattr(args, 'store_failures', STORE_FAILURES)
|
||||
GREEDY = getattr(args, 'greedy', GREEDY)
|
||||
WHICH = getattr(args, 'which', WHICH)
|
||||
|
||||
# global cli flags with env var and user_config alternatives
|
||||
USE_EXPERIMENTAL_PARSER = get_flag_value('USE_EXPERIMENTAL_PARSER', args, user_config)
|
||||
STATIC_PARSER = get_flag_value('STATIC_PARSER', args, user_config)
|
||||
WARN_ERROR = get_flag_value('WARN_ERROR', args, user_config)
|
||||
WRITE_JSON = get_flag_value('WRITE_JSON', args, user_config)
|
||||
PARTIAL_PARSE = get_flag_value('PARTIAL_PARSE', args, user_config)
|
||||
USE_COLORS = get_flag_value('USE_COLORS', args, user_config)
|
||||
PROFILES_DIR = get_flag_value('PROFILES_DIR', args, user_config)
|
||||
DEBUG = get_flag_value('DEBUG', args, user_config)
|
||||
LOG_FORMAT = get_flag_value('LOG_FORMAT', args, user_config)
|
||||
VERSION_CHECK = get_flag_value('VERSION_CHECK', args, user_config)
|
||||
FAIL_FAST = get_flag_value('FAIL_FAST', args, user_config)
|
||||
SEND_ANONYMOUS_USAGE_STATS = get_flag_value('SEND_ANONYMOUS_USAGE_STATS', args, user_config)
|
||||
PRINTER_WIDTH = get_flag_value('PRINTER_WIDTH', args, user_config)
|
||||
|
||||
|
||||
# initialize everything to the defaults on module load
|
||||
reset()
|
||||
def get_flag_value(flag, args, user_config):
|
||||
lc_flag = flag.lower()
|
||||
flag_value = getattr(args, lc_flag, None)
|
||||
if flag_value is None:
|
||||
# Environment variables use pattern 'DBT_{flag name}'
|
||||
env_flag = f"DBT_{flag}"
|
||||
env_value = os.getenv(env_flag)
|
||||
if env_value is not None and env_value != '':
|
||||
env_value = env_value.lower()
|
||||
# non Boolean values
|
||||
if flag in ['LOG_FORMAT', 'PRINTER_WIDTH', 'PROFILES_DIR']:
|
||||
flag_value = env_value
|
||||
else:
|
||||
flag_value = env_set_bool(env_value)
|
||||
elif user_config is not None and getattr(user_config, lc_flag, None) is not None:
|
||||
flag_value = getattr(user_config, lc_flag)
|
||||
else:
|
||||
flag_value = flag_defaults[flag]
|
||||
if flag == 'PRINTER_WIDTH': # printer_width must be an int or it hangs
|
||||
flag_value = int(flag_value)
|
||||
if flag == 'PROFILES_DIR':
|
||||
flag_value = os.path.abspath(flag_value)
|
||||
|
||||
return flag_value
|
||||
|
||||
|
||||
def get_flag_dict():
|
||||
return {
|
||||
"use_experimental_parser": USE_EXPERIMENTAL_PARSER,
|
||||
"static_parser": STATIC_PARSER,
|
||||
"warn_error": WARN_ERROR,
|
||||
"write_json": WRITE_JSON,
|
||||
"partial_parse": PARTIAL_PARSE,
|
||||
"use_colors": USE_COLORS,
|
||||
"profiles_dir": PROFILES_DIR,
|
||||
"debug": DEBUG,
|
||||
"log_format": LOG_FORMAT,
|
||||
"version_check": VERSION_CHECK,
|
||||
"fail_fast": FAIL_FAST,
|
||||
"send_anonymous_usage_stats": SEND_ANONYMOUS_USAGE_STATS,
|
||||
"printer_width": PRINTER_WIDTH,
|
||||
}
|
||||
|
||||
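A small, assumed usage sketch of the precedence that `get_flag_value` encodes (CLI argument, then `DBT_<FLAG>` environment variable, then `user_config`, then `flag_defaults`); the namespace objects below just stand in for parsed args and profiles.yml user config:

import os
from types import SimpleNamespace

from dbt import flags

args = SimpleNamespace(printer_width=None)        # no CLI value supplied
user_config = SimpleNamespace(printer_width=120)  # stand-in for profiles.yml user config

os.environ.pop("DBT_PRINTER_WIDTH", None)
print(flags.get_flag_value("PRINTER_WIDTH", args, user_config))  # 120, from user_config

os.environ["DBT_PRINTER_WIDTH"] = "100"
print(flags.get_flag_value("PRINTER_WIDTH", args, user_config))  # 100, env var wins

args.printer_width = 200
print(flags.get_flag_value("PRINTER_WIDTH", args, user_config))  # 200, CLI arg wins over everything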
@@ -11,7 +11,6 @@ from .selector import ( # noqa: F401
|
||||
)
|
||||
from .cli import ( # noqa: F401
|
||||
parse_difference,
|
||||
parse_test_selectors,
|
||||
parse_from_selectors_definition,
|
||||
)
|
||||
from .queue import GraphQueue # noqa: F401
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
# special support for CLI argument parsing.
|
||||
from dbt import flags
|
||||
import itertools
|
||||
from dbt.clients.yaml_helper import yaml, Loader, Dumper # noqa: F401
|
||||
|
||||
@@ -21,8 +22,6 @@ INTERSECTION_DELIMITER = ','
|
||||
|
||||
DEFAULT_INCLUDES: List[str] = ['fqn:*', 'source:*', 'exposure:*']
|
||||
DEFAULT_EXCLUDES: List[str] = []
|
||||
DATA_TEST_SELECTOR: str = 'test_type:data'
|
||||
SCHEMA_TEST_SELECTOR: str = 'test_type:schema'
|
||||
|
||||
|
||||
def parse_union(
|
||||
@@ -66,42 +65,11 @@ def parse_union_from_default(
|
||||
def parse_difference(
|
||||
include: Optional[List[str]], exclude: Optional[List[str]]
|
||||
) -> SelectionDifference:
|
||||
included = parse_union_from_default(include, DEFAULT_INCLUDES)
|
||||
included = parse_union_from_default(include, DEFAULT_INCLUDES, greedy=bool(flags.GREEDY))
|
||||
excluded = parse_union_from_default(exclude, DEFAULT_EXCLUDES, greedy=True)
|
||||
return SelectionDifference(components=[included, excluded])
|
||||
|
||||
|
||||
def parse_test_selectors(
|
||||
data: bool, schema: bool, base: SelectionSpec
|
||||
) -> SelectionSpec:
|
||||
union_components = []
|
||||
|
||||
if data:
|
||||
union_components.append(
|
||||
SelectionCriteria.from_single_spec(DATA_TEST_SELECTOR)
|
||||
)
|
||||
if schema:
|
||||
union_components.append(
|
||||
SelectionCriteria.from_single_spec(SCHEMA_TEST_SELECTOR)
|
||||
)
|
||||
|
||||
intersect_with: SelectionSpec
|
||||
if not union_components:
|
||||
return base
|
||||
elif len(union_components) == 1:
|
||||
intersect_with = union_components[0]
|
||||
else: # data and schema tests
|
||||
intersect_with = SelectionUnion(
|
||||
components=union_components,
|
||||
expect_exists=True,
|
||||
raw=[DATA_TEST_SELECTOR, SCHEMA_TEST_SELECTOR],
|
||||
)
|
||||
|
||||
return SelectionIntersection(
|
||||
components=[base, intersect_with], expect_exists=True
|
||||
)
|
||||
|
||||
|
||||
RawDefinition = Union[str, Dict[str, Any]]
|
||||
|
||||
|
||||
@@ -180,7 +148,7 @@ def parse_union_definition(definition: Dict[str, Any]) -> SelectionSpec:
|
||||
union_def_parts = _get_list_dicts(definition, 'union')
|
||||
include, exclude = _parse_include_exclude_subdefs(union_def_parts)
|
||||
|
||||
union = SelectionUnion(components=include)
|
||||
union = SelectionUnion(components=include, greedy_warning=False)
|
||||
|
||||
if exclude is None:
|
||||
union.raw = definition
|
||||
@@ -188,7 +156,8 @@ def parse_union_definition(definition: Dict[str, Any]) -> SelectionSpec:
|
||||
else:
|
||||
return SelectionDifference(
|
||||
components=[union, exclude],
|
||||
raw=definition
|
||||
raw=definition,
|
||||
greedy_warning=False
|
||||
)
|
||||
|
||||
|
||||
@@ -197,7 +166,7 @@ def parse_intersection_definition(
|
||||
) -> SelectionSpec:
|
||||
intersection_def_parts = _get_list_dicts(definition, 'intersection')
|
||||
include, exclude = _parse_include_exclude_subdefs(intersection_def_parts)
|
||||
intersection = SelectionIntersection(components=include)
|
||||
intersection = SelectionIntersection(components=include, greedy_warning=False)
|
||||
|
||||
if exclude is None:
|
||||
intersection.raw = definition
|
||||
@@ -205,7 +174,8 @@ def parse_intersection_definition(
|
||||
else:
|
||||
return SelectionDifference(
|
||||
components=[intersection, exclude],
|
||||
raw=definition
|
||||
raw=definition,
|
||||
greedy_warning=False
|
||||
)
|
||||
|
||||
|
||||
@@ -239,7 +209,7 @@ def parse_dict_definition(definition: Dict[str, Any]) -> SelectionSpec:
|
||||
if diff_arg is None:
|
||||
return base
|
||||
else:
|
||||
return SelectionDifference(components=[base, diff_arg])
|
||||
return SelectionDifference(components=[base, diff_arg], greedy_warning=False)
|
||||
|
||||
|
||||
def parse_from_definition(
|
||||
@@ -271,10 +241,12 @@ def parse_from_definition(
|
||||
|
||||
def parse_from_selectors_definition(
|
||||
source: SelectorFile
|
||||
) -> Dict[str, SelectionSpec]:
|
||||
result: Dict[str, SelectionSpec] = {}
|
||||
) -> Dict[str, Dict[str, Union[SelectionSpec, bool]]]:
|
||||
result: Dict[str, Dict[str, Union[SelectionSpec, bool]]] = {}
|
||||
selector: SelectorDefinition
|
||||
for selector in source.selectors:
|
||||
result[selector.name] = parse_from_definition(selector.definition,
|
||||
rootlevel=True)
|
||||
result[selector.name] = {
|
||||
"default": selector.default,
|
||||
"definition": parse_from_definition(selector.definition, rootlevel=True)
|
||||
}
|
||||
return result
|
||||
|
||||
@@ -1,10 +1,8 @@
|
||||
import threading
|
||||
from queue import PriorityQueue
|
||||
from typing import (
|
||||
Dict, Set, Optional
|
||||
)
|
||||
|
||||
import networkx as nx # type: ignore
|
||||
import threading
|
||||
|
||||
from queue import PriorityQueue
|
||||
from typing import Dict, Set, List, Generator, Optional
|
||||
|
||||
from .graph import UniqueId
|
||||
from dbt.contracts.graph.parsed import ParsedSourceDefinition, ParsedExposure
|
||||
@@ -21,9 +19,8 @@ class GraphQueue:
|
||||
that separate threads do not call `.empty()` or `__len__()` and `.get()` at
|
||||
the same time, as there is an unlocked race!
|
||||
"""
|
||||
def __init__(
|
||||
self, graph: nx.DiGraph, manifest: Manifest, selected: Set[UniqueId]
|
||||
):
|
||||
|
||||
def __init__(self, graph: nx.DiGraph, manifest: Manifest, selected: Set[UniqueId]):
|
||||
self.graph = graph
|
||||
self.manifest = manifest
|
||||
self._selected = selected
|
||||
@@ -37,7 +34,7 @@ class GraphQueue:
|
||||
# this lock controls most things
|
||||
self.lock = threading.Lock()
|
||||
# store the 'score' of each node as a number. Lower is higher priority.
|
||||
self._scores = self._calculate_scores()
|
||||
self._scores = self._get_scores(self.graph)
|
||||
# populate the initial queue
|
||||
self._find_new_additions()
|
||||
# awaits after task end
|
||||
@@ -56,30 +53,59 @@ class GraphQueue:
|
||||
return False
|
||||
return True
|
||||
|
||||
def _calculate_scores(self) -> Dict[UniqueId, int]:
|
||||
"""Calculate the 'value' of each node in the graph based on how many
|
||||
blocking descendants it has. We use this score for the internal
|
||||
priority queue's ordering, so the quality of this metric is important.
|
||||
@staticmethod
|
||||
def _grouped_topological_sort(
|
||||
graph: nx.DiGraph,
|
||||
) -> Generator[List[str], None, None]:
|
||||
"""Topological sort of given graph that groups ties.
|
||||
|
||||
The score is stored as a negative number because the internal
|
||||
PriorityQueue picks lowest values first.
|
||||
Adapted from `nx.topological_sort`, this function returns a topological sort of a graph;
however, instead of arbitrarily ordering ties in the sort order, ties are grouped
together in lists.
|
||||
|
||||
We could do this in one pass over the graph instead of len(self.graph)
|
||||
passes but this is easy. For large graphs this may hurt performance.
|
||||
Args:
|
||||
graph: The graph to be sorted.
|
||||
|
||||
This operates on the graph, so it would require a lock if called from
|
||||
outside __init__.
|
||||
|
||||
:return Dict[str, int]: The score dict, mapping unique IDs to integer
|
||||
scores. Lower scores are higher priority.
|
||||
Returns:
|
||||
A generator that yields lists of nodes, one list per graph depth level.
|
||||
"""
|
||||
indegree_map = {v: d for v, d in graph.in_degree() if d > 0}
|
||||
zero_indegree = [v for v, d in graph.in_degree() if d == 0]
|
||||
|
||||
while zero_indegree:
|
||||
yield zero_indegree
|
||||
new_zero_indegree = []
|
||||
for v in zero_indegree:
|
||||
for _, child in graph.edges(v):
|
||||
indegree_map[child] -= 1
|
||||
if not indegree_map[child]:
|
||||
new_zero_indegree.append(child)
|
||||
zero_indegree = new_zero_indegree
|
||||
|
||||
def _get_scores(self, graph: nx.DiGraph) -> Dict[str, int]:
|
||||
"""Scoring nodes for processing order.
|
||||
|
||||
Scores are calculated by the graph depth level. Lowest score (0) should be processed first.
|
||||
|
||||
Args:
|
||||
graph: The graph to be scored.
|
||||
|
||||
Returns:
|
||||
A dictionary consisting of `node name`:`score` pairs.
|
||||
"""
|
||||
# split graph by connected subgraphs
|
||||
subgraphs = (
|
||||
graph.subgraph(x) for x in nx.connected_components(nx.Graph(graph))
|
||||
)
|
||||
|
||||
# score all nodes in all subgraphs
|
||||
scores = {}
|
||||
for node in self.graph.nodes():
|
||||
score = -1 * len([
|
||||
d for d in nx.descendants(self.graph, node)
|
||||
if self._include_in_cost(d)
|
||||
])
|
||||
scores[node] = score
|
||||
for subgraph in subgraphs:
|
||||
grouped_nodes = self._grouped_topological_sort(subgraph)
|
||||
for level, group in enumerate(grouped_nodes):
|
||||
for node in group:
|
||||
scores[node] = level
|
||||
|
||||
return scores
|
||||
|
||||
def get(
|
||||
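To make the scoring concrete, here is a self-contained sketch (the tiny graph is invented) of the grouped topological sort: each yielded list is one depth level, and every node in that level receives the level index as its score.

import networkx as nx  # type: ignore


def grouped_topological_sort(graph: nx.DiGraph):
    # Same approach as GraphQueue._grouped_topological_sort: peel off all
    # zero-in-degree nodes at once so ties end up grouped in one list.
    indegree_map = {v: d for v, d in graph.in_degree() if d > 0}
    zero_indegree = [v for v, d in graph.in_degree() if d == 0]
    while zero_indegree:
        yield zero_indegree
        new_zero_indegree = []
        for v in zero_indegree:
            for _, child in graph.edges(v):
                indegree_map[child] -= 1
                if not indegree_map[child]:
                    new_zero_indegree.append(child)
        zero_indegree = new_zero_indegree


g = nx.DiGraph([
    ("model.a", "model.b"),
    ("model.a", "model.c"),
    ("model.b", "model.d"),
    ("model.c", "model.d"),
])
levels = list(grouped_topological_sort(g))  # [['model.a'], ['model.b', 'model.c'], ['model.d']]
scores = {node: level for level, group in enumerate(levels) for node in group}
print(scores)  # {'model.a': 0, 'model.b': 1, 'model.c': 1, 'model.d': 2}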
@@ -133,8 +159,6 @@ class GraphQueue:
|
||||
def _find_new_additions(self) -> None:
|
||||
"""Find any nodes in the graph that need to be added to the internal
|
||||
queue and add them.
|
||||
|
||||
Callers must hold the lock.
|
||||
"""
|
||||
for node, in_degree in self.graph.in_degree():
|
||||
if not self._already_known(node) and in_degree == 0:
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
|
||||
from typing import Set, List, Optional, Tuple
|
||||
|
||||
from .graph import Graph, UniqueId
|
||||
@@ -30,6 +29,24 @@ def alert_non_existence(raw_spec, nodes):
|
||||
)
|
||||
|
||||
|
||||
def alert_unused_nodes(raw_spec, node_names):
|
||||
summary_nodes_str = ("\n - ").join(node_names[:3])
|
||||
debug_nodes_str = ("\n - ").join(node_names)
|
||||
and_more_str = f"\n - and {len(node_names) - 3} more" if len(node_names) > 4 else ""
|
||||
summary_msg = (
|
||||
f"\nSome tests were excluded because at least one parent is not selected. "
|
||||
f"Use the --greedy flag to include them."
|
||||
f"\n - {summary_nodes_str}{and_more_str}"
|
||||
)
|
||||
logger.info(summary_msg)
|
||||
if len(node_names) > 4:
|
||||
debug_msg = (
|
||||
f"Full list of tests that were excluded:"
|
||||
f"\n - {debug_nodes_str}"
|
||||
)
|
||||
logger.debug(debug_msg)
|
||||
|
||||
|
||||
def can_select_indirectly(node):
|
||||
"""If a node is not selected itself, but its parent(s) are, it may qualify
|
||||
for indirect selection.
|
||||
@@ -151,16 +168,16 @@ class NodeSelector(MethodManager):
|
||||
|
||||
return direct_nodes, indirect_nodes
|
||||
|
||||
def select_nodes(self, spec: SelectionSpec) -> Set[UniqueId]:
|
||||
def select_nodes(self, spec: SelectionSpec) -> Tuple[Set[UniqueId], Set[UniqueId]]:
|
||||
"""Select the nodes in the graph according to the spec.
|
||||
|
||||
This is the main point of entry for turning a spec into a set of nodes:
|
||||
- Recurse through spec, select by criteria, combine by set operation
|
||||
- Return final (unfiltered) selection set
|
||||
"""
|
||||
|
||||
direct_nodes, indirect_nodes = self.select_nodes_recursively(spec)
|
||||
return direct_nodes
|
||||
indirect_only = indirect_nodes.difference(direct_nodes)
|
||||
return direct_nodes, indirect_only
|
||||
|
||||
def _is_graph_member(self, unique_id: UniqueId) -> bool:
|
||||
if unique_id in self.manifest.sources:
|
||||
@@ -213,6 +230,8 @@ class NodeSelector(MethodManager):
|
||||
# - If ANY parent is missing, return it separately. We'll keep it around
|
||||
# for later and see if its other parents show up.
|
||||
# We use this for INCLUSION.
|
||||
# Users can also opt in to inclusive GREEDY mode by passing --greedy flag,
|
||||
# or by specifying `greedy: true` in a yaml selector
|
||||
|
||||
direct_nodes = set(selected)
|
||||
indirect_nodes = set()
|
||||
@@ -251,15 +270,24 @@ class NodeSelector(MethodManager):
|
||||
|
||||
- node selection. Based on the include/exclude sets, the set
|
||||
of matched unique IDs is returned
|
||||
- expand the graph at each leaf node, before combination
|
||||
- selectors might override this. for example, this is where
|
||||
tests are added
|
||||
- includes direct + indirect selection (for tests)
|
||||
- filtering:
|
||||
- selectors can filter the nodes after all of them have been
|
||||
selected
|
||||
"""
|
||||
selected_nodes = self.select_nodes(spec)
|
||||
selected_nodes, indirect_only = self.select_nodes(spec)
|
||||
filtered_nodes = self.filter_selection(selected_nodes)
|
||||
|
||||
if indirect_only:
|
||||
filtered_unused_nodes = self.filter_selection(indirect_only)
|
||||
if filtered_unused_nodes and spec.greedy_warning:
|
||||
# log anything that didn't make the cut
|
||||
unused_node_names = []
|
||||
for unique_id in filtered_unused_nodes:
|
||||
name = self.manifest.nodes[unique_id].name
|
||||
unused_node_names.append(name)
|
||||
alert_unused_nodes(spec, unused_node_names)
|
||||
|
||||
return filtered_nodes
|
||||
|
||||
def get_graph_queue(self, spec: SelectionSpec) -> GraphQueue:
|
||||
|
||||
@@ -8,27 +8,25 @@ from dbt.dataclass_schema import StrEnum
|
||||
from .graph import UniqueId
|
||||
|
||||
from dbt.contracts.graph.compiled import (
|
||||
CompiledDataTestNode,
|
||||
CompiledSchemaTestNode,
|
||||
CompiledSingularTestNode,
|
||||
CompiledGenericTestNode,
|
||||
CompileResultNode,
|
||||
ManifestNode,
|
||||
)
|
||||
from dbt.contracts.graph.manifest import Manifest, WritableManifest
|
||||
from dbt.contracts.graph.parsed import (
|
||||
HasTestMetadata,
|
||||
ParsedDataTestNode,
|
||||
ParsedSingularTestNode,
|
||||
ParsedExposure,
|
||||
ParsedSchemaTestNode,
|
||||
ParsedGenericTestNode,
|
||||
ParsedSourceDefinition,
|
||||
)
|
||||
from dbt.contracts.state import PreviousState
|
||||
from dbt.logger import GLOBAL_LOGGER as logger
|
||||
from dbt.exceptions import (
|
||||
InternalException,
|
||||
RuntimeException,
|
||||
)
|
||||
from dbt.node_types import NodeType
|
||||
from dbt.ui import warning_tag
|
||||
|
||||
|
||||
SELECTOR_GLOB = '*'
|
||||
@@ -47,6 +45,7 @@ class MethodName(StrEnum):
|
||||
ResourceType = 'resource_type'
|
||||
State = 'state'
|
||||
Exposure = 'exposure'
|
||||
Result = 'result'
|
||||
|
||||
|
||||
def is_selected_node(fqn: List[str], node_selector: str):
|
||||
@@ -363,14 +362,15 @@ class TestTypeSelectorMethod(SelectorMethod):
|
||||
self, included_nodes: Set[UniqueId], selector: str
|
||||
) -> Iterator[UniqueId]:
|
||||
search_types: Tuple[Type, ...]
|
||||
if selector == 'schema':
|
||||
search_types = (ParsedSchemaTestNode, CompiledSchemaTestNode)
|
||||
elif selector == 'data':
|
||||
search_types = (ParsedDataTestNode, CompiledDataTestNode)
|
||||
# continue supporting 'schema' + 'data' for backwards compatibility
|
||||
if selector in ('generic', 'schema'):
|
||||
search_types = (ParsedGenericTestNode, CompiledGenericTestNode)
|
||||
elif selector in ('singular', 'data'):
|
||||
search_types = (ParsedSingularTestNode, CompiledSingularTestNode)
|
||||
else:
|
||||
raise RuntimeException(
|
||||
f'Invalid test type selector {selector}: expected "data" or '
|
||||
'"schema"'
|
||||
f'Invalid test type selector {selector}: expected "generic" or '
|
||||
'"singular"'
|
||||
)
|
||||
|
||||
for node, real_node in self.parsed_nodes(included_nodes):
|
||||
@@ -381,7 +381,7 @@ class TestTypeSelectorMethod(SelectorMethod):
|
||||
class StateSelectorMethod(SelectorMethod):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.macros_were_modified: Optional[List[str]] = None
|
||||
self.modified_macros: Optional[List[str]] = None
|
||||
|
||||
def _macros_modified(self) -> List[str]:
|
||||
# we checked in the caller!
|
||||
@@ -394,44 +394,85 @@ class StateSelectorMethod(SelectorMethod):
|
||||
|
||||
modified = []
|
||||
for uid, macro in new_macros.items():
|
||||
name = f'{macro.package_name}.{macro.name}'
|
||||
if uid in old_macros:
|
||||
old_macro = old_macros[uid]
|
||||
if macro.macro_sql != old_macro.macro_sql:
|
||||
modified.append(f'{name} changed')
|
||||
modified.append(uid)
|
||||
else:
|
||||
modified.append(f'{name} added')
|
||||
modified.append(uid)
|
||||
|
||||
for uid, macro in old_macros.items():
|
||||
if uid not in new_macros:
|
||||
modified.append(f'{macro.package_name}.{macro.name} removed')
|
||||
modified.append(uid)
|
||||
|
||||
return modified[:3]
|
||||
return modified
|
||||
|
||||
def check_modified(
|
||||
self,
|
||||
old: Optional[SelectorTarget],
|
||||
new: SelectorTarget,
|
||||
def recursively_check_macros_modified(self, node, previous_macros):
|
||||
# loop through all macros that this node depends on
|
||||
for macro_uid in node.depends_on.macros:
|
||||
# avoid infinite recursion if we've already seen this macro
|
||||
if macro_uid in previous_macros:
|
||||
continue
|
||||
previous_macros.append(macro_uid)
|
||||
# is this macro one of the modified macros?
|
||||
if macro_uid in self.modified_macros:
|
||||
return True
|
||||
# if not, and this macro depends on other macros, keep looping
|
||||
macro_node = self.manifest.macros[macro_uid]
|
||||
if len(macro_node.depends_on.macros) > 0:
|
||||
return self.recursively_check_macros_modified(macro_node, previous_macros)
|
||||
else:
|
||||
return False
|
||||
|
||||
def check_macros_modified(self, node):
|
||||
# check if there are any changes in macros the first time
|
||||
if self.modified_macros is None:
|
||||
self.modified_macros = self._macros_modified()
|
||||
# no macros have been modified, skip looping entirely
|
||||
if not self.modified_macros:
|
||||
return False
|
||||
# recursively loop through upstream macros to see if any is modified
|
||||
else:
|
||||
previous_macros = []
|
||||
return self.recursively_check_macros_modified(node, previous_macros)
|
||||
|
||||
def check_modified(self, old: Optional[SelectorTarget], new: SelectorTarget) -> bool:
|
||||
different_contents = not new.same_contents(old) # type: ignore
|
||||
upstream_macro_change = self.check_macros_modified(new)
|
||||
return different_contents or upstream_macro_change
|
||||
|
||||
def check_modified_body(self, old: Optional[SelectorTarget], new: SelectorTarget) -> bool:
|
||||
if hasattr(new, "same_body"):
|
||||
return not new.same_body(old) # type: ignore
|
||||
else:
|
||||
return False
|
||||
|
||||
def check_modified_configs(self, old: Optional[SelectorTarget], new: SelectorTarget) -> bool:
|
||||
if hasattr(new, "same_config"):
|
||||
return not new.same_config(old) # type: ignore
|
||||
else:
|
||||
return False
|
||||
|
||||
def check_modified_persisted_descriptions(
|
||||
self, old: Optional[SelectorTarget], new: SelectorTarget
|
||||
) -> bool:
|
||||
# check if there are any changes in macros, if so, log a warning the
|
||||
# first time
|
||||
if self.macros_were_modified is None:
|
||||
self.macros_were_modified = self._macros_modified()
|
||||
if self.macros_were_modified:
|
||||
log_str = ', '.join(self.macros_were_modified)
|
||||
logger.warning(warning_tag(
|
||||
f'During a state comparison, dbt detected a change in '
|
||||
f'macros. This will not be marked as a modification. Some '
|
||||
f'macros: {log_str}'
|
||||
))
|
||||
if hasattr(new, "same_persisted_description"):
|
||||
return not new.same_persisted_description(old) # type: ignore
|
||||
else:
|
||||
return False
|
||||
|
||||
return not new.same_contents(old) # type: ignore
|
||||
|
||||
def check_new(
|
||||
self,
|
||||
old: Optional[SelectorTarget],
|
||||
new: SelectorTarget,
|
||||
def check_modified_relation(
|
||||
self, old: Optional[SelectorTarget], new: SelectorTarget
|
||||
) -> bool:
|
||||
if hasattr(new, "same_database_representation"):
|
||||
return not new.same_database_representation(old) # type: ignore
|
||||
else:
|
||||
return False
|
||||
|
||||
def check_modified_macros(self, _, new: SelectorTarget) -> bool:
|
||||
return self.check_macros_modified(new)
|
||||
|
||||
def check_new(self, old: Optional[SelectorTarget], new: SelectorTarget) -> bool:
|
||||
return old is None
|
||||
|
||||
def search(
|
||||
@@ -443,8 +484,15 @@ class StateSelectorMethod(SelectorMethod):
|
||||
)
|
||||
|
||||
state_checks = {
|
||||
# it's new if there is no old version
|
||||
'new': lambda old, _: old is None,
|
||||
# use methods defined above to compare properties of old + new
|
||||
'modified': self.check_modified,
|
||||
'new': self.check_new,
|
||||
'modified.body': self.check_modified_body,
|
||||
'modified.configs': self.check_modified_configs,
|
||||
'modified.persisted_descriptions': self.check_modified_persisted_descriptions,
|
||||
'modified.relation': self.check_modified_relation,
|
||||
'modified.macros': self.check_modified_macros,
|
||||
}
|
||||
if selector in state_checks:
|
||||
checker = state_checks[selector]
|
||||
@@ -469,6 +517,23 @@ class StateSelectorMethod(SelectorMethod):
|
||||
yield node
|
||||
|
||||
|
||||
class ResultSelectorMethod(SelectorMethod):
|
||||
def search(
|
||||
self, included_nodes: Set[UniqueId], selector: str
|
||||
) -> Iterator[UniqueId]:
|
||||
if self.previous_state is None or self.previous_state.results is None:
|
||||
raise InternalException(
|
||||
'No comparison run_results'
|
||||
)
|
||||
matches = set(
|
||||
result.unique_id for result in self.previous_state.results
|
||||
if result.status == selector
|
||||
)
|
||||
for node, real_node in self.all_nodes(included_nodes):
|
||||
if node in matches:
|
||||
yield node
|
||||
|
||||
|
||||
class MethodManager:
|
||||
SELECTOR_METHODS: Dict[MethodName, Type[SelectorMethod]] = {
|
||||
MethodName.FQN: QualifiedNameSelectorMethod,
|
||||
@@ -481,6 +546,7 @@ class MethodManager:
|
||||
MethodName.TestType: TestTypeSelectorMethod,
|
||||
MethodName.State: StateSelectorMethod,
|
||||
MethodName.Exposure: ExposureSelectorMethod,
|
||||
MethodName.Result: ResultSelectorMethod,
|
||||
}
|
||||
|
||||
def __init__(
|
||||
|
||||
@@ -67,6 +67,7 @@ class SelectionCriteria:
|
||||
children: bool
|
||||
children_depth: Optional[int]
|
||||
greedy: bool = False
|
||||
greedy_warning: bool = False # do not raise warning for yaml selectors
|
||||
|
||||
def __post_init__(self):
|
||||
if self.children and self.childrens_parents:
|
||||
@@ -124,11 +125,11 @@ class SelectionCriteria:
|
||||
parents_depth=parents_depth,
|
||||
children=bool(dct.get('children')),
|
||||
children_depth=children_depth,
|
||||
greedy=greedy
|
||||
greedy=(greedy or bool(dct.get('greedy'))),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def dict_from_single_spec(cls, raw: str, greedy: bool = False):
|
||||
def dict_from_single_spec(cls, raw: str):
|
||||
result = RAW_SELECTOR_PATTERN.match(raw)
|
||||
if result is None:
|
||||
return {'error': 'Invalid selector spec'}
|
||||
@@ -145,6 +146,8 @@ class SelectionCriteria:
|
||||
dct['parents'] = bool(dct.get('parents'))
|
||||
if 'children' in dct:
|
||||
dct['children'] = bool(dct.get('children'))
|
||||
if 'greedy' in dct:
|
||||
dct['greedy'] = bool(dct.get('greedy'))
|
||||
return dct
|
||||
|
||||
@classmethod
|
||||
@@ -162,10 +165,12 @@ class BaseSelectionGroup(Iterable[SelectionSpec], metaclass=ABCMeta):
|
||||
self,
|
||||
components: Iterable[SelectionSpec],
|
||||
expect_exists: bool = False,
|
||||
greedy_warning: bool = True,
|
||||
raw: Any = None,
|
||||
):
|
||||
self.components: List[SelectionSpec] = list(components)
|
||||
self.expect_exists = expect_exists
|
||||
self.greedy_warning = greedy_warning
|
||||
self.raw = raw
|
||||
|
||||
def __iter__(self) -> Iterator[SelectionSpec]:
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{% macro get_columns_in_query(select_sql) -%}
|
||||
{{ return(adapter.dispatch('get_columns_in_query')(select_sql)) }}
|
||||
{{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}
|
||||
{% endmacro %}
|
||||
|
||||
{% macro default__get_columns_in_query(select_sql) %}
|
||||
@@ -15,7 +15,7 @@
|
||||
{% endmacro %}
|
||||
|
||||
{% macro create_schema(relation) -%}
|
||||
{{ adapter.dispatch('create_schema')(relation) }}
|
||||
{{ adapter.dispatch('create_schema', 'dbt')(relation) }}
|
||||
{% endmacro %}
|
||||
|
||||
{% macro default__create_schema(relation) -%}
|
||||
@@ -25,7 +25,7 @@
|
||||
{% endmacro %}
|
||||
|
||||
{% macro drop_schema(relation) -%}
|
||||
{{ adapter.dispatch('drop_schema')(relation) }}
|
||||
{{ adapter.dispatch('drop_schema', 'dbt')(relation) }}
|
||||
{% endmacro %}
|
||||
|
||||
{% macro default__drop_schema(relation) -%}
|
||||
@@ -35,7 +35,7 @@
|
||||
{% endmacro %}
|
||||
|
||||
{% macro create_table_as(temporary, relation, sql) -%}
|
||||
{{ adapter.dispatch('create_table_as')(temporary, relation, sql) }}
|
||||
{{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, sql) }}
|
||||
{%- endmacro %}
|
||||
|
||||
{% macro default__create_table_as(temporary, relation, sql) -%}
|
||||
@@ -52,7 +52,7 @@
|
||||
{% endmacro %}
|
||||
|
||||
{% macro get_create_index_sql(relation, index_dict) -%}
|
||||
{{ return(adapter.dispatch('get_create_index_sql')(relation, index_dict)) }}
|
||||
{{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}
|
||||
{% endmacro %}
|
||||
|
||||
{% macro default__get_create_index_sql(relation, index_dict) -%}
|
||||
@@ -60,7 +60,7 @@
|
||||
{% endmacro %}
|
||||
|
||||
{% macro create_indexes(relation) -%}
|
||||
{{ adapter.dispatch('create_indexes')(relation) }}
|
||||
{{ adapter.dispatch('create_indexes', 'dbt')(relation) }}
|
||||
{%- endmacro %}
|
||||
|
||||
{% macro default__create_indexes(relation) -%}
|
||||
@@ -75,7 +75,7 @@
|
||||
{% endmacro %}
|
||||
|
||||
{% macro create_view_as(relation, sql) -%}
|
||||
{{ adapter.dispatch('create_view_as')(relation, sql) }}
|
||||
{{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}
|
||||
{%- endmacro %}
|
||||
|
||||
{% macro default__create_view_as(relation, sql) -%}
|
||||
@@ -89,7 +89,7 @@
|
||||
|
||||
|
||||
{% macro get_catalog(information_schema, schemas) -%}
|
||||
{{ return(adapter.dispatch('get_catalog')(information_schema, schemas)) }}
|
||||
{{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}
|
||||
{%- endmacro %}
|
||||
|
||||
{% macro default__get_catalog(information_schema, schemas) -%}
|
||||
@@ -104,7 +104,7 @@
|
||||
|
||||
|
||||
{% macro get_columns_in_relation(relation) -%}
|
||||
{{ return(adapter.dispatch('get_columns_in_relation')(relation)) }}
|
||||
{{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}
|
||||
{% endmacro %}
|
||||
|
||||
{% macro sql_convert_columns_in_relation(table) -%}
|
||||
@@ -121,13 +121,13 @@
|
||||
{% endmacro %}
|
||||
|
||||
{% macro alter_column_type(relation, column_name, new_column_type) -%}
|
||||
{{ return(adapter.dispatch('alter_column_type')(relation, column_name, new_column_type)) }}
|
||||
{{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}
|
||||
{% endmacro %}

{% macro alter_column_comment(relation, column_dict) -%}
{{ return(adapter.dispatch('alter_column_comment')(relation, column_dict)) }}
{{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}
{% endmacro %}

{% macro default__alter_column_comment(relation, column_dict) -%}

@@ -136,7 +136,7 @@
{% endmacro %}

{% macro alter_relation_comment(relation, relation_comment) -%}
{{ return(adapter.dispatch('alter_relation_comment')(relation, relation_comment)) }}
{{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}
{% endmacro %}

{% macro default__alter_relation_comment(relation, relation_comment) -%}

@@ -145,7 +145,7 @@
{% endmacro %}

{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}
{{ return(adapter.dispatch('persist_docs')(relation, model, for_relation, for_columns)) }}
{{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}
{% endmacro %}

{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}

@@ -180,7 +180,7 @@

{% macro drop_relation(relation) -%}
{{ return(adapter.dispatch('drop_relation')(relation)) }}
{{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}
{% endmacro %}

@@ -191,7 +191,7 @@
{% endmacro %}

{% macro truncate_relation(relation) -%}
{{ return(adapter.dispatch('truncate_relation')(relation)) }}
{{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}
{% endmacro %}

@@ -202,7 +202,7 @@
{% endmacro %}

{% macro rename_relation(from_relation, to_relation) -%}
{{ return(adapter.dispatch('rename_relation')(from_relation, to_relation)) }}
{{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}
{% endmacro %}

{% macro default__rename_relation(from_relation, to_relation) -%}

@@ -214,7 +214,7 @@

{% macro information_schema_name(database) %}
{{ return(adapter.dispatch('information_schema_name')(database)) }}
{{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}
{% endmacro %}

{% macro default__information_schema_name(database) -%}

@@ -227,7 +227,7 @@

{% macro list_schemas(database) -%}
{{ return(adapter.dispatch('list_schemas')(database)) }}
{{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}
{% endmacro %}

{% macro default__list_schemas(database) -%}

@@ -241,7 +241,7 @@

{% macro check_schema_exists(information_schema, schema) -%}
{{ return(adapter.dispatch('check_schema_exists')(information_schema, schema)) }}
{{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}
{% endmacro %}

{% macro default__check_schema_exists(information_schema, schema) -%}

@@ -256,7 +256,7 @@

{% macro list_relations_without_caching(schema_relation) %}
{{ return(adapter.dispatch('list_relations_without_caching')(schema_relation)) }}
{{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}
{% endmacro %}

@@ -267,7 +267,7 @@

{% macro current_timestamp() -%}
{{ adapter.dispatch('current_timestamp')() }}
{{ adapter.dispatch('current_timestamp', 'dbt')() }}
{%- endmacro %}

@@ -278,7 +278,7 @@

{% macro collect_freshness(source, loaded_at_field, filter) %}
{{ return(adapter.dispatch('collect_freshness')(source, loaded_at_field, filter))}}
{{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}
{% endmacro %}

@@ -296,7 +296,7 @@
{% endmacro %}

{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}
{{ return(adapter.dispatch('make_temp_relation')(base_relation, suffix))}}
{{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix))}}
{% endmacro %}

{% macro default__make_temp_relation(base_relation, suffix) %}

@@ -311,3 +311,34 @@
{{ config.set('sql_header', caller()) }}
{%- endmacro %}

{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}
{{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}
{% endmacro %}

{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}

  {% if add_columns is none %}
    {% set add_columns = [] %}
  {% endif %}
  {% if remove_columns is none %}
    {% set remove_columns = [] %}
  {% endif %}

  {% set sql -%}

    alter {{ relation.type }} {{ relation }}

    {% for column in add_columns %}
      add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}
    {% endfor %}{{ ',' if remove_columns | length > 0 }}

    {% for column in remove_columns %}
      drop column {{ column.name }}{{ ',' if not loop.last }}
    {% endfor %}

  {%- endset -%}

  {% do run_query(sql) %}

{% endmacro %}
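
For illustration only (not part of the changeset): given a hypothetical table analytics.my_model, one column to add and one to remove, the default implementation above would render SQL along these lines before passing it to run_query:

alter table analytics.my_model
  add column order_total numeric,
  drop column legacy_flag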

@@ -13,6 +13,10 @@

#}
{% macro generate_alias_name(custom_alias_name=none, node=none) -%}
{% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}
{%- endmacro %}

{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}

{%- if custom_alias_name is none -%}

@@ -14,7 +14,7 @@

#}
{% macro generate_database_name(custom_database_name=none, node=none) -%}
{% do return(adapter.dispatch('generate_database_name')(custom_database_name, node)) %}
{% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}
{%- endmacro %}

{% macro default__generate_database_name(custom_database_name=none, node=none) -%}

@@ -15,6 +15,10 @@

#}
{% macro generate_schema_name(custom_schema_name, node) -%}
{{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}
{% endmacro %}

{% macro default__generate_schema_name(custom_schema_name, node) -%}

{%- set default_schema = target.schema -%}
{%- if custom_schema_name is none -%}

@@ -0,0 +1,15 @@
{% macro get_where_subquery(relation) -%}
    {% do return(adapter.dispatch('get_where_subquery')(relation)) %}
{%- endmacro %}

{% macro default__get_where_subquery(relation) -%}
    {% set where = config.get('where', '') %}
    {% if where %}
        {%- set filtered -%}
            (select * from {{ relation }} where {{ where }}) dbt_subquery
        {%- endset -%}
        {% do return(filtered) %}
    {%- else -%}
        {% do return(relation) %}
    {%- endif -%}
{%- endmacro %}
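
For illustration only: if a test were configured with a hypothetical where: "order_date >= '2020-01-01'" on a relation analytics.orders, the default implementation above would hand the test this filtered subquery in place of the relation, so the test only evaluates the matching rows:

(select * from analytics.orders where order_date >= '2020-01-01') dbt_subquery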

@@ -7,7 +7,7 @@ with all_values as (
        count(*) as n_records

    from {{ model }}
    group by 1
    group by {{ column_name }}

)

@@ -28,6 +28,6 @@ where value_field not in (

{% test accepted_values(model, column_name, values, quote=True) %}
  {% set macro = adapter.dispatch('test_accepted_values') %}
  {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}
  {{ macro(model, column_name, values, quote) }}
{% endtest %}

@@ -8,6 +8,6 @@ where {{ column_name }} is null

{% test not_null(model, column_name) %}
  {% set macro = adapter.dispatch('test_not_null') %}
  {% set macro = adapter.dispatch('test_not_null', 'dbt') %}
  {{ macro(model, column_name) }}
{% endtest %}

@@ -0,0 +1,30 @@

{% macro default__test_relationships(model, column_name, to, field) %}

with child as (
    select {{ column_name }} as from_field
    from {{ model }}
    where {{ column_name }} is not null
),

parent as (
    select {{ field }} as to_field
    from {{ to }}
)

select
    from_field

from child
left join parent
    on child.from_field = parent.to_field

where parent.to_field is null

{% endmacro %}

{% test relationships(model, column_name, to, field) %}
  {% set macro = adapter.dispatch('test_relationships', 'dbt') %}
  {{ macro(model, column_name, to, field) }}
{% endtest %}

@@ -0,0 +1,18 @@
{% macro default__test_unique(model, column_name) %}

select
    {{ column_name }} as unique_field,
    count(*) as n_records

from {{ model }}
where {{ column_name }} is not null
group by {{ column_name }}
having count(*) > 1

{% endmacro %}

{% test unique(model, column_name) %}
  {% set macro = adapter.dispatch('test_unique', 'dbt') %}
  {{ macro(model, column_name) }}
{% endtest %}

@@ -1,17 +1,17 @@

{% macro get_merge_sql(target, source, unique_key, dest_columns, predicates=none) -%}
  {{ adapter.dispatch('get_merge_sql')(target, source, unique_key, dest_columns, predicates) }}
  {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, predicates) }}
{%- endmacro %}

{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}
  {{ adapter.dispatch('get_delete_insert_merge_sql')(target, source, unique_key, dest_columns) }}
  {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns) }}
{%- endmacro %}

{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}
  {{ adapter.dispatch('get_insert_overwrite_merge_sql')(target, source, dest_columns, predicates, include_sql_header) }}
  {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }}
{%- endmacro %}

@@ -79,7 +79,7 @@
    (
        select {{ dest_cols_csv }}
        from {{ source }}
    );
    )

{%- endmacro %}

@@ -1,5 +1,6 @@

{% macro incremental_upsert(tmp_relation, target_relation, unique_key=none, statement_name="main") %}

  {%- set dest_columns = adapter.get_columns_in_relation(target_relation) -%}
  {%- set dest_cols_csv = dest_columns | map(attribute='quoted') | join(', ') -%}

@@ -5,6 +5,26 @@

  {% set target_relation = this.incorporate(type='table') %}
  {% set existing_relation = load_relation(this) %}
  {% set tmp_relation = make_temp_relation(target_relation) %}
  {%- set full_refresh_mode = (should_full_refresh()) -%}

  {% set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') %}

  {% set tmp_identifier = model['name'] + '__dbt_tmp' %}
  {% set backup_identifier = model['name'] + "__dbt_backup" %}

  -- the intermediate_ and backup_ relations should not already exist in the database; get_relation
  -- will return None in that case. Otherwise, we get a relation that we can drop
  -- later, before we try to use this name for the current operation. This has to happen before
  -- BEGIN, in a separate transaction
  {% set preexisting_intermediate_relation = adapter.get_relation(identifier=tmp_identifier,
                                                                  schema=schema,
                                                                  database=database) %}
  {% set preexisting_backup_relation = adapter.get_relation(identifier=backup_identifier,
                                                            schema=schema,
                                                            database=database) %}
  {{ drop_relation_if_exists(preexisting_intermediate_relation) }}
  {{ drop_relation_if_exists(preexisting_backup_relation) }}

  {{ run_hooks(pre_hooks, inside_transaction=False) }}

@@ -12,29 +32,30 @@
  {{ run_hooks(pre_hooks, inside_transaction=True) }}

  {% set to_drop = [] %}

  {# -- first check whether we want to full refresh for source view or config reasons #}
  {% set trigger_full_refresh = (full_refresh_mode or existing_relation.is_view) %}

  {% if existing_relation is none %}
      {% set build_sql = create_table_as(False, target_relation, sql) %}
  {% elif existing_relation.is_view or should_full_refresh() %}
  {% elif trigger_full_refresh %}
      {#-- Make sure the backup doesn't exist so we don't encounter issues with the rename below #}
      {% set tmp_identifier = model['name'] + '__dbt_tmp' %}
      {% set backup_identifier = model['name'] + "__dbt_backup" %}

      {% set backup_identifier = model['name'] + '__dbt_backup' %}
      {% set intermediate_relation = existing_relation.incorporate(path={"identifier": tmp_identifier}) %}
      {% set backup_relation = existing_relation.incorporate(path={"identifier": backup_identifier}) %}

      {% do adapter.drop_relation(intermediate_relation) %}
      {% do adapter.drop_relation(backup_relation) %}

      {% set build_sql = create_table_as(False, intermediate_relation, sql) %}
      {% set need_swap = true %}
      {% do to_drop.append(backup_relation) %}
  {% else %}
      {% set tmp_relation = make_temp_relation(target_relation) %}
      {% do run_query(create_table_as(True, tmp_relation, sql)) %}
      {% do adapter.expand_target_column_types(
      {% do run_query(create_table_as(True, tmp_relation, sql)) %}
      {% do adapter.expand_target_column_types(
             from_relation=tmp_relation,
             to_relation=target_relation) %}
      {% set build_sql = incremental_upsert(tmp_relation, target_relation, unique_key=unique_key) %}
      {% do process_schema_changes(on_schema_change, tmp_relation, existing_relation) %}
      {% set build_sql = incremental_upsert(tmp_relation, target_relation, unique_key=unique_key) %}

  {% endif %}

  {% call statement("main") %}
@@ -0,0 +1,164 @@
{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}

  {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}

    {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}
    {% do log(log_message) %}

    {{ return(default) }}

  {% else %}

    {{ return(on_schema_change) }}

  {% endif %}

{% endmacro %}

{% macro diff_columns(source_columns, target_columns) %}

  {% set result = [] %}
  {% set source_names = source_columns | map(attribute = 'column') | list %}
  {% set target_names = target_columns | map(attribute = 'column') | list %}

  {# --check whether the name attribute exists in the target - this does not perform a data type check #}
  {% for sc in source_columns %}
    {% if sc.name not in target_names %}
      {{ result.append(sc) }}
    {% endif %}
  {% endfor %}

  {{ return(result) }}

{% endmacro %}

{% macro diff_column_data_types(source_columns, target_columns) %}

  {% set result = [] %}
  {% for sc in source_columns %}
    {% set tc = target_columns | selectattr("name", "equalto", sc.name) | list | first %}
    {% if tc %}
      {% if sc.data_type != tc.data_type %}
        {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }}
      {% endif %}
    {% endif %}
  {% endfor %}

  {{ return(result) }}

{% endmacro %}

{% macro check_for_schema_changes(source_relation, target_relation) %}

  {% set schema_changed = False %}

  {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}
  {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}
  {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}
  {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}

  {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}

  {% if source_not_in_target != [] %}
    {% set schema_changed = True %}
  {% elif target_not_in_source != [] or new_target_types != [] %}
    {% set schema_changed = True %}
  {% elif new_target_types != [] %}
    {% set schema_changed = True %}
  {% endif %}

  {% set changes_dict = {
    'schema_changed': schema_changed,
    'source_not_in_target': source_not_in_target,
    'target_not_in_source': target_not_in_source,
    'new_target_types': new_target_types
  } %}

  {% set msg %}
    In {{ target_relation }}:
        Schema changed: {{ schema_changed }}
        Source columns not in target: {{ source_not_in_target }}
        Target columns not in source: {{ target_not_in_source }}
        New column types: {{ new_target_types }}
  {% endset %}

  {% do log(msg) %}

  {{ return(changes_dict) }}

{% endmacro %}

{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}

  {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}

  {%- if on_schema_change == 'append_new_columns'-%}
    {%- if add_to_target_arr | length > 0 -%}
      {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}
    {%- endif -%}

  {% elif on_schema_change == 'sync_all_columns' %}
    {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}
    {%- set new_target_types = schema_changes_dict['new_target_types'] -%}

    {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %}
      {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}
    {% endif %}

    {% if new_target_types != [] %}
      {% for ntt in new_target_types %}
        {% set column_name = ntt['column_name'] %}
        {% set new_type = ntt['new_type'] %}
        {% do alter_column_type(target_relation, column_name, new_type) %}
      {% endfor %}
    {% endif %}

  {% endif %}

  {% set schema_change_message %}
    In {{ target_relation }}:
        Schema change approach: {{ on_schema_change }}
        Columns added: {{ add_to_target_arr }}
        Columns removed: {{ remove_from_target_arr }}
        Data types changed: {{ new_target_types }}
  {% endset %}

  {% do log(schema_change_message) %}

{% endmacro %}

{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}

  {% if on_schema_change != 'ignore' %}

    {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}

    {% if schema_changes_dict['schema_changed'] %}

      {% if on_schema_change == 'fail' %}

        {% set fail_msg %}
          The source and target schemas on this incremental model are out of sync!
          They can be reconciled in several ways:
            - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.
            - Re-run the incremental model with `full_refresh: True` to update the target schema.
            - update the schema manually and re-run the process.
        {% endset %}

        {% do exceptions.raise_compiler_error(fail_msg) %}

      {# -- unless we ignore, run the sync operation per the config #}
      {% else %}

        {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}

      {% endif %}

    {% endif %}

  {% endif %}

{% endmacro %}
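
A minimal sketch (not part of the diff) of how an incremental model could opt in to the schema-change handling added above; the model name, columns, and source are hypothetical:

{{
    config(
        materialized='incremental',
        unique_key='id',
        on_schema_change='sync_all_columns'
    )
}}

select id, customer_id, order_total, updated_at
from {{ ref('stg_orders') }}
{% if is_incremental() %}
where updated_at > (select max(updated_at) from {{ this }})
{% endif %}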

@@ -1,14 +1,6 @@

{% macro create_csv_table(model, agate_table) -%}
  {{ adapter.dispatch('create_csv_table')(model, agate_table) }}
{%- endmacro %}

{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}
  {{ adapter.dispatch('reset_csv_table')(model, full_refresh, old_relation, agate_table) }}
{%- endmacro %}

{% macro load_csv_rows(model, agate_table) -%}
  {{ adapter.dispatch('load_csv_rows')(model, agate_table) }}
  {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}
{%- endmacro %}

{% macro default__create_csv_table(model, agate_table) %}

@@ -33,6 +25,9 @@
    {{ return(sql) }}
{% endmacro %}

{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}
  {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }}
{%- endmacro %}

{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}
    {% set sql = "" %}

@@ -47,6 +42,21 @@
    {{ return(sql) }}
{% endmacro %}

{% macro get_binding_char() -%}
  {{ adapter.dispatch('get_binding_char', 'dbt')() }}
{%- endmacro %}

{% macro default__get_binding_char() %}
  {{ return('%s') }}
{% endmacro %}

{% macro get_batch_size() -%}
  {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}
{%- endmacro %}

{% macro default__get_batch_size() %}
  {{ return(10000) }}
{% endmacro %}
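
Because both helpers above go through adapter.dispatch, a plugin can tune seed loading without copying the whole load_csv_rows macro. A hypothetical adapter-level override that halves the default batch size might look like this (the adapter prefix and value are illustrative only):

{% macro postgres__get_batch_size() %}
    {# hypothetical override; dispatch would pick this up in place of default__get_batch_size #}
    {{ return(5000) }}
{% endmacro %}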

{% macro get_seed_column_quoted_csv(model, column_names) %}
  {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}

@@ -59,47 +69,47 @@
    {{ return(dest_cols_csv) }}
{% endmacro %}

{% macro basic_load_csv_rows(model, batch_size, agate_table) %}
    {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}
    {% set bindings = [] %}

    {% set statements = [] %}

    {% for chunk in agate_table.rows | batch(batch_size) %}
        {% set bindings = [] %}

        {% for row in chunk %}
            {% do bindings.extend(row) %}
        {% endfor %}

        {% set sql %}
            insert into {{ this.render() }} ({{ cols_sql }}) values
            {% for row in chunk -%}
                ({%- for column in agate_table.column_names -%}
                    %s
                    {%- if not loop.last%},{%- endif %}
                {%- endfor -%})
                {%- if not loop.last%},{%- endif %}
            {%- endfor %}
        {% endset %}

        {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}

        {% if loop.index0 == 0 %}
            {% do statements.append(sql) %}
        {% endif %}
    {% endfor %}

    {# Return SQL so we can render it out into the compiled files #}
    {{ return(statements[0]) }}
{% endmacro %}

{% macro load_csv_rows(model, agate_table) -%}
  {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}
{%- endmacro %}

{% macro default__load_csv_rows(model, agate_table) %}
    {{ return(basic_load_csv_rows(model, 10000, agate_table) )}}
{% endmacro %}

    {% set batch_size = get_batch_size() %}

    {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}
    {% set bindings = [] %}

    {% set statements = [] %}

    {% for chunk in agate_table.rows | batch(batch_size) %}
        {% set bindings = [] %}

        {% for row in chunk %}
            {% do bindings.extend(row) %}
        {% endfor %}

        {% set sql %}
            insert into {{ this.render() }} ({{ cols_sql }}) values
            {% for row in chunk -%}
                ({%- for column in agate_table.column_names -%}
                    {{ get_binding_char() }}
                    {%- if not loop.last%},{%- endif %}
                {%- endfor -%})
                {%- if not loop.last%},{%- endif %}
            {%- endfor %}
        {% endset %}

        {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}

        {% if loop.index0 == 0 %}
            {% do statements.append(sql) %}
        {% endif %}
    {% endfor %}

    {# Return SQL so we can render it out into the compiled files #}
    {{ return(statements[0]) }}
{% endmacro %}

{% materialization seed, default %}

@@ -2,7 +2,7 @@
    Add new columns to the table if applicable
#}
{% macro create_columns(relation, columns) %}
  {{ adapter.dispatch('create_columns')(relation, columns) }}
  {{ adapter.dispatch('create_columns', 'dbt')(relation, columns) }}
{% endmacro %}

{% macro default__create_columns(relation, columns) %}

@@ -15,7 +15,7 @@

{% macro post_snapshot(staging_relation) %}
  {{ adapter.dispatch('post_snapshot')(staging_relation) }}
  {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}
{% endmacro %}

{% macro default__post_snapshot(staging_relation) %}
Some files were not shown because too many files have changed in this diff.