Mirror of https://github.com/dbt-labs/dbt-core, synced 2025-12-23 15:01:28 +00:00.
Compare commits: `feature/re` ... `experiment` (327 commits)
@@ -1,5 +1,5 @@
|
||||
[bumpversion]
|
||||
current_version = 0.19.0a1
|
||||
current_version = 0.19.0
|
||||
parse = (?P<major>\d+)
|
||||
\.(?P<minor>\d+)
|
||||
\.(?P<patch>\d+)
|
||||
|
||||
@@ -121,6 +121,45 @@ jobs:
|
||||
- store_artifacts:
|
||||
path: ./logs
|
||||
|
||||
integration-postgres-py39:
|
||||
docker: *test_and_postgres
|
||||
steps:
|
||||
- checkout
|
||||
- run: *setupdb
|
||||
- run:
|
||||
name: Run tests
|
||||
command: tox -e integration-postgres-py39
|
||||
- store_artifacts:
|
||||
path: ./logs
|
||||
integration-snowflake-py39:
|
||||
docker: *test_only
|
||||
steps:
|
||||
- checkout
|
||||
- run:
|
||||
name: Run tests
|
||||
command: tox -e integration-snowflake-py39
|
||||
no_output_timeout: 1h
|
||||
- store_artifacts:
|
||||
path: ./logs
|
||||
integration-redshift-py39:
|
||||
docker: *test_only
|
||||
steps:
|
||||
- checkout
|
||||
- run:
|
||||
name: Run tests
|
||||
command: tox -e integration-redshift-py39
|
||||
- store_artifacts:
|
||||
path: ./logs
|
||||
integration-bigquery-py39:
|
||||
docker: *test_only
|
||||
steps:
|
||||
- checkout
|
||||
- run:
|
||||
name: Run tests
|
||||
command: tox -e integration-bigquery-py39
|
||||
- store_artifacts:
|
||||
path: ./logs
|
||||
|
||||
workflows:
|
||||
version: 2
|
||||
test-everything:
|
||||
@@ -150,6 +189,18 @@ workflows:
|
||||
- integration-snowflake-py38:
|
||||
requires:
|
||||
- integration-postgres-py38
|
||||
- integration-postgres-py39:
|
||||
requires:
|
||||
- unit
|
||||
- integration-redshift-py39:
|
||||
requires:
|
||||
- integration-postgres-py39
|
||||
- integration-bigquery-py39:
|
||||
requires:
|
||||
- integration-postgres-py39
|
||||
# - integration-snowflake-py39:
|
||||
# requires:
|
||||
# - integration-postgres-py39
|
||||
- build-wheels:
|
||||
requires:
|
||||
- unit
|
||||
@@ -161,3 +212,7 @@ workflows:
|
||||
- integration-redshift-py38
|
||||
- integration-bigquery-py38
|
||||
- integration-snowflake-py38
|
||||
- integration-postgres-py39
|
||||
- integration-redshift-py39
|
||||
- integration-bigquery-py39
|
||||
# - integration-snowflake-py39
|
||||
|
||||
`.gitignore` (vendored): 2 changes
@@ -8,7 +8,7 @@ __pycache__/
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
env/
|
||||
env*/
|
||||
dbt_env/
|
||||
build/
|
||||
develop-eggs/
|
||||
|
||||
`CHANGELOG.md`: 169 changes
@@ -1,31 +1,172 @@
|
||||
## dbt 0.19.0 (Release TBD)
|
||||
## dbt 0.20.0 (Release TBD)
|
||||
|
||||
### Breaking changes
|
||||
- The format for sources.json, run-results.json, manifest.json, and catalog.json has changed to include a common metadata field ([#2761](https://github.com/fishtown-analytics/dbt/issues/2761), [#2778](https://github.com/fishtown-analytics/dbt/pull/2778), [#2763](https://github.com/fishtown-analytics/dbt/issues/2763), [#2784](https://github.com/fishtown-analytics/dbt/pull/2784), [#2764](https://github.com/fishtown-analytics/dbt/issues/2764), [#2785](https://github.com/fishtown-analytics/dbt/pull/2785))
|
||||
### Fixes
|
||||
|
||||
- Fix exit code from `dbt debug` not returning a failure when one of the tests fails ([#3017](https://github.com/fishtown-analytics/dbt/issues/3017))
|
||||
- Auto-generated CTEs in tests and ephemeral models have lowercase names to comply with dbt coding conventions ([#3027](https://github.com/fishtown-analytics/dbt/issues/3027), [#3028](https://github.com/fishtown-analytics/dbt/issues/3028))
|
||||
|
||||
### Features
|
||||
- dbt will compare configurations using the un-rendered form of the config block in dbt_project.yml ([#2713](https://github.com/fishtown-analytics/dbt/issues/2713), [#2735](https://github.com/fishtown-analytics/dbt/pull/2735))
|
||||
- Add optional configs for `require_partition_filter` and `partition_expiration_days` in BigQuery ([#1843](https://github.com/fishtown-analytics/dbt/issues/1843), [#2928](https://github.com/fishtown-analytics/dbt/pull/2928))
|
||||
- Fix for EOL SQL comments preventing entire-line execution ([#2731](https://github.com/fishtown-analytics/dbt/issues/2731), [#2974](https://github.com/fishtown-analytics/dbt/pull/2974))
|
||||
|
||||
Contributors:
|
||||
- [@yu-iskw](https://github.com/yu-iskw) ([#2928](https://github.com/fishtown-analytics/dbt/pull/2928))
|
||||
- [@sdebruyn](https://github.com/sdebruyn) / [@lynxcare](https://github.com/lynxcare) ([#3018](https://github.com/fishtown-analytics/dbt/pull/3018))
|
||||
- [@rvacaru](https://github.com/rvacaru) ([#2974](https://github.com/fishtown-analytics/dbt/pull/2974))
|
||||
- [@NiallRees](https://github.com/NiallRees) ([#3028](https://github.com/fishtown-analytics/dbt/pull/3028))
|
||||
|
||||
## dbt 0.19.1 (Release TBD)
|
||||
|
||||
### Under the hood
|
||||
- Bump werkzeug upper bound dependency to `<v2.0` ([#3011](https://github.com/fishtown-analytics/dbt/pull/3011))
|
||||
|
||||
Contributors:
|
||||
- [@Bl3f](https://github.com/Bl3f) ([#3011](https://github.com/fishtown-analytics/dbt/pull/3011))
|
||||
|
||||
## dbt 0.19.0 (January 27, 2021)
|
||||
|
||||
## dbt 0.19.0rc3 (January 27, 2021)
|
||||
|
||||
### Under the hood
|
||||
- Cleanup docker resources, use single `docker/Dockerfile` for publishing dbt as a docker image ([dbt-release#3](https://github.com/fishtown-analytics/dbt-release/issues/3), [#3019](https://github.com/fishtown-analytics/dbt/pull/3019))
|
||||
|
||||
## dbt 0.19.0rc2 (January 14, 2021)
|
||||
|
||||
### Fixes
|
||||
- Fix regression with defining exposures and other resources with the same name ([#2969](https://github.com/fishtown-analytics/dbt/issues/2969), [#3009](https://github.com/fishtown-analytics/dbt/pull/3009))
|
||||
- Remove ellipses printed while parsing ([#2971](https://github.com/fishtown-analytics/dbt/issues/2971), [#2996](https://github.com/fishtown-analytics/dbt/pull/2996))
|
||||
|
||||
### Under the hood
|
||||
- Rewrite macro for `snapshot_merge_sql` to make it compatible with other SQL dialects ([#3003](https://github.com/fishtown-analytics/dbt/pull/3003))
|
||||
- Rewrite logic in `snapshot_check_strategy()` to make it compatible with other SQL dialects ([#3000](https://github.com/fishtown-analytics/dbt/pull/3000), [#3001](https://github.com/fishtown-analytics/dbt/pull/3001))
|
||||
- Remove version restrictions on `botocore` ([#3006](https://github.com/fishtown-analytics/dbt/pull/3006))
|
||||
- Include `exposures` in start-of-invocation stdout summary: `Found ...` ([#3007](https://github.com/fishtown-analytics/dbt/pull/3007), [#3008](https://github.com/fishtown-analytics/dbt/pull/3008))
|
||||
|
||||
Contributors:
|
||||
- [@mikaelene](https://github.com/mikaelene) ([#3003](https://github.com/fishtown-analytics/dbt/pull/3003))
|
||||
- [@dbeatty10](https://github.com/dbeatty10) ([dbt-adapter-tests#10](https://github.com/fishtown-analytics/dbt-adapter-tests/pull/10))
|
||||
- [@swanderz](https://github.com/swanderz) ([#3000](https://github.com/fishtown-analytics/dbt/pull/3000))
|
||||
- [@stpierre](https://github.com/stpierre) ([#3006](https://github.com/fishtown-analytics/dbt/pull/3006))
|
||||
|
||||
## dbt 0.19.0rc1 (December 29, 2020)
|
||||
|
||||
### Breaking changes
|
||||
|
||||
- Defer if and only if upstream reference does not exist in current environment namespace ([#2909](https://github.com/fishtown-analytics/dbt/issues/2909), [#2946](https://github.com/fishtown-analytics/dbt/pull/2946))
|
||||
- Rationalize run result status reporting and clean up artifact schema ([#2493](https://github.com/fishtown-analytics/dbt/issues/2493), [#2943](https://github.com/fishtown-analytics/dbt/pull/2943))
|
||||
- Add adapter specific query execution info to run results and source freshness results artifacts. Statement call blocks return `response` instead of `status`, and the adapter method `get_status` is now `get_response` ([#2747](https://github.com/fishtown-analytics/dbt/issues/2747), [#2961](https://github.com/fishtown-analytics/dbt/pull/2961))
|
||||
|
||||
### Features
|
||||
- Added macro `get_partitions_metadata(table)` to return partition metadata for BigQuery partitioned tables ([#2552](https://github.com/fishtown-analytics/dbt/pull/2552), [#2596](https://github.com/fishtown-analytics/dbt/pull/2596))
|
||||
- Added `--defer` flag for `dbt test` as well ([#2701](https://github.com/fishtown-analytics/dbt/issues/2701), [#2954](https://github.com/fishtown-analytics/dbt/pull/2954))
|
||||
- Added native python `re` module for regex in jinja templates ([#1755](https://github.com/fishtown-analytics/dbt/pull/2851), [#2851](https://github.com/fishtown-analytics/dbt/pull/2851))
|
||||
- Store resolved node names in manifest ([#2647](https://github.com/fishtown-analytics/dbt/issues/2647), [#2837](https://github.com/fishtown-analytics/dbt/pull/2837))
|
||||
- Save selectors dictionary to manifest, allow descriptions ([#2693](https://github.com/fishtown-analytics/dbt/issues/2693), [#2866](https://github.com/fishtown-analytics/dbt/pull/2866))
|
||||
- Normalize cli-style-strings in manifest selectors dictionary ([#2879](https://github.com/fishtown-analytics/dbt/issues/2879), [#2895](https://github.com/fishtown-analytics/dbt/pull/2895))
|
||||
- Hourly, monthly and yearly partitions available in BigQuery ([#2476](https://github.com/fishtown-analytics/dbt/issues/2476), [#2903](https://github.com/fishtown-analytics/dbt/pull/2903))
|
||||
- Allow BigQuery to default to the environment's default project ([#2828](https://github.com/fishtown-analytics/dbt/pull/2828), [#2908](https://github.com/fishtown-analytics/dbt/pull/2908))
|
||||
- Rationalize run result status reporting and clean up artifact schema ([#2493](https://github.com/fishtown-analytics/dbt/issues/2493), [#2943](https://github.com/fishtown-analytics/dbt/pull/2943))
|
||||
|
||||
### Fixes
|
||||
- Respect `--project-dir` in `dbt clean` command ([#2840](https://github.com/fishtown-analytics/dbt/issues/2840), [#2841](https://github.com/fishtown-analytics/dbt/pull/2841))
|
||||
- Fix Redshift adapter `get_columns_in_relation` macro to push schema filter down to the `svv_external_columns` view ([#2854](https://github.com/fishtown-analytics/dbt/issues/2854), [#2854](https://github.com/fishtown-analytics/dbt/issues/2854))
|
||||
- Increased the supported relation name length in postgres from 29 to 51 ([#2850](https://github.com/fishtown-analytics/dbt/pull/2850))
|
||||
- `dbt list` command always return `0` as exit code ([#2886](https://github.com/fishtown-analytics/dbt/issues/2886), [#2892](https://github.com/fishtown-analytics/dbt/issues/2892))
|
||||
- Set default `materialized` for test node configs to `test` ([#2806](https://github.com/fishtown-analytics/dbt/issues/2806), [#2902](https://github.com/fishtown-analytics/dbt/pull/2902))
|
||||
- Allow `docs` blocks in `exposure` descriptions ([#2913](https://github.com/fishtown-analytics/dbt/issues/2913), [#2920](https://github.com/fishtown-analytics/dbt/pull/2920))
|
||||
- Use original file path instead of absolute path as checksum for big seeds ([#2927](https://github.com/fishtown-analytics/dbt/issues/2927), [#2939](https://github.com/fishtown-analytics/dbt/pull/2939))
|
||||
- Fix KeyError if deferring to a manifest with a since-deleted source, ephemeral model, or test ([#2875](https://github.com/fishtown-analytics/dbt/issues/2875), [#2958](https://github.com/fishtown-analytics/dbt/pull/2958))
|
||||
|
||||
### Under the hood
|
||||
- Add `unixodbc-dev` package to testing docker image ([#2859](https://github.com/fishtown-analytics/dbt/pull/2859))
|
||||
- Add event tracking for project parser/load times ([#2823](https://github.com/fishtown-analytics/dbt/issues/2823),[#2893](https://github.com/fishtown-analytics/dbt/pull/2893))
|
||||
- Bump `cryptography` version to `>= 3.2` and bump snowflake connector to `2.3.6` ([#2896](https://github.com/fishtown-analytics/dbt/issues/2896), [#2922](https://github.com/fishtown-analytics/dbt/issues/2922))
|
||||
- Widen supported Google Cloud libraries dependencies ([#2794](https://github.com/fishtown-analytics/dbt/pull/2794), [#2877](https://github.com/fishtown-analytics/dbt/pull/2877)).
|
||||
- Bump `hologram` version to `0.0.11`. Add `scripts/dtr.py` ([#2888](https://github.com/fishtown-analytics/dbt/issues/2840),[#2889](https://github.com/fishtown-analytics/dbt/pull/2889))
|
||||
- Bump `hologram` version to `0.0.12`. Add testing support for python3.9 ([#2822](https://github.com/fishtown-analytics/dbt/issues/2822),[#2960](https://github.com/fishtown-analytics/dbt/pull/2960))
|
||||
- Bump the version requirements for `boto3` in dbt-redshift to the upper limit `1.16` to match dbt-redshift and the `snowflake-python-connector` as of version `2.3.6`. ([#2931](https://github.com/fishtown-analytics/dbt/issues/2931), [#2963](https://github.com/fishtown-analytics/dbt/issues/2963))
|
||||
|
||||
### Docs
|
||||
- Fixed issue where data tests with tags were not showing up in graph viz ([docs#147](https://github.com/fishtown-analytics/dbt-docs/issues/147), [docs#157](https://github.com/fishtown-analytics/dbt-docs/pull/157))
|
||||
|
||||
Contributors:
|
||||
- [@feluelle](https://github.com/feluelle) ([#2841](https://github.com/fishtown-analytics/dbt/pull/2841))
|
||||
- [ran-eh](https://github.com/ran-eh) ([#2596](https://github.com/fishtown-analytics/dbt/pull/2596))
|
||||
- [@hochoy](https://github.com/hochoy) ([#2851](https://github.com/fishtown-analytics/dbt/pull/2851))
|
||||
- [@brangisom](https://github.com/brangisom) ([#2855](https://github.com/fishtown-analytics/dbt/pull/2855))
|
||||
- [@elexisvenator](https://github.com/elexisvenator) ([#2850](https://github.com/fishtown-analytics/dbt/pull/2850))
|
||||
- [@franloza](https://github.com/franloza) ([#2837](https://github.com/fishtown-analytics/dbt/pull/2837))
|
||||
- [@max-sixty](https://github.com/max-sixty) ([#2877](https://github.com/fishtown-analytics/dbt/pull/2877), [#2908](https://github.com/fishtown-analytics/dbt/pull/2908))
|
||||
- [@rsella](https://github.com/rsella) ([#2892](https://github.com/fishtown-analytics/dbt/issues/2892))
|
||||
- [@joellabes](https://github.com/joellabes) ([#2913](https://github.com/fishtown-analytics/dbt/issues/2913))
|
||||
- [@plotneishestvo](https://github.com/plotneishestvo) ([#2896](https://github.com/fishtown-analytics/dbt/issues/2896))
|
||||
- [@db-magnus](https://github.com/db-magnus) ([#2892](https://github.com/fishtown-analytics/dbt/issues/2892))
|
||||
- [@tyang209](https://github.com/tyang209) ([#2931](https://github.com/fishtown-analytics/dbt/issues/2931))
|
||||
|
||||
## dbt 0.19.0b1 (October 21, 2020)
|
||||
|
||||
### Breaking changes
|
||||
- The format for `sources.json`, `run-results.json`, `manifest.json`, and `catalog.json` has changed:
|
||||
- Each now has a common metadata dictionary ([#2761](https://github.com/fishtown-analytics/dbt/issues/2761), [#2778](https://github.com/fishtown-analytics/dbt/pull/2778)). The contents include: schema and dbt versions ([#2670](https://github.com/fishtown-analytics/dbt/issues/2670), [#2767](https://github.com/fishtown-analytics/dbt/pull/2767)); `invocation_id` ([#2763](https://github.com/fishtown-analytics/dbt/issues/2763), [#2784](https://github.com/fishtown-analytics/dbt/pull/2784)); custom environment variables prefixed with `DBT_ENV_CUSTOM_ENV_` ([#2764](https://github.com/fishtown-analytics/dbt/issues/2764), [#2785](https://github.com/fishtown-analytics/dbt/pull/2785)); cli and rpc arguments in the `run_results.json` ([#2510](https://github.com/fishtown-analytics/dbt/issues/2510), [#2813](https://github.com/fishtown-analytics/dbt/pull/2813)).
|
||||
- Remove `injected_sql` from manifest nodes, use `compiled_sql` instead ([#2762](https://github.com/fishtown-analytics/dbt/issues/2762), [#2834](https://github.com/fishtown-analytics/dbt/pull/2834))
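
As a rough illustration of the common metadata dictionary described above, here is a minimal sketch of reading it from one of the artifacts; the field names accessed (`dbt_version`, `invocation_id`, `env`) follow the items listed in the bullet but are illustrative rather than an exact schema reference:

```python
import json

def read_artifact_metadata(path: str) -> dict:
    """Return the common 'metadata' block shared by run_results.json,
    manifest.json, sources.json, and catalog.json (illustrative sketch)."""
    with open(path) as f:
        artifact = json.load(f)
    return artifact.get("metadata", {})

meta = read_artifact_metadata("target/run_results.json")
print(meta.get("dbt_version"))    # version of dbt that wrote the artifact
print(meta.get("invocation_id"))  # shared across artifacts from one invocation
print(meta.get("env"))            # env vars prefixed with DBT_ENV_CUSTOM_ENV_
```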
|
||||
|
||||
### Features
|
||||
- dbt will compare configurations using the un-rendered form of the config block in `dbt_project.yml` ([#2713](https://github.com/fishtown-analytics/dbt/issues/2713), [#2735](https://github.com/fishtown-analytics/dbt/pull/2735))
|
||||
- Added state and defer arguments to the RPC client, matching the CLI ([#2678](https://github.com/fishtown-analytics/dbt/issues/2678), [#2736](https://github.com/fishtown-analytics/dbt/pull/2736))
|
||||
- Added schema and dbt versions to JSON artifacts ([#2670](https://github.com/fishtown-analytics/dbt/issues/2670), [#2767](https://github.com/fishtown-analytics/dbt/pull/2767))
|
||||
- Added ability to snapshot hard-deleted records (opt-in with `invalidate_hard_deletes` config option). ([#249](https://github.com/fishtown-analytics/dbt/issues/249), [#2749](https://github.com/fishtown-analytics/dbt/pull/2749))
|
||||
- Added revival for snapshotting hard-deleted records. ([#2819](https://github.com/fishtown-analytics/dbt/issues/2819), [#2821](https://github.com/fishtown-analytics/dbt/pull/2821))
|
||||
- Improved error messages for YAML selectors ([#2700](https://github.com/fishtown-analytics/dbt/issues/2700), [#2781](https://github.com/fishtown-analytics/dbt/pull/2781))
|
||||
- Save manifest at the same time we save the run_results at the end of a run ([#2765](https://github.com/fishtown-analytics/dbt/issues/2765), [#2799](https://github.com/fishtown-analytics/dbt/pull/2799))
|
||||
- Added dbt_invocation_id for each BigQuery job to enable performance analysis ([#2808](https://github.com/fishtown-analytics/dbt/issues/2808), [#2809](https://github.com/fishtown-analytics/dbt/pull/2809))
|
||||
- Save cli and rpc arguments in run_results.json ([#2510](https://github.com/fishtown-analytics/dbt/issues/2510), [#2813](https://github.com/fishtown-analytics/dbt/pull/2813))
|
||||
- Added `dbt_invocation_id` for each BigQuery job to enable performance analysis ([#2808](https://github.com/fishtown-analytics/dbt/issues/2808), [#2809](https://github.com/fishtown-analytics/dbt/pull/2809))
|
||||
- Added support for BigQuery connections using refresh tokens ([#2344](https://github.com/fishtown-analytics/dbt/issues/2344), [#2805](https://github.com/fishtown-analytics/dbt/pull/2805))
|
||||
|
||||
### Under the hood
|
||||
- Save `manifest.json` at the same time we save the `run_results.json` at the end of a run ([#2765](https://github.com/fishtown-analytics/dbt/issues/2765), [#2799](https://github.com/fishtown-analytics/dbt/pull/2799))
|
||||
- Added strategy-specific validation to improve the relevancy of compilation errors for the `timestamp` and `check` snapshot strategies. ([#2787](https://github.com/fishtown-analytics/dbt/issues/2787), [#2791](https://github.com/fishtown-analytics/dbt/pull/2791))
|
||||
- Changed rpc test timeouts to avoid locally run test failures ([#2803](https://github.com/fishtown-analytics/dbt/issues/2803),[#2804](https://github.com/fishtown-analytics/dbt/pull/2804))
|
||||
- Added a debug_query on the base adapter that will allow plugin authors to create custom debug queries ([#2751](https://github.com/fishtown-analytics/dbt/issues/2751),[#2871](https://github.com/fishtown-analytics/dbt/pull/2817))
|
||||
- Added a `debug_query` on the base adapter that will allow plugin authors to create custom debug queries ([#2751](https://github.com/fishtown-analytics/dbt/issues/2751),[#2871](https://github.com/fishtown-analytics/dbt/pull/2817))
|
||||
|
||||
### Docs
|
||||
- Add select/deselect option in DAG view dropups. ([docs#98](https://github.com/fishtown-analytics/dbt-docs/issues/98), [docs#138](https://github.com/fishtown-analytics/dbt-docs/pull/138))
|
||||
- Fixed issue where sources with tags were not showing up in graph viz ([docs#93](https://github.com/fishtown-analytics/dbt-docs/issues/93), [docs#139](https://github.com/fishtown-analytics/dbt-docs/pull/139))
|
||||
- Use `compiled_sql` instead of `injected_sql` for "Compiled" ([docs#146](https://github.com/fishtown-analytics/dbt-docs/issues/146), [docs#148](https://github.com/fishtown-analytics/dbt-docs/issues/148))
|
||||
|
||||
Contributors:
|
||||
- [@joelluijmes](https://github.com/joelluijmes) ([#2749](https://github.com/fishtown-analytics/dbt/pull/2749), [#2821](https://github.com/fishtown-analytics/dbt/pull/2821))
|
||||
- [@kingfink](https://github.com/kingfink) ([#2791](https://github.com/fishtown-analytics/dbt/pull/2791))
|
||||
- [@zmac12](https://github.com/zmac12) ([#2871](https://github.com/fishtown-analytics/dbt/pull/2817))
|
||||
- [@zmac12](https://github.com/zmac12) ([#2817](https://github.com/fishtown-analytics/dbt/pull/2817))
|
||||
- [@Mr-Nobody99](https://github.com/Mr-Nobody99) ([docs#138](https://github.com/fishtown-analytics/dbt-docs/pull/138))
|
||||
- [@jplynch77](https://github.com/jplynch77) ([docs#139](https://github.com/fishtown-analytics/dbt-docs/pull/139))
|
||||
|
||||
## dbt 0.18.1 (Release TBD)
|
||||
## dbt 0.18.1 (October 13, 2020)
|
||||
|
||||
## dbt 0.18.1rc1 (October 01, 2020)
|
||||
|
||||
|
||||
### Features
|
||||
- Added retry support for the `rateLimitExceeded` error from BigQuery ([#2795](https://github.com/fishtown-analytics/dbt/issues/2795), [#2796](https://github.com/fishtown-analytics/dbt/issues/2796))
|
||||
|
||||
Contributors:
|
||||
- [@championj-foxtel](https://github.com/championj-foxtel) ([#2796](https://github.com/fishtown-analytics/dbt/issues/2796))
|
||||
|
||||
## dbt 0.18.1b3 (September 25, 2020)
|
||||
|
||||
|
||||
### Features
|
||||
- Added 'Last Modified' stat in the Snowflake catalog macro, so it should now be available in the docs. ([#2728](https://github.com/fishtown-analytics/dbt/issues/2728))
|
||||
|
||||
### Fixes
|
||||
- `dbt compile` and `dbt run` failed with `KeyError: 'endpoint_resolver'` when threads > 1 and `method: iam` had been specified in the profiles.yaml ([#2756](https://github.com/fishtown-analytics/dbt/issues/2756), [#2766](https://github.com/fishtown-analytics/dbt/pull/2766))
|
||||
- Fix Redshift adapter to include columns from external tables when using the get_columns_in_relation macro ([#2753](https://github.com/fishtown-analytics/dbt/issues/2753), [#2754](https://github.com/fishtown-analytics/dbt/pull/2754))
|
||||
|
||||
### Under the hood
|
||||
- Require extra `snowflake-connector-python[secure-local-storage]` on all dbt-snowflake installations ([#2779](https://github.com/fishtown-analytics/dbt/issues/2779), [#2789](https://github.com/fishtown-analytics/dbt/pull/2789))
|
||||
|
||||
Contributors:
|
||||
- [@Mr-Nobody99](https://github.com/Mr-Nobody99) ([#2732](https://github.com/fishtown-analytics/dbt/pull/2732))
|
||||
- [@jweibel22](https://github.com/jweibel22) ([#2766](https://github.com/fishtown-analytics/dbt/pull/2766))
|
||||
- [@aiguofer](https://github.com/aiguofer) ([#2754](https://github.com/fishtown-analytics/dbt/pull/2754))
|
||||
|
||||
## dbt 0.18.1b1 (September 17, 2020)
|
||||
|
||||
### Under the hood
|
||||
- If column config says quote, use quoting in SQL for adding a comment. ([#2539](https://github.com/fishtown-analytics/dbt/issues/2539), [#2733](https://github.com/fishtown-analytics/dbt/pull/2733))
|
||||
@@ -33,10 +174,10 @@ Contributors:
|
||||
|
||||
### Features
|
||||
- Specify all three logging levels (`INFO`, `WARNING`, `ERROR`) in result logs for commands `test`, `seed`, `run`, `snapshot` and `source snapshot-freshness` ([#2680](https://github.com/fishtown-analytics/dbt/pull/2680), [#2723](https://github.com/fishtown-analytics/dbt/pull/2723))
|
||||
- Added "reports" ([#2730](https://github.com/fishtown-analytics/dbt/issues/2730), [#2752](https://github.com/fishtown-analytics/dbt/pull/2752))
|
||||
- Added "exposures" ([#2730](https://github.com/fishtown-analytics/dbt/issues/2730), [#2752](https://github.com/fishtown-analytics/dbt/pull/2752), [#2777](https://github.com/fishtown-analytics/dbt/issues/2777))
|
||||
|
||||
### Docs
|
||||
- Add Report nodes ([docs#135](https://github.com/fishtown-analytics/dbt-docs/issues/135), [docs#136](https://github.com/fishtown-analytics/dbt-docs/pull/136))
|
||||
- Add Exposure nodes ([docs#135](https://github.com/fishtown-analytics/dbt-docs/issues/135), [docs#136](https://github.com/fishtown-analytics/dbt-docs/pull/136), [docs#137](https://github.com/fishtown-analytics/dbt-docs/pull/137))
|
||||
|
||||
Contributors:
|
||||
- [@tpilewicz](https://github.com/tpilewicz) ([#2723](https://github.com/fishtown-analytics/dbt/pull/2723))
|
||||
@@ -97,7 +238,6 @@ Contributors:
|
||||
- Add relevance criteria to site search ([docs#113](https://github.com/fishtown-analytics/dbt-docs/pull/113))
|
||||
- Support new selector methods, intersection, and arbitrary parent/child depth in DAG selection syntax ([docs#118](https://github.com/fishtown-analytics/dbt-docs/pull/118))
|
||||
- Revise anonymous event tracking: simpler URL fuzzing; differentiate between Cloud-hosted and non-Cloud docs ([docs#121](https://github.com/fishtown-analytics/dbt-docs/pull/121))
|
||||
|
||||
Contributors:
|
||||
- [@bbhoss](https://github.com/bbhoss) ([#2677](https://github.com/fishtown-analytics/dbt/pull/2677))
|
||||
- [@kconvey](https://github.com/kconvey) ([#2694](https://github.com/fishtown-analytics/dbt/pull/2694), [#2709](https://github.com/fishtown-analytics/dbt/pull/2709), [#2711](https://github.com/fishtown-analytics/dbt/pull/2711))
|
||||
@@ -817,7 +957,6 @@ Thanks for your contributions to dbt!
|
||||
- [@bastienboutonnet](https://github.com/bastienboutonnet) ([#1591](https://github.com/fishtown-analytics/dbt/pull/1591), [#1689](https://github.com/fishtown-analytics/dbt/pull/1689))
|
||||
|
||||
|
||||
|
||||
## dbt 0.14.0 - Wilt Chamberlain (July 10, 2019)
|
||||
|
||||
### Overview
|
||||
|
||||
`Dockerfile`: 51 changes
@@ -1,51 +0,0 @@
|
||||
FROM ubuntu:18.04
|
||||
|
||||
ENV DEBIAN_FRONTEND noninteractive
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get dist-upgrade -y && \
|
||||
apt-get install -y --no-install-recommends \
|
||||
netcat postgresql curl git ssh software-properties-common \
|
||||
make build-essential ca-certificates libpq-dev \
|
||||
libsasl2-dev libsasl2-2 libsasl2-modules-gssapi-mit libyaml-dev \
|
||||
&& \
|
||||
add-apt-repository ppa:deadsnakes/ppa && \
|
||||
apt-get install -y \
|
||||
python python-dev python-pip \
|
||||
python3.6 python3.6-dev python3-pip python3.6-venv \
|
||||
python3.7 python3.7-dev python3.7-venv \
|
||||
python3.8 python3.8-dev python3.8-venv \
|
||||
python3.9 python3.9-dev python3.9-venv && \
|
||||
apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
|
||||
|
||||
ARG DOCKERIZE_VERSION=v0.6.1
|
||||
RUN curl -LO https://github.com/jwilder/dockerize/releases/download/$DOCKERIZE_VERSION/dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz && \
|
||||
tar -C /usr/local/bin -xzvf dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz && \
|
||||
rm dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz
|
||||
|
||||
RUN pip3 install -U "tox==3.14.4" wheel "six>=1.14.0,<1.15.0" "virtualenv==20.0.3" setuptools
|
||||
# tox fails if the 'python' interpreter (python2) doesn't have `tox` installed
|
||||
RUN pip install -U "tox==3.14.4" "six>=1.14.0,<1.15.0" "virtualenv==20.0.3" setuptools
|
||||
|
||||
# These args are passed in via docker-compose, which reads them from the .env file.
|
||||
# On Linux, run `make .env` to create the .env file for the current user.
|
||||
# On MacOS and Windows, these can stay unset.
|
||||
ARG USER_ID
|
||||
ARG GROUP_ID
|
||||
|
||||
RUN if [ ${USER_ID:-0} -ne 0 ] && [ ${GROUP_ID:-0} -ne 0 ]; then \
|
||||
groupadd -g ${GROUP_ID} dbt_test_user && \
|
||||
useradd -m -l -u ${USER_ID} -g ${GROUP_ID} dbt_test_user; \
|
||||
else \
|
||||
useradd -mU -l dbt_test_user; \
|
||||
fi
|
||||
RUN mkdir /usr/app && chown dbt_test_user /usr/app
|
||||
RUN mkdir /home/tox && chown dbt_test_user /home/tox
|
||||
|
||||
WORKDIR /usr/app
|
||||
VOLUME /usr/app
|
||||
|
||||
USER dbt_test_user
|
||||
|
||||
ENV PYTHONIOENCODING=utf-8
|
||||
ENV LANG C.UTF-8
|
||||
`Dockerfile.test` (new file): 74 changes
@@ -0,0 +1,74 @@
|
||||
FROM ubuntu:18.04
|
||||
|
||||
ENV DEBIAN_FRONTEND noninteractive
|
||||
|
||||
RUN apt-get update \
|
||||
&& apt-get dist-upgrade -y \
|
||||
&& apt-get install -y --no-install-recommends \
|
||||
netcat \
|
||||
postgresql \
|
||||
curl \
|
||||
git \
|
||||
ssh \
|
||||
software-properties-common \
|
||||
make \
|
||||
build-essential \
|
||||
ca-certificates \
|
||||
libpq-dev \
|
||||
libsasl2-dev \
|
||||
libsasl2-2 \
|
||||
libsasl2-modules-gssapi-mit \
|
||||
libyaml-dev \
|
||||
unixodbc-dev \
|
||||
&& add-apt-repository ppa:deadsnakes/ppa \
|
||||
&& apt-get install -y \
|
||||
python \
|
||||
python-dev \
|
||||
python-pip \
|
||||
python3.6 \
|
||||
python3.6-dev \
|
||||
python3-pip \
|
||||
python3.6-venv \
|
||||
python3.7 \
|
||||
python3.7-dev \
|
||||
python3.7-venv \
|
||||
python3.8 \
|
||||
python3.8-dev \
|
||||
python3.8-venv \
|
||||
python3.9 \
|
||||
python3.9-dev \
|
||||
python3.9-venv \
|
||||
&& apt-get clean \
|
||||
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
|
||||
|
||||
ARG DOCKERIZE_VERSION=v0.6.1
|
||||
RUN curl -LO https://github.com/jwilder/dockerize/releases/download/$DOCKERIZE_VERSION/dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
|
||||
&& tar -C /usr/local/bin -xzvf dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
|
||||
&& rm dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz
|
||||
|
||||
RUN pip3 install -U "tox==3.14.4" wheel "six>=1.14.0,<1.15.0" "virtualenv==20.0.3" setuptools
|
||||
# tox fails if the 'python' interpreter (python2) doesn't have `tox` installed
|
||||
RUN pip install -U "tox==3.14.4" "six>=1.14.0,<1.15.0" "virtualenv==20.0.3" setuptools
|
||||
|
||||
# These args are passed in via docker-compose, which reads them from the .env file.
|
||||
# On Linux, run `make .env` to create the .env file for the current user.
|
||||
# On MacOS and Windows, these can stay unset.
|
||||
ARG USER_ID
|
||||
ARG GROUP_ID
|
||||
|
||||
RUN if [ ${USER_ID:-0} -ne 0 ] && [ ${GROUP_ID:-0} -ne 0 ]; then \
|
||||
groupadd -g ${GROUP_ID} dbt_test_user && \
|
||||
useradd -m -l -u ${USER_ID} -g ${GROUP_ID} dbt_test_user; \
|
||||
else \
|
||||
useradd -mU -l dbt_test_user; \
|
||||
fi
|
||||
RUN mkdir /usr/app && chown dbt_test_user /usr/app
|
||||
RUN mkdir /home/tox && chown dbt_test_user /home/tox
|
||||
|
||||
WORKDIR /usr/app
|
||||
VOLUME /usr/app
|
||||
|
||||
USER dbt_test_user
|
||||
|
||||
ENV PYTHONIOENCODING=utf-8
|
||||
ENV LANG C.UTF-8
|
||||
`Makefile`: 13 changes
@@ -7,25 +7,30 @@ install:
|
||||
|
||||
test: .env
|
||||
@echo "Full test run starting..."
|
||||
@time docker-compose run test tox
|
||||
@time docker-compose run --rm test tox
|
||||
|
||||
test-unit: .env
|
||||
@echo "Unit test run starting..."
|
||||
@time docker-compose run test tox -e unit-py36,flake8
|
||||
@time docker-compose run --rm test tox -e unit-py36,flake8
|
||||
|
||||
test-integration: .env
|
||||
@echo "Integration test run starting..."
|
||||
@time docker-compose run test tox -e integration-postgres-py36,integration-redshift-py36,integration-snowflake-py36,integration-bigquery-py36
|
||||
@time docker-compose run --rm test tox -e integration-postgres-py36,integration-redshift-py36,integration-snowflake-py36,integration-bigquery-py36
|
||||
|
||||
test-quick: .env
|
||||
@echo "Integration test run starting..."
|
||||
@time docker-compose run test tox -e integration-postgres-py36 -- -x
|
||||
@time docker-compose run --rm test tox -e integration-postgres-py36 -- -x
|
||||
|
||||
# This rule creates a file named .env that is used by docker-compose for passing
|
||||
# the USER_ID and GROUP_ID arguments to the Docker image.
|
||||
.env:
|
||||
@touch .env
|
||||
ifneq ($(OS),Windows_NT)
|
||||
ifneq ($(shell uname -s), Darwin)
|
||||
@echo USER_ID=$(shell id -u) > .env
|
||||
@echo GROUP_ID=$(shell id -g) >> .env
|
||||
endif
|
||||
endif
|
||||
@time docker-compose build
|
||||
|
||||
clean:
|
||||
|
||||
@@ -4,14 +4,15 @@ import os
|
||||
from multiprocessing.synchronize import RLock
|
||||
from threading import get_ident
|
||||
from typing import (
|
||||
Dict, Tuple, Hashable, Optional, ContextManager, List
|
||||
Dict, Tuple, Hashable, Optional, ContextManager, List, Union
|
||||
)
|
||||
|
||||
import agate
|
||||
|
||||
import dbt.exceptions
|
||||
from dbt.contracts.connection import (
|
||||
Connection, Identifier, ConnectionState, AdapterRequiredConfig, LazyHandle
|
||||
Connection, Identifier, ConnectionState,
|
||||
AdapterRequiredConfig, LazyHandle, AdapterResponse
|
||||
)
|
||||
from dbt.contracts.graph.manifest import Manifest
|
||||
from dbt.adapters.base.query_headers import (
|
||||
@@ -290,7 +291,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
||||
@abc.abstractmethod
|
||||
def execute(
|
||||
self, sql: str, auto_begin: bool = False, fetch: bool = False
|
||||
) -> Tuple[str, agate.Table]:
|
||||
) -> Tuple[Union[str, AdapterResponse], agate.Table]:
|
||||
"""Execute the given SQL.
|
||||
|
||||
:param str sql: The sql to execute.
|
||||
@@ -298,7 +299,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
||||
transaction, automatically begin one.
|
||||
:param bool fetch: If set, fetch results.
|
||||
:return: A tuple of the status and the results (empty if fetch=False).
|
||||
:rtype: Tuple[str, agate.Table]
|
||||
:rtype: Tuple[Union[str, AdapterResponse], agate.Table]
|
||||
"""
|
||||
raise dbt.exceptions.NotImplementedException(
|
||||
'`execute` is not implemented for this adapter!'
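
To illustrate the `get_status` to `get_response` change in this hunk, here is a self-contained toy sketch (using `sqlite3` and a simplified stand-in for `AdapterResponse`, not dbt's actual classes) of an `execute` that returns a structured response plus rows instead of a bare status string:

```python
import sqlite3
from dataclasses import dataclass
from typing import Any, List, Tuple, Union

@dataclass
class ToyAdapterResponse:
    """Simplified stand-in for dbt's AdapterResponse."""
    message: str
    rows_affected: int = 0

class ToyConnectionManager:
    def __init__(self, conn: sqlite3.Connection) -> None:
        self.conn = conn

    def get_response(self, cursor: sqlite3.Cursor) -> Union[ToyAdapterResponse, str]:
        # Formerly get_status(); adapters now build a richer response object.
        return ToyAdapterResponse(message="OK", rows_affected=cursor.rowcount)

    def execute(
        self, sql: str, fetch: bool = False
    ) -> Tuple[Union[ToyAdapterResponse, str], List[Any]]:
        cursor = self.conn.cursor()
        cursor.execute(sql)
        response = self.get_response(cursor)
        rows = cursor.fetchall() if fetch else []  # dbt returns an agate.Table here
        return response, rows

conn = sqlite3.connect(":memory:")
conn.execute("create table t (x int)")
mgr = ToyConnectionManager(conn)
resp, _ = mgr.execute("insert into t values (1)")
print(resp)  # ToyAdapterResponse(message='OK', rows_affected=1)
```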
|
||||
|
||||
@@ -35,7 +35,7 @@ from dbt.node_types import NodeType
|
||||
from dbt.logger import GLOBAL_LOGGER as logger
|
||||
from dbt.utils import filter_null_values, executor
|
||||
|
||||
from dbt.adapters.base.connections import Connection
|
||||
from dbt.adapters.base.connections import Connection, AdapterResponse
|
||||
from dbt.adapters.base.meta import AdapterMeta, available
|
||||
from dbt.adapters.base.relation import (
|
||||
ComponentName, BaseRelation, InformationSchema, SchemaSearchMap
|
||||
@@ -213,7 +213,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
@available.parse(lambda *a, **k: ('', empty_table()))
|
||||
def execute(
|
||||
self, sql: str, auto_begin: bool = False, fetch: bool = False
|
||||
) -> Tuple[str, agate.Table]:
|
||||
) -> Tuple[Union[str, AdapterResponse], agate.Table]:
|
||||
"""Execute the given SQL. This is a thin wrapper around
|
||||
ConnectionManager.execute.
|
||||
|
||||
@@ -222,7 +222,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
transaction, automatically begin one.
|
||||
:param bool fetch: If set, fetch results.
|
||||
:return: A tuple of the status and the results (empty if fetch=False).
|
||||
:rtype: Tuple[str, agate.Table]
|
||||
:rtype: Tuple[Union[str, AdapterResponse], agate.Table]
|
||||
"""
|
||||
return self.connections.execute(
|
||||
sql=sql,
|
||||
@@ -230,6 +230,21 @@ class BaseAdapter(metaclass=AdapterMeta):
|
||||
fetch=fetch
|
||||
)
|
||||
|
||||
@available.parse(lambda *a, **k: ('', empty_table()))
|
||||
def get_partitions_metadata(
|
||||
self, table: str
|
||||
) -> Tuple[agate.Table]:
|
||||
"""Obtain partitions metadata for a BigQuery partitioned table.
|
||||
|
||||
:param str table: a partitioned table id, in standard SQL format.
|
||||
:return: a partition metadata tuple, as described in
|
||||
https://cloud.google.com/bigquery/docs/creating-partitioned-tables#getting_partition_metadata_using_meta_tables.
|
||||
:rtype: agate.Table
|
||||
"""
|
||||
return self.connections.get_partitions_metadata(
|
||||
table=table
|
||||
)
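
For a sense of how the returned `agate.Table` might be consumed, here is a small hedged sketch; the column names below (`partition_id`, `row_count`) are assumptions about BigQuery's partition metadata rather than a documented contract, and the table is built by hand instead of coming from `get_partitions_metadata`:

```python
import agate

# Hand-built stand-in for the table that get_partitions_metadata(table) returns.
partitions = agate.Table(
    rows=[("20210101", 1200), ("20210102", 1350)],
    column_names=["partition_id", "row_count"],
)

# Pick the most recent partition by id and report its size.
latest = max(partitions.rows, key=lambda r: r["partition_id"])
print(latest["partition_id"], latest["row_count"])
```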
|
||||
|
||||
###
|
||||
# Methods that should never be overridden
|
||||
###
|
||||
|
||||
@@ -203,7 +203,7 @@ class BaseRelation(FakeAPIObject, Hashable):
|
||||
|
||||
@staticmethod
|
||||
def add_ephemeral_prefix(name: str):
|
||||
return f'__dbt__CTE__{name}'
|
||||
return f'__dbt__cte__{name}'
|
||||
|
||||
@classmethod
|
||||
def create_ephemeral_from_node(
|
||||
|
||||
@@ -7,7 +7,9 @@ from typing_extensions import Protocol
|
||||
|
||||
import agate
|
||||
|
||||
from dbt.contracts.connection import Connection, AdapterRequiredConfig
|
||||
from dbt.contracts.connection import (
|
||||
Connection, AdapterRequiredConfig, AdapterResponse
|
||||
)
|
||||
from dbt.contracts.graph.compiled import (
|
||||
CompiledNode, ManifestNode, NonSourceCompiledNode
|
||||
)
|
||||
@@ -154,7 +156,7 @@ class AdapterProtocol(
|
||||
|
||||
def execute(
|
||||
self, sql: str, auto_begin: bool = False, fetch: bool = False
|
||||
) -> Tuple[str, agate.Table]:
|
||||
) -> Tuple[Union[str, AdapterResponse], agate.Table]:
|
||||
...
|
||||
|
||||
def get_compiler(self) -> Compiler_T:
|
||||
|
||||
@@ -1,13 +1,15 @@
|
||||
import abc
|
||||
import time
|
||||
from typing import List, Optional, Tuple, Any, Iterable, Dict
|
||||
from typing import List, Optional, Tuple, Any, Iterable, Dict, Union
|
||||
|
||||
import agate
|
||||
|
||||
import dbt.clients.agate_helper
|
||||
import dbt.exceptions
|
||||
from dbt.adapters.base import BaseConnectionManager
|
||||
from dbt.contracts.connection import Connection, ConnectionState
|
||||
from dbt.contracts.connection import (
|
||||
Connection, ConnectionState, AdapterResponse
|
||||
)
|
||||
from dbt.logger import GLOBAL_LOGGER as logger
|
||||
from dbt import flags
|
||||
|
||||
@@ -18,7 +20,7 @@ class SQLConnectionManager(BaseConnectionManager):
|
||||
Methods to implement:
|
||||
- exception_handler
|
||||
- cancel
|
||||
- get_status
|
||||
- get_response
|
||||
- open
|
||||
"""
|
||||
@abc.abstractmethod
|
||||
@@ -76,20 +78,19 @@ class SQLConnectionManager(BaseConnectionManager):
|
||||
|
||||
cursor = connection.handle.cursor()
|
||||
cursor.execute(sql, bindings)
|
||||
|
||||
logger.debug(
|
||||
"SQL status: {status} in {elapsed:0.2f} seconds",
|
||||
status=self.get_status(cursor),
|
||||
status=self.get_response(cursor),
|
||||
elapsed=(time.time() - pre)
|
||||
)
|
||||
|
||||
return connection, cursor
|
||||
|
||||
@abc.abstractclassmethod
|
||||
def get_status(cls, cursor: Any) -> str:
|
||||
def get_response(cls, cursor: Any) -> Union[AdapterResponse, str]:
|
||||
"""Get the status of the cursor."""
|
||||
raise dbt.exceptions.NotImplementedException(
|
||||
'`get_status` is not implemented for this adapter!'
|
||||
'`get_response` is not implemented for this adapter!'
|
||||
)
|
||||
|
||||
@classmethod
|
||||
@@ -118,15 +119,15 @@ class SQLConnectionManager(BaseConnectionManager):
|
||||
|
||||
def execute(
|
||||
self, sql: str, auto_begin: bool = False, fetch: bool = False
|
||||
) -> Tuple[str, agate.Table]:
|
||||
) -> Tuple[Union[AdapterResponse, str], agate.Table]:
|
||||
sql = self._add_query_comment(sql)
|
||||
_, cursor = self.add_query(sql, auto_begin)
|
||||
status = self.get_status(cursor)
|
||||
response = self.get_response(cursor)
|
||||
if fetch:
|
||||
table = self.get_result_from_cursor(cursor)
|
||||
else:
|
||||
table = dbt.clients.agate_helper.empty_table()
|
||||
return status, table
|
||||
return response, table
|
||||
|
||||
def add_begin_query(self):
|
||||
return self.add_query('BEGIN', auto_begin=False)
|
||||
|
||||
@@ -52,6 +52,7 @@ def print_compile_stats(stats):
|
||||
NodeType.Operation: 'operation',
|
||||
NodeType.Seed: 'seed file',
|
||||
NodeType.Source: 'source',
|
||||
NodeType.Exposure: 'exposure',
|
||||
}
|
||||
|
||||
results = {k: 0 for k in names.keys()}
|
||||
@@ -81,6 +82,8 @@ def _generate_stats(manifest: Manifest):
|
||||
|
||||
for source in manifest.sources.values():
|
||||
stats[source.resource_type] += 1
|
||||
for exposure in manifest.exposures.values():
|
||||
stats[exposure.resource_type] += 1
|
||||
for macro in manifest.macros.values():
|
||||
stats[macro.resource_type] += 1
|
||||
return stats
|
||||
@@ -148,12 +151,15 @@ class Compiler:
|
||||
make_directory(self.config.target_path)
|
||||
make_directory(self.config.modules_path)
|
||||
|
||||
# creates a ModelContext which is converted to
|
||||
# a dict for jinja rendering of SQL
|
||||
def _create_node_context(
|
||||
self,
|
||||
node: NonSourceCompiledNode,
|
||||
manifest: Manifest,
|
||||
extra_context: Dict[str, Any],
|
||||
) -> Dict[str, Any]:
|
||||
|
||||
context = generate_runtime_model(
|
||||
node, self.config, manifest
|
||||
)
|
||||
@@ -169,35 +175,14 @@ class Compiler:
|
||||
relation_cls = adapter.Relation
|
||||
return relation_cls.add_ephemeral_prefix(name)
|
||||
|
||||
def _get_compiled_model(
|
||||
self,
|
||||
manifest: Manifest,
|
||||
cte_id: str,
|
||||
extra_context: Dict[str, Any],
|
||||
) -> NonSourceCompiledNode:
|
||||
|
||||
if cte_id not in manifest.nodes:
|
||||
raise InternalException(
|
||||
f'During compilation, found a cte reference that could not be '
|
||||
f'resolved: {cte_id}'
|
||||
)
|
||||
cte_model = manifest.nodes[cte_id]
|
||||
if getattr(cte_model, 'compiled', False):
|
||||
assert isinstance(cte_model, tuple(COMPILED_TYPES.values()))
|
||||
return cast(NonSourceCompiledNode, cte_model)
|
||||
elif cte_model.is_ephemeral_model:
|
||||
# this must be some kind of parsed node that we can compile.
|
||||
# we know it's not a parsed source definition
|
||||
assert isinstance(cte_model, tuple(COMPILED_TYPES))
|
||||
# update the node so
|
||||
node = self.compile_node(cte_model, manifest, extra_context)
|
||||
manifest.sync_update_node(node)
|
||||
return node
|
||||
else:
|
||||
raise InternalException(
|
||||
f'During compilation, found an uncompiled cte that '
|
||||
f'was not an ephemeral model: {cte_id}'
|
||||
)
|
||||
def _get_relation_name(self, node: ParsedNode):
|
||||
relation_name = None
|
||||
if (node.resource_type in NodeType.refable() and
|
||||
not node.is_ephemeral_model):
|
||||
adapter = get_adapter(self.config)
|
||||
relation_cls = adapter.Relation
|
||||
relation_name = str(relation_cls.create_from(self.config, node))
|
||||
return relation_name
|
||||
|
||||
def _inject_ctes_into_sql(self, sql: str, ctes: List[InjectedCTE]) -> str:
|
||||
"""
|
||||
@@ -206,11 +191,11 @@ class Compiler:
|
||||
[
|
||||
InjectedCTE(
|
||||
id="cte_id_1",
|
||||
sql="__dbt__CTE__ephemeral as (select * from table)",
|
||||
sql="__dbt__cte__ephemeral as (select * from table)",
|
||||
),
|
||||
InjectedCTE(
|
||||
id="cte_id_2",
|
||||
sql="__dbt__CTE__events as (select id, type from events)",
|
||||
sql="__dbt__cte__events as (select id, type from events)",
|
||||
),
|
||||
]
|
||||
|
||||
@@ -221,8 +206,8 @@ class Compiler:
|
||||
|
||||
This will spit out:
|
||||
|
||||
"with __dbt__CTE__ephemeral as (select * from table),
|
||||
__dbt__CTE__events as (select id, type from events),
|
||||
"with __dbt__cte__ephemeral as (select * from table),
|
||||
__dbt__cte__events as (select id, type from events),
|
||||
with internal_cte as (select * from sessions)
|
||||
select * from internal_cte"
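
A minimal sketch of the splicing this docstring describes; dbt's real implementation (which ends in `return str(parsed)` below) parses the statement so it can merge with an existing `with` clause, while this toy version only handles the plain-select case:

```python
from typing import List, NamedTuple

class InjectedCTE(NamedTuple):
    id: str
    sql: str  # e.g. "__dbt__cte__ephemeral as (select * from table)"

def inject_ctes_into_sql(sql: str, ctes: List[InjectedCTE]) -> str:
    """Prepend the collected ephemeral-model CTEs to the compiled SQL."""
    if not ctes:
        return sql
    with_clause = "with " + ",\n".join(cte.sql for cte in ctes)
    return f"{with_clause}\n{sql}"

print(inject_ctes_into_sql(
    "select id, type from __dbt__cte__events",
    [InjectedCTE(id="model.pkg.events",
                 sql="__dbt__cte__events as (select id, type from events)")],
))
```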
|
||||
|
||||
@@ -260,85 +245,119 @@ class Compiler:
|
||||
|
||||
return str(parsed)
|
||||
|
||||
def _model_prepend_ctes(
|
||||
self,
|
||||
model: NonSourceCompiledNode,
|
||||
prepended_ctes: List[InjectedCTE]
|
||||
) -> NonSourceCompiledNode:
|
||||
if model.compiled_sql is None:
|
||||
raise RuntimeException(
|
||||
'Cannot prepend ctes to an unparsed node', model
|
||||
)
|
||||
injected_sql = self._inject_ctes_into_sql(
|
||||
model.compiled_sql,
|
||||
prepended_ctes,
|
||||
)
|
||||
|
||||
model.extra_ctes_injected = True
|
||||
model.extra_ctes = prepended_ctes
|
||||
model.injected_sql = injected_sql
|
||||
model.validate(model.to_dict())
|
||||
return model
|
||||
|
||||
def _get_dbt_test_name(self) -> str:
|
||||
return 'dbt__CTE__INTERNAL_test'
|
||||
return 'dbt__cte__internal_test'
|
||||
|
||||
# This method is called by the 'compile_node' method. Starting
|
||||
# from the node that it is passed in, it will recursively call
|
||||
# itself using the 'extra_ctes'. The 'ephemeral' models do
|
||||
# not produce SQL that is executed directly, instead they
|
||||
# are rolled up into the models that refer to them by
|
||||
# inserting CTEs into the SQL.
|
||||
def _recursively_prepend_ctes(
|
||||
self,
|
||||
model: NonSourceCompiledNode,
|
||||
manifest: Manifest,
|
||||
extra_context: Dict[str, Any],
|
||||
extra_context: Optional[Dict[str, Any]],
|
||||
) -> Tuple[NonSourceCompiledNode, List[InjectedCTE]]:
|
||||
|
||||
if model.compiled_sql is None:
|
||||
raise RuntimeException(
|
||||
'Cannot inject ctes into an unparsed node', model
|
||||
)
|
||||
if model.extra_ctes_injected:
|
||||
return (model, model.extra_ctes)
|
||||
|
||||
if flags.STRICT_MODE:
|
||||
if not isinstance(model, tuple(COMPILED_TYPES.values())):
|
||||
raise InternalException(
|
||||
f'Bad model type: {type(model)}'
|
||||
)
|
||||
# Just to make it plain that nothing is actually injected for this case
|
||||
if not model.extra_ctes:
|
||||
model.extra_ctes_injected = True
|
||||
manifest.update_node(model)
|
||||
return (model, model.extra_ctes)
|
||||
|
||||
# This stores the ctes which will all be recursively
|
||||
# gathered and then "injected" into the model.
|
||||
prepended_ctes: List[InjectedCTE] = []
|
||||
|
||||
dbt_test_name = self._get_dbt_test_name()
|
||||
|
||||
# extra_ctes are added to the model by
|
||||
# RuntimeRefResolver.create_relation, which adds an
|
||||
# extra_cte for every model relation which is an
|
||||
# ephemeral model.
|
||||
for cte in model.extra_ctes:
|
||||
if cte.id == dbt_test_name:
|
||||
sql = cte.sql
|
||||
else:
|
||||
cte_model = self._get_compiled_model(
|
||||
manifest,
|
||||
cte.id,
|
||||
extra_context,
|
||||
)
|
||||
cte_model, new_prepended_ctes = self._recursively_prepend_ctes(
|
||||
cte_model, manifest, extra_context
|
||||
)
|
||||
if cte.id not in manifest.nodes:
|
||||
raise InternalException(
|
||||
f'During compilation, found a cte reference that '
|
||||
f'could not be resolved: {cte.id}'
|
||||
)
|
||||
cte_model = manifest.nodes[cte.id]
|
||||
|
||||
if not cte_model.is_ephemeral_model:
|
||||
raise InternalException(f'{cte.id} is not ephemeral')
|
||||
|
||||
# This model has already been compiled, so it's been
|
||||
# through here before
|
||||
if getattr(cte_model, 'compiled', False):
|
||||
assert isinstance(cte_model,
|
||||
tuple(COMPILED_TYPES.values()))
|
||||
cte_model = cast(NonSourceCompiledNode, cte_model)
|
||||
new_prepended_ctes = cte_model.extra_ctes
|
||||
|
||||
# if the cte_model isn't compiled, i.e. first time here
|
||||
else:
|
||||
# This is an ephemeral parsed model that we can compile.
|
||||
# Compile and update the node
|
||||
cte_model = self._compile_node(
|
||||
cte_model, manifest, extra_context)
|
||||
# recursively call this method
|
||||
cte_model, new_prepended_ctes = \
|
||||
self._recursively_prepend_ctes(
|
||||
cte_model, manifest, extra_context
|
||||
)
|
||||
# Save compiled SQL file and sync manifest
|
||||
self._write_node(cte_model)
|
||||
manifest.sync_update_node(cte_model)
|
||||
|
||||
_extend_prepended_ctes(prepended_ctes, new_prepended_ctes)
|
||||
|
||||
new_cte_name = self.add_ephemeral_prefix(cte_model.name)
|
||||
sql = f' {new_cte_name} as (\n{cte_model.compiled_sql}\n)'
|
||||
|
||||
_add_prepended_cte(prepended_ctes, InjectedCTE(id=cte.id, sql=sql))
|
||||
|
||||
model = self._model_prepend_ctes(model, prepended_ctes)
|
||||
# We don't save injected_sql into compiled sql for ephemeral models
|
||||
# because it will cause problems with processing of subsequent models.
|
||||
# Ephemeral models do not produce executable SQL of their own.
|
||||
if not model.is_ephemeral_model:
|
||||
injected_sql = self._inject_ctes_into_sql(
|
||||
model.compiled_sql,
|
||||
prepended_ctes,
|
||||
)
|
||||
model.compiled_sql = injected_sql
|
||||
model.extra_ctes_injected = True
|
||||
model.extra_ctes = prepended_ctes
|
||||
model.validate(model.to_dict())
|
||||
|
||||
manifest.update_node(model)
|
||||
|
||||
return model, prepended_ctes
|
||||
|
||||
def _insert_ctes(
|
||||
def _add_ctes(
|
||||
self,
|
||||
compiled_node: NonSourceCompiledNode,
|
||||
manifest: Manifest,
|
||||
extra_context: Dict[str, Any],
|
||||
) -> NonSourceCompiledNode:
|
||||
"""Insert the CTEs for the model."""
|
||||
"""Wrap the data test SQL in a CTE."""
|
||||
|
||||
# for data tests, we need to insert a special CTE at the end of the
|
||||
# list containing the test query, and then have the "real" query be a
|
||||
# select count(*) from that model.
|
||||
# the benefit of doing it this way is that _insert_ctes() can be
|
||||
# rewritten for different adapters to handle databses that don't
|
||||
# the benefit of doing it this way is that _add_ctes() can be
|
||||
# rewritten for different adapters to handle databases that don't
|
||||
# support CTEs, or at least don't have full support.
|
||||
if isinstance(compiled_node, CompiledDataTestNode):
|
||||
# the last prepend (so last in order) should be the data test body.
|
||||
@@ -352,11 +371,12 @@ class Compiler:
|
||||
compiled_node.extra_ctes.append(cte)
|
||||
compiled_node.compiled_sql = f'\nselect count(*) from {name}'
|
||||
|
||||
injected_node, _ = self._recursively_prepend_ctes(
|
||||
compiled_node, manifest, extra_context
|
||||
)
|
||||
return injected_node
|
||||
return compiled_node
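
To make the data-test comment above concrete, a toy sketch (not dbt's actual code path, which goes through `extra_ctes` and `_recursively_prepend_ctes`) of how a test's SQL ends up wrapped so the executed query is just a row count:

```python
def wrap_data_test(test_sql: str, name: str = "dbt__cte__internal_test") -> str:
    """Wrap a data test's SQL in a CTE and count its rows (simplified sketch)."""
    return f"with {name} as (\n{test_sql}\n)\nselect count(*) from {name}"

print(wrap_data_test("select * from orders where amount < 0"))
# with dbt__cte__internal_test as (
# select * from orders where amount < 0
# )
# select count(*) from dbt__cte__internal_test
```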
|
||||
|
||||
# creates a compiled_node from the ManifestNode passed in,
|
||||
# creates a "context" dictionary for jinja rendering,
|
||||
# and then renders the "compiled_sql" using the node, the
|
||||
# raw_sql and the context.
|
||||
def _compile_node(
|
||||
self,
|
||||
node: ManifestNode,
|
||||
@@ -374,7 +394,6 @@ class Compiler:
|
||||
'compiled_sql': None,
|
||||
'extra_ctes_injected': False,
|
||||
'extra_ctes': [],
|
||||
'injected_sql': None,
|
||||
})
|
||||
compiled_node = _compiled_type_for(node).from_dict(data)
|
||||
|
||||
@@ -388,13 +407,17 @@ class Compiler:
|
||||
node,
|
||||
)
|
||||
|
||||
compiled_node.relation_name = self._get_relation_name(node)
|
||||
|
||||
compiled_node.compiled = True
|
||||
|
||||
injected_node = self._insert_ctes(
|
||||
# add ctes for specific test nodes, and also for
|
||||
# possible future use in adapters
|
||||
compiled_node = self._add_ctes(
|
||||
compiled_node, manifest, extra_context
|
||||
)
|
||||
|
||||
return injected_node
|
||||
return compiled_node
|
||||
|
||||
def write_graph_file(self, linker: Linker, manifest: Manifest):
|
||||
filename = graph_file_name
|
||||
@@ -426,9 +449,9 @@ class Compiler:
|
||||
linker.add_node(source.unique_id)
|
||||
for node in manifest.nodes.values():
|
||||
self.link_node(linker, node, manifest)
|
||||
for report in manifest.reports.values():
|
||||
self.link_node(linker, report, manifest)
|
||||
# linker.add_node(report.unique_id)
|
||||
for exposure in manifest.exposures.values():
|
||||
self.link_node(linker, exposure, manifest)
|
||||
# linker.add_node(exposure.unique_id)
|
||||
|
||||
cycle = linker.find_cycles()
|
||||
|
||||
@@ -449,26 +472,26 @@ class Compiler:
|
||||
|
||||
return Graph(linker.graph)
|
||||
|
||||
# writes the "compiled_sql" into the target/compiled directory
|
||||
def _write_node(self, node: NonSourceCompiledNode) -> ManifestNode:
|
||||
if not _is_writable(node):
|
||||
if (not node.extra_ctes_injected or
|
||||
node.resource_type == NodeType.Snapshot):
|
||||
return node
|
||||
logger.debug(f'Writing injected SQL for node "{node.unique_id}"')
|
||||
|
||||
if node.injected_sql is None:
|
||||
# this should not really happen, but it'd be a shame to crash
|
||||
# over it
|
||||
logger.error(
|
||||
f'Compiled node "{node.unique_id}" had no injected_sql, '
|
||||
'cannot write sql!'
|
||||
)
|
||||
else:
|
||||
if node.compiled_sql:
|
||||
node.build_path = node.write_node(
|
||||
self.config.target_path,
|
||||
'compiled',
|
||||
node.injected_sql
|
||||
node.compiled_sql
|
||||
)
|
||||
return node
|
||||
|
||||
# This is the main entry point into this code. It's called by
# CompileRunner.compile, GenericRPCRunner.compile, and
# RunTask.get_hook_sql. It calls '_compile_node' to convert
# the node into a compiled node, and then calls the
# recursive method to "prepend" the ctes.
def compile_node(
self,
node: ManifestNode,
@@ -478,16 +501,9 @@ class Compiler:
) -> NonSourceCompiledNode:
node = self._compile_node(node, manifest, extra_context)

if write and _is_writable(node):
node, _ = self._recursively_prepend_ctes(
node, manifest, extra_context
)
if write:
self._write_node(node)
return node
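A rough sketch of calling the new entry point from a runner (objects assumed to exist; the signature is abbreviated):

# CTE prepending now happens inside compile_node itself
compiled = compiler.compile_node(node, manifest, extra_context, write=True)
# compiled.compiled_sql already contains the injected CTEs for
# non-ephemeral nodes; there is no separate injected_sql field anymore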
def _is_writable(node):
|
||||
if not node.injected_sql:
|
||||
return False
|
||||
|
||||
if node.resource_type == NodeType.Snapshot:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@@ -81,6 +81,8 @@ def read_user_config(directory: str) -> UserConfig:
|
||||
return UserConfig()
|
||||
|
||||
|
||||
# The Profile class is included in RuntimeConfig, so any attribute
|
||||
# additions must also be set where the RuntimeConfig class is created
|
||||
@dataclass
|
||||
class Profile(HasCredentials):
|
||||
profile_name: str
|
||||
|
||||
@@ -25,6 +25,7 @@ from dbt.semver import versions_compatible
|
||||
from dbt.version import get_installed_version
|
||||
from dbt.utils import MultiDict
|
||||
from dbt.node_types import NodeType
|
||||
from dbt.config.selectors import SelectorDict
|
||||
|
||||
from dbt.contracts.project import (
|
||||
Project as ProjectContract,
|
||||
@@ -370,6 +371,12 @@ class PartialProject(RenderComponents):
|
||||
|
||||
packages = package_config_from_data(rendered.packages_dict)
|
||||
selectors = selector_config_from_data(rendered.selectors_dict)
|
||||
manifest_selectors: Dict[str, Any] = {}
|
||||
if rendered.selectors_dict and rendered.selectors_dict['selectors']:
|
||||
# this is a dict with a single key 'selectors' pointing to a list
|
||||
# of dicts.
|
||||
manifest_selectors = SelectorDict.parse_from_selectors_list(
|
||||
rendered.selectors_dict['selectors'])
|
||||
|
||||
project = Project(
|
||||
project_name=name,
|
||||
@@ -396,6 +403,7 @@ class PartialProject(RenderComponents):
|
||||
snapshots=snapshots,
|
||||
dbt_version=dbt_version,
|
||||
packages=packages,
|
||||
manifest_selectors=manifest_selectors,
|
||||
selectors=selectors,
|
||||
query_comment=query_comment,
|
||||
sources=sources,
|
||||
@@ -458,6 +466,7 @@ class PartialProject(RenderComponents):
|
||||
|
||||
class VarProvider:
|
||||
"""Var providers are tied to a particular Project."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
vars: Dict[str, Dict[str, Any]]
|
||||
@@ -476,6 +485,8 @@ class VarProvider:
|
||||
return self.vars
|
||||
|
||||
|
||||
# The Project class is included in RuntimeConfig, so any attribute
|
||||
# additions must also be set where the RuntimeConfig class is created
|
||||
@dataclass
|
||||
class Project:
|
||||
project_name: str
|
||||
@@ -504,6 +515,7 @@ class Project:
|
||||
vars: VarProvider
|
||||
dbt_version: List[VersionSpecifier]
|
||||
packages: Dict[str, Any]
|
||||
manifest_selectors: Dict[str, Any]
|
||||
selectors: SelectorConfig
|
||||
query_comment: QueryComment
|
||||
config_version: int
|
||||
|
||||
@@ -106,6 +106,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
|
||||
snapshots=project.snapshots,
|
||||
dbt_version=project.dbt_version,
|
||||
packages=project.packages,
|
||||
manifest_selectors=project.manifest_selectors,
|
||||
selectors=project.selectors,
|
||||
query_comment=project.query_comment,
|
||||
sources=project.sources,
|
||||
@@ -483,6 +484,7 @@ class UnsetProfileConfig(RuntimeConfig):
|
||||
snapshots=project.snapshots,
|
||||
dbt_version=project.dbt_version,
|
||||
packages=project.packages,
|
||||
manifest_selectors=project.manifest_selectors,
|
||||
selectors=project.selectors,
|
||||
query_comment=project.query_comment,
|
||||
sources=project.sources,
|
||||
|
||||
@@ -15,6 +15,7 @@ from dbt.clients.yaml_helper import load_yaml_text
|
||||
from dbt.contracts.selection import SelectorFile
|
||||
from dbt.exceptions import DbtSelectorsError, RuntimeException
|
||||
from dbt.graph import parse_from_selectors_definition, SelectionSpec
|
||||
from dbt.graph.selector_spec import SelectionCriteria
|
||||
|
||||
MALFORMED_SELECTOR_ERROR = """\
|
||||
The selectors.yml file in this project is malformed. Please double check
|
||||
@@ -113,3 +114,67 @@ def selector_config_from_data(
|
||||
result_type='invalid_selector',
|
||||
) from e
|
||||
return selectors
|
||||
|
||||
|
||||
# These utilities normalize the dictionary created from selectors.yml,
# turning cli-string format entries into fully specified dictionary entries.
# The logic parallels dbt/graph/cli.py, so changes made there may also be
# needed here; ideally the two flows would eventually be combined into one.
|
||||
class SelectorDict:
|
||||
|
||||
@classmethod
|
||||
def parse_dict_definition(cls, definition):
|
||||
key = list(definition)[0]
|
||||
value = definition[key]
|
||||
if isinstance(value, list):
|
||||
new_values = []
|
||||
for sel_def in value:
|
||||
new_value = cls.parse_from_definition(sel_def)
|
||||
new_values.append(new_value)
|
||||
value = new_values
|
||||
if key == 'exclude':
|
||||
definition = {key: value}
|
||||
elif len(definition) == 1:
|
||||
definition = {'method': key, 'value': value}
|
||||
return definition
|
||||
|
||||
@classmethod
|
||||
def parse_a_definition(cls, def_type, definition):
|
||||
# this definition must be a list
|
||||
new_dict = {def_type: []}
|
||||
for sel_def in definition[def_type]:
|
||||
if isinstance(sel_def, dict):
|
||||
sel_def = cls.parse_from_definition(sel_def)
|
||||
new_dict[def_type].append(sel_def)
|
||||
elif isinstance(sel_def, str):
|
||||
sel_def = SelectionCriteria.dict_from_single_spec(sel_def)
|
||||
new_dict[def_type].append(sel_def)
|
||||
else:
|
||||
new_dict[def_type].append(sel_def)
|
||||
return new_dict
|
||||
|
||||
@classmethod
|
||||
def parse_from_definition(cls, definition):
|
||||
if isinstance(definition, str):
|
||||
definition = SelectionCriteria.dict_from_single_spec(definition)
|
||||
elif 'union' in definition:
|
||||
definition = cls.parse_a_definition('union', definition)
|
||||
elif 'intersection' in definition:
|
||||
definition = cls.parse_a_definition('intersection', definition)
|
||||
elif isinstance(definition, dict):
|
||||
definition = cls.parse_dict_definition(definition)
|
||||
return definition
|
||||
|
||||
# This is the normal entrypoint of this code. Give it the
# list of selectors generated from the selectors.yml file.
@classmethod
def parse_from_selectors_list(cls, selectors):
selector_dict = {}
for selector in selectors:
sel_name = selector['name']
selector_dict[sel_name] = selector
definition = cls.parse_from_definition(selector['definition'])
selector_dict[sel_name]['definition'] = definition
return selector_dict
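A hedged usage sketch for the new SelectorDict helper; the selector entries are invented:

from dbt.config.selectors import SelectorDict

selectors = [
    {'name': 'nightly', 'definition': 'tag:nightly'},
    {'name': 'nightly_tables', 'definition': {
        'intersection': ['tag:nightly', 'config.materialized:table'],
    }},
]
parsed = SelectorDict.parse_from_selectors_list(selectors)
# parsed['nightly']['definition'] is normalized to roughly
# {'method': 'tag', 'value': 'nightly'}, and each member of the
# intersection list is normalized the same way.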
@@ -18,6 +18,7 @@ import yaml
|
||||
# approaches which will extend well to potentially many modules
|
||||
import pytz
|
||||
import datetime
|
||||
import re
|
||||
|
||||
|
||||
def get_pytz_module_context() -> Dict[str, Any]:
|
||||
@@ -42,10 +43,19 @@ def get_datetime_module_context() -> Dict[str, Any]:
|
||||
}
|
||||
|
||||
|
||||
def get_re_module_context() -> Dict[str, Any]:
context_exports = re.__all__

return {
name: getattr(re, name) for name in context_exports
}


def get_context_modules() -> Dict[str, Dict[str, Any]]:
return {
'pytz': get_pytz_module_context(),
'datetime': get_datetime_module_context(),
're': get_re_module_context(),
}
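A small check of what the new entry exposes, plus the assumed Jinja-side spelling (matching how the existing pytz and datetime modules are reached):

# Python-side: the context entry is just re.__all__ re-exported
import re
re_ctx = {name: getattr(re, name) for name in re.__all__}
assert re_ctx['search'] is re.search

# Jinja-side (assumed spelling):
#   {% set hit = modules.re.search('[0-9]+', some_string) %}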
@@ -18,6 +18,7 @@ from .context_config import ContextConfig
|
||||
from .macros import MacroNamespaceBuilder, MacroNamespace
|
||||
from .manifest import ManifestContext
|
||||
from dbt.contracts.graph.manifest import Manifest, Disabled
|
||||
from dbt.contracts.connection import AdapterResponse
|
||||
from dbt.contracts.graph.compiled import (
|
||||
CompiledResource,
|
||||
CompiledSeedNode,
|
||||
@@ -25,7 +26,7 @@ from dbt.contracts.graph.compiled import (
|
||||
)
|
||||
from dbt.contracts.graph.parsed import (
|
||||
ParsedMacro,
|
||||
ParsedReport,
|
||||
ParsedExposure,
|
||||
ParsedSeedNode,
|
||||
ParsedSourceDefinition,
|
||||
)
|
||||
@@ -83,6 +84,7 @@ class BaseDatabaseWrapper:
|
||||
Wrapper for runtime database interaction. Applies the runtime quote policy
|
||||
via a relation proxy.
|
||||
"""
|
||||
|
||||
def __init__(self, adapter, namespace: MacroNamespace):
|
||||
self._adapter = adapter
|
||||
self.Relation = RelationProxy(adapter)
|
||||
@@ -379,6 +381,7 @@ class ParseDatabaseWrapper(BaseDatabaseWrapper):
|
||||
"""The parser subclass of the database wrapper applies any explicit
|
||||
parse-time overrides.
|
||||
"""
|
||||
|
||||
def __getattr__(self, name):
|
||||
override = (name in self._adapter._available_ and
|
||||
name in self._adapter._parse_replacements_)
|
||||
@@ -399,6 +402,7 @@ class RuntimeDatabaseWrapper(BaseDatabaseWrapper):
|
||||
"""The runtime database wrapper exposes everything the adapter marks
|
||||
available.
|
||||
"""
|
||||
|
||||
def __getattr__(self, name):
|
||||
if name in self._adapter._available_:
|
||||
return getattr(self._adapter, name)
|
||||
@@ -660,18 +664,33 @@ class ProviderContext(ManifestContext):
|
||||
|
||||
@contextmember
|
||||
def store_result(
|
||||
self, name: str, status: Any, agate_table: Optional[agate.Table] = None
|
||||
self, name: str,
|
||||
response: Any,
|
||||
agate_table: Optional[agate.Table] = None
|
||||
) -> str:
|
||||
if agate_table is None:
|
||||
agate_table = agate_helper.empty_table()
|
||||
|
||||
self.sql_results[name] = AttrDict({
|
||||
'status': status,
|
||||
'response': response,
|
||||
'data': agate_helper.as_matrix(agate_table),
|
||||
'table': agate_table
|
||||
})
|
||||
return ''
|
||||
|
||||
@contextmember
def store_raw_result(
self,
name: str,
message=Optional[str],
code=Optional[str],
rows_affected=Optional[str],
agate_table: Optional[agate.Table] = None
) -> str:
response = AdapterResponse(
_message=message, code=code, rows_affected=rows_affected)
return self.store_result(name, response, agate_table)
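A hedged sketch of how a caller is expected to use the renamed keyword and the new raw variant (the way the context members are passed in here is invented; values are illustrative):

def run_and_store(adapter, store_result, store_raw_result, sql):
    response, table = adapter.execute(sql, fetch=True)   # (AdapterResponse, agate.Table)
    store_result('my_query', response=response, agate_table=table)
    # hand-built results (e.g. seeds) go through the raw variant instead
    store_raw_result('main', message='INSERT 42', code='INSERT', rows_affected=42)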
@contextproperty
|
||||
def validation(self):
|
||||
def validate_any(*args) -> Callable[[T], None]:
|
||||
@@ -1179,6 +1198,7 @@ class MacroContext(ProviderContext):
|
||||
- 'schema' does not use any 'model' information
|
||||
- they can't be configured with config() directives
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
model: ParsedMacro,
|
||||
@@ -1217,7 +1237,9 @@ class ModelContext(ProviderContext):
|
||||
|
||||
@contextproperty
|
||||
def sql(self) -> Optional[str]:
|
||||
return getattr(self.model, 'injected_sql', None)
|
||||
if getattr(self.model, 'extra_ctes_injected', None):
|
||||
return self.model.compiled_sql
|
||||
return None
|
||||
|
||||
@contextproperty
|
||||
def database(self) -> str:
|
||||
@@ -1325,7 +1347,7 @@ def generate_runtime_macro(
|
||||
return ctx.to_dict()
|
||||
|
||||
|
||||
class ReportRefResolver(BaseResolver):
|
||||
class ExposureRefResolver(BaseResolver):
|
||||
def __call__(self, *args) -> str:
|
||||
if len(args) not in (1, 2):
|
||||
ref_invalid_args(self.model, args)
|
||||
@@ -1333,7 +1355,7 @@ class ReportRefResolver(BaseResolver):
|
||||
return ''
|
||||
|
||||
|
||||
class ReportSourceResolver(BaseResolver):
|
||||
class ExposureSourceResolver(BaseResolver):
|
||||
def __call__(self, *args) -> str:
|
||||
if len(args) != 2:
|
||||
raise_compiler_error(
|
||||
@@ -1344,23 +1366,23 @@ class ReportSourceResolver(BaseResolver):
|
||||
return ''
|
||||
|
||||
|
||||
def generate_parse_report(
|
||||
report: ParsedReport,
|
||||
def generate_parse_exposure(
|
||||
exposure: ParsedExposure,
|
||||
config: RuntimeConfig,
|
||||
manifest: Manifest,
|
||||
package_name: str,
|
||||
) -> Dict[str, Any]:
|
||||
project = config.load_dependencies()[package_name]
|
||||
return {
|
||||
'ref': ReportRefResolver(
|
||||
'ref': ExposureRefResolver(
|
||||
None,
|
||||
report,
|
||||
exposure,
|
||||
project,
|
||||
manifest,
|
||||
),
|
||||
'source': ReportSourceResolver(
|
||||
'source': ExposureSourceResolver(
|
||||
None,
|
||||
report,
|
||||
exposure,
|
||||
project,
|
||||
manifest,
|
||||
)
|
||||
|
||||
@@ -22,6 +22,16 @@ Identifier = NewType('Identifier', str)
|
||||
register_pattern(Identifier, r'^[A-Za-z_][A-Za-z0-9_]+$')
|
||||
|
||||
|
||||
@dataclass
class AdapterResponse(JsonSchemaMixin):
_message: str
code: Optional[str] = None
rows_affected: Optional[int] = None

def __str__(self):
return self._message
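A quick, hedged example of the new response object (values invented):

from dbt.contracts.connection import AdapterResponse

response = AdapterResponse(_message='INSERT 42', code='INSERT', rows_affected=42)
assert str(response) == 'INSERT 42'                  # what log lines show
assert response.to_dict()['rows_affected'] == 42     # what feeds adapter_response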
class ConnectionState(StrEnum):
|
||||
INIT = 'init'
|
||||
OPEN = 'open'
|
||||
@@ -85,6 +95,7 @@ class LazyHandle:
|
||||
"""Opener must be a callable that takes a Connection object and opens the
|
||||
connection, updating the handle on the Connection.
|
||||
"""
|
||||
|
||||
def __init__(self, opener: Callable[[Connection], Connection]):
|
||||
self.opener = opener
|
||||
|
||||
|
||||
@@ -121,7 +121,7 @@ class SourceFile(JsonSchemaMixin):
|
||||
docs: List[str] = field(default_factory=list)
|
||||
macros: List[str] = field(default_factory=list)
|
||||
sources: List[str] = field(default_factory=list)
|
||||
reports: List[str] = field(default_factory=list)
|
||||
exposures: List[str] = field(default_factory=list)
|
||||
# any node patches in this file. The entries are names, not unique ids!
|
||||
patches: List[str] = field(default_factory=list)
|
||||
# any macro patches in this file. The entries are package, name pairs.
|
||||
@@ -156,7 +156,7 @@ class SourceFile(JsonSchemaMixin):
|
||||
@classmethod
|
||||
def big_seed(cls, path: FilePath) -> 'SourceFile':
|
||||
"""Parse seeds over the size limit with just the path"""
|
||||
self = cls(path=path, checksum=FileHash.path(path.absolute_path))
|
||||
self = cls(path=path, checksum=FileHash.path(path.original_file_path))
|
||||
self.contents = ''
|
||||
return self
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ from dbt.contracts.graph.parsed import (
|
||||
ParsedDataTestNode,
|
||||
ParsedHookNode,
|
||||
ParsedModelNode,
|
||||
ParsedReport,
|
||||
ParsedExposure,
|
||||
ParsedResource,
|
||||
ParsedRPCNode,
|
||||
ParsedSchemaTestNode,
|
||||
@@ -42,7 +42,7 @@ class CompiledNode(ParsedNode, CompiledNodeMixin):
|
||||
compiled_sql: Optional[str] = None
|
||||
extra_ctes_injected: bool = False
|
||||
extra_ctes: List[InjectedCTE] = field(default_factory=list)
|
||||
injected_sql: Optional[str] = None
|
||||
relation_name: Optional[str] = None
|
||||
|
||||
def set_cte(self, cte_id: str, sql: str):
|
||||
"""This is the equivalent of what self.extra_ctes[cte_id] = sql would
|
||||
@@ -219,8 +219,8 @@ CompileResultNode = Union[
|
||||
ParsedSourceDefinition,
|
||||
]
|
||||
|
||||
# anything that participates in the graph: sources, reports, manifest nodes
|
||||
# anything that participates in the graph: sources, exposures, manifest nodes
|
||||
GraphMemberNode = Union[
|
||||
CompileResultNode,
|
||||
ParsedReport,
|
||||
ParsedExposure,
|
||||
]
|
||||
|
||||
@@ -15,7 +15,7 @@ from dbt.contracts.graph.compiled import (
|
||||
)
|
||||
from dbt.contracts.graph.parsed import (
|
||||
ParsedMacro, ParsedDocumentation, ParsedNodePatch, ParsedMacroPatch,
|
||||
ParsedSourceDefinition, ParsedReport
|
||||
ParsedSourceDefinition, ParsedExposure
|
||||
)
|
||||
from dbt.contracts.files import SourceFile
|
||||
from dbt.contracts.util import (
|
||||
@@ -234,7 +234,8 @@ def build_edges(nodes: List[ManifestNode]):
for node in nodes:
backward_edges[node.unique_id] = node.depends_on_nodes[:]
for unique_id in node.depends_on_nodes:
forward_edges[unique_id].append(node.unique_id)
if unique_id in forward_edges.keys():
forward_edges[unique_id].append(node.unique_id)
return _sort_values(forward_edges), _sort_values(backward_edges)
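A hedged restatement of why the membership check matters: depends_on can name a unique_id that is not itself among the nodes being linked, which previously raised a KeyError.

from typing import Dict, List

def build_edges_sketch(deps: Dict[str, List[str]]):
    forward: Dict[str, List[str]] = {uid: [] for uid in deps}
    backward: Dict[str, List[str]] = {}
    for uid, parents in deps.items():
        backward[uid] = list(parents)
        for parent in parents:
            if parent in forward:   # the guard added above
                forward[parent].append(uid)
    return forward, backward

fwd, _ = build_edges_sketch({'model.a': [], 'exposure.e': ['model.a', 'source.x']})
assert fwd['model.a'] == ['exposure.e'] and 'source.x' not in fwd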
@@ -431,11 +432,15 @@ def _update_into(dest: MutableMapping[str, T], new_item: T):
|
||||
class Manifest:
|
||||
"""The manifest for the full graph, after parsing and during compilation.
|
||||
"""
|
||||
# These attributes are both positional and by keyword. If an attribute
|
||||
# is added it must all be added in the __reduce_ex__ method in the
|
||||
# args tuple in the right position.
|
||||
nodes: MutableMapping[str, ManifestNode]
|
||||
sources: MutableMapping[str, ParsedSourceDefinition]
|
||||
macros: MutableMapping[str, ParsedMacro]
|
||||
docs: MutableMapping[str, ParsedDocumentation]
|
||||
reports: MutableMapping[str, ParsedReport]
|
||||
exposures: MutableMapping[str, ParsedExposure]
|
||||
selectors: MutableMapping[str, Any]
|
||||
disabled: List[CompileResultNode]
|
||||
files: MutableMapping[str, SourceFile]
|
||||
metadata: ManifestMetadata = field(default_factory=ManifestMetadata)
|
||||
@@ -460,7 +465,8 @@ class Manifest:
|
||||
sources={},
|
||||
macros=macros,
|
||||
docs={},
|
||||
reports={},
|
||||
exposures={},
|
||||
selectors={},
|
||||
disabled=[],
|
||||
files=files,
|
||||
)
|
||||
@@ -485,8 +491,8 @@ class Manifest:
|
||||
_update_into(self.nodes, new_node)
|
||||
return new_node
|
||||
|
||||
def update_report(self, new_report: ParsedReport):
|
||||
_update_into(self.reports, new_report)
|
||||
def update_exposure(self, new_exposure: ParsedExposure):
|
||||
_update_into(self.exposures, new_exposure)
|
||||
|
||||
def update_node(self, new_node: ManifestNode):
|
||||
_update_into(self.nodes, new_node)
|
||||
@@ -729,9 +735,10 @@ class Manifest:
|
||||
sources={k: _deepcopy(v) for k, v in self.sources.items()},
|
||||
macros={k: _deepcopy(v) for k, v in self.macros.items()},
|
||||
docs={k: _deepcopy(v) for k, v in self.docs.items()},
|
||||
reports={k: _deepcopy(v) for k, v in self.reports.items()},
|
||||
disabled=[_deepcopy(n) for n in self.disabled],
|
||||
exposures={k: _deepcopy(v) for k, v in self.exposures.items()},
|
||||
selectors=self.root_project.manifest_selectors,
|
||||
metadata=self.metadata,
|
||||
disabled=[_deepcopy(n) for n in self.disabled],
|
||||
files={k: _deepcopy(v) for k, v in self.files.items()},
|
||||
)
|
||||
|
||||
@@ -739,7 +746,7 @@ class Manifest:
|
||||
edge_members = list(chain(
|
||||
self.nodes.values(),
|
||||
self.sources.values(),
|
||||
self.reports.values(),
|
||||
self.exposures.values(),
|
||||
))
|
||||
forward_edges, backward_edges = build_edges(edge_members)
|
||||
|
||||
@@ -748,7 +755,8 @@ class Manifest:
|
||||
sources=self.sources,
|
||||
macros=self.macros,
|
||||
docs=self.docs,
|
||||
reports=self.reports,
|
||||
exposures=self.exposures,
|
||||
selectors=self.selectors,
|
||||
metadata=self.metadata,
|
||||
disabled=self.disabled,
|
||||
child_map=forward_edges,
|
||||
@@ -768,8 +776,8 @@ class Manifest:
|
||||
return self.nodes[unique_id]
|
||||
elif unique_id in self.sources:
|
||||
return self.sources[unique_id]
|
||||
elif unique_id in self.reports:
|
||||
return self.reports[unique_id]
|
||||
elif unique_id in self.exposures:
|
||||
return self.exposures[unique_id]
|
||||
else:
|
||||
# something terrible has happened
|
||||
raise dbt.exceptions.InternalException(
|
||||
@@ -880,6 +888,7 @@ class Manifest:
|
||||
|
||||
def merge_from_artifact(
|
||||
self,
|
||||
adapter,
|
||||
other: 'WritableManifest',
|
||||
selected: AbstractSet[UniqueID],
|
||||
) -> None:
|
||||
@@ -891,10 +900,14 @@ class Manifest:
|
||||
refables = set(NodeType.refable())
|
||||
merged = set()
|
||||
for unique_id, node in other.nodes.items():
|
||||
if (
|
||||
current = self.nodes.get(unique_id)
|
||||
if current and (
|
||||
node.resource_type in refables and
|
||||
not node.is_ephemeral and
|
||||
unique_id not in selected
|
||||
unique_id not in selected and
|
||||
not adapter.get_relation(
|
||||
current.database, current.schema, current.identifier
|
||||
)
|
||||
):
|
||||
merged.add(unique_id)
|
||||
self.nodes[unique_id] = node.replace(deferred=True)
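A hedged restatement of the new defer rule as a standalone predicate (argument shapes assumed): a node from the state artifact only replaces the local one when it is refable, non-ephemeral, unselected, and the adapter cannot already find a relation for the current node.

def should_take_from_state(node, current, selected, adapter, refables):
    return (
        current is not None
        and node.resource_type in refables
        and not node.is_ephemeral
        and node.unique_id not in selected
        and not adapter.get_relation(
            current.database, current.schema, current.identifier
        )
    )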
|
||||
@@ -905,14 +918,21 @@ class Manifest:
|
||||
f'Merged {len(merged)} items from state (sample: {sample})'
|
||||
)
|
||||
|
||||
# provide support for copy.deepcopy() - we jsut need to avoid the lock!
|
||||
# Provide support for copy.deepcopy() - we just need to avoid the lock!
|
||||
# pickle and deepcopy use this. It returns a callable object used to
|
||||
# create the initial version of the object and a tuple of arguments
|
||||
# for the object, i.e. the Manifest.
|
||||
# The order of the arguments must match the order of the attributes
|
||||
# in the Manifest class declaration, because they are used as
|
||||
# positional arguments to construct a Manifest.
|
||||
def __reduce_ex__(self, protocol):
|
||||
args = (
|
||||
self.nodes,
|
||||
self.sources,
|
||||
self.macros,
|
||||
self.docs,
|
||||
self.reports,
|
||||
self.exposures,
|
||||
self.selectors,
|
||||
self.disabled,
|
||||
self.files,
|
||||
self.metadata,
|
||||
@@ -947,9 +967,14 @@ class WritableManifest(ArtifactMixin):
|
||||
'The docs defined in the dbt project and its dependencies'
|
||||
))
|
||||
)
|
||||
reports: Mapping[UniqueID, ParsedReport] = field(
|
||||
exposures: Mapping[UniqueID, ParsedExposure] = field(
|
||||
metadata=dict(description=(
|
||||
'The reports defined in the dbt project and its dependencies'
|
||||
'The exposures defined in the dbt project and its dependencies'
|
||||
))
|
||||
)
|
||||
selectors: Mapping[UniqueID, Any] = field(
|
||||
metadata=dict(description=(
|
||||
'The selectors defined in selectors.yml'
|
||||
))
|
||||
)
|
||||
disabled: Optional[List[CompileResultNode]] = field(metadata=dict(
|
||||
|
||||
@@ -3,7 +3,7 @@ from enum import Enum
|
||||
from itertools import chain
|
||||
from typing import (
|
||||
Any, List, Optional, Dict, MutableMapping, Union, Type, NewType, Tuple,
|
||||
TypeVar, Callable
|
||||
TypeVar, Callable, cast, Hashable
|
||||
)
|
||||
|
||||
# TODO: patch+upgrade hologram to avoid this jsonschema import
|
||||
@@ -450,6 +450,7 @@ class SeedConfig(NodeConfig):
|
||||
|
||||
@dataclass
|
||||
class TestConfig(NodeConfig):
|
||||
materialized: str = 'test'
|
||||
severity: Severity = Severity('ERROR')
|
||||
|
||||
|
||||
@@ -492,7 +493,8 @@ class SnapshotWrapper(JsonSchemaMixin):
|
||||
to_validate = config
|
||||
|
||||
else:
|
||||
schema = _validate_schema(cls)
|
||||
h_cls = cast(Hashable, cls)
|
||||
schema = _validate_schema(h_cls)
|
||||
to_validate = data
|
||||
|
||||
validator = jsonschema.Draft7Validator(schema)
|
||||
|
||||
@@ -23,7 +23,7 @@ from dbt.contracts.graph.unparsed import (
|
||||
UnparsedBaseNode, FreshnessThreshold, ExternalTable,
|
||||
HasYamlMetadata, MacroArgument, UnparsedSourceDefinition,
|
||||
UnparsedSourceTableDefinition, UnparsedColumn, TestDef,
|
||||
ReportOwner, ExposureType, MaturityType
|
||||
ExposureOwner, ExposureType, MaturityType
|
||||
)
|
||||
from dbt.contracts.util import Replaceable, AdditionalPropertiesMixin
|
||||
from dbt.exceptions import warn_or_error
|
||||
@@ -555,6 +555,7 @@ class ParsedSourceDefinition(
|
||||
config: SourceConfig = field(default_factory=SourceConfig)
|
||||
patch_path: Optional[Path] = None
|
||||
unrendered_config: Dict[str, Any] = field(default_factory=dict)
|
||||
relation_name: Optional[str] = None
|
||||
|
||||
def same_database_representation(
|
||||
self, other: 'ParsedSourceDefinition'
|
||||
@@ -648,14 +649,14 @@ class ParsedSourceDefinition(
|
||||
|
||||
|
||||
@dataclass
|
||||
class ParsedReport(UnparsedBaseNode, HasUniqueID, HasFqn):
|
||||
class ParsedExposure(UnparsedBaseNode, HasUniqueID, HasFqn):
|
||||
name: str
|
||||
type: ExposureType
|
||||
owner: ReportOwner
|
||||
resource_type: NodeType = NodeType.Report
|
||||
owner: ExposureOwner
|
||||
resource_type: NodeType = NodeType.Exposure
|
||||
description: str = ''
|
||||
maturity: Optional[MaturityType] = None
|
||||
url: Optional[str] = None
|
||||
description: Optional[str] = None
|
||||
depends_on: DependsOn = field(default_factory=DependsOn)
|
||||
refs: List[List[str]] = field(default_factory=list)
|
||||
sources: List[List[str]] = field(default_factory=list)
|
||||
@@ -673,25 +674,25 @@ class ParsedReport(UnparsedBaseNode, HasUniqueID, HasFqn):
|
||||
def tags(self):
|
||||
return []
|
||||
|
||||
def same_depends_on(self, old: 'ParsedReport') -> bool:
|
||||
def same_depends_on(self, old: 'ParsedExposure') -> bool:
|
||||
return set(self.depends_on.nodes) == set(old.depends_on.nodes)
|
||||
|
||||
def same_description(self, old: 'ParsedReport') -> bool:
|
||||
def same_description(self, old: 'ParsedExposure') -> bool:
|
||||
return self.description == old.description
|
||||
|
||||
def same_maturity(self, old: 'ParsedReport') -> bool:
|
||||
def same_maturity(self, old: 'ParsedExposure') -> bool:
|
||||
return self.maturity == old.maturity
|
||||
|
||||
def same_owner(self, old: 'ParsedReport') -> bool:
|
||||
def same_owner(self, old: 'ParsedExposure') -> bool:
|
||||
return self.owner == old.owner
|
||||
|
||||
def same_exposure_type(self, old: 'ParsedReport') -> bool:
|
||||
def same_exposure_type(self, old: 'ParsedExposure') -> bool:
|
||||
return self.type == old.type
|
||||
|
||||
def same_url(self, old: 'ParsedReport') -> bool:
|
||||
def same_url(self, old: 'ParsedExposure') -> bool:
|
||||
return self.url == old.url
|
||||
|
||||
def same_contents(self, old: Optional['ParsedReport']) -> bool:
|
||||
def same_contents(self, old: Optional['ParsedExposure']) -> bool:
|
||||
# existing when it didn't before is a change!
|
||||
if old is None:
|
||||
return True
|
||||
@@ -712,6 +713,6 @@ ParsedResource = Union[
|
||||
ParsedDocumentation,
|
||||
ParsedMacro,
|
||||
ParsedNode,
|
||||
ParsedReport,
|
||||
ParsedExposure,
|
||||
ParsedSourceDefinition,
|
||||
]
|
||||
|
||||
@@ -158,19 +158,14 @@ class Time(JsonSchemaMixin, Replaceable):
|
||||
return actual_age > difference
|
||||
|
||||
|
||||
class FreshnessStatus(StrEnum):
|
||||
Pass = 'pass'
|
||||
Warn = 'warn'
|
||||
Error = 'error'
|
||||
|
||||
|
||||
@dataclass
|
||||
class FreshnessThreshold(JsonSchemaMixin, Mergeable):
|
||||
warn_after: Optional[Time] = None
|
||||
error_after: Optional[Time] = None
|
||||
filter: Optional[str] = None
|
||||
|
||||
def status(self, age: float) -> FreshnessStatus:
|
||||
def status(self, age: float) -> "dbt.contracts.results.FreshnessStatus":
|
||||
from dbt.contracts.results import FreshnessStatus
|
||||
if self.error_after and self.error_after.exceeded(age):
|
||||
return FreshnessStatus.Error
|
||||
elif self.warn_after and self.warn_after.exceeded(age):
|
||||
@@ -405,17 +400,17 @@ class MaturityType(StrEnum):
|
||||
|
||||
|
||||
@dataclass
|
||||
class ReportOwner(JsonSchemaMixin, Replaceable):
|
||||
class ExposureOwner(JsonSchemaMixin, Replaceable):
|
||||
email: str
|
||||
name: Optional[str] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class UnparsedReport(JsonSchemaMixin, Replaceable):
|
||||
class UnparsedExposure(JsonSchemaMixin, Replaceable):
|
||||
name: str
|
||||
type: ExposureType
|
||||
owner: ReportOwner
|
||||
owner: ExposureOwner
|
||||
description: str = ''
|
||||
maturity: Optional[MaturityType] = None
|
||||
url: Optional[str] = None
|
||||
description: Optional[str] = None
|
||||
depends_on: List[str] = field(default_factory=list)
|
||||
|
||||
@@ -12,7 +12,7 @@ from hologram.helpers import HyphenatedJsonSchemaMixin, register_pattern, \
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Optional, List, Dict, Union, Any, NewType
|
||||
|
||||
PIN_PACKAGE_URL = 'https://docs.getdbt.com/docs/package-management#section-specifying-package-versions' # noqa
|
||||
PIN_PACKAGE_URL = 'https://docs.getdbt.com/docs/package-management#section-specifying-package-versions' # noqa
|
||||
DEFAULT_SEND_ANONYMOUS_USAGE_STATS = True
|
||||
|
||||
|
||||
@@ -142,6 +142,7 @@ BANNED_PROJECT_NAMES = {
|
||||
'sql',
|
||||
'sql_now',
|
||||
'store_result',
|
||||
'store_raw_result',
|
||||
'target',
|
||||
'this',
|
||||
'tojson',
|
||||
|
||||
@@ -1,12 +1,11 @@
|
||||
from dbt.contracts.graph.manifest import CompileResultNode
|
||||
from dbt.contracts.graph.unparsed import (
|
||||
FreshnessStatus, FreshnessThreshold
|
||||
FreshnessThreshold
|
||||
)
|
||||
from dbt.contracts.graph.parsed import ParsedSourceDefinition
|
||||
from dbt.contracts.util import (
|
||||
BaseArtifactMetadata,
|
||||
ArtifactMixin,
|
||||
Writable,
|
||||
VersionedSchema,
|
||||
Replaceable,
|
||||
schema_version,
|
||||
@@ -27,6 +26,8 @@ from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
from typing import Union, Dict, List, Optional, Any, NamedTuple, Sequence
|
||||
|
||||
from dbt.clients.system import write_json
|
||||
|
||||
|
||||
@dataclass
|
||||
class TimingInfo(JsonSchemaMixin):
|
||||
@@ -55,46 +56,58 @@ class collect_timing_info:
|
||||
logger.debug('finished collecting timing info')
|
||||
|
||||
|
||||
class NodeStatus(StrEnum):
|
||||
Success = "success"
|
||||
Error = "error"
|
||||
Fail = "fail"
|
||||
Warn = "warn"
|
||||
Skipped = "skipped"
|
||||
Pass = "pass"
|
||||
RuntimeErr = "runtime error"
|
||||
|
||||
|
||||
class RunStatus(StrEnum):
|
||||
Success = NodeStatus.Success
|
||||
Error = NodeStatus.Error
|
||||
Skipped = NodeStatus.Skipped
|
||||
|
||||
|
||||
class TestStatus(StrEnum):
|
||||
Pass = NodeStatus.Pass
|
||||
Error = NodeStatus.Error
|
||||
Fail = NodeStatus.Fail
|
||||
Warn = NodeStatus.Warn
|
||||
|
||||
|
||||
class FreshnessStatus(StrEnum):
|
||||
Pass = NodeStatus.Pass
|
||||
Warn = NodeStatus.Warn
|
||||
Error = NodeStatus.Error
|
||||
RuntimeErr = NodeStatus.RuntimeErr
|
||||
|
||||
|
||||
@dataclass
|
||||
class BaseResult(JsonSchemaMixin):
|
||||
status: Union[RunStatus, TestStatus, FreshnessStatus]
|
||||
timing: List[TimingInfo]
|
||||
thread_id: str
|
||||
execution_time: float
|
||||
message: Optional[Union[str, int]]
|
||||
adapter_response: Dict[str, Any]
|
||||
|
||||
|
||||
@dataclass
|
||||
class NodeResult(BaseResult):
|
||||
node: CompileResultNode
|
||||
error: Optional[str] = None
|
||||
status: Union[None, str, int, bool] = None
|
||||
execution_time: Union[str, int] = 0
|
||||
thread_id: Optional[str] = None
|
||||
timing: List[TimingInfo] = field(default_factory=list)
|
||||
fail: Optional[bool] = None
|
||||
warn: Optional[bool] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class PartialResult(BaseResult, Writable):
|
||||
pass
|
||||
|
||||
# if the result got to the point where it could be skipped/failed, we would
|
||||
# be returning a real result, not a partial.
|
||||
@property
|
||||
def skipped(self):
|
||||
return False
|
||||
|
||||
|
||||
@dataclass
|
||||
class WritableRunModelResult(BaseResult, Writable):
|
||||
skip: bool = False
|
||||
|
||||
@property
|
||||
def skipped(self):
|
||||
return self.skip
|
||||
|
||||
|
||||
@dataclass
|
||||
class RunModelResult(WritableRunModelResult):
|
||||
class RunResult(NodeResult):
|
||||
agate_table: Optional[agate.Table] = None
|
||||
|
||||
def to_dict(self, *args, **kwargs):
|
||||
dct = super().to_dict(*args, **kwargs)
|
||||
dct.pop('agate_table', None)
|
||||
return dct
|
||||
@property
|
||||
def skipped(self):
|
||||
return self.status == RunStatus.Skipped
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -112,9 +125,6 @@ class ExecutionResult(JsonSchemaMixin):
|
||||
return self.results[idx]
|
||||
|
||||
|
||||
RunResult = Union[PartialResult, WritableRunModelResult]
|
||||
|
||||
|
||||
@dataclass
|
||||
class RunResultsMetadata(BaseArtifactMetadata):
|
||||
dbt_schema_version: str = field(
|
||||
@@ -123,33 +133,69 @@ class RunResultsMetadata(BaseArtifactMetadata):
|
||||
|
||||
|
||||
@dataclass
|
||||
@schema_version('run-results', 1)
|
||||
class RunResultsArtifact(
|
||||
class RunResultOutput(BaseResult):
|
||||
unique_id: str
|
||||
|
||||
|
||||
def process_run_result(result: RunResult) -> RunResultOutput:
|
||||
return RunResultOutput(
|
||||
unique_id=result.node.unique_id,
|
||||
status=result.status,
|
||||
timing=result.timing,
|
||||
thread_id=result.thread_id,
|
||||
execution_time=result.execution_time,
|
||||
message=result.message,
|
||||
adapter_response=result.adapter_response
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class RunExecutionResult(
|
||||
ExecutionResult,
|
||||
ArtifactMixin,
|
||||
):
|
||||
results: Sequence[RunResult]
|
||||
args: Dict[str, Any] = field(default_factory=dict)
|
||||
generated_at: datetime = field(default_factory=datetime.utcnow)
|
||||
|
||||
def write(self, path: str):
|
||||
writable = RunResultsArtifact.from_execution_results(
|
||||
results=self.results,
|
||||
elapsed_time=self.elapsed_time,
|
||||
generated_at=self.generated_at,
|
||||
args=self.args,
|
||||
)
|
||||
writable.write(path)
|
||||
|
||||
|
||||
@dataclass
|
||||
@schema_version('run-results', 1)
|
||||
class RunResultsArtifact(ExecutionResult, ArtifactMixin):
|
||||
results: Sequence[RunResultOutput]
|
||||
args: Dict[str, Any] = field(default_factory=dict)
|
||||
|
||||
@classmethod
|
||||
def from_node_results(
|
||||
def from_execution_results(
|
||||
cls,
|
||||
results: Sequence[RunResult],
|
||||
elapsed_time: float,
|
||||
generated_at: datetime,
|
||||
args: Dict,
|
||||
):
|
||||
processed_results = [process_run_result(result) for result in results]
|
||||
meta = RunResultsMetadata(
|
||||
dbt_schema_version=str(cls.dbt_schema_version),
|
||||
generated_at=generated_at,
|
||||
)
|
||||
return cls(
|
||||
metadata=meta,
|
||||
results=results,
|
||||
results=processed_results,
|
||||
elapsed_time=elapsed_time,
|
||||
args=args
|
||||
)
|
||||
|
||||
def write(self, path: str, omit_none=False):
|
||||
write_json(path, self.to_dict(omit_none=omit_none))
|
||||
|
||||
|
||||
@dataclass
|
||||
class RunOperationResult(ExecutionResult):
|
||||
@@ -174,7 +220,7 @@ class RunOperationResultsArtifact(RunOperationResult, ArtifactMixin):
|
||||
elapsed_time: float,
|
||||
generated_at: datetime,
|
||||
):
|
||||
meta = RunResultsMetadata(
|
||||
meta = RunOperationResultMetadata(
|
||||
dbt_schema_version=str(cls.dbt_schema_version),
|
||||
generated_at=generated_at,
|
||||
)
|
||||
@@ -185,37 +231,23 @@ class RunOperationResultsArtifact(RunOperationResult, ArtifactMixin):
|
||||
success=success,
|
||||
)
|
||||
|
||||
# due to issues with typing.Union collapsing subclasses, this can't subclass
|
||||
# PartialResult
|
||||
|
||||
|
||||
@dataclass
|
||||
class SourceFreshnessResultMixin(JsonSchemaMixin):
|
||||
class SourceFreshnessResult(NodeResult):
|
||||
node: ParsedSourceDefinition
|
||||
status: FreshnessStatus
|
||||
max_loaded_at: datetime
|
||||
snapshotted_at: datetime
|
||||
age: float
|
||||
|
||||
|
||||
# due to issues with typing.Union collapsing subclasses, this can't subclass
|
||||
# PartialResult
|
||||
@dataclass
|
||||
class SourceFreshnessResult(BaseResult, Writable, SourceFreshnessResultMixin):
|
||||
node: ParsedSourceDefinition
|
||||
status: FreshnessStatus = FreshnessStatus.Pass
|
||||
|
||||
def __post_init__(self):
|
||||
self.fail = self.status == 'error'
|
||||
|
||||
@property
|
||||
def warned(self):
|
||||
return self.status == 'warn'
|
||||
|
||||
@property
|
||||
def skipped(self):
|
||||
return False
|
||||
|
||||
|
||||
def _copykeys(src, keys, **updates):
|
||||
return {k: getattr(src, k) for k in keys}
|
||||
|
||||
|
||||
class FreshnessErrorEnum(StrEnum):
|
||||
runtime_error = 'runtime error'
|
||||
|
||||
@@ -223,8 +255,8 @@ class FreshnessErrorEnum(StrEnum):
|
||||
@dataclass
|
||||
class SourceFreshnessRuntimeError(JsonSchemaMixin):
|
||||
unique_id: str
|
||||
error: str
|
||||
state: FreshnessErrorEnum
|
||||
error: Optional[Union[str, int]]
|
||||
status: FreshnessErrorEnum
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -233,11 +265,22 @@ class SourceFreshnessOutput(JsonSchemaMixin):
|
||||
max_loaded_at: datetime
|
||||
snapshotted_at: datetime
|
||||
max_loaded_at_time_ago_in_s: float
|
||||
state: FreshnessStatus
|
||||
status: FreshnessStatus
|
||||
criteria: FreshnessThreshold
|
||||
adapter_response: Dict[str, Any]
|
||||
|
||||
|
||||
FreshnessNodeResult = Union[PartialResult, SourceFreshnessResult]
|
||||
@dataclass
|
||||
class PartialSourceFreshnessResult(NodeResult):
|
||||
status: FreshnessStatus
|
||||
|
||||
@property
|
||||
def skipped(self):
|
||||
return False
|
||||
|
||||
|
||||
FreshnessNodeResult = Union[PartialSourceFreshnessResult,
|
||||
SourceFreshnessResult]
|
||||
FreshnessNodeOutput = Union[SourceFreshnessRuntimeError, SourceFreshnessOutput]
|
||||
|
||||
|
||||
@@ -245,11 +288,11 @@ def process_freshness_result(
|
||||
result: FreshnessNodeResult
|
||||
) -> FreshnessNodeOutput:
|
||||
unique_id = result.node.unique_id
|
||||
if result.error is not None:
|
||||
if result.status == FreshnessStatus.RuntimeErr:
|
||||
return SourceFreshnessRuntimeError(
|
||||
unique_id=unique_id,
|
||||
error=result.error,
|
||||
state=FreshnessErrorEnum.runtime_error,
|
||||
error=result.message,
|
||||
status=FreshnessErrorEnum.runtime_error,
|
||||
)
|
||||
|
||||
# we know that this must be a SourceFreshnessResult
|
||||
@@ -271,8 +314,9 @@ def process_freshness_result(
|
||||
max_loaded_at=result.max_loaded_at,
|
||||
snapshotted_at=result.snapshotted_at,
|
||||
max_loaded_at_time_ago_in_s=result.age,
|
||||
state=result.status,
|
||||
status=result.status,
|
||||
criteria=criteria,
|
||||
adapter_response=result.adapter_response
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@ from hologram.helpers import StrEnum
|
||||
from dbt.contracts.graph.compiled import CompileResultNode
|
||||
from dbt.contracts.graph.manifest import WritableManifest
|
||||
from dbt.contracts.results import (
|
||||
TimingInfo,
|
||||
RunResult, RunResultsArtifact, TimingInfo,
|
||||
CatalogArtifact,
|
||||
CatalogResults,
|
||||
ExecutionResult,
|
||||
@@ -19,8 +19,7 @@ from dbt.contracts.results import (
|
||||
FreshnessResult,
|
||||
RunOperationResult,
|
||||
RunOperationResultsArtifact,
|
||||
RunResult,
|
||||
RunResultsArtifact,
|
||||
RunExecutionResult,
|
||||
)
|
||||
from dbt.contracts.util import VersionedSchema, schema_version
|
||||
from dbt.exceptions import InternalException
|
||||
@@ -80,6 +79,7 @@ class RPCTestParameters(RPCCompileParameters):
|
||||
data: bool = False
|
||||
schema: bool = False
|
||||
state: Optional[str] = None
|
||||
defer: Optional[bool] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -225,12 +225,12 @@ class RemoteCompileResult(RemoteCompileResultMixin):
|
||||
@dataclass
|
||||
@schema_version('remote-execution-result', 1)
|
||||
class RemoteExecutionResult(ExecutionResult, RemoteResult):
|
||||
args: Dict[str, Any] = field(default_factory=dict)
|
||||
results: Sequence[RunResult]
|
||||
args: Dict[str, Any] = field(default_factory=dict)
|
||||
generated_at: datetime = field(default_factory=datetime.utcnow)
|
||||
|
||||
def write(self, path: str):
|
||||
writable = RunResultsArtifact.from_node_results(
|
||||
writable = RunResultsArtifact.from_execution_results(
|
||||
generated_at=self.generated_at,
|
||||
results=self.results,
|
||||
elapsed_time=self.elapsed_time,
|
||||
@@ -241,11 +241,11 @@ class RemoteExecutionResult(ExecutionResult, RemoteResult):
|
||||
@classmethod
|
||||
def from_local_result(
|
||||
cls,
|
||||
base: RunResultsArtifact,
|
||||
base: RunExecutionResult,
|
||||
logs: List[LogMessage],
|
||||
) -> 'RemoteExecutionResult':
|
||||
return cls(
|
||||
generated_at=base.metadata.generated_at,
|
||||
generated_at=base.generated_at,
|
||||
results=base.results,
|
||||
elapsed_time=base.elapsed_time,
|
||||
args=base.args,
|
||||
|
||||
@@ -8,6 +8,7 @@ from typing import List, Dict, Any, Union
|
||||
class SelectorDefinition(JsonSchemaMixin):
|
||||
name: str
|
||||
definition: Union[str, Dict[str, Any]]
|
||||
description: str = ''
|
||||
|
||||
|
||||
@dataclass
|
||||
|
||||
@@ -132,7 +132,7 @@ class RuntimeException(RuntimeError, Exception):
|
||||
result.update({
|
||||
'raw_sql': self.node.raw_sql,
|
||||
# the node isn't always compiled, but if it is, include that!
|
||||
'compiled_sql': getattr(self.node, 'injected_sql', None),
|
||||
'compiled_sql': getattr(self.node, 'compiled_sql', None),
|
||||
})
|
||||
return result
|
||||
|
||||
|
||||
@@ -1,5 +1,8 @@
|
||||
import os
|
||||
import multiprocessing
|
||||
if os.name != 'nt':
|
||||
# https://bugs.python.org/issue41567
|
||||
import multiprocessing.popen_spawn_posix # type: ignore
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
|
||||
@@ -19,7 +19,7 @@ from .selector_spec import (
|
||||
|
||||
INTERSECTION_DELIMITER = ','
|
||||
|
||||
DEFAULT_INCLUDES: List[str] = ['fqn:*', 'source:*', 'report:*']
|
||||
DEFAULT_INCLUDES: List[str] = ['fqn:*', 'source:*', 'exposure:*']
|
||||
DEFAULT_EXCLUDES: List[str] = []
|
||||
DATA_TEST_SELECTOR: str = 'test_type:data'
|
||||
SCHEMA_TEST_SELECTOR: str = 'test_type:schema'
|
||||
|
||||
@@ -7,7 +7,7 @@ from typing import (
|
||||
import networkx as nx # type: ignore
|
||||
|
||||
from .graph import UniqueId
|
||||
from dbt.contracts.graph.parsed import ParsedSourceDefinition, ParsedReport
|
||||
from dbt.contracts.graph.parsed import ParsedSourceDefinition, ParsedExposure
|
||||
from dbt.contracts.graph.compiled import GraphMemberNode
|
||||
from dbt.contracts.graph.manifest import Manifest
|
||||
from dbt.node_types import NodeType
|
||||
@@ -50,8 +50,8 @@ class GraphQueue:
|
||||
node = self.manifest.expect(node_id)
|
||||
if node.resource_type != NodeType.Model:
|
||||
return False
|
||||
# must be a Model - tell mypy this won't be a Source or Report
|
||||
assert not isinstance(node, (ParsedSourceDefinition, ParsedReport))
|
||||
# must be a Model - tell mypy this won't be a Source or Exposure
|
||||
assert not isinstance(node, (ParsedSourceDefinition, ParsedExposure))
|
||||
if node.is_ephemeral:
|
||||
return False
|
||||
return True
|
||||
|
||||
@@ -129,7 +129,7 @@ class NodeSelector(MethodManager):
|
||||
if unique_id in self.manifest.sources:
|
||||
source = self.manifest.sources[unique_id]
|
||||
return source.config.enabled
|
||||
elif unique_id in self.manifest.reports:
|
||||
elif unique_id in self.manifest.exposures:
|
||||
return True
|
||||
node = self.manifest.nodes[unique_id]
|
||||
return not node.empty and node.config.enabled
|
||||
@@ -146,8 +146,8 @@ class NodeSelector(MethodManager):
|
||||
node = self.manifest.nodes[unique_id]
|
||||
elif unique_id in self.manifest.sources:
|
||||
node = self.manifest.sources[unique_id]
|
||||
elif unique_id in self.manifest.reports:
|
||||
node = self.manifest.reports[unique_id]
|
||||
elif unique_id in self.manifest.exposures:
|
||||
node = self.manifest.exposures[unique_id]
|
||||
else:
|
||||
raise InternalException(
|
||||
f'Node {unique_id} not found in the manifest!'
|
||||
|
||||
@@ -17,7 +17,7 @@ from dbt.contracts.graph.manifest import Manifest, WritableManifest
|
||||
from dbt.contracts.graph.parsed import (
|
||||
HasTestMetadata,
|
||||
ParsedDataTestNode,
|
||||
ParsedReport,
|
||||
ParsedExposure,
|
||||
ParsedSchemaTestNode,
|
||||
ParsedSourceDefinition,
|
||||
)
|
||||
@@ -46,7 +46,7 @@ class MethodName(StrEnum):
|
||||
TestType = 'test_type'
|
||||
ResourceType = 'resource_type'
|
||||
State = 'state'
|
||||
Report = 'report'
|
||||
Exposure = 'exposure'
|
||||
|
||||
|
||||
def is_selected_node(real_node, node_selector):
|
||||
@@ -75,7 +75,7 @@ def is_selected_node(real_node, node_selector):
|
||||
return True
|
||||
|
||||
|
||||
SelectorTarget = Union[ParsedSourceDefinition, ManifestNode, ParsedReport]
|
||||
SelectorTarget = Union[ParsedSourceDefinition, ManifestNode, ParsedExposure]
|
||||
|
||||
|
||||
class SelectorMethod(metaclass=abc.ABCMeta):
|
||||
@@ -111,16 +111,16 @@ class SelectorMethod(metaclass=abc.ABCMeta):
|
||||
continue
|
||||
yield unique_id, source
|
||||
|
||||
def report_nodes(
|
||||
def exposure_nodes(
|
||||
self,
|
||||
included_nodes: Set[UniqueId]
|
||||
) -> Iterator[Tuple[UniqueId, ParsedReport]]:
|
||||
) -> Iterator[Tuple[UniqueId, ParsedExposure]]:
|
||||
|
||||
for key, report in self.manifest.reports.items():
|
||||
for key, exposure in self.manifest.exposures.items():
|
||||
unique_id = UniqueId(key)
|
||||
if unique_id not in included_nodes:
|
||||
continue
|
||||
yield unique_id, report
|
||||
yield unique_id, exposure
|
||||
|
||||
def all_nodes(
|
||||
self,
|
||||
@@ -128,7 +128,7 @@ class SelectorMethod(metaclass=abc.ABCMeta):
|
||||
) -> Iterator[Tuple[UniqueId, SelectorTarget]]:
|
||||
yield from chain(self.parsed_nodes(included_nodes),
|
||||
self.source_nodes(included_nodes),
|
||||
self.report_nodes(included_nodes))
|
||||
self.exposure_nodes(included_nodes))
|
||||
|
||||
def configurable_nodes(
|
||||
self,
|
||||
@@ -140,9 +140,9 @@ class SelectorMethod(metaclass=abc.ABCMeta):
|
||||
def non_source_nodes(
|
||||
self,
|
||||
included_nodes: Set[UniqueId],
|
||||
) -> Iterator[Tuple[UniqueId, Union[ParsedReport, ManifestNode]]]:
|
||||
) -> Iterator[Tuple[UniqueId, Union[ParsedExposure, ManifestNode]]]:
|
||||
yield from chain(self.parsed_nodes(included_nodes),
|
||||
self.report_nodes(included_nodes))
|
||||
self.exposure_nodes(included_nodes))
|
||||
|
||||
@abc.abstractmethod
|
||||
def search(
|
||||
@@ -244,7 +244,7 @@ class SourceSelectorMethod(SelectorMethod):
|
||||
yield node
|
||||
|
||||
|
||||
class ReportSelectorMethod(SelectorMethod):
|
||||
class ExposureSelectorMethod(SelectorMethod):
|
||||
def search(
|
||||
self, included_nodes: Set[UniqueId], selector: str
|
||||
) -> Iterator[UniqueId]:
|
||||
@@ -256,13 +256,13 @@ class ReportSelectorMethod(SelectorMethod):
|
||||
target_package, target_name = parts
|
||||
else:
|
||||
msg = (
|
||||
'Invalid report selector value "{}". Reports must be of '
|
||||
'the form ${{report_name}} or '
|
||||
'${{report_package.report_name}}'
|
||||
'Invalid exposure selector value "{}". Exposures must be of '
|
||||
'the form ${{exposure_name}} or '
|
||||
'${{exposure_package.exposure_name}}'
|
||||
).format(selector)
|
||||
raise RuntimeException(msg)
|
||||
|
||||
for node, real_node in self.report_nodes(included_nodes):
|
||||
for node, real_node in self.exposure_nodes(included_nodes):
|
||||
if target_package not in (real_node.package_name, SELECTOR_GLOB):
|
||||
continue
|
||||
if target_name not in (real_node.name, SELECTOR_GLOB):
|
||||
@@ -481,8 +481,8 @@ class StateSelectorMethod(SelectorMethod):
|
||||
previous_node = manifest.nodes[node]
|
||||
elif node in manifest.sources:
|
||||
previous_node = manifest.sources[node]
|
||||
elif node in manifest.reports:
|
||||
previous_node = manifest.reports[node]
|
||||
elif node in manifest.exposures:
|
||||
previous_node = manifest.exposures[node]
|
||||
|
||||
if checker(previous_node, real_node):
|
||||
yield node
|
||||
@@ -499,7 +499,7 @@ class MethodManager:
|
||||
MethodName.TestName: TestNameSelectorMethod,
|
||||
MethodName.TestType: TestTypeSelectorMethod,
|
||||
MethodName.State: StateSelectorMethod,
|
||||
MethodName.Report: ReportSelectorMethod,
|
||||
MethodName.Exposure: ExposureSelectorMethod,
|
||||
}
|
||||
|
||||
def __init__(
|
||||
|
||||
@@ -123,6 +123,26 @@ class SelectionCriteria:
|
||||
children_depth=children_depth,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def dict_from_single_spec(cls, raw: str):
|
||||
result = RAW_SELECTOR_PATTERN.match(raw)
|
||||
if result is None:
|
||||
return {'error': 'Invalid selector spec'}
|
||||
dct: Dict[str, Any] = result.groupdict()
|
||||
method_name, method_arguments = cls.parse_method(dct)
|
||||
meth_name = str(method_name)
|
||||
if method_arguments:
|
||||
meth_name = meth_name + '.' + '.'.join(method_arguments)
|
||||
dct['method'] = meth_name
|
||||
dct = {k: v for k, v in dct.items() if (v is not None and v != '')}
|
||||
if 'childrens_parents' in dct:
|
||||
dct['childrens_parents'] = bool(dct.get('childrens_parents'))
|
||||
if 'parents' in dct:
|
||||
dct['parents'] = bool(dct.get('parents'))
|
||||
if 'children' in dct:
|
||||
dct['children'] = bool(dct.get('children'))
|
||||
return dct
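Hedged examples of what this normalization yields (the exact keys come from RAW_SELECTOR_PATTERN's named groups, so treat the outputs as approximate):

from dbt.graph.selector_spec import SelectionCriteria

simple = SelectionCriteria.dict_from_single_spec('tag:nightly')
# -> roughly {'method': 'tag', 'value': 'nightly'}
graph_spec = SelectionCriteria.dict_from_single_spec('+config.materialized:table+')
# -> the same shape plus 'parents': True and 'children': True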
|
||||
|
||||
@classmethod
|
||||
def from_single_spec(cls, raw: str) -> 'SelectionCriteria':
|
||||
result = RAW_SELECTOR_PATTERN.match(raw)
|
||||
|
||||
@@ -7,15 +7,15 @@
|
||||
{{ write(sql) }}
|
||||
{%- endif -%}
|
||||
|
||||
{%- set status, res = adapter.execute(sql, auto_begin=auto_begin, fetch=fetch_result) -%}
|
||||
{%- set res, table = adapter.execute(sql, auto_begin=auto_begin, fetch=fetch_result) -%}
|
||||
{%- if name is not none -%}
|
||||
{{ store_result(name, status=status, agate_table=res) }}
|
||||
{{ store_result(name, response=res, agate_table=table) }}
|
||||
{%- endif -%}
|
||||
|
||||
{%- endif -%}
|
||||
{%- endmacro %}
|
||||
|
||||
{% macro noop_statement(name=None, status=None, res=None) -%}
|
||||
{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}
|
||||
{%- set sql = caller() -%}
|
||||
|
||||
{%- if name == 'main' -%}
|
||||
@@ -24,7 +24,7 @@
|
||||
{%- endif -%}
|
||||
|
||||
{%- if name is not none -%}
|
||||
{{ store_result(name, status=status, agate_table=res) }}
|
||||
{{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}
|
||||
{%- endif -%}
|
||||
|
||||
{%- endmacro %}
|
||||
|
||||
@@ -112,7 +112,7 @@
|
||||
{%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}
|
||||
|
||||
{%- set agate_table = load_agate_table() -%}
|
||||
{%- do store_result('agate_table', status='OK', agate_table=agate_table) -%}
|
||||
{%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}
|
||||
|
||||
{{ run_hooks(pre_hooks, inside_transaction=False) }}
|
||||
|
||||
@@ -129,11 +129,11 @@
|
||||
{% set create_table_sql = create_csv_table(model, agate_table) %}
|
||||
{% endif %}
|
||||
|
||||
{% set status = 'CREATE' if full_refresh_mode else 'INSERT' %}
|
||||
{% set num_rows = (agate_table.rows | length) %}
|
||||
{% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}
|
||||
{% set rows_affected = (agate_table.rows | length) %}
|
||||
{% set sql = load_csv_rows(model, agate_table) %}
|
||||
|
||||
{% call noop_statement('main', status ~ ' ' ~ num_rows) %}
|
||||
{% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}
|
||||
{{ create_table_sql }};
|
||||
-- dbt seed --
|
||||
{{ sql }}
|
||||
|
||||
@@ -214,7 +214,7 @@
|
||||
|
||||
{% if not target_relation_exists %}
|
||||
|
||||
{% set build_sql = build_snapshot_table(strategy, model['injected_sql']) %}
|
||||
{% set build_sql = build_snapshot_table(strategy, model['compiled_sql']) %}
|
||||
{% set final_sql = create_table_as(False, target_relation, build_sql) %}
|
||||
|
||||
{% else %}
|
||||
|
||||
@@ -13,13 +13,7 @@
|
||||
|
||||
when matched
|
||||
and DBT_INTERNAL_DEST.dbt_valid_to is null
|
||||
and DBT_INTERNAL_SOURCE.dbt_change_type = 'update'
|
||||
then update
|
||||
set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to
|
||||
|
||||
when matched
|
||||
and DBT_INTERNAL_DEST.dbt_valid_to is null
|
||||
and DBT_INTERNAL_SOURCE.dbt_change_type = 'delete'
|
||||
and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')
|
||||
then update
|
||||
set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to
|
||||
|
||||
|
||||
@@ -106,7 +106,7 @@
|
||||
|
||||
|
||||
{% macro snapshot_check_all_get_existing_columns(node, target_exists) -%}
|
||||
{%- set query_columns = get_columns_in_query(node['injected_sql']) -%}
|
||||
{%- set query_columns = get_columns_in_query(node['compiled_sql']) -%}
|
||||
{%- if not target_exists -%}
|
||||
{# no table yet -> return whatever the query does #}
|
||||
{{ return([false, query_columns]) }}
|
||||
@@ -164,7 +164,11 @@
|
||||
{%- for col in check_cols -%}
|
||||
{{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}
|
||||
or
|
||||
({{ snapshotted_rel }}.{{ col }} is null) != ({{ current_rel }}.{{ col }} is null)
|
||||
(
|
||||
(({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))
|
||||
or
|
||||
((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))
|
||||
)
|
||||
{%- if not loop.last %} or {% endif -%}
|
||||
{%- endfor -%}
|
||||
{%- endif -%}
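A hedged Python sketch of the per-column predicate the loop above now renders; the point of the change is to avoid comparing two `is null` checks with `!=`, which not every database accepts (grouping here is simplified relative to the macro):

def check_cols_predicate(snapshotted_rel, current_rel, check_cols):
    clauses = []
    for col in check_cols:
        clauses.append(
            f"({snapshotted_rel}.{col} != {current_rel}.{col} or "
            f"(({snapshotted_rel}.{col} is null) and not ({current_rel}.{col} is null)) or "
            f"((not {snapshotted_rel}.{col} is null) and ({current_rel}.{col} is null)))"
        )
    return "\n or ".join(clauses)

# e.g. check_cols_predicate('snapshotted_data', 'source_data', ['status', 'owner'])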
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -23,6 +23,7 @@ import dbt.task.generate as generate_task
|
||||
import dbt.task.serve as serve_task
|
||||
import dbt.task.freshness as freshness_task
|
||||
import dbt.task.run_operation as run_operation_task
|
||||
import dbt.task.parse as parse_task
|
||||
from dbt.profiler import profiler
|
||||
from dbt.task.list import ListTask
|
||||
from dbt.task.rpc.server import RPCServerTask
|
||||
@@ -445,6 +446,21 @@ def _build_snapshot_subparser(subparsers, base_subparser):
|
||||
return sub
|
||||
|
||||
|
||||
def _add_defer_argument(*subparsers):
|
||||
for sub in subparsers:
|
||||
sub.add_optional_argument_inverse(
|
||||
'--defer',
|
||||
enable_help='''
|
||||
If set, defer to the state variable for resolving unselected nodes.
|
||||
''',
|
||||
disable_help='''
|
||||
If set, do not defer to the state variable for resolving unselected
|
||||
nodes.
|
||||
''',
|
||||
default=flags.DEFER_MODE,
|
||||
)
|
||||
|
||||
|
||||
def _build_run_subparser(subparsers, base_subparser):
|
||||
run_sub = subparsers.add_parser(
|
||||
'run',
|
||||
@@ -462,19 +478,6 @@ def _build_run_subparser(subparsers, base_subparser):
|
||||
'''
|
||||
)
|
||||
|
||||
# this is a "dbt run"-only thing, for now
|
||||
run_sub.add_optional_argument_inverse(
|
||||
'--defer',
|
||||
enable_help='''
|
||||
If set, defer to the state variable for resolving unselected nodes.
|
||||
''',
|
||||
disable_help='''
|
||||
If set, do not defer to the state variable for resolving unselected
|
||||
nodes.
|
||||
''',
|
||||
default=flags.DEFER_MODE,
|
||||
)
|
||||
|
||||
run_sub.set_defaults(cls=run_task.RunTask, which='run', rpc_method='run')
|
||||
return run_sub
|
||||
|
||||
@@ -494,6 +497,21 @@ def _build_compile_subparser(subparsers, base_subparser):
|
||||
return sub
|
||||
|
||||
|
||||
def _build_parse_subparser(subparsers, base_subparser):
|
||||
sub = subparsers.add_parser(
|
||||
'parse',
|
||||
parents=[base_subparser],
|
||||
help='''
|
||||
Parsed the project and provides information on performance
|
||||
'''
|
||||
)
|
||||
sub.set_defaults(cls=parse_task.ParseTask, which='parse',
|
||||
rpc_method='parse')
|
||||
sub.add_argument('--write-manifest', action='store_true')
|
||||
sub.add_argument('--compile', action='store_true')
|
||||
return sub
|
||||
|
||||
|
||||
def _build_docs_generate_subparser(subparsers, base_subparser):
|
||||
# it might look like docs_sub is the correct parents entry, but that
|
||||
# will cause weird errors about 'conflicting option strings'.
|
||||
@@ -1006,16 +1024,19 @@ def parse_args(args, cls=DBTArgumentParser):
|
||||
rpc_sub = _build_rpc_subparser(subs, base_subparser)
|
||||
run_sub = _build_run_subparser(subs, base_subparser)
|
||||
compile_sub = _build_compile_subparser(subs, base_subparser)
|
||||
parse_sub = _build_parse_subparser(subs, base_subparser)
|
||||
generate_sub = _build_docs_generate_subparser(docs_subs, base_subparser)
|
||||
test_sub = _build_test_subparser(subs, base_subparser)
|
||||
seed_sub = _build_seed_subparser(subs, base_subparser)
|
||||
# --threads, --no-version-check
|
||||
_add_common_arguments(run_sub, compile_sub, generate_sub, test_sub,
|
||||
rpc_sub, seed_sub)
|
||||
rpc_sub, seed_sub, parse_sub)
|
||||
# --models, --exclude
|
||||
# list_sub sets up its own arguments.
|
||||
_add_selection_arguments(run_sub, compile_sub, generate_sub, test_sub)
|
||||
_add_selection_arguments(snapshot_sub, seed_sub, models_name='select')
|
||||
# --defer
|
||||
_add_defer_argument(run_sub, test_sub)
|
||||
# --full-refresh
|
||||
_add_table_mutability_arguments(run_sub, compile_sub)
|
||||
|
||||
|
||||
@@ -14,7 +14,7 @@ class NodeType(StrEnum):
|
||||
Documentation = 'docs'
|
||||
Source = 'source'
|
||||
Macro = 'macro'
|
||||
Report = 'report'
|
||||
Exposure = 'exposure'
|
||||
|
||||
@classmethod
|
||||
def executable(cls) -> List['NodeType']:
|
||||
@@ -46,6 +46,7 @@ class NodeType(StrEnum):
|
||||
cls.Source,
|
||||
cls.Macro,
|
||||
cls.Analysis,
|
||||
cls.Exposure
|
||||
]
|
||||
|
||||
def pluralize(self) -> str:
|
||||
|
||||
@@ -1,10 +1,14 @@
|
||||
from dataclasses import dataclass
|
||||
from dataclasses import field
|
||||
import os
|
||||
import pickle
|
||||
from typing import (
|
||||
Dict, Optional, Mapping, Callable, Any, List, Type, Union, MutableMapping
|
||||
)
|
||||
import time
|
||||
|
||||
import dbt.exceptions
|
||||
import dbt.tracking
|
||||
import dbt.flags as flags
|
||||
|
||||
from dbt.adapters.factory import (
|
||||
@@ -21,8 +25,9 @@ from dbt.contracts.files import FilePath, FileHash
|
||||
from dbt.contracts.graph.compiled import ManifestNode
|
||||
from dbt.contracts.graph.manifest import Manifest, Disabled
|
||||
from dbt.contracts.graph.parsed import (
|
||||
ParsedSourceDefinition, ParsedNode, ParsedMacro, ColumnInfo, ParsedReport
|
||||
ParsedSourceDefinition, ParsedNode, ParsedMacro, ColumnInfo, ParsedExposure
|
||||
)
|
||||
from dbt.contracts.util import Writable
|
||||
from dbt.exceptions import (
|
||||
ref_target_not_found,
|
||||
get_target_not_found_or_disabled_msg,
|
||||
@@ -46,12 +51,39 @@ from dbt.parser.sources import patch_sources
|
||||
from dbt.ui import warning_tag
|
||||
from dbt.version import __version__
|
||||
|
||||
from hologram import JsonSchemaMixin
|
||||
|
||||
PARTIAL_PARSE_FILE_NAME = 'partial_parse.pickle'
|
||||
PARSING_STATE = DbtProcessState('parsing')
|
||||
DEFAULT_PARTIAL_PARSE = False
|
||||
|
||||
|
||||
@dataclass
|
||||
class ParserInfo(JsonSchemaMixin):
|
||||
parser: str
|
||||
elapsed: float
|
||||
path_count: int = 0
|
||||
|
||||
|
||||
@dataclass
|
||||
class ProjectLoaderInfo(JsonSchemaMixin):
|
||||
project_name: str
|
||||
elapsed: float
|
||||
parsers: List[ParserInfo]
|
||||
path_count: int = 0
|
||||
|
||||
|
||||
@dataclass
|
||||
class ManifestLoaderInfo(JsonSchemaMixin, Writable):
|
||||
path_count: int = 0
|
||||
is_partial_parse_enabled: Optional[bool] = None
|
||||
parse_project_elapsed: Optional[float] = None
|
||||
patch_sources_elapsed: Optional[float] = None
|
||||
process_manifest_elapsed: Optional[float] = None
|
||||
load_all_elapsed: Optional[float] = None
|
||||
projects: List[ProjectLoaderInfo] = field(default_factory=list)
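These three dataclasses nest per-parser timings inside per-project timings inside one manifest-level record; because they mix in hologram's JsonSchemaMixin (and ManifestLoaderInfo is also Writable), the whole structure serializes to JSON. A sketch with made-up values:

info = ManifestLoaderInfo(
    is_partial_parse_enabled=False,
    parse_project_elapsed=1.42,
    load_all_elapsed=2.08,
)
info.projects.append(ProjectLoaderInfo(
    project_name="my_project",          # hypothetical project
    elapsed=1.40,
    parsers=[ParserInfo(parser="model", elapsed=0.95, path_count=42)],
    path_count=42,
))
info.path_count = 42
print(info.to_dict())                   # JsonSchemaMixin supplies to_dict()
# ParseTask.write_perf_info (later in this diff) relies on Writable's write()
# to dump this record to perf_info.json under the project's target path.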
|
||||
|
||||
_parser_types: List[Type[Parser]] = [
|
||||
ModelParser,
|
||||
SnapshotParser,
|
||||
@@ -119,6 +151,26 @@ class ManifestLoader:
|
||||
root_project, all_projects,
|
||||
)
|
||||
self._loaded_file_cache: Dict[str, FileBlock] = {}
|
||||
self._perf_info = ManifestLoaderInfo(
|
||||
is_partial_parse_enabled=self._partial_parse_enabled()
|
||||
)
|
||||
|
||||
def track_project_load(self):
|
||||
invocation_id = dbt.tracking.active_user.invocation_id
|
||||
dbt.tracking.track_project_load({
|
||||
"invocation_id": invocation_id,
|
||||
"project_id": self.root_project.hashed_name(),
|
||||
"path_count": self._perf_info.path_count,
|
||||
"parse_project_elapsed": self._perf_info.parse_project_elapsed,
|
||||
"patch_sources_elapsed": self._perf_info.patch_sources_elapsed,
|
||||
"process_manifest_elapsed": (
|
||||
self._perf_info.process_manifest_elapsed
|
||||
),
|
||||
"load_all_elapsed": self._perf_info.load_all_elapsed,
|
||||
"is_partial_parse_enabled": (
|
||||
self._perf_info.is_partial_parse_enabled
|
||||
),
|
||||
})
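The payload handed to `dbt.tracking.track_project_load` is a flat dict of the timings gathered above; a representative shape (all values illustrative):

example_event = {
    "invocation_id": "8a13f1f0-0000-0000-0000-000000000000",  # made-up UUID
    "project_id": "3c2b...",                                   # hashed project name, truncated
    "path_count": 120,
    "parse_project_elapsed": 1.42,
    "patch_sources_elapsed": 0.03,
    "process_manifest_elapsed": 0.51,
    "load_all_elapsed": 2.08,
    "is_partial_parse_enabled": False,
}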
|
||||
def parse_with_cache(
|
||||
self,
|
||||
@@ -170,9 +222,35 @@ class ManifestLoader:
|
||||
# per-project cache.
|
||||
self._loaded_file_cache.clear()
|
||||
|
||||
project_parser_info: List[ParserInfo] = []
|
||||
start_timer = time.perf_counter()
|
||||
total_path_count = 0
|
||||
for parser in parsers:
|
||||
parser_path_count = 0
|
||||
parser_start_timer = time.perf_counter()
|
||||
for path in parser.search():
|
||||
self.parse_with_cache(path, parser, old_results)
|
||||
parser_path_count = parser_path_count + 1
|
||||
|
||||
if parser_path_count > 0:
|
||||
project_parser_info.append(ParserInfo(
|
||||
parser=parser.resource_type,
|
||||
path_count=parser_path_count,
|
||||
elapsed=time.perf_counter() - parser_start_timer
|
||||
))
|
||||
total_path_count = total_path_count + parser_path_count
|
||||
|
||||
elapsed = time.perf_counter() - start_timer
|
||||
project_info = ProjectLoaderInfo(
|
||||
project_name=project.project_name,
|
||||
path_count=total_path_count,
|
||||
elapsed=elapsed,
|
||||
parsers=project_parser_info
|
||||
)
|
||||
self._perf_info.projects.append(project_info)
|
||||
self._perf_info.path_count = (
|
||||
self._perf_info.path_count + total_path_count
|
||||
)
|
||||
|
||||
def load_only_macros(self) -> Manifest:
|
||||
old_results = self.read_parse_results()
|
||||
@@ -197,10 +275,16 @@ class ManifestLoader:
|
||||
self.results.macros.update(macro_manifest.macros)
|
||||
self.results.files.update(macro_manifest.files)
|
||||
|
||||
start_timer = time.perf_counter()
|
||||
|
||||
for project in self.all_projects.values():
|
||||
# parse a single project
|
||||
self.parse_project(project, macro_manifest, old_results)
|
||||
|
||||
self._perf_info.parse_project_elapsed = (
|
||||
time.perf_counter() - start_timer
|
||||
)
|
||||
|
||||
def write_parse_results(self):
|
||||
path = os.path.join(self.root_project.target_path,
|
||||
PARTIAL_PARSE_FILE_NAME)
|
||||
@@ -300,7 +384,11 @@ class ManifestLoader:
|
||||
# before we do anything else, patch the sources. This mutates
|
||||
# results.disabled, so it needs to come before the final 'disabled'
|
||||
# list is created
|
||||
start_patch = time.perf_counter()
|
||||
sources = patch_sources(self.results, self.root_project)
|
||||
self._perf_info.patch_sources_elapsed = (
|
||||
time.perf_counter() - start_patch
|
||||
)
|
||||
disabled = []
|
||||
for value in self.results.disabled.values():
|
||||
disabled.extend(value)
|
||||
@@ -314,14 +402,21 @@ class ManifestLoader:
|
||||
sources=sources,
|
||||
macros=self.results.macros,
|
||||
docs=self.results.docs,
|
||||
reports=self.results.reports,
|
||||
exposures=self.results.exposures,
|
||||
metadata=self.root_project.get_metadata(),
|
||||
disabled=disabled,
|
||||
files=self.results.files,
|
||||
selectors=self.root_project.manifest_selectors,
|
||||
)
|
||||
manifest.patch_nodes(self.results.patches)
|
||||
manifest.patch_macros(self.results.macro_patches)
|
||||
start_process = time.perf_counter()
|
||||
self.process_manifest(manifest)
|
||||
|
||||
self._perf_info.process_manifest_elapsed = (
|
||||
time.perf_counter() - start_process
|
||||
)
|
||||
|
||||
return manifest
|
||||
|
||||
@classmethod
|
||||
@@ -332,6 +427,8 @@ class ManifestLoader:
|
||||
macro_hook: Callable[[Manifest], Any],
|
||||
) -> Manifest:
|
||||
with PARSING_STATE:
|
||||
start_load_all = time.perf_counter()
|
||||
|
||||
projects = root_config.load_dependencies()
|
||||
loader = cls(root_config, projects, macro_hook)
|
||||
loader.load(macro_manifest=macro_manifest)
|
||||
@@ -339,6 +436,13 @@ class ManifestLoader:
|
||||
manifest = loader.create_manifest()
|
||||
_check_manifest(manifest, root_config)
|
||||
manifest.build_flat_graph()
|
||||
|
||||
loader._perf_info.load_all_elapsed = (
|
||||
time.perf_counter() - start_load_all
|
||||
)
|
||||
|
||||
loader.track_project_load()
|
||||
|
||||
return manifest
|
||||
|
||||
@classmethod
|
||||
@@ -514,6 +618,12 @@ def _process_docs_for_macro(
|
||||
arg.description = get_rendered(arg.description, context)
|
||||
|
||||
|
||||
def _process_docs_for_exposure(
|
||||
context: Dict[str, Any], exposure: ParsedExposure
|
||||
) -> None:
|
||||
exposure.description = get_rendered(exposure.description, context)
|
||||
|
||||
|
||||
def process_docs(manifest: Manifest, config: RuntimeConfig):
|
||||
for node in manifest.nodes.values():
|
||||
ctx = generate_runtime_docs(
|
||||
@@ -539,13 +649,21 @@ def process_docs(manifest: Manifest, config: RuntimeConfig):
|
||||
config.project_name,
|
||||
)
|
||||
_process_docs_for_macro(ctx, macro)
|
||||
for exposure in manifest.exposures.values():
|
||||
ctx = generate_runtime_docs(
|
||||
config,
|
||||
exposure,
|
||||
manifest,
|
||||
config.project_name,
|
||||
)
|
||||
_process_docs_for_exposure(ctx, exposure)
|
||||
|
||||
|
||||
def _process_refs_for_report(
|
||||
manifest: Manifest, current_project: str, report: ParsedReport
|
||||
def _process_refs_for_exposure(
|
||||
manifest: Manifest, current_project: str, exposure: ParsedExposure
|
||||
):
|
||||
"""Given a manifest and a report in that manifest, process its refs"""
|
||||
for ref in report.refs:
|
||||
"""Given a manifest and a exposure in that manifest, process its refs"""
|
||||
for ref in exposure.refs:
|
||||
target_model: Optional[Union[Disabled, ManifestNode]] = None
|
||||
target_model_name: str
|
||||
target_model_package: Optional[str] = None
|
||||
@@ -563,14 +681,14 @@ def _process_refs_for_report(
|
||||
target_model_name,
|
||||
target_model_package,
|
||||
current_project,
|
||||
report.package_name,
|
||||
exposure.package_name,
|
||||
)
|
||||
|
||||
if target_model is None or isinstance(target_model, Disabled):
|
||||
# This may raise. Even if it doesn't, we don't want to add
|
||||
# this report to the graph b/c there is no destination report
|
||||
# this exposure to the graph b/c there is no destination exposure
|
||||
invalid_ref_fail_unless_test(
|
||||
report, target_model_name, target_model_package,
|
||||
exposure, target_model_name, target_model_package,
|
||||
disabled=(isinstance(target_model, Disabled))
|
||||
)
|
||||
|
||||
@@ -578,8 +696,8 @@ def _process_refs_for_report(
|
||||
|
||||
target_model_id = target_model.unique_id
|
||||
|
||||
report.depends_on.nodes.append(target_model_id)
|
||||
manifest.update_report(report)
|
||||
exposure.depends_on.nodes.append(target_model_id)
|
||||
manifest.update_exposure(exposure)
|
||||
|
||||
|
||||
def _process_refs_for_node(
|
||||
@@ -630,33 +748,33 @@ def _process_refs_for_node(
|
||||
def process_refs(manifest: Manifest, current_project: str):
|
||||
for node in manifest.nodes.values():
|
||||
_process_refs_for_node(manifest, current_project, node)
|
||||
for report in manifest.reports.values():
|
||||
_process_refs_for_report(manifest, current_project, report)
|
||||
for exposure in manifest.exposures.values():
|
||||
_process_refs_for_exposure(manifest, current_project, exposure)
|
||||
return manifest
|
||||
|
||||
|
||||
def _process_sources_for_report(
|
||||
manifest: Manifest, current_project: str, report: ParsedReport
|
||||
def _process_sources_for_exposure(
|
||||
manifest: Manifest, current_project: str, exposure: ParsedExposure
|
||||
):
|
||||
target_source: Optional[Union[Disabled, ParsedSourceDefinition]] = None
|
||||
for source_name, table_name in report.sources:
|
||||
for source_name, table_name in exposure.sources:
|
||||
target_source = manifest.resolve_source(
|
||||
source_name,
|
||||
table_name,
|
||||
current_project,
|
||||
report.package_name,
|
||||
exposure.package_name,
|
||||
)
|
||||
if target_source is None or isinstance(target_source, Disabled):
|
||||
invalid_source_fail_unless_test(
|
||||
report,
|
||||
exposure,
|
||||
source_name,
|
||||
table_name,
|
||||
disabled=(isinstance(target_source, Disabled))
|
||||
)
|
||||
continue
|
||||
target_source_id = target_source.unique_id
|
||||
report.depends_on.nodes.append(target_source_id)
|
||||
manifest.update_report(report)
|
||||
exposure.depends_on.nodes.append(target_source_id)
|
||||
manifest.update_exposure(exposure)
|
||||
|
||||
|
||||
def _process_sources_for_node(
|
||||
@@ -692,8 +810,8 @@ def process_sources(manifest: Manifest, current_project: str):
|
||||
continue
|
||||
assert not isinstance(node, ParsedSourceDefinition)
|
||||
_process_sources_for_node(manifest, current_project, node)
|
||||
for report in manifest.reports.values():
|
||||
_process_sources_for_report(manifest, current_project, report)
|
||||
for exposure in manifest.exposures.values():
|
||||
_process_sources_for_exposure(manifest, current_project, exposure)
|
||||
return manifest
|
||||
|
||||
|
||||
|
||||
@@ -15,7 +15,7 @@ from dbt.contracts.graph.parsed import (
|
||||
ParsedMacroPatch,
|
||||
ParsedModelNode,
|
||||
ParsedNodePatch,
|
||||
ParsedReport,
|
||||
ParsedExposure,
|
||||
ParsedRPCNode,
|
||||
ParsedSeedNode,
|
||||
ParsedSchemaTestNode,
|
||||
@@ -70,7 +70,7 @@ class ParseResult(JsonSchemaMixin, Writable, Replaceable):
|
||||
sources: MutableMapping[str, UnpatchedSourceDefinition] = dict_field()
|
||||
docs: MutableMapping[str, ParsedDocumentation] = dict_field()
|
||||
macros: MutableMapping[str, ParsedMacro] = dict_field()
|
||||
reports: MutableMapping[str, ParsedReport] = dict_field()
|
||||
exposures: MutableMapping[str, ParsedExposure] = dict_field()
|
||||
macro_patches: MutableMapping[MacroKey, ParsedMacroPatch] = dict_field()
|
||||
patches: MutableMapping[str, ParsedNodePatch] = dict_field()
|
||||
source_patches: MutableMapping[SourceKey, SourcePatch] = dict_field()
|
||||
@@ -103,10 +103,10 @@ class ParseResult(JsonSchemaMixin, Writable, Replaceable):
|
||||
self.add_node_nofile(node)
|
||||
self.get_file(source_file).nodes.append(node.unique_id)
|
||||
|
||||
def add_report(self, source_file: SourceFile, report: ParsedReport):
|
||||
_check_duplicates(report, self.reports)
|
||||
self.reports[report.unique_id] = report
|
||||
self.get_file(source_file).reports.append(report.unique_id)
|
||||
def add_exposure(self, source_file: SourceFile, exposure: ParsedExposure):
|
||||
_check_duplicates(exposure, self.exposures)
|
||||
self.exposures[exposure.unique_id] = exposure
|
||||
self.get_file(source_file).exposures.append(exposure.unique_id)
|
||||
|
||||
def add_disabled_nofile(self, node: CompileResultNode):
|
||||
if node.unique_id in self.disabled:
|
||||
@@ -269,11 +269,11 @@ class ParseResult(JsonSchemaMixin, Writable, Replaceable):
|
||||
continue
|
||||
self._process_node(node_id, source_file, old_file, old_result)
|
||||
|
||||
for report_id in old_file.reports:
|
||||
report = _expect_value(
|
||||
report_id, old_result.reports, old_file, "reports"
|
||||
for exposure_id in old_file.exposures:
|
||||
exposure = _expect_value(
|
||||
exposure_id, old_result.exposures, old_file, "exposures"
|
||||
)
|
||||
self.add_report(source_file, report)
|
||||
self.add_exposure(source_file, exposure)
|
||||
|
||||
patched = False
|
||||
for name in old_file.patches:
|
||||
|
||||
@@ -13,7 +13,7 @@ from dbt.contracts.graph.unparsed import (
|
||||
UnparsedAnalysisUpdate,
|
||||
UnparsedMacroUpdate,
|
||||
UnparsedNodeUpdate,
|
||||
UnparsedReport,
|
||||
UnparsedExposure,
|
||||
)
|
||||
from dbt.exceptions import raise_compiler_error
|
||||
from dbt.parser.search import FileBlock
|
||||
@@ -82,7 +82,7 @@ Target = TypeVar(
|
||||
UnparsedMacroUpdate,
|
||||
UnparsedAnalysisUpdate,
|
||||
UnpatchedSourceDefinition,
|
||||
UnparsedReport,
|
||||
UnparsedExposure,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -20,7 +20,7 @@ from dbt.context.context_config import (
|
||||
)
|
||||
from dbt.context.configured import generate_schema_yml
|
||||
from dbt.context.target import generate_target_context
|
||||
from dbt.context.providers import generate_parse_report
|
||||
from dbt.context.providers import generate_parse_exposure
|
||||
from dbt.contracts.files import FileHash
|
||||
from dbt.contracts.graph.manifest import SourceFile
|
||||
from dbt.contracts.graph.model_config import SourceConfig
|
||||
@@ -31,7 +31,7 @@ from dbt.contracts.graph.parsed import (
|
||||
ParsedSchemaTestNode,
|
||||
ParsedMacroPatch,
|
||||
UnpatchedSourceDefinition,
|
||||
ParsedReport,
|
||||
ParsedExposure,
|
||||
)
|
||||
from dbt.contracts.graph.unparsed import (
|
||||
FreshnessThreshold,
|
||||
@@ -43,7 +43,7 @@ from dbt.contracts.graph.unparsed import (
|
||||
UnparsedColumn,
|
||||
UnparsedMacroUpdate,
|
||||
UnparsedNodeUpdate,
|
||||
UnparsedReport,
|
||||
UnparsedExposure,
|
||||
UnparsedSourceDefinition,
|
||||
)
|
||||
from dbt.exceptions import (
|
||||
@@ -95,6 +95,7 @@ def error_context(
|
||||
|
||||
class ParserRef:
|
||||
"""A helper object to hold parse-time references."""
|
||||
|
||||
def __init__(self):
|
||||
self.column_info: Dict[str, ColumnInfo] = {}
|
||||
|
||||
@@ -264,6 +265,11 @@ class SchemaParser(SimpleParser[SchemaTestBlock, ParsedSchemaTestNode]):
|
||||
base=False,
|
||||
)
|
||||
|
||||
def _get_relation_name(self, node: ParsedSourceDefinition):
|
||||
adapter = get_adapter(self.root_project)
|
||||
relation_cls = adapter.Relation
|
||||
return str(relation_cls.create_from(self.root_project, node))
|
||||
|
||||
def parse_source(
|
||||
self, target: UnpatchedSourceDefinition
|
||||
) -> ParsedSourceDefinition:
|
||||
@@ -302,7 +308,7 @@ class SchemaParser(SimpleParser[SchemaTestBlock, ParsedSchemaTestNode]):
|
||||
|
||||
default_database = self.root_project.credentials.database
|
||||
|
||||
return ParsedSourceDefinition(
|
||||
parsed_source = ParsedSourceDefinition(
|
||||
package_name=target.package_name,
|
||||
database=(source.database or default_database),
|
||||
schema=(source.schema or source.name),
|
||||
@@ -330,6 +336,11 @@ class SchemaParser(SimpleParser[SchemaTestBlock, ParsedSchemaTestNode]):
|
||||
unrendered_config=unrendered_config,
|
||||
)
|
||||
|
||||
# relation name is added after instantiation because the adapter does
|
||||
# not provide the relation name for an UnpatchedSourceDefinition object
|
||||
parsed_source.relation_name = self._get_relation_name(parsed_source)
|
||||
return parsed_source
|
||||
|
||||
def create_test_node(
|
||||
self,
|
||||
target: Union[UnpatchedSourceDefinition, UnparsedNodeUpdate],
|
||||
@@ -543,10 +554,10 @@ class SchemaParser(SimpleParser[SchemaTestBlock, ParsedSchemaTestNode]):
|
||||
for test in block.tests:
|
||||
self.parse_test(block, test, None)
|
||||
|
||||
def parse_reports(self, block: YamlBlock) -> None:
|
||||
parser = ReportParser(self, block)
|
||||
def parse_exposures(self, block: YamlBlock) -> None:
|
||||
parser = ExposureParser(self, block)
|
||||
for node in parser.parse():
|
||||
self.results.add_report(block.file, node)
|
||||
self.results.add_exposure(block.file, node)
|
||||
|
||||
def parse_file(self, block: FileBlock) -> None:
|
||||
dct = self._yaml_from_file(block.file)
|
||||
@@ -574,11 +585,15 @@ class SchemaParser(SimpleParser[SchemaTestBlock, ParsedSchemaTestNode]):
|
||||
parser = MacroPatchParser(self, yaml_block, plural)
|
||||
elif key == NodeType.Analysis:
|
||||
parser = AnalysisPatchParser(self, yaml_block, plural)
|
||||
elif key == NodeType.Exposure:
|
||||
# handle exposures separately, but they are
|
||||
# technically still "documentable"
|
||||
continue
|
||||
else:
|
||||
parser = TestablePatchParser(self, yaml_block, plural)
|
||||
for test_block in parser.parse():
|
||||
self.parse_tests(test_block)
|
||||
self.parse_reports(yaml_block)
|
||||
self.parse_exposures(yaml_block)
|
||||
|
||||
|
||||
Parsed = TypeVar(
|
||||
@@ -805,21 +820,21 @@ class MacroPatchParser(NonSourceParser[UnparsedMacroUpdate, ParsedMacroPatch]):
|
||||
self.results.add_macro_patch(self.yaml.file, result)
|
||||
|
||||
|
||||
class ReportParser(YamlReader):
|
||||
class ExposureParser(YamlReader):
|
||||
def __init__(self, schema_parser: SchemaParser, yaml: YamlBlock):
|
||||
super().__init__(schema_parser, yaml, NodeType.Report.pluralize())
|
||||
super().__init__(schema_parser, yaml, NodeType.Exposure.pluralize())
|
||||
self.schema_parser = schema_parser
|
||||
self.yaml = yaml
|
||||
|
||||
def parse_report(self, unparsed: UnparsedReport) -> ParsedReport:
|
||||
def parse_exposure(self, unparsed: UnparsedExposure) -> ParsedExposure:
|
||||
package_name = self.project.project_name
|
||||
unique_id = f'{NodeType.Report}.{package_name}.{unparsed.name}'
|
||||
unique_id = f'{NodeType.Exposure}.{package_name}.{unparsed.name}'
|
||||
path = self.yaml.path.relative_path
|
||||
|
||||
fqn = self.schema_parser.get_fqn_prefix(path)
|
||||
fqn.append(unparsed.name)
|
||||
|
||||
parsed = ParsedReport(
|
||||
parsed = ParsedExposure(
|
||||
package_name=package_name,
|
||||
root_path=self.project.project_root,
|
||||
path=path,
|
||||
@@ -833,7 +848,7 @@ class ReportParser(YamlReader):
|
||||
owner=unparsed.owner,
|
||||
maturity=unparsed.maturity,
|
||||
)
|
||||
ctx = generate_parse_report(
|
||||
ctx = generate_parse_exposure(
|
||||
parsed,
|
||||
self.root_project,
|
||||
self.schema_parser.macro_manifest,
|
||||
@@ -848,12 +863,12 @@ class ReportParser(YamlReader):
|
||||
# parsed now has a populated refs/sources
|
||||
return parsed
|
||||
|
||||
def parse(self) -> Iterable[ParsedReport]:
|
||||
def parse(self) -> Iterable[ParsedExposure]:
|
||||
for data in self.get_key_dicts():
|
||||
try:
|
||||
unparsed = UnparsedReport.from_dict(data)
|
||||
unparsed = UnparsedExposure.from_dict(data)
|
||||
except (ValidationError, JSONValidationException) as exc:
|
||||
msg = error_context(self.yaml.path, self.key, data, exc)
|
||||
raise CompilationException(msg) from exc
|
||||
parsed = self.parse_report(unparsed)
|
||||
parsed = self.parse_exposure(unparsed)
|
||||
yield parsed
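For a hypothetical exposure named `weekly_kpis` in a project named `jaffle_shop`, `parse_exposure` would build identifiers along these lines (a sketch; the subdirectory in the fqn is invented):

from dbt.node_types import NodeType

package_name = "jaffle_shop"
name = "weekly_kpis"
unique_id = f"{NodeType.Exposure}.{package_name}.{name}"
# -> "exposure.jaffle_shop.weekly_kpis" (the StrEnum formats as its value)
fqn = ["jaffle_shop", "marts", name]   # get_fqn_prefix(yaml path) + [name]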
|
||||
@@ -65,7 +65,7 @@ class RPCCompileRunner(GenericRPCRunner[RemoteCompileResult]):
|
||||
def execute(self, compiled_node, manifest) -> RemoteCompileResult:
|
||||
return RemoteCompileResult(
|
||||
raw_sql=compiled_node.raw_sql,
|
||||
compiled_sql=compiled_node.injected_sql,
|
||||
compiled_sql=compiled_node.compiled_sql,
|
||||
node=compiled_node,
|
||||
timing=[], # this will get added later
|
||||
logs=[],
|
||||
@@ -88,7 +88,7 @@ class RPCCompileRunner(GenericRPCRunner[RemoteCompileResult]):
|
||||
class RPCExecuteRunner(GenericRPCRunner[RemoteRunResult]):
|
||||
def execute(self, compiled_node, manifest) -> RemoteRunResult:
|
||||
_, execute_result = self.adapter.execute(
|
||||
compiled_node.injected_sql, fetch=True
|
||||
compiled_node.compiled_sql, fetch=True
|
||||
)
|
||||
|
||||
table = ResultTable(
|
||||
@@ -98,7 +98,7 @@ class RPCExecuteRunner(GenericRPCRunner[RemoteRunResult]):
|
||||
|
||||
return RemoteRunResult(
|
||||
raw_sql=compiled_node.raw_sql,
|
||||
compiled_sql=compiled_node.injected_sql,
|
||||
compiled_sql=compiled_node.compiled_sql,
|
||||
node=compiled_node,
|
||||
table=table,
|
||||
timing=[],
|
||||
|
||||
@@ -187,6 +187,7 @@ def get_results_context(
|
||||
|
||||
class StateHandler:
|
||||
"""A helper context manager to manage task handler state."""
|
||||
|
||||
def __init__(self, task_handler: 'RequestTaskHandler') -> None:
|
||||
self.handler = task_handler
|
||||
|
||||
@@ -248,6 +249,7 @@ class SetArgsStateHandler(StateHandler):
|
||||
"""A state handler that does not touch state on success and does not
|
||||
execute the teardown
|
||||
"""
|
||||
|
||||
def handle_completed(self):
|
||||
pass
|
||||
|
||||
@@ -257,6 +259,7 @@ class SetArgsStateHandler(StateHandler):
|
||||
|
||||
class RequestTaskHandler(threading.Thread, TaskHandlerProtocol):
|
||||
"""Handler for the single task triggered by a given jsonrpc request."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
manager: TaskManagerProtocol,
|
||||
@@ -400,6 +403,7 @@ class RequestTaskHandler(threading.Thread, TaskHandlerProtocol):
|
||||
try:
|
||||
with StateHandler(self):
|
||||
self.result = self.get_result()
|
||||
|
||||
except (dbt.exceptions.Exception, RPCException):
|
||||
# we probably got an error after the RPC call ran (and it was
|
||||
# probably deps...). By now anyone who wanted to see it has seen it
|
||||
|
||||
@@ -9,7 +9,7 @@ from dbt import tracking
|
||||
from dbt import ui
|
||||
from dbt.contracts.graph.manifest import Manifest
|
||||
from dbt.contracts.results import (
|
||||
RunModelResult, collect_timing_info
|
||||
NodeStatus, RunResult, collect_timing_info, RunStatus
|
||||
)
|
||||
from dbt.exceptions import (
|
||||
NotImplementedException, CompilationException, RuntimeException,
|
||||
@@ -165,6 +165,7 @@ class ExecutionContext:
|
||||
"""During execution and error handling, dbt makes use of mutable state:
|
||||
timing information and the newest (compiled vs executed) form of the node.
|
||||
"""
|
||||
|
||||
def __init__(self, node):
|
||||
self.timing = []
|
||||
self.node = node
|
||||
@@ -179,20 +180,20 @@ class BaseRunner(metaclass=ABCMeta):
|
||||
self.num_nodes = num_nodes
|
||||
|
||||
self.skip = False
|
||||
self.skip_cause: Optional[RunModelResult] = None
|
||||
self.skip_cause: Optional[RunResult] = None
|
||||
|
||||
@abstractmethod
|
||||
def compile(self, manifest: Manifest) -> Any:
|
||||
pass
|
||||
|
||||
def get_result_status(self, result) -> Dict[str, str]:
|
||||
if result.error:
|
||||
return {'node_status': 'error', 'node_error': str(result.error)}
|
||||
elif result.skip:
|
||||
if result.status == NodeStatus.Error:
|
||||
return {'node_status': 'error', 'node_error': str(result.message)}
|
||||
elif result.status == NodeStatus.Skipped:
|
||||
return {'node_status': 'skipped'}
|
||||
elif result.fail:
|
||||
elif result.status == NodeStatus.Fail:
|
||||
return {'node_status': 'failed'}
|
||||
elif result.warn:
|
||||
elif result.status == NodeStatus.Warn:
|
||||
return {'node_status': 'warn'}
|
||||
else:
|
||||
return {'node_status': 'passed'}
|
||||
@@ -212,52 +213,62 @@ class BaseRunner(metaclass=ABCMeta):
|
||||
|
||||
return result
|
||||
|
||||
def _build_run_result(self, node, start_time, error, status, timing_info,
|
||||
skip=False, fail=None, warn=None, agate_table=None):
|
||||
def _build_run_result(self, node, start_time, status, timing_info, message,
|
||||
agate_table=None, adapter_response=None):
|
||||
execution_time = time.time() - start_time
|
||||
thread_id = threading.current_thread().name
|
||||
return RunModelResult(
|
||||
node=node,
|
||||
error=error,
|
||||
skip=skip,
|
||||
if adapter_response is None:
|
||||
adapter_response = {}
|
||||
return RunResult(
|
||||
status=status,
|
||||
fail=fail,
|
||||
warn=warn,
|
||||
execution_time=execution_time,
|
||||
thread_id=thread_id,
|
||||
execution_time=execution_time,
|
||||
timing=timing_info,
|
||||
message=message,
|
||||
node=node,
|
||||
agate_table=agate_table,
|
||||
adapter_response=adapter_response
|
||||
)
|
||||
|
||||
def error_result(self, node, error, start_time, timing_info):
|
||||
def error_result(self, node, message, start_time, timing_info):
|
||||
return self._build_run_result(
|
||||
node=node,
|
||||
start_time=start_time,
|
||||
error=error,
|
||||
status='ERROR',
|
||||
timing_info=timing_info
|
||||
status=RunStatus.Error,
|
||||
timing_info=timing_info,
|
||||
message=message,
|
||||
)
|
||||
|
||||
def ephemeral_result(self, node, start_time, timing_info):
|
||||
return self._build_run_result(
|
||||
node=node,
|
||||
start_time=start_time,
|
||||
error=None,
|
||||
status=None,
|
||||
timing_info=timing_info
|
||||
status=RunStatus.Success,
|
||||
timing_info=timing_info,
|
||||
message=None
|
||||
)
|
||||
|
||||
def from_run_result(self, result, start_time, timing_info):
|
||||
return self._build_run_result(
|
||||
node=result.node,
|
||||
start_time=start_time,
|
||||
error=result.error,
|
||||
skip=result.skip,
|
||||
status=result.status,
|
||||
fail=result.fail,
|
||||
warn=result.warn,
|
||||
timing_info=timing_info,
|
||||
message=result.message,
|
||||
agate_table=result.agate_table,
|
||||
adapter_response=result.adapter_response
|
||||
)
|
||||
|
||||
def skip_result(self, node, message):
|
||||
thread_id = threading.current_thread().name
|
||||
return RunResult(
|
||||
status=RunStatus.Skipped,
|
||||
thread_id=thread_id,
|
||||
execution_time=0,
|
||||
timing=[],
|
||||
message=message,
|
||||
node=node,
|
||||
adapter_response={}
|
||||
)
|
||||
|
||||
def compile_and_execute(self, manifest, ctx):
|
||||
@@ -340,7 +351,7 @@ class BaseRunner(metaclass=ABCMeta):
|
||||
# an error
|
||||
if (
|
||||
exc_str is not None and result is not None and
|
||||
result.error is None and error is None
|
||||
result.status != NodeStatus.Error and error is None
|
||||
):
|
||||
error = exc_str
|
||||
|
||||
@@ -389,7 +400,7 @@ class BaseRunner(metaclass=ABCMeta):
|
||||
schema_name = self.node.schema
|
||||
node_name = self.node.name
|
||||
|
||||
error = None
|
||||
error_message = None
|
||||
if not self.node.is_ephemeral_model:
|
||||
# if this model was skipped due to an upstream ephemeral model
|
||||
# failure, print a special 'error skip' message.
|
||||
@@ -408,7 +419,7 @@ class BaseRunner(metaclass=ABCMeta):
|
||||
'an ephemeral failure'
|
||||
)
|
||||
# set an error so dbt will exit with an error code
|
||||
error = (
|
||||
error_message = (
|
||||
'Compilation Error in {}, caused by compilation error '
|
||||
'in referenced ephemeral model {}'
|
||||
.format(self.node.unique_id,
|
||||
@@ -423,7 +434,7 @@ class BaseRunner(metaclass=ABCMeta):
|
||||
self.num_nodes
|
||||
)
|
||||
|
||||
node_result = RunModelResult(self.node, skip=True, error=error)
|
||||
node_result = self.skip_result(self.node, error_message)
|
||||
return node_result
|
||||
|
||||
def do_skip(self, cause=None):
|
||||
|
||||
@@ -2,7 +2,7 @@ import os.path
|
||||
import os
|
||||
import shutil
|
||||
|
||||
from dbt.task.base import BaseTask
|
||||
from dbt.task.base import BaseTask, move_to_nearest_project_dir
|
||||
from dbt.logger import GLOBAL_LOGGER as logger
|
||||
from dbt.config import UnsetProfileConfig
|
||||
|
||||
@@ -32,6 +32,7 @@ class CleanTask(BaseTask):
|
||||
This function takes all the paths in the target file
|
||||
and cleans the project paths that are not protected.
|
||||
"""
|
||||
move_to_nearest_project_dir(self.args)
|
||||
for path in self.config.clean_targets:
|
||||
logger.info("Checking {}/*".format(path))
|
||||
if not self.__is_protected_path(path):
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import threading
|
||||
from .runnable import GraphRunnableTask
|
||||
from .base import BaseRunner
|
||||
|
||||
from dbt.contracts.results import RunModelResult
|
||||
from dbt.contracts.results import RunStatus, RunResult
|
||||
from dbt.exceptions import InternalException
|
||||
from dbt.graph import ResourceTypeSelector, SelectionSpec, parse_difference
|
||||
from dbt.logger import print_timestamped_line
|
||||
@@ -16,7 +17,15 @@ class CompileRunner(BaseRunner):
|
||||
pass
|
||||
|
||||
def execute(self, compiled_node, manifest):
|
||||
return RunModelResult(compiled_node)
|
||||
return RunResult(
|
||||
node=compiled_node,
|
||||
status=RunStatus.Success,
|
||||
timing=[],
|
||||
thread_id=threading.current_thread().name,
|
||||
execution_time=0,
|
||||
message=None,
|
||||
adapter_response={}
|
||||
)
|
||||
|
||||
def compile(self, manifest):
|
||||
compiler = self.adapter.get_compiler()
|
||||
|
||||
@@ -48,7 +48,6 @@ Check your database credentials and try again. For more information, visit:
|
||||
{url}
|
||||
'''.lstrip()
|
||||
|
||||
|
||||
MISSING_PROFILE_MESSAGE = '''
|
||||
dbt looked for a profiles.yml file in {path}, but did
|
||||
not find one. For more information on configuring your profile, consult the
|
||||
@@ -90,6 +89,7 @@ class DebugTask(BaseTask):
|
||||
self.profile_name: Optional[str] = None
|
||||
self.project: Optional[Project] = None
|
||||
self.project_fail_details = ''
|
||||
self.any_failure = False
|
||||
self.messages: List[str] = []
|
||||
|
||||
@property
|
||||
@@ -111,7 +111,7 @@ class DebugTask(BaseTask):
|
||||
def run(self):
|
||||
if self.args.config_dir:
|
||||
self.path_info()
|
||||
return
|
||||
return not self.any_failure
|
||||
|
||||
version = get_installed_version().to_version_string(skip_matcher=True)
|
||||
print('dbt version: {}'.format(version))
|
||||
@@ -129,6 +129,11 @@ class DebugTask(BaseTask):
|
||||
print(message)
|
||||
print('')
|
||||
|
||||
return not self.any_failure
|
||||
|
||||
def interpret_results(self, results):
|
||||
return results
|
||||
|
||||
def _load_project(self):
|
||||
if not os.path.exists(self.project_path):
|
||||
self.project_fail_details = FILE_NOT_FOUND
|
||||
@@ -245,6 +250,7 @@ class DebugTask(BaseTask):
|
||||
self.messages.append(MISSING_PROFILE_MESSAGE.format(
|
||||
path=self.profile_path, url=ProfileConfigDocs
|
||||
))
|
||||
self.any_failure = True
|
||||
return red('ERROR not found')
|
||||
|
||||
try:
|
||||
@@ -283,6 +289,7 @@ class DebugTask(BaseTask):
|
||||
dbt.clients.system.run_cmd(os.getcwd(), ['git', '--help'])
|
||||
except dbt.exceptions.ExecutableError as exc:
|
||||
self.messages.append('Error from git --help: {!s}'.format(exc))
|
||||
self.any_failure = True
|
||||
return red('ERROR')
|
||||
return green('OK found')
|
||||
|
||||
@@ -310,6 +317,8 @@ class DebugTask(BaseTask):
|
||||
def _log_project_fail(self):
|
||||
if not self.project_fail_details:
|
||||
return
|
||||
|
||||
self.any_failure = True
|
||||
if self.project_fail_details == FILE_NOT_FOUND:
|
||||
return
|
||||
print('Project loading failed for the following reason:')
|
||||
@@ -319,6 +328,8 @@ class DebugTask(BaseTask):
|
||||
def _log_profile_fail(self):
|
||||
if not self.profile_fail_details:
|
||||
return
|
||||
|
||||
self.any_failure = True
|
||||
if self.profile_fail_details == FILE_NOT_FOUND:
|
||||
return
|
||||
print('Profile loading failed for the following reason:')
|
||||
@@ -347,6 +358,7 @@ class DebugTask(BaseTask):
|
||||
result = self.attempt_connection(self.profile)
|
||||
if result is not None:
|
||||
self.messages.append(result)
|
||||
self.any_failure = True
|
||||
return red('ERROR')
|
||||
return green('OK connection ok')
|
||||
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import os
|
||||
import threading
|
||||
import time
|
||||
from typing import Dict
|
||||
|
||||
from .base import BaseRunner
|
||||
from .printer import (
|
||||
@@ -13,16 +12,13 @@ from .runnable import GraphRunnableTask
|
||||
|
||||
from dbt.contracts.results import (
|
||||
FreshnessExecutionResultArtifact,
|
||||
FreshnessResult,
|
||||
PartialResult,
|
||||
SourceFreshnessResult,
|
||||
FreshnessResult, PartialSourceFreshnessResult,
|
||||
SourceFreshnessResult, FreshnessStatus
|
||||
)
|
||||
from dbt.exceptions import RuntimeException, InternalException
|
||||
from dbt.logger import print_timestamped_line
|
||||
from dbt.node_types import NodeType
|
||||
|
||||
from dbt import utils
|
||||
|
||||
from dbt.graph import NodeSelector, SelectionSpec, parse_difference
|
||||
from dbt.contracts.graph.parsed import ParsedSourceDefinition
|
||||
|
||||
@@ -36,12 +32,6 @@ class FreshnessRunner(BaseRunner):
|
||||
'Freshness: nodes cannot be skipped!'
|
||||
)
|
||||
|
||||
def get_result_status(self, result) -> Dict[str, str]:
|
||||
if result.error:
|
||||
return {'node_status': 'error', 'node_error': str(result.error)}
|
||||
else:
|
||||
return {'node_status': str(result.status)}
|
||||
|
||||
def before_execute(self):
|
||||
description = 'freshness of {0.source_name}.{0.name}'.format(self.node)
|
||||
print_start_line(description, self.node_index, self.num_nodes)
|
||||
@@ -49,18 +39,33 @@ class FreshnessRunner(BaseRunner):
|
||||
def after_execute(self, result):
|
||||
print_freshness_result_line(result, self.node_index, self.num_nodes)
|
||||
|
||||
def _build_run_result(self, node, start_time, error, status, timing_info,
|
||||
skip=False, failed=None):
|
||||
def error_result(self, node, message, start_time, timing_info):
|
||||
return self._build_run_result(
|
||||
node=node,
|
||||
start_time=start_time,
|
||||
status=FreshnessStatus.RuntimeErr,
|
||||
timing_info=timing_info,
|
||||
message=message,
|
||||
)
|
||||
|
||||
def _build_run_result(
|
||||
self,
|
||||
node,
|
||||
start_time,
|
||||
status,
|
||||
timing_info,
|
||||
message
|
||||
):
|
||||
execution_time = time.time() - start_time
|
||||
thread_id = threading.current_thread().name
|
||||
status = utils.lowercase(status)
|
||||
return PartialResult(
|
||||
node=node,
|
||||
return PartialSourceFreshnessResult(
|
||||
status=status,
|
||||
error=error,
|
||||
execution_time=execution_time,
|
||||
thread_id=thread_id,
|
||||
execution_time=execution_time,
|
||||
timing=timing_info,
|
||||
message=message,
|
||||
node=node,
|
||||
adapter_response={}
|
||||
)
|
||||
|
||||
def from_run_result(self, result, start_time, timing_info):
|
||||
@@ -95,6 +100,10 @@ class FreshnessRunner(BaseRunner):
|
||||
node=compiled_node,
|
||||
status=status,
|
||||
thread_id=threading.current_thread().name,
|
||||
timing=[],
|
||||
execution_time=0,
|
||||
message=None,
|
||||
adapter_response={},
|
||||
**freshness
|
||||
)
|
||||
|
||||
@@ -160,7 +169,10 @@ class FreshnessTask(GraphRunnableTask):
|
||||
|
||||
def task_end_messages(self, results):
|
||||
for result in results:
|
||||
if result.error is not None:
|
||||
if result.status in (
|
||||
FreshnessStatus.Error,
|
||||
FreshnessStatus.RuntimeErr
|
||||
):
|
||||
print_run_result_error(result)
|
||||
|
||||
print_timestamped_line('Done.')
|
||||
|
||||
@@ -11,8 +11,8 @@ from dbt.adapters.factory import get_adapter
|
||||
from dbt.contracts.graph.compiled import CompileResultNode
|
||||
from dbt.contracts.graph.manifest import Manifest
|
||||
from dbt.contracts.results import (
|
||||
TableMetadata, CatalogTable, CatalogResults, Primitive, CatalogKey,
|
||||
StatsItem, StatsDict, ColumnMetadata, CatalogArtifact
|
||||
NodeStatus, TableMetadata, CatalogTable, CatalogResults, Primitive,
|
||||
CatalogKey, StatsItem, StatsDict, ColumnMetadata, CatalogArtifact
|
||||
)
|
||||
from dbt.exceptions import InternalException
|
||||
from dbt.include.global_project import DOCS_INDEX_FILE_PATH
|
||||
@@ -211,7 +211,7 @@ class GenerateTask(CompileTask):
|
||||
compile_results = None
|
||||
if self.args.compile:
|
||||
compile_results = CompileTask.run(self)
|
||||
if any(r.error is not None for r in compile_results):
|
||||
if any(r.status == NodeStatus.Error for r in compile_results):
|
||||
print_timestamped_line(
|
||||
'compile failed, cannot generate docs'
|
||||
)
|
||||
|
||||
@@ -2,7 +2,7 @@ import json
|
||||
from typing import Type
|
||||
|
||||
from dbt.contracts.graph.parsed import (
|
||||
ParsedReport,
|
||||
ParsedExposure,
|
||||
ParsedSourceDefinition,
|
||||
)
|
||||
from dbt.graph import (
|
||||
@@ -24,7 +24,7 @@ class ListTask(GraphRunnableTask):
|
||||
NodeType.Seed,
|
||||
NodeType.Test,
|
||||
NodeType.Source,
|
||||
NodeType.Report,
|
||||
NodeType.Exposure,
|
||||
))
|
||||
ALL_RESOURCE_VALUES = DEFAULT_RESOURCE_VALUES | frozenset((
|
||||
NodeType.Analysis,
|
||||
@@ -76,8 +76,8 @@ class ListTask(GraphRunnableTask):
|
||||
yield self.manifest.nodes[node]
|
||||
elif node in self.manifest.sources:
|
||||
yield self.manifest.sources[node]
|
||||
elif node in self.manifest.reports:
|
||||
yield self.manifest.reports[node]
|
||||
elif node in self.manifest.exposures:
|
||||
yield self.manifest.exposures[node]
|
||||
else:
|
||||
raise RuntimeException(
|
||||
f'Got an unexpected result from node selection: "{node}"'
|
||||
@@ -93,11 +93,11 @@ class ListTask(GraphRunnableTask):
|
||||
node.package_name, node.source_name, node.name
|
||||
])
|
||||
yield f'source:{source_selector}'
|
||||
elif node.resource_type == NodeType.Report:
|
||||
assert isinstance(node, ParsedReport)
|
||||
# reports are searched for by pkg.report_name
|
||||
report_selector = '.'.join([node.package_name, node.name])
|
||||
yield f'report:{report_selector}'
|
||||
elif node.resource_type == NodeType.Exposure:
|
||||
assert isinstance(node, ParsedExposure)
|
||||
# exposures are searched for by pkg.exposure_name
|
||||
exposure_selector = '.'.join([node.package_name, node.name])
|
||||
yield f'exposure:{exposure_selector}'
|
||||
else:
|
||||
# everything else is from `fqn`
|
||||
yield '.'.join(node.fqn)
|
||||
@@ -190,4 +190,5 @@ class ListTask(GraphRunnableTask):
|
||||
)
|
||||
|
||||
def interpret_results(self, results):
|
||||
return bool(results)
|
||||
# list command should always return 0 as exit code
|
||||
return True
|
||||
|
||||
core/dbt/task/parse.py (new file, 93 lines)
@@ -0,0 +1,93 @@
|
||||
# This task is intended to be used for diagnosis, development and
|
||||
# performance analysis.
|
||||
# It separates out the parsing flows for easier logging and
|
||||
# debugging.
|
||||
# To store cProfile performance data, execute with the '-r'
|
||||
# flag and an output file: dbt -r dbt.cprof parse.
|
||||
# Use a visualizer such as snakeviz to look at the output:
|
||||
# snakeviz dbt.cprof
|
||||
from dbt.task.base import ConfiguredTask
|
||||
from dbt.adapters.factory import get_adapter
|
||||
from dbt.parser.manifest import Manifest, ManifestLoader, _check_manifest
|
||||
from dbt.logger import DbtProcessState, print_timestamped_line
|
||||
from dbt.graph import Graph
|
||||
import time
|
||||
from typing import Optional
|
||||
import os
|
||||
|
||||
MANIFEST_FILE_NAME = 'manifest.json'
|
||||
PERF_INFO_FILE_NAME = 'perf_info.json'
|
||||
PARSING_STATE = DbtProcessState('parsing')
|
||||
|
||||
|
||||
class ParseTask(ConfiguredTask):
|
||||
def __init__(self, args, config):
|
||||
super().__init__(args, config)
|
||||
self.manifest: Optional[Manifest] = None
|
||||
self.graph: Optional[Graph] = None
|
||||
self.loader: Optional[ManifestLoader] = None
|
||||
|
||||
def write_manifest(self):
|
||||
path = os.path.join(self.config.target_path, MANIFEST_FILE_NAME)
|
||||
self.manifest.write(path)
|
||||
|
||||
def write_perf_info(self):
|
||||
path = os.path.join(self.config.target_path, PERF_INFO_FILE_NAME)
|
||||
self.loader._perf_info.write(path)
|
||||
print_timestamped_line(f"Performance info: {path}")
|
||||
|
||||
# This method takes code that normally exists in other files
|
||||
# and pulls it in here, to simplify logging and make the
|
||||
# parsing flow-of-control easier to understand and manage,
|
||||
# with the downside that if changes happen in those other methods,
|
||||
# similar changes might need to be made here.
|
||||
# ManifestLoader.get_full_manifest
|
||||
# ManifestLoader.load
|
||||
# ManifestLoader.load_all
|
||||
|
||||
def get_full_manifest(self):
|
||||
adapter = get_adapter(self.config) # type: ignore
|
||||
macro_manifest: Manifest = adapter.load_macro_manifest()
|
||||
print_timestamped_line("Macro manifest loaded")
|
||||
root_config = self.config
|
||||
macro_hook = adapter.connections.set_query_header
|
||||
with PARSING_STATE:
|
||||
start_load_all = time.perf_counter()
|
||||
projects = root_config.load_dependencies()
|
||||
print_timestamped_line("Dependencies loaded")
|
||||
loader = ManifestLoader(root_config, projects, macro_hook)
|
||||
print_timestamped_line("ManifestLoader created")
|
||||
loader.load(macro_manifest=macro_manifest)
|
||||
print_timestamped_line("Manifest loaded")
|
||||
loader.write_parse_results()
|
||||
print_timestamped_line("Parse results written")
|
||||
manifest = loader.create_manifest()
|
||||
print_timestamped_line("Manifest created")
|
||||
_check_manifest(manifest, root_config)
|
||||
print_timestamped_line("Manifest checked")
|
||||
manifest.build_flat_graph()
|
||||
print_timestamped_line("Flat graph built")
|
||||
loader._perf_info.load_all_elapsed = (
|
||||
time.perf_counter() - start_load_all
|
||||
)
|
||||
|
||||
self.loader = loader
|
||||
self.manifest = manifest
|
||||
print_timestamped_line("Manifest loaded")
|
||||
|
||||
def compile_manifest(self):
|
||||
adapter = get_adapter(self.config)
|
||||
compiler = adapter.get_compiler()
|
||||
self.graph = compiler.compile(self.manifest)
|
||||
|
||||
def run(self):
|
||||
print_timestamped_line('Start parsing.')
|
||||
self.get_full_manifest()
|
||||
if self.args.compile:
|
||||
print_timestamped_line('Compiling.')
|
||||
self.compile_manifest()
|
||||
if self.args.write_manifest:
|
||||
print_timestamped_line('Writing manifest.')
|
||||
self.write_manifest()
|
||||
self.write_perf_info()
|
||||
print_timestamped_line('Done.')
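The profiling hint at the top of this new file pairs `dbt -r dbt.cprof parse` with snakeviz; the same cProfile output can also be inspected with the standard-library pstats module, e.g. (assuming dbt.cprof exists in the working directory):

import pstats

stats = pstats.Stats("dbt.cprof")
stats.sort_stats("cumulative").print_stats(20)  # top 20 calls by cumulative time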
@@ -11,6 +11,10 @@ from dbt.tracking import InvocationProcessor
|
||||
from dbt import ui
|
||||
from dbt import utils
|
||||
|
||||
from dbt.contracts.results import (
|
||||
FreshnessStatus, NodeResult, NodeStatus, TestStatus
|
||||
)
|
||||
|
||||
|
||||
def print_fancy_output_line(
|
||||
msg: str, status: str, logger_fn: Callable, index: Optional[int],
|
||||
@@ -98,37 +102,37 @@ def print_cancel_line(model) -> None:
|
||||
|
||||
def get_printable_result(
|
||||
result, success: str, error: str) -> Tuple[str, str, Callable]:
|
||||
if result.error is not None:
|
||||
if result.status == NodeStatus.Error:
|
||||
info = 'ERROR {}'.format(error)
|
||||
status = ui.red(result.status)
|
||||
status = ui.red(result.status.upper())
|
||||
logger_fn = logger.error
|
||||
else:
|
||||
info = 'OK {}'.format(success)
|
||||
status = ui.green(result.status)
|
||||
status = ui.green(result.message)
|
||||
logger_fn = logger.info
|
||||
|
||||
return info, status, logger_fn
|
||||
|
||||
|
||||
def print_test_result_line(
|
||||
result, schema_name, index: int, total: int
|
||||
result: NodeResult, schema_name, index: int, total: int
|
||||
) -> None:
|
||||
model = result.node
|
||||
|
||||
if result.error is not None:
|
||||
if result.status == TestStatus.Error:
|
||||
info = "ERROR"
|
||||
color = ui.red
|
||||
logger_fn = logger.error
|
||||
elif result.status == 0:
|
||||
elif result.status == TestStatus.Pass:
|
||||
info = 'PASS'
|
||||
color = ui.green
|
||||
logger_fn = logger.info
|
||||
elif result.warn:
|
||||
info = 'WARN {}'.format(result.status)
|
||||
elif result.status == TestStatus.Warn:
|
||||
info = 'WARN {}'.format(result.message)
|
||||
color = ui.yellow
|
||||
logger_fn = logger.warning
|
||||
elif result.fail:
|
||||
info = 'FAIL {}'.format(result.status)
|
||||
elif result.status == TestStatus.Fail:
|
||||
info = 'FAIL {}'.format(result.message)
|
||||
color = ui.red
|
||||
logger_fn = logger.error
|
||||
else:
|
||||
@@ -196,15 +200,15 @@ def print_seed_result_line(result, schema_name: str, index: int, total: int):
|
||||
|
||||
|
||||
def print_freshness_result_line(result, index: int, total: int) -> None:
|
||||
if result.error:
|
||||
if result.status == FreshnessStatus.RuntimeErr:
|
||||
info = 'ERROR'
|
||||
color = ui.red
|
||||
logger_fn = logger.error
|
||||
elif result.status == 'error':
|
||||
elif result.status == FreshnessStatus.Error:
|
||||
info = 'ERROR STALE'
|
||||
color = ui.red
|
||||
logger_fn = logger.error
|
||||
elif result.status == 'warn':
|
||||
elif result.status == FreshnessStatus.Warn:
|
||||
info = 'WARN'
|
||||
color = ui.yellow
|
||||
logger_fn = logger.warning
|
||||
@@ -220,11 +224,7 @@ def print_freshness_result_line(result, index: int, total: int) -> None:
|
||||
source_name = result.source_name
|
||||
table_name = result.table_name
|
||||
|
||||
msg = "{info} freshness of {source_name}.{table_name}".format(
|
||||
info=info,
|
||||
source_name=source_name,
|
||||
table_name=table_name
|
||||
)
|
||||
msg = f"{info} freshness of {source_name}.{table_name}"
|
||||
|
||||
print_fancy_output_line(
|
||||
msg,
|
||||
@@ -237,14 +237,16 @@ def print_freshness_result_line(result, index: int, total: int) -> None:
|
||||
|
||||
|
||||
def interpret_run_result(result) -> str:
|
||||
if result.error is not None or result.fail:
|
||||
if result.status in (NodeStatus.Error, NodeStatus.Fail):
|
||||
return 'error'
|
||||
elif result.skipped:
|
||||
elif result.status == NodeStatus.Skipped:
|
||||
return 'skip'
|
||||
elif result.warn:
|
||||
elif result.status == NodeStatus.Warn:
|
||||
return 'warn'
|
||||
else:
|
||||
elif result.status in (NodeStatus.Pass, NodeStatus.Success):
|
||||
return 'pass'
|
||||
else:
|
||||
raise RuntimeError(f"unhandled result {result}")
|
||||
|
||||
|
||||
def print_run_status_line(results) -> None:
|
||||
@@ -272,7 +274,9 @@ def print_run_result_error(
|
||||
with TextOnly():
|
||||
logger.info("")
|
||||
|
||||
if result.fail or (is_warning and result.warn):
|
||||
if result.status == NodeStatus.Fail or (
|
||||
is_warning and result.status == NodeStatus.Warn
|
||||
):
|
||||
if is_warning:
|
||||
color = ui.yellow
|
||||
info = 'Warning'
|
||||
@@ -288,12 +292,13 @@ def print_run_result_error(
|
||||
result.node.original_file_path))
|
||||
|
||||
try:
|
||||
int(result.status)
|
||||
# if message is int, must be rows returned for a test
|
||||
int(result.message)
|
||||
except ValueError:
|
||||
logger.error(" Status: {}".format(result.status))
|
||||
else:
|
||||
status = utils.pluralize(result.status, 'result')
|
||||
logger.error(" Got {}, expected 0.".format(status))
|
||||
num_rows = utils.pluralize(result.message, 'result')
|
||||
logger.error(" Got {}, expected 0.".format(num_rows))
|
||||
|
||||
if result.node.build_path is not None:
|
||||
with TextOnly():
|
||||
@@ -301,9 +306,9 @@ def print_run_result_error(
|
||||
logger.info(" compiled SQL at {}".format(
|
||||
result.node.build_path))
|
||||
|
||||
else:
|
||||
elif result.message is not None:
|
||||
first = True
|
||||
for line in result.error.split("\n"):
|
||||
for line in result.message.split("\n"):
|
||||
if first:
|
||||
logger.error(ui.yellow(line))
|
||||
first = False
|
||||
@@ -342,8 +347,21 @@ def print_end_of_run_summary(
|
||||
|
||||
|
||||
def print_run_end_messages(results, keyboard_interrupt: bool = False) -> None:
|
||||
errors = [r for r in results if r.error is not None or r.fail]
|
||||
warnings = [r for r in results if r.warn]
|
||||
errors, warnings = [], []
|
||||
for r in results:
|
||||
if r.status in (
|
||||
NodeStatus.RuntimeErr,
|
||||
NodeStatus.Error,
|
||||
NodeStatus.Fail
|
||||
):
|
||||
errors.append(r)
|
||||
elif r.status == NodeStatus.Skipped and r.message is not None:
|
||||
# this means we skipped a node because of an issue upstream,
|
||||
# so include it as an error
|
||||
errors.append(r)
|
||||
elif r.status == NodeStatus.Warn:
|
||||
warnings.append(r)
|
||||
|
||||
with DbtStatusMessage(), InvocationProcessor():
|
||||
print_end_of_run_summary(len(errors),
|
||||
len(warnings),
|
||||
|
||||
@@ -129,6 +129,10 @@ class RemoteTestProjectTask(RPCCommandTask[RPCTestParameters], TestTask):
|
||||
self.args.schema = params.schema
|
||||
if params.threads is not None:
|
||||
self.args.threads = params.threads
|
||||
if params.defer is None:
|
||||
self.args.defer = flags.DEFER_MODE
|
||||
else:
|
||||
self.args.defer = params.defer
|
||||
|
||||
self.args.state = state_path(params.state)
|
||||
self.set_previous_state()
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
# import these so we can find them
|
||||
from . import sql_commands # noqa
|
||||
from . import project_commands # noqa
|
||||
from . import deps # noqa
|
||||
from . import deps # noqa
|
||||
import multiprocessing.queues # noqa - https://bugs.python.org/issue41567
|
||||
import json
|
||||
import os
|
||||
import signal
|
||||
|
||||
@@ -1,7 +1,10 @@
|
||||
import functools
|
||||
import threading
|
||||
import time
|
||||
from typing import List, Dict, Any, Iterable, Set, Tuple, Optional, AbstractSet
|
||||
|
||||
from hologram import JsonSchemaMixin
|
||||
|
||||
from .compile import CompileRunner, CompileTask
|
||||
|
||||
from .printer import (
|
||||
@@ -23,7 +26,7 @@ from dbt.contracts.graph.compiled import CompileResultNode
|
||||
from dbt.contracts.graph.manifest import WritableManifest
|
||||
from dbt.contracts.graph.model_config import Hook
|
||||
from dbt.contracts.graph.parsed import ParsedHookNode
|
||||
from dbt.contracts.results import RunModelResult
|
||||
from dbt.contracts.results import NodeStatus, RunResult, RunStatus
|
||||
from dbt.exceptions import (
|
||||
CompilationException,
|
||||
InternalException,
|
||||
@@ -105,9 +108,9 @@ def track_model_run(index, num_nodes, run_model_result):
|
||||
"index": index,
|
||||
"total": num_nodes,
|
||||
"execution_time": run_model_result.execution_time,
|
||||
"run_status": run_model_result.status,
|
||||
"run_skipped": run_model_result.skip,
|
||||
"run_error": None,
|
||||
"run_status": str(run_model_result.status).upper(),
|
||||
"run_skipped": run_model_result.status == NodeStatus.Skipped,
|
||||
"run_error": run_model_result.status == NodeStatus.Error,
|
||||
"model_materialization": run_model_result.node.get_materialization(),
|
||||
"model_id": utils.get_hash(run_model_result.node),
|
||||
"hashed_contents": utils.get_hashed_contents(
|
||||
@@ -187,7 +190,18 @@ class ModelRunner(CompileRunner):
|
||||
|
||||
def _build_run_model_result(self, model, context):
|
||||
result = context['load_result']('main')
|
||||
return RunModelResult(model, status=result.status)
|
||||
adapter_response = {}
|
||||
if isinstance(result.response, JsonSchemaMixin):
|
||||
adapter_response = result.response.to_dict()
|
||||
return RunResult(
|
||||
node=model,
|
||||
status=RunStatus.Success,
|
||||
timing=[],
|
||||
thread_id=threading.current_thread().name,
|
||||
execution_time=0,
|
||||
message=str(result.response),
|
||||
adapter_response=adapter_response
|
||||
)
|
||||
|
||||
def _materialization_relations(
|
||||
self, result: Any, model
|
||||
@@ -255,7 +269,7 @@ class RunTask(CompileTask):
|
||||
def get_hook_sql(self, adapter, hook, idx, num_hooks, extra_context):
|
||||
compiler = adapter.get_compiler()
|
||||
compiled = compiler.compile_node(hook, self.manifest, extra_context)
|
||||
statement = compiled.injected_sql
|
||||
statement = compiled.compiled_sql
|
||||
hook_index = hook.index or num_hooks
|
||||
hook_obj = get_hook(statement, index=hook_index)
|
||||
return hook_obj.sql or ''
|
||||
@@ -322,7 +336,7 @@ class RunTask(CompileTask):
|
||||
|
||||
with finishctx, DbtModelState({'node_status': 'passed'}):
|
||||
print_hook_end_line(
|
||||
hook_text, status, idx, num_hooks, timer.elapsed
|
||||
hook_text, str(status), idx, num_hooks, timer.elapsed
|
||||
)
|
||||
|
||||
self._total_executed += len(ordered_hooks)
|
||||
@@ -372,7 +386,7 @@ class RunTask(CompileTask):
|
||||
)
|
||||
return state.manifest
|
||||
|
||||
def defer_to_manifest(self, selected_uids: AbstractSet[str]):
|
||||
def defer_to_manifest(self, adapter, selected_uids: AbstractSet[str]):
|
||||
deferred_manifest = self._get_deferred_manifest()
|
||||
if deferred_manifest is None:
|
||||
return
|
||||
@@ -382,6 +396,7 @@ class RunTask(CompileTask):
|
||||
'manifest to defer from!'
|
||||
)
|
||||
self.manifest.merge_from_artifact(
|
||||
adapter=adapter,
|
||||
other=deferred_manifest,
|
||||
selected=selected_uids,
|
||||
)
|
||||
@@ -389,10 +404,10 @@ class RunTask(CompileTask):
|
||||
self.write_manifest()
|
||||
|
||||
def before_run(self, adapter, selected_uids: AbstractSet[str]):
|
||||
self.defer_to_manifest(selected_uids)
|
||||
with adapter.connection_named('master'):
|
||||
self.create_schemas(adapter, selected_uids)
|
||||
self.populate_adapter_cache(adapter)
|
||||
self.defer_to_manifest(adapter, selected_uids)
|
||||
self.safe_run_hooks(adapter, RunHookType.Start, {})
|
||||
|
||||
def after_run(self, adapter, results):
|
||||
@@ -400,10 +415,16 @@ class RunTask(CompileTask):
|
||||
# list of unique database, schema pairs that successfully executed
|
||||
# models were in. for backwards compatibility, include the old
|
||||
# 'schemas', which did not include database information.
|
||||
|
||||
database_schema_set: Set[Tuple[Optional[str], str]] = {
|
||||
(r.node.database, r.node.schema) for r in results
|
||||
if not any((r.error is not None, r.fail, r.skipped))
|
||||
if r.status not in (
|
||||
NodeStatus.Error,
|
||||
NodeStatus.Fail,
|
||||
NodeStatus.Skipped
|
||||
)
|
||||
}
|
||||
|
||||
self._total_executed += len(results)
|
||||
|
||||
extras = {
|
||||
|
||||
@@ -31,7 +31,7 @@ from dbt.logger import (
from dbt.contracts.graph.compiled import CompileResultNode
from dbt.contracts.graph.manifest import Manifest
from dbt.contracts.graph.parsed import ParsedSourceDefinition
from dbt.contracts.results import RunResultsArtifact
from dbt.contracts.results import NodeStatus, RunExecutionResult
from dbt.contracts.state import PreviousState
from dbt.exceptions import (
InternalException,
@@ -189,17 +189,17 @@ class GraphRunnableTask(ManifestTask):

fail_fast = getattr(self.config.args, 'fail_fast', False)

if (result.fail is not None or result.error is not None) and fail_fast:
if result.status in (NodeStatus.Error, NodeStatus.Fail) and fail_fast:
self._raise_next_tick = FailFastException(
message='Failing early due to test failure or runtime error',
result=result,
node=getattr(result, 'node', None)
)
elif result.error is not None and self.raise_on_first_error():
elif result.status == NodeStatus.Error and self.raise_on_first_error():
# if we raise inside a thread, it'll just get silently swallowed.
# stash the error message we want here, and it will check the
# next 'tick' - should be soon since our thread is about to finish!
self._raise_next_tick = RuntimeException(result.error)
self._raise_next_tick = RuntimeException(result.message)

return result

@@ -287,7 +287,7 @@ class GraphRunnableTask(ManifestTask):
else:
self.manifest.update_node(node)

if result.error is not None:
if result.status == NodeStatus.Error:
if is_ephemeral:
cause = result
else:
@@ -436,7 +436,14 @@ class GraphRunnableTask(ManifestTask):
if results is None:
return False

failures = [r for r in results if r.error or r.fail]
failures = [
r for r in results if r.status in (
NodeStatus.RuntimeErr,
NodeStatus.Error,
NodeStatus.Fail,
NodeStatus.Skipped  # propagate error message causing skip
)
]
return len(failures) == 0

def get_model_schemas(
@@ -531,8 +538,7 @@ class GraphRunnableTask(ManifestTask):
create_future.result()

def get_result(self, results, elapsed_time, generated_at):

return RunResultsArtifact.from_node_results(
return RunExecutionResult(
results=results,
elapsed_time=elapsed_time,
generated_at=generated_at,

@@ -7,6 +7,7 @@ from .printer import (
print_run_end_messages,
)

from dbt.contracts.results import RunStatus
from dbt.exceptions import InternalException
from dbt.graph import ResourceTypeSelector
from dbt.logger import GLOBAL_LOGGER as logger, TextOnly
@@ -37,6 +38,10 @@ class SeedRunner(ModelRunner):


class SeedTask(RunTask):
def defer_to_manifest(self, adapter, selected_uids):
# seeds don't defer
return

def raise_on_first_error(self):
return False

@@ -79,5 +84,5 @@ class SeedTask(RunTask):

def show_tables(self, results):
for result in results:
if result.error is None:
if result.status != RunStatus.Error:
self.show_table(result)

@@ -22,6 +22,10 @@ class SnapshotTask(RunTask):
def raise_on_first_error(self):
return False

def defer_to_manifest(self, adapter, selected_uids):
# snapshots don't defer
return

def get_node_selector(self):
if self.manifest is None or self.graph is None:
raise InternalException(

@@ -1,3 +1,4 @@
import threading
from typing import Dict, Any, Set

from .compile import CompileRunner
@@ -14,7 +15,7 @@ from dbt.contracts.graph.parsed import (
ParsedDataTestNode,
ParsedSchemaTestNode,
)
from dbt.contracts.results import RunModelResult
from dbt.contracts.results import RunResult, TestStatus
from dbt.exceptions import raise_compiler_error, InternalException
from dbt.graph import (
ResourceTypeSelector,
@@ -42,7 +43,7 @@ class TestRunner(CompileRunner):

def execute_data_test(self, test: CompiledDataTestNode):
res, table = self.adapter.execute(
test.injected_sql, auto_begin=True, fetch=True
test.compiled_sql, auto_begin=True, fetch=True
)

num_rows = len(table.rows)
@@ -59,7 +60,7 @@ class TestRunner(CompileRunner):

def execute_schema_test(self, test: CompiledSchemaTestNode):
res, table = self.adapter.execute(
test.injected_sql,
test.compiled_sql,
auto_begin=True,
fetch=True,
)
@@ -83,19 +84,30 @@ class TestRunner(CompileRunner):
elif isinstance(test, CompiledSchemaTestNode):
failed_rows = self.execute_schema_test(test)
else:

raise InternalException(
f'Expected compiled schema test or compiled data test, got '
f'{type(test)}'
)
severity = test.config.severity.upper()

severity = test.config.severity.upper()
thread_id = threading.current_thread().name
status = None
if failed_rows == 0:
return RunModelResult(test, status=failed_rows)
status = TestStatus.Pass
elif severity == 'ERROR' or flags.WARN_ERROR:
return RunModelResult(test, status=failed_rows, fail=True)
status = TestStatus.Fail
else:
return RunModelResult(test, status=failed_rows, warn=True)
status = TestStatus.Warn

return RunResult(
node=test,
status=status,
timing=[],
thread_id=thread_id,
execution_time=0,
message=int(failed_rows),
adapter_response={}
)

def after_execute(self, result):
self.print_result_line(result)
@@ -115,7 +127,7 @@ class TestSelector(ResourceTypeSelector):
)

def expand_selection(self, selected: Set[UniqueId]) -> Set[UniqueId]:
# reports can't have tests, so this is relatively easy
# exposures can't have tests, so this is relatively easy
selected_tests = set()
for unique_id in self.graph.select_successors(selected):
if unique_id in self.manifest.nodes:
@@ -132,6 +144,7 @@ class TestTask(RunTask):
Read schema files + custom data tests and validate that
constraints are satisfied.
"""

def raise_on_first_error(self):
return False

@@ -15,20 +15,38 @@ import requests
import yaml
import os

import tracking  # written in Rust

sp_logger.setLevel(100)

COLLECTOR_URL = "fishtownanalytics.sinter-collect.com"
COLLECTOR_PROTOCOL = "https"
COLLECTOR_URL = tracking.connector_url()
COLLECTOR_PROTOCOL = tracking.collector_protocol()

INVOCATION_SPEC = 'iglu:com.dbt/invocation/jsonschema/1-0-1'
PLATFORM_SPEC = 'iglu:com.dbt/platform/jsonschema/1-0-0'
RUN_MODEL_SPEC = 'iglu:com.dbt/run_model/jsonschema/1-0-1'
INVOCATION_ENV_SPEC = 'iglu:com.dbt/invocation_env/jsonschema/1-0-0'
PACKAGE_INSTALL_SPEC = 'iglu:com.dbt/package_install/jsonschema/1-0-0'
RPC_REQUEST_SPEC = 'iglu:com.dbt/rpc_request/jsonschema/1-0-1'
DEPRECATION_WARN_SPEC = 'iglu:com.dbt/deprecation_warn/jsonschema/1-0-0'
INVOCATION_SPEC = tracking.invocation_spec()
PLATFORM_SPEC = tracking.platform_spec()
RUN_MODEL_SPEC = tracking.run_model_spec()
INVOCATION_ENV_SPEC = tracking.invocation_env_spec()
PACKAGE_INSTALL_SPEC = tracking.package_install_spec()
RPC_REQUEST_SPEC = tracking.rpc_request_spec()
DEPRECATION_WARN_SPEC = tracking.deprecation_warn_spec()
LOAD_ALL_TIMING_SPEC = tracking.load_all_timing_spec()

DBT_INVOCATION_ENV = 'DBT_INVOCATION_ENV'
DBT_INVOCATION_ENV = tracking.dbt_invocation_env()

# --- revert to these for testing purposes --- #
# COLLECTOR_URL = "fishtownanalytics.sinter-collect.com"
# COLLECTOR_PROTOCOL = "https"

# INVOCATION_SPEC = 'iglu:com.dbt/invocation/jsonschema/1-0-1'
# PLATFORM_SPEC = 'iglu:com.dbt/platform/jsonschema/1-0-0'
# RUN_MODEL_SPEC = 'iglu:com.dbt/run_model/jsonschema/1-0-1'
# INVOCATION_ENV_SPEC = 'iglu:com.dbt/invocation_env/jsonschema/1-0-0'
# PACKAGE_INSTALL_SPEC = 'iglu:com.dbt/package_install/jsonschema/1-0-0'
# RPC_REQUEST_SPEC = 'iglu:com.dbt/rpc_request/jsonschema/1-0-1'
# DEPRECATION_WARN_SPEC = 'iglu:com.dbt/deprecation_warn/jsonschema/1-0-0'
# LOAD_ALL_TIMING_SPEC = 'iglu:com.dbt/load_all_timing/jsonschema/1-0-0'

# DBT_INVOCATION_ENV = 'DBT_INVOCATION_ENV'

class TimeoutEmitter(Emitter):
@@ -273,6 +291,20 @@ def track_invocation_start(config=None, args=None):
)


def track_project_load(options):
context = [SelfDescribingJson(LOAD_ALL_TIMING_SPEC, options)]
assert active_user is not None, \
'Cannot track project loading time when active user is None'

track(
active_user,
category='dbt',
action='load_project',
label=active_user.invocation_id,
context=context
)


def track_model_run(options):
context = [SelfDescribingJson(RUN_MODEL_SPEC, options)]
assert active_user is not None, \

@@ -298,7 +298,7 @@ def filter_null_values(input: Dict[K_T, Optional[V_T]]) -> Dict[K_T, V_T]:


def add_ephemeral_model_prefix(s: str) -> str:
return '__dbt__CTE__{}'.format(s)
return '__dbt__cte__{}'.format(s)


def timestring() -> str:

@@ -96,5 +96,5 @@ def _get_dbt_plugins_info():
yield plugin_name, mod.version


__version__ = '0.19.0a1'
__version__ = '0.19.0'
installed = get_installed_version()

@@ -24,7 +24,7 @@ def read(fname):


package_name = "dbt-core"
package_version = "0.19.0a1"
package_version = "0.19.0"
description = """dbt (data build tool) is a command line tool that helps \
analysts and engineers transform data in their warehouse more effectively"""

@@ -64,13 +64,13 @@ setup(
'sqlparse>=0.2.3,<0.4',
'networkx>=2.3,<3',
'minimal-snowplow-tracker==0.0.2',
'colorama>=0.3.9,<0.5',
'colorama>=0.3.9,<0.4.4',
'agate>=1.6,<2',
'isodate>=0.6,<0.7',
'json-rpc>=1.12,<2',
'werkzeug>=0.15,<0.17',
'werkzeug>=0.15,<2.0',
'dataclasses==0.6;python_version<"3.7"',
'hologram==0.0.10',
'hologram==0.0.12',
'logbook>=1.5,<1.6',
'typing-extensions>=3.7.4,<3.8',
# the following are all to match snowflake-connector-python
@@ -91,6 +91,7 @@ setup(
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
],
python_requires=">=3.6.3",
)

@@ -1,6 +1,5 @@
version: '3.5'
version: "3.5"
services:

database:
image: postgres
environment:
@@ -13,7 +12,7 @@ services:
test:
build:
context: .
dockerfile: Dockerfile
dockerfile: Dockerfile.test
args:
# Run `make .env` to set $USER_ID and $GROUP_ID
USER_ID: ${USER_ID:-}

docker/Dockerfile (new file, 32 lines)
@@ -0,0 +1,32 @@
ARG BASE_IMAGE="python:3.8-slim-buster"

FROM $BASE_IMAGE
ARG BASE_REQUIREMENTS_SRC_PATH
ARG WHEEL_REQUIREMENTS_SRC_PATH
ARG DIST_PATH
RUN apt-get update \
&& apt-get dist-upgrade -y \
&& apt-get install -y --no-install-recommends \
git \
software-properties-common \
make \
build-essential \
ca-certificates \
libpq-dev \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*

RUN echo BASE_REQUIREMENTS_SRC_PATH=$BASE_REQUIREMENTS_SRC_PATH
RUN echo WHEEL_REQUIREMENTS_SRC_PATH=$WHEEL_REQUIREMENTS_SRC_PATH
RUN echo DIST_PATH=$DIST_PATH
COPY $BASE_REQUIREMENTS_SRC_PATH ./requirements.txt
COPY $WHEEL_REQUIREMENTS_SRC_PATH ./wheel_requirements.txt
COPY $DIST_PATH ./dist
RUN pip install --upgrade pip setuptools
RUN pip install --requirement ./requirements.txt
RUN pip install --requirement ./wheel_requirements.txt
ENV PYTHONIOENCODING=utf-8
ENV LANG C.UTF-8
WORKDIR /usr/app
VOLUME /usr/app
ENTRYPOINT ["dbt"]
@@ -1,20 +0,0 @@
|
||||
FROM python:3.8.1-slim-buster
|
||||
|
||||
RUN apt-get update && apt-get dist-upgrade -y && apt-get install -y --no-install-recommends git software-properties-common make build-essential ca-certificates libpq-dev && apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
|
||||
|
||||
COPY docker/requirements/requirements.0.15.3.txt ./requirements.0.15.3.txt
|
||||
COPY dist ./dist
|
||||
RUN pip install --upgrade pip setuptools
|
||||
RUN pip install --requirement ./requirements.0.15.3.txt
|
||||
RUN pip install ./dist/dbt_postgres-0.15.3-py3-none-any.whl ./dist/dbt_redshift-0.15.3-py3-none-any.whl ./dist/dbt_bigquery-0.15.3-py3-none-any.whl ./dist/dbt_core-0.15.3-py3-none-any.whl ./dist/dbt_snowflake-0.15.3-py3-none-any.whl ./dist/dbt-0.15.3-py3-none-any.whl
|
||||
|
||||
RUN useradd -mU dbt_user
|
||||
|
||||
ENV PYTHONIOENCODING=utf-8
|
||||
ENV LANG C.UTF-8
|
||||
|
||||
WORKDIR /usr/app
|
||||
VOLUME /usr/app
|
||||
|
||||
USER dbt_user
|
||||
CMD ['dbt', 'run']
|
||||
@@ -1,20 +0,0 @@
|
||||
FROM python:3.8.1-slim-buster
|
||||
|
||||
RUN apt-get update && apt-get dist-upgrade -y && apt-get install -y --no-install-recommends git software-properties-common make build-essential ca-certificates libpq-dev && apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
|
||||
|
||||
COPY docker/requirements/requirements.0.16.0.txt ./requirements.0.16.0.txt
|
||||
COPY dist ./dist
|
||||
RUN pip install --upgrade pip setuptools
|
||||
RUN pip install --requirement ./requirements.0.16.0.txt
|
||||
RUN pip install ./dist/dbt-0.16.0-py3-none-any.whl ./dist/dbt_bigquery-0.16.0-py3-none-any.whl ./dist/dbt_snowflake-0.16.0-py3-none-any.whl ./dist/dbt_core-0.16.0-py3-none-any.whl ./dist/dbt_redshift-0.16.0-py3-none-any.whl ./dist/dbt_postgres-0.16.0-py3-none-any.whl
|
||||
|
||||
RUN useradd -mU dbt_user
|
||||
|
||||
ENV PYTHONIOENCODING=utf-8
|
||||
ENV LANG C.UTF-8
|
||||
|
||||
WORKDIR /usr/app
|
||||
VOLUME /usr/app
|
||||
|
||||
USER dbt_user
|
||||
CMD ['dbt', 'run']
|
||||
@@ -1,20 +0,0 @@
|
||||
FROM python:3.8.1-slim-buster
|
||||
|
||||
RUN apt-get update && apt-get dist-upgrade -y && apt-get install -y --no-install-recommends netcat curl git ssh software-properties-common make build-essential ca-certificates libpq-dev && apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
|
||||
|
||||
COPY docker/requirements/requirements.0.16.0b2.txt ./requirements.0.16.0b2.txt
|
||||
COPY dist ./dist
|
||||
RUN pip install --upgrade pip setuptools
|
||||
RUN pip install --requirement ./requirements.0.16.0b2.txt
|
||||
RUN pip install ./dist/dbt_snowflake-0.16.0b2-py3-none-any.whl ./dist/dbt_core-0.16.0b2-py3-none-any.whl ./dist/dbt_postgres-0.16.0b2-py3-none-any.whl ./dist/dbt-0.16.0b2-py3-none-any.whl ./dist/dbt_bigquery-0.16.0b2-py3-none-any.whl ./dist/dbt_redshift-0.16.0b2-py3-none-any.whl
|
||||
|
||||
RUN useradd -mU dbt_user
|
||||
|
||||
ENV PYTHONIOENCODING=utf-8
|
||||
ENV LANG C.UTF-8
|
||||
|
||||
WORKDIR /usr/app
|
||||
VOLUME /usr/app
|
||||
|
||||
USER dbt_user
|
||||
CMD ['dbt', 'run']
|
||||
@@ -1,20 +0,0 @@
|
||||
FROM python:3.8.1-slim-buster
|
||||
|
||||
RUN apt-get update && apt-get dist-upgrade -y && apt-get install -y --no-install-recommends git software-properties-common make build-essential ca-certificates libpq-dev && apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
|
||||
|
||||
COPY docker/requirements/requirements.0.16.0b3.txt ./requirements.0.16.0b3.txt
|
||||
COPY dist ./dist
|
||||
RUN pip install --upgrade pip setuptools
|
||||
RUN pip install --requirement ./requirements.0.16.0b3.txt
|
||||
RUN pip install ./dist/dbt_postgres-0.16.0b3-py3-none-any.whl ./dist/dbt_core-0.16.0b3-py3-none-any.whl ./dist/dbt-0.16.0b3-py3-none-any.whl ./dist/dbt_snowflake-0.16.0b3-py3-none-any.whl ./dist/dbt_bigquery-0.16.0b3-py3-none-any.whl ./dist/dbt_redshift-0.16.0b3-py3-none-any.whl
|
||||
|
||||
RUN useradd -mU dbt_user
|
||||
|
||||
ENV PYTHONIOENCODING=utf-8
|
||||
ENV LANG C.UTF-8
|
||||
|
||||
WORKDIR /usr/app
|
||||
VOLUME /usr/app
|
||||
|
||||
USER dbt_user
|
||||
CMD ['dbt', 'run']
|
||||
@@ -1,20 +0,0 @@
|
||||
FROM python:3.8.1-slim-buster
|
||||
|
||||
RUN apt-get update && apt-get dist-upgrade -y && apt-get install -y --no-install-recommends git software-properties-common make build-essential ca-certificates libpq-dev && apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
|
||||
|
||||
COPY docker/requirements/requirements.0.16.0rc2.txt ./requirements.0.16.0rc2.txt
|
||||
COPY dist ./dist
|
||||
RUN pip install --upgrade pip setuptools
|
||||
RUN pip install --requirement ./requirements.0.16.0rc2.txt
|
||||
RUN pip install ./dist/dbt_bigquery-0.16.0rc2-py3-none-any.whl ./dist/dbt_core-0.16.0rc2-py3-none-any.whl ./dist/dbt_snowflake-0.16.0rc2-py3-none-any.whl ./dist/dbt-0.16.0rc2-py3-none-any.whl ./dist/dbt_redshift-0.16.0rc2-py3-none-any.whl ./dist/dbt_postgres-0.16.0rc2-py3-none-any.whl
|
||||
|
||||
RUN useradd -mU dbt_user
|
||||
|
||||
ENV PYTHONIOENCODING=utf-8
|
||||
ENV LANG C.UTF-8
|
||||
|
||||
WORKDIR /usr/app
|
||||
VOLUME /usr/app
|
||||
|
||||
USER dbt_user
|
||||
CMD ['dbt', 'run']
|
||||
@@ -1,20 +0,0 @@
|
||||
FROM python:3.8.1-slim-buster
|
||||
|
||||
RUN apt-get update && apt-get dist-upgrade -y && apt-get install -y --no-install-recommends git software-properties-common make build-essential ca-certificates libpq-dev && apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
|
||||
|
||||
COPY docker/requirements/requirements.0.16.0rc3.txt ./requirements.0.16.0rc3.txt
|
||||
COPY dist ./dist
|
||||
RUN pip install --upgrade pip setuptools
|
||||
RUN pip install --requirement ./requirements.0.16.0rc3.txt
|
||||
RUN pip install ./dist/dbt_bigquery-0.16.0rc3-py3-none-any.whl ./dist/dbt_redshift-0.16.0rc3-py3-none-any.whl ./dist/dbt-0.16.0rc3-py3-none-any.whl ./dist/dbt_postgres-0.16.0rc3-py3-none-any.whl ./dist/dbt_core-0.16.0rc3-py3-none-any.whl ./dist/dbt_snowflake-0.16.0rc3-py3-none-any.whl
|
||||
|
||||
RUN useradd -mU dbt_user
|
||||
|
||||
ENV PYTHONIOENCODING=utf-8
|
||||
ENV LANG C.UTF-8
|
||||
|
||||
WORKDIR /usr/app
|
||||
VOLUME /usr/app
|
||||
|
||||
USER dbt_user
|
||||
CMD ['dbt', 'run']
|
||||
@@ -1,20 +0,0 @@
|
||||
FROM python:3.8.1-slim-buster
|
||||
|
||||
RUN apt-get update && apt-get dist-upgrade -y && apt-get install -y --no-install-recommends git software-properties-common make build-essential ca-certificates libpq-dev && apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
|
||||
|
||||
COPY docker/requirements/requirements.0.16.0rc4.txt ./requirements.0.16.0rc4.txt
|
||||
COPY dist ./dist
|
||||
RUN pip install --upgrade pip setuptools
|
||||
RUN pip install --requirement ./requirements.0.16.0rc4.txt
|
||||
RUN pip install ./dist/dbt_snowflake-0.16.0rc4-py3-none-any.whl ./dist/dbt_core-0.16.0rc4-py3-none-any.whl ./dist/dbt_postgres-0.16.0rc4-py3-none-any.whl ./dist/dbt_redshift-0.16.0rc4-py3-none-any.whl ./dist/dbt-0.16.0rc4-py3-none-any.whl ./dist/dbt_bigquery-0.16.0rc4-py3-none-any.whl
|
||||
|
||||
RUN useradd -mU dbt_user
|
||||
|
||||
ENV PYTHONIOENCODING=utf-8
|
||||
ENV LANG C.UTF-8
|
||||
|
||||
WORKDIR /usr/app
|
||||
VOLUME /usr/app
|
||||
|
||||
USER dbt_user
|
||||
CMD ['dbt', 'run']
|
||||
@@ -1,20 +0,0 @@
|
||||
FROM python:3.8.1-slim-buster
|
||||
|
||||
RUN apt-get update && apt-get dist-upgrade -y && apt-get install -y --no-install-recommends git software-properties-common make build-essential ca-certificates libpq-dev && apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
|
||||
|
||||
COPY docker/requirements/requirements.0.16.1rc1.txt ./requirements.0.16.1rc1.txt
|
||||
COPY dist ./dist
|
||||
RUN pip install --upgrade pip setuptools
|
||||
RUN pip install --requirement ./requirements.0.16.1rc1.txt
|
||||
RUN pip install ./dist/dbt-0.16.1rc1-py3-none-any.whl ./dist/dbt_redshift-0.16.1rc1-py3-none-any.whl ./dist/dbt_postgres-0.16.1rc1-py3-none-any.whl ./dist/dbt_core-0.16.1rc1-py3-none-any.whl ./dist/dbt_snowflake-0.16.1rc1-py3-none-any.whl ./dist/dbt_bigquery-0.16.1rc1-py3-none-any.whl
|
||||
|
||||
RUN useradd -mU dbt_user
|
||||
|
||||
ENV PYTHONIOENCODING=utf-8
|
||||
ENV LANG C.UTF-8
|
||||
|
||||
WORKDIR /usr/app
|
||||
VOLUME /usr/app
|
||||
|
||||
USER dbt_user
|
||||
CMD ['dbt', 'run']
|
||||
@@ -1,20 +0,0 @@
|
||||
FROM python:3.8.1-slim-buster
|
||||
|
||||
RUN apt-get update && apt-get dist-upgrade -y && apt-get install -y --no-install-recommends git software-properties-common make build-essential ca-certificates libpq-dev && apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
|
||||
|
||||
COPY docker/requirements/requirements.0.17.0b1.txt ./requirements.0.17.0b1.txt
|
||||
COPY dist ./dist
|
||||
RUN pip install --upgrade pip setuptools
|
||||
RUN pip install --requirement ./requirements.0.17.0b1.txt
|
||||
RUN pip install ./dist/dbt_bigquery-0.17.0b1-py3-none-any.whl ./dist/dbt_redshift-0.17.0b1-py3-none-any.whl ./dist/dbt_snowflake-0.17.0b1-py3-none-any.whl ./dist/dbt_postgres-0.17.0b1-py3-none-any.whl ./dist/dbt_core-0.17.0b1-py3-none-any.whl ./dist/dbt-0.17.0b1-py3-none-any.whl
|
||||
|
||||
RUN useradd -mU dbt_user
|
||||
|
||||
ENV PYTHONIOENCODING=utf-8
|
||||
ENV LANG C.UTF-8
|
||||
|
||||
WORKDIR /usr/app
|
||||
VOLUME /usr/app
|
||||
|
||||
USER dbt_user
|
||||
CMD ['dbt', 'run']
|
||||
@@ -1,20 +0,0 @@
|
||||
FROM python:3.8.1-slim-buster
|
||||
|
||||
RUN apt-get update && apt-get dist-upgrade -y && apt-get install -y --no-install-recommends git software-properties-common make build-essential ca-certificates libpq-dev && apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
|
||||
|
||||
COPY docker/requirements/requirements.0.17.0rc1.txt ./requirements.0.17.0rc1.txt
|
||||
COPY dist ./dist
|
||||
RUN pip install --upgrade pip setuptools
|
||||
RUN pip install --requirement ./requirements.0.17.0rc1.txt
|
||||
RUN pip install ./dist/dbt_bigquery-0.17.0rc1-py3-none-any.whl ./dist/dbt_redshift-0.17.0rc1-py3-none-any.whl ./dist/dbt-0.17.0rc1-py3-none-any.whl ./dist/dbt_postgres-0.17.0rc1-py3-none-any.whl ./dist/dbt_core-0.17.0rc1-py3-none-any.whl ./dist/dbt_snowflake-0.17.0rc1-py3-none-any.whl
|
||||
|
||||
RUN useradd -mU dbt_user
|
||||
|
||||
ENV PYTHONIOENCODING=utf-8
|
||||
ENV LANG C.UTF-8
|
||||
|
||||
WORKDIR /usr/app
|
||||
VOLUME /usr/app
|
||||
|
||||
USER dbt_user
|
||||
CMD ['dbt', 'run']
|
||||
@@ -1,20 +0,0 @@
|
||||
FROM python:3.8.1-slim-buster
|
||||
|
||||
RUN apt-get update && apt-get dist-upgrade -y && apt-get install -y --no-install-recommends git software-properties-common make build-essential ca-certificates libpq-dev && apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
|
||||
|
||||
COPY docker/requirements/requirements.0.17.0rc2.txt ./requirements.0.17.0rc2.txt
|
||||
COPY dist ./dist
|
||||
RUN pip install --upgrade pip setuptools
|
||||
RUN pip install --requirement ./requirements.0.17.0rc2.txt
|
||||
RUN pip install ./dist/dbt_core-0.17.0rc2-py3-none-any.whl ./dist/dbt_snowflake-0.17.0rc2-py3-none-any.whl ./dist/dbt_redshift-0.17.0rc2-py3-none-any.whl ./dist/dbt-0.17.0rc2-py3-none-any.whl ./dist/dbt_postgres-0.17.0rc2-py3-none-any.whl ./dist/dbt_bigquery-0.17.0rc2-py3-none-any.whl
|
||||
|
||||
RUN useradd -mU dbt_user
|
||||
|
||||
ENV PYTHONIOENCODING=utf-8
|
||||
ENV LANG C.UTF-8
|
||||
|
||||
WORKDIR /usr/app
|
||||
VOLUME /usr/app
|
||||
|
||||
USER dbt_user
|
||||
ENTRYPOINT dbt
|
||||
@@ -1,20 +0,0 @@
|
||||
FROM python:3.8.1-slim-buster
|
||||
|
||||
RUN apt-get update && apt-get dist-upgrade -y && apt-get install -y --no-install-recommends git software-properties-common make build-essential ca-certificates libpq-dev && apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
|
||||
|
||||
COPY docker/requirements/requirements.0.17.0rc3.txt ./requirements.0.17.0rc3.txt
|
||||
COPY dist ./dist
|
||||
RUN pip install --upgrade pip setuptools
|
||||
RUN pip install --requirement ./requirements.0.17.0rc3.txt
|
||||
RUN pip install ./dist/dbt-0.17.0rc3-py3-none-any.whl ./dist/dbt_redshift-0.17.0rc3-py3-none-any.whl ./dist/dbt_postgres-0.17.0rc3-py3-none-any.whl ./dist/dbt_core-0.17.0rc3-py3-none-any.whl ./dist/dbt_snowflake-0.17.0rc3-py3-none-any.whl ./dist/dbt_bigquery-0.17.0rc3-py3-none-any.whl
|
||||
|
||||
RUN useradd -mU dbt_user
|
||||
|
||||
ENV PYTHONIOENCODING=utf-8
|
||||
ENV LANG C.UTF-8
|
||||
|
||||
WORKDIR /usr/app
|
||||
VOLUME /usr/app
|
||||
|
||||
USER dbt_user
|
||||
ENTRYPOINT dbt
|
||||
@@ -1,20 +0,0 @@
|
||||
FROM python:3.8.1-slim-buster
|
||||
|
||||
RUN apt-get update && apt-get dist-upgrade -y && apt-get install -y --no-install-recommends git software-properties-common make build-essential ca-certificates libpq-dev && apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
|
||||
|
||||
COPY docker/requirements/requirements.0.17.0rc4.txt ./requirements.0.17.0rc4.txt
|
||||
COPY dist ./dist
|
||||
RUN pip install --upgrade pip setuptools
|
||||
RUN pip install --requirement ./requirements.0.17.0rc4.txt
|
||||
RUN pip install ./dist/dbt_bigquery-0.17.0rc4-py3-none-any.whl ./dist/dbt_snowflake-0.17.0rc4-py3-none-any.whl ./dist/dbt_core-0.17.0rc4-py3-none-any.whl ./dist/dbt_postgres-0.17.0rc4-py3-none-any.whl ./dist/dbt-0.17.0rc4-py3-none-any.whl ./dist/dbt_redshift-0.17.0rc4-py3-none-any.whl
|
||||
|
||||
RUN useradd -mU dbt_user
|
||||
|
||||
ENV PYTHONIOENCODING=utf-8
|
||||
ENV LANG C.UTF-8
|
||||
|
||||
WORKDIR /usr/app
|
||||
VOLUME /usr/app
|
||||
|
||||
USER dbt_user
|
||||
ENTRYPOINT dbt
|
||||
docker/requirements/requirements.0.18.1.txt (new file, 69 lines)
@@ -0,0 +1,69 @@
|
||||
agate==1.6.1
|
||||
asn1crypto==1.4.0
|
||||
attrs==20.2.0
|
||||
azure-common==1.1.25
|
||||
azure-core==1.8.2
|
||||
azure-storage-blob==12.5.0
|
||||
Babel==2.8.0
|
||||
boto3==1.11.17
|
||||
botocore==1.14.17
|
||||
cachetools==4.1.1
|
||||
certifi==2020.6.20
|
||||
cffi==1.14.3
|
||||
chardet==3.0.4
|
||||
colorama==0.4.3
|
||||
cryptography==2.9.2
|
||||
decorator==4.4.2
|
||||
docutils==0.15.2
|
||||
google-api-core==1.16.0
|
||||
google-auth==1.22.1
|
||||
google-cloud-bigquery==1.25.0
|
||||
google-cloud-core==1.3.0
|
||||
google-resumable-media==0.5.1
|
||||
googleapis-common-protos==1.6.0
|
||||
hologram==0.0.10
|
||||
idna==2.9
|
||||
importlib-metadata==2.0.0
|
||||
isodate==0.6.0
|
||||
jeepney==0.4.3
|
||||
Jinja2==2.11.2
|
||||
jmespath==0.10.0
|
||||
json-rpc==1.13.0
|
||||
jsonschema==3.1.1
|
||||
keyring==21.4.0
|
||||
leather==0.3.3
|
||||
Logbook==1.5.3
|
||||
MarkupSafe==1.1.1
|
||||
minimal-snowplow-tracker==0.0.2
|
||||
msrest==0.6.19
|
||||
networkx==2.5
|
||||
oauthlib==3.1.0
|
||||
oscrypto==1.2.1
|
||||
parsedatetime==2.6
|
||||
protobuf==3.11.3
|
||||
psycopg2-binary==2.8.6
|
||||
pyasn1==0.4.8
|
||||
pyasn1-modules==0.2.8
|
||||
pycparser==2.20
|
||||
pycryptodomex==3.9.8
|
||||
PyJWT==1.7.1
|
||||
pyOpenSSL==19.1.0
|
||||
pyrsistent==0.17.3
|
||||
python-dateutil==2.8.1
|
||||
python-slugify==4.0.1
|
||||
pytimeparse==1.1.8
|
||||
pytz==2020.1
|
||||
PyYAML==5.3.1
|
||||
requests==2.23.0
|
||||
requests-oauthlib==1.3.0
|
||||
rsa==4.6
|
||||
s3transfer==0.3.3
|
||||
SecretStorage==3.1.2
|
||||
six==1.15.0
|
||||
snowflake-connector-python==2.2.10
|
||||
sqlparse==0.3.1
|
||||
text-unidecode==1.3
|
||||
typing-extensions==3.7.4.3
|
||||
urllib3==1.25.10
|
||||
Werkzeug==0.16.1
|
||||
zipp==3.3.0
|
||||
docker/requirements/requirements.0.18.1b1.txt (new file, 66 lines)
@@ -0,0 +1,66 @@
|
||||
agate==1.6.1
|
||||
asn1crypto==1.4.0
|
||||
attrs==20.2.0
|
||||
azure-common==1.1.25
|
||||
azure-core==1.8.1
|
||||
azure-storage-blob==12.5.0
|
||||
Babel==2.8.0
|
||||
boto3==1.11.17
|
||||
botocore==1.14.17
|
||||
cachetools==4.1.1
|
||||
certifi==2020.6.20
|
||||
cffi==1.14.2
|
||||
chardet==3.0.4
|
||||
colorama==0.4.3
|
||||
cryptography==2.9.2
|
||||
decorator==4.4.2
|
||||
docutils==0.15.2
|
||||
google-api-core==1.16.0
|
||||
google-auth==1.21.2
|
||||
google-cloud-bigquery==1.25.0
|
||||
google-cloud-core==1.3.0
|
||||
google-resumable-media==0.5.1
|
||||
googleapis-common-protos==1.6.0
|
||||
hologram==0.0.10
|
||||
idna==2.9
|
||||
importlib-metadata==1.7.0
|
||||
isodate==0.6.0
|
||||
Jinja2==2.11.2
|
||||
jmespath==0.10.0
|
||||
json-rpc==1.13.0
|
||||
jsonschema==3.1.1
|
||||
leather==0.3.3
|
||||
Logbook==1.5.3
|
||||
MarkupSafe==1.1.1
|
||||
minimal-snowplow-tracker==0.0.2
|
||||
msrest==0.6.19
|
||||
networkx==2.5
|
||||
oauthlib==3.1.0
|
||||
oscrypto==1.2.1
|
||||
parsedatetime==2.6
|
||||
protobuf==3.11.3
|
||||
psycopg2-binary==2.8.6
|
||||
pyasn1==0.4.8
|
||||
pyasn1-modules==0.2.8
|
||||
pycparser==2.20
|
||||
pycryptodomex==3.9.8
|
||||
PyJWT==1.7.1
|
||||
pyOpenSSL==19.1.0
|
||||
pyrsistent==0.17.3
|
||||
python-dateutil==2.8.1
|
||||
python-slugify==4.0.1
|
||||
pytimeparse==1.1.8
|
||||
pytz==2020.1
|
||||
PyYAML==5.3.1
|
||||
requests==2.23.0
|
||||
requests-oauthlib==1.3.0
|
||||
rsa==4.6
|
||||
s3transfer==0.3.3
|
||||
six==1.15.0
|
||||
snowflake-connector-python==2.2.10
|
||||
sqlparse==0.3.1
|
||||
text-unidecode==1.3
|
||||
typing-extensions==3.7.4.3
|
||||
urllib3==1.25.10
|
||||
Werkzeug==0.16.1
|
||||
zipp==3.1.0
|
||||
docker/requirements/requirements.0.18.1b2.txt (new file, 66 lines)
@@ -0,0 +1,66 @@
|
||||
agate==1.6.1
|
||||
asn1crypto==1.4.0
|
||||
attrs==20.2.0
|
||||
azure-common==1.1.25
|
||||
azure-core==1.8.1
|
||||
azure-storage-blob==12.5.0
|
||||
Babel==2.8.0
|
||||
boto3==1.11.17
|
||||
botocore==1.14.17
|
||||
cachetools==4.1.1
|
||||
certifi==2020.6.20
|
||||
cffi==1.14.3
|
||||
chardet==3.0.4
|
||||
colorama==0.4.3
|
||||
cryptography==2.9.2
|
||||
decorator==4.4.2
|
||||
docutils==0.15.2
|
||||
google-api-core==1.16.0
|
||||
google-auth==1.21.2
|
||||
google-cloud-bigquery==1.25.0
|
||||
google-cloud-core==1.3.0
|
||||
google-resumable-media==0.5.1
|
||||
googleapis-common-protos==1.6.0
|
||||
hologram==0.0.10
|
||||
idna==2.9
|
||||
importlib-metadata==1.7.0
|
||||
isodate==0.6.0
|
||||
Jinja2==2.11.2
|
||||
jmespath==0.10.0
|
||||
json-rpc==1.13.0
|
||||
jsonschema==3.1.1
|
||||
leather==0.3.3
|
||||
Logbook==1.5.3
|
||||
MarkupSafe==1.1.1
|
||||
minimal-snowplow-tracker==0.0.2
|
||||
msrest==0.6.19
|
||||
networkx==2.5
|
||||
oauthlib==3.1.0
|
||||
oscrypto==1.2.1
|
||||
parsedatetime==2.6
|
||||
protobuf==3.11.3
|
||||
psycopg2-binary==2.8.6
|
||||
pyasn1==0.4.8
|
||||
pyasn1-modules==0.2.8
|
||||
pycparser==2.20
|
||||
pycryptodomex==3.9.8
|
||||
PyJWT==1.7.1
|
||||
pyOpenSSL==19.1.0
|
||||
pyrsistent==0.17.3
|
||||
python-dateutil==2.8.1
|
||||
python-slugify==4.0.1
|
||||
pytimeparse==1.1.8
|
||||
pytz==2020.1
|
||||
PyYAML==5.3.1
|
||||
requests==2.23.0
|
||||
requests-oauthlib==1.3.0
|
||||
rsa==4.6
|
||||
s3transfer==0.3.3
|
||||
six==1.15.0
|
||||
snowflake-connector-python==2.2.10
|
||||
sqlparse==0.3.1
|
||||
text-unidecode==1.3
|
||||
typing-extensions==3.7.4.3
|
||||
urllib3==1.25.10
|
||||
Werkzeug==0.16.1
|
||||
zipp==3.1.0
|
||||
docker/requirements/requirements.0.18.1b3.txt (new file, 69 lines)
@@ -0,0 +1,69 @@
|
||||
agate==1.6.1
|
||||
asn1crypto==1.4.0
|
||||
attrs==20.2.0
|
||||
azure-common==1.1.25
|
||||
azure-core==1.8.1
|
||||
azure-storage-blob==12.5.0
|
||||
Babel==2.8.0
|
||||
boto3==1.11.17
|
||||
botocore==1.14.17
|
||||
cachetools==4.1.1
|
||||
certifi==2020.6.20
|
||||
cffi==1.14.3
|
||||
chardet==3.0.4
|
||||
colorama==0.4.3
|
||||
cryptography==2.9.2
|
||||
decorator==4.4.2
|
||||
docutils==0.15.2
|
||||
google-api-core==1.16.0
|
||||
google-auth==1.21.3
|
||||
google-cloud-bigquery==1.25.0
|
||||
google-cloud-core==1.3.0
|
||||
google-resumable-media==0.5.1
|
||||
googleapis-common-protos==1.6.0
|
||||
hologram==0.0.10
|
||||
idna==2.9
|
||||
importlib-metadata==2.0.0
|
||||
isodate==0.6.0
|
||||
jeepney==0.4.3
|
||||
Jinja2==2.11.2
|
||||
jmespath==0.10.0
|
||||
json-rpc==1.13.0
|
||||
jsonschema==3.1.1
|
||||
keyring==21.4.0
|
||||
leather==0.3.3
|
||||
Logbook==1.5.3
|
||||
MarkupSafe==1.1.1
|
||||
minimal-snowplow-tracker==0.0.2
|
||||
msrest==0.6.19
|
||||
networkx==2.5
|
||||
oauthlib==3.1.0
|
||||
oscrypto==1.2.1
|
||||
parsedatetime==2.6
|
||||
protobuf==3.11.3
|
||||
psycopg2-binary==2.8.6
|
||||
pyasn1==0.4.8
|
||||
pyasn1-modules==0.2.8
|
||||
pycparser==2.20
|
||||
pycryptodomex==3.9.8
|
||||
PyJWT==1.7.1
|
||||
pyOpenSSL==19.1.0
|
||||
pyrsistent==0.17.3
|
||||
python-dateutil==2.8.1
|
||||
python-slugify==4.0.1
|
||||
pytimeparse==1.1.8
|
||||
pytz==2020.1
|
||||
PyYAML==5.3.1
|
||||
requests==2.23.0
|
||||
requests-oauthlib==1.3.0
|
||||
rsa==4.6
|
||||
s3transfer==0.3.3
|
||||
SecretStorage==3.1.2
|
||||
six==1.15.0
|
||||
snowflake-connector-python==2.2.10
|
||||
sqlparse==0.3.1
|
||||
text-unidecode==1.3
|
||||
typing-extensions==3.7.4.3
|
||||
urllib3==1.25.10
|
||||
Werkzeug==0.16.1
|
||||
zipp==3.2.0
|
||||
docker/requirements/requirements.0.18.1rc1.txt (new file, 73 lines)
@@ -0,0 +1,73 @@
|
||||
agate==1.6.1
|
||||
aiohttp==3.6.2
|
||||
asn1crypto==1.4.0
|
||||
async-timeout==3.0.1
|
||||
attrs==20.2.0
|
||||
azure-common==1.1.25
|
||||
azure-core==1.8.1
|
||||
azure-storage-blob==12.5.0
|
||||
Babel==2.8.0
|
||||
boto3==1.11.17
|
||||
botocore==1.14.17
|
||||
cachetools==4.1.1
|
||||
certifi==2020.6.20
|
||||
cffi==1.14.3
|
||||
chardet==3.0.4
|
||||
colorama==0.4.3
|
||||
cryptography==2.9.2
|
||||
decorator==4.4.2
|
||||
docutils==0.15.2
|
||||
google-api-core==1.16.0
|
||||
google-auth==1.22.0
|
||||
google-cloud-bigquery==1.25.0
|
||||
google-cloud-core==1.3.0
|
||||
google-resumable-media==0.5.1
|
||||
googleapis-common-protos==1.6.0
|
||||
hologram==0.0.10
|
||||
idna==2.9
|
||||
importlib-metadata==2.0.0
|
||||
isodate==0.6.0
|
||||
jeepney==0.4.3
|
||||
Jinja2==2.11.2
|
||||
jmespath==0.10.0
|
||||
json-rpc==1.13.0
|
||||
jsonschema==3.1.1
|
||||
keyring==21.4.0
|
||||
leather==0.3.3
|
||||
Logbook==1.5.3
|
||||
MarkupSafe==1.1.1
|
||||
minimal-snowplow-tracker==0.0.2
|
||||
msrest==0.6.19
|
||||
multidict==4.7.6
|
||||
networkx==2.5
|
||||
oauthlib==3.1.0
|
||||
oscrypto==1.2.1
|
||||
parsedatetime==2.6
|
||||
protobuf==3.11.3
|
||||
psycopg2-binary==2.8.6
|
||||
pyasn1==0.4.8
|
||||
pyasn1-modules==0.2.8
|
||||
pycparser==2.20
|
||||
pycryptodomex==3.9.8
|
||||
PyJWT==1.7.1
|
||||
pyOpenSSL==19.1.0
|
||||
pyrsistent==0.17.3
|
||||
python-dateutil==2.8.1
|
||||
python-slugify==4.0.1
|
||||
pytimeparse==1.1.8
|
||||
pytz==2020.1
|
||||
PyYAML==5.3.1
|
||||
requests==2.23.0
|
||||
requests-oauthlib==1.3.0
|
||||
rsa==4.6
|
||||
s3transfer==0.3.3
|
||||
SecretStorage==3.1.2
|
||||
six==1.15.0
|
||||
snowflake-connector-python==2.2.10
|
||||
sqlparse==0.3.1
|
||||
text-unidecode==1.3
|
||||
typing-extensions==3.7.4.3
|
||||
urllib3==1.25.10
|
||||
Werkzeug==0.16.1
|
||||
yarl==1.6.0
|
||||
zipp==3.2.0
|
||||
docker/requirements/requirements.0.19.0.txt (new file, 71 lines)
@@ -0,0 +1,71 @@
|
||||
agate==1.6.1
|
||||
asn1crypto==1.4.0
|
||||
attrs==20.3.0
|
||||
azure-common==1.1.26
|
||||
azure-core==1.10.0
|
||||
azure-storage-blob==12.7.1
|
||||
Babel==2.9.0
|
||||
boto3==1.15.18
|
||||
botocore==1.18.18
|
||||
cachetools==4.2.1
|
||||
certifi==2020.12.5
|
||||
cffi==1.14.4
|
||||
chardet==3.0.4
|
||||
colorama==0.4.3
|
||||
cryptography==3.3.1
|
||||
decorator==4.4.2
|
||||
google-api-core==1.23.0
|
||||
google-auth==1.24.0
|
||||
google-cloud-bigquery==2.3.1
|
||||
google-cloud-core==1.4.4
|
||||
google-crc32c==1.1.2
|
||||
google-resumable-media==1.2.0
|
||||
googleapis-common-protos==1.52.0
|
||||
grpcio==1.35.0
|
||||
hologram==0.0.12
|
||||
idna==2.9
|
||||
importlib-metadata==3.4.0
|
||||
isodate==0.6.0
|
||||
jeepney==0.6.0
|
||||
Jinja2==2.11.2
|
||||
jmespath==0.10.0
|
||||
json-rpc==1.13.0
|
||||
jsonschema==3.1.1
|
||||
keyring==21.8.0
|
||||
leather==0.3.3
|
||||
Logbook==1.5.3
|
||||
MarkupSafe==1.1.1
|
||||
minimal-snowplow-tracker==0.0.2
|
||||
msrest==0.6.21
|
||||
networkx==2.5
|
||||
oauthlib==3.1.0
|
||||
oscrypto==1.2.1
|
||||
parsedatetime==2.6
|
||||
proto-plus==1.13.0
|
||||
protobuf==3.14.0
|
||||
psycopg2-binary==2.8.6
|
||||
pyasn1==0.4.8
|
||||
pyasn1-modules==0.2.8
|
||||
pycparser==2.20
|
||||
pycryptodomex==3.9.9
|
||||
PyJWT==1.7.1
|
||||
pyOpenSSL==20.0.1
|
||||
pyrsistent==0.17.3
|
||||
python-dateutil==2.8.1
|
||||
python-slugify==4.0.1
|
||||
pytimeparse==1.1.8
|
||||
pytz==2020.5
|
||||
PyYAML==5.4.1
|
||||
requests==2.23.0
|
||||
requests-oauthlib==1.3.0
|
||||
rsa==4.7
|
||||
s3transfer==0.3.4
|
||||
SecretStorage==3.3.0
|
||||
six==1.15.0
|
||||
snowflake-connector-python==2.3.6
|
||||
sqlparse==0.3.1
|
||||
text-unidecode==1.3
|
||||
typing-extensions==3.7.4.3
|
||||
urllib3==1.25.11
|
||||
Werkzeug==0.16.1
|
||||
zipp==3.4.0
|
||||
docker/requirements/requirements.0.19.0b1.txt (new file, 69 lines)
@@ -0,0 +1,69 @@
|
||||
agate==1.6.1
|
||||
asn1crypto==1.4.0
|
||||
attrs==20.2.0
|
||||
azure-common==1.1.25
|
||||
azure-core==1.8.2
|
||||
azure-storage-blob==12.5.0
|
||||
Babel==2.8.0
|
||||
boto3==1.11.17
|
||||
botocore==1.14.17
|
||||
cachetools==4.1.1
|
||||
certifi==2020.6.20
|
||||
cffi==1.14.3
|
||||
chardet==3.0.4
|
||||
colorama==0.4.3
|
||||
cryptography==2.9.2
|
||||
decorator==4.4.2
|
||||
docutils==0.15.2
|
||||
google-api-core==1.16.0
|
||||
google-auth==1.22.1
|
||||
google-cloud-bigquery==1.25.0
|
||||
google-cloud-core==1.3.0
|
||||
google-resumable-media==0.5.1
|
||||
googleapis-common-protos==1.6.0
|
||||
hologram==0.0.10
|
||||
idna==2.9
|
||||
importlib-metadata==2.0.0
|
||||
isodate==0.6.0
|
||||
jeepney==0.4.3
|
||||
Jinja2==2.11.2
|
||||
jmespath==0.10.0
|
||||
json-rpc==1.13.0
|
||||
jsonschema==3.1.1
|
||||
keyring==21.4.0
|
||||
leather==0.3.3
|
||||
Logbook==1.5.3
|
||||
MarkupSafe==1.1.1
|
||||
minimal-snowplow-tracker==0.0.2
|
||||
msrest==0.6.19
|
||||
networkx==2.5
|
||||
oauthlib==3.1.0
|
||||
oscrypto==1.2.1
|
||||
parsedatetime==2.6
|
||||
protobuf==3.11.3
|
||||
psycopg2-binary==2.8.6
|
||||
pyasn1==0.4.8
|
||||
pyasn1-modules==0.2.8
|
||||
pycparser==2.20
|
||||
pycryptodomex==3.9.8
|
||||
PyJWT==1.7.1
|
||||
pyOpenSSL==19.1.0
|
||||
pyrsistent==0.17.3
|
||||
python-dateutil==2.8.1
|
||||
python-slugify==4.0.1
|
||||
pytimeparse==1.1.8
|
||||
pytz==2020.1
|
||||
PyYAML==5.3.1
|
||||
requests==2.23.0
|
||||
requests-oauthlib==1.3.0
|
||||
rsa==4.6
|
||||
s3transfer==0.3.3
|
||||
SecretStorage==3.1.2
|
||||
six==1.15.0
|
||||
snowflake-connector-python==2.2.10
|
||||
sqlparse==0.3.1
|
||||
text-unidecode==1.3
|
||||
typing-extensions==3.7.4.3
|
||||
urllib3==1.25.11
|
||||
Werkzeug==0.16.1
|
||||
zipp==3.3.1
|
||||
docker/requirements/requirements.0.19.0rc1.txt (new file, 70 lines)
@@ -0,0 +1,70 @@
|
||||
agate==1.6.1
|
||||
asn1crypto==1.4.0
|
||||
attrs==20.3.0
|
||||
azure-common==1.1.26
|
||||
azure-core==1.9.0
|
||||
azure-storage-blob==12.6.0
|
||||
Babel==2.9.0
|
||||
boto3==1.11.17
|
||||
botocore==1.14.17
|
||||
cachetools==4.2.0
|
||||
certifi==2020.12.5
|
||||
cffi==1.14.4
|
||||
chardet==3.0.4
|
||||
colorama==0.4.3
|
||||
cryptography==3.3.1
|
||||
decorator==4.4.2
|
||||
docutils==0.15.2
|
||||
google-api-core==1.23.0
|
||||
google-auth==1.24.0
|
||||
google-cloud-bigquery==2.3.1
|
||||
google-cloud-core==1.4.4
|
||||
google-crc32c==1.1.0
|
||||
google-resumable-media==1.2.0
|
||||
googleapis-common-protos==1.52.0
|
||||
grpcio==1.34.0
|
||||
hologram==0.0.12
|
||||
idna==2.9
|
||||
importlib-metadata==3.3.0
|
||||
isodate==0.6.0
|
||||
Jinja2==2.11.2
|
||||
jmespath==0.10.0
|
||||
json-rpc==1.13.0
|
||||
jsonschema==3.1.1
|
||||
keyring==21.8.0
|
||||
leather==0.3.3
|
||||
Logbook==1.5.3
|
||||
MarkupSafe==1.1.1
|
||||
minimal-snowplow-tracker==0.0.2
|
||||
msrest==0.6.19
|
||||
networkx==2.5
|
||||
oauthlib==3.1.0
|
||||
oscrypto==1.2.1
|
||||
parsedatetime==2.6
|
||||
proto-plus==1.13.0
|
||||
protobuf==3.14.0
|
||||
psycopg2-binary==2.8.6
|
||||
pyasn1==0.4.8
|
||||
pyasn1-modules==0.2.8
|
||||
pycparser==2.20
|
||||
pycryptodomex==3.9.9
|
||||
PyJWT==1.7.1
|
||||
pyOpenSSL==20.0.1
|
||||
pyrsistent==0.17.3
|
||||
python-dateutil==2.8.1
|
||||
python-slugify==4.0.1
|
||||
pytimeparse==1.1.8
|
||||
pytz==2020.5
|
||||
PyYAML==5.3.1
|
||||
requests==2.23.0
|
||||
requests-oauthlib==1.3.0
|
||||
rsa==4.6
|
||||
s3transfer==0.3.3
|
||||
six==1.15.0
|
||||
snowflake-connector-python==2.3.6
|
||||
sqlparse==0.3.1
|
||||
text-unidecode==1.3
|
||||
typing-extensions==3.7.4.3
|
||||
urllib3==1.25.11
|
||||
Werkzeug==0.16.1
|
||||
zipp==3.4.0
|
||||
docker/requirements/requirements.0.19.0rc2.txt (new file, 71 lines)
@@ -0,0 +1,71 @@
|
||||
agate==1.6.1
|
||||
asn1crypto==1.4.0
|
||||
attrs==20.3.0
|
||||
azure-common==1.1.26
|
||||
azure-core==1.10.0
|
||||
azure-storage-blob==12.7.0
|
||||
Babel==2.9.0
|
||||
boto3==1.15.18
|
||||
botocore==1.18.18
|
||||
cachetools==4.2.0
|
||||
certifi==2020.12.5
|
||||
cffi==1.14.4
|
||||
chardet==3.0.4
|
||||
colorama==0.4.3
|
||||
cryptography==3.3.1
|
||||
decorator==4.4.2
|
||||
google-api-core==1.23.0
|
||||
google-auth==1.24.0
|
||||
google-cloud-bigquery==2.3.1
|
||||
google-cloud-core==1.4.4
|
||||
google-crc32c==1.1.1
|
||||
google-resumable-media==1.2.0
|
||||
googleapis-common-protos==1.52.0
|
||||
grpcio==1.34.1
|
||||
hologram==0.0.12
|
||||
idna==2.9
|
||||
importlib-metadata==3.4.0
|
||||
isodate==0.6.0
|
||||
jeepney==0.6.0
|
||||
Jinja2==2.11.2
|
||||
jmespath==0.10.0
|
||||
json-rpc==1.13.0
|
||||
jsonschema==3.1.1
|
||||
keyring==21.8.0
|
||||
leather==0.3.3
|
||||
Logbook==1.5.3
|
||||
MarkupSafe==1.1.1
|
||||
minimal-snowplow-tracker==0.0.2
|
||||
msrest==0.6.19
|
||||
networkx==2.5
|
||||
oauthlib==3.1.0
|
||||
oscrypto==1.2.1
|
||||
parsedatetime==2.6
|
||||
proto-plus==1.13.0
|
||||
protobuf==3.14.0
|
||||
psycopg2-binary==2.8.6
|
||||
pyasn1==0.4.8
|
||||
pyasn1-modules==0.2.8
|
||||
pycparser==2.20
|
||||
pycryptodomex==3.9.9
|
||||
PyJWT==1.7.1
|
||||
pyOpenSSL==20.0.1
|
||||
pyrsistent==0.17.3
|
||||
python-dateutil==2.8.1
|
||||
python-slugify==4.0.1
|
||||
pytimeparse==1.1.8
|
||||
pytz==2020.5
|
||||
PyYAML==5.3.1
|
||||
requests==2.23.0
|
||||
requests-oauthlib==1.3.0
|
||||
rsa==4.7
|
||||
s3transfer==0.3.4
|
||||
SecretStorage==3.3.0
|
||||
six==1.15.0
|
||||
snowflake-connector-python==2.3.6
|
||||
sqlparse==0.3.1
|
||||
text-unidecode==1.3
|
||||
typing-extensions==3.7.4.3
|
||||
urllib3==1.25.11
|
||||
Werkzeug==0.16.1
|
||||
zipp==3.4.0
|
||||
docker/requirements/requirements.0.19.0rc3.txt (new file, 71 lines)
@@ -0,0 +1,71 @@
|
||||
agate==1.6.1
|
||||
asn1crypto==1.4.0
|
||||
attrs==20.3.0
|
||||
azure-common==1.1.26
|
||||
azure-core==1.10.0
|
||||
azure-storage-blob==12.7.1
|
||||
Babel==2.9.0
|
||||
boto3==1.15.18
|
||||
botocore==1.18.18
|
||||
cachetools==4.2.1
|
||||
certifi==2020.12.5
|
||||
cffi==1.14.4
|
||||
chardet==3.0.4
|
||||
colorama==0.4.3
|
||||
cryptography==3.3.1
|
||||
decorator==4.4.2
|
||||
google-api-core==1.23.0
|
||||
google-auth==1.24.0
|
||||
google-cloud-bigquery==2.3.1
|
||||
google-cloud-core==1.4.4
|
||||
google-crc32c==1.1.2
|
||||
google-resumable-media==1.2.0
|
||||
googleapis-common-protos==1.52.0
|
||||
grpcio==1.35.0
|
||||
hologram==0.0.12
|
||||
idna==2.9
|
||||
importlib-metadata==3.4.0
|
||||
isodate==0.6.0
|
||||
jeepney==0.6.0
|
||||
Jinja2==2.11.2
|
||||
jmespath==0.10.0
|
||||
json-rpc==1.13.0
|
||||
jsonschema==3.1.1
|
||||
keyring==21.8.0
|
||||
leather==0.3.3
|
||||
Logbook==1.5.3
|
||||
MarkupSafe==1.1.1
|
||||
minimal-snowplow-tracker==0.0.2
|
||||
msrest==0.6.21
|
||||
networkx==2.5
|
||||
oauthlib==3.1.0
|
||||
oscrypto==1.2.1
|
||||
parsedatetime==2.6
|
||||
proto-plus==1.13.0
|
||||
protobuf==3.14.0
|
||||
psycopg2-binary==2.8.6
|
||||
pyasn1==0.4.8
|
||||
pyasn1-modules==0.2.8
|
||||
pycparser==2.20
|
||||
pycryptodomex==3.9.9
|
||||
PyJWT==1.7.1
|
||||
pyOpenSSL==20.0.1
|
||||
pyrsistent==0.17.3
|
||||
python-dateutil==2.8.1
|
||||
python-slugify==4.0.1
|
||||
pytimeparse==1.1.8
|
||||
pytz==2020.5
|
||||
PyYAML==5.4.1
|
||||
requests==2.23.0
|
||||
requests-oauthlib==1.3.0
|
||||
rsa==4.7
|
||||
s3transfer==0.3.4
|
||||
SecretStorage==3.3.0
|
||||
six==1.15.0
|
||||
snowflake-connector-python==2.3.6
|
||||
sqlparse==0.3.1
|
||||
text-unidecode==1.3
|
||||
typing-extensions==3.7.4.3
|
||||
urllib3==1.25.11
|
||||
Werkzeug==0.16.1
|
||||
zipp==3.4.0
|
||||
@@ -1 +1 @@
version = '0.19.0a1'
version = '0.19.0'

@@ -1,7 +1,9 @@
from contextlib import contextmanager
from dataclasses import dataclass
from functools import lru_cache
import agate
from requests.exceptions import ConnectionError
from typing import Optional, Any, Dict
from typing import Optional, Any, Dict, Tuple

import google.auth
import google.auth.exceptions
@@ -17,7 +19,7 @@ from google.oauth2 import (
from dbt.utils import format_bytes, format_rows_number
from dbt.clients import agate_helper, gcloud
from dbt.tracking import active_user
from dbt.contracts.connection import ConnectionState
from dbt.contracts.connection import ConnectionState, AdapterResponse
from dbt.exceptions import (
FailedToConnectException, RuntimeException, DatabaseException
)
@@ -45,6 +47,17 @@ RETRYABLE_ERRORS = (
)


@lru_cache()
def get_bigquery_defaults() -> Tuple[Any, Optional[str]]:
"""
Returns (credentials, project_id)

project_id is returned available from the environment; otherwise None
"""
# Cached, because the underlying implementation shells out, taking ~1s
return google.auth.default()


class Priority(StrEnum):
Interactive = 'interactive'
Batch = 'batch'
@@ -57,9 +70,17 @@ class BigQueryConnectionMethod(StrEnum):
OAUTH_SECRETS = 'oauth-secrets'


@dataclass
class BigQueryAdapterResponse(AdapterResponse):
bytes_processed: Optional[int] = None


@dataclass
class BigQueryCredentials(Credentials):
method: BigQueryConnectionMethod
# BigQuery allows an empty database / project, where it defers to the
# environment for the project
database: Optional[str]
timeout_seconds: Optional[int] = 300
location: Optional[str] = None
priority: Optional[Priority] = None
@@ -91,6 +112,16 @@ class BigQueryCredentials(Credentials):
return ('method', 'database', 'schema', 'location', 'priority',
'timeout_seconds', 'maximum_bytes_billed')

def __post_init__(self):
# We need to inject the correct value of the database (aka project) at
# this stage, ref
# https://github.com/fishtown-analytics/dbt/pull/2908#discussion_r532927436.

# `database` is an alias of `project` in BigQuery
if self.database is None:
_, database = get_bigquery_defaults()
self.database = database


class BigQueryConnectionManager(BaseConnectionManager):
TYPE = 'bigquery'
@@ -170,7 +201,7 @@ class BigQueryConnectionManager(BaseConnectionManager):
creds = GoogleServiceAccountCredentials.Credentials

if method == BigQueryConnectionMethod.OAUTH:
credentials, project_id = google.auth.default(scopes=cls.SCOPE)
credentials, _ = get_bigquery_defaults()
return credentials

elif method == BigQueryConnectionMethod.SERVICE_ACCOUNT:
@@ -238,7 +269,6 @@ class BigQueryConnectionManager(BaseConnectionManager):
handle = cls.get_bigquery_client(connection.credentials)

except Exception as e:
raise
logger.debug("Got an error when attempting to create a bigquery "
"client: '{}'".format(e))

@@ -269,15 +299,13 @@ class BigQueryConnectionManager(BaseConnectionManager):
column_names = [field.name for field in resp.schema]
return agate_helper.table_from_data_flat(resp, column_names)

def raw_execute(self, sql, fetch=False):
def raw_execute(self, sql, fetch=False, *, use_legacy_sql=False):
conn = self.get_thread_connection()
client = conn.handle

logger.debug('On {}: {}', conn.name, sql)

job_params = {
'use_legacy_sql': False,
}
job_params = {'use_legacy_sql': use_legacy_sql}

if active_user:
job_params['labels'] = {
@@ -302,44 +330,75 @@ class BigQueryConnectionManager(BaseConnectionManager):

return query_job, iterator

def execute(self, sql, auto_begin=False, fetch=None):
def execute(
self, sql, auto_begin=False, fetch=None
) -> Tuple[BigQueryAdapterResponse, agate.Table]:
sql = self._add_query_comment(sql)
# auto_begin is ignored on bigquery, and only included for consistency
query_job, iterator = self.raw_execute(sql, fetch=fetch)

if fetch:
res = self.get_table_from_response(iterator)
table = self.get_table_from_response(iterator)
else:
res = agate_helper.empty_table()
table = agate_helper.empty_table()

message = 'OK'
code = None
num_rows = None
bytes_processed = None

if query_job.statement_type == 'CREATE_VIEW':
status = 'CREATE VIEW'
code = 'CREATE VIEW'

elif query_job.statement_type == 'CREATE_TABLE_AS_SELECT':
conn = self.get_thread_connection()
client = conn.handle
table = client.get_table(query_job.destination)
processed = format_bytes(query_job.total_bytes_processed)
status = 'CREATE TABLE ({} rows, {} processed)'.format(
format_rows_number(table.num_rows),
format_bytes(query_job.total_bytes_processed),
query_table = client.get_table(query_job.destination)
code = 'CREATE TABLE'
num_rows = query_table.num_rows
bytes_processed = query_job.total_bytes_processed
message = '{} ({} rows, {} processed)'.format(
code,
format_rows_number(num_rows),
format_bytes(bytes_processed)
)

elif query_job.statement_type == 'SCRIPT':
processed = format_bytes(query_job.total_bytes_processed)
status = f'SCRIPT ({processed} processed)'
code = 'SCRIPT'
bytes_processed = query_job.total_bytes_processed
message = f'{code} ({format_bytes(bytes_processed)} processed)'

elif query_job.statement_type in ['INSERT', 'DELETE', 'MERGE']:
status = '{} ({} rows, {} processed)'.format(
query_job.statement_type,
format_rows_number(query_job.num_dml_affected_rows),
format_bytes(query_job.total_bytes_processed),
code = query_job.statement_type
num_rows = query_job.num_dml_affected_rows
bytes_processed = query_job.total_bytes_processed
message = '{} ({} rows, {} processed)'.format(
code,
format_rows_number(num_rows),
format_bytes(bytes_processed),
)

else:
status = 'OK'
response = BigQueryAdapterResponse(
_message=message,
rows_affected=num_rows,
code=code,
bytes_processed=bytes_processed
)

return status, res
return response, table

def get_partitions_metadata(self, table):
def standard_to_legacy(table):
return table.project + ':' + table.dataset + '.' + table.identifier

legacy_sql = 'SELECT * FROM ['\
+ standard_to_legacy(table) + '$__PARTITIONS_SUMMARY__]'

sql = self._add_query_comment(legacy_sql)
# auto_begin is ignored on bigquery, and only included for consistency
_, iterator =\
self.raw_execute(sql, fetch='fetch_result', use_legacy_sql=True)
return self.get_table_from_response(iterator)

def create_bigquery_table(self, database, schema, table_name, callback,
sql):
@@ -510,4 +569,7 @@ def _is_retryable(error):
"""Return true for errors that are unlikely to occur again if retried."""
if isinstance(error, RETRYABLE_ERRORS):
return True
elif isinstance(error, google.api_core.exceptions.Forbidden) and any(
e['reason'] == 'rateLimitExceeded' for e in error.errors):
return True
return False

@@ -50,6 +50,7 @@ def sql_escape(string):
class PartitionConfig(JsonSchemaMixin):
field: str
data_type: str = 'date'
granularity: str = 'day'
range: Optional[Dict[str, Any]] = None

def render(self, alias: Optional[str] = None):
@@ -57,10 +58,11 @@ class PartitionConfig(JsonSchemaMixin):
if alias:
column = f'{alias}.{self.field}'

if self.data_type in ('timestamp', 'datetime'):
return f'date({column})'
else:
if self.data_type.lower() == 'date' and \
self.granularity.lower() == 'day':
return column
else:
return f'{self.data_type}_trunc({column}, {self.granularity})'

@classmethod
def parse(cls, raw_partition_by) -> Optional['PartitionConfig']:
@@ -109,6 +111,8 @@ class BigqueryConfig(AdapterConfig):
partitions: Optional[List[str]] = None
grant_access_to: Optional[List[Dict[str, str]]] = None
hours_to_expiration: Optional[int] = None
require_partition_filter: Optional[bool] = None
partition_expiration_days: Optional[int] = None


class BigQueryAdapter(BaseAdapter):
@@ -385,7 +389,7 @@ class BigQueryAdapter(BaseAdapter):
model_database = model.get('database')
model_schema = model.get('schema')
model_alias = model.get('alias')
model_sql = model.get('injected_sql')
model_sql = model.get('compiled_sql')

logger.debug("Model SQL ({}):\n{}".format(model_alias, model_sql))
self.connections.create_view(
@@ -505,7 +509,7 @@ class BigQueryAdapter(BaseAdapter):
decorator=None):

if sql_override is None:
sql_override = model.get('injected_sql')
sql_override = model.get('compiled_sql')

if flags.STRICT_MODE:
connection = self.connections.get_thread_connection()
@@ -547,7 +551,9 @@ class BigQueryAdapter(BaseAdapter):
return True
elif conf_partition and table.time_partitioning is not None:
table_field = table.time_partitioning.field
return table_field == conf_partition.field
table_granularity = table.partitioning_type
return table_field == conf_partition.field \
and table_granularity == conf_partition.granularity
elif conf_partition and table.range_partitioning is not None:
dest_part = table.range_partitioning
conf_part = conf_partition.range or {}
@@ -784,6 +790,14 @@ class BigQueryAdapter(BaseAdapter):
labels = config.get('labels', {})
opts['labels'] = list(labels.items())

if config.get('require_partition_filter'):
opts['require_partition_filter'] = config.get(
'require_partition_filter')

if config.get('partition_expiration_days') is not None:
opts['partition_expiration_days'] = config.get(
'partition_expiration_days')

return opts

@available.parse_none

@@ -5,3 +5,11 @@
{% macro grant_access_to(entity, entity_type, role, grant_target_dict) -%}
{% do adapter.grant_access_to(entity, entity_type, role, grant_target_dict) %}
{% endmacro %}

{%- macro get_partitions_metadata(table) -%}
{%- if execute -%}
{%- set res = adapter.get_partitions_metadata(table) -%}
{{- return(res) -}}
{%- endif -%}
{{- return(None) -}}
{%- endmacro -%}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user