Mirror of https://github.com/dbt-labs/dbt-core (synced 2025-12-19 05:51:28 +00:00)

Compare commits: performanc...v0.21.0b2 (82 commits)
| Author | SHA1 | Date |
| --- | --- | --- |
| | a2bdd08d88 | |
| | 1807526d0a | |
| | 362770f5bd | |
| | af38f51041 | |
| | efc8ece12e | |
| | 7471f07431 | |
| | b37f6a010e | |
| | e817164d31 | |
| | 6fa30d10ea | |
| | 09ce43edbf | |
| | 2980cd17df | |
| | 8c804de643 | |
| | c8241b87e6 | |
| | f204d24ed8 | |
| | d5461ccd8b | |
| | a20d2d93d3 | |
| | 57e1eec165 | |
| | d2dbe6afe4 | |
| | 72eb163223 | |
| | af16c74c3a | |
| | 664f6584b9 | |
| | 76fd3bdf8c | |
| | 35150f914f | |
| | b633adb881 | |
| | b477be9eff | |
| | b6e534cdd0 | |
| | 1dc4adb86f | |
| | 0a4d7c4831 | |
| | ad67e55d74 | |
| | 2fae64a488 | |
| | 1a984601ee | |
| | b67e877cc1 | |
| | 454168204c | |
| | 1c066cd680 | |
| | 43642956a2 | |
| | ec97b46caf | |
| | e7b8488be8 | |
| | b5bb354929 | |
| | 0efaaf7daf | |
| | 9ae7d68260 | |
| | 45fe76eef4 | |
| | ea772ae419 | |
| | c68fca7937 | |
| | 159e79ee6b | |
| | 57783bb5f6 | |
| | d73ee588e5 | |
| | 40089d710b | |
| | 6ec61950eb | |
| | 72c831a80a | |
| | 929931a26a | |
| | 577e2438c1 | |
| | 2679792199 | |
| | 2adf982991 | |
| | 1fb4a7f428 | |
| | 30e72bc5e2 | |
| | 35645a7233 | |
| | d583c8d737 | |
| | a83f00c594 | |
| | c448702c1b | |
| | 558a6a03ac | |
| | 52ec7907d3 | |
| | 792f39a888 | |
| | 16264f58c1 | |
| | 2317c0c3c8 | |
| | 3c09ab9736 | |
| | f10dc0e1b3 | |
| | 634bc41d8a | |
| | d7ea3648c6 | |
| | e5c8e19ff2 | |
| | 93cf1f085f | |
| | a84f824a44 | |
| | 9c58f3465b | |
| | 0e3778132b | |
| | 72722635f2 | |
| | a4c7c7fc55 | |
| | 2bad73eead | |
| | 67c194dcd1 | |
| | bd7010678a | |
| | 9f716b31b3 | |
| | 3dd486d8fa | |
| | 33217891ca | |
| | 1d37c4e555 | |
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 0.21.0a1
current_version = 0.21.0b2
parse = (?P<major>\d+)
\.(?P<minor>\d+)
\.(?P<patch>\d+)

@@ -47,3 +47,4 @@ first_value = 1
[bumpversion:file:plugins/snowflake/dbt/adapters/snowflake/__version__.py]

[bumpversion:file:plugins/bigquery/dbt/adapters/bigquery/__version__.py]
@@ -1,22 +1,12 @@
version: 2.1
jobs:
unit:
build-wheels:
docker: &test_only
- image: fishtownanalytics/test-container:12
environment:
DBT_INVOCATION_ENV: circle
DOCKER_TEST_DATABASE_HOST: "database"
TOX_PARALLEL_NO_SPINNER: 1
steps:
- checkout
- run: tox -p -e py36,py37,py38
lint:
docker: *test_only
steps:
- checkout
- run: tox -e mypy,flake8 -- -v
build-wheels:
docker: *test_only
steps:
- checkout
- run:

@@ -99,24 +89,12 @@ workflows:
version: 2
test-everything:
jobs:
- lint
- unit
- integration-postgres:
requires:
- unit
- integration-redshift:
requires:
- unit
- integration-bigquery:
requires:
- unit
- integration-snowflake:
requires:
- unit
- integration-postgres
- integration-redshift
- integration-bigquery
- integration-snowflake
- build-wheels:
requires:
- lint
- unit
- integration-postgres
- integration-redshift
- integration-bigquery
27 .github/ISSUE_TEMPLATE/beta-minor-version-release.md (vendored, new file)
@@ -0,0 +1,27 @@
---
name: Beta minor version release
about: Creates a tracking checklist of items for a Beta minor version release
title: "[Tracking] v#.##.#B# release "
labels: 'release'
assignees: ''

---

### Release Core
- [ ] [Engineering] Follow [dbt-release workflow](https://www.notion.so/dbtlabs/Releasing-b97c5ea9a02949e79e81db3566bbc8ef#03ff37da697d4d8ba63d24fae1bfa817)
- [ ] [Engineering] Verify new release branch is created in the repo
- [ ] [Product] Finalize migration guide (next.docs.getdbt.com)

### Release Cloud
- [ ] [Engineering] Create a platform issue to update dbt Cloud and verify it is completed. [Example issue](https://github.com/dbt-labs/dbt-cloud/issues/3481)
- [ ] [Engineering] Determine if schemas have changed. If so, generate new schemas and push to schemas.getdbt.com

### Announce
- [ ] [Product] Announce in dbt Slack

### Post-release
- [ ] [Engineering] [Bump plugin versions](https://www.notion.so/dbtlabs/Releasing-b97c5ea9a02949e79e81db3566bbc8ef#f01854e8da3641179fbcbe505bdf515c) (dbt-spark + dbt-presto), add compatibility as needed
- [ ] [Spark](https://github.com/dbt-labs/dbt-spark)
- [ ] [Presto](https://github.com/dbt-labs/dbt-presto)
- [ ] [Engineering] Create a platform issue to update dbt-spark versions to dbt Cloud. [Example issue](https://github.com/dbt-labs/dbt-cloud/issues/3481)
- [ ] [Engineering] Create an epic for the RC release
28 .github/ISSUE_TEMPLATE/final-minor-version-release.md (vendored, new file)
@@ -0,0 +1,28 @@
---
name: Final minor version release
about: Creates a tracking checklist of items for a final minor version release
title: "[Tracking] v#.##.# final release "
labels: 'release'
assignees: ''

---

### Release Core
- [ ] [Engineering] Verify all necessary changes exist on the release branch
- [ ] [Engineering] Follow [dbt-release workflow](https://www.notion.so/dbtlabs/Releasing-b97c5ea9a02949e79e81db3566bbc8ef#03ff37da697d4d8ba63d24fae1bfa817)
- [ ] [Product] Merge `next` into `current` for docs.getdbt.com

### Release Cloud
- [ ] [Engineering] Create a platform issue to update dbt Cloud and verify it is completed. [Example issue](https://github.com/dbt-labs/dbt-cloud/issues/3481)
- [ ] [Engineering] Determine if schemas have changed. If so, generate new schemas and push to schemas.getdbt.com

### Announce
- [ ] [Product] Update discourse
- [ ] [Product] Announce in dbt Slack

### Post-release
- [ ] [Engineering] [Bump plugin versions](https://www.notion.so/dbtlabs/Releasing-b97c5ea9a02949e79e81db3566bbc8ef#f01854e8da3641179fbcbe505bdf515c) (dbt-spark + dbt-presto), add compatibility as needed
- [ ] [Spark](https://github.com/dbt-labs/dbt-spark)
- [ ] [Presto](https://github.com/dbt-labs/dbt-presto)
- [ ] [Engineering] Create a platform issue to update dbt-spark versions to dbt Cloud. [Example issue](https://github.com/dbt-labs/dbt-cloud/issues/3481)
- [ ] [Product] Release new version of dbt-utils with new dbt version compatibility. If there are breaking changes requiring a minor version, plan upgrades of other packages that depend on dbt-utils.
29 .github/ISSUE_TEMPLATE/minor-version-release.md (vendored, deleted)
@@ -1,29 +0,0 @@
---
name: Minor version release
about: Creates a tracking checklist of items for a minor version release
title: "[Tracking] v#.##.# release "
labels: ''
assignees: ''

---

### Release Core
- [ ] [Engineering] dbt-release workflow
- [ ] [Engineering] Create new protected `x.latest` branch
- [ ] [Product] Finalize migration guide (next.docs.getdbt.com)

### Release Cloud
- [ ] [Engineering] Create a platform issue to update dbt Cloud and verify it is completed
- [ ] [Engineering] Determine if schemas have changed. If so, generate new schemas and push to schemas.getdbt.com

### Announce
- [ ] [Product] Publish discourse
- [ ] [Product] Announce in dbt Slack

### Post-release
- [ ] [Engineering] [Bump plugin versions](https://www.notion.so/fishtownanalytics/Releasing-b97c5ea9a02949e79e81db3566bbc8ef#59571f5bc1a040d9a8fd096e23d2c7db) (dbt-spark + dbt-presto), add compatibility as needed
- [ ] Spark
- [ ] Presto
- [ ] [Engineering] Create a platform issue to update dbt-spark versions to dbt Cloud
- [ ] [Product] Release new version of dbt-utils with new dbt version compatibility. If there are breaking changes requiring a minor version, plan upgrades of other packages that depend on dbt-utils.
- [ ] [Engineering] If this isn't a final release, create an epic for the next release
29 .github/ISSUE_TEMPLATE/rc-minor-version-release copy.md (vendored, new file)
@@ -0,0 +1,29 @@
---
name: RC minor version release
about: Creates a tracking checklist of items for a RC minor version release
title: "[Tracking] v#.##.#RC# release "
labels: 'release'
assignees: ''

---

### Release Core
- [ ] [Engineering] Verify all necessary changes exist on the release branch
- [ ] [Engineering] Follow [dbt-release workflow](https://www.notion.so/dbtlabs/Releasing-b97c5ea9a02949e79e81db3566bbc8ef#03ff37da697d4d8ba63d24fae1bfa817)
- [ ] [Product] Update migration guide (next.docs.getdbt.com)

### Release Cloud
- [ ] [Engineering] Create a platform issue to update dbt Cloud and verify it is completed. [Example issue](https://github.com/dbt-labs/dbt-cloud/issues/3481)
- [ ] [Engineering] Determine if schemas have changed. If so, generate new schemas and push to schemas.getdbt.com

### Announce
- [ ] [Product] Publish discourse
- [ ] [Product] Announce in dbt Slack

### Post-release
- [ ] [Engineering] [Bump plugin versions](https://www.notion.so/dbtlabs/Releasing-b97c5ea9a02949e79e81db3566bbc8ef#f01854e8da3641179fbcbe505bdf515c) (dbt-spark + dbt-presto), add compatibility as needed
- [ ] [Spark](https://github.com/dbt-labs/dbt-spark)
- [ ] [Presto](https://github.com/dbt-labs/dbt-presto)
- [ ] [Engineering] Create a platform issue to update dbt-spark versions to dbt Cloud. [Example issue](https://github.com/dbt-labs/dbt-cloud/issues/3481)
- [ ] [Product] Release new version of dbt-utils with new dbt version compatibility. If there are breaking changes requiring a minor version, plan upgrades of other packages that depend on dbt-utils.
- [ ] [Engineering] Create an epic for the final release
7 .github/workflows/performance.yml (vendored)
@@ -2,13 +2,8 @@
name: Performance Regression Testing
# Schedule triggers
on:
# TODO this is just while developing
pull_request:
branches:
- 'develop'
- 'performance-regression-testing'
# runs twice a day at 10:05am and 10:05pm
schedule:
# runs twice a day at 10:05am and 10:05pm
- cron: '5 10,22 * * *'
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
43 .github/workflows/tests.yml (vendored)
@@ -1,4 +1,4 @@
# This is a workflow to run our unit and integration tests for windows and mac
# This is a workflow to run our integration tests for windows and mac

name: dbt Tests

@@ -10,7 +10,7 @@ on:
- 'develop'
- '*.latest'
- 'releases/*'
pull_request_target:
pull_request:
branches:
- 'develop'
- '*.latest'

@@ -20,45 +20,9 @@ on:
workflow_dispatch:

jobs:
Linting:
runs-on: ubuntu-latest #no need to run on every OS
steps:
- uses: actions/checkout@v2
- name: Setup Python
uses: actions/setup-python@v2.2.2
with:
python-version: '3.8'
architecture: 'x64'

- name: 'Install dependencies'
run: python -m pip install --upgrade pip && pip install tox

- name: 'Linting'
run: tox -e mypy,flake8 -- -v

UnitTest:
strategy:
matrix:
os: [windows-latest, ubuntu-latest, macos-latest]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
- name: Setup Python
uses: actions/setup-python@v2.2.2
with:
python-version: '3.8'
architecture: 'x64'

- name: 'Install dependencies'
run: python -m pip install --upgrade pip && pip install tox

- name: 'Run unit tests'
run: python -m tox -e py -- -v

PostgresIntegrationTest:
runs-on: 'windows-latest' #TODO: Add Mac support
environment: 'Postgres'
needs: UnitTest
steps:
- uses: actions/checkout@v2
- name: 'Install postgresql and set up database'

@@ -98,7 +62,6 @@ jobs:
os: [windows-latest, macos-latest]
runs-on: ${{ matrix.os }}
environment: 'Snowflake'
needs: UnitTest
steps:
- uses: actions/checkout@v2
- name: Setup Python

@@ -132,7 +95,6 @@ jobs:
os: [windows-latest, macos-latest]
runs-on: ${{ matrix.os }}
environment: 'Bigquery'
needs: UnitTest
steps:
- uses: actions/checkout@v2
- name: Setup Python

@@ -156,7 +118,6 @@ jobs:
os: [windows-latest, macos-latest]
runs-on: ${{ matrix.os }}
environment: 'Redshift'
needs: UnitTest
steps:
- uses: actions/checkout@v2
- name: Setup Python
61 .github/workflows/unit_tests.yml (vendored, new file)
@@ -0,0 +1,61 @@
# This is a workflow to run our linting and unit tests for windows, mac, and linux

name: Linting and Unit Tests

# Triggers
on:
# Trigger on commits to develop and releases branches
push:
branches:
- 'develop'
- '*.latest'
- 'releases/*'
pull_request: # Trigger for all PRs
workflow_dispatch: # Allow manual triggers

jobs:
Linting:
runs-on: ubuntu-latest #no need to run on every OS
steps:
- uses: actions/checkout@v2
- name: Setup Python
uses: actions/setup-python@v2.2.2
with:
python-version: '3.6'
architecture: 'x64'

- name: 'Install dependencies'
run: python -m pip install --upgrade pip && pip install tox

- name: 'Linting'
run: tox -e mypy,flake8 -- -v

UnitTest:
strategy:
matrix:
os: [windows-latest, ubuntu-latest, macos-latest]
runs-on: ${{ matrix.os }}
needs: Linting
steps:
- uses: actions/checkout@v2
- name: Setup Python 3.6
uses: actions/setup-python@v2.2.2
with:
python-version: '3.6'
architecture: 'x64'
- name: Setup Python 3.7
uses: actions/setup-python@v2.2.2
with:
python-version: '3.7'
architecture: 'x64'
- name: Setup Python 3.8
uses: actions/setup-python@v2.2.2
with:
python-version: '3.8'
architecture: 'x64'

- name: 'Install dependencies'
run: python -m pip install --upgrade pip && pip install tox

- name: 'Run unit tests'
run: tox -p -e py36,py37,py38
759 CHANGELOG.md (file diff suppressed because it is too large)
@@ -24,7 +24,7 @@ Please note that all contributors to `dbt` must sign the [Contributor License Ag

### Defining the problem

If you have an idea for a new feature or if you've discovered a bug in `dbt`, the first step is to open an issue. Please check the list of [open issues](https://github.com/fishtown-analytics/dbt/issues) before creating a new one. If you find a relevant issue, please add a comment to the open issue instead of creating a new one. There are hundreds of open issues in this repository and it can be hard to know where to look for a relevant open issue. **The `dbt` maintainers are always happy to point contributors in the right direction**, so please err on the side of documenting your idea in a new issue if you are unsure where a problem statement belongs.
If you have an idea for a new feature or if you've discovered a bug in `dbt`, the first step is to open an issue. Please check the list of [open issues](https://github.com/dbt-labs/dbt/issues) before creating a new one. If you find a relevant issue, please add a comment to the open issue instead of creating a new one. There are hundreds of open issues in this repository and it can be hard to know where to look for a relevant open issue. **The `dbt` maintainers are always happy to point contributors in the right direction**, so please err on the side of documenting your idea in a new issue if you are unsure where a problem statement belongs.

> **Note:** All community-contributed Pull Requests _must_ be associated with an open issue. If you submit a Pull Request that does not pertain to an open issue, you will be asked to create an issue describing the problem before the Pull Request can be reviewed.

@@ -36,7 +36,7 @@ After you open an issue, a `dbt` maintainer will follow up by commenting on your

If an issue is appropriately well scoped and describes a beneficial change to the `dbt` codebase, then anyone may submit a Pull Request to implement the functionality described in the issue. See the sections below on how to do this.

The `dbt` maintainers will add a `good first issue` label if an issue is suitable for a first-time contributor. This label often means that the required code change is small, limited to one database adapter, or a net-new addition that does not impact existing functionality. You can see the list of currently open issues on the [Contribute](https://github.com/fishtown-analytics/dbt/contribute) page.
The `dbt` maintainers will add a `good first issue` label if an issue is suitable for a first-time contributor. This label often means that the required code change is small, limited to one database adapter, or a net-new addition that does not impact existing functionality. You can see the list of currently open issues on the [Contribute](https://github.com/dbt-labs/dbt/contribute) page.

Here's a good workflow:
- Comment on the open issue, expressing your interest in contributing the required code change

@@ -52,15 +52,15 @@ The `dbt` maintainers use labels to categorize open issues. Some labels indicate

| tag | description |
| --- | ----------- |
| [triage](https://github.com/fishtown-analytics/dbt/labels/triage) | This is a new issue which has not yet been reviewed by a `dbt` maintainer. This label is removed when a maintainer reviews and responds to the issue. |
| [bug](https://github.com/fishtown-analytics/dbt/labels/bug) | This issue represents a defect or regression in `dbt` |
| [enhancement](https://github.com/fishtown-analytics/dbt/labels/enhancement) | This issue represents net-new functionality in `dbt` |
| [good first issue](https://github.com/fishtown-analytics/dbt/labels/good%20first%20issue) | This issue does not require deep knowledge of the `dbt` codebase to implement. This issue is appropriate for a first-time contributor. |
| [help wanted](https://github.com/fishtown-analytics/`dbt`/labels/help%20wanted) / [discussion](https://github.com/fishtown-analytics/dbt/labels/discussion) | Conversation around this issue in ongoing, and there isn't yet a clear path forward. Input from community members is most welcome. |
| [duplicate](https://github.com/fishtown-analytics/dbt/issues/duplicate) | This issue is functionally identical to another open issue. The `dbt` maintainers will close this issue and encourage community members to focus conversation on the other one. |
| [snoozed](https://github.com/fishtown-analytics/dbt/labels/snoozed) | This issue describes a good idea, but one which will probably not be addressed in a six-month time horizon. The `dbt` maintainers will revist these issues periodically and re-prioritize them accordingly. |
| [stale](https://github.com/fishtown-analytics/dbt/labels/stale) | This is an old issue which has not recently been updated. Stale issues will periodically be closed by `dbt` maintainers, but they can be re-opened if the discussion is restarted. |
| [wontfix](https://github.com/fishtown-analytics/dbt/labels/wontfix) | This issue does not require a code change in the `dbt` repository, or the maintainers are unwilling/unable to merge a Pull Request which implements the behavior described in the issue. |
| [triage](https://github.com/dbt-labs/dbt/labels/triage) | This is a new issue which has not yet been reviewed by a `dbt` maintainer. This label is removed when a maintainer reviews and responds to the issue. |
| [bug](https://github.com/dbt-labs/dbt/labels/bug) | This issue represents a defect or regression in `dbt` |
| [enhancement](https://github.com/dbt-labs/dbt/labels/enhancement) | This issue represents net-new functionality in `dbt` |
| [good first issue](https://github.com/dbt-labs/dbt/labels/good%20first%20issue) | This issue does not require deep knowledge of the `dbt` codebase to implement. This issue is appropriate for a first-time contributor. |
| [help wanted](https://github.com/dbt-labs/dbt/labels/help%20wanted) / [discussion](https://github.com/dbt-labs/dbt/labels/discussion) | Conversation around this issue in ongoing, and there isn't yet a clear path forward. Input from community members is most welcome. |
| [duplicate](https://github.com/dbt-labs/dbt/issues/duplicate) | This issue is functionally identical to another open issue. The `dbt` maintainers will close this issue and encourage community members to focus conversation on the other one. |
| [snoozed](https://github.com/dbt-labs/dbt/labels/snoozed) | This issue describes a good idea, but one which will probably not be addressed in a six-month time horizon. The `dbt` maintainers will revist these issues periodically and re-prioritize them accordingly. |
| [stale](https://github.com/dbt-labs/dbt/labels/stale) | This is an old issue which has not recently been updated. Stale issues will periodically be closed by `dbt` maintainers, but they can be re-opened if the discussion is restarted. |
| [wontfix](https://github.com/dbt-labs/dbt/labels/wontfix) | This issue does not require a code change in the `dbt` repository, or the maintainers are unwilling/unable to merge a Pull Request which implements the behavior described in the issue. |

#### Branching Strategy

@@ -78,17 +78,17 @@ You will need `git` in order to download and modify the `dbt` source code. On ma

### External contributors

If you are not a member of the `fishtown-analytics` GitHub organization, you can contribute to `dbt` by forking the `dbt` repository. For a detailed overview on forking, check out the [GitHub docs on forking](https://help.github.com/en/articles/fork-a-repo). In short, you will need to:
If you are not a member of the `dbt-labs` GitHub organization, you can contribute to `dbt` by forking the `dbt` repository. For a detailed overview on forking, check out the [GitHub docs on forking](https://help.github.com/en/articles/fork-a-repo). In short, you will need to:

1. fork the `dbt` repository
2. clone your fork locally
3. check out a new branch for your proposed changes
4. push changes to your fork
5. open a pull request against `fishtown-analytics/dbt` from your forked repository
5. open a pull request against `dbt-labs/dbt` from your forked repository

### Core contributors

If you are a member of the `fishtown-analytics` GitHub organization, you will have push access to the `dbt` repo. Rather than forking `dbt` to make your changes, just clone the repository, check out a new branch, and push directly to that branch.
If you are a member of the `dbt-labs` GitHub organization, you will have push access to the `dbt` repo. Rather than forking `dbt` to make your changes, just clone the repository, check out a new branch, and push directly to that branch.

## Setting up an environment

@@ -155,7 +155,7 @@ Configure your [profile](https://docs.getdbt.com/docs/configure-your-profile) as

Getting the `dbt` integration tests set up in your local environment will be very helpful as you start to make changes to your local version of `dbt`. The section that follows outlines some helpful tips for setting up the test environment.

Since `dbt` works with a number of different databases, you will need to supply credentials for one or more of these databases in your test environment. Most organizations don't have access to each of a BigQuery, Redshift, Snowflake, and Postgres database, so it's likely that you will be unable to run every integration test locally. Fortunately, Fishtown Analytics provides a CI environment with access to sandboxed Redshift, Snowflake, BigQuery, and Postgres databases. See the section on [_Submitting a Pull Request_](#submitting-a-pull-request) below for more information on this CI setup.
Since `dbt` works with a number of different databases, you will need to supply credentials for one or more of these databases in your test environment. Most organizations don't have access to each of a BigQuery, Redshift, Snowflake, and Postgres database, so it's likely that you will be unable to run every integration test locally. Fortunately, dbt Labs provides a CI environment with access to sandboxed Redshift, Snowflake, BigQuery, and Postgres databases. See the section on [_Submitting a Pull Request_](#submitting-a-pull-request) below for more information on this CI setup.

### Initial setup

@@ -224,7 +224,7 @@ python -m pytest test/unit/test_graph.py::GraphTest::test__dependency_list

> is a list of useful command-line options for `pytest` to use while developing.
## Submitting a Pull Request

Fishtown Analytics provides a sandboxed Redshift, Snowflake, and BigQuery database for use in a CI environment. When pull requests are submitted to the `fishtown-analytics/dbt` repo, GitHub will trigger automated tests in CircleCI and Azure Pipelines.
dbt Labs provides a sandboxed Redshift, Snowflake, and BigQuery database for use in a CI environment. When pull requests are submitted to the `dbt-labs/dbt` repo, GitHub will trigger automated tests in CircleCI and Azure Pipelines.

A `dbt` maintainer will review your PR. They may suggest code revision for style or clarity, or request that you add unit or integration test(s). These are good things! We believe that, with a little bit of help, anyone can contribute high-quality code.
73 converter.py (deleted)
@@ -1,73 +0,0 @@
#!/usr/bin/env python
import json
import yaml
import sys
import argparse
from datetime import datetime, timezone
import dbt.clients.registry as registry


def yaml_type(fname):
    with open(fname) as f:
        return yaml.load(f)


def parse_args():
    parser = argparse.ArgumentParser()
    parser.add_argument("--project", type=yaml_type, default="dbt_project.yml")
    parser.add_argument("--namespace", required=True)
    return parser.parse_args()


def get_full_name(args):
    return "{}/{}".format(args.namespace, args.project["name"])


def init_project_in_packages(args, packages):
    full_name = get_full_name(args)
    if full_name not in packages:
        packages[full_name] = {
            "name": args.project["name"],
            "namespace": args.namespace,
            "latest": args.project["version"],
            "assets": {},
            "versions": {},
        }
    return packages[full_name]


def add_version_to_package(args, project_json):
    project_json["versions"][args.project["version"]] = {
        "id": "{}/{}".format(get_full_name(args), args.project["version"]),
        "name": args.project["name"],
        "version": args.project["version"],
        "description": "",
        "published_at": datetime.now(timezone.utc).astimezone().isoformat(),
        "packages": args.project.get("packages") or [],
        "works_with": [],
        "_source": {
            "type": "github",
            "url": "",
            "readme": "",
        },
        "downloads": {
            "tarball": "",
            "format": "tgz",
            "sha1": "",
        },
    }


def main():
    args = parse_args()
    packages = registry.packages()
    project_json = init_project_in_packages(args, packages)
    if args.project["version"] in project_json["versions"]:
        raise Exception("Version {} already in packages JSON"
                        .format(args.project["version"]),
                        file=sys.stderr)
    add_version_to_package(args, project_json)
    print(json.dumps(packages, indent=2))


if __name__ == "__main__":
    main()
@@ -31,7 +31,6 @@ from dbt.contracts.graph.compiled import (
from dbt.contracts.graph.manifest import Manifest, MacroManifest
from dbt.contracts.graph.parsed import ParsedSeedNode
from dbt.exceptions import warn_or_error
from dbt.node_types import NodeType
from dbt.logger import GLOBAL_LOGGER as logger
from dbt.utils import filter_null_values, executor

@@ -310,8 +309,7 @@ class BaseAdapter(metaclass=AdapterMeta):
self.Relation.create_from(self.config, node).without_identifier()
for node in manifest.nodes.values()
if (
node.resource_type in NodeType.executable() and
not node.is_ephemeral_model
node.is_relational and not node.is_ephemeral_model
)
}

@@ -513,7 +511,7 @@ class BaseAdapter(metaclass=AdapterMeta):
def get_columns_in_relation(
self, relation: BaseRelation
) -> List[BaseColumn]:
"""Get a list of the columns in the given Relation."""
"""Get a list of the columns in the given Relation. """
raise NotImplementedException(
'`get_columns_in_relation` is not implemented for this adapter!'
)
@@ -1,5 +1,5 @@
import dbt.exceptions

from typing import Any, Dict, Optional
import yaml
import yaml.scanner

@@ -56,7 +56,7 @@ def contextualized_yaml_error(raw_contents, error):
raw_error=error)


def safe_load(contents):
def safe_load(contents) -> Optional[Dict[str, Any]]:
return yaml.load(contents, Loader=SafeLoader)
@@ -120,11 +120,12 @@ class BaseContextConfigGenerator(Generic[T]):

def calculate_node_config(
self,
config_calls: List[Dict[str, Any]],
config_call_dict: Dict[str, Any],
fqn: List[str],
resource_type: NodeType,
project_name: str,
base: bool,
patch_config_dict: Dict[str, Any] = None
) -> BaseConfig:
own_config = self.get_node_project(project_name)

@@ -134,8 +135,15 @@ class BaseContextConfigGenerator(Generic[T]):
for fqn_config in project_configs:
result = self._update_from_config(result, fqn_config)

for config_call in config_calls:
result = self._update_from_config(result, config_call)
# When schema files patch config, it has lower precedence than
# config in the models (config_call_dict), so we add the patch_config_dict
# before the config_call_dict
if patch_config_dict:
result = self._update_from_config(result, patch_config_dict)

# config_calls are created in the 'experimental' model parser and
# the ParseConfigObject (via add_config_call)
result = self._update_from_config(result, config_call_dict)

if own_config.project_name != self._active_project.project_name:
for fqn_config in self._active_project_configs(fqn, resource_type):
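For reference, a minimal Python sketch (not part of the diff above) of the precedence order the hunk documents: project-level configs are applied first, then the schema-file patch config, then the in-model config, so later updates win. The helper below is hypothetical and stands in for `_update_from_config`, which does richer merging than a plain `dict.update`.

```python
# Hypothetical illustration of config precedence, assuming a plain dict merge.
def sketch_calculate_config(project_configs, patch_config_dict, config_call_dict):
    result = {}
    for fqn_config in project_configs:   # lowest precedence: dbt_project.yml configs
        result.update(fqn_config)
    if patch_config_dict:                # next: config under a schema-file patch
        result.update(patch_config_dict)
    result.update(config_call_dict)      # highest precedence: config(...) in the model
    return result


print(sketch_calculate_config(
    [{'materialized': 'view', 'tags': ['base']}],  # project config
    {'materialized': 'table'},                     # schema-file patch
    {'materialized': 'incremental'},               # in-model config() call
))  # -> {'materialized': 'incremental', 'tags': ['base']}
```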
@@ -147,11 +155,12 @@ class BaseContextConfigGenerator(Generic[T]):
@abstractmethod
def calculate_node_config_dict(
self,
config_calls: List[Dict[str, Any]],
config_call_dict: Dict[str, Any],
fqn: List[str],
resource_type: NodeType,
project_name: str,
base: bool,
patch_config_dict: Dict[str, Any],
) -> Dict[str, Any]:
...

@@ -186,18 +195,20 @@ class ContextConfigGenerator(BaseContextConfigGenerator[C]):

def calculate_node_config_dict(
self,
config_calls: List[Dict[str, Any]],
config_call_dict: Dict[str, Any],
fqn: List[str],
resource_type: NodeType,
project_name: str,
base: bool,
patch_config_dict: dict = None
) -> Dict[str, Any]:
config = self.calculate_node_config(
config_calls=config_calls,
config_call_dict=config_call_dict,
fqn=fqn,
resource_type=resource_type,
project_name=project_name,
base=base,
patch_config_dict=patch_config_dict
)
finalized = config.finalize_and_validate()
return finalized.to_dict(omit_none=True)

@@ -209,18 +220,20 @@ class UnrenderedConfigGenerator(BaseContextConfigGenerator[Dict[str, Any]]):

def calculate_node_config_dict(
self,
config_calls: List[Dict[str, Any]],
config_call_dict: Dict[str, Any],
fqn: List[str],
resource_type: NodeType,
project_name: str,
base: bool,
patch_config_dict: dict = None
) -> Dict[str, Any]:
return self.calculate_node_config(
config_calls=config_calls,
config_call_dict=config_call_dict,
fqn=fqn,
resource_type=resource_type,
project_name=project_name,
base=base,
patch_config_dict=patch_config_dict
)

def initial_result(

@@ -251,20 +264,39 @@ class ContextConfig:
resource_type: NodeType,
project_name: str,
) -> None:
self._config_calls: List[Dict[str, Any]] = []
self._config_call_dict: Dict[str, Any] = {}
self._active_project = active_project
self._fqn = fqn
self._resource_type = resource_type
self._project_name = project_name

def update_in_model_config(self, opts: Dict[str, Any]) -> None:
self._config_calls.append(opts)
def add_config_call(self, opts: Dict[str, Any]) -> None:
dct = self._config_call_dict
self._add_config_call(dct, opts)

@classmethod
def _add_config_call(cls, config_call_dict, opts: Dict[str, Any]) -> None:
for k, v in opts.items():
# MergeBehavior for post-hook and pre-hook is to collect all
# values, instead of overwriting
if k in BaseConfig.mergebehavior['append']:
if not isinstance(v, list):
v = [v]
if k in BaseConfig.mergebehavior['update'] and not isinstance(v, dict):
raise InternalException(f'expected dict, got {v}')
if k in config_call_dict and isinstance(config_call_dict[k], list):
config_call_dict[k].extend(v)
elif k in config_call_dict and isinstance(config_call_dict[k], dict):
config_call_dict[k].update(v)
else:
config_call_dict[k] = v

def build_config_dict(
self,
base: bool = False,
*,
rendered: bool = True,
patch_config_dict: dict = None
) -> Dict[str, Any]:
if rendered:
src = ContextConfigGenerator(self._active_project)

@@ -272,9 +304,10 @@ class ContextConfig:
src = UnrenderedConfigGenerator(self._active_project)

return src.calculate_node_config_dict(
config_calls=self._config_calls,
config_call_dict=self._config_call_dict,
fqn=self._fqn,
resource_type=self._resource_type,
project_name=self._project_name,
base=base,
patch_config_dict=patch_config_dict
)
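For reference, a standalone Python sketch (not part of the diff) of the merge rules that `_add_config_call` above applies when combining repeated `config(...)` calls into one `config_call_dict`. The `APPEND_KEYS`/`UPDATE_KEYS` sets are inlined here for illustration; in dbt they correspond to `BaseConfig.mergebehavior['append']` and `['update']`.

```python
# Hypothetical, self-contained re-implementation of the merge behavior shown above:
# hooks and tags accumulate, dict-valued keys merge, everything else is clobbered.
APPEND_KEYS = {'pre-hook', 'pre_hook', 'post-hook', 'post_hook', 'tags'}
UPDATE_KEYS = {'quoting', 'column_types', 'meta'}


def add_config_call(config_call_dict: dict, opts: dict) -> None:
    for k, v in opts.items():
        if k in APPEND_KEYS and not isinstance(v, list):
            v = [v]                                   # normalize scalars to lists
        if k in UPDATE_KEYS and not isinstance(v, dict):
            raise ValueError(f'expected dict, got {v}')
        if k in config_call_dict and isinstance(config_call_dict[k], list):
            config_call_dict[k].extend(v)             # append behavior
        elif k in config_call_dict and isinstance(config_call_dict[k], dict):
            config_call_dict[k].update(v)             # update behavior
        else:
            config_call_dict[k] = v                   # clobber behavior


calls = {}
add_config_call(calls, {'tags': 'nightly', 'meta': {'owner': 'core'}})
add_config_call(calls, {'tags': ['pii'], 'meta': {'tier': 1}, 'materialized': 'table'})
# calls == {'tags': ['nightly', 'pii'],
#           'meta': {'owner': 'core', 'tier': 1},
#           'materialized': 'table'}
```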
@@ -279,7 +279,7 @@ class Config(Protocol):
...


# `config` implementations
# Implementation of "config(..)" calls in models
class ParseConfigObject(Config):
def __init__(self, model, context_config: Optional[ContextConfig]):
self.model = model

@@ -316,7 +316,7 @@ class ParseConfigObject(Config):
raise RuntimeException(
'At parse time, did not receive a context config'
)
self.context_config.update_in_model_config(opts)
self.context_config.add_config_call(opts)
return ''

def set(self, name, value):

@@ -1243,7 +1243,7 @@ class ModelContext(ProviderContext):

@contextproperty
def pre_hooks(self) -> List[Dict[str, Any]]:
if isinstance(self.model, ParsedSourceDefinition):
if self.model.resource_type in [NodeType.Source, NodeType.Test]:
return []
return [
h.to_dict(omit_none=True) for h in self.model.config.pre_hook

@@ -1251,7 +1251,7 @@ class ModelContext(ProviderContext):

@contextproperty
def post_hooks(self) -> List[Dict[str, Any]]:
if isinstance(self.model, ParsedSourceDefinition):
if self.model.resource_type in [NodeType.Source, NodeType.Test]:
return []
return [
h.to_dict(omit_none=True) for h in self.model.config.post_hook
@@ -220,7 +220,7 @@ class SchemaSourceFile(BaseSourceFile):
# node patches contain models, seeds, snapshots, analyses
ndp: List[str] = field(default_factory=list)
# any macro patches in this file by macro unique_id.
mcp: List[str] = field(default_factory=list)
mcp: Dict[str, str] = field(default_factory=dict)
# any source patches in this file. The entries are package, name pairs
# Patches are only against external sources. Sources can be
# created too, but those are in 'sources'
@@ -109,7 +109,9 @@ class CompiledSnapshotNode(CompiledNode):
@dataclass
class CompiledDataTestNode(CompiledNode):
resource_type: NodeType = field(metadata={'restrict': [NodeType.Test]})
config: TestConfig = field(default_factory=TestConfig)
# Was not able to make mypy happy and keep the code working. We need to
# refactor the various configs.
config: TestConfig = field(default_factory=TestConfig) # type:ignore


@dataclass

@@ -117,7 +119,9 @@ class CompiledSchemaTestNode(CompiledNode, HasTestMetadata):
# keep this in sync with ParsedSchemaTestNode!
resource_type: NodeType = field(metadata={'restrict': [NodeType.Test]})
column_name: Optional[str] = None
config: TestConfig = field(default_factory=TestConfig)
# Was not able to make mypy happy and keep the code working. We need to
# refactor the various configs.
config: TestConfig = field(default_factory=TestConfig) # type:ignore

def same_contents(self, other) -> bool:
if other is None:
@@ -14,7 +14,7 @@ from dbt.contracts.graph.compiled import (
CompileResultNode, ManifestNode, NonSourceCompiledNode, GraphMemberNode
)
from dbt.contracts.graph.parsed import (
ParsedMacro, ParsedDocumentation, ParsedNodePatch, ParsedMacroPatch,
ParsedMacro, ParsedDocumentation,
ParsedSourceDefinition, ParsedExposure, HasUniqueID,
UnpatchedSourceDefinition, ManifestNodes
)

@@ -26,9 +26,7 @@ from dbt.contracts.util import (
from dbt.dataclass_schema import dbtClassMixin
from dbt.exceptions import (
CompilationException,
raise_duplicate_resource_name, raise_compiler_error, warn_or_error,
raise_duplicate_patch_name,
raise_duplicate_macro_patch_name, raise_duplicate_source_patch_name,
raise_duplicate_resource_name, raise_compiler_error,
)
from dbt.helper_types import PathSet
from dbt.logger import GLOBAL_LOGGER as logger

@@ -172,7 +170,7 @@ class RefableLookup(dbtClassMixin):


class AnalysisLookup(RefableLookup):
_lookup_types: ClassVar[set] = set(NodeType.Analysis)
_lookup_types: ClassVar[set] = set([NodeType.Analysis])


def _search_packages(

@@ -718,60 +716,6 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
resource_fqns[resource_type_plural].add(tuple(resource.fqn))
return resource_fqns

# This is called by 'parse_patch' in the NodePatchParser
def add_patch(
self, source_file: SchemaSourceFile, patch: ParsedNodePatch,
) -> None:
if patch.yaml_key in ['models', 'seeds', 'snapshots']:
unique_id = self.ref_lookup.get_unique_id(patch.name, None)
elif patch.yaml_key == 'analyses':
unique_id = self.analysis_lookup.get_unique_id(patch.name, None)
else:
raise dbt.exceptions.InternalException(
f'Unexpected yaml_key {patch.yaml_key} for patch in '
f'file {source_file.path.original_file_path}'
)
if unique_id is None:
# This will usually happen when a node is disabled
return

# patches can't be overwritten
node = self.nodes.get(unique_id)
if node:
if node.patch_path:
package_name, existing_file_path = node.patch_path.split('://')
raise_duplicate_patch_name(patch, existing_file_path)
source_file.append_patch(patch.yaml_key, unique_id)
node.patch(patch)

def add_macro_patch(
self, source_file: SchemaSourceFile, patch: ParsedMacroPatch,
) -> None:
# macros are fully namespaced
unique_id = f'macro.{patch.package_name}.{patch.name}'
macro = self.macros.get(unique_id)
if not macro:
warn_or_error(
f'WARNING: Found documentation for macro "{patch.name}" '
f'which was not found'
)
return
if macro.patch_path:
package_name, existing_file_path = macro.patch_path.split('://')
raise_duplicate_macro_patch_name(patch, existing_file_path)
source_file.macro_patches.append(unique_id)
macro.patch(patch)

def add_source_patch(
self, source_file: SchemaSourceFile, patch: SourcePatch,
) -> None:
# source patches must be unique
key = (patch.overrides, patch.name)
if key in self.source_patches:
raise_duplicate_source_patch_name(patch, self.source_patches[key])
self.source_patches[key] = patch
source_file.source_patches.append(key)

def get_used_schemas(self, resource_types=None):
return frozenset({
(node.database, node.schema) for node in
@@ -2,13 +2,13 @@ from dataclasses import field, Field, dataclass
from enum import Enum
from itertools import chain
from typing import (
Any, List, Optional, Dict, Union, Type, TypeVar
Any, List, Optional, Dict, Union, Type, TypeVar, Callable
)
from dbt.dataclass_schema import (
dbtClassMixin, ValidationError, register_pattern,
)
from dbt.contracts.graph.unparsed import AdditionalPropertiesAllowed
from dbt.exceptions import InternalException
from dbt.exceptions import InternalException, CompilationException
from dbt.contracts.util import Replaceable, list_str
from dbt import hooks
from dbt.node_types import NodeType

@@ -204,6 +204,34 @@ class BaseConfig(
else:
self._extra[key] = value

def __delitem__(self, key):
if hasattr(self, key):
msg = (
'Error, tried to delete config key "{}": Cannot delete '
'built-in keys'
).format(key)
raise CompilationException(msg)
else:
del self._extra[key]

def _content_iterator(self, include_condition: Callable[[Field], bool]):
seen = set()
for fld, _ in self._get_fields():
seen.add(fld.name)
if include_condition(fld):
yield fld.name

for key in self._extra:
if key not in seen:
seen.add(key)
yield key

def __iter__(self):
yield from self._content_iterator(include_condition=lambda f: True)

def __len__(self):
return len(self._get_fields()) + len(self._extra)

@staticmethod
def compare_key(
unrendered: Dict[str, Any],
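For reference, a toy Python sketch (not part of the diff) of the dict-style protocol the hunk above adds to `BaseConfig`: declared fields and ad-hoc extra keys are exposed through one iterator, and only extra keys may be deleted. `ToyConfig` below is hypothetical; the real class uses `_get_fields()` and raises `CompilationException` rather than `KeyError`.

```python
# Hypothetical illustration of "declared fields + extras" iteration and deletion.
from dataclasses import dataclass, field, fields
from typing import Any, Dict


@dataclass
class ToyConfig:
    enabled: bool = True
    materialized: str = 'view'
    _extra: Dict[str, Any] = field(default_factory=dict)

    def __iter__(self):
        for fld in fields(self):
            if fld.name != '_extra':
                yield fld.name          # declared (built-in) keys
        yield from self._extra          # ad-hoc extra keys

    def __len__(self):
        return (len(fields(self)) - 1) + len(self._extra)

    def __delitem__(self, key):
        if hasattr(self, key) and key != '_extra' and key not in self._extra:
            raise KeyError(f'cannot delete built-in key {key!r}')
        del self._extra[key]


cfg = ToyConfig()
cfg._extra['transient'] = True
print(list(cfg), len(cfg))  # ['enabled', 'materialized', 'transient'] 3
del cfg['transient']        # allowed: extra key
# del cfg['materialized']   # would raise: built-in keys are protected
```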
@@ -239,8 +267,15 @@ class BaseConfig(
return False
return True

# This is used in 'add_config_call' to created the combined config_call_dict.
# 'meta' moved here from node
mergebehavior = {
"append": ['pre-hook', 'pre_hook', 'post-hook', 'post_hook', 'tags'],
"update": ['quoting', 'column_types', 'meta'],
}

@classmethod
def _extract_dict(
def _merge_dicts(
cls, src: Dict[str, Any], data: Dict[str, Any]
) -> Dict[str, Any]:
"""Find all the items in data that match a target_field on this class,

@@ -286,10 +321,10 @@ class BaseConfig(

adapter_config_cls = get_config_class_by_name(adapter_type)

self_merged = self._extract_dict(dct, data)
self_merged = self._merge_dicts(dct, data)
dct.update(self_merged)

adapter_merged = adapter_config_cls._extract_dict(dct, data)
adapter_merged = adapter_config_cls._merge_dicts(dct, data)
dct.update(adapter_merged)

# any remaining fields must be "clobber"

@@ -321,33 +356,8 @@ class SourceConfig(BaseConfig):


@dataclass
class NodeConfig(BaseConfig):
class NodeAndTestConfig(BaseConfig):
enabled: bool = True
materialized: str = 'view'
persist_docs: Dict[str, Any] = field(default_factory=dict)
post_hook: List[Hook] = field(
default_factory=list,
metadata=MergeBehavior.Append.meta(),
)
pre_hook: List[Hook] = field(
default_factory=list,
metadata=MergeBehavior.Append.meta(),
)
# this only applies for config v1, so it doesn't participate in comparison
vars: Dict[str, Any] = field(
default_factory=dict,
metadata=metas(CompareBehavior.Exclude, MergeBehavior.Update),
)
quoting: Dict[str, Any] = field(
default_factory=dict,
metadata=MergeBehavior.Update.meta(),
)
# This is actually only used by seeds. Should it be available to others?
# That would be a breaking change!
column_types: Dict[str, Any] = field(
default_factory=dict,
metadata=MergeBehavior.Update.meta(),
)
# these fields are included in serialized output, but are not part of
# config comparison (they are part of database_representation)
alias: Optional[str] = field(

@@ -368,7 +378,38 @@ class NodeConfig(BaseConfig):
MergeBehavior.Append,
CompareBehavior.Exclude),
)
meta: Dict[str, Any] = field(
default_factory=dict,
metadata=MergeBehavior.Update.meta(),
)


@dataclass
class NodeConfig(NodeAndTestConfig):
# Note: if any new fields are added with MergeBehavior, also update the
# 'mergebehavior' dictionary
materialized: str = 'view'
persist_docs: Dict[str, Any] = field(default_factory=dict)
post_hook: List[Hook] = field(
default_factory=list,
metadata=MergeBehavior.Append.meta(),
)
pre_hook: List[Hook] = field(
default_factory=list,
metadata=MergeBehavior.Append.meta(),
)
quoting: Dict[str, Any] = field(
default_factory=dict,
metadata=MergeBehavior.Update.meta(),
)
# This is actually only used by seeds. Should it be available to others?
# That would be a breaking change!
column_types: Dict[str, Any] = field(
default_factory=dict,
metadata=MergeBehavior.Update.meta(),
)
full_refresh: Optional[bool] = None
on_schema_change: Optional[str] = 'ignore'

@classmethod
def __pre_deserialize__(cls, data):

@@ -410,7 +451,8 @@ class SeedConfig(NodeConfig):


@dataclass
class TestConfig(NodeConfig):
class TestConfig(NodeAndTestConfig):
# this is repeated because of a different default
schema: Optional[str] = field(
default='dbt_test__audit',
metadata=CompareBehavior.Exclude.meta(),
@@ -148,6 +148,7 @@ class ParsedNodeMixins(dbtClassMixin):
"""Given a ParsedNodePatch, add the new information to the node."""
# explicitly pick out the parts to update so we don't inadvertently
# step on the model name or anything
# Note: config should already be updated
self.patch_path: Optional[str] = patch.file_id
# update created_at so process_docs will run in partial parsing
self.created_at = int(time.time())

@@ -166,9 +167,6 @@ class ParsedNodeMixins(dbtClassMixin):
def get_materialization(self):
return self.config.materialized

def local_vars(self):
return self.config.vars


@dataclass
class ParsedNodeMandatory(

@@ -203,6 +201,7 @@ class ParsedNodeDefaults(ParsedNodeMandatory):
deferred: bool = False
unrendered_config: Dict[str, Any] = field(default_factory=dict)
created_at: int = field(default_factory=lambda: int(time.time()))
config_call_dict: Dict[str, Any] = field(default_factory=dict)

def write_node(self, target_path: str, subdirectory: str, payload: str):
if (os.path.basename(self.path) ==

@@ -229,6 +228,11 @@ class ParsedNode(ParsedNodeDefaults, ParsedNodeMixins, SerializableType):
def _serialize(self):
return self.to_dict()

def __post_serialize__(self, dct):
if 'config_call_dict' in dct:
del dct['config_call_dict']
return dct

@classmethod
def _deserialize(cls, dct: Dict[str, int]):
# The serialized ParsedNodes do not differ from each other

@@ -258,10 +262,16 @@ class ParsedNode(ParsedNodeDefaults, ParsedNodeMixins, SerializableType):
return cls.from_dict(dct)

def _persist_column_docs(self) -> bool:
return bool(self.config.persist_docs.get('columns'))
if hasattr(self.config, 'persist_docs'):
assert isinstance(self.config, NodeConfig)
return bool(self.config.persist_docs.get('columns'))
return False

def _persist_relation_docs(self) -> bool:
return bool(self.config.persist_docs.get('relation'))
if hasattr(self.config, 'persist_docs'):
assert isinstance(self.config, NodeConfig)
return bool(self.config.persist_docs.get('relation'))
return False

def same_body(self: T, other: T) -> bool:
return self.raw_sql == other.raw_sql

@@ -411,7 +421,9 @@ class HasTestMetadata(dbtClassMixin):
@dataclass
class ParsedDataTestNode(ParsedNode):
resource_type: NodeType = field(metadata={'restrict': [NodeType.Test]})
config: TestConfig = field(default_factory=TestConfig)
# Was not able to make mypy happy and keep the code working. We need to
# refactor the various configs.
config: TestConfig = field(default_factory=TestConfig) # type: ignore


@dataclass

@@ -419,7 +431,9 @@ class ParsedSchemaTestNode(ParsedNode, HasTestMetadata):
# keep this in sync with CompiledSchemaTestNode!
resource_type: NodeType = field(metadata={'restrict': [NodeType.Test]})
column_name: Optional[str] = None
config: TestConfig = field(default_factory=TestConfig)
# Was not able to make mypy happy and keep the code working. We need to
# refactor the various configs.
config: TestConfig = field(default_factory=TestConfig) # type: ignore

def same_contents(self, other) -> bool:
if other is None:

@@ -456,6 +470,7 @@ class ParsedPatch(HasYamlMetadata, Replaceable):
description: str
meta: Dict[str, Any]
docs: Docs
config: Dict[str, Any]


# The parsed node update is only the 'patch', not the test. The test became a

@@ -487,9 +502,6 @@ class ParsedMacro(UnparsedBaseNode, HasUniqueID):
arguments: List[MacroArgument] = field(default_factory=list)
created_at: int = field(default_factory=lambda: int(time.time()))

def local_vars(self):
return {}

def patch(self, patch: ParsedMacroPatch):
self.patch_path: Optional[str] = patch.file_id
self.description = patch.description

@@ -692,7 +704,7 @@ class ParsedSourceDefinition(

@property
def depends_on(self):
return {'nodes': []}
return DependsOn(macros=[], nodes=[])

@property
def refs(self):
@@ -126,12 +126,17 @@ class HasYamlMetadata(dbtClassMixin):


@dataclass
class UnparsedAnalysisUpdate(HasColumnDocs, HasDocs, HasYamlMetadata):
class HasConfig():
config: Dict[str, Any] = field(default_factory=dict)


@dataclass
class UnparsedAnalysisUpdate(HasConfig, HasColumnDocs, HasDocs, HasYamlMetadata):
pass


@dataclass
class UnparsedNodeUpdate(HasColumnTests, HasTests, HasYamlMetadata):
class UnparsedNodeUpdate(HasConfig, HasColumnTests, HasTests, HasYamlMetadata):
quote_columns: Optional[bool] = None


@@ -143,7 +148,7 @@ class MacroArgument(dbtClassMixin):


@dataclass
class UnparsedMacroUpdate(HasDocs, HasYamlMetadata):
class UnparsedMacroUpdate(HasConfig, HasDocs, HasYamlMetadata):
arguments: List[MacroArgument] = field(default_factory=list)


@@ -261,6 +266,7 @@ class UnparsedSourceDefinition(dbtClassMixin, Replaceable):
loaded_at_field: Optional[str] = None
tables: List[UnparsedSourceTableDefinition] = field(default_factory=list)
tags: List[str] = field(default_factory=list)
config: Dict[str, Any] = field(default_factory=dict)

@property
def yaml_key(self) -> 'str':
@@ -83,6 +83,7 @@ class GitPackage(Package):
class RegistryPackage(Package):
package: str
version: Union[RawVersion, List[RawVersion]]
install_prerelease: Optional[bool] = False

def get_versions(self) -> List[str]:
if isinstance(self.version, list):
@@ -116,6 +116,16 @@ class RPCDocsGenerateParameters(RPCParameters):
state: Optional[str] = None


@dataclass
class RPCBuildParameters(RPCParameters):
threads: Optional[int] = None
models: Union[None, str, List[str]] = None
exclude: Union[None, str, List[str]] = None
selector: Optional[str] = None
state: Optional[str] = None
defer: Optional[bool] = None


@dataclass
class RPCCliParameters(RPCParameters):
cli: str

@@ -186,6 +196,8 @@ class RPCRunOperationParameters(RPCParameters):
class RPCSourceFreshnessParameters(RPCParameters):
threads: Optional[int] = None
select: Union[None, str, List[str]] = None
exclude: Union[None, str, List[str]] = None
selector: Optional[str] = None


@dataclass
@@ -71,10 +71,14 @@ class RegistryUnpinnedPackage(
RegistryPackageMixin, UnpinnedPackage[RegistryPinnedPackage]
):
def __init__(
self, package: str, versions: List[semver.VersionSpecifier]
self,
package: str,
versions: List[semver.VersionSpecifier],
install_prerelease: bool
) -> None:
super().__init__(package)
self.versions = versions
self.install_prerelease = install_prerelease

def _check_in_index(self):
index = registry.index_cached()
@@ -91,13 +95,18 @@ class RegistryUnpinnedPackage(
semver.VersionSpecifier.from_version_string(v)
for v in raw_version
]
return cls(package=contract.package, versions=versions)
return cls(
package=contract.package,
versions=versions,
install_prerelease=contract.install_prerelease
)

def incorporate(
self, other: 'RegistryUnpinnedPackage'
) -> 'RegistryUnpinnedPackage':
return RegistryUnpinnedPackage(
package=self.package,
install_prerelease=self.install_prerelease,
versions=self.versions + other.versions,
)

@@ -111,12 +120,16 @@ class RegistryUnpinnedPackage(
raise DependencyException(new_msg) from e

available = registry.get_available_versions(self.package)
installable = semver.filter_installable(
available,
self.install_prerelease
)

# for now, pick a version and then recurse. later on,
# we'll probably want to traverse multiple options
# so we can match packages. not going to make a difference
# right now.
target = semver.resolve_to_specific_version(range_, available)
target = semver.resolve_to_specific_version(range_, installable)
if not target:
package_version_not_found(self.package, range_, available)
package_version_not_found(self.package, range_, installable)
return RegistryPinnedPackage(package=self.package, version=target)

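The hunk above threads an `install_prerelease` flag through `RegistryUnpinnedPackage` so that prerelease package versions are only considered when the project opts in. As a rough illustration of that filtering step (a minimal sketch, not dbt's actual `semver` module; the helper names and the simplistic version-string handling are assumptions):

```python
# Illustrative stand-ins for semver.filter_installable / resolve_to_specific_version
# as used in the hunk above. Real dbt uses its own semver module.
import re
from typing import List, Optional

PRERELEASE_RE = re.compile(r"^\d+\.\d+\.\d+-")  # e.g. "0.8.0-rc1"

def filter_installable(versions: List[str], install_prerelease: bool) -> List[str]:
    # Keep prerelease versions only when the package config opts in.
    if install_prerelease:
        return list(versions)
    return [v for v in versions if not PRERELEASE_RE.match(v)]

def resolve_to_specific_version(candidates: List[str]) -> Optional[str]:
    # Pick the highest remaining version by comparing the release part only
    # (prerelease ordering is ignored for brevity).
    def key(v: str):
        release = v.split("-", 1)[0]
        return tuple(int(part) for part in release.split("."))
    return max(candidates, key=key) if candidates else None

available = ["0.7.0", "0.7.1", "0.8.0-rc1"]
print(resolve_to_specific_version(filter_installable(available, False)))  # 0.7.1
print(resolve_to_specific_version(filter_installable(available, True)))   # 0.8.0-rc1
```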
@@ -710,7 +710,7 @@ def system_error(operation_name):
|
||||
raise_compiler_error(
|
||||
"dbt encountered an error when attempting to {}. "
|
||||
"If this error persists, please create an issue at: \n\n"
|
||||
"https://github.com/fishtown-analytics/dbt"
|
||||
"https://github.com/dbt-labs/dbt"
|
||||
.format(operation_name))
|
||||
|
||||
|
||||
|
||||
@@ -22,13 +22,11 @@ from dbt.contracts.graph.parsed import (
|
||||
ParsedSourceDefinition,
|
||||
)
|
||||
from dbt.contracts.state import PreviousState
|
||||
from dbt.logger import GLOBAL_LOGGER as logger
|
||||
from dbt.exceptions import (
|
||||
InternalException,
|
||||
RuntimeException,
|
||||
)
|
||||
from dbt.node_types import NodeType
|
||||
from dbt.ui import warning_tag
|
||||
|
||||
|
||||
SELECTOR_GLOB = '*'
|
||||
@@ -381,7 +379,7 @@ class TestTypeSelectorMethod(SelectorMethod):
|
||||
class StateSelectorMethod(SelectorMethod):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.macros_were_modified: Optional[List[str]] = None
|
||||
self.modified_macros: Optional[List[str]] = None
|
||||
|
||||
def _macros_modified(self) -> List[str]:
|
||||
# we checked in the caller!
|
||||
@@ -394,44 +392,74 @@ class StateSelectorMethod(SelectorMethod):
|
||||
|
||||
modified = []
|
||||
for uid, macro in new_macros.items():
|
||||
name = f'{macro.package_name}.{macro.name}'
|
||||
if uid in old_macros:
|
||||
old_macro = old_macros[uid]
|
||||
if macro.macro_sql != old_macro.macro_sql:
|
||||
modified.append(f'{name} changed')
|
||||
modified.append(uid)
|
||||
else:
|
||||
modified.append(f'{name} added')
|
||||
modified.append(uid)
|
||||
|
||||
for uid, macro in old_macros.items():
|
||||
if uid not in new_macros:
|
||||
modified.append(f'{macro.package_name}.{macro.name} removed')
|
||||
modified.append(uid)
|
||||
|
||||
return modified[:3]
|
||||
return modified
|
||||
|
||||
def check_modified(
|
||||
self,
|
||||
old: Optional[SelectorTarget],
|
||||
new: SelectorTarget,
|
||||
def recursively_check_macros_modified(self, node):
|
||||
# check if there are any changes in macros the first time
|
||||
if self.modified_macros is None:
|
||||
self.modified_macros = self._macros_modified()
|
||||
|
||||
# loop through all macros that this node depends on
|
||||
for macro_uid in node.depends_on.macros:
|
||||
# is this macro one of the modified macros?
|
||||
if macro_uid in self.modified_macros:
|
||||
return True
|
||||
# if not, and this macro depends on other macros, keep looping
|
||||
macro = self.manifest.macros[macro_uid]
|
||||
if len(macro.depends_on.macros) > 0:
|
||||
return self.recursively_check_macros_modified(macro)
|
||||
else:
|
||||
return False
|
||||
return False
|
||||
|
||||
def check_modified(self, old: Optional[SelectorTarget], new: SelectorTarget) -> bool:
|
||||
different_contents = not new.same_contents(old) # type: ignore
|
||||
upstream_macro_change = self.recursively_check_macros_modified(new)
|
||||
return different_contents or upstream_macro_change
|
||||
|
||||
def check_modified_body(self, old: Optional[SelectorTarget], new: SelectorTarget) -> bool:
|
||||
if hasattr(new, "same_body"):
|
||||
return not new.same_body(old) # type: ignore
|
||||
else:
|
||||
return False
|
||||
|
||||
def check_modified_configs(self, old: Optional[SelectorTarget], new: SelectorTarget) -> bool:
|
||||
if hasattr(new, "same_config"):
|
||||
return not new.same_config(old) # type: ignore
|
||||
else:
|
||||
return False
|
||||
|
||||
def check_modified_persisted_descriptions(
|
||||
self, old: Optional[SelectorTarget], new: SelectorTarget
|
||||
) -> bool:
|
||||
# check if there are any changes in macros, if so, log a warning the
|
||||
# first time
|
||||
if self.macros_were_modified is None:
|
||||
self.macros_were_modified = self._macros_modified()
|
||||
if self.macros_were_modified:
|
||||
log_str = ', '.join(self.macros_were_modified)
|
||||
logger.warning(warning_tag(
|
||||
f'During a state comparison, dbt detected a change in '
|
||||
f'macros. This will not be marked as a modification. Some '
|
||||
f'macros: {log_str}'
|
||||
))
|
||||
if hasattr(new, "same_persisted_description"):
|
||||
return not new.same_persisted_description(old) # type: ignore
|
||||
else:
|
||||
return False
|
||||
|
||||
return not new.same_contents(old) # type: ignore
|
||||
|
||||
def check_new(
|
||||
self,
|
||||
old: Optional[SelectorTarget],
|
||||
new: SelectorTarget,
|
||||
def check_modified_relation(
|
||||
self, old: Optional[SelectorTarget], new: SelectorTarget
|
||||
) -> bool:
|
||||
if hasattr(new, "same_database_representation"):
|
||||
return not new.same_database_representation(old) # type: ignore
|
||||
else:
|
||||
return False
|
||||
|
||||
def check_modified_macros(self, _, new: SelectorTarget) -> bool:
|
||||
return self.recursively_check_macros_modified(new)
|
||||
|
||||
def check_new(self, old: Optional[SelectorTarget], new: SelectorTarget) -> bool:
|
||||
return old is None
|
||||
|
||||
def search(
|
||||
@@ -443,8 +471,15 @@ class StateSelectorMethod(SelectorMethod):
|
||||
)
|
||||
|
||||
state_checks = {
|
||||
# it's new if there is no old version
|
||||
'new': lambda old, _: old is None,
|
||||
# use methods defined above to compare properties of old + new
|
||||
'modified': self.check_modified,
|
||||
'new': self.check_new,
|
||||
'modified.body': self.check_modified_body,
|
||||
'modified.configs': self.check_modified_configs,
|
||||
'modified.persisted_descriptions': self.check_modified_persisted_descriptions,
|
||||
'modified.relation': self.check_modified_relation,
|
||||
'modified.macros': self.check_modified_macros,
|
||||
}
|
||||
if selector in state_checks:
|
||||
checker = state_checks[selector]
|
||||
|
||||
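The `state_checks` mapping introduced above dispatches each `state:` sub-selector (`new`, `modified.body`, `modified.configs`, and so on) to a dedicated comparison over the old and new versions of a node. A minimal sketch of that dispatch pattern, using stand-in node and checker definitions rather than dbt's real classes:

```python
# Sketch of the selector -> checker dispatch used by StateSelectorMethod above.
# Node and the checkers are simplified stand-ins, not dbt's implementations.
from typing import Callable, Dict, Optional

class Node:
    def __init__(self, body: str, config: dict):
        self.body = body
        self.config = config

def check_new(old: Optional[Node], new: Node) -> bool:
    return old is None

def check_modified_body(old: Optional[Node], new: Node) -> bool:
    return old is None or new.body != old.body

def check_modified_configs(old: Optional[Node], new: Node) -> bool:
    return old is None or new.config != old.config

state_checks: Dict[str, Callable[[Optional[Node], Node], bool]] = {
    'new': check_new,
    'modified.body': check_modified_body,
    'modified.configs': check_modified_configs,
}

old = Node("select 1", {"materialized": "view"})
new = Node("select 1", {"materialized": "table"})
print(state_checks['modified.configs'](old, new))  # True: config changed
print(state_checks['modified.body'](old, new))     # False: body unchanged
```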
@@ -311,3 +311,34 @@
{{ config.set('sql_header', caller()) }}
{%- endmacro %}


{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}
{{ return(adapter.dispatch('alter_relation_add_remove_columns')(relation, add_columns, remove_columns)) }}
{% endmacro %}

{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}

{% if add_columns is none %}
{% set add_columns = [] %}
{% endif %}
{% if remove_columns is none %}
{% set remove_columns = [] %}
{% endif %}

{% set sql -%}

alter {{ relation.type }} {{ relation }}

{% for column in add_columns %}
add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}
{% endfor %}{{ ',' if remove_columns | length > 0 }}

{% for column in remove_columns %}
drop column {{ column.name }}{{ ',' if not loop.last }}
{% endfor %}

{%- endset -%}

{% do run_query(sql) %}

{% endmacro %}

@@ -79,7 +79,7 @@
|
||||
(
|
||||
select {{ dest_cols_csv }}
|
||||
from {{ source }}
|
||||
);
|
||||
)
|
||||
|
||||
{%- endmacro %}
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
|
||||
{% macro incremental_upsert(tmp_relation, target_relation, unique_key=none, statement_name="main") %}
|
||||
|
||||
{%- set dest_columns = adapter.get_columns_in_relation(target_relation) -%}
|
||||
{%- set dest_cols_csv = dest_columns | map(attribute='quoted') | join(', ') -%}
|
||||
|
||||
|
||||
@@ -5,6 +5,10 @@
|
||||
|
||||
{% set target_relation = this.incorporate(type='table') %}
|
||||
{% set existing_relation = load_relation(this) %}
|
||||
{% set tmp_relation = make_temp_relation(target_relation) %}
|
||||
{%- set full_refresh_mode = (should_full_refresh()) -%}
|
||||
|
||||
{% set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') %}
|
||||
|
||||
{% set tmp_identifier = model['name'] + '__dbt_tmp' %}
|
||||
{% set backup_identifier = model['name'] + "__dbt_backup" %}
|
||||
@@ -28,9 +32,16 @@
|
||||
{{ run_hooks(pre_hooks, inside_transaction=True) }}
|
||||
|
||||
{% set to_drop = [] %}
|
||||
|
||||
{# -- first check whether we want to full refresh for source view or config reasons #}
|
||||
{% set trigger_full_refresh = (full_refresh_mode or existing_relation.is_view) %}
|
||||
|
||||
{% if existing_relation is none %}
|
||||
{% set build_sql = create_table_as(False, target_relation, sql) %}
|
||||
{% elif existing_relation.is_view or should_full_refresh() %}
|
||||
{% elif trigger_full_refresh %}
|
||||
{#-- Make sure the backup doesn't exist so we don't encounter issues with the rename below #}
|
||||
{% set tmp_identifier = model['name'] + '__dbt_tmp' %}
|
||||
{% set backup_identifier = model['name'] + '__dbt_backup' %}
|
||||
{% set intermediate_relation = existing_relation.incorporate(path={"identifier": tmp_identifier}) %}
|
||||
{% set backup_relation = existing_relation.incorporate(path={"identifier": backup_identifier}) %}
|
||||
|
||||
@@ -38,12 +49,13 @@
|
||||
{% set need_swap = true %}
|
||||
{% do to_drop.append(backup_relation) %}
|
||||
{% else %}
|
||||
{% set tmp_relation = make_temp_relation(target_relation) %}
|
||||
{% do run_query(create_table_as(True, tmp_relation, sql)) %}
|
||||
{% do adapter.expand_target_column_types(
|
||||
{% do run_query(create_table_as(True, tmp_relation, sql)) %}
|
||||
{% do adapter.expand_target_column_types(
|
||||
from_relation=tmp_relation,
|
||||
to_relation=target_relation) %}
|
||||
{% set build_sql = incremental_upsert(tmp_relation, target_relation, unique_key=unique_key) %}
|
||||
{% do process_schema_changes(on_schema_change, tmp_relation, existing_relation) %}
|
||||
{% set build_sql = incremental_upsert(tmp_relation, target_relation, unique_key=unique_key) %}
|
||||
|
||||
{% endif %}
|
||||
|
||||
{% call statement("main") %}
|
||||
|
||||
@@ -0,0 +1,164 @@
|
||||
{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}
|
||||
|
||||
{% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}
|
||||
|
||||
{% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}
|
||||
{% do log(log_message) %}
|
||||
|
||||
{{ return(default) }}
|
||||
|
||||
{% else %}
|
||||
|
||||
{{ return(on_schema_change) }}
|
||||
|
||||
{% endif %}
|
||||
|
||||
{% endmacro %}
|
||||
|
||||
{% macro diff_columns(source_columns, target_columns) %}
|
||||
|
||||
{% set result = [] %}
|
||||
{% set source_names = source_columns | map(attribute = 'column') | list %}
|
||||
{% set target_names = target_columns | map(attribute = 'column') | list %}
|
||||
|
||||
{# --check whether the name attribute exists in the target - this does not perform a data type check #}
|
||||
{% for sc in source_columns %}
|
||||
{% if sc.name not in target_names %}
|
||||
{{ result.append(sc) }}
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
|
||||
{{ return(result) }}
|
||||
|
||||
{% endmacro %}
|
||||
|
||||
{% macro diff_column_data_types(source_columns, target_columns) %}
|
||||
|
||||
{% set result = [] %}
|
||||
{% for sc in source_columns %}
|
||||
{% set tc = target_columns | selectattr("name", "equalto", sc.name) | list | first %}
|
||||
{% if tc %}
|
||||
{% if sc.data_type != tc.data_type %}
|
||||
{{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }}
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
|
||||
{{ return(result) }}
|
||||
|
||||
{% endmacro %}
|
||||
|
||||
|
||||
{% macro check_for_schema_changes(source_relation, target_relation) %}
|
||||
|
||||
{% set schema_changed = False %}
|
||||
|
||||
{%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}
|
||||
{%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}
|
||||
{%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}
|
||||
{%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}
|
||||
|
||||
{% set new_target_types = diff_column_data_types(source_columns, target_columns) %}
|
||||
|
||||
{% if source_not_in_target != [] %}
|
||||
{% set schema_changed = True %}
|
||||
{% elif target_not_in_source != [] or new_target_types != [] %}
|
||||
{% set schema_changed = True %}
|
||||
{% elif new_target_types != [] %}
|
||||
{% set schema_changed = True %}
|
||||
{% endif %}
|
||||
|
||||
{% set changes_dict = {
|
||||
'schema_changed': schema_changed,
|
||||
'source_not_in_target': source_not_in_target,
|
||||
'target_not_in_source': target_not_in_source,
|
||||
'new_target_types': new_target_types
|
||||
} %}
|
||||
|
||||
{% set msg %}
|
||||
In {{ target_relation }}:
|
||||
Schema changed: {{ schema_changed }}
|
||||
Source columns not in target: {{ source_not_in_target }}
|
||||
Target columns not in source: {{ target_not_in_source }}
|
||||
New column types: {{ new_target_types }}
|
||||
{% endset %}
|
||||
|
||||
{% do log(msg) %}
|
||||
|
||||
{{ return(changes_dict) }}
|
||||
|
||||
{% endmacro %}
|
||||
|
||||
|
||||
{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}
|
||||
|
||||
{%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}
|
||||
|
||||
{%- if on_schema_change == 'append_new_columns'-%}
|
||||
{%- if add_to_target_arr | length > 0 -%}
|
||||
{%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}
|
||||
{%- endif -%}
|
||||
|
||||
{% elif on_schema_change == 'sync_all_columns' %}
|
||||
{%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}
|
||||
{%- set new_target_types = schema_changes_dict['new_target_types'] -%}
|
||||
|
||||
{% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %}
|
||||
{%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}
|
||||
{% endif %}
|
||||
|
||||
{% if new_target_types != [] %}
|
||||
{% for ntt in new_target_types %}
|
||||
{% set column_name = ntt['column_name'] %}
|
||||
{% set new_type = ntt['new_type'] %}
|
||||
{% do alter_column_type(target_relation, column_name, new_type) %}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
|
||||
{% endif %}
|
||||
|
||||
{% set schema_change_message %}
|
||||
In {{ target_relation }}:
|
||||
Schema change approach: {{ on_schema_change }}
|
||||
Columns added: {{ add_to_target_arr }}
|
||||
Columns removed: {{ remove_from_target_arr }}
|
||||
Data types changed: {{ new_target_types }}
|
||||
{% endset %}
|
||||
|
||||
{% do log(schema_change_message) %}
|
||||
|
||||
{% endmacro %}
|
||||
|
||||
|
||||
{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}
|
||||
|
||||
{% if on_schema_change != 'ignore' %}
|
||||
|
||||
{% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}
|
||||
|
||||
{% if schema_changes_dict['schema_changed'] %}
|
||||
|
||||
{% if on_schema_change == 'fail' %}
|
||||
|
||||
{% set fail_msg %}
|
||||
The source and target schemas on this incremental model are out of sync!
|
||||
They can be reconciled in several ways:
|
||||
- set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.
|
||||
- Re-run the incremental model with `full_refresh: True` to update the target schema.
|
||||
- update the schema manually and re-run the process.
|
||||
{% endset %}
|
||||
|
||||
{% do exceptions.raise_compiler_error(fail_msg) %}
|
||||
|
||||
{# -- unless we ignore, run the sync operation per the config #}
|
||||
{% else %}
|
||||
|
||||
{% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}
|
||||
|
||||
{% endif %}
|
||||
|
||||
{% endif %}
|
||||
|
||||
{% endif %}
|
||||
|
||||
{% endmacro %}
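The new `incremental_validate_on_schema_change` macro above accepts only `ignore`, `append_new_columns`, `sync_all_columns`, or `fail`, and quietly falls back to the default for anything else. A small Python sketch of that fallback behavior (illustrative only, not part of dbt):

```python
# Illustrative rendering of incremental_validate_on_schema_change above:
# unknown values fall back to the default rather than failing the run.
VALID_ON_SCHEMA_CHANGE = ('sync_all_columns', 'append_new_columns', 'fail', 'ignore')

def validate_on_schema_change(value, default='ignore'):
    if value not in VALID_ON_SCHEMA_CHANGE:
        print(f"Invalid value for on_schema_change ({value}) specified. "
              f"Setting default value of {default}.")
        return default
    return value

print(validate_on_schema_change('sync_all_columns'))  # sync_all_columns
print(validate_on_schema_change('replace_table'))     # falls back to ignore
```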
|
||||
@@ -21,7 +21,6 @@
|
||||
and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'
|
||||
then insert ({{ insert_cols_csv }})
|
||||
values ({{ insert_cols_csv }})
|
||||
;
|
||||
|
||||
{% endmacro %}
|
||||
|
||||
|
||||
|
||||
@@ -48,7 +48,7 @@
|
||||
|
||||
-- cleanup
|
||||
{% if old_relation is not none %}
|
||||
{{ adapter.rename_relation(target_relation, backup_relation) }}
|
||||
{{ adapter.rename_relation(old_relation, backup_relation) }}
|
||||
{% endif %}
|
||||
|
||||
{{ adapter.rename_relation(intermediate_relation, target_relation) }}
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
{% endmacro %}
|
||||
|
||||
{% macro default__handle_existing_table(full_refresh, old_relation) %}
|
||||
{{ log("Dropping relation " ~ old_relation ~ " because it is of type " ~ old_relation.type) }}
|
||||
{{ adapter.drop_relation(old_relation) }}
|
||||
{% endmacro %}
|
||||
|
||||
@@ -19,7 +20,7 @@
|
||||
*/
|
||||
#}
|
||||
|
||||
{% macro create_or_replace_view(run_outside_transaction_hooks=True) %}
|
||||
{% macro create_or_replace_view() %}
|
||||
{%- set identifier = model['alias'] -%}
|
||||
|
||||
{%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}
|
||||
@@ -30,13 +31,7 @@
|
||||
identifier=identifier, schema=schema, database=database,
|
||||
type='view') -%}
|
||||
|
||||
{% if run_outside_transaction_hooks %}
|
||||
-- no transactions on BigQuery
|
||||
{{ run_hooks(pre_hooks, inside_transaction=False) }}
|
||||
{% endif %}
|
||||
|
||||
-- `BEGIN` happens here on Snowflake
|
||||
{{ run_hooks(pre_hooks, inside_transaction=True) }}
|
||||
{{ run_hooks(pre_hooks) }}
|
||||
|
||||
-- If there's a table with the same name and we weren't told to full refresh,
|
||||
-- that's an error. If we were told to full refresh, drop it. This behavior differs
|
||||
@@ -50,14 +45,7 @@
|
||||
{{ create_view_as(target_relation, sql) }}
|
||||
{%- endcall %}
|
||||
|
||||
{{ run_hooks(post_hooks, inside_transaction=True) }}
|
||||
|
||||
{{ adapter.commit() }}
|
||||
|
||||
{% if run_outside_transaction_hooks %}
|
||||
-- No transactions on BigQuery
|
||||
{{ run_hooks(post_hooks, inside_transaction=False) }}
|
||||
{% endif %}
|
||||
{{ run_hooks(post_hooks) }}
|
||||
|
||||
{{ return({'relations': [target_relation]}) }}
|
||||
|
||||
|
||||
@@ -54,7 +54,7 @@
|
||||
-- cleanup
|
||||
-- move the existing view out of the way
|
||||
{% if old_relation is not none %}
|
||||
{{ adapter.rename_relation(target_relation, backup_relation) }}
|
||||
{{ adapter.rename_relation(old_relation, backup_relation) }}
|
||||
{% endif %}
|
||||
{{ adapter.rename_relation(intermediate_relation, target_relation) }}
|
||||
|
||||
|
||||
@@ -1,16 +1,23 @@

{% macro default__test_relationships(model, column_name, to, field) %}

with child as (
select * from {{ model }}
where {{ column_name }} is not null
),

parent as (
select * from {{ to }}
)

select
child.{{ column_name }}

from {{ model }} as child

left join {{ to }} as parent
from child
left join parent
on child.{{ column_name }} = parent.{{ field }}

where child.{{ column_name }} is not null
and parent.{{ field }} is null
where parent.{{ field }} is null

{% endmacro %}


File diff suppressed because one or more lines are too long
@@ -43,6 +43,15 @@ DEBUG_LOG_FORMAT = (
'{record.message}'
)

SECRET_ENV_PREFIX = 'DBT_ENV_SECRET_'


def get_secret_env() -> List[str]:
return [
v for k, v in os.environ.items()
if k.startswith(SECRET_ENV_PREFIX)
]


ExceptionInformation = str

@@ -333,6 +342,12 @@ class TimestampNamed(logbook.Processor):
record.extra[self.name] = datetime.utcnow().isoformat()


class ScrubSecrets(logbook.Processor):
def process(self, record):
for secret in get_secret_env():
record.message = record.message.replace(secret, "*****")


logger = logbook.Logger('dbt')
# provide this for the cache, disabled by default
CACHE_LOGGER = logbook.Logger('dbt.cache')
@@ -473,7 +488,8 @@ class LogManager(logbook.NestedSetup):
self._file_handler = DelayedFileHandler()
self._relevel_processor = Relevel(allowed=['dbt', 'werkzeug'])
self._state_processor = DbtProcessState('internal')
# keep track of wheter we've already entered to decide if we should
self._scrub_processor = ScrubSecrets()
# keep track of whether we've already entered to decide if we should
# be actually pushing. This allows us to log in main() and also
# support entering dbt execution via handle_and_check.
self._stack_depth = 0
@@ -483,6 +499,7 @@ class LogManager(logbook.NestedSetup):
self._file_handler,
self._relevel_processor,
self._state_processor,
self._scrub_processor
])

def push_application(self):

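The logger changes above collect the values of any environment variables prefixed with `DBT_ENV_SECRET_` and mask them in every log record via the new `ScrubSecrets` processor. A minimal sketch of the same behavior without logbook (the example variable name and message are made up):

```python
# Illustrative sketch of the secret-scrubbing behavior added above.
# Mirrors get_secret_env() / ScrubSecrets.process() without logbook.
import os
from typing import List

SECRET_ENV_PREFIX = 'DBT_ENV_SECRET_'

def get_secret_env() -> List[str]:
    # Values (not names) of any env var that opts into scrubbing via the prefix.
    return [
        v for k, v in os.environ.items()
        if k.startswith(SECRET_ENV_PREFIX)
    ]

def scrub(message: str) -> str:
    # Replace every secret value that leaks into a log line with a mask.
    for secret in get_secret_env():
        message = message.replace(secret, '*****')
    return message

os.environ['DBT_ENV_SECRET_GIT_TOKEN'] = 'hunter2'   # example value
print(scrub('cloning https://user:hunter2@example.com/repo.git'))
# -> cloning https://user:*****@example.com/repo.git
```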
@@ -41,6 +41,7 @@ class DBTVersion(argparse.Action):
|
||||
"""This is very very similar to the builtin argparse._Version action,
|
||||
except it just calls dbt.version.get_version_information().
|
||||
"""
|
||||
|
||||
def __init__(self,
|
||||
option_strings,
|
||||
version=None,
|
||||
@@ -755,23 +756,14 @@ def _build_test_subparser(subparsers, base_subparser):
|
||||
return sub
|
||||
|
||||
|
||||
def _build_source_snapshot_freshness_subparser(subparsers, base_subparser):
|
||||
def _build_source_freshness_subparser(subparsers, base_subparser):
|
||||
sub = subparsers.add_parser(
|
||||
'snapshot-freshness',
|
||||
'freshness',
|
||||
parents=[base_subparser],
|
||||
help='''
|
||||
Snapshots the current freshness of the project's sources
|
||||
''',
|
||||
)
|
||||
sub.add_argument(
|
||||
'-s',
|
||||
'--select',
|
||||
required=False,
|
||||
nargs='+',
|
||||
help='''
|
||||
Specify the sources to snapshot freshness
|
||||
''',
|
||||
dest='selected'
|
||||
aliases=['snapshot-freshness'],
|
||||
)
|
||||
sub.add_argument(
|
||||
'-o',
|
||||
@@ -792,9 +784,16 @@ def _build_source_snapshot_freshness_subparser(subparsers, base_subparser):
|
||||
)
|
||||
sub.set_defaults(
|
||||
cls=freshness_task.FreshnessTask,
|
||||
which='snapshot-freshness',
|
||||
rpc_method='snapshot-freshness',
|
||||
which='source-freshness',
|
||||
rpc_method='source-freshness',
|
||||
)
|
||||
_add_select_argument(
|
||||
sub,
|
||||
dest='select',
|
||||
metavar='SELECTOR',
|
||||
required=False,
|
||||
)
|
||||
_add_common_selector_arguments(sub)
|
||||
return sub
|
||||
|
||||
|
||||
@@ -1073,18 +1072,18 @@ def parse_args(args, cls=DBTArgumentParser):
|
||||
seed_sub = _build_seed_subparser(subs, base_subparser)
|
||||
# --threads, --no-version-check
|
||||
_add_common_arguments(run_sub, compile_sub, generate_sub, test_sub,
|
||||
rpc_sub, seed_sub, parse_sub)
|
||||
rpc_sub, seed_sub, parse_sub, build_sub)
|
||||
# --models, --exclude
|
||||
# list_sub sets up its own arguments.
|
||||
_add_selection_arguments(build_sub, run_sub, compile_sub, generate_sub, test_sub)
|
||||
_add_selection_arguments(snapshot_sub, seed_sub, models_name='select')
|
||||
# --defer
|
||||
_add_defer_argument(run_sub, test_sub)
|
||||
_add_defer_argument(run_sub, test_sub, build_sub)
|
||||
# --full-refresh
|
||||
_add_table_mutability_arguments(run_sub, compile_sub)
|
||||
_add_table_mutability_arguments(run_sub, compile_sub, build_sub)
|
||||
|
||||
_build_docs_serve_subparser(docs_subs, base_subparser)
|
||||
_build_source_snapshot_freshness_subparser(source_subs, base_subparser)
|
||||
_build_source_freshness_subparser(source_subs, base_subparser)
|
||||
_build_run_operation_subparser(subs, base_subparser)
|
||||
|
||||
if len(args) == 0:
|
||||
|
||||
@@ -256,9 +256,7 @@ class ConfiguredParser(
|
||||
parsed_node, self.root_project, self.manifest, config
|
||||
)
|
||||
|
||||
def render_with_context(
|
||||
self, parsed_node: IntermediateNode, config: ContextConfig
|
||||
) -> None:
|
||||
def render_with_context(self, parsed_node: IntermediateNode, config: ContextConfig):
|
||||
# Given the parsed node and a ContextConfig to use during parsing,
|
||||
# render the node's sql with macro capture enabled.
|
||||
# Note: this mutates the config object when config calls are rendered.
|
||||
@@ -273,11 +271,12 @@ class ConfiguredParser(
|
||||
get_rendered(
|
||||
parsed_node.raw_sql, context, parsed_node, capture_macros=True
|
||||
)
|
||||
return context
|
||||
|
||||
# This is taking the original config for the node, converting it to a dict,
|
||||
# updating the config with new config passed in, then re-creating the
|
||||
# config from the dict in the node.
|
||||
def update_parsed_node_config(
|
||||
def update_parsed_node_config_dict(
|
||||
self, parsed_node: IntermediateNode, config_dict: Dict[str, Any]
|
||||
) -> None:
|
||||
# Overwrite node config
|
||||
@@ -294,28 +293,50 @@ class ConfiguredParser(
|
||||
self._update_node_schema(parsed_node, config_dict)
|
||||
self._update_node_alias(parsed_node, config_dict)
|
||||
|
||||
def update_parsed_node(
|
||||
self, parsed_node: IntermediateNode, config: ContextConfig
|
||||
def update_parsed_node_config(
|
||||
self, parsed_node: IntermediateNode, config: ContextConfig,
|
||||
context=None, patch_config_dict=None
|
||||
) -> None:
|
||||
"""Given the ContextConfig used for parsing and the parsed node,
|
||||
generate and set the true values to use, overriding the temporary parse
|
||||
values set in _build_intermediate_parsed_node.
|
||||
"""
|
||||
config_dict = config.build_config_dict()
|
||||
|
||||
# Set tags on node provided in config blocks
|
||||
# build_config_dict takes the config_call_dict in the ContextConfig object
|
||||
# and calls calculate_node_config to combine dbt_project configs and
|
||||
# config calls from SQL files
|
||||
config_dict = config.build_config_dict(patch_config_dict=patch_config_dict)
|
||||
|
||||
# Set tags on node provided in config blocks. Tags are additive, so even if
|
||||
# config has been built before, we don't have to reset tags in the parsed_node.
|
||||
model_tags = config_dict.get('tags', [])
|
||||
parsed_node.tags.extend(model_tags)
|
||||
for tag in model_tags:
|
||||
if tag not in parsed_node.tags:
|
||||
parsed_node.tags.append(tag)
|
||||
|
||||
# If we have meta in the config, copy to node level, for backwards
|
||||
# compatibility with earlier node-only config.
|
||||
if 'meta' in config_dict and config_dict['meta']:
|
||||
parsed_node.meta = config_dict['meta']
|
||||
|
||||
# unrendered_config is used to compare the original database/schema/alias
|
||||
# values and to handle 'same_config' and 'same_contents' calls
|
||||
parsed_node.unrendered_config = config.build_config_dict(
|
||||
rendered=False
|
||||
)
|
||||
|
||||
parsed_node.config_call_dict = config._config_call_dict
|
||||
|
||||
# do this once before we parse the node database/schema/alias, so
|
||||
# parsed_node.config is what it would be if they did nothing
|
||||
self.update_parsed_node_config(parsed_node, config_dict)
|
||||
self.update_parsed_node_config_dict(parsed_node, config_dict)
|
||||
# This updates the node database/schema/alias
|
||||
self.update_parsed_node_name(parsed_node, config_dict)
|
||||
|
||||
# tests don't have hooks
|
||||
if parsed_node.resource_type == NodeType.Test:
|
||||
return
|
||||
|
||||
# at this point, we've collected our hooks. Use the node context to
|
||||
# render each hook and collect refs/sources
|
||||
hooks = list(itertools.chain(parsed_node.config.pre_hook,
|
||||
@@ -323,9 +344,8 @@ class ConfiguredParser(
|
||||
# skip context rebuilding if there aren't any hooks
|
||||
if not hooks:
|
||||
return
|
||||
# we could cache the original context from parsing this node. Is that
|
||||
# worth the cost in memory/complexity?
|
||||
context = self._context_for(parsed_node, config)
|
||||
if not context:
|
||||
context = self._context_for(parsed_node, config)
|
||||
for hook in hooks:
|
||||
get_rendered(hook.sql, context, parsed_node, capture_macros=True)
|
||||
|
||||
@@ -357,8 +377,8 @@ class ConfiguredParser(
|
||||
self, node: IntermediateNode, config: ContextConfig
|
||||
) -> None:
|
||||
try:
|
||||
self.render_with_context(node, config)
|
||||
self.update_parsed_node(node, config)
|
||||
context = self.render_with_context(node, config)
|
||||
self.update_parsed_node_config(node, config, context=context)
|
||||
except ValidationError as exc:
|
||||
# we got a ValidationError - probably bad types in config()
|
||||
msg = validator_error_message(exc)
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
from dataclasses import dataclass
|
||||
from dataclasses import field
|
||||
import os
|
||||
import traceback
|
||||
from typing import (
|
||||
Dict, Optional, Mapping, Callable, Any, List, Type, Union
|
||||
Dict, Optional, Mapping, Callable, Any, List, Type, Union, Tuple
|
||||
)
|
||||
import time
|
||||
|
||||
@@ -59,13 +60,24 @@ from dbt.parser.sources import SourcePatcher
|
||||
from dbt.ui import warning_tag
|
||||
from dbt.version import __version__
|
||||
|
||||
from dbt.dataclass_schema import dbtClassMixin
|
||||
from dbt.dataclass_schema import StrEnum, dbtClassMixin
|
||||
|
||||
PARTIAL_PARSE_FILE_NAME = 'partial_parse.msgpack'
|
||||
PARSING_STATE = DbtProcessState('parsing')
|
||||
DEFAULT_PARTIAL_PARSE = False
|
||||
|
||||
|
||||
class ReparseReason(StrEnum):
|
||||
version_mismatch = '01_version_mismatch'
|
||||
file_not_found = '02_file_not_found'
|
||||
vars_changed = '03_vars_changed'
|
||||
profile_changed = '04_profile_changed'
|
||||
deps_changed = '05_deps_changed'
|
||||
project_config_changed = '06_project_config_changed'
|
||||
load_file_failure = '07_load_file_failure'
|
||||
exception = '08_exception'
|
||||
|
||||
|
||||
# Part of saved performance info
|
||||
@dataclass
|
||||
class ParserInfo(dbtClassMixin):
|
||||
@@ -189,10 +201,6 @@ class ManifestLoader:
|
||||
# Read files creates a dictionary of projects to a dictionary
|
||||
# of parsers to lists of file strings. The file strings are
|
||||
# used to get the SourceFiles from the manifest files.
|
||||
# In the future the loaded files will be used to control
|
||||
# partial parsing, but right now we're just moving the
|
||||
# file loading out of the individual parsers and doing it
|
||||
# all at once.
|
||||
start_read_files = time.perf_counter()
|
||||
project_parser_files = {}
|
||||
for project in self.all_projects.values():
|
||||
@@ -204,15 +212,51 @@ class ManifestLoader:
|
||||
if self.saved_manifest is not None:
|
||||
partial_parsing = PartialParsing(self.saved_manifest, self.manifest.files)
|
||||
skip_parsing = partial_parsing.skip_parsing()
|
||||
if not skip_parsing:
|
||||
if skip_parsing:
|
||||
# nothing changed, so we don't need to generate project_parser_files
|
||||
self.manifest = self.saved_manifest
|
||||
else:
|
||||
# create child_map and parent_map
|
||||
self.saved_manifest.build_parent_and_child_maps()
|
||||
# files are different, we need to create a new set of
|
||||
# project_parser_files.
|
||||
project_parser_files = partial_parsing.get_parsing_files()
|
||||
self.partially_parsing = True
|
||||
try:
|
||||
project_parser_files = partial_parsing.get_parsing_files()
|
||||
self.partially_parsing = True
|
||||
self.manifest = self.saved_manifest
|
||||
except Exception:
|
||||
# pp_files should still be the full set and manifest is new manifest,
|
||||
# since get_parsing_files failed
|
||||
logger.info("Partial parsing enabled but an error occurred. "
|
||||
"Switching to a full re-parse.")
|
||||
|
||||
self.manifest = self.saved_manifest
|
||||
# Get traceback info
|
||||
tb_info = traceback.format_exc()
|
||||
formatted_lines = tb_info.splitlines()
|
||||
(_, line, method) = formatted_lines[-3].split(', ')
|
||||
exc_info = {
|
||||
"traceback": tb_info,
|
||||
"exception": formatted_lines[-1],
|
||||
"code": formatted_lines[-2],
|
||||
"location": f"{line} {method}",
|
||||
}
|
||||
|
||||
# get file info for local logs
|
||||
parse_file_type = None
|
||||
file_id = partial_parsing.processing_file
|
||||
if file_id and file_id in self.manifest.files:
|
||||
old_file = self.manifest.files[file_id]
|
||||
parse_file_type = old_file.parse_file_type
|
||||
logger.debug(f"Partial parsing exception processing file {file_id}")
|
||||
file_dict = old_file.to_dict()
|
||||
logger.debug(f"PP file: {file_dict}")
|
||||
exc_info['parse_file_type'] = parse_file_type
|
||||
logger.debug(f"PP exception info: {exc_info}")
|
||||
|
||||
# Send event
|
||||
if dbt.tracking.active_user is not None:
|
||||
exc_info['full_reparse_reason'] = ReparseReason.exception
|
||||
dbt.tracking.track_partial_parser(exc_info)
|
||||
|
||||
if self.manifest._parsing_info is None:
|
||||
self.manifest._parsing_info = ParsingInfo()
|
||||
@@ -379,10 +423,10 @@ class ManifestLoader:
|
||||
if not self.partially_parsing and HookParser in parser_types:
|
||||
hook_parser = HookParser(project, self.manifest, self.root_project)
|
||||
path = hook_parser.get_path()
|
||||
file_block = FileBlock(
|
||||
load_source_file(path, ParseFileType.Hook, project.project_name)
|
||||
)
|
||||
hook_parser.parse_file(file_block)
|
||||
file = load_source_file(path, ParseFileType.Hook, project.project_name)
|
||||
if file:
|
||||
file_block = FileBlock(file)
|
||||
hook_parser.parse_file(file_block)
|
||||
|
||||
# Store the performance info
|
||||
elapsed = time.perf_counter() - start_timer
|
||||
@@ -434,6 +478,12 @@ class ManifestLoader:
|
||||
path = os.path.join(self.root_project.target_path,
|
||||
PARTIAL_PARSE_FILE_NAME)
|
||||
try:
|
||||
# This shouldn't be necessary, but we have gotten bug reports (#3757) of the
|
||||
# saved manifest not matching the code version.
|
||||
if self.manifest.metadata.dbt_version != __version__:
|
||||
logger.debug("Manifest metadata did not contain correct version. "
|
||||
f"Contained '{self.manifest.metadata.dbt_version}' instead.")
|
||||
self.manifest.metadata.dbt_version = __version__
|
||||
manifest_msgpack = self.manifest.to_msgpack()
|
||||
make_directory(os.path.dirname(path))
|
||||
with open(path, 'wb') as fp:
|
||||
@@ -441,24 +491,31 @@ class ManifestLoader:
|
||||
except Exception:
|
||||
raise
|
||||
|
||||
def matching_parse_results(self, manifest: Manifest) -> bool:
|
||||
def is_partial_parsable(self, manifest: Manifest) -> Tuple[bool, Optional[str]]:
|
||||
"""Compare the global hashes of the read-in parse results' values to
|
||||
the known ones, and return if it is ok to re-use the results.
|
||||
"""
|
||||
valid = True
|
||||
reparse_reason = None
|
||||
|
||||
if manifest.metadata.dbt_version != __version__:
|
||||
logger.info("Unable to do partial parsing because of a dbt version mismatch")
|
||||
return False # If the version is wrong, the other checks might not work
|
||||
# #3757 log both versions because of reports of invalid cases of mismatch.
|
||||
logger.info("Unable to do partial parsing because of a dbt version mismatch. "
|
||||
f"Saved manifest version: {manifest.metadata.dbt_version}. "
|
||||
f"Current version: {__version__}.")
|
||||
# If the version is wrong, the other checks might not work
|
||||
return False, ReparseReason.version_mismatch
|
||||
if self.manifest.state_check.vars_hash != manifest.state_check.vars_hash:
|
||||
logger.info("Unable to do partial parsing because config vars, "
|
||||
"config profile, or config target have changed")
|
||||
valid = False
|
||||
reparse_reason = ReparseReason.vars_changed
|
||||
if self.manifest.state_check.profile_hash != manifest.state_check.profile_hash:
|
||||
# Note: This should be made more granular. We shouldn't need to invalidate
|
||||
# partial parsing if a non-used profile section has changed.
|
||||
logger.info("Unable to do partial parsing because profile has changed")
|
||||
valid = False
|
||||
reparse_reason = ReparseReason.profile_changed
|
||||
|
||||
missing_keys = {
|
||||
k for k in self.manifest.state_check.project_hashes
|
||||
@@ -467,6 +524,7 @@ class ManifestLoader:
|
||||
if missing_keys:
|
||||
logger.info("Unable to do partial parsing because a project dependency has been added")
|
||||
valid = False
|
||||
reparse_reason = ReparseReason.deps_changed
|
||||
|
||||
for key, new_value in self.manifest.state_check.project_hashes.items():
|
||||
if key in manifest.state_check.project_hashes:
|
||||
@@ -475,7 +533,8 @@ class ManifestLoader:
|
||||
logger.info("Unable to do partial parsing because "
|
||||
"a project config has changed")
|
||||
valid = False
|
||||
return valid
|
||||
reparse_reason = ReparseReason.project_config_changed
|
||||
return valid, reparse_reason
|
||||
|
||||
def _partial_parse_enabled(self):
|
||||
# if the CLI is set, follow that
|
||||
@@ -494,6 +553,8 @@ class ManifestLoader:
|
||||
path = os.path.join(self.root_project.target_path,
|
||||
PARTIAL_PARSE_FILE_NAME)
|
||||
|
||||
reparse_reason = None
|
||||
|
||||
if os.path.exists(path):
|
||||
try:
|
||||
with open(path, 'rb') as fp:
|
||||
@@ -502,7 +563,8 @@ class ManifestLoader:
|
||||
# keep this check inside the try/except in case something about
|
||||
# the file has changed in weird ways, perhaps due to being a
|
||||
# different version of dbt
|
||||
if self.matching_parse_results(manifest):
|
||||
is_partial_parseable, reparse_reason = self.is_partial_parsable(manifest)
|
||||
if is_partial_parseable:
|
||||
return manifest
|
||||
except Exception as exc:
|
||||
logger.debug(
|
||||
@@ -510,8 +572,13 @@ class ManifestLoader:
|
||||
.format(path, exc),
|
||||
exc_info=True
|
||||
)
|
||||
reparse_reason = ReparseReason.load_file_failure
|
||||
else:
|
||||
logger.info(f"Unable to do partial parsing because {path} not found")
|
||||
reparse_reason = ReparseReason.file_not_found
|
||||
|
||||
# this event is only fired if a full reparse is needed
|
||||
dbt.tracking.track_partial_parser({'full_reparse_reason': reparse_reason})
|
||||
|
||||
return None
|
||||
|
||||
|
||||
@@ -1,15 +1,15 @@
|
||||
from dbt.context.context_config import ContextConfig
|
||||
from dbt.contracts.graph.parsed import ParsedModelNode
|
||||
import dbt.flags as flags
|
||||
import dbt.tracking
|
||||
from dbt.node_types import NodeType
|
||||
from dbt.parser.base import SimpleSQLParser
|
||||
from dbt.parser.search import FileBlock
|
||||
import dbt.tracking as tracking
|
||||
from dbt import utils
|
||||
from dbt_extractor import ExtractionError, py_extract_from_source # type: ignore
|
||||
import itertools
|
||||
import random
|
||||
from typing import Any, Dict, List, Tuple
|
||||
from typing import Any, Dict, List
|
||||
|
||||
|
||||
class ModelParser(SimpleSQLParser[ParsedModelNode]):
|
||||
@@ -40,9 +40,9 @@ class ModelParser(SimpleSQLParser[ParsedModelNode]):
|
||||
experimentally_parsed: Dict[str, List[Any]] = py_extract_from_source(node.raw_sql)
|
||||
|
||||
# second config format
|
||||
config_calls: List[Dict[str, str]] = []
|
||||
config_call_dict: Dict[str, Any] = {}
|
||||
for c in experimentally_parsed['configs']:
|
||||
config_calls.append({c[0]: c[1]})
|
||||
ContextConfig._add_config_call(config_call_dict, {c[0]: c[1]})
|
||||
|
||||
# format sources TODO change extractor to match this type
|
||||
source_calls: List[List[str]] = []
|
||||
@@ -64,22 +64,15 @@ class ModelParser(SimpleSQLParser[ParsedModelNode]):
|
||||
if isinstance(experimentally_parsed, Exception):
|
||||
result += ["01_experimental_parser_cannot_parse"]
|
||||
else:
|
||||
# rearrange existing configs to match:
|
||||
real_configs: List[Tuple[str, Any]] = list(
|
||||
itertools.chain.from_iterable(
|
||||
map(lambda x: x.items(), config._config_calls)
|
||||
)
|
||||
)
|
||||
|
||||
# look for false positive configs
|
||||
for c in experimentally_parsed['configs']:
|
||||
if c not in real_configs:
|
||||
for k in config_call_dict.keys():
|
||||
if k not in config._config_call_dict:
|
||||
result += ["02_false_positive_config_value"]
|
||||
break
|
||||
|
||||
# look for missed configs
|
||||
for c in real_configs:
|
||||
if c not in experimentally_parsed['configs']:
|
||||
for k in config._config_call_dict.keys():
|
||||
if k not in config_call_dict:
|
||||
result += ["03_missed_config_value"]
|
||||
break
|
||||
|
||||
@@ -116,33 +109,35 @@ class ModelParser(SimpleSQLParser[ParsedModelNode]):
|
||||
# no false positives or misses, we can expect the number model
|
||||
# files parseable by the experimental parser to match our internal
|
||||
# testing.
|
||||
tracking.track_experimental_parser_sample({
|
||||
"project_id": self.root_project.hashed_name(),
|
||||
"file_id": utils.get_hash(node),
|
||||
"status": result
|
||||
})
|
||||
if dbt.tracking.active_user is not None: # None in some tests
|
||||
tracking.track_experimental_parser_sample({
|
||||
"project_id": self.root_project.hashed_name(),
|
||||
"file_id": utils.get_hash(node),
|
||||
"status": result
|
||||
})
|
||||
|
||||
# if the --use-experimental-parser flag was set, and the experimental parser succeeded
|
||||
elif not isinstance(experimentally_parsed, Exception):
|
||||
# since it doesn't need python jinja, fit the refs, sources, and configs
|
||||
# into the node. Down the line the rest of the node will be updated with
|
||||
# this information. (e.g. depends_on etc.)
|
||||
config._config_calls = config_calls
|
||||
config._config_call_dict = config_call_dict
|
||||
|
||||
# this uses the updated config to set all the right things in the node.
|
||||
# if there are hooks present, it WILL render jinja. Will need to change
|
||||
# when the experimental parser supports hooks
|
||||
self.update_parsed_node(node, config)
|
||||
self.update_parsed_node_config(node, config)
|
||||
|
||||
# update the unrendered config with values from the file.
|
||||
# values from yaml files are in there already
|
||||
node.unrendered_config.update(dict(experimentally_parsed['configs']))
|
||||
|
||||
# set refs, sources, and configs on the node object
|
||||
# set refs and sources on the node object
|
||||
node.refs += experimentally_parsed['refs']
|
||||
node.sources += experimentally_parsed['sources']
|
||||
for configv in experimentally_parsed['configs']:
|
||||
node.config[configv[0]] = configv[1]
|
||||
|
||||
# configs don't need to be merged into the node
|
||||
# setting them in config._config_call_dict is sufficient
|
||||
|
||||
self.manifest._parsing_info.static_analysis_parsed_path_count += 1
|
||||
|
||||
|
||||
@@ -46,6 +46,7 @@ class PartialParsing:
|
||||
self.deleted_manifest = Manifest()
|
||||
self.macro_child_map: Dict[str, List[str]] = {}
|
||||
self.build_file_diff()
|
||||
self.processing_file = None
|
||||
|
||||
def skip_parsing(self):
|
||||
return (
|
||||
@@ -118,16 +119,21 @@ class PartialParsing:
|
||||
# Need to add new files first, because changes in schema files
|
||||
# might refer to them
|
||||
for file_id in self.file_diff['added']:
|
||||
self.processing_file = file_id
|
||||
self.add_to_saved(file_id)
|
||||
# Need to process schema files next, because the dictionaries
|
||||
# need to be in place for handling SQL file changes
|
||||
for file_id in self.file_diff['changed_schema_files']:
|
||||
self.processing_file = file_id
|
||||
self.change_schema_file(file_id)
|
||||
for file_id in self.file_diff['deleted_schema_files']:
|
||||
self.processing_file = file_id
|
||||
self.delete_schema_file(file_id)
|
||||
for file_id in self.file_diff['deleted']:
|
||||
self.processing_file = file_id
|
||||
self.delete_from_saved(file_id)
|
||||
for file_id in self.file_diff['changed']:
|
||||
self.processing_file = file_id
|
||||
self.update_in_saved(file_id)
|
||||
return self.project_parser_files
|
||||
|
||||
@@ -147,6 +153,18 @@ class PartialParsing:
|
||||
file_id not in self.file_diff['deleted']):
|
||||
self.project_parser_files[project_name][parser_name].append(file_id)
|
||||
|
||||
def already_scheduled_for_parsing(self, source_file):
|
||||
file_id = source_file.file_id
|
||||
project_name = source_file.project_name
|
||||
if project_name not in self.project_parser_files:
|
||||
return False
|
||||
parser_name = parse_file_type_to_parser[source_file.parse_file_type]
|
||||
if parser_name not in self.project_parser_files[project_name]:
|
||||
return False
|
||||
if file_id not in self.project_parser_files[project_name][parser_name]:
|
||||
return False
|
||||
return True
|
||||
|
||||
# Add new files, including schema files
|
||||
def add_to_saved(self, file_id):
|
||||
# add file object to saved manifest.files
|
||||
@@ -211,6 +229,9 @@ class PartialParsing:
|
||||
# Updated schema files should have been processed already.
|
||||
def update_mssat_in_saved(self, new_source_file, old_source_file):
|
||||
|
||||
if self.already_scheduled_for_parsing(old_source_file):
|
||||
return
|
||||
|
||||
# These files only have one node.
|
||||
unique_id = old_source_file.nodes[0]
|
||||
|
||||
@@ -251,12 +272,16 @@ class PartialParsing:
|
||||
schema_file.node_patches.remove(unique_id)
|
||||
|
||||
def update_macro_in_saved(self, new_source_file, old_source_file):
|
||||
if self.already_scheduled_for_parsing(old_source_file):
|
||||
return
|
||||
self.handle_macro_file_links(old_source_file, follow_references=True)
|
||||
file_id = new_source_file.file_id
|
||||
self.saved_files[file_id] = new_source_file
|
||||
self.add_to_pp_files(new_source_file)
|
||||
|
||||
def update_doc_in_saved(self, new_source_file, old_source_file):
|
||||
if self.already_scheduled_for_parsing(old_source_file):
|
||||
return
|
||||
self.delete_doc_node(old_source_file)
|
||||
self.saved_files[new_source_file.file_id] = new_source_file
|
||||
self.add_to_pp_files(new_source_file)
|
||||
@@ -343,7 +368,8 @@ class PartialParsing:
|
||||
for unique_id in macros:
|
||||
if unique_id not in self.saved_manifest.macros:
|
||||
# This happens when a macro has already been removed
|
||||
source_file.macros.remove(unique_id)
|
||||
if unique_id in source_file.macros:
|
||||
source_file.macros.remove(unique_id)
|
||||
continue
|
||||
|
||||
base_macro = self.saved_manifest.macros.pop(unique_id)
|
||||
@@ -369,7 +395,9 @@ class PartialParsing:
|
||||
macro_patch = self.get_schema_element(macro_patches, base_macro.name)
|
||||
self.delete_schema_macro_patch(schema_file, macro_patch)
|
||||
self.merge_patch(schema_file, 'macros', macro_patch)
|
||||
source_file.macros.remove(unique_id)
|
||||
# The macro may have already been removed by handling macro children
|
||||
if unique_id in source_file.macros:
|
||||
source_file.macros.remove(unique_id)
|
||||
|
||||
# similar to schedule_nodes_for_parsing but doesn't do sources and exposures
|
||||
# and handles schema tests
|
||||
@@ -385,12 +413,21 @@ class PartialParsing:
|
||||
patch_list = []
|
||||
if key in schema_file.dict_from_yaml:
|
||||
patch_list = schema_file.dict_from_yaml[key]
|
||||
node_patch = self.get_schema_element(patch_list, name)
|
||||
if node_patch:
|
||||
self.delete_schema_mssa_links(schema_file, key, node_patch)
|
||||
self.merge_patch(schema_file, key, node_patch)
|
||||
if unique_id in schema_file.node_patches:
|
||||
schema_file.node_patches.remove(unique_id)
|
||||
patch = self.get_schema_element(patch_list, name)
|
||||
if patch:
|
||||
if key in ['models', 'seeds', 'snapshots']:
|
||||
self.delete_schema_mssa_links(schema_file, key, patch)
|
||||
self.merge_patch(schema_file, key, patch)
|
||||
if unique_id in schema_file.node_patches:
|
||||
schema_file.node_patches.remove(unique_id)
|
||||
elif key == 'sources':
|
||||
# re-schedule source
|
||||
if 'overrides' in patch:
|
||||
# This is a source patch; need to re-parse orig source
|
||||
self.remove_source_override_target(patch)
|
||||
self.delete_schema_source(schema_file, patch)
|
||||
self.remove_tests(schema_file, 'sources', patch['name'])
|
||||
self.merge_patch(schema_file, 'sources', patch)
|
||||
else:
|
||||
file_id = node.file_id
|
||||
if file_id in self.saved_files and file_id not in self.file_diff['deleted']:
|
||||
@@ -426,7 +463,13 @@ class PartialParsing:
|
||||
new_schema_file = self.new_files[file_id]
|
||||
saved_yaml_dict = saved_schema_file.dict_from_yaml
|
||||
new_yaml_dict = new_schema_file.dict_from_yaml
|
||||
saved_schema_file.pp_dict = {"version": saved_yaml_dict['version']}
|
||||
if 'version' in new_yaml_dict:
|
||||
# despite the fact that this goes in the saved_schema_file, it
|
||||
# should represent the new yaml dictionary, and should produce
|
||||
# an error if the updated yaml file doesn't have a version
|
||||
saved_schema_file.pp_dict = {"version": new_yaml_dict['version']}
|
||||
else:
|
||||
saved_schema_file.pp_dict = {}
|
||||
self.handle_schema_file_changes(saved_schema_file, saved_yaml_dict, new_yaml_dict)
|
||||
|
||||
# copy from new schema_file to saved_schema_file to preserve references
|
||||
@@ -611,8 +654,9 @@ class PartialParsing:
|
||||
def remove_tests(self, schema_file, dict_key, name):
|
||||
tests = schema_file.get_tests(dict_key, name)
|
||||
for test_unique_id in tests:
|
||||
node = self.saved_manifest.nodes.pop(test_unique_id)
|
||||
self.deleted_manifest.nodes[test_unique_id] = node
|
||||
if test_unique_id in self.saved_manifest.nodes:
|
||||
node = self.saved_manifest.nodes.pop(test_unique_id)
|
||||
self.deleted_manifest.nodes[test_unique_id] = node
|
||||
schema_file.remove_tests(dict_key, name)
|
||||
|
||||
def delete_schema_source(self, schema_file, source_dict):
|
||||
@@ -634,19 +678,17 @@ class PartialParsing:
|
||||
|
||||
def delete_schema_macro_patch(self, schema_file, macro):
|
||||
# This is just macro patches that need to be reapplied
|
||||
for unique_id in schema_file.macro_patches:
|
||||
parts = unique_id.split('.')
|
||||
macro_name = parts[-1]
|
||||
if macro_name == macro['name']:
|
||||
macro_unique_id = unique_id
|
||||
break
|
||||
macro_unique_id = None
|
||||
if macro['name'] in schema_file.macro_patches:
|
||||
macro_unique_id = schema_file.macro_patches[macro['name']]
|
||||
del schema_file.macro_patches[macro['name']]
|
||||
if macro_unique_id and macro_unique_id in self.saved_manifest.macros:
|
||||
macro = self.saved_manifest.macros.pop(macro_unique_id)
|
||||
self.deleted_manifest.macros[macro_unique_id] = macro
|
||||
macro_file_id = macro.file_id
|
||||
self.add_to_pp_files(self.saved_files[macro_file_id])
|
||||
if macro_unique_id in schema_file.macro_patches:
|
||||
schema_file.macro_patches.remove(macro_unique_id)
|
||||
if macro_file_id in self.new_files:
|
||||
self.saved_files[macro_file_id] = self.new_files[macro_file_id]
|
||||
self.add_to_pp_files(self.saved_files[macro_file_id])
|
||||
|
||||
# exposures are created only from schema files, so just delete
|
||||
# the exposure.
|
||||
|
||||
@@ -6,12 +6,13 @@ from dbt.contracts.files import (
|
||||
from dbt.parser.schemas import yaml_from_file, schema_file_keys, check_format_version
|
||||
from dbt.exceptions import CompilationException
|
||||
from dbt.parser.search import FilesystemSearcher
|
||||
from typing import Optional
|
||||
|
||||
|
||||
# This loads the files contents and creates the SourceFile object
|
||||
def load_source_file(
|
||||
path: FilePath, parse_file_type: ParseFileType,
|
||||
project_name: str) -> AnySourceFile:
|
||||
project_name: str) -> Optional[AnySourceFile]:
|
||||
file_contents = load_file_contents(path.absolute_path, strip=False)
|
||||
checksum = FileHash.from_contents(file_contents)
|
||||
sf_cls = SchemaSourceFile if parse_file_type == ParseFileType.Schema else SourceFile
|
||||
@@ -20,8 +21,11 @@ def load_source_file(
|
||||
source_file.contents = file_contents.strip()
|
||||
if parse_file_type == ParseFileType.Schema and source_file.contents:
|
||||
dfy = yaml_from_file(source_file)
|
||||
validate_yaml(source_file.path.original_file_path, dfy)
|
||||
source_file.dfy = dfy
|
||||
if dfy:
|
||||
validate_yaml(source_file.path.original_file_path, dfy)
|
||||
source_file.dfy = dfy
|
||||
else:
|
||||
source_file = None
|
||||
return source_file
|
||||
|
||||
|
||||
@@ -76,8 +80,10 @@ def get_source_files(project, paths, extension, parse_file_type):
|
||||
if parse_file_type == ParseFileType.Seed:
|
||||
fb_list.append(load_seed_source_file(fp, project.project_name))
|
||||
else:
|
||||
fb_list.append(load_source_file(
|
||||
fp, parse_file_type, project.project_name))
|
||||
file = load_source_file(fp, parse_file_type, project.project_name)
|
||||
# only append the list if it has contents. added to fix #3568
|
||||
if file:
|
||||
fb_list.append(file)
|
||||
return fb_list
|
||||
|
||||
|
||||
|
||||
@@ -190,9 +190,9 @@ class TestBuilder(Generic[Testable]):
r'(?P<test_name>([a-zA-Z_][0-9a-zA-Z_]*))'
)
# kwargs representing test configs
MODIFIER_ARGS = (
CONFIG_ARGS = (
'severity', 'tags', 'enabled', 'where', 'limit', 'warn_if', 'error_if',
'fail_calc', 'store_failures'
'fail_calc', 'store_failures', 'meta', 'database', 'schema', 'alias',
)

def __init__(
@@ -224,13 +224,24 @@ class TestBuilder(Generic[Testable]):
groups = match.groupdict()
self.name: str = groups['test_name']
self.namespace: str = groups['test_namespace']
self.modifiers: Dict[str, Any] = {}
for key in self.MODIFIER_ARGS:
self.config: Dict[str, Any] = {}

for key in self.CONFIG_ARGS:
value = self.args.pop(key, None)
# 'modifier' config could be either top level arg or in config
if value and 'config' in self.args and key in self.args['config']:
raise_compiler_error(
'Test cannot have the same key at the top-level and in config'
)
if not value and 'config' in self.args:
value = self.args['config'].pop(key, None)
if isinstance(value, str):
value = get_rendered(value, render_ctx, native=True)
if value is not None:
self.modifiers[key] = value
self.config[key] = value

if 'config' in self.args:
del self.args['config']

if self.namespace is not None:
self.package_name = self.namespace
@@ -240,8 +251,8 @@ class TestBuilder(Generic[Testable]):
self.fqn_name: str = fqn_name

# use hashed name as alias if too long
if compiled_name != fqn_name:
self.modifiers['alias'] = compiled_name
if compiled_name != fqn_name and 'alias' not in self.config:
self.config['alias'] = compiled_name

def _bad_type(self) -> TypeError:
return TypeError('invalid target type "{}"'.format(type(self.target)))
@@ -282,15 +293,15 @@ class TestBuilder(Generic[Testable]):

@property
def enabled(self) -> Optional[bool]:
return self.modifiers.get('enabled')
return self.config.get('enabled')

@property
def alias(self) -> Optional[str]:
return self.modifiers.get('alias')
return self.config.get('alias')

@property
def severity(self) -> Optional[str]:
sev = self.modifiers.get('severity')
sev = self.config.get('severity')
if sev:
return sev.upper()
else:
@@ -298,30 +309,72 @@ class TestBuilder(Generic[Testable]):

@property
def store_failures(self) -> Optional[bool]:
return self.modifiers.get('store_failures')
return self.config.get('store_failures')

@property
def where(self) -> Optional[str]:
return self.modifiers.get('where')
return self.config.get('where')

@property
def limit(self) -> Optional[int]:
return self.modifiers.get('limit')
return self.config.get('limit')

@property
def warn_if(self) -> Optional[str]:
return self.modifiers.get('warn_if')
return self.config.get('warn_if')

@property
def error_if(self) -> Optional[str]:
return self.modifiers.get('error_if')
return self.config.get('error_if')

@property
def fail_calc(self) -> Optional[str]:
return self.modifiers.get('fail_calc')
return self.config.get('fail_calc')

@property
def meta(self) -> Optional[dict]:
return self.config.get('meta')

@property
def database(self) -> Optional[str]:
return self.config.get('database')

@property
def schema(self) -> Optional[str]:
return self.config.get('schema')

def get_static_config(self):
config = {}
if self.alias is not None:
config['alias'] = self.alias
if self.severity is not None:
config['severity'] = self.severity
if self.enabled is not None:
config['enabled'] = self.enabled
if self.where is not None:
config['where'] = self.where
if self.limit is not None:
config['limit'] = self.limit
if self.warn_if is not None:
config['warn_if'] = self.warn_if
if self.error_if is not None:
config['error_id'] = self.error_if
if self.fail_calc is not None:
config['fail_calc'] = self.fail_calc
if self.store_failures is not None:
config['store_failures'] = self.store_failures
if self.meta is not None:
config['meta'] = self.meta
if self.database is not None:
config['database'] = self.database
if self.schema is not None:
config['schema'] = self.schema
if self.alias is not None:
config['alias'] = self.alias
return config

def tags(self) -> List[str]:
tags = self.modifiers.get('tags', [])
tags = self.config.get('tags', [])
if isinstance(tags, str):
tags = [tags]
if not isinstance(tags, list):
@@ -360,7 +413,7 @@ class TestBuilder(Generic[Testable]):
else str(value)
)
for key, value
in self.modifiers.items()
in self.config.items()
])
if configs:
return f"{{{{ config({configs}) }}}}"
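
The CONFIG_ARGS change above lets a generic test accept each of these keywords either at the top level of its definition or under a nested 'config' dictionary, and raises if the same key appears in both places. A minimal standalone sketch of that precedence rule, using illustrative names rather than dbt's internal API:

# Sketch only (assumed names, not dbt's code path): mirrors the duplicate-key
# check and config extraction performed by the CONFIG_ARGS loop above.
CONFIG_ARGS = ('severity', 'tags', 'enabled', 'where', 'limit', 'warn_if',
               'error_if', 'fail_calc', 'store_failures', 'meta',
               'database', 'schema', 'alias')

def extract_test_config(args: dict) -> dict:
    config = {}
    nested = args.get('config', {})
    for key in CONFIG_ARGS:
        value = args.pop(key, None)
        if value is not None and key in nested:
            raise ValueError(
                'Test cannot have the same key at the top-level and in config'
            )
        if value is None:
            value = nested.pop(key, None)
        if value is not None:
            config[key] = value
    args.pop('config', None)
    return config

# extract_test_config({'severity': 'warn', 'config': {'where': 'id > 0'}})
# returns {'severity': 'warn', 'where': 'id > 0'}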
@@ -22,8 +22,7 @@ from dbt.context.providers import (
generate_parse_exposure, generate_test_context
)
from dbt.context.macro_resolver import MacroResolver
from dbt.contracts.files import FileHash
from dbt.contracts.graph.manifest import SchemaSourceFile
from dbt.contracts.files import FileHash, SchemaSourceFile
from dbt.contracts.graph.parsed import (
ParsedNodePatch,
ColumnInfo,
@@ -47,7 +46,10 @@ from dbt.contracts.graph.unparsed import (
from dbt.exceptions import (
validator_error_message, JSONValidationException,
raise_invalid_schema_yml_version, ValidationException,
CompilationException,
CompilationException, raise_duplicate_patch_name,
raise_duplicate_macro_patch_name, InternalException,
raise_duplicate_source_patch_name,
warn_or_error,
)
from dbt.node_types import NodeType
from dbt.parser.base import SimpleParser
@@ -171,15 +173,15 @@ class SchemaParser(SimpleParser[SchemaTestBlock, ParsedSchemaTestNode]):
self.project.config_version == 2
)
if all_v_2:
ctx = generate_schema_yml(
self.render_ctx = generate_schema_yml(
self.root_project, self.project.project_name
)
else:
ctx = generate_target_context(
self.render_ctx = generate_target_context(
self.root_project, self.root_project.cli_vars
)

self.raw_renderer = SchemaYamlRenderer(ctx)
self.raw_renderer = SchemaYamlRenderer(self.render_ctx)

internal_package_names = get_adapter_package_names(
self.root_project.credentials.type
@@ -287,17 +289,13 @@ class SchemaParser(SimpleParser[SchemaTestBlock, ParsedSchemaTestNode]):
tags: List[str],
column_name: Optional[str],
) -> ParsedSchemaTestNode:

render_ctx = generate_target_context(
self.root_project, self.root_project.cli_vars
)
try:
builder = TestBuilder(
test=test,
target=target,
column_name=column_name,
package_name=target.package_name,
render_ctx=render_ctx,
render_ctx=self.render_ctx,
)
except CompilationException as exc:
context = _trimmed(str(target))
@@ -318,8 +316,8 @@ class SchemaParser(SimpleParser[SchemaTestBlock, ParsedSchemaTestNode]):
# is not necessarily this package's name
fqn = self.get_fqn(fqn_path, builder.fqn_name)

# this is the config that is used in render_update
config = self.initial_config(fqn)
# this is the ContextConfig that is used in render_update
config: ContextConfig = self.initial_config(fqn)

metadata = {
'namespace': builder.namespace,
@@ -360,37 +358,10 @@ class SchemaParser(SimpleParser[SchemaTestBlock, ParsedSchemaTestNode]):
node.depends_on.add_macro(macro_unique_id)
if (macro_unique_id in
['macro.dbt.test_not_null', 'macro.dbt.test_unique']):
self.update_parsed_node(node, config)
# manually set configs
# note: this does not respect generate_alias_name() macro
if builder.alias is not None:
node.unrendered_config['alias'] = builder.alias
node.config['alias'] = builder.alias
node.alias = builder.alias
if builder.severity is not None:
node.unrendered_config['severity'] = builder.severity
node.config['severity'] = builder.severity
if builder.enabled is not None:
node.unrendered_config['enabled'] = builder.enabled
node.config['enabled'] = builder.enabled
if builder.where is not None:
node.unrendered_config['where'] = builder.where
node.config['where'] = builder.where
if builder.limit is not None:
node.unrendered_config['limit'] = builder.limit
node.config['limit'] = builder.limit
if builder.warn_if is not None:
node.unrendered_config['warn_if'] = builder.warn_if
node.config['warn_if'] = builder.warn_if
if builder.error_if is not None:
node.unrendered_config['error_if'] = builder.error_if
node.config['error_if'] = builder.error_if
if builder.fail_calc is not None:
node.unrendered_config['fail_calc'] = builder.fail_calc
node.config['fail_calc'] = builder.fail_calc
if builder.store_failures is not None:
node.unrendered_config['store_failures'] = builder.store_failures
node.config['store_failures'] = builder.store_failures
config_call_dict = builder.get_static_config()
config._config_call_dict = config_call_dict
# This sets the config from dbt_project
self.update_parsed_node_config(node, config)
# source node tests are processed at patch_source time
if isinstance(builder.target, UnpatchedSourceDefinition):
sources = [builder.target.fqn[-2], builder.target.fqn[-1]]
@@ -410,7 +381,7 @@ class SchemaParser(SimpleParser[SchemaTestBlock, ParsedSchemaTestNode]):
get_rendered(
node.raw_sql, context, node, capture_macros=True
)
self.update_parsed_node(node, config)
self.update_parsed_node_config(node, config)
except ValidationError as exc:
# we got a ValidationError - probably bad types in config()
msg = validator_error_message(exc)
@@ -678,7 +649,14 @@ class SourceParser(YamlDocsReader):
if is_override:
data['path'] = self.yaml.path.original_file_path
patch = self._target_from_dict(SourcePatch, data)
self.manifest.add_source_patch(self.yaml.file, patch)
assert isinstance(self.yaml.file, SchemaSourceFile)
source_file = self.yaml.file
# source patches must be unique
key = (patch.overrides, patch.name)
if key in self.manifest.source_patches:
raise_duplicate_source_patch_name(patch, self.manifest.source_patches[key])
self.manifest.source_patches[key] = patch
source_file.source_patches.append(key)
else:
source = self._target_from_dict(UnparsedSourceDefinition, data)
self.add_source_definitions(source)
@@ -775,6 +753,9 @@ class NonSourceParser(YamlDocsReader, Generic[NonSourceTarget, Parsed]):
# target_type: UnparsedNodeUpdate, UnparsedAnalysisUpdate,
# or UnparsedMacroUpdate
self._target_type().validate(data)
if self.key != 'macros':
# macros don't have the 'config' key support yet
self.normalize_meta_attribute(data, path)
node = self._target_type().from_dict(data)
except (ValidationError, JSONValidationException) as exc:
msg = error_context(path, self.key, data, exc)
@@ -782,6 +763,33 @@ class NonSourceParser(YamlDocsReader, Generic[NonSourceTarget, Parsed]):
else:
yield node

# We want to raise an error if 'meta' is in two places, and move 'meta'
# from toplevel to config if necessary
def normalize_meta_attribute(self, data, path):
if 'meta' in data:
if 'config' in data and 'meta' in data['config']:
raise CompilationException(f"""
In {path}: found meta dictionary in 'config' dictionary and as top-level key.
Remove the top-level key and define it under 'config' dictionary only.
""".strip())
else:
if 'config' not in data:
data['config'] = {}
data['config']['meta'] = data.pop('meta')

def patch_node_config(self, node, patch):
# Get the ContextConfig that's used in calculating the config
# This must match the model resource_type that's being patched
config = ContextConfig(
self.schema_parser.root_project,
node.fqn,
node.resource_type,
self.schema_parser.project.project_name,
)
# We need to re-apply the config_call_dict after the patch config
config._config_call_dict = node.config_call_dict
self.schema_parser.update_parsed_node_config(node, config, patch_config_dict=patch.config)


class NodePatchParser(
NonSourceParser[NodeTarget, ParsedNodePatch],
@@ -790,6 +798,9 @@ class NodePatchParser(
def parse_patch(
self, block: TargetBlock[NodeTarget], refs: ParserRef
) -> None:
# We're not passing the ParsedNodePatch around anymore, so we
# could possibly skip creating one. Leaving here for now for
# code consistency.
patch = ParsedNodePatch(
name=block.target.name,
original_file_path=block.target.original_file_path,
@@ -799,8 +810,35 @@ class NodePatchParser(
columns=refs.column_info,
meta=block.target.meta,
docs=block.target.docs,
config=block.target.config,
)
self.manifest.add_patch(self.yaml.file, patch)
assert isinstance(self.yaml.file, SchemaSourceFile)
source_file: SchemaSourceFile = self.yaml.file
if patch.yaml_key in ['models', 'seeds', 'snapshots']:
unique_id = self.manifest.ref_lookup.get_unique_id(patch.name, None)
elif patch.yaml_key == 'analyses':
unique_id = self.manifest.analysis_lookup.get_unique_id(patch.name, None)
else:
raise InternalException(
f'Unexpected yaml_key {patch.yaml_key} for patch in '
f'file {source_file.path.original_file_path}'
)
if unique_id is None:
# This will usually happen when a node is disabled
return

# patches can't be overwritten
node = self.manifest.nodes.get(unique_id)
if node:
if node.patch_path:
package_name, existing_file_path = node.patch_path.split('://')
raise_duplicate_patch_name(patch, existing_file_path)
source_file.append_patch(patch.yaml_key, unique_id)
# If this patch has config changes, re-calculate the node config
# with the patch config
if patch.config:
self.patch_node_config(node, patch)
node.patch(patch)


class TestablePatchParser(NodePatchParser[UnparsedNodeUpdate]):
@@ -838,8 +876,24 @@ class MacroPatchParser(NonSourceParser[UnparsedMacroUpdate, ParsedMacroPatch]):
description=block.target.description,
meta=block.target.meta,
docs=block.target.docs,
config=block.target.config,
)
self.manifest.add_macro_patch(self.yaml.file, patch)
assert isinstance(self.yaml.file, SchemaSourceFile)
source_file = self.yaml.file
# macros are fully namespaced
unique_id = f'macro.{patch.package_name}.{patch.name}'
macro = self.manifest.macros.get(unique_id)
if not macro:
warn_or_error(
f'WARNING: Found patch for macro "{patch.name}" '
f'which was not found'
)
return
if macro.patch_path:
package_name, existing_file_path = macro.patch_path.split('://')
raise_duplicate_macro_patch_name(patch, existing_file_path)
source_file.macro_patches[patch.name] = unique_id
macro.patch(patch)


class ExposureParser(YamlReader):
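
The normalize_meta_attribute method added above enforces one rule: 'meta' may appear at the top level of a schema.yml entry or under 'config', never both, and always ends up stored under 'config'. A standalone sketch of that behaviour, with an illustrative function name rather than dbt's code path:

# Sketch only: same normalization rule as normalize_meta_attribute above.
def normalize_meta(data: dict, path: str) -> dict:
    if 'meta' in data:
        if 'meta' in data.get('config', {}):
            raise ValueError(
                f"In {path}: found meta dictionary in 'config' dictionary "
                "and as top-level key. Remove the top-level key."
            )
        data.setdefault('config', {})['meta'] = data.pop('meta')
    return data

# normalize_meta({'name': 'my_model', 'meta': {'owner': 'data-eng'}}, 'schema.yml')
# returns {'name': 'my_model', 'config': {'meta': {'owner': 'data-eng'}}}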
@@ -286,7 +286,7 @@ class SourcePatcher:
)

return generator.calculate_node_config(
config_calls=[],
config_call_dict={},
fqn=fqn,
resource_type=NodeType.Source,
project_name=project_name,
@@ -1,5 +1,8 @@
from dataclasses import dataclass
import re
from typing import List

from packaging import version as packaging_version

from dbt.exceptions import VersionsNotCompatibleException
import dbt.utils
@@ -125,12 +128,26 @@ class VersionSpecifier(VersionSpecification):
if self.is_unbounded or other.is_unbounded:
return 0

for key in ['major', 'minor', 'patch']:
comparison = int(getattr(self, key)) - int(getattr(other, key))

if comparison > 0:
for key in ['major', 'minor', 'patch', 'prerelease']:
(a, b) = (getattr(self, key), getattr(other, key))
if key == 'prerelease':
if a is None and b is None:
continue
if a is None:
if self.matcher == Matchers.LESS_THAN:
# If 'a' is not a pre-release but 'b' is, and b must be
# less than a, return -1 to prevent installations of
# pre-releases with greater base version than a
# maximum specified non-pre-release version.
return -1
# Otherwise, stable releases are considered greater than
# pre-release
return 1
if b is None:
return -1
if packaging_version.parse(a) > packaging_version.parse(b):
return 1
elif comparison < 0:
elif packaging_version.parse(a) < packaging_version.parse(b):
return -1

equal = ((self.matcher == Matchers.GREATER_THAN_OR_EQUAL and
@@ -408,10 +425,23 @@ def resolve_to_specific_version(requested_range, available_versions):
version = VersionSpecifier.from_version_string(version_string)

if(versions_compatible(version,
requested_range.start,
requested_range.end) and
requested_range.start, requested_range.end) and
(max_version is None or max_version.compare(version) < 0)):
max_version = version
max_version_string = version_string

return max_version_string


def filter_installable(
versions: List[str],
install_prerelease: bool
) -> List[str]:
if install_prerelease:
return versions
installable = []
for version_string in versions:
version = VersionSpecifier.from_version_string(version_string)
if not version.prerelease:
installable.append(version_string)
return installable
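
The semver changes above make version comparison pre-release aware and add filter_installable so that pre-release package versions are skipped unless explicitly requested. A rough equivalent of that selection behaviour, written directly against packaging.version for illustration (not dbt's implementation):

# Sketch only: "skip pre-releases unless asked", as filter_installable does.
from packaging.version import Version

def pick_latest(versions, install_prerelease=False):
    candidates = [v for v in versions
                  if install_prerelease or not Version(v).is_prerelease]
    return max(candidates, key=Version) if candidates else None

# pick_latest(['0.20.2', '0.21.0b2'])                          -> '0.20.2'
# pick_latest(['0.20.2', '0.21.0b2'], install_prerelease=True) -> '0.21.0b2'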
@@ -158,7 +158,7 @@ class ConfiguredTask(BaseTask):


INTERNAL_ERROR_STRING = """This is an error in dbt. Please try again. If \
the error persists, open an issue at https://github.com/fishtown-analytics/dbt
the error persists, open an issue at https://github.com/dbt-labs/dbt
""".strip()
@@ -1,6 +1,4 @@
from .compile import CompileTask

from .run import ModelRunner as run_model_runner
from .run import RunTask, ModelRunner as run_model_runner
from .snapshot import SnapshotRunner as snapshot_model_runner
from .seed import SeedRunner as seed_runner
from .test import TestRunner as test_runner
@@ -10,7 +8,7 @@ from dbt.exceptions import InternalException
from dbt.node_types import NodeType


class BuildTask(CompileTask):
class BuildTask(RunTask):
"""The Build task processes all assets of a given process and attempts to 'build'
them in an opinionated fashion. Every resource type outlined in RUNNER_MAP
will be processed by the mapped runner class.
@@ -19,7 +19,7 @@ from dbt.exceptions import RuntimeException, InternalException
from dbt.logger import print_timestamped_line
from dbt.node_types import NodeType

from dbt.graph import NodeSelector, SelectionSpec, parse_difference
from dbt.graph import ResourceTypeSelector, SelectionSpec, parse_difference
from dbt.contracts.graph.parsed import ParsedSourceDefinition


@@ -117,7 +117,7 @@ class FreshnessRunner(BaseRunner):
return self.node


class FreshnessSelector(NodeSelector):
class FreshnessSelector(ResourceTypeSelector):
def node_is_match(self, node):
if not super().node_is_match(node):
return False
@@ -137,11 +137,16 @@ class FreshnessTask(GraphRunnableTask):
return False

def get_selection_spec(self) -> SelectionSpec:
include = [
'source:{}'.format(s)
for s in (self.args.selected or ['*'])
]
spec = parse_difference(include, None)
"""Generates a selection spec from task arguments to use when
processing graph. A SelectionSpec describes what nodes to select
when creating queue from graph of nodes.
"""
if self.args.selector_name:
# use pre-defined selector (--selector) to create selection spec
spec = self.config.get_selector(self.args.selector_name)
else:
# use --select and --exclude args to create selection spec
spec = parse_difference(self.args.select, self.args.exclude)
return spec

def get_node_selector(self):
@@ -153,6 +158,7 @@ class FreshnessTask(GraphRunnableTask):
graph=self.graph,
manifest=self.manifest,
previous_state=self.previous_state,
resource_types=[NodeType.Source]
)

def get_runner_type(self, _):
@@ -87,9 +87,12 @@ def print_hook_end_line(


def print_skip_line(
model, schema: str, relation: str, index: int, num_models: int
node, schema: str, relation: str, index: int, num_models: int
) -> None:
msg = 'SKIP relation {}.{}'.format(schema, relation)
if node.resource_type in NodeType.refable():
msg = f'SKIP relation {schema}.{relation}'
else:
msg = f'SKIP {node.resource_type} {node.name}'
print_fancy_output_line(
msg, ui.yellow('SKIP'), logger.info, index, num_models)
@@ -21,6 +21,7 @@ from dbt.contracts.rpc import (
RPCSnapshotParameters,
RPCSourceFreshnessParameters,
RPCListParameters,
RPCBuildParameters,
)
from dbt.exceptions import RuntimeException
from dbt.rpc.method import (
@@ -37,6 +38,7 @@ from dbt.task.seed import SeedTask
from dbt.task.snapshot import SnapshotTask
from dbt.task.test import TestTask
from dbt.task.list import ListTask
from dbt.task.build import BuildTask

from .base import RPCTask
from .cli import HasCLI
@@ -228,15 +230,24 @@ class RemoteSourceFreshnessTask(
RPCCommandTask[RPCSourceFreshnessParameters],
FreshnessTask
):
METHOD_NAME = 'snapshot-freshness'
METHOD_NAME = 'source-freshness'

def set_args(self, params: RPCSourceFreshnessParameters) -> None:
self.args.selected = self._listify(params.select)
self.args.select = self._listify(params.select)
self.args.exclude = self._listify(params.exclude)
self.args.selector_name = params.selector
if params.threads is not None:
self.args.threads = params.threads
self.args.output = None


class RemoteSourceSnapshotFreshnessTask(
RemoteSourceFreshnessTask
):
""" Deprecated task method name, aliases to `source-freshness` """
METHOD_NAME = 'snapshot-freshness'


# this is a weird and special method.
class GetManifest(
RemoteManifestMethod[GetManifestParameters, GetManifestResult]
@@ -296,3 +307,22 @@ class RemoteListTask(
output=[json.loads(x) for x in results],
logs=None
)


class RemoteBuildProjectTask(RPCCommandTask[RPCBuildParameters], BuildTask):
METHOD_NAME = 'build'

def set_args(self, params: RPCBuildParameters) -> None:
self.args.models = self._listify(params.models)
self.args.exclude = self._listify(params.exclude)
self.args.selector_name = params.selector

if params.threads is not None:
self.args.threads = params.threads
if params.defer is None:
self.args.defer = flags.DEFER_MODE
else:
self.args.defer = params.defer

self.args.state = state_path(params.state)
self.set_previous_state()
@@ -31,6 +31,7 @@ DEPRECATION_WARN_SPEC = 'iglu:com.dbt/deprecation_warn/jsonschema/1-0-0'
LOAD_ALL_TIMING_SPEC = 'iglu:com.dbt/load_all_timing/jsonschema/1-0-3'
RESOURCE_COUNTS = 'iglu:com.dbt/resource_counts/jsonschema/1-0-0'
EXPERIMENTAL_PARSER = 'iglu:com.dbt/experimental_parser/jsonschema/1-0-0'
PARTIAL_PARSER = 'iglu:com.dbt/partial_parser/jsonschema/1-0-1'
DBT_INVOCATION_ENV = 'DBT_INVOCATION_ENV'


@@ -131,7 +132,7 @@ class User:
# will change in every dbt invocation until the user points to a
# profile dir file which contains a valid profiles.yml file.
#
# See: https://github.com/fishtown-analytics/dbt/issues/1645
# See: https://github.com/dbt-labs/dbt/issues/1645

user = {"id": str(uuid.uuid4())}

@@ -426,7 +427,7 @@ def track_invalid_invocation(
def track_experimental_parser_sample(options):
context = [SelfDescribingJson(EXPERIMENTAL_PARSER, options)]
assert active_user is not None, \
'Cannot track project loading time when active user is None'
'Cannot track experimental parser info when active user is None'

track(
active_user,
@@ -437,9 +438,28 @@ def track_experimental_parser_sample(options):
)


def track_partial_parser(options):
context = [SelfDescribingJson(PARTIAL_PARSER, options)]
assert active_user is not None, \
'Cannot track partial parser info when active user is None'

track(
active_user,
category='dbt',
action='partial_parser',
label=active_user.invocation_id,
context=context
)


def flush():
logger.debug("Flushing usage events")
tracker.flush()
try:
tracker.flush()
except Exception:
logger.debug(
"An error was encountered while trying to flush usage events"
)


def disable_tracking():
@@ -6,6 +6,7 @@ import decimal
import functools
import hashlib
import itertools
import jinja2
import json
import os
from contextlib import contextmanager
@@ -306,14 +307,16 @@ def timestring() -> str:

class JSONEncoder(json.JSONEncoder):
"""A 'custom' json encoder that does normal json encoder things, but also
handles `Decimal`s. Naturally, this can lose precision because they get
converted to floats.
handles `Decimal`s. and `Undefined`s. Decimals can lose precision because
they get converted to floats. Undefined's are serialized to an empty string
"""
def default(self, obj):
if isinstance(obj, DECIMALS):
return float(obj)
if isinstance(obj, (datetime.datetime, datetime.date, datetime.time)):
return obj.isoformat()
if isinstance(obj, jinja2.Undefined):
return ""
if hasattr(obj, 'to_dict'):
# if we have a to_dict we should try to serialize the result of
# that!
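
The JSONEncoder change above adds handling for jinja2 Undefined values alongside Decimals. A quick standalone check of that behaviour (assumed class name, not dbt's encoder):

# Sketch only: Decimals become floats, jinja2 Undefined becomes "".
import decimal
import json

import jinja2

class SketchEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, decimal.Decimal):
            return float(obj)
        if isinstance(obj, jinja2.Undefined):
            return ""
        return super().default(obj)

print(json.dumps({'x': decimal.Decimal('1.5'), 'y': jinja2.Undefined()},
                 cls=SketchEncoder))
# prints: {"x": 1.5, "y": ""}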
@@ -96,5 +96,5 @@ def _get_dbt_plugins_info():
yield plugin_name, mod.version


__version__ = '0.21.0a1'
__version__ = '0.21.0b2'
installed = get_installed_version()
@@ -284,12 +284,12 @@ def parse_args(argv=None):
parser.add_argument('adapter')
parser.add_argument('--title-case', '-t', default=None)
parser.add_argument('--dependency', action='append')
parser.add_argument('--dbt-core-version', default='0.21.0a1')
parser.add_argument('--dbt-core-version', default='0.21.0b2')
parser.add_argument('--email')
parser.add_argument('--author')
parser.add_argument('--url')
parser.add_argument('--sql', action='store_true')
parser.add_argument('--package-version', default='0.21.0a1')
parser.add_argument('--package-version', default='0.21.0b2')
parser.add_argument('--project-version', default='1.0')
parser.add_argument(
'--no-dependency', action='store_false', dest='set_dependency'
@@ -24,7 +24,7 @@ def read(fname):


package_name = "dbt-core"
package_version = "0.21.0a1"
package_version = "0.21.0b2"
description = """dbt (data build tool) is a command line tool that helps \
analysts and engineers transform data in their warehouse more effectively"""

@@ -34,9 +34,9 @@ setup(
version=package_version,
description=description,
long_description=description,
author="Fishtown Analytics",
author_email="info@fishtownanalytics.com",
url="https://github.com/fishtown-analytics/dbt",
author="dbt Labs",
author_email="info@dbtlabs.com",
url="https://github.com/dbt-labs/dbt",
packages=find_namespace_packages(include=['dbt', 'dbt.*']),
include_package_data = True,
test_suite='test',
@@ -63,7 +63,7 @@ setup(
'networkx>=2.3,<3',
'packaging~=20.9',
'sqlparse>=0.2.3,<0.4',
'dbt-extractor==0.2.0',
'dbt-extractor==0.4.0',
'typing-extensions>=3.7.4,<3.11',
'werkzeug>=1,<3',
# the following are all to match snowflake-connector-python
@@ -1,4 +1,4 @@
ARG BASE_IMAGE="python:3.8-slim-buster"
ARG BASE_IMAGE=python:3.8-slim-bullseye

FROM $BASE_IMAGE
ARG BASE_REQUIREMENTS_SRC_PATH
75
docker/requirements/requirements.0.21.0b1.txt
Normal file
@@ -0,0 +1,75 @@
agate==1.6.1
asn1crypto==1.4.0
attrs==21.2.0
azure-common==1.1.27
azure-core==1.16.0
azure-storage-blob==12.8.1
Babel==2.9.1
boto3==1.18.12
botocore==1.21.12
cachetools==4.2.2
certifi==2021.5.30
cffi==1.14.6
chardet==4.0.0
charset-normalizer==2.0.4
colorama==0.4.4
cryptography==3.4.7
google-api-core==1.31.1
google-auth==1.34.0
google-cloud-bigquery==2.23.2
google-cloud-core==1.7.2
google-crc32c==1.1.2
google-resumable-media==1.3.3
googleapis-common-protos==1.53.0
grpcio==1.39.0
hologram==0.0.14
idna==3.2
importlib-metadata==4.6.3
isodate==0.6.0
jeepney==0.7.1
Jinja2==2.11.3
jmespath==0.10.0
json-rpc==1.13.0
jsonschema==3.1.1
keyring==21.8.0
leather==0.3.3
Logbook==1.5.3
MarkupSafe==2.0.1
mashumaro==2.5
minimal-snowplow-tracker==0.0.2
msgpack==1.0.2
msrest==0.6.21
networkx==2.6.2
oauthlib==3.1.1
oscrypto==1.2.1
packaging==20.9
parsedatetime==2.6
proto-plus==1.19.0
protobuf==3.17.3
psycopg2-binary==2.9.1
pyasn1==0.4.8
pyasn1-modules==0.2.8
pycparser==2.20
pycryptodomex==3.10.1
PyJWT==2.1.0
pyOpenSSL==20.0.1
pyparsing==2.4.7
pyrsistent==0.18.0
python-dateutil==2.8.2
python-slugify==5.0.2
pytimeparse==1.1.8
pytz==2021.1
PyYAML==5.4.1
requests==2.26.0
requests-oauthlib==1.3.0
rsa==4.7.2
s3transfer==0.5.0
SecretStorage==3.3.1
six==1.16.0
snowflake-connector-python==2.5.1
sqlparse==0.3.1
text-unidecode==1.3
typing-extensions==3.10.0.0
urllib3==1.26.6
Werkzeug==2.0.1
zipp==3.5.0
75
docker/requirements/requirements.0.21.0b2.txt
Normal file
@@ -0,0 +1,75 @@
agate==1.6.1
asn1crypto==1.4.0
attrs==21.2.0
azure-common==1.1.27
azure-core==1.17.0
azure-storage-blob==12.8.1
Babel==2.9.1
boto3==1.18.25
botocore==1.21.25
cachetools==4.2.2
certifi==2021.5.30
cffi==1.14.6
chardet==4.0.0
charset-normalizer==2.0.4
colorama==0.4.4
cryptography==3.4.7
google-api-core==1.31.2
google-auth==1.35.0
google-cloud-bigquery==2.24.1
google-cloud-core==1.7.2
google-crc32c==1.1.2
google-resumable-media==2.0.0
googleapis-common-protos==1.53.0
grpcio==1.39.0
hologram==0.0.14
idna==3.2
importlib-metadata==4.6.4
isodate==0.6.0
jeepney==0.7.1
Jinja2==2.11.3
jmespath==0.10.0
json-rpc==1.13.0
jsonschema==3.1.1
keyring==21.8.0
leather==0.3.3
Logbook==1.5.3
MarkupSafe==2.0.1
mashumaro==2.5
minimal-snowplow-tracker==0.0.2
msgpack==1.0.2
msrest==0.6.21
networkx==2.6.2
oauthlib==3.1.1
oscrypto==1.2.1
packaging==20.9
parsedatetime==2.6
proto-plus==1.19.0
protobuf==3.17.3
psycopg2-binary==2.9.1
pyasn1==0.4.8
pyasn1-modules==0.2.8
pycparser==2.20
pycryptodomex==3.10.1
PyJWT==2.1.0
pyOpenSSL==20.0.1
pyparsing==2.4.7
pyrsistent==0.18.0
python-dateutil==2.8.2
python-slugify==5.0.2
pytimeparse==1.1.8
pytz==2021.1
PyYAML==5.4.1
requests==2.26.0
requests-oauthlib==1.3.0
rsa==4.7.2
s3transfer==0.5.0
SecretStorage==3.3.1
six==1.16.0
snowflake-connector-python==2.5.1
sqlparse==0.3.1
text-unidecode==1.3
typing-extensions==3.10.0.0
urllib3==1.26.6
Werkzeug==2.0.1
zipp==3.5.0
@@ -7,5 +7,5 @@ models:
- relationships:
field: id
to: node_0
name: node_0
name: node_3
version: 2
@@ -7,5 +7,5 @@ models:
- relationships:
field: id
to: node_0
name: node_1
name: node_4
version: 2
@@ -0,0 +1,5 @@
select 1 as id
union all
select * from {{ ref('node_0') }}
union all
select * from {{ ref('node_2') }}
@@ -7,5 +7,5 @@ models:
- relationships:
field: id
to: node_0
name: node_2
name: node_5
version: 2
@@ -0,0 +1,5 @@
select 1 as id
union all
select * from {{ ref('node_0') }}
union all
select * from {{ ref('node_3') }}
@@ -0,0 +1,11 @@
models:
- columns:
- name: id
tests:
- unique
- not_null
- relationships:
field: id
to: node_0
name: node_6
version: 2
@@ -0,0 +1,7 @@
select 1 as id
union all
select * from {{ ref('node_0') }}
union all
select * from {{ ref('node_3') }}
union all
select * from {{ ref('node_6') }}
@@ -0,0 +1,11 @@
models:
- columns:
- name: id
tests:
- unique
- not_null
- relationships:
field: id
to: node_0
name: node_7
version: 2
@@ -0,0 +1,7 @@
select 1 as id
union all
select * from {{ ref('node_0') }}
union all
select * from {{ ref('node_3') }}
union all
select * from {{ ref('node_6') }}
@@ -0,0 +1,11 @@
models:
- columns:
- name: id
tests:
- unique
- not_null
- relationships:
field: id
to: node_0
name: node_8
version: 2
@@ -0,0 +1,9 @@
select 1 as id
union all
select * from {{ ref('node_0') }}
union all
select * from {{ ref('node_3') }}
union all
select * from {{ ref('node_6') }}
union all
select * from {{ ref('node_7') }}
@@ -0,0 +1,11 @@
models:
- columns:
- name: id
tests:
- unique
- not_null
- relationships:
field: id
to: node_0
name: node_9
version: 2
@@ -0,0 +1,9 @@
select 1 as id
union all
select * from {{ ref('node_0') }}
union all
select * from {{ ref('node_3') }}
union all
select * from {{ ref('node_6') }}
union all
select * from {{ ref('node_8') }}
@@ -0,0 +1,11 @@
models:
- columns:
- name: id
tests:
- unique
- not_null
- relationships:
field: id
to: node_0
name: node_10
version: 2
@@ -0,0 +1,3 @@
select 1 as id
union all
select * from {{ ref('node_0') }}
@@ -0,0 +1,11 @@
models:
- columns:
- name: id
tests:
- unique
- not_null
- relationships:
field: id
to: node_0
name: node_11
version: 2
@@ -0,0 +1,11 @@
select 1 as id
union all
select * from {{ ref('node_0') }}
union all
select * from {{ ref('node_3') }}
union all
select * from {{ ref('node_6') }}
union all
select * from {{ ref('node_8') }}
union all
select * from {{ ref('node_10') }}
@@ -0,0 +1,11 @@
models:
- columns:
- name: id
tests:
- unique
- not_null
- relationships:
field: id
to: node_0
name: node_12
version: 2
@@ -0,0 +1,5 @@
select 1 as id
union all
select * from {{ ref('node_0') }}
union all
select * from {{ ref('node_2') }}
@@ -0,0 +1,11 @@
models:
- columns:
- name: id
tests:
- unique
- not_null
- relationships:
field: id
to: node_0
name: node_13
version: 2
@@ -0,0 +1,5 @@
select 1 as id
union all
select * from {{ ref('node_0') }}
union all
select * from {{ ref('node_11') }}
@@ -0,0 +1,11 @@
models:
- columns:
- name: id
tests:
- unique
- not_null
- relationships:
field: id
to: node_0
name: node_14
version: 2
@@ -0,0 +1,9 @@
select 1 as id
union all
select * from {{ ref('node_0') }}
union all
select * from {{ ref('node_3') }}
union all
select * from {{ ref('node_6') }}
union all
select * from {{ ref('node_7') }}
@@ -0,0 +1,11 @@
models:
- columns:
- name: id
tests:
- unique
- not_null
- relationships:
field: id
to: node_0
name: node_15
version: 2
@@ -0,0 +1,7 @@
select 1 as id
union all
select * from {{ ref('node_0') }}
union all
select * from {{ ref('node_3') }}
union all
select * from {{ ref('node_6') }}
@@ -0,0 +1,11 @@
models:
- columns:
- name: id
tests:
- unique
- not_null
- relationships:
field: id
to: node_0
name: node_16
version: 2
@@ -0,0 +1,9 @@
select 1 as id
union all
select * from {{ ref('node_0') }}
union all
select * from {{ ref('node_3') }}
union all
select * from {{ ref('node_6') }}
union all
select * from {{ ref('node_8') }}
@@ -0,0 +1,11 @@
models:
- columns:
- name: id
tests:
- unique
- not_null
- relationships:
field: id
to: node_0
name: node_17
version: 2
@@ -0,0 +1,5 @@
select 1 as id
union all
select * from {{ ref('node_0') }}
union all
select * from {{ ref('node_3') }}
Some files were not shown because too many files have changed in this diff