mirror of https://github.com/dbt-labs/dbt-core
synced 2025-12-18 20:21:27 +00:00

Compare commits: jerco/setu... ... adding-sem... (56 commits)
| SHA1 |
|---|
| 2da925aa25 |
| 43e24c5ae6 |
| 89d111a5f6 |
| e1b5e68904 |
| 065ab2ebc2 |
| 20c95a4993 |
| c40b488cb4 |
| 585e7c59e8 |
| 7077c47551 |
| f789b2535a |
| 2bfc6917e2 |
| d74ae19523 |
| 1c7c23ac73 |
| 86e8722cd8 |
| 7a61602738 |
| dd4b47d8b1 |
| eb200b4687 |
| 0fc080d222 |
| 5da63602b3 |
| 457ff3ef48 |
| 0dbdecef10 |
| b13b0e9492 |
| b9fdfd9e36 |
| 4d6352db14 |
| 9eb82c6497 |
| 89cc89dfdf |
| 2b0f6597a4 |
| 294def205f |
| 34fa703466 |
| ab3f8dcbfd |
| 02c20477b9 |
| d9a4ee126a |
| 94d6d19fb4 |
| d43c070007 |
| 9ef236601b |
| 9d6f961d2b |
| 5453840950 |
| d453964546 |
| 748a932811 |
| 8217ad4722 |
| 6ef3fbbf76 |
| 76fd12c7cd |
| 9ecb6e50e4 |
| ce9d0afb8a |
| c39ea807e8 |
| 1e35339389 |
| 304797b099 |
| b9bdb775ab |
| df93858b4b |
| e8da84fb9e |
| 7e90e067af |
| 5e4e917de5 |
| 05dc0212e7 |
| c00052cbfb |
| 3d54a83822 |
| fafd5edbda |
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 1.4.0a1
+current_version = 1.5.0a1
 parse = (?P<major>\d+)
 	\.(?P<minor>\d+)
 	\.(?P<patch>\d+)
@@ -3,6 +3,7 @@
 For information on prior major and minor releases, see their changelogs:

+* [1.4](https://github.com/dbt-labs/dbt-core/blob/1.4.latest/CHANGELOG.md)
 * [1.3](https://github.com/dbt-labs/dbt-core/blob/1.3.latest/CHANGELOG.md)
 * [1.2](https://github.com/dbt-labs/dbt-core/blob/1.2.latest/CHANGELOG.md)
 * [1.1](https://github.com/dbt-labs/dbt-core/blob/1.1.latest/CHANGELOG.md)
@@ -1,6 +0,0 @@
-kind: "Dependencies"
-body: "Update pathspec requirement from ~=0.9.0 to >=0.9,<0.11 in /core"
-time: 2022-09-23T00:06:46.00000Z
-custom:
-  Author: dependabot[bot]
-  PR: "5917"

@@ -1,6 +0,0 @@
-kind: "Dependencies"
-body: "Bump black from 22.8.0 to 22.10.0"
-time: 2022-10-07T00:08:48.00000Z
-custom:
-  Author: dependabot[bot]
-  PR: "6019"

@@ -1,6 +0,0 @@
-kind: "Dependencies"
-body: "Bump mashumaro[msgpack] from 3.0.4 to 3.1.1 in /core"
-time: 2022-10-20T00:07:53.00000Z
-custom:
-  Author: dependabot[bot]
-  PR: "6108"

@@ -1,6 +0,0 @@
-kind: "Dependencies"
-body: "Update colorama requirement from <0.4.6,>=0.3.9 to >=0.3.9,<0.4.7 in /core"
-time: 2022-10-26T00:09:10.00000Z
-custom:
-  Author: dependabot[bot]
-  PR: "6144"

@@ -1,7 +0,0 @@
-kind: "Dependency"
-body: "Bump mashumaro[msgpack] from 3.1.1 to 3.2 in /core"
-time: 2022-12-05T00:21:18.00000Z
-custom:
-  Author: dependabot[bot]
-  Issue: 4904
-  PR: 6375

@@ -1,6 +0,0 @@
-kind: Docs
-body: minor doc correction
-time: 2022-09-08T15:41:57.689162-04:00
-custom:
-  Author: andy-clapson
-  Issue: "5791"

@@ -1,6 +0,0 @@
-kind: Docs
-body: Generate API docs for new CLI interface
-time: 2022-10-07T09:06:56.446078-05:00
-custom:
-  Author: stu-k
-  Issue: "5528"

@@ -1,5 +0,0 @@
-kind: Docs
-time: 2022-10-17T17:14:11.715348-05:00
-custom:
-  Author: paulbenschmidt
-  Issue: "5880"

@@ -1,6 +0,0 @@
-kind: Docs
-body: Fix rendering of sample code for metrics
-time: 2022-11-16T15:57:43.204201+01:00
-custom:
-  Author: jtcohen6
-  Issue: "323"

@@ -1,6 +0,0 @@
-kind: Docs
-body: Alphabetize `core/dbt/README.md`
-time: 2022-12-02T15:05:23.695333-07:00
-custom:
-  Author: dbeatty10
-  Issue: "6368"

@@ -1,7 +0,0 @@
-kind: Features
-body: Added favor-state flag to optionally favor state nodes even if unselected node
-  exists
-time: 2022-04-08T16:54:59.696564+01:00
-custom:
-  Author: daniel-murray josephberni
-  Issue: "2968"

@@ -1,6 +0,0 @@
-kind: Features
-body: Update structured logging. Convert to using protobuf messages. Ensure events are enriched with node_info.
-time: 2022-08-17T15:48:57.225267-04:00
-custom:
-  Author: gshank
-  Issue: "5610"

@@ -1,6 +0,0 @@
-kind: Features
-body: Friendlier error messages when packages.yml is malformed
-time: 2022-09-12T12:59:35.121188+01:00
-custom:
-  Author: jared-rimmer
-  Issue: "5486"

@@ -1,6 +0,0 @@
-kind: Features
-body: Migrate dbt-utils current_timestamp macros into core + adapters
-time: 2022-09-14T09:56:25.97818-07:00
-custom:
-  Author: colin-rogers-dbt
-  Issue: "5521"

@@ -1,6 +0,0 @@
-kind: Features
-body: Allow partitions in external tables to be supplied as a list
-time: 2022-09-25T21:16:51.051239654+02:00
-custom:
-  Author: pgoslatara
-  Issue: "5929"

@@ -1,6 +0,0 @@
-kind: Features
-body: extend -f flag shorthand for seed command
-time: 2022-10-03T11:07:05.381632-05:00
-custom:
-  Author: dave-connors-3
-  Issue: "5990"

@@ -1,7 +0,0 @@
-kind: Features
-body: This pulls the profile name from args when constructing a RuntimeConfig in lib.py,
-  enabling the dbt-server to override the value that's in the dbt_project.yml
-time: 2022-11-02T15:00:03.000805-05:00
-custom:
-  Author: racheldaniel
-  Issue: "6201"

@@ -1,8 +0,0 @@
-kind: Features
-body: Adding tarball install method for packages. Allowing package tarball to be specified
-  via url in the packages.yaml.
-time: 2022-11-07T10:50:18.464545-05:00
-custom:
-  Author: timle2
-  Issue: "4205"
-  PR: "4689"

@@ -1,6 +0,0 @@
-kind: Features
-body: Added an md5 function to the base context
-time: 2022-11-14T18:52:07.788593+02:00
-custom:
-  Author: haritamar
-  Issue: "6246"

@@ -1,6 +0,0 @@
-kind: Features
-body: Exposures support metrics in lineage
-time: 2022-11-30T11:29:13.256034-05:00
-custom:
-  Author: michelleark
-  Issue: "6057"

@@ -1,7 +0,0 @@
-kind: Features
-body: Add support for Python 3.11
-time: 2022-12-06T15:07:04.753127+01:00
-custom:
-  Author: joshuataylor MichelleArk jtcohen6
-  Issue: "6147"
-  PR: "6326"
.changes/unreleased/Features-20230118-134804.yaml (new file, +6)

@@ -0,0 +1,6 @@
+kind: Features
+body: Adding the entity node
+time: 2023-01-18T13:48:04.487817-06:00
+custom:
+  Author: callum-mcdata
+  Issue: "6627"
@@ -1,6 +0,0 @@
-kind: Fixes
-body: Account for disabled flags on models in schema files more completely
-time: 2022-09-16T10:48:54.162273-05:00
-custom:
-  Author: emmyoop
-  Issue: "3992"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Add validation of enabled config for metrics, exposures and sources
-time: 2022-10-10T11:32:18.752322-05:00
-custom:
-  Author: emmyoop
-  Issue: "6030"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: check length of args of python model function before accessing it
-time: 2022-10-11T16:07:15.464093-04:00
-custom:
-  Author: chamini2
-  Issue: "6041"

@@ -1,7 +0,0 @@
-kind: Fixes
-body: Add functors to ensure event types with str-type attributes are initialized
-  to spec, even when provided non-str type params.
-time: 2022-10-16T17:37:42.846683-07:00
-custom:
-  Author: versusfacit
-  Issue: "5436"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Allow hooks to fail without halting execution flow
-time: 2022-11-07T09:53:14.340257-06:00
-custom:
-  Author: ChenyuLInx
-  Issue: "5625"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Clarify Error Message for how many models are allowed in a Python file
-time: 2022-11-15T08:10:21.527884-05:00
-custom:
-  Author: justbldwn
-  Issue: "6245"

@@ -1,7 +0,0 @@
-kind: Fixes
-body: After this, will be possible to use default values for dbt.config.get
-time: 2022-11-24T16:34:19.039512764-03:00
-custom:
-  Author: devmessias
-  Issue: "6309"
-  PR: "6317"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Use full path for writing manifest
-time: 2022-12-02T16:48:59.029519-05:00
-custom:
-  Author: gshank
-  Issue: "6055"

@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Put black config in explicit config
-time: 2022-09-27T19:42:59.241433-07:00
-custom:
-  Author: max-sixty
-  Issue: "5946"

@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Added flat_graph attribute the Manifest class's deepcopy() coverage
-time: 2022-09-29T13:44:06.275941-04:00
-custom:
-  Author: peterallenwebb
-  Issue: "5809"

@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Add mypy configs so `mypy` passes from CLI
-time: 2022-10-05T12:03:10.061263-07:00
-custom:
-  Author: max-sixty
-  Issue: "5983"

@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Exception message cleanup.
-time: 2022-10-07T09:46:27.682872-05:00
-custom:
-  Author: emmyoop
-  Issue: "6023"

@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Add dmypy cache to gitignore
-time: 2022-10-07T14:00:44.227644-07:00
-custom:
-  Author: max-sixty
-  Issue: "6028"

@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Provide useful errors when the value of 'materialized' is invalid
-time: 2022-10-13T18:19:12.167548-04:00
-custom:
-  Author: peterallenwebb
-  Issue: "5229"

@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Fixed extra whitespace in strings introduced by black.
-time: 2022-10-17T15:15:11.499246-05:00
-custom:
-  Author: luke-bassett
-  Issue: "1350"

@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Clean up string formatting
-time: 2022-10-17T15:58:44.676549-04:00
-custom:
-  Author: eve-johns
-  Issue: "6068"

@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Remove the 'root_path' field from most nodes
-time: 2022-10-28T10:48:37.687886-04:00
-custom:
-  Author: gshank
-  Issue: "6171"

@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Combine certain logging events with different levels
-time: 2022-10-28T11:03:44.887836-04:00
-custom:
-  Author: gshank
-  Issue: "6173"

@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Convert threading tests to pytest
-time: 2022-11-08T07:45:50.589147-06:00
-custom:
-  Author: stu-k
-  Issue: "5942"

@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Convert postgres index tests to pytest
-time: 2022-11-08T11:56:33.743042-06:00
-custom:
-  Author: stu-k
-  Issue: "5770"

@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Convert use color tests to pytest
-time: 2022-11-08T13:31:04.788547-06:00
-custom:
-  Author: stu-k
-  Issue: "5771"

@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Add github actions workflow to generate high level CLI API docs
-time: 2022-11-16T13:00:37.916202-06:00
-custom:
-  Author: stu-k
-  Issue: "5942"

@@ -1,8 +0,0 @@
-kind: Under the Hood
-body: Functionality-neutral refactor of event logging system to improve encapsulation
-  and modularity.
-time: 2022-11-18T14:57:17.792622-05:00
-custom:
-  Author: peterallenwebb
-  Issue: "6139"
-  PR: "6291"

@@ -1,7 +0,0 @@
-kind: Under the Hood
-body: Consolidate ParsedNode and CompiledNode classes
-time: 2022-12-05T16:49:48.563583-05:00
-custom:
-  Author: gshank
-  Issue: "6383"
-  PR: "6384"

@@ -1,7 +0,0 @@
-kind: Under the Hood
-body: Prevent doc gen workflow from running on forks
-time: 2022-12-06T09:40:15.301984-06:00
-custom:
-  Author: stu-k
-  Issue: "6386"
-  PR: "6390"

@@ -1,7 +0,0 @@
-kind: Under the Hood
-body: Fix intermittent database connection failure in Windows CI test
-time: 2022-12-06T11:30:53.166009-07:00
-custom:
-  Author: MichelleArk dbeatty10
-  Issue: "6394"
-  PR: "6395"
.changes/unreleased/Under the Hood-20230113-132513.yaml (new file, +6)

@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Fix use of ConnectionReused logging event
+time: 2023-01-13T13:25:13.023168-05:00
+custom:
+  Author: gshank
+  Issue: "6168"

.changes/unreleased/Under the Hood-20230117-111737.yaml (new file, +6)

@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Update deprecated github action command
+time: 2023-01-17T11:17:37.046095-06:00
+custom:
+  Author: davidbloss
+  Issue: "6153"
.flake8 (2 changed lines)

@@ -9,4 +9,4 @@ ignore =
     E203 # makes Flake8 work like black
    E741
    E501 # long line checking is done in black
-exclude = test
+exclude = test/
.gitattributes (new file, +2)

@@ -0,0 +1,2 @@
+core/dbt/include/index.html binary
+tests/functional/artifacts/data/state/*/manifest.json binary
.github/_README.md (20 changed lines)

@@ -63,12 +63,12 @@ permissions:
   contents: read
   pull-requests: write
 ```

 ### Secrets
 - When to use a [Personal Access Token (PAT)](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token) vs the [GITHUB_TOKEN](https://docs.github.com/en/actions/security-guides/automatic-token-authentication) generated for the action?

   The `GITHUB_TOKEN` is used by default. In most cases it is sufficient for what you need.

   If you expect the workflow to result in a commit that should retrigger workflows, you will need to use a Personal Access Token for the bot to commit the file. When using the GITHUB_TOKEN, the resulting commit will not trigger another GitHub Actions Workflow run. This is due to limitations set by GitHub. See [the docs](https://docs.github.com/en/actions/security-guides/automatic-token-authentication#using-the-github_token-in-a-workflow) for a more detailed explanation.

   For example, we must use a PAT in our workflow to commit a new changelog yaml file for bot PRs. Once the file has been committed to the branch, it should retrigger the check to validate that a changelog exists on the PR. Otherwise, it would stay in a failed state since the check would never retrigger.

@@ -105,7 +105,7 @@ Some triggers of note that we use:

 ```
 # **what?**
 # Describe what the action does.

 # **why?**
 # Why does this action exist?

@@ -138,7 +138,7 @@ Some triggers of note that we use:
       id: fp
       run: |
         FILEPATH=.changes/unreleased/Dependencies-${{ steps.filename_time.outputs.time }}.yaml
-        echo "::set-output name=FILEPATH::$FILEPATH"
+        echo "FILEPATH=$FILEPATH" >> $GITHUB_OUTPUT
 ```

 - Print out all variables you will reference as the first step of a job. This allows for easier debugging. The first job should log all inputs. Subsequent jobs should reference outputs of other jobs, if present.

@@ -158,14 +158,14 @@ Some triggers of note that we use:
           echo "The build_script_path: ${{ inputs.build_script_path }}"
           echo "The s3_bucket_name: ${{ inputs.s3_bucket_name }}"
           echo "The package_test_command: ${{ inputs.package_test_command }}"

       # collect all the variables that need to be used in subsequent jobs
       - name: Set Variables
         id: variables
         run: |
-          echo "::set-output name=important_path::'performance/runner/Cargo.toml'"
-          echo "::set-output name=release_id::${{github.event.inputs.release_id}}"
-          echo "::set-output name=open_prs::${{github.event.inputs.open_prs}}"
+          echo "important_path='performance/runner/Cargo.toml'" >> $GITHUB_OUTPUT
+          echo "release_id=${{github.event.inputs.release_id}}" >> $GITHUB_OUTPUT
+          echo "open_prs=${{github.event.inputs.open_prs}}" >> $GITHUB_OUTPUT

   job2:
     needs: [job1]

@@ -190,7 +190,7 @@ ___
 ### Actions from the Marketplace
 - Don’t use external actions for things that can easily be accomplished manually.
 - Always read through what an external action does before using it! Often an action in the GitHub Actions Marketplace can be replaced with a few lines in bash. This is much more maintainable (and won’t change under us) and clear as to what’s actually happening. It also prevents any
 - Pin actions _we don't control_ to tags.

 ### Connecting to AWS
 - Authenticate with the aws managed workflow

@@ -208,7 +208,7 @@ ___
 ```yaml
 - name: Copy Artifacts from S3 via CLI
   run: aws s3 cp ${{ env.s3_bucket }} . --recursive
 ```

 ### Testing
.github/actions/latest-wrangler/main.py (17 changed lines)

@@ -28,11 +28,12 @@ if __name__ == "__main__":
     if package_request.status_code == 404:
         if halt_on_missing:
             sys.exit(1)
-        else:
-            # everything is the latest if the package doesn't exist
-            print(f"::set-output name=latest::{True}")
-            print(f"::set-output name=minor_latest::{True}")
-            sys.exit(0)
+        # everything is the latest if the package doesn't exist
+        github_output = os.environ.get("GITHUB_OUTPUT")
+        with open(github_output, "at", encoding="utf-8") as gh_output:
+            gh_output.write("latest=True")
+            gh_output.write("minor_latest=True")
+        sys.exit(0)

     # TODO: verify package meta is "correct"
     # https://github.com/dbt-labs/dbt-core/issues/4640

@@ -91,5 +92,7 @@ if __name__ == "__main__":
     latest = is_latest(pre_rel, new_version, current_latest)
     minor_latest = is_latest(pre_rel, new_version, current_minor_latest)

-    print(f"::set-output name=latest::{latest}")
-    print(f"::set-output name=minor_latest::{minor_latest}")
+    github_output = os.environ.get("GITHUB_OUTPUT")
+    with open(github_output, "at", encoding="utf-8") as gh_output:
+        gh_output.write(f"latest={latest}")
+        gh_output.write(f"minor_latest={minor_latest}")
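The hunks above migrate from the deprecated `::set-output` workflow command to appending `key=value` records to the file named by the `GITHUB_OUTPUT` environment variable. A minimal, self-contained sketch of that pattern (the helper name is ours, not dbt's; note that each record must be newline-terminated so the runner parses one output per line):

```python
import os


def set_github_output(**pairs: str) -> None:
    """Append key=value output records for a GitHub Actions step.

    Assumes GITHUB_OUTPUT is set, which it is inside an Actions runner.
    """
    path = os.environ["GITHUB_OUTPUT"]
    with open(path, "at", encoding="utf-8") as fh:
        for key, value in pairs.items():
            # one newline-terminated record per output
            fh.write(f"{key}={value}\n")


# usage inside a workflow step:
# set_github_output(latest="True", minor_latest="True")
```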
.github/workflows/main.yml (8 changed lines)

@@ -101,7 +101,9 @@ jobs:
       - name: Get current date
         if: always()
         id: date
-        run: echo "::set-output name=date::$(date +'%Y-%m-%dT%H_%M_%S')" # no colons allowed for artifacts
+        run: |
+          CURRENT_DATE=$(date +'%Y-%m-%dT%H_%M_%S') # no colons allowed for artifacts
+          echo "date=$CURRENT_DATE" >> $GITHUB_OUTPUT

       - uses: actions/upload-artifact@v2
         if: always()

@@ -168,7 +170,9 @@ jobs:
       - name: Get current date
         if: always()
         id: date
-        run: echo "::set-output name=date::$(date +'%Y_%m_%dT%H_%M_%S')" # no colons allowed for artifacts
+        run: |
+          CURRENT_DATE=$(date +'%Y-%m-%dT%H_%M_%S') # no colons allowed for artifacts
+          echo "date=$CURRENT_DATE" >> $GITHUB_OUTPUT

       - uses: actions/upload-artifact@v2
         if: always()
.github/workflows/release-docker.yml (12 changed lines)

@@ -41,9 +41,9 @@ jobs:
         id: version
         run: |
           IFS="." read -r MAJOR MINOR PATCH <<< ${{ github.event.inputs.version_number }}
-          echo "::set-output name=major::$MAJOR"
-          echo "::set-output name=minor::$MINOR"
-          echo "::set-output name=patch::$PATCH"
+          echo "major=$MAJOR" >> $GITHUB_OUTPUT
+          echo "minor=$MINOR" >> $GITHUB_OUTPUT
+          echo "patch=$PATCH" >> $GITHUB_OUTPUT

       - name: Is pkg 'latest'
         id: latest

@@ -70,8 +70,10 @@ jobs:
       - name: Get docker build arg
         id: build_arg
         run: |
-          echo "::set-output name=build_arg_name::"$(echo ${{ github.event.inputs.package }} | sed 's/\-/_/g')
-          echo "::set-output name=build_arg_value::"$(echo ${{ github.event.inputs.package }} | sed 's/postgres/core/g')
+          BUILD_ARG_NAME=$(echo ${{ github.event.inputs.package }} | sed 's/\-/_/g')
+          BUILD_ARG_VALUE=$(echo ${{ github.event.inputs.package }} | sed 's/postgres/core/g')
+          echo "build_arg_name=$BUILD_ARG_NAME" >> $GITHUB_OUTPUT
+          echo "build_arg_value=$BUILD_ARG_VALUE" >> $GITHUB_OUTPUT

       - name: Log in to the GHCR
         uses: docker/login-action@v1
.github/workflows/release.yml (2 changed lines)

@@ -165,7 +165,7 @@ jobs:
         env:
           IS_PRERELEASE: ${{ contains(github.event.inputs.version_number, 'rc') || contains(github.event.inputs.version_number, 'b') }}
         run: |
-          echo ::set-output name=isPrerelease::$IS_PRERELEASE
+          echo "isPrerelease=$IS_PRERELEASE" >> $GITHUB_OUTPUT

       - name: Creating GitHub Release
         uses: softprops/action-gh-release@v1
.github/workflows/version-bump.yml (2 changed lines)

@@ -65,7 +65,7 @@ jobs:
       - name: Set branch value
         id: variables
         run: |
-          echo "::set-output name=BRANCH_NAME::prep-release/${{ github.event.inputs.version_number }}_$GITHUB_RUN_ID"
+          echo "BRANCH_NAME=prep-release/${{ github.event.inputs.version_number }}_$GITHUB_RUN_ID" >> $GITHUB_OUTPUT

       - name: Create PR branch
         run: |
@@ -5,12 +5,12 @@
 - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
 - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)

 ## Previous Releases

 For information on prior major and minor releases, see their changelogs:

 * [1.4](https://github.com/dbt-labs/dbt-core/blob/1.4.latest/CHANGELOG.md)
 * [1.3](https://github.com/dbt-labs/dbt-core/blob/1.3.latest/CHANGELOG.md)
 * [1.2](https://github.com/dbt-labs/dbt-core/blob/1.2.latest/CHANGELOG.md)
 * [1.1](https://github.com/dbt-labs/dbt-core/blob/1.1.latest/CHANGELOG.md)
@@ -96,12 +96,15 @@ brew install postgresql

 ### Installation

-First make sure that you set up your `virtualenv` as described in [Setting up an environment](#setting-up-an-environment). Also ensure you have the latest version of pip installed with `pip install --upgrade pip`. Next, install `dbt-core` (and its dependencies) with:
+First make sure that you set up your `virtualenv` as described in [Setting up an environment](#setting-up-an-environment). Also ensure you have the latest version of pip installed with `pip install --upgrade pip`. Next, install `dbt-core` (and its dependencies):

 ```sh
 make dev
-# or
+```
+or, alternatively:
+```sh
 pip install -r dev-requirements.txt -r editable-requirements.txt
+pre-commit install
 ```

 When installed in this way, any changes you make to your local copy of the source code will be reflected immediately in your next `dbt` run.
Makefile (9 changed lines)

@@ -19,11 +19,16 @@ CI_FLAGS =\
 	LOG_DIR=./logs\
 	DBT_LOG_FORMAT=json

-.PHONY: dev
-dev: ## Installs dbt-* packages in develop mode along with development dependencies.
+.PHONY: dev_req
+dev_req: ## Installs dbt-* packages in develop mode along with only development dependencies.
 	@\
 	pip install -r dev-requirements.txt -r editable-requirements.txt

+.PHONY: dev
+dev: dev_req ## Installs dbt-* packages in develop mode along with development dependencies and pre-commit.
+	@\
+	pre-commit install
+
 .PHONY: mypy
 mypy: .env ## Runs mypy against staged changes for static type checking.
 	@\
@@ -2,7 +2,7 @@ from dataclasses import dataclass
 import re
 from typing import Dict, ClassVar, Any, Optional

-from dbt.exceptions import RuntimeException
+from dbt.exceptions import DbtRuntimeError


 @dataclass

@@ -85,7 +85,7 @@ class Column:
     def string_size(self) -> int:
         if not self.is_string():
-            raise RuntimeException("Called string_size() on non-string field!")
+            raise DbtRuntimeError("Called string_size() on non-string field!")

         if self.dtype == "text" or self.char_size is None:
             # char_size should never be None. Handle it reasonably just in case

@@ -124,7 +124,7 @@ class Column:
     def from_description(cls, name: str, raw_data_type: str) -> "Column":
         match = re.match(r"([^(]+)(\([^)]+\))?", raw_data_type)
         if match is None:
-            raise RuntimeException(f'Could not interpret data type "{raw_data_type}"')
+            raise DbtRuntimeError(f'Could not interpret data type "{raw_data_type}"')
         data_type, size_info = match.groups()
         char_size = None
         numeric_precision = None

@@ -137,7 +137,7 @@ class Column:
             try:
                 char_size = int(parts[0])
             except ValueError:
-                raise RuntimeException(
+                raise DbtRuntimeError(
                     f'Could not interpret data_type "{raw_data_type}": '
                     f'could not convert "{parts[0]}" to an integer'
                 )

@@ -145,14 +145,14 @@ class Column:
             try:
                 numeric_precision = int(parts[0])
             except ValueError:
-                raise RuntimeException(
+                raise DbtRuntimeError(
                     f'Could not interpret data_type "{raw_data_type}": '
                     f'could not convert "{parts[0]}" to an integer'
                 )
             try:
                 numeric_scale = int(parts[1])
             except ValueError:
-                raise RuntimeException(
+                raise DbtRuntimeError(
                     f'Could not interpret data_type "{raw_data_type}": '
                     f'could not convert "{parts[1]}" to an integer'
                 )
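For context on the `from_description` hunks above: the regex splits a declared type such as `numeric(10,2)` into a base name and size parts, and malformed sizes now raise `DbtRuntimeError`. A standalone sketch of that parsing (the exception class is stubbed locally for illustration; this is not dbt's exact code):

```python
import re


class DbtRuntimeError(Exception):
    """Local stand-in for dbt.exceptions.DbtRuntimeError."""


def parse_data_type(raw: str) -> tuple:
    """Split 'numeric(10,2)' into ('numeric', (10, 2))."""
    match = re.match(r"([^(]+)(\(([^)]+)\))?", raw)
    if match is None:
        raise DbtRuntimeError(f'Could not interpret data type "{raw}"')
    name, _, size_info = match.groups()
    if size_info is None:
        # no parenthesized sizes, e.g. "text"
        return name, ()
    try:
        sizes = tuple(int(part) for part in size_info.split(","))
    except ValueError as exc:
        raise DbtRuntimeError(f'Could not interpret data type "{raw}": {exc}')
    return name, sizes


# parse_data_type("character varying(128)") -> ("character varying", (128,))
```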
@@ -91,13 +91,13 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
         key = self.get_thread_identifier()
         with self.lock:
             if key not in self.thread_connections:
-                raise dbt.exceptions.InvalidConnectionException(key, list(self.thread_connections))
+                raise dbt.exceptions.InvalidConnectionError(key, list(self.thread_connections))
             return self.thread_connections[key]

     def set_thread_connection(self, conn: Connection) -> None:
         key = self.get_thread_identifier()
         if key in self.thread_connections:
-            raise dbt.exceptions.InternalException(
+            raise dbt.exceptions.DbtInternalError(
                 "In set_thread_connection, existing connection exists for {}"
             )
         self.thread_connections[key] = conn

@@ -137,49 +137,49 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
         :return: A context manager that handles exceptions raised by the
             underlying database.
         """
-        raise dbt.exceptions.NotImplementedException(
+        raise dbt.exceptions.NotImplementedError(
             "`exception_handler` is not implemented for this adapter!"
         )

     def set_connection_name(self, name: Optional[str] = None) -> Connection:
-        conn_name: str
-        if name is None:
-            # if a name isn't specified, we'll re-use a single handle
-            # named 'master'
-            conn_name = "master"
-        else:
-            if not isinstance(name, str):
-                raise dbt.exceptions.CompilerException(
-                    f"For connection name, got {name} - not a string!"
-                )
-            assert isinstance(name, str)
-            conn_name = name
-
-        conn = self.get_if_exists()
-        if conn is None:
-            conn = Connection(
-                type=Identifier(self.TYPE),
-                name=None,
-                state=ConnectionState.INIT,
-                transaction_open=False,
-                handle=None,
-                credentials=self.profile.credentials,
-            )
-            self.set_thread_connection(conn)
-
-        if conn.name == conn_name and conn.state == "open":
-            return conn
-
-        fire_event(
-            NewConnection(conn_name=conn_name, conn_type=self.TYPE, node_info=get_node_info())
-        )
-
-        if conn.state == "open":
-            fire_event(ConnectionReused(conn_name=conn_name))
-        else:
-            conn.handle = LazyHandle(self.open)
-        conn.name = conn_name
+        """Called by 'acquire_connection' in BaseAdapter, which is called by
+        'connection_named', called by 'connection_for(node)'.
+        Creates a connection for this thread if one doesn't already
+        exist, and will rename an existing connection."""
+
+        conn_name: str = "master" if name is None else name
+
+        # Get a connection for this thread
+        conn = self.get_if_exists()
+
+        if conn and conn.name == conn_name and conn.state == "open":
+            # Found a connection and nothing to do, so just return it
+            return conn
+
+        if conn is None:
+            # Create a new connection
+            conn = Connection(
+                type=Identifier(self.TYPE),
+                name=conn_name,
+                state=ConnectionState.INIT,
+                transaction_open=False,
+                handle=None,
+                credentials=self.profile.credentials,
+            )
+            conn.handle = LazyHandle(self.open)
+            # Add the connection to thread_connections for this thread
+            self.set_thread_connection(conn)
+            fire_event(
+                NewConnection(conn_name=conn_name, conn_type=self.TYPE, node_info=get_node_info())
+            )
+        else:  # existing connection either wasn't open or didn't have the right name
+            if conn.state != "open":
+                conn.handle = LazyHandle(self.open)
+            if conn.name != conn_name:
+                orig_conn_name: str = conn.name or ""
+                conn.name = conn_name
+                fire_event(ConnectionReused(orig_conn_name=orig_conn_name, conn_name=conn_name))
+
         return conn

@@ -211,7 +211,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
             connect should trigger a retry.
         :type retryable_exceptions: Iterable[Type[Exception]]
         :param int retry_limit: How many times to retry the call to connect. If this limit
-            is exceeded before a successful call, a FailedToConnectException will be raised.
+            is exceeded before a successful call, a FailedToConnectError will be raised.
             Must be non-negative.
         :param retry_timeout: Time to wait between attempts to connect. Can also take a
             Callable that takes the number of attempts so far, beginning at 0, and returns an int

@@ -220,14 +220,14 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
         :param int _attempts: Parameter used to keep track of the number of attempts in calling the
             connect function across recursive calls. Passed as an argument to retry_timeout if it
            is a Callable. This parameter should not be set by the initial caller.
-        :raises dbt.exceptions.FailedToConnectException: Upon exhausting all retry attempts without
+        :raises dbt.exceptions.FailedToConnectError: Upon exhausting all retry attempts without
            successfully acquiring a handle.
         :return: The given connection with its appropriate state and handle attributes set
            depending on whether we successfully acquired a handle or not.
         """
         timeout = retry_timeout(_attempts) if callable(retry_timeout) else retry_timeout
         if timeout < 0:
-            raise dbt.exceptions.FailedToConnectException(
+            raise dbt.exceptions.FailedToConnectError(
                 "retry_timeout cannot be negative or return a negative time."
             )

@@ -235,7 +235,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
         # This guard is not perfect others may add to the recursion limit (e.g. built-ins).
             connection.handle = None
             connection.state = ConnectionState.FAIL
-            raise dbt.exceptions.FailedToConnectException("retry_limit cannot be negative")
+            raise dbt.exceptions.FailedToConnectError("retry_limit cannot be negative")

         try:
             connection.handle = connect()

@@ -246,7 +246,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
             if retry_limit <= 0:
                 connection.handle = None
                 connection.state = ConnectionState.FAIL
-                raise dbt.exceptions.FailedToConnectException(str(e))
+                raise dbt.exceptions.FailedToConnectError(str(e))

             logger.debug(
                 f"Got a retryable error when attempting to open a {cls.TYPE} connection.\n"

@@ -268,12 +268,12 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
         except Exception as e:
             connection.handle = None
             connection.state = ConnectionState.FAIL
-            raise dbt.exceptions.FailedToConnectException(str(e))
+            raise dbt.exceptions.FailedToConnectError(str(e))

     @abc.abstractmethod
     def cancel_open(self) -> Optional[List[str]]:
         """Cancel all open connections on the adapter. (passable)"""
-        raise dbt.exceptions.NotImplementedException(
+        raise dbt.exceptions.NotImplementedError(
             "`cancel_open` is not implemented for this adapter!"
         )

@@ -288,7 +288,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
         This should be thread-safe, or hold the lock if necessary. The given
         connection should not be in either in_use or available.
         """
-        raise dbt.exceptions.NotImplementedException("`open` is not implemented for this adapter!")
+        raise dbt.exceptions.NotImplementedError("`open` is not implemented for this adapter!")

     def release(self) -> None:
         with self.lock:

@@ -320,16 +320,12 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
     @abc.abstractmethod
     def begin(self) -> None:
         """Begin a transaction. (passable)"""
-        raise dbt.exceptions.NotImplementedException(
-            "`begin` is not implemented for this adapter!"
-        )
+        raise dbt.exceptions.NotImplementedError("`begin` is not implemented for this adapter!")

     @abc.abstractmethod
     def commit(self) -> None:
         """Commit a transaction. (passable)"""
-        raise dbt.exceptions.NotImplementedException(
-            "`commit` is not implemented for this adapter!"
-        )
+        raise dbt.exceptions.NotImplementedError("`commit` is not implemented for this adapter!")

     @classmethod
     def _rollback_handle(cls, connection: Connection) -> None:

@@ -365,7 +361,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
     def _rollback(cls, connection: Connection) -> None:
         """Roll back the given connection."""
         if connection.transaction_open is False:
-            raise dbt.exceptions.InternalException(
+            raise dbt.exceptions.DbtInternalError(
                 f"Tried to rollback transaction on connection "
                 f'"{connection.name}", but it does not have one open!'
             )

@@ -415,6 +411,4 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
         :return: A tuple of the query status and results (empty if fetch=False).
         :rtype: Tuple[AdapterResponse, agate.Table]
         """
-        raise dbt.exceptions.NotImplementedException(
-            "`execute` is not implemented for this adapter!"
-        )
+        raise dbt.exceptions.NotImplementedError("`execute` is not implemented for this adapter!")
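The docstring hunks above also spell out the retry contract for opening connections: `retry_timeout` may be a number or a callable of the attempt count, and exhausting `retry_limit` raises `FailedToConnectError`. A simplified sketch of just that contract, with a stand-in exception and a pared-down signature (an illustration of the semantics, not dbt's implementation):

```python
import time
from typing import Callable, Iterable, Type, Union


class FailedToConnectError(Exception):
    """Local stand-in for dbt.exceptions.FailedToConnectError."""


def retry_connection(
    connect: Callable[[], object],
    retryable_exceptions: Iterable[Type[Exception]],
    retry_limit: int = 1,
    retry_timeout: Union[float, Callable[[int], float]] = 1.0,
    _attempts: int = 0,
):
    exc_types = tuple(retryable_exceptions)
    # retry_timeout may be a constant or a backoff function of the attempt count
    timeout = retry_timeout(_attempts) if callable(retry_timeout) else retry_timeout
    if timeout < 0:
        raise FailedToConnectError("retry_timeout cannot be negative or return a negative time.")
    if retry_limit < 0:
        raise FailedToConnectError("retry_limit cannot be negative")
    try:
        return connect()
    except exc_types as e:
        if retry_limit <= 0:
            # retries exhausted: surface the last retryable error
            raise FailedToConnectError(str(e))
        time.sleep(timeout)
        return retry_connection(
            connect, exc_types, retry_limit - 1, retry_timeout, _attempts + 1
        )
    except Exception as e:
        # non-retryable errors fail immediately
        raise FailedToConnectError(str(e))
```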
@@ -22,13 +22,20 @@ import agate
 import pytz

 from dbt.exceptions import (
-    raise_database_error,
-    raise_compiler_error,
-    invalid_type_error,
-    get_relation_returned_multiple_results,
-    InternalException,
-    NotImplementedException,
-    RuntimeException,
+    DbtInternalError,
+    MacroArgTypeError,
+    MacroResultError,
+    QuoteConfigTypeError,
+    NotImplementedError,
+    NullRelationCacheAttemptedError,
+    NullRelationDropAttemptedError,
+    RelationReturnedMultipleResultsError,
+    RenameToNoneAttemptedError,
+    DbtRuntimeError,
+    SnapshotTargetIncompleteError,
+    SnapshotTargetNotSnapshotTableError,
+    UnexpectedNullError,
+    UnexpectedNonTimestampError,
 )

 from dbt.adapters.protocol import (
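The import hunk above shows the scale of the exception renames in this change (`RuntimeException` → `DbtRuntimeError`, `InternalException` → `DbtInternalError`, and so on, with error-raising helper functions replaced by exception classes). Whether dbt-core ships aliases for the old names is not shown in this diff; purely as a hypothetical sketch, a module-level `__getattr__` (PEP 562) is one way such a rename can keep the old names importable while warning callers:

```python
import warnings


class DbtRuntimeError(Exception):
    """The new name; stubbed here for illustration."""


# Hypothetical mapping of deprecated names to their replacements.
_DEPRECATED_ALIASES = {"RuntimeException": DbtRuntimeError}


def __getattr__(name):
    # Called only when a normal attribute lookup on this module fails,
    # so current names are unaffected.
    if name in _DEPRECATED_ALIASES:
        replacement = _DEPRECATED_ALIASES[name]
        warnings.warn(
            f"{name} is deprecated; use {replacement.__name__} instead",
            DeprecationWarning,
            stacklevel=2,
        )
        return replacement
    raise AttributeError(name)
```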
@@ -68,7 +75,7 @@ FRESHNESS_MACRO_NAME = "collect_freshness"

 def _expect_row_value(key: str, row: agate.Row):
     if key not in row.keys():
-        raise InternalException(
+        raise DbtInternalError(
             'Got a row without "{}" column, columns: {}'.format(key, row.keys())
         )
     return row[key]

@@ -97,18 +104,10 @@ def _utc(dt: Optional[datetime], source: BaseRelation, field_name: str) -> datetime:
     assume the datetime is already for UTC and add the timezone.
     """
     if dt is None:
-        raise raise_database_error(
-            "Expected a non-null value when querying field '{}' of table "
-            " {} but received value 'null' instead".format(field_name, source)
-        )
+        raise UnexpectedNullError(field_name, source)

     elif not hasattr(dt, "tzinfo"):
-        raise raise_database_error(
-            "Expected a timestamp value when querying field '{}' of table "
-            "{} but received value of type '{}' instead".format(
-                field_name, source, type(dt).__name__
-            )
-        )
+        raise UnexpectedNonTimestampError(field_name, source, dt)

     elif dt.tzinfo:
         return dt.astimezone(pytz.UTC)
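Once `_utc` has rejected nulls and non-timestamps with the new error types, the remaining logic is a normalization: timezone-aware datetimes are converted to UTC, and naive ones are assumed to already be UTC wall-clock time. A sketch of just that normalization (the function name is ours):

```python
from datetime import datetime

import pytz


def to_utc(dt: datetime) -> datetime:
    """Normalize a datetime to UTC, as _utc does after its error checks."""
    if dt.tzinfo is not None:
        # aware datetime: convert to UTC
        return dt.astimezone(pytz.UTC)
    # naive datetime: assume it already represents UTC and attach the zone
    return pytz.UTC.localize(dt)


# to_utc(datetime(2022, 12, 6, 15, 7)) -> 2022-12-06 15:07:00+00:00
```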
@@ -434,7 +433,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         """Cache a new relation in dbt. It will show up in `list relations`."""
         if relation is None:
             name = self.nice_connection_name()
-            raise_compiler_error("Attempted to cache a null relation for {}".format(name))
+            raise NullRelationCacheAttemptedError(name)
         self.cache.add(relation)
         # so jinja doesn't render things
         return ""

@@ -446,7 +445,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         """
         if relation is None:
             name = self.nice_connection_name()
-            raise_compiler_error("Attempted to drop a null relation for {}".format(name))
+            raise NullRelationDropAttemptedError(name)
         self.cache.drop(relation)
         return ""

@@ -463,9 +462,7 @@ class BaseAdapter(metaclass=AdapterMeta):
             name = self.nice_connection_name()
             src_name = _relation_name(from_relation)
             dst_name = _relation_name(to_relation)
-            raise_compiler_error(
-                "Attempted to rename {} to {} for {}".format(src_name, dst_name, name)
-            )
+            raise RenameToNoneAttemptedError(src_name, dst_name, name)

         self.cache.rename(from_relation, to_relation)
         return ""

@@ -477,12 +474,12 @@ class BaseAdapter(metaclass=AdapterMeta):
     @abc.abstractmethod
     def date_function(cls) -> str:
         """Get the date function used by this adapter's database."""
-        raise NotImplementedException("`date_function` is not implemented for this adapter!")
+        raise NotImplementedError("`date_function` is not implemented for this adapter!")

     @classmethod
     @abc.abstractmethod
     def is_cancelable(cls) -> bool:
-        raise NotImplementedException("`is_cancelable` is not implemented for this adapter!")
+        raise NotImplementedError("`is_cancelable` is not implemented for this adapter!")

     ###
     # Abstract methods about schemas

@@ -490,7 +487,7 @@ class BaseAdapter(metaclass=AdapterMeta):
     @abc.abstractmethod
     def list_schemas(self, database: str) -> List[str]:
         """Get a list of existing schemas in database"""
-        raise NotImplementedException("`list_schemas` is not implemented for this adapter!")
+        raise NotImplementedError("`list_schemas` is not implemented for this adapter!")

     @available.parse(lambda *a, **k: False)
     def check_schema_exists(self, database: str, schema: str) -> bool:

@@ -513,13 +510,13 @@ class BaseAdapter(metaclass=AdapterMeta):

         *Implementors must call self.cache.drop() to preserve cache state!*
         """
-        raise NotImplementedException("`drop_relation` is not implemented for this adapter!")
+        raise NotImplementedError("`drop_relation` is not implemented for this adapter!")

     @abc.abstractmethod
     @available.parse_none
     def truncate_relation(self, relation: BaseRelation) -> None:
         """Truncate the given relation."""
-        raise NotImplementedException("`truncate_relation` is not implemented for this adapter!")
+        raise NotImplementedError("`truncate_relation` is not implemented for this adapter!")

     @abc.abstractmethod
     @available.parse_none

@@ -528,15 +525,13 @@ class BaseAdapter(metaclass=AdapterMeta):

         Implementors must call self.cache.rename() to preserve cache state.
         """
-        raise NotImplementedException("`rename_relation` is not implemented for this adapter!")
+        raise NotImplementedError("`rename_relation` is not implemented for this adapter!")

     @abc.abstractmethod
     @available.parse_list
     def get_columns_in_relation(self, relation: BaseRelation) -> List[BaseColumn]:
         """Get a list of the columns in the given Relation."""
-        raise NotImplementedException(
-            "`get_columns_in_relation` is not implemented for this adapter!"
-        )
+        raise NotImplementedError("`get_columns_in_relation` is not implemented for this adapter!")

     @available.deprecated("get_columns_in_relation", lambda *a, **k: [])
     def get_columns_in_table(self, schema: str, identifier: str) -> List[BaseColumn]:

@@ -558,7 +553,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         :param self.Relation current: A relation that currently exists in the
             database with columns of unspecified types.
         """
-        raise NotImplementedException(
+        raise NotImplementedError(
             "`expand_target_column_types` is not implemented for this adapter!"
         )

@@ -573,7 +568,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         :return: The relations in schema
         :rtype: List[self.Relation]
         """
-        raise NotImplementedException(
+        raise NotImplementedError(
             "`list_relations_without_caching` is not implemented for this adapter!"
         )
@@ -615,7 +610,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         to_relation.
         """
         if not isinstance(from_relation, self.Relation):
-            invalid_type_error(
+            raise MacroArgTypeError(
                 method_name="get_missing_columns",
                 arg_name="from_relation",
                 got_value=from_relation,

@@ -623,7 +618,7 @@ class BaseAdapter(metaclass=AdapterMeta):
             )

         if not isinstance(to_relation, self.Relation):
-            invalid_type_error(
+            raise MacroArgTypeError(
                 method_name="get_missing_columns",
                 arg_name="to_relation",
                 got_value=to_relation,

@@ -644,11 +639,11 @@ class BaseAdapter(metaclass=AdapterMeta):
         expected columns.

         :param Relation relation: The relation to check
-        :raises CompilationException: If the columns are
+        :raises InvalidMacroArgType: If the columns are
             incorrect.
         """
         if not isinstance(relation, self.Relation):
-            invalid_type_error(
+            raise MacroArgTypeError(
                 method_name="valid_snapshot_target",
                 arg_name="relation",
                 got_value=relation,

@@ -669,24 +664,16 @@ class BaseAdapter(metaclass=AdapterMeta):

         if missing:
             if extra:
-                msg = (
-                    'Snapshot target has ("{}") but not ("{}") - is it an '
-                    "unmigrated previous version archive?".format(
-                        '", "'.join(extra), '", "'.join(missing)
-                    )
-                )
+                raise SnapshotTargetIncompleteError(extra, missing)
             else:
-                msg = 'Snapshot target is not a snapshot table (missing "{}")'.format(
-                    '", "'.join(missing)
-                )
-                raise_compiler_error(msg)
+                raise SnapshotTargetNotSnapshotTableError(missing)

     @available.parse_none
     def expand_target_column_types(
         self, from_relation: BaseRelation, to_relation: BaseRelation
     ) -> None:
         if not isinstance(from_relation, self.Relation):
-            invalid_type_error(
+            raise MacroArgTypeError(
                 method_name="expand_target_column_types",
                 arg_name="from_relation",
                 got_value=from_relation,

@@ -694,7 +681,7 @@ class BaseAdapter(metaclass=AdapterMeta):
             )

         if not isinstance(to_relation, self.Relation):
-            invalid_type_error(
+            raise MacroArgTypeError(
                 method_name="expand_target_column_types",
                 arg_name="to_relation",
                 got_value=to_relation,
@@ -776,7 +763,7 @@ class BaseAdapter(metaclass=AdapterMeta):
                 "schema": schema,
                 "database": database,
             }
-            get_relation_returned_multiple_results(kwargs, matches)
+            raise RelationReturnedMultipleResultsError(kwargs, matches)

         elif matches:
             return matches[0]

@@ -798,20 +785,20 @@ class BaseAdapter(metaclass=AdapterMeta):
     @available.parse_none
     def create_schema(self, relation: BaseRelation):
         """Create the given schema if it does not exist."""
-        raise NotImplementedException("`create_schema` is not implemented for this adapter!")
+        raise NotImplementedError("`create_schema` is not implemented for this adapter!")

     @abc.abstractmethod
     @available.parse_none
     def drop_schema(self, relation: BaseRelation):
         """Drop the given schema (and everything in it) if it exists."""
-        raise NotImplementedException("`drop_schema` is not implemented for this adapter!")
+        raise NotImplementedError("`drop_schema` is not implemented for this adapter!")

     @available
     @classmethod
     @abc.abstractmethod
     def quote(cls, identifier: str) -> str:
         """Quote the given identifier, as appropriate for the database."""
-        raise NotImplementedException("`quote` is not implemented for this adapter!")
+        raise NotImplementedError("`quote` is not implemented for this adapter!")

     @available
     def quote_as_configured(self, identifier: str, quote_key: str) -> str:

@@ -840,10 +827,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         elif quote_config is None:
             pass
         else:
-            raise_compiler_error(
-                f'The seed configuration value of "quote_columns" has an '
-                f"invalid type {type(quote_config)}"
-            )
+            raise QuoteConfigTypeError(quote_config)

         if quote_columns:
             return self.quote(column)

@@ -864,7 +848,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         :param col_idx: The index into the agate table for the column.
         :return: The name of the type in the database
         """
-        raise NotImplementedException("`convert_text_type` is not implemented for this adapter!")
+        raise NotImplementedError("`convert_text_type` is not implemented for this adapter!")

     @classmethod
     @abc.abstractmethod

@@ -876,7 +860,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         :param col_idx: The index into the agate table for the column.
         :return: The name of the type in the database
         """
-        raise NotImplementedException("`convert_number_type` is not implemented for this adapter!")
+        raise NotImplementedError("`convert_number_type` is not implemented for this adapter!")

     @classmethod
     @abc.abstractmethod

@@ -888,9 +872,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         :param col_idx: The index into the agate table for the column.
         :return: The name of the type in the database
         """
-        raise NotImplementedException(
-            "`convert_boolean_type` is not implemented for this adapter!"
-        )
+        raise NotImplementedError("`convert_boolean_type` is not implemented for this adapter!")

     @classmethod
     @abc.abstractmethod

@@ -902,9 +884,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         :param col_idx: The index into the agate table for the column.
         :return: The name of the type in the database
         """
-        raise NotImplementedException(
-            "`convert_datetime_type` is not implemented for this adapter!"
-        )
+        raise NotImplementedError("`convert_datetime_type` is not implemented for this adapter!")

     @classmethod
     @abc.abstractmethod

@@ -916,7 +896,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         :param col_idx: The index into the agate table for the column.
         :return: The name of the type in the database
         """
-        raise NotImplementedException("`convert_date_type` is not implemented for this adapter!")
+        raise NotImplementedError("`convert_date_type` is not implemented for this adapter!")

     @classmethod
     @abc.abstractmethod

@@ -928,7 +908,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         :param col_idx: The index into the agate table for the column.
         :return: The name of the type in the database
         """
-        raise NotImplementedException("`convert_time_type` is not implemented for this adapter!")
+        raise NotImplementedError("`convert_time_type` is not implemented for this adapter!")

     @available
     @classmethod

@@ -995,7 +975,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         else:
             package_name = 'the "{}" package'.format(project)

-        raise RuntimeException(
+        raise DbtRuntimeError(
             'dbt could not find a macro with the name "{}" in {}'.format(
                 macro_name, package_name
             )

@@ -1093,11 +1073,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         # now we have a 1-row table of the maximum `loaded_at_field` value and
         # the current time according to the db.
         if len(table) != 1 or len(table[0]) != 2:
-            raise_compiler_error(
-                'Got an invalid result from "{}" macro: {}'.format(
-                    FRESHNESS_MACRO_NAME, [tuple(r) for r in table]
-                )
-            )
+            raise MacroResultError(FRESHNESS_MACRO_NAME, table)
         if table[0][0] is None:
             # no records in the table, so really the max_loaded_at was
             # infinitely long ago. Just call it 0:00 January 1 year UTC

@@ -1174,7 +1150,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         elif location == "prepend":
             return f"'{value}' || {add_to}"
         else:
-            raise RuntimeException(f'Got an unexpected location value of "{location}"')
+            raise DbtRuntimeError(f'Got an unexpected location value of "{location}"')

     def get_rows_different_sql(
         self,

@@ -1232,7 +1208,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         return self.generate_python_submission_response(submission_result)

     def generate_python_submission_response(self, submission_result: Any) -> AdapterResponse:
-        raise NotImplementedException(
+        raise NotImplementedError(
             "Your adapter need to implement generate_python_submission_response"
         )

@@ -1256,7 +1232,7 @@ class BaseAdapter(metaclass=AdapterMeta):
             valid_strategies.append("default")
         builtin_strategies = self.builtin_incremental_strategies()
         if strategy in builtin_strategies and strategy not in valid_strategies:
-            raise RuntimeException(
+            raise DbtRuntimeError(
                 f"The incremental strategy '{strategy}' is not valid for this adapter"
             )

@@ -1264,7 +1240,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         macro_name = f"get_incremental_{strategy}_sql"
         # The model_context should have MacroGenerator callable objects for all macros
         if macro_name not in model_context:
-            raise RuntimeException(
+            raise DbtRuntimeError(
                 'dbt could not find an incremental strategy macro with the name "{}" in {}'.format(
                     macro_name, self.config.project_name
                 )
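The freshness hunk above (`@@ -1093,11`) validates the 1×2 result of the `collect_freshness` macro and, per the comment in the diff, treats an empty source table as a `max_loaded_at` that is "infinitely long ago." A sketch of that fallback and the age computation, assumed from the surrounding comments rather than copied from dbt:

```python
from datetime import datetime
from typing import Optional

import pytz


def freshness_age_seconds(
    max_loaded_at: Optional[datetime], snapshotted_at: datetime
) -> float:
    """If the table had no records, max_loaded_at comes back NULL; treat it
    as 0:00 January 1 of year 1 UTC so the computed age is effectively infinite."""
    if max_loaded_at is None:
        max_loaded_at = datetime(1, 1, 1, tzinfo=pytz.UTC)
    return (snapshotted_at - max_loaded_at).total_seconds()
```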
@@ -1,7 +1,7 @@
 from typing import List, Optional, Type

 from dbt.adapters.base import Credentials
-from dbt.exceptions import CompilationException
+from dbt.exceptions import CompilationError
 from dbt.adapters.protocol import AdapterProtocol

@@ -11,7 +11,7 @@ def project_name_from_path(include_path: str) -> str:
     partial = Project.partial_load(include_path)
     if partial.project_name is None:
-        raise CompilationException(f"Invalid project at {include_path}: name not set!")
+        raise CompilationError(f"Invalid project at {include_path}: name not set!")
     return partial.project_name
@@ -7,7 +7,7 @@ from dbt.context.manifest import generate_query_header_context
 from dbt.contracts.connection import AdapterRequiredConfig, QueryComment
 from dbt.contracts.graph.nodes import ResultNode
 from dbt.contracts.graph.manifest import Manifest
-from dbt.exceptions import RuntimeException
+from dbt.exceptions import DbtRuntimeError


 class NodeWrapper:

@@ -48,7 +48,7 @@ class _QueryComment(local):
         if isinstance(comment, str) and "*/" in comment:
             # tell the user "no" so they don't hurt themselves by writing
             # garbage
-            raise RuntimeException(f'query comment contains illegal value "*/": {comment}')
+            raise DbtRuntimeError(f'query comment contains illegal value "*/": {comment}')
         self.query_comment = comment
         self.append = append
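The `_QueryComment` hunk is a straight rename, but the guard it touches is worth spelling out: query comments are injected into SQL as `/* ... */` block comments, so a literal `*/` inside the comment text would close the block early and splice whatever follows into the query itself. A minimal standalone version of the check (using a plain `ValueError` rather than dbt's exception):

```python
def validate_query_comment(comment: str) -> str:
    """Reject comments that would escape a /* ... */ SQL block comment."""
    if "*/" in comment:
        # '*/' terminates the block comment, injecting the rest into the SQL
        raise ValueError(f'query comment contains illegal value "*/": {comment}')
    return comment


# validate_query_comment("run by dbt")          -> ok
# validate_query_comment("x */ DROP TABLE y")   -> raises ValueError
```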
@@ -1,8 +1,8 @@
 from collections.abc import Hashable
 from dataclasses import dataclass, field
-from typing import Optional, TypeVar, Any, Type, Dict, Union, Iterator, Tuple, Set
+from typing import Optional, TypeVar, Any, Type, Dict, Iterator, Tuple, Set

-from dbt.contracts.graph.nodes import SourceDefinition, ParsedNode
+from dbt.contracts.graph.nodes import SourceDefinition, ManifestNode, ResultNode, ParsedNode
 from dbt.contracts.relation import (
     RelationType,
     ComponentName,
@@ -11,7 +11,11 @@ from dbt.contracts.relation import (
     Policy,
     Path,
 )
-from dbt.exceptions import InternalException
+from dbt.exceptions import (
+    ApproximateMatchError,
+    DbtInternalError,
+    MultipleDatabasesNotAllowedError,
+)
 from dbt.node_types import NodeType
 from dbt.utils import filter_null_values, deep_merge, classproperty
@@ -83,7 +87,7 @@ class BaseRelation(FakeAPIObject, Hashable):

         if not search:
             # nothing was passed in
-            raise dbt.exceptions.RuntimeException(
+            raise dbt.exceptions.DbtRuntimeError(
                 "Tried to match relation, but no search path was passed!"
             )
@@ -100,7 +104,7 @@ class BaseRelation(FakeAPIObject, Hashable):

         if approximate_match and not exact_match:
             target = self.create(database=database, schema=schema, identifier=identifier)
-            dbt.exceptions.approximate_relation_match(target, self)
+            raise ApproximateMatchError(target, self)

         return exact_match
@@ -210,7 +214,7 @@ class BaseRelation(FakeAPIObject, Hashable):
     def create_ephemeral_from_node(
         cls: Type[Self],
         config: HasQuoting,
-        node: ParsedNode,
+        node: ManifestNode,
     ) -> Self:
         # Note that ephemeral models are based on the name.
         identifier = cls.add_ephemeral_prefix(node.name)
@@ -223,7 +227,7 @@ class BaseRelation(FakeAPIObject, Hashable):
     def create_from_node(
         cls: Type[Self],
         config: HasQuoting,
-        node: ParsedNode,
+        node: ManifestNode,
         quote_policy: Optional[Dict[str, bool]] = None,
         **kwargs: Any,
     ) -> Self:
@@ -244,18 +248,21 @@ class BaseRelation(FakeAPIObject, Hashable):
     def create_from(
         cls: Type[Self],
         config: HasQuoting,
-        node: Union[ParsedNode, SourceDefinition],
+        node: ResultNode,
         **kwargs: Any,
     ) -> Self:
         if node.resource_type == NodeType.Source:
             if not isinstance(node, SourceDefinition):
-                raise InternalException(
+                raise DbtInternalError(
                     "type mismatch, expected SourceDefinition but got {}".format(type(node))
                 )
             return cls.create_from_source(node, **kwargs)
         else:
+            # Can't use ManifestNode here because of parameterized generics
             if not isinstance(node, (ParsedNode)):
-                raise InternalException(f"type mismatch, expected ParsedNode but got {type(node)}")
+                raise DbtInternalError(
+                    f"type mismatch, expected ManifestNode but got {type(node)}"
+                )
             return cls.create_from_node(config, node, **kwargs)

     @classmethod
@@ -351,7 +358,7 @@ class InformationSchema(BaseRelation):

     def __post_init__(self):
         if not isinstance(self.information_schema_view, (type(None), str)):
-            raise dbt.exceptions.CompilationException(
+            raise dbt.exceptions.CompilationError(
                 "Got an invalid name: {}".format(self.information_schema_view)
             )
@@ -435,7 +442,7 @@ class SchemaSearchMap(Dict[InformationSchema, Set[Optional[str]]]):
         if not allow_multiple_databases:
             seen = {r.database.lower() for r in self if r.database}
             if len(seen) > 1:
-                dbt.exceptions.raise_compiler_error(str(seen))
+                raise MultipleDatabasesNotAllowedError(seen)

         for information_schema_name, schema in self.search():
             path = {"database": information_schema_name.database, "schema": schema}
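The raise-helper-to-exception-class change also moves message formatting out of the call sites: the new classes take the relevant objects and build their own message. A sketch of what ApproximateMatchError(target, self) might look like internally (field names and message text are assumptions, not taken from this diff):

class ApproximateMatchError(Exception):  # stand-in for the dbt.exceptions class
    def __init__(self, target, relation):
        self.target = target
        self.relation = relation
        super().__init__(
            f"found an approximate match for {target}, but not an exact one: "
            f"{relation}; check database/schema/identifier quoting"
        )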
@@ -1,4 +1,3 @@
-import re
 import threading
 from copy import deepcopy
 from typing import Any, Dict, Iterable, List, Optional, Set, Tuple
@@ -9,23 +8,15 @@ from dbt.adapters.reference_keys import (
     _make_msg_from_ref_key,
     _ReferenceKey,
 )
-import dbt.exceptions
-from dbt.events.functions import fire_event, fire_event_if
-from dbt.events.types import (
-    AddLink,
-    AddRelation,
-    DropCascade,
-    DropMissingRelation,
-    DropRelation,
-    DumpAfterAddGraph,
-    DumpAfterRenameSchema,
-    DumpBeforeAddGraph,
-    DumpBeforeRenameSchema,
-    RenameSchema,
-    TemporaryRelation,
-    UncachedRelation,
-    UpdateReference,
+from dbt.exceptions import (
+    DependentLinkNotCachedError,
+    NewNameAlreadyInCacheError,
+    NoneRelationFoundError,
+    ReferencedLinkNotCachedError,
+    TruncatedModelNameCausedCollisionError,
 )
+from dbt.events.functions import fire_event, fire_event_if
+from dbt.events.types import CacheAction, CacheDumpGraph
 import dbt.flags as flags
 from dbt.utils import lowercase
@@ -150,11 +141,7 @@ class _CachedRelation:
         :raises InternalError: If the new key already exists.
         """
         if new_key in self.referenced_by:
-            dbt.exceptions.raise_cache_inconsistent(
-                'in rename of "{}" -> "{}", new name is in the cache already'.format(
-                    old_key, new_key
-                )
-            )
+            raise NewNameAlreadyInCacheError(old_key, new_key)

         if old_key not in self.referenced_by:
             return
@@ -270,21 +257,17 @@ class RelationsCache:
         if referenced is None:
             return
         if referenced is None:
-            dbt.exceptions.raise_cache_inconsistent(
-                "in add_link, referenced link key {} not in cache!".format(referenced_key)
-            )
+            raise ReferencedLinkNotCachedError(referenced_key)

         dependent = self.relations.get(dependent_key)
         if dependent is None:
-            dbt.exceptions.raise_cache_inconsistent(
-                "in add_link, dependent link key {} not in cache!".format(dependent_key)
-            )
+            raise DependentLinkNotCachedError(dependent_key)

         assert dependent is not None  # we just raised!

         referenced.add_reference(dependent)

-    # TODO: Is this dead code? I can't seem to find it grepping the codebase.
+    # This is called in plugins/postgres/dbt/adapters/postgres/impl.py
     def add_link(self, referenced, dependent):
         """Add a link between two relations to the database. If either relation
         does not exist, it will be added as an "external" relation.
@@ -306,9 +289,9 @@ class RelationsCache:
             # referring to a table outside our control. There's no need to make
             # a link - we will never drop the referenced relation during a run.
             fire_event(
-                UncachedRelation(
-                    dep_key=_make_msg_from_ref_key(dep_key),
+                CacheAction(
                     ref_key=_make_msg_from_ref_key(ref_key),
+                    ref_key_2=_make_msg_from_ref_key(dep_key),
                 )
             )
             return
@@ -321,8 +304,10 @@ class RelationsCache:
             dependent = dependent.replace(type=referenced.External)
             self.add(dependent)
         fire_event(
-            AddLink(
-                dep_key=_make_msg_from_ref_key(dep_key), ref_key=_make_msg_from_ref_key(ref_key)
+            CacheAction(
+                action="add_link",
+                ref_key=_make_msg_from_ref_key(dep_key),
+                ref_key_2=_make_msg_from_ref_key(ref_key),
             )
         )
         with self.lock:
@@ -335,12 +320,18 @@ class RelationsCache:
         :param BaseRelation relation: The underlying relation.
         """
         cached = _CachedRelation(relation)
-        fire_event(AddRelation(relation=_make_ref_key_msg(cached)))
-        fire_event_if(flags.LOG_CACHE_EVENTS, lambda: DumpBeforeAddGraph(dump=self.dump_graph()))
+        fire_event_if(
+            flags.LOG_CACHE_EVENTS,
+            lambda: CacheDumpGraph(before_after="before", action="adding", dump=self.dump_graph()),
+        )
+        fire_event(CacheAction(action="add_relation", ref_key=_make_ref_key_msg(cached)))

         with self.lock:
             self._setdefault(cached)
-        fire_event_if(flags.LOG_CACHE_EVENTS, lambda: DumpAfterAddGraph(dump=self.dump_graph()))
+        fire_event_if(
+            flags.LOG_CACHE_EVENTS,
+            lambda: CacheDumpGraph(before_after="after", action="adding", dump=self.dump_graph()),
+        )

     def _remove_refs(self, keys):
         """Removes all references to all entries in keys. This does not
@@ -368,16 +359,19 @@ class RelationsCache:
        """
         dropped_key = _make_ref_key(relation)
         dropped_key_msg = _make_ref_key_msg(relation)
-        fire_event(DropRelation(dropped=dropped_key_msg))
+        fire_event(CacheAction(action="drop_relation", ref_key=dropped_key_msg))
         with self.lock:
             if dropped_key not in self.relations:
-                fire_event(DropMissingRelation(relation=dropped_key_msg))
+                fire_event(CacheAction(action="drop_missing_relation", ref_key=dropped_key_msg))
                 return
             consequences = self.relations[dropped_key].collect_consequences()
             # convert from a list of _ReferenceKeys to a list of ReferenceKeyMsgs
             consequence_msgs = [_make_msg_from_ref_key(key) for key in consequences]

-            fire_event(DropCascade(dropped=dropped_key_msg, consequences=consequence_msgs))
+            fire_event(
+                CacheAction(
+                    action="drop_cascade", ref_key=dropped_key_msg, ref_list=consequence_msgs
+                )
+            )
             self._remove_refs(consequences)

     def _rename_relation(self, old_key, new_relation):
@@ -400,12 +394,14 @@ class RelationsCache:
         for cached in self.relations.values():
             if cached.is_referenced_by(old_key):
                 fire_event(
-                    UpdateReference(
-                        old_key=_make_ref_key_msg(old_key),
-                        new_key=_make_ref_key_msg(new_key),
-                        cached_key=_make_ref_key_msg(cached.key()),
+                    CacheAction(
+                        action="update_reference",
+                        ref_key=_make_ref_key_msg(old_key),
+                        ref_key_2=_make_ref_key_msg(new_key),
+                        ref_key_3=_make_ref_key_msg(cached.key()),
                     )
                 )

                 cached.rename_key(old_key, new_key)

         self.relations[new_key] = relation
@@ -430,27 +426,12 @@ class RelationsCache:
         if new_key in self.relations:
             # Tell user when collision caused by model names truncated during
             # materialization.
-            match = re.search("__dbt_backup|__dbt_tmp$", new_key.identifier)
-            if match:
-                truncated_model_name_prefix = new_key.identifier[: match.start()]
-                message_addendum = (
-                    "\n\nName collisions can occur when the length of two "
-                    "models' names approach your database's builtin limit. "
-                    "Try restructuring your project such that no two models "
-                    "share the prefix '{}'.".format(truncated_model_name_prefix)
-                    + " Then, clean your warehouse of any removed models."
-                )
-            else:
-                message_addendum = ""
-
-            dbt.exceptions.raise_cache_inconsistent(
-                "in rename, new key {} already in cache: {}{}".format(
-                    new_key, list(self.relations.keys()), message_addendum
-                )
-            )
+            raise TruncatedModelNameCausedCollisionError(new_key, self.relations)

         if old_key not in self.relations:
-            fire_event(TemporaryRelation(key=_make_msg_from_ref_key(old_key)))
+            fire_event(
+                CacheAction(action="temporary_relation", ref_key=_make_msg_from_ref_key(old_key))
+            )
             return False
         return True
@@ -469,13 +450,16 @@ class RelationsCache:
         old_key = _make_ref_key(old)
         new_key = _make_ref_key(new)
         fire_event(
-            RenameSchema(
-                old_key=_make_msg_from_ref_key(old_key), new_key=_make_msg_from_ref_key(new)
+            CacheAction(
+                action="rename_relation",
+                ref_key=_make_msg_from_ref_key(old_key),
+                ref_key_2=_make_msg_from_ref_key(new),
             )
         )

         fire_event_if(
-            flags.LOG_CACHE_EVENTS, lambda: DumpBeforeRenameSchema(dump=self.dump_graph())
+            flags.LOG_CACHE_EVENTS,
+            lambda: CacheDumpGraph(before_after="before", action="rename", dump=self.dump_graph()),
        )

        with self.lock:
@@ -485,7 +469,8 @@ class RelationsCache:
                 self._setdefault(_CachedRelation(new))

         fire_event_if(
-            flags.LOG_CACHE_EVENTS, lambda: DumpAfterRenameSchema(dump=self.dump_graph())
+            flags.LOG_CACHE_EVENTS,
+            lambda: CacheDumpGraph(before_after="after", action="rename", dump=self.dump_graph()),
         )

     def get_relations(self, database: Optional[str], schema: Optional[str]) -> List[Any]:
@@ -505,9 +490,7 @@ class RelationsCache:
         ]

         if None in results:
-            dbt.exceptions.raise_cache_inconsistent(
-                "in get_relations, a None relation was found in the cache!"
-            )
+            raise NoneRelationFoundError()
         return results

     def clear(self):
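The cache hunks above collapse a dozen single-purpose event classes (AddLink, DropRelation, RenameSchema, the Dump* pairs, and so on) into two parameterized events, CacheAction and CacheDumpGraph, discriminated by string fields. A rough sketch of the shape implied by the call sites; the real types are generated message classes, so this dataclass is an assumption:

from dataclasses import dataclass, field
from typing import Any, List, Optional

@dataclass
class CacheAction:
    action: str = "cache action"           # e.g. "add_link", "drop_relation"
    ref_key: Optional[Any] = None          # primary relation involved
    ref_key_2: Optional[Any] = None        # secondary relation, if any
    ref_key_3: Optional[Any] = None        # e.g. the cached key being updated
    ref_list: List[Any] = field(default_factory=list)  # e.g. drop cascades

@dataclass
class CacheDumpGraph:
    before_after: str = "before"           # "before" or "after"
    action: str = "adding"                 # e.g. "adding", "rename"
    dump: dict = field(default_factory=dict)

Log consumers then filter on the action string rather than on a class name, which keeps the event schema stable as new cache operations are added.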
@@ -10,7 +10,7 @@ from dbt.adapters.protocol import AdapterConfig, AdapterProtocol, RelationProtoc
 from dbt.contracts.connection import AdapterRequiredConfig, Credentials
 from dbt.events.functions import fire_event
 from dbt.events.types import AdapterImportError, PluginLoadError
-from dbt.exceptions import InternalException, RuntimeException
+from dbt.exceptions import DbtInternalError, DbtRuntimeError
 from dbt.include.global_project import PACKAGE_PATH as GLOBAL_PROJECT_PATH
 from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME
@@ -34,7 +34,7 @@ class AdapterContainer:
             names = ", ".join(self.plugins.keys())

         message = f"Invalid adapter type {name}! Must be one of {names}"
-        raise RuntimeException(message)
+        raise DbtRuntimeError(message)

     def get_adapter_class_by_name(self, name: str) -> Type[Adapter]:
         plugin = self.get_plugin_by_name(name)
@@ -60,7 +60,7 @@ class AdapterContainer:
             # the user about it via a runtime error
             if exc.name == "dbt.adapters." + name:
                 fire_event(AdapterImportError(exc=str(exc)))
-                raise RuntimeException(f"Could not find adapter type {name}!")
+                raise DbtRuntimeError(f"Could not find adapter type {name}!")
             # otherwise, the error had to have come from some underlying
             # library. Log the stack trace.
@@ -70,7 +70,7 @@ class AdapterContainer:
         plugin_type = plugin.adapter.type()

         if plugin_type != name:
-            raise RuntimeException(
+            raise DbtRuntimeError(
                 f"Expected to find adapter with type named {name}, got "
                 f"adapter with type {plugin_type}"
             )
@@ -132,7 +132,7 @@ class AdapterContainer:
             try:
                 plugin = self.plugins[plugin_name]
             except KeyError:
-                raise InternalException(f"No plugin found for {plugin_name}") from None
+                raise DbtInternalError(f"No plugin found for {plugin_name}") from None
             plugins.append(plugin)
             seen.add(plugin_name)
             for dep in plugin.dependencies:
@@ -151,7 +151,7 @@ class AdapterContainer:
             try:
                 path = self.packages[package_name]
             except KeyError:
-                raise InternalException(f"No internal package listing found for {package_name}")
+                raise DbtInternalError(f"No internal package listing found for {package_name}")
             paths.append(path)
         return paths
@@ -8,7 +8,6 @@ from typing import (
     Generic,
     TypeVar,
     Tuple,
-    Union,
     Dict,
     Any,
 )
@@ -17,7 +16,7 @@ from typing_extensions import Protocol
 import agate

 from dbt.contracts.connection import Connection, AdapterRequiredConfig, AdapterResponse
-from dbt.contracts.graph.nodes import ParsedNode, SourceDefinition, ManifestNode
+from dbt.contracts.graph.nodes import ResultNode, ManifestNode
 from dbt.contracts.graph.model_config import BaseConfig
 from dbt.contracts.graph.manifest import Manifest
 from dbt.contracts.relation import Policy, HasQuoting
@@ -47,11 +46,7 @@ class RelationProtocol(Protocol):
         ...

     @classmethod
-    def create_from(
-        cls: Type[Self],
-        config: HasQuoting,
-        node: Union[ParsedNode, SourceDefinition],
-    ) -> Self:
+    def create_from(cls: Type[Self], config: HasQuoting, node: ResultNode) -> Self:
         ...
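For context on the signature change: RelationProtocol is a structural type, so any relation class whose classmethod matches the new one-line create_from signature satisfies it without inheriting from it. A minimal sketch, assuming typing_extensions.Protocol semantics (MyRelation is a hypothetical example, not from this diff):

from typing import Any, Type, TypeVar
from typing_extensions import Protocol

Self = TypeVar("Self", bound="RelationProtocol")

class RelationProtocol(Protocol):
    @classmethod
    def create_from(cls: Type[Self], config: Any, node: Any) -> Self:
        ...

class MyRelation:
    # Satisfies the protocol structurally; no subclassing required.
    @classmethod
    def create_from(cls, config, node):
        return cls()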
@@ -27,9 +27,7 @@ class SQLConnectionManager(BaseConnectionManager):
     @abc.abstractmethod
     def cancel(self, connection: Connection):
         """Cancel the given connection."""
-        raise dbt.exceptions.NotImplementedException(
-            "`cancel` is not implemented for this adapter!"
-        )
+        raise dbt.exceptions.NotImplementedError("`cancel` is not implemented for this adapter!")

     def cancel_open(self) -> List[str]:
         names = []
@@ -95,7 +93,7 @@ class SQLConnectionManager(BaseConnectionManager):
     @abc.abstractmethod
     def get_response(cls, cursor: Any) -> AdapterResponse:
         """Get the status of the cursor."""
-        raise dbt.exceptions.NotImplementedException(
+        raise dbt.exceptions.NotImplementedError(
             "`get_response` is not implemented for this adapter!"
         )
@@ -151,7 +149,7 @@ class SQLConnectionManager(BaseConnectionManager):
     def begin(self):
         connection = self.get_thread_connection()
         if connection.transaction_open is True:
-            raise dbt.exceptions.InternalException(
+            raise dbt.exceptions.DbtInternalError(
                 'Tried to begin a new transaction on connection "{}", but '
                 "it already had one open!".format(connection.name)
             )
@@ -164,7 +162,7 @@ class SQLConnectionManager(BaseConnectionManager):
     def commit(self):
         connection = self.get_thread_connection()
         if connection.transaction_open is False:
-            raise dbt.exceptions.InternalException(
+            raise dbt.exceptions.DbtInternalError(
                 'Tried to commit transaction on connection "{}", but '
                 "it does not have one open!".format(connection.name)
             )
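One wrinkle in this file: the new name NotImplementedError collides with Python's builtin, which is presumably why the code keeps the fully qualified dbt.exceptions.NotImplementedError spelling. A quick illustration of the distinction (the class here is a stand-in, not the real dbt class):

import builtins

class NotImplementedError(Exception):
    """Stand-in for dbt.exceptions.NotImplementedError; not the builtin."""

# The module-local name now shadows the builtin:
assert NotImplementedError is not builtins.NotImplementedError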
@@ -1,9 +1,8 @@
 import agate
 from typing import Any, Optional, Tuple, Type, List

-import dbt.clients.agate_helper
 from dbt.contracts.connection import Connection
-import dbt.exceptions
+from dbt.exceptions import RelationTypeNullError
 from dbt.adapters.base import BaseAdapter, available
 from dbt.adapters.cache import _make_ref_key_msg
 from dbt.adapters.sql import SQLConnectionManager
@@ -132,9 +131,7 @@ class SQLAdapter(BaseAdapter):

     def drop_relation(self, relation):
         if relation.type is None:
-            dbt.exceptions.raise_compiler_error(
-                "Tried to drop relation {}, but its type is null.".format(relation)
-            )
+            raise RelationTypeNullError(relation)

         self.cache_dropped(relation)
         self.execute_macro(DROP_RELATION_MACRO_NAME, kwargs={"relation": relation})
@@ -46,6 +46,7 @@ def cli_runner():
 @p.version
 @p.version_check
 @p.warn_error
+@p.warn_error_options
 @p.write_json
 def cli(ctx, **kwargs):
     """An ELT tool for managing your SQL transformations and data models.
@@ -1,6 +1,8 @@
 from click import ParamType
 import yaml

+from dbt.helper_types import WarnErrorOptions
+

 class YAML(ParamType):
     """The Click YAML type. Converts YAML strings into objects."""
@@ -17,6 +19,19 @@ class YAML(ParamType):
             self.fail(f"String '{value}' is not valid YAML", param, ctx)


+class WarnErrorOptionsType(YAML):
+    """The Click WarnErrorOptions type. Converts YAML strings into objects."""
+
+    name = "WarnErrorOptionsType"
+
+    def convert(self, value, param, ctx):
+        include_exclude = super().convert(value, param, ctx)
+
+        return WarnErrorOptions(
+            include=include_exclude.get("include", []), exclude=include_exclude.get("exclude", [])
+        )
+
+
 class Truthy(ParamType):
     """The Click Truthy type. Converts strings into a "truthy" type"""
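What the new Click type does, end to end: parse the YAML string, then lift the include/exclude keys into a WarnErrorOptions value. A self-contained sketch of just the conversion step, using plain PyYAML without the Click plumbing:

import yaml

raw = '{"include": "all", "exclude": ["NoNodesForSelectionCriteria"]}'
include_exclude = yaml.safe_load(raw)

print(include_exclude.get("include", []))  # all
print(include_exclude.get("exclude", []))  # ['NoNodesForSelectionCriteria']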
@@ -1,7 +1,7 @@
 from pathlib import Path, PurePath

 import click
-from dbt.cli.option_types import YAML
+from dbt.cli.option_types import YAML, WarnErrorOptionsType
 from dbt.cli.resolvers import default_project_dir, default_profiles_dir
@@ -270,7 +270,7 @@ show = click.option(
 )

 skip_profile_setup = click.option(
-    "--skip-profile-setup", "-s", envvar=None, help="Skip interative profile setup.", is_flag=True
+    "--skip-profile-setup", "-s", envvar=None, help="Skip interactive profile setup.", is_flag=True
 )

 # TODO: The env var and name (reflected in flags) are corrections!
@@ -358,9 +358,20 @@ version_check = click.option(
 )

 warn_error = click.option(
-    "--warn-error/--no-warn-error",
+    "--warn-error",
     envvar="DBT_WARN_ERROR",
-    help="If dbt would normally warn, instead raise an exception. Examples include --models that selects nothing, deprecations, configurations with no associated models, invalid test configurations, and missing sources/refs in tests.",
+    help="If dbt would normally warn, instead raise an exception. Examples include --select that selects nothing, deprecations, configurations with no associated models, invalid test configurations, and missing sources/refs in tests.",
     default=None,
+    flag_value=True,
+)
+
+warn_error_options = click.option(
+    "--warn-error-options",
+    envvar="DBT_WARN_ERROR_OPTIONS",
+    default=None,
+    help="""If dbt would normally warn, instead raise an exception based on include/exclude configuration. Examples include --select that selects nothing, deprecations, configurations with no associated models, invalid test configurations,
+    and missing sources/refs in tests. This argument should be a YAML string, with keys 'include' or 'exclude'. eg. '{"include": "all", "exclude": ["NoNodesForSelectionCriteria"]}'""",
+    type=WarnErrorOptionsType(),
 )

 write_json = click.option(
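Note the switch from a --warn-error/--no-warn-error toggle to a bare flag with default=None and flag_value=True: that makes the setting three-valued, where None means "not set on the CLI", so the env var or other config can still win. A standalone sketch of the same pattern:

import click

@click.command()
@click.option("--warn-error", envvar="DBT_WARN_ERROR", default=None, flag_value=True)
def cli(warn_error):
    # None when the flag is absent, True when --warn-error is passed
    click.echo(repr(warn_error))

if __name__ == "__main__":
    cli()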
@@ -1,7 +1,15 @@
 import re
 from collections import namedtuple

-import dbt.exceptions
+from dbt.exceptions import (
+    BlockDefinitionNotAtTopError,
+    DbtInternalError,
+    MissingCloseTagError,
+    MissingControlFlowStartTagError,
+    NestedTagsError,
+    UnexpectedControlFlowEndTagError,
+    UnexpectedMacroEOFError,
+)


 def regex(pat):
@@ -139,10 +147,7 @@ class TagIterator:
     def _expect_match(self, expected_name, *patterns, **kwargs):
         match = self._first_match(*patterns, **kwargs)
         if match is None:
-            msg = 'unexpected EOF, expected {}, got "{}"'.format(
-                expected_name, self.data[self.pos :]
-            )
-            dbt.exceptions.raise_compiler_error(msg)
+            raise UnexpectedMacroEOFError(expected_name, self.data[self.pos :])
         return match

     def handle_expr(self, match):
@@ -256,7 +261,7 @@ class TagIterator:
         elif block_type_name is not None:
             yield self.handle_tag(match)
         else:
-            raise dbt.exceptions.InternalException(
+            raise DbtInternalError(
                 "Invalid regex match in next_block, expected block start, "
                 "expr start, or comment start"
             )
@@ -265,13 +270,6 @@ class TagIterator:
         return self.find_tags()


-duplicate_tags = (
-    "Got nested tags: {outer.block_type_name} (started at {outer.start}) did "
-    "not have a matching {{% end{outer.block_type_name} %}} before a "
-    "subsequent {inner.block_type_name} was found (started at {inner.start})"
-)
-
-
 _CONTROL_FLOW_TAGS = {
     "if": "endif",
     "for": "endfor",
@@ -319,33 +317,16 @@ class BlockIterator:
                     found = self.stack.pop()
                 else:
                     expected = _CONTROL_FLOW_END_TAGS[tag.block_type_name]
-                    dbt.exceptions.raise_compiler_error(
-                        (
-                            "Got an unexpected control flow end tag, got {} but "
-                            "never saw a preceeding {} (@ {})"
-                        ).format(tag.block_type_name, expected, self.tag_parser.linepos(tag.start))
-                    )
+                    raise UnexpectedControlFlowEndTagError(tag, expected, self.tag_parser)
                 expected = _CONTROL_FLOW_TAGS[found]
                 if expected != tag.block_type_name:
-                    dbt.exceptions.raise_compiler_error(
-                        (
-                            "Got an unexpected control flow end tag, got {} but "
-                            "expected {} next (@ {})"
-                        ).format(tag.block_type_name, expected, self.tag_parser.linepos(tag.start))
-                    )
+                    raise MissingControlFlowStartTagError(tag, expected, self.tag_parser)

             if tag.block_type_name in allowed_blocks:
                 if self.stack:
-                    dbt.exceptions.raise_compiler_error(
-                        (
-                            "Got a block definition inside control flow at {}. "
-                            "All dbt block definitions must be at the top level"
-                        ).format(self.tag_parser.linepos(tag.start))
-                    )
+                    raise BlockDefinitionNotAtTopError(self.tag_parser, tag.start)
                 if self.current is not None:
-                    dbt.exceptions.raise_compiler_error(
-                        duplicate_tags.format(outer=self.current, inner=tag)
-                    )
+                    raise NestedTagsError(outer=self.current, inner=tag)
             if collect_raw_data:
                 raw_data = self.data[self.last_position : tag.start]
                 self.last_position = tag.start
@@ -366,11 +347,7 @@ class BlockIterator:

         if self.current:
             linecount = self.data[: self.current.end].count("\n") + 1
-            dbt.exceptions.raise_compiler_error(
-                ("Reached EOF without finding a close tag for {} (searched from line {})").format(
-                    self.current.block_type_name, linecount
-                )
-            )
+            raise MissingCloseTagError(self.current.block_type_name, linecount)

         if collect_raw_data:
             raw_data = self.data[self.last_position :]
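As elsewhere in this compare, the block-parsing errors now format their own messages from structured arguments. A sketch of one of them, with the message text taken from the removed call site (the base class here is an assumption, not the real dbt hierarchy):

class MissingCloseTagError(Exception):  # stand-in for the dbt.exceptions class
    def __init__(self, block_type_name, linecount):
        self.block_type_name = block_type_name
        self.linecount = linecount
        super().__init__(
            f"Reached EOF without finding a close tag for "
            f"{block_type_name} (searched from line {linecount})"
        )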
@@ -7,7 +7,7 @@ import json
 import dbt.utils
 from typing import Iterable, List, Dict, Union, Optional, Any

-from dbt.exceptions import RuntimeException
+from dbt.exceptions import DbtRuntimeError


 BOM = BOM_UTF8.decode("utf-8")  # '\ufeff'
@@ -168,7 +168,7 @@ class ColumnTypeBuilder(Dict[str, NullableAgateType]):
             return
         elif not isinstance(value, type(existing_type)):
             # actual type mismatch!
-            raise RuntimeException(
+            raise DbtRuntimeError(
                 f"Tables contain columns with the same names ({key}), "
                 f"but different types ({value} vs {existing_type})"
             )
@@ -14,10 +14,10 @@ from dbt.events.types import (
|
||||
)
|
||||
from dbt.exceptions import (
|
||||
CommandResultError,
|
||||
RuntimeException,
|
||||
bad_package_spec,
|
||||
raise_git_cloning_error,
|
||||
raise_git_cloning_problem,
|
||||
GitCheckoutError,
|
||||
GitCloningError,
|
||||
UnknownGitCloningProblemError,
|
||||
DbtRuntimeError,
|
||||
)
|
||||
from packaging import version
|
||||
|
||||
@@ -27,16 +27,6 @@ def _is_commit(revision: str) -> bool:
|
||||
return bool(re.match(r"\b[0-9a-f]{40}\b", revision))
|
||||
|
||||
|
||||
def _raise_git_cloning_error(repo, revision, error):
|
||||
stderr = error.stderr.strip()
|
||||
if "usage: git" in stderr:
|
||||
stderr = stderr.split("\nusage: git")[0]
|
||||
if re.match("fatal: destination path '(.+)' already exists", stderr):
|
||||
raise_git_cloning_error(error)
|
||||
|
||||
bad_package_spec(repo, revision, stderr)
|
||||
|
||||
|
||||
def clone(repo, cwd, dirname=None, remove_git_dir=False, revision=None, subdirectory=None):
|
||||
has_revision = revision is not None
|
||||
is_commit = _is_commit(revision or "")
|
||||
@@ -64,7 +54,7 @@ def clone(repo, cwd, dirname=None, remove_git_dir=False, revision=None, subdirec
|
||||
try:
|
||||
result = run_cmd(cwd, clone_cmd, env={"LC_ALL": "C"})
|
||||
except CommandResultError as exc:
|
||||
_raise_git_cloning_error(repo, revision, exc)
|
||||
raise GitCloningError(repo, revision, exc)
|
||||
|
||||
if subdirectory:
|
||||
cwd_subdir = os.path.join(cwd, dirname or "")
|
||||
@@ -72,7 +62,7 @@ def clone(repo, cwd, dirname=None, remove_git_dir=False, revision=None, subdirec
|
||||
try:
|
||||
run_cmd(cwd_subdir, clone_cmd_subdir)
|
||||
except CommandResultError as exc:
|
||||
_raise_git_cloning_error(repo, revision, exc)
|
||||
raise GitCloningError(repo, revision, exc)
|
||||
|
||||
if remove_git_dir:
|
||||
rmdir(os.path.join(dirname, ".git"))
|
||||
@@ -115,8 +105,7 @@ def checkout(cwd, repo, revision=None):
|
||||
try:
|
||||
return _checkout(cwd, repo, revision)
|
||||
except CommandResultError as exc:
|
||||
stderr = exc.stderr.strip()
|
||||
bad_package_spec(repo, revision, stderr)
|
||||
raise GitCheckoutError(repo=repo, revision=revision, error=exc)
|
||||
|
||||
|
||||
def get_current_sha(cwd):
|
||||
@@ -145,7 +134,7 @@ def clone_and_checkout(
|
||||
err = exc.stderr
|
||||
exists = re.match("fatal: destination path '(.+)' already exists", err)
|
||||
if not exists:
|
||||
raise_git_cloning_problem(repo)
|
||||
raise UnknownGitCloningProblemError(repo)
|
||||
|
||||
directory = None
|
||||
start_sha = None
|
||||
@@ -155,7 +144,7 @@ def clone_and_checkout(
|
||||
else:
|
||||
matches = re.match("Cloning into '(.+)'", err.decode("utf-8"))
|
||||
if matches is None:
|
||||
raise RuntimeException(f'Error cloning {repo} - never saw "Cloning into ..." from git')
|
||||
raise DbtRuntimeError(f'Error cloning {repo} - never saw "Cloning into ..." from git')
|
||||
directory = matches.group(1)
|
||||
fire_event(GitProgressPullingNewDependency(dir=directory))
|
||||
full_path = os.path.join(cwd, directory)
|
||||
|
||||
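The git helpers stop pre-processing stderr at the call site and instead hand the whole CommandResultError to the new exception, which can derive its message from it. A sketch of the wrapping (attribute names follow the call sites above; the message text is an assumption):

class GitCloningError(Exception):  # stand-in for the dbt.exceptions class
    def __init__(self, repo, revision, error):
        self.repo = repo
        self.revision = revision
        self.error = error  # the original CommandResultError, stderr included
        super().__init__(f"Error cloning {repo} at revision {revision}")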
@@ -28,13 +28,17 @@ from dbt.clients._jinja_blocks import BlockIterator, BlockData, BlockTag
 from dbt.contracts.graph.nodes import GenericTestNode

 from dbt.exceptions import (
-    InternalException,
-    raise_compiler_error,
-    CompilationException,
-    invalid_materialization_argument,
+    CaughtMacroError,
+    CaughtMacroErrorWithNodeError,
+    CompilationError,
+    DbtInternalError,
+    MaterializationArgError,
+    JinjaRenderingError,
     MacroReturn,
-    JinjaRenderingException,
-    UndefinedMacroException,
+    MaterializtionMacroNotUsedError,
+    NoSupportedLanguagesFoundError,
+    UndefinedCompilationError,
+    UndefinedMacroError,
 )
 from dbt import flags
 from dbt.node_types import ModelLanguage
@@ -157,9 +161,9 @@ def quoted_native_concat(nodes):
     except (ValueError, SyntaxError, MemoryError):
         result = raw
     if isinstance(raw, BoolMarker) and not isinstance(result, bool):
-        raise JinjaRenderingException(f"Could not convert value '{raw!s}' into type 'bool'")
+        raise JinjaRenderingError(f"Could not convert value '{raw!s}' into type 'bool'")
     if isinstance(raw, NumberMarker) and not _is_number(result):
-        raise JinjaRenderingException(f"Could not convert value '{raw!s}' into type 'number'")
+        raise JinjaRenderingError(f"Could not convert value '{raw!s}' into type 'number'")

     return result
@@ -237,12 +241,12 @@ class BaseMacroGenerator:
         try:
             yield
         except (TypeError, jinja2.exceptions.TemplateRuntimeError) as e:
-            raise_compiler_error(str(e))
+            raise CaughtMacroError(e)

     def call_macro(self, *args, **kwargs):
         # called from __call__ methods
         if self.context is None:
-            raise InternalException("Context is still None in call_macro!")
+            raise DbtInternalError("Context is still None in call_macro!")
         assert self.context is not None

         macro = self.get_macro()
@@ -269,7 +273,7 @@ class MacroStack(threading.local):
     def pop(self, name):
         got = self.call_stack.pop()
         if got != name:
-            raise InternalException(f"popped {got}, expected {name}")
+            raise DbtInternalError(f"popped {got}, expected {name}")


 class MacroGenerator(BaseMacroGenerator):
@@ -296,8 +300,8 @@ class MacroGenerator(BaseMacroGenerator):
         try:
             yield
         except (TypeError, jinja2.exceptions.TemplateRuntimeError) as e:
-            raise_compiler_error(str(e), self.macro)
-        except CompilationException as e:
+            raise CaughtMacroErrorWithNodeError(exc=e, node=self.macro)
+        except CompilationError as e:
             e.stack.append(self.macro)
             raise e
@@ -376,7 +380,7 @@ class MaterializationExtension(jinja2.ext.Extension):
                 node.defaults.append(languages)

             else:
-                invalid_materialization_argument(materialization_name, target.name)
+                raise MaterializationArgError(materialization_name, target.name)

         if SUPPORTED_LANG_ARG not in node.args:
             node.args.append(SUPPORTED_LANG_ARG)
@@ -451,7 +455,7 @@ def create_undefined(node=None):
             return self

         def __reduce__(self):
-            raise_compiler_error(f"{self.name} is undefined", node=node)
+            raise UndefinedCompilationError(name=self.name, node=node)

     return Undefined
@@ -509,10 +513,10 @@ def catch_jinja(node=None) -> Iterator[None]:
         yield
     except jinja2.exceptions.TemplateSyntaxError as e:
         e.translated = False
-        raise CompilationException(str(e), node) from e
+        raise CompilationError(str(e), node) from e
     except jinja2.exceptions.UndefinedError as e:
-        raise UndefinedMacroException(str(e), node) from e
-    except CompilationException as exc:
+        raise UndefinedMacroError(str(e), node) from e
+    except CompilationError as exc:
         exc.add_node(node)
         raise
@@ -651,13 +655,13 @@ def add_rendered_test_kwargs(

 def get_supported_languages(node: jinja2.nodes.Macro) -> List[ModelLanguage]:
     if "materialization" not in node.name:
-        raise_compiler_error("Only materialization macros can be used with this function")
+        raise MaterializtionMacroNotUsedError(node=node)

     no_kwargs = not node.defaults
     no_langs_found = SUPPORTED_LANG_ARG not in node.args

     if no_kwargs or no_langs_found:
-        raise_compiler_error(f"No supported_languages found in materialization macro {node.name}")
+        raise NoSupportedLanguagesFoundError(node=node)

     lang_idx = node.args.index(SUPPORTED_LANG_ARG)
     # indexing defaults from the end
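The catch_jinja hunk is easiest to read as a whole: it is a contextmanager that translates jinja2 errors into dbt's (now renamed) exception types and tags them with the offending node. A compressed, self-contained sketch under those assumptions (the two classes are stand-ins for dbt.exceptions):

from contextlib import contextmanager
import jinja2

class CompilationError(Exception):
    def add_node(self, node):
        ...

class UndefinedMacroError(CompilationError):
    ...

@contextmanager
def catch_jinja(node=None):
    try:
        yield
    except jinja2.exceptions.TemplateSyntaxError as e:
        e.translated = False
        raise CompilationError(str(e)) from e
    except jinja2.exceptions.UndefinedError as e:
        raise UndefinedMacroError(str(e)) from e
    except CompilationError as exc:
        exc.add_node(node)
        raise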
@@ -1,6 +1,6 @@
 import jinja2
 from dbt.clients.jinja import get_environment
-from dbt.exceptions import raise_compiler_error
+from dbt.exceptions import MacroNamespaceNotStringError, MacroNameNotStringError


 def statically_extract_macro_calls(string, ctx, db_wrapper=None):
@@ -117,20 +117,14 @@ def statically_parse_adapter_dispatch(func_call, ctx, db_wrapper):
                     func_name = kwarg.value.value
                     possible_macro_calls.append(func_name)
                 else:
-                    raise_compiler_error(
-                        f"The macro_name parameter ({kwarg.value.value}) "
-                        "to adapter.dispatch was not a string"
-                    )
+                    raise MacroNameNotStringError(kwarg_value=kwarg.value.value)
             elif kwarg.key == "macro_namespace":
                 # This will remain to enable static resolution
                 kwarg_type = type(kwarg.value).__name__
                 if kwarg_type == "Const":
                     macro_namespace = kwarg.value.value
                 else:
-                    raise_compiler_error(
-                        "The macro_namespace parameter to adapter.dispatch "
-                        f"is a {kwarg_type}, not a string"
-                    )
+                    raise MacroNamespaceNotStringError(kwarg_type)

     # positional arguments
     if packages_arg:
@@ -19,8 +19,8 @@ from dbt.events.types import (
|
||||
SystemErrorRetrievingModTime,
|
||||
SystemCouldNotWrite,
|
||||
SystemExecutingCmd,
|
||||
SystemStdOutMsg,
|
||||
SystemStdErrMsg,
|
||||
SystemStdOut,
|
||||
SystemStdErr,
|
||||
SystemReportReturnCode,
|
||||
)
|
||||
import dbt.exceptions
|
||||
@@ -144,7 +144,8 @@ def make_symlink(source: str, link_path: str) -> None:
|
||||
Create a symlink at `link_path` referring to `source`.
|
||||
"""
|
||||
if not supports_symlinks():
|
||||
dbt.exceptions.system_error("create a symbolic link")
|
||||
# TODO: why not import these at top?
|
||||
raise dbt.exceptions.SymbolicLinkError()
|
||||
|
||||
os.symlink(source, link_path)
|
||||
|
||||
@@ -411,7 +412,7 @@ def _interpret_oserror(exc: OSError, cwd: str, cmd: List[str]) -> NoReturn:
|
||||
_handle_posix_error(exc, cwd, cmd)
|
||||
|
||||
# this should not be reachable, raise _something_ at least!
|
||||
raise dbt.exceptions.InternalException(
|
||||
raise dbt.exceptions.DbtInternalError(
|
||||
"Unhandled exception in _interpret_oserror: {}".format(exc)
|
||||
)
|
||||
|
||||
@@ -440,8 +441,8 @@ def run_cmd(cwd: str, cmd: List[str], env: Optional[Dict[str, Any]] = None) -> T
|
||||
except OSError as exc:
|
||||
_interpret_oserror(exc, cwd, cmd)
|
||||
|
||||
fire_event(SystemStdOutMsg(bmsg=out))
|
||||
fire_event(SystemStdErrMsg(bmsg=err))
|
||||
fire_event(SystemStdOut(bmsg=out))
|
||||
fire_event(SystemStdErr(bmsg=err))
|
||||
|
||||
if proc.returncode != 0:
|
||||
fire_event(SystemReportReturnCode(returncode=proc.returncode))
|
||||
|
||||
@@ -60,4 +60,4 @@ def load_yaml_text(contents, path=None):
         else:
             error = str(e)

-        raise dbt.exceptions.ValidationException(error)
+        raise dbt.exceptions.DbtValidationError(error)
@@ -13,16 +13,17 @@ from dbt.clients.system import make_directory
 from dbt.context.providers import generate_runtime_model_context
 from dbt.contracts.graph.manifest import Manifest, UniqueID
 from dbt.contracts.graph.nodes import (
-    ParsedNode,
     ManifestNode,
+    ManifestSQLNode,
     GenericTestNode,
     GraphMemberNode,
     InjectedCTE,
     SeedNode,
 )
 from dbt.exceptions import (
-    dependency_not_found,
-    InternalException,
-    RuntimeException,
+    GraphDependencyNotFoundError,
+    DbtInternalError,
+    DbtRuntimeError,
 )
 from dbt.graph import Graph
 from dbt.events.functions import fire_event
@@ -47,6 +48,7 @@ def print_compile_stats(stats):
         NodeType.Source: "source",
         NodeType.Exposure: "exposure",
         NodeType.Metric: "metric",
+        NodeType.Entity: "entity",
     }

     results = {k: 0 for k in names.keys()}
@@ -82,6 +84,8 @@ def _generate_stats(manifest: Manifest):
         stats[exposure.resource_type] += 1
     for metric in manifest.metrics.values():
         stats[metric.resource_type] += 1
+    for entity in manifest.entities.values():
+        stats[entity.resource_type] += 1
     for macro in manifest.macros.values():
         stats[macro.resource_type] += 1
     return stats
@@ -167,7 +171,7 @@ class Compiler:
     # a dict for jinja rendering of SQL
     def _create_node_context(
         self,
-        node: ManifestNode,
+        node: ManifestSQLNode,
         manifest: Manifest,
         extra_context: Dict[str, Any],
     ) -> Dict[str, Any]:
@@ -186,14 +190,6 @@ class Compiler:
         relation_cls = adapter.Relation
         return relation_cls.add_ephemeral_prefix(name)

-    def _get_relation_name(self, node: ParsedNode):
-        relation_name = None
-        if node.is_relational and not node.is_ephemeral_model:
-            adapter = get_adapter(self.config)
-            relation_cls = adapter.Relation
-            relation_name = str(relation_cls.create_from(self.config, node))
-        return relation_name
-
     def _inject_ctes_into_sql(self, sql: str, ctes: List[InjectedCTE]) -> str:
         """
         `ctes` is a list of InjectedCTEs like:
@@ -252,10 +248,10 @@ class Compiler:

     def _recursively_prepend_ctes(
         self,
-        model: ManifestNode,
+        model: ManifestSQLNode,
         manifest: Manifest,
         extra_context: Optional[Dict[str, Any]],
-    ) -> Tuple[ManifestNode, List[InjectedCTE]]:
+    ) -> Tuple[ManifestSQLNode, List[InjectedCTE]]:
         """This method is called by the 'compile_node' method. Starting
         from the node that it is passed in, it will recursively call
         itself using the 'extra_ctes'. The 'ephemeral' models do
@@ -264,13 +260,14 @@ class Compiler:
         inserting CTEs into the SQL.
         """
         if model.compiled_code is None:
-            raise RuntimeException("Cannot inject ctes into an unparsed node", model)
+            raise DbtRuntimeError("Cannot inject ctes into an unparsed node", model)
         if model.extra_ctes_injected:
             return (model, model.extra_ctes)

         # Just to make it plain that nothing is actually injected for this case
         if not model.extra_ctes:
-            model.extra_ctes_injected = True
+            if not isinstance(model, SeedNode):
+                model.extra_ctes_injected = True
+                manifest.update_node(model)
             return (model, model.extra_ctes)
@@ -284,14 +281,15 @@ class Compiler:
         # ephemeral model.
         for cte in model.extra_ctes:
             if cte.id not in manifest.nodes:
-                raise InternalException(
+                raise DbtInternalError(
                     f"During compilation, found a cte reference that "
                     f"could not be resolved: {cte.id}"
                 )
             cte_model = manifest.nodes[cte.id]
+            assert not isinstance(cte_model, SeedNode)

             if not cte_model.is_ephemeral_model:
-                raise InternalException(f"{cte.id} is not ephemeral")
+                raise DbtInternalError(f"{cte.id} is not ephemeral")

             # This model has already been compiled, so it's been
             # through here before
@@ -332,16 +330,16 @@ class Compiler:

         return model, prepended_ctes

-    # Sets compiled fields in the ManifestNode passed in,
+    # Sets compiled fields in the ManifestSQLNode passed in,
     # creates a "context" dictionary for jinja rendering,
     # and then renders the "compiled_code" using the node, the
     # raw_code and the context.
     def _compile_node(
         self,
-        node: ManifestNode,
+        node: ManifestSQLNode,
         manifest: Manifest,
         extra_context: Optional[Dict[str, Any]] = None,
-    ) -> ManifestNode:
+    ) -> ManifestSQLNode:
         if extra_context is None:
             extra_context = {}
@@ -383,8 +381,6 @@ class Compiler:
             node,
         )

-        node.relation_name = self._get_relation_name(node)
-
         node.compiled = True

         return node
@@ -405,8 +401,10 @@ class Compiler:
             linker.dependency(node.unique_id, (manifest.sources[dependency].unique_id))
         elif dependency in manifest.metrics:
             linker.dependency(node.unique_id, (manifest.metrics[dependency].unique_id))
+        elif dependency in manifest.entities:
+            linker.dependency(node.unique_id, (manifest.entities[dependency].unique_id))
         else:
-            dependency_not_found(node, dependency)
+            raise GraphDependencyNotFoundError(node, dependency)

     def link_graph(self, linker: Linker, manifest: Manifest, add_test_edges: bool = False):
         for source in manifest.sources.values():
@@ -417,6 +415,8 @@ class Compiler:
             self.link_node(linker, exposure, manifest)
         for metric in manifest.metrics.values():
             self.link_node(linker, metric, manifest)
+        for entity in manifest.entities.values():
+            self.link_node(linker, entity, manifest)

         cycle = linker.find_cycles()
@@ -494,8 +494,11 @@ class Compiler:
         return Graph(linker.graph)

     # writes the "compiled_code" into the target/compiled directory
-    def _write_node(self, node: ManifestNode) -> ManifestNode:
-        if not node.extra_ctes_injected or node.resource_type == NodeType.Snapshot:
+    def _write_node(self, node: ManifestSQLNode) -> ManifestSQLNode:
+        if not node.extra_ctes_injected or node.resource_type in (
+            NodeType.Snapshot,
+            NodeType.Seed,
+        ):
             return node
         fire_event(WritingInjectedSQLForNode(node_info=get_node_info()))
@@ -507,11 +510,11 @@ class Compiler:

     def compile_node(
         self,
-        node: ManifestNode,
+        node: ManifestSQLNode,
         manifest: Manifest,
         extra_context: Optional[Dict[str, Any]] = None,
         write: bool = True,
-    ) -> ManifestNode:
+    ) -> ManifestSQLNode:
         """This is the main entry point into this code. It's called by
         CompileRunner.compile, GenericRPCRunner.compile, and
         RunTask.get_hook_sql. It calls '_compile_node' to convert
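The compilation changes are this branch's actual feature work: a new entities collection is threaded through stats, linking, and dependency resolution, everywhere mirroring metrics. A self-contained sketch of the resulting lookup order in link_node (names are stand-ins; the real code calls linker.dependency and raises GraphDependencyNotFoundError):

def find_dependency_id(manifest, dependency):
    # Order mirrors the elif chain above: nodes, sources, metrics, entities.
    for collection in (manifest.nodes, manifest.sources,
                       manifest.metrics, manifest.entities):
        if dependency in collection:
            return collection[dependency].unique_id
    return None  # the real code raises GraphDependencyNotFoundError here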
@@ -9,12 +9,14 @@ from dbt.clients.system import load_file_contents
 from dbt.clients.yaml_helper import load_yaml_text
 from dbt.contracts.connection import Credentials, HasCredentials
 from dbt.contracts.project import ProfileConfig, UserConfig
-from dbt.exceptions import CompilationException
-from dbt.exceptions import DbtProfileError
-from dbt.exceptions import DbtProjectError
-from dbt.exceptions import ValidationException
-from dbt.exceptions import RuntimeException
-from dbt.exceptions import validator_error_message
+from dbt.exceptions import (
+    CompilationError,
+    DbtProfileError,
+    DbtProjectError,
+    DbtValidationError,
+    DbtRuntimeError,
+    ProfileConfigError,
+)
 from dbt.events.types import MissingProfileTarget
 from dbt.events.functions import fire_event
 from dbt.utils import coerce_dict_str
@@ -58,9 +60,9 @@ def read_profile(profiles_dir: str) -> Dict[str, Any]:
                 msg = f"The profiles.yml file at {path} is empty"
                 raise DbtProfileError(INVALID_PROFILE_MESSAGE.format(error_string=msg))
             return yaml_content
-        except ValidationException as e:
+        except DbtValidationError as e:
             msg = INVALID_PROFILE_MESSAGE.format(error_string=e)
-            raise ValidationException(msg) from e
+            raise DbtValidationError(msg) from e

     return {}
@@ -73,7 +75,7 @@ def read_user_config(directory: str) -> UserConfig:
         if user_config is not None:
             UserConfig.validate(user_config)
             return UserConfig.from_dict(user_config)
-    except (RuntimeException, ValidationError):
+    except (DbtRuntimeError, ValidationError):
         pass
     return UserConfig()
@@ -156,7 +158,7 @@ class Profile(HasCredentials):
             dct = self.to_profile_info(serialize_credentials=True)
             ProfileConfig.validate(dct)
         except ValidationError as exc:
-            raise DbtProfileError(validator_error_message(exc)) from exc
+            raise ProfileConfigError(exc) from exc

     @staticmethod
     def _credentials_from_profile(
@@ -180,8 +182,8 @@ class Profile(HasCredentials):
             data = cls.translate_aliases(profile)
             cls.validate(data)
             credentials = cls.from_dict(data)
-        except (RuntimeException, ValidationError) as e:
-            msg = str(e) if isinstance(e, RuntimeException) else e.message
+        except (DbtRuntimeError, ValidationError) as e:
+            msg = str(e) if isinstance(e, DbtRuntimeError) else e.message
             raise DbtProfileError(
                 'Credentials in profile "{}", target "{}" invalid: {}'.format(
                     profile_name, target_name, msg
@@ -297,7 +299,7 @@ class Profile(HasCredentials):

         try:
             profile_data = renderer.render_data(raw_profile_data)
-        except CompilationException as exc:
+        except CompilationError as exc:
             raise DbtProfileError(str(exc)) from exc
         return target_name, profile_data
@@ -16,19 +16,19 @@ import hashlib
 import os

 from dbt import flags, deprecations
-from dbt.clients.system import resolve_path_from_base
-from dbt.clients.system import path_exists
-from dbt.clients.system import load_file_contents
+from dbt.clients.system import path_exists, resolve_path_from_base, load_file_contents
 from dbt.clients.yaml_helper import load_yaml_text
 from dbt.contracts.connection import QueryComment
-from dbt.exceptions import DbtProjectError
-from dbt.exceptions import SemverException
-from dbt.exceptions import validator_error_message
-from dbt.exceptions import RuntimeException
+from dbt.exceptions import (
+    DbtProjectError,
+    SemverError,
+    ProjectContractBrokenError,
+    ProjectContractError,
+    DbtRuntimeError,
+)
 from dbt.graph import SelectionSpec
 from dbt.helper_types import NoValue
-from dbt.semver import VersionSpecifier
-from dbt.semver import versions_compatible
+from dbt.semver import VersionSpecifier, versions_compatible
 from dbt.version import get_installed_version
 from dbt.utils import MultiDict
 from dbt.node_types import NodeType
@@ -219,7 +219,7 @@ def _get_required_version(

     try:
         dbt_version = _parse_versions(dbt_raw_version)
-    except SemverException as e:
+    except SemverError as e:
         raise DbtProjectError(str(e)) from e

     if verify_version:
@@ -325,7 +325,7 @@ class PartialProject(RenderComponents):
             ProjectContract.validate(rendered.project_dict)
             cfg = ProjectContract.from_dict(rendered.project_dict)
         except ValidationError as e:
-            raise DbtProjectError(validator_error_message(e)) from e
+            raise ProjectContractError(e) from e
         # name/version are required in the Project definition, so we can assume
         # they are present
         name = cfg.name
@@ -381,6 +381,7 @@ class PartialProject(RenderComponents):
         sources: Dict[str, Any]
         tests: Dict[str, Any]
         metrics: Dict[str, Any]
+        entities: Dict[str, Any]
         exposures: Dict[str, Any]
         vars_value: VarProvider
@@ -391,6 +392,7 @@ class PartialProject(RenderComponents):
         sources = cfg.sources
         tests = cfg.tests
         metrics = cfg.metrics
+        entities = cfg.entities
         exposures = cfg.exposures
         if cfg.vars is None:
             vars_dict: Dict[str, Any] = {}
@@ -446,6 +448,7 @@ class PartialProject(RenderComponents):
             sources=sources,
             tests=tests,
             metrics=metrics,
+            entities=entities,
             exposures=exposures,
             vars=vars_value,
             config_version=cfg.config_version,
@@ -550,6 +553,7 @@ class Project:
     sources: Dict[str, Any]
     tests: Dict[str, Any]
     metrics: Dict[str, Any]
+    entities: Dict[str, Any]
     exposures: Dict[str, Any]
     vars: VarProvider
     dbt_version: List[VersionSpecifier]
@@ -624,6 +628,7 @@ class Project:
             "sources": self.sources,
             "tests": self.tests,
             "metrics": self.metrics,
+            "entities": self.entities,
             "exposures": self.exposures,
             "vars": self.vars.to_dict(),
             "require-dbt-version": [v.to_version_string() for v in self.dbt_version],
@@ -642,7 +647,7 @@ class Project:
         try:
             ProjectContract.validate(self.to_project_config())
         except ValidationError as e:
-            raise DbtProjectError(validator_error_message(e)) from e
+            raise ProjectContractBrokenError(e) from e

     @classmethod
     def partial_load(cls, project_root: str, *, verify_version: bool = False) -> PartialProject:
@@ -667,7 +672,7 @@ class Project:

     def get_selector(self, name: str) -> Union[SelectionSpec, bool]:
         if name not in self.selectors:
-            raise RuntimeException(
+            raise DbtRuntimeError(
                 f"Could not find selector named {name}, expected one of {list(self.selectors)}"
             )
         return self.selectors[name]["definition"]
@@ -8,7 +8,7 @@ from dbt.context.target import TargetContext
 from dbt.context.secret import SecretContext, SECRET_PLACEHOLDER
 from dbt.context.base import BaseContext
 from dbt.contracts.connection import HasCredentials
-from dbt.exceptions import DbtProjectError, CompilationException, RecursionException
+from dbt.exceptions import DbtProjectError, CompilationError, RecursionError
 from dbt.utils import deep_map_render
@@ -40,14 +40,14 @@ class BaseRenderer:
         try:
             with catch_jinja():
                 return get_rendered(value, self.context, native=True)
-        except CompilationException as exc:
+        except CompilationError as exc:
             msg = f"Could not render {value}: {exc.msg}"
-            raise CompilationException(msg) from exc
+            raise CompilationError(msg) from exc

     def render_data(self, data: Dict[str, Any]) -> Dict[str, Any]:
         try:
             return deep_map_render(self.render_entry, data)
-        except RecursionException:
+        except RecursionError:
             raise DbtProjectError(
                 f"Cycle detected: {self.name} input has a reference to itself", project=data
             )
@@ -159,7 +159,8 @@ class DbtProjectYamlRenderer(BaseRenderer):
         if first in {"seeds", "models", "snapshots", "tests"}:
             keypath_parts = {(k.lstrip("+ ") if isinstance(k, str) else k) for k in keypath}
             # model-level hooks
-            if "pre-hook" in keypath_parts or "post-hook" in keypath_parts:
+            late_rendered_hooks = {"pre-hook", "post-hook", "pre_hook", "post_hook"}
+            if keypath_parts.intersection(late_rendered_hooks):
                 return False

         return True
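The renderer hunk widens the hook check to both spellings, so pre_hook/post_hook keys are now also deferred to render time rather than rendered with the rest of dbt_project.yml. The predicate reduces to a set intersection:

late_rendered_hooks = {"pre-hook", "post-hook", "pre_hook", "post_hook"}

keypath_parts = {"models", "my_project", "pre_hook"}
print(bool(keypath_parts.intersection(late_rendered_hooks)))  # True: defer rendering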
@@ -25,10 +25,11 @@ from dbt.contracts.project import Configuration, UserConfig
 from dbt.contracts.relation import ComponentName
 from dbt.dataclass_schema import ValidationError
 from dbt.exceptions import (
+    ConfigContractBrokenError,
     DbtProjectError,
-    RuntimeException,
-    raise_compiler_error,
-    validator_error_message,
+    NonUniquePackageNameError,
+    DbtRuntimeError,
+    UninstalledPackagesFoundError,
 )
 from dbt.events.functions import warn_or_error
 from dbt.events.types import UnusedResourceConfigPath
@@ -116,6 +117,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
             sources=project.sources,
             tests=project.tests,
             metrics=project.metrics,
+            entities=project.entities,
             exposures=project.exposures,
             vars=project.vars,
             config_version=project.config_version,
@@ -186,7 +188,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
         try:
             Configuration.validate(self.serialize())
         except ValidationError as e:
-            raise DbtProjectError(validator_error_message(e)) from e
+            raise ConfigContractBrokenError(e) from e

     @classmethod
     def _get_rendered_profile(
@@ -257,7 +259,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
         :param args: The arguments as parsed from the cli.
         :raises DbtProjectError: If the project is invalid or missing.
         :raises DbtProfileError: If the profile is invalid or missing.
-        :raises ValidationException: If the cli variables are invalid.
+        :raises DbtValidationError: If the cli variables are invalid.
         """
         project, profile = cls.collect_parts(args)
@@ -311,6 +313,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
             "sources": self._get_config_paths(self.sources),
             "tests": self._get_config_paths(self.tests),
             "metrics": self._get_config_paths(self.metrics),
+            "entities": self._get_config_paths(self.entities),
             "exposures": self._get_config_paths(self.exposures),
         }
@@ -352,22 +355,15 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
                 count_packages_specified = len(self.packages.packages)  # type: ignore
                 count_packages_installed = len(tuple(self._get_project_directories()))
                 if count_packages_specified > count_packages_installed:
-                    raise_compiler_error(
-                        f"dbt found {count_packages_specified} package(s) "
-                        f"specified in packages.yml, but only "
-                        f"{count_packages_installed} package(s) installed "
-                        f'in {self.packages_install_path}. Run "dbt deps" to '
-                        f"install package dependencies."
+                    raise UninstalledPackagesFoundError(
+                        count_packages_specified,
+                        count_packages_installed,
+                        self.packages_install_path,
                     )
             project_paths = itertools.chain(internal_packages, self._get_project_directories())
             for project_name, project in self.load_projects(project_paths):
                 if project_name in all_projects:
-                    raise_compiler_error(
-                        f"dbt found more than one package with the name "
-                        f'"{project_name}" included in this project. Package '
-                        f"names must be unique in a project. Please rename "
-                        f"one of these packages."
-                    )
+                    raise NonUniquePackageNameError(project_name)
                 all_projects[project_name] = project
             self.dependencies = all_projects
         return self.dependencies
@@ -432,7 +428,7 @@ class UnsetProfile(Profile):

     def __getattribute__(self, name):
         if name in {"profile_name", "target_name", "threads"}:
-            raise RuntimeException(f'Error: disallowed attribute "{name}" - no profile!')
+            raise DbtRuntimeError(f'Error: disallowed attribute "{name}" - no profile!')

         return Profile.__getattribute__(self, name)
@@ -459,7 +455,7 @@ class UnsetProfileConfig(RuntimeConfig):
     def __getattribute__(self, name):
         # Override __getattribute__ to check that the attribute isn't 'banned'.
         if name in {"profile_name", "target_name"}:
-            raise RuntimeException(f'Error: disallowed attribute "{name}" - no profile!')
+            raise DbtRuntimeError(f'Error: disallowed attribute "{name}" - no profile!')

         # avoid every attribute access triggering infinite recursion
         return RuntimeConfig.__getattribute__(self, name)
@@ -506,6 +502,7 @@ class UnsetProfileConfig(RuntimeConfig):
             "sources": self.sources,
             "tests": self.tests,
             "metrics": self.metrics,
+            "entities": self.entities,
             "exposures": self.exposures,
             "vars": self.vars.to_dict(),
             "require-dbt-version": [v.to_version_string() for v in self.dbt_version],
@@ -568,6 +565,7 @@ class UnsetProfileConfig(RuntimeConfig):
             sources=project.sources,
             tests=project.tests,
             metrics=project.metrics,
+            entities=project.entities,
             exposures=project.exposures,
             vars=project.vars,
             config_version=project.config_version,
@@ -608,7 +606,7 @@ class UnsetProfileConfig(RuntimeConfig):
         :param args: The arguments as parsed from the cli.
         :raises DbtProjectError: If the project is invalid or missing.
         :raises DbtProfileError: If the profile is invalid or missing.
-        :raises ValidationException: If the cli variables are invalid.
+        :raises DbtValidationError: If the cli variables are invalid.
         """
         project, profile = cls.collect_parts(args)
@@ -12,7 +12,7 @@ from dbt.clients.system import (
     resolve_path_from_base,
 )
 from dbt.contracts.selection import SelectorFile
-from dbt.exceptions import DbtSelectorsError, RuntimeException
+from dbt.exceptions import DbtSelectorsError, DbtRuntimeError
 from dbt.graph import parse_from_selectors_definition, SelectionSpec
 from dbt.graph.selector_spec import SelectionCriteria

@@ -46,7 +46,7 @@ class SelectorConfig(Dict[str, Dict[str, Union[SelectionSpec, bool]]]):
                 f"yaml-selectors",
                 result_type="invalid_selector",
             ) from exc
-        except RuntimeException as exc:
+        except DbtRuntimeError as exc:
             raise DbtSelectorsError(
                 f"Could not read selector file data: {exc}",
                 result_type="invalid_selector",
@@ -62,7 +62,7 @@ class SelectorConfig(Dict[str, Dict[str, Union[SelectionSpec, bool]]]):
     ) -> "SelectorConfig":
         try:
             rendered = renderer.render_data(data)
-        except (ValidationError, RuntimeException) as exc:
+        except (ValidationError, DbtRuntimeError) as exc:
             raise DbtSelectorsError(
                 f"Could not render selector data: {exc}",
                 result_type="invalid_selector",
@@ -77,7 +77,7 @@ class SelectorConfig(Dict[str, Dict[str, Union[SelectionSpec, bool]]]):
     ) -> "SelectorConfig":
         try:
             data = load_yaml_text(load_file_contents(str(path)))
-        except (ValidationError, RuntimeException) as exc:
+        except (ValidationError, DbtRuntimeError) as exc:
             raise DbtSelectorsError(
                 f"Could not read selector file: {exc}",
                 result_type="invalid_selector",
@@ -8,24 +8,24 @@ from dbt.clients import yaml_helper
 from dbt.config import Profile, Project, read_user_config
 from dbt.config.renderer import DbtProjectYamlRenderer, ProfileRenderer
 from dbt.events.functions import fire_event
-from dbt.events.types import InvalidVarsYAML
-from dbt.exceptions import ValidationException, raise_compiler_error
+from dbt.events.types import InvalidOptionYAML
+from dbt.exceptions import DbtValidationError, OptionNotYamlDictError


 def parse_cli_vars(var_string: str) -> Dict[str, Any]:
+    return parse_cli_yaml_string(var_string, "vars")
+
+
+def parse_cli_yaml_string(var_string: str, cli_option_name: str) -> Dict[str, Any]:
     try:
         cli_vars = yaml_helper.load_yaml_text(var_string)
         var_type = type(cli_vars)
         if var_type is dict:
             return cli_vars
         else:
-            type_name = var_type.__name__
-            raise_compiler_error(
-                "The --vars argument must be a YAML dictionary, but was "
-                "of type '{}'".format(type_name)
-            )
-    except ValidationException:
-        fire_event(InvalidVarsYAML())
+            raise OptionNotYamlDictError(var_type, cli_option_name)
+    except DbtValidationError:
+        fire_event(InvalidOptionYAML(option_name=cli_option_name))
         raise
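The new parse_cli_yaml_string generalizes the old parse_cli_vars body so any CLI option can accept a YAML dictionary, with the option name threaded into the error event. A standalone sketch of the same behavior, using PyYAML directly instead of dbt's yaml_helper, event, and exception types (the TypeError below is an illustrative stand-in for OptionNotYamlDictError):

# Standalone sketch of the parse_cli_yaml_string behavior; names simplified.
from typing import Any, Dict

import yaml


def parse_cli_yaml_string(var_string: str, cli_option_name: str) -> Dict[str, Any]:
    parsed = yaml.safe_load(var_string)
    if isinstance(parsed, dict):
        return parsed
    # dbt raises OptionNotYamlDictError here; TypeError stands in for it.
    raise TypeError(
        f"The --{cli_option_name} argument must be a YAML dictionary, "
        f"but was of type '{type(parsed).__name__}'"
    )


print(parse_cli_yaml_string("{my_var: 5}", "vars"))  # {'my_var': 5}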
@@ -10,11 +10,12 @@ from dbt.clients.yaml_helper import yaml, safe_load, SafeLoader, Loader, Dumper
 from dbt.constants import SECRET_ENV_PREFIX, DEFAULT_ENV_PLACEHOLDER
 from dbt.contracts.graph.nodes import Resource
 from dbt.exceptions import (
-    CompilationException,
+    SecretEnvVarLocationError,
+    EnvVarMissingError,
     MacroReturn,
-    raise_compiler_error,
-    raise_parsing_error,
-    disallow_secret_env_var,
+    RequiredVarNotFoundError,
+    SetStrictWrongTypeError,
+    ZipStrictWrongTypeError,
 )
 from dbt.events.functions import fire_event, get_invocation_id
 from dbt.events.types import JinjaLogInfo, JinjaLogDebug
@@ -128,7 +129,6 @@ class ContextMeta(type):


 class Var:
-    UndefinedVarError = "Required var '{}' not found in config:\nVars supplied to {} = {}"
     _VAR_NOTSET = object()

     def __init__(
@@ -153,10 +153,7 @@ class Var:
         return "<Configuration>"

     def get_missing_var(self, var_name):
-        dct = {k: self._merged[k] for k in self._merged}
-        pretty_vars = json.dumps(dct, sort_keys=True, indent=4)
-        msg = self.UndefinedVarError.format(var_name, self.node_name, pretty_vars)
-        raise_compiler_error(msg, self._node)
+        raise RequiredVarNotFoundError(var_name, self._merged, self._node)

     def has_var(self, var_name: str):
         return var_name in self._merged
@@ -300,7 +297,7 @@ class BaseContext(metaclass=ContextMeta):
         """
         return_value = None
         if var.startswith(SECRET_ENV_PREFIX):
-            disallow_secret_env_var(var)
+            raise SecretEnvVarLocationError(var)
         if var in os.environ:
             return_value = os.environ[var]
         elif default is not None:
@@ -315,8 +312,7 @@ class BaseContext(metaclass=ContextMeta):

             return return_value
         else:
-            msg = f"Env var required but not provided: '{var}'"
-            raise_parsing_error(msg)
+            raise EnvVarMissingError(var)


 if os.environ.get("DBT_MACRO_DEBUGGING"):
@@ -497,7 +493,7 @@ class BaseContext(metaclass=ContextMeta):
         try:
             return set(value)
         except TypeError as e:
-            raise CompilationException(e)
+            raise SetStrictWrongTypeError(e)

     @contextmember("zip")
     @staticmethod
@@ -541,7 +537,7 @@ class BaseContext(metaclass=ContextMeta):
         try:
             return zip(*args)
         except TypeError as e:
-            raise CompilationException(e)
+            raise ZipStrictWrongTypeError(e)

     @contextmember
     @staticmethod
@@ -638,9 +634,8 @@ class BaseContext(metaclass=ContextMeta):
         {% endif %}

         This supports all flags defined in flags submodule (core/dbt/flags.py)
-        TODO: Replace with object that provides read-only access to flag values
         """
-        return flags
+        return flags.get_flag_obj()

     @contextmember
     @staticmethod
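The env_var hunks in this file and in the files below all apply the same lookup order: reject secret-prefixed names, then check os.environ, then fall back to the caller's default, and raise EnvVarMissingError otherwise. A runnable sketch of that order (SECRET_ENV_PREFIX matches dbt's constant; the exception classes are simplified stand-ins for the dbt.exceptions types):

# Sketch of the env_var lookup order shared by these context classes.
import os
from typing import Optional

SECRET_ENV_PREFIX = "DBT_ENV_SECRET_"


class SecretEnvVarLocationError(Exception):
    pass


class EnvVarMissingError(Exception):
    pass


def env_var(var: str, default: Optional[str] = None) -> str:
    if var.startswith(SECRET_ENV_PREFIX):
        # secret env vars are only allowed in a few rendering contexts
        raise SecretEnvVarLocationError(var)
    if var in os.environ:
        return os.environ[var]
    if default is not None:
        return default
    raise EnvVarMissingError(var)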
@@ -8,7 +8,7 @@ from dbt.utils import MultiDict

 from dbt.context.base import contextproperty, contextmember, Var
 from dbt.context.target import TargetContext
-from dbt.exceptions import raise_parsing_error, disallow_secret_env_var
+from dbt.exceptions import EnvVarMissingError, SecretEnvVarLocationError


 class ConfiguredContext(TargetContext):
@@ -86,7 +86,7 @@ class SchemaYamlContext(ConfiguredContext):
     def env_var(self, var: str, default: Optional[str] = None) -> str:
         return_value = None
         if var.startswith(SECRET_ENV_PREFIX):
-            disallow_secret_env_var(var)
+            raise SecretEnvVarLocationError(var)
         if var in os.environ:
             return_value = os.environ[var]
         elif default is not None:
@@ -104,8 +104,7 @@ class SchemaYamlContext(ConfiguredContext):

             return return_value
         else:
-            msg = f"Env var required but not provided: '{var}'"
-            raise_parsing_error(msg)
+            raise EnvVarMissingError(var)


 class MacroResolvingContext(ConfiguredContext):
@@ -5,7 +5,7 @@ from typing import List, Iterator, Dict, Any, TypeVar, Generic

 from dbt.config import RuntimeConfig, Project, IsFQNResource
 from dbt.contracts.graph.model_config import BaseConfig, get_config_for, _listify
-from dbt.exceptions import InternalException
+from dbt.exceptions import DbtInternalError
 from dbt.node_types import NodeType
 from dbt.utils import fqn_search
@@ -45,6 +45,8 @@ class UnrenderedConfig(ConfigSource):
             model_configs = unrendered.get("tests")
         elif resource_type == NodeType.Metric:
             model_configs = unrendered.get("metrics")
+        elif resource_type == NodeType.Entity:
+            model_configs = unrendered.get("entities")
         elif resource_type == NodeType.Exposure:
             model_configs = unrendered.get("exposures")
         else:
@@ -70,6 +72,8 @@ class RenderedConfig(ConfigSource):
             model_configs = self.project.tests
         elif resource_type == NodeType.Metric:
             model_configs = self.project.metrics
+        elif resource_type == NodeType.Entity:
+            model_configs = self.project.entities
         elif resource_type == NodeType.Exposure:
             model_configs = self.project.exposures
         else:
@@ -89,7 +93,7 @@ class BaseContextConfigGenerator(Generic[T]):
             return self._active_project
         dependencies = self._active_project.load_dependencies()
         if project_name not in dependencies:
-            raise InternalException(
+            raise DbtInternalError(
                 f"Project name {project_name} not found in dependencies "
                 f"(found {list(dependencies)})"
             )
@@ -287,14 +291,14 @@ class ContextConfig:

         elif k in BaseConfig.mergebehavior["update"]:
             if not isinstance(v, dict):
-                raise InternalException(f"expected dict, got {v}")
+                raise DbtInternalError(f"expected dict, got {v}")
             if k in config_call_dict and isinstance(config_call_dict[k], dict):
                 config_call_dict[k].update(v)
             else:
                 config_call_dict[k] = v
         elif k in BaseConfig.mergebehavior["dict_key_append"]:
             if not isinstance(v, dict):
-                raise InternalException(f"expected dict, got {v}")
+                raise DbtInternalError(f"expected dict, got {v}")
             if k in config_call_dict:  # should always be a dict
                 for key, value in v.items():
                     extend = False
@@ -1,8 +1,8 @@
 from typing import Any, Dict, Union

 from dbt.exceptions import (
-    doc_invalid_args,
-    doc_target_not_found,
+    DocTargetNotFoundError,
+    DocArgsError,
 )
 from dbt.config.runtime import RuntimeConfig
 from dbt.contracts.graph.manifest import Manifest
@@ -52,7 +52,7 @@ class DocsRuntimeContext(SchemaYamlContext):
         elif len(args) == 2:
             doc_package_name, doc_name = args
         else:
-            doc_invalid_args(self.node, args)
+            raise DocArgsError(self.node, args)

         # Documentation
         target_doc = self.manifest.resolve_doc(
@@ -68,7 +68,9 @@ class DocsRuntimeContext(SchemaYamlContext):
             # TODO CT-211
             source_file.add_node(self.node.unique_id)  # type: ignore[union-attr]
         else:
-            doc_target_not_found(self.node, doc_name, doc_package_name)
+            raise DocTargetNotFoundError(
+                node=self.node, target_doc_name=doc_name, target_doc_package=doc_package_name
+            )

         return target_doc.block_contents
core/dbt/context/exceptions_jinja.py (new file, 144 lines)
@@ -0,0 +1,144 @@
+import functools
+from typing import NoReturn
+
+from dbt.events.functions import warn_or_error
+from dbt.events.helpers import env_secrets, scrub_secrets
+from dbt.events.types import JinjaLogWarning
+
+from dbt.exceptions import (
+    DbtRuntimeError,
+    MissingConfigError,
+    MissingMaterializationError,
+    MissingRelationError,
+    AmbiguousAliasError,
+    AmbiguousCatalogMatchError,
+    CacheInconsistencyError,
+    DataclassNotDictError,
+    CompilationError,
+    DbtDatabaseError,
+    DependencyNotFoundError,
+    DependencyError,
+    DuplicatePatchPathError,
+    DuplicateResourceNameError,
+    PropertyYMLError,
+    NotImplementedError,
+    RelationWrongTypeError,
+)
+
+
+def warn(msg, node=None):
+    warn_or_error(JinjaLogWarning(msg=msg), node=node)
+    return ""
+
+
+def missing_config(model, name) -> NoReturn:
+    raise MissingConfigError(unique_id=model.unique_id, name=name)
+
+
+def missing_materialization(model, adapter_type) -> NoReturn:
+    raise MissingMaterializationError(
+        materialization=model.config.materialized, adapter_type=adapter_type
+    )
+
+
+def missing_relation(relation, model=None) -> NoReturn:
+    raise MissingRelationError(relation, model)
+
+
+def raise_ambiguous_alias(node_1, node_2, duped_name=None) -> NoReturn:
+    raise AmbiguousAliasError(node_1, node_2, duped_name)
+
+
+def raise_ambiguous_catalog_match(unique_id, match_1, match_2) -> NoReturn:
+    raise AmbiguousCatalogMatchError(unique_id, match_1, match_2)
+
+
+def raise_cache_inconsistent(message) -> NoReturn:
+    raise CacheInconsistencyError(message)
+
+
+def raise_dataclass_not_dict(obj) -> NoReturn:
+    raise DataclassNotDictError(obj)
+
+
+def raise_compiler_error(msg, node=None) -> NoReturn:
+    raise CompilationError(msg, node)
+
+
+def raise_database_error(msg, node=None) -> NoReturn:
+    raise DbtDatabaseError(msg, node)
+
+
+def raise_dep_not_found(node, node_description, required_pkg) -> NoReturn:
+    raise DependencyNotFoundError(node, node_description, required_pkg)
+
+
+def raise_dependency_error(msg) -> NoReturn:
+    raise DependencyError(scrub_secrets(msg, env_secrets()))
+
+
+def raise_duplicate_patch_name(patch_1, existing_patch_path) -> NoReturn:
+    raise DuplicatePatchPathError(patch_1, existing_patch_path)
+
+
+def raise_duplicate_resource_name(node_1, node_2) -> NoReturn:
+    raise DuplicateResourceNameError(node_1, node_2)
+
+
+def raise_invalid_property_yml_version(path, issue) -> NoReturn:
+    raise PropertyYMLError(path, issue)
+
+
+def raise_not_implemented(msg) -> NoReturn:
+    raise NotImplementedError(msg)
+
+
+def relation_wrong_type(relation, expected_type, model=None) -> NoReturn:
+    raise RelationWrongTypeError(relation, expected_type, model)
+
+
+# Update this when a new function should be added to the
+# dbt context's `exceptions` key!
+CONTEXT_EXPORTS = {
+    fn.__name__: fn
+    for fn in [
+        warn,
+        missing_config,
+        missing_materialization,
+        missing_relation,
+        raise_ambiguous_alias,
+        raise_ambiguous_catalog_match,
+        raise_cache_inconsistent,
+        raise_dataclass_not_dict,
+        raise_compiler_error,
+        raise_database_error,
+        raise_dep_not_found,
+        raise_dependency_error,
+        raise_duplicate_patch_name,
+        raise_duplicate_resource_name,
+        raise_invalid_property_yml_version,
+        raise_not_implemented,
+        relation_wrong_type,
+    ]
+}
+
+
+# wraps context based exceptions in node info
+def wrapper(model):
+    def wrap(func):
+        @functools.wraps(func)
+        def inner(*args, **kwargs):
+            try:
+                return func(*args, **kwargs)
+            except DbtRuntimeError as exc:
+                exc.add_node(model)
+                raise exc
+
+        return inner
+
+    return wrap
+
+
+def wrapped_exports(model):
+    wrap = wrapper(model)
+    return {name: wrap(export) for name, export in CONTEXT_EXPORTS.items()}
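The new module above keeps the old raise_* helpers available to the Jinja `exceptions` namespace, and wrapped_exports wraps each one so a DbtRuntimeError raised inside a macro is annotated with the calling node before propagating. A condensed, runnable sketch of that wrapping (simplified stand-in types, not dbt's real classes):

# Sketch of the wrapped_exports mechanism with simplified types.
import functools


class DbtRuntimeError(Exception):
    def __init__(self, msg):
        super().__init__(msg)
        self.node = None

    def add_node(self, node):
        self.node = node


def raise_compiler_error(msg, node=None):
    raise DbtRuntimeError(msg)


def wrapped(model, func):
    @functools.wraps(func)
    def inner(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except DbtRuntimeError as exc:
            exc.add_node(model)  # attach node info for error reporting
            raise

    return inner


fn = wrapped("model.my_project.my_model", raise_compiler_error)
try:
    fn("boom")
except DbtRuntimeError as e:
    print(e.node)  # model.my_project.my_model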
@@ -1,6 +1,6 @@
 from typing import Dict, MutableMapping, Optional
 from dbt.contracts.graph.nodes import Macro
-from dbt.exceptions import raise_duplicate_macro_name, raise_compiler_error
+from dbt.exceptions import DuplicateMacroNameError, PackageNotFoundForMacroError
 from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME
 from dbt.clients.jinja import MacroGenerator

@@ -86,7 +86,7 @@ class MacroResolver:
             package_namespaces[macro.package_name] = namespace

         if macro.name in namespace:
-            raise_duplicate_macro_name(macro, macro, macro.package_name)
+            raise DuplicateMacroNameError(macro, macro, macro.package_name)
         package_namespaces[macro.package_name][macro.name] = macro

     def add_macro(self, macro: Macro):
@@ -187,7 +187,7 @@ class TestMacroNamespace:
         elif package_name in self.macro_resolver.packages:
             macro = self.macro_resolver.packages[package_name].get(name)
         else:
-            raise_compiler_error(f"Could not find package '{package_name}'")
+            raise PackageNotFoundForMacroError(package_name)
         if not macro:
             return None
         macro_func = MacroGenerator(macro, self.ctx, self.node, self.thread_ctx)
|
||||
from dbt.clients.jinja import MacroGenerator, MacroStack
|
||||
from dbt.contracts.graph.nodes import Macro
|
||||
from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME
|
||||
from dbt.exceptions import raise_duplicate_macro_name, raise_compiler_error
|
||||
from dbt.exceptions import DuplicateMacroNameError, PackageNotFoundForMacroError
|
||||
|
||||
|
||||
FlatNamespace = Dict[str, MacroGenerator]
|
||||
@@ -75,7 +75,7 @@ class MacroNamespace(Mapping):
|
||||
elif package_name in self.packages:
|
||||
return self.packages[package_name].get(name)
|
||||
else:
|
||||
raise_compiler_error(f"Could not find package '{package_name}'")
|
||||
raise PackageNotFoundForMacroError(package_name)
|
||||
|
||||
|
||||
# This class builds the MacroNamespace by adding macros to
|
||||
@@ -122,7 +122,7 @@ class MacroNamespaceBuilder:
|
||||
hierarchy[macro.package_name] = namespace
|
||||
|
||||
if macro.name in namespace:
|
||||
raise_duplicate_macro_name(macro_func.macro, macro, macro.package_name)
|
||||
raise DuplicateMacroNameError(macro_func.macro, macro, macro.package_name)
|
||||
hierarchy[macro.package_name][macro.name] = macro_func
|
||||
|
||||
def add_macro(self, macro: Macro, ctx: Dict[str, Any]):
|
||||
|
||||
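Both namespaces above resolve a macro by package with the same shape: a known package yields its macro dict, while an unknown one now raises PackageNotFoundForMacroError instead of a generic compiler error. A minimal sketch of that lookup (stub types, not dbt's classes):

# Sketch of the package-scoped macro lookup pattern; stub types only.
from typing import Dict, Optional


class PackageNotFoundForMacroError(Exception):
    def __init__(self, package_name: str):
        super().__init__(f"Could not find package '{package_name}'")


class MacroNamespaceSketch:
    def __init__(self, packages: Dict[str, Dict[str, object]]):
        self.packages = packages

    def get_from_package(self, package_name: str, name: str) -> Optional[object]:
        if package_name in self.packages:
            return self.packages[package_name].get(name)
        raise PackageNotFoundForMacroError(package_name)


ns = MacroNamespaceSketch({"dbt_utils": {"star": "<macro>"}})
print(ns.get_from_package("dbt_utils", "star"))  # <macro>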
@@ -19,19 +19,21 @@ from dbt.adapters.factory import get_adapter, get_adapter_package_names, get_ada
 from dbt.clients import agate_helper
 from dbt.clients.jinja import get_rendered, MacroGenerator, MacroStack
 from dbt.config import RuntimeConfig, Project
-from .base import contextmember, contextproperty, Var
-from .configured import FQNLookup
-from .context_config import ContextConfig
+from dbt.constants import SECRET_ENV_PREFIX, DEFAULT_ENV_PLACEHOLDER
+from dbt.context.base import contextmember, contextproperty, Var
+from dbt.context.configured import FQNLookup
+from dbt.context.context_config import ContextConfig
+from dbt.context.exceptions_jinja import wrapped_exports
 from dbt.context.macro_resolver import MacroResolver, TestMacroNamespace
-from .macros import MacroNamespaceBuilder, MacroNamespace
-from .manifest import ManifestContext
+from dbt.context.macros import MacroNamespaceBuilder, MacroNamespace
+from dbt.context.manifest import ManifestContext
 from dbt.contracts.connection import AdapterResponse
 from dbt.contracts.graph.manifest import Manifest, Disabled
 from dbt.contracts.graph.nodes import (
     Macro,
     Exposure,
     Metric,
+    Entity,
     SeedNode,
     SourceDefinition,
     Resource,
@@ -40,21 +42,28 @@ from dbt.contracts.graph.nodes import (
 from dbt.contracts.graph.metrics import MetricReference, ResolvedMetricReference
 from dbt.events.functions import get_metadata_vars
 from dbt.exceptions import (
-    CompilationException,
-    ParsingException,
-    InternalException,
-    ValidationException,
-    RuntimeException,
-    macro_invalid_dispatch_arg,
-    missing_config,
-    raise_compiler_error,
-    ref_invalid_args,
-    metric_invalid_args,
-    target_not_found,
-    ref_bad_context,
-    wrapped_exports,
-    raise_parsing_error,
-    disallow_secret_env_var,
+    CompilationError,
+    ConflictingConfigKeysError,
+    SecretEnvVarLocationError,
+    EnvVarMissingError,
+    DbtInternalError,
+    InlineModelConfigError,
+    NumberSourceArgsError,
+    PersistDocsValueTypeError,
+    LoadAgateTableNotSeedError,
+    LoadAgateTableValueError,
+    MacroDispatchArgError,
+    MacrosSourcesUnWriteableError,
+    MetricArgsError,
+    MissingConfigError,
+    OperationsCannotRefEphemeralNodesError,
+    PackageNotInDepsError,
+    ParsingError,
+    RefBadContextError,
+    RefArgsError,
+    DbtRuntimeError,
+    TargetNotFoundError,
+    DbtValidationError,
 )
 from dbt.config import IsFQNResource
 from dbt.node_types import NodeType, ModelLanguage
@@ -136,10 +145,10 @@ class BaseDatabaseWrapper:
                 f'`adapter.dispatch("{suggest_macro_name}", '
                 f'macro_namespace="{suggest_macro_namespace}")`?'
             )
-            raise CompilationException(msg)
+            raise CompilationError(msg)

         if packages is not None:
-            raise macro_invalid_dispatch_arg(macro_name)
+            raise MacroDispatchArgError(macro_name)

         namespace = macro_namespace
@@ -151,7 +160,7 @@ class BaseDatabaseWrapper:
             search_packages = [self.config.project_name, namespace]
         else:
             # Not a string and not None so must be a list
-            raise CompilationException(
+            raise CompilationError(
                 f"In adapter.dispatch, got a list macro_namespace argument "
                 f'("{macro_namespace}"), but macro_namespace should be None or a string.'
             )
@@ -164,8 +173,8 @@ class BaseDatabaseWrapper:
             try:
                 # this uses the namespace from the context
                 macro = self._namespace.get_from_package(package_name, search_name)
-            except CompilationException:
-                # Only raise CompilationException if macro is not found in
+            except CompilationError:
+                # Only raise CompilationError if macro is not found in
                 # any package
                 macro = None
@@ -179,7 +188,7 @@ class BaseDatabaseWrapper:

         searched = ", ".join(repr(a) for a in attempts)
         msg = f"In dispatch: No macro named '{macro_name}' found\n    Searched for: {searched}"
-        raise CompilationException(msg)
+        raise CompilationError(msg)


 class BaseResolver(metaclass=abc.ABCMeta):
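For context on the dispatch hunks above: adapter.dispatch tries adapter-prefixed macro names in each candidate package and, if nothing matches, raises a compilation error listing everything it searched. A simplified, runnable sketch of that loop (the stub namespace and names are illustrative, not dbt's API):

# Sketch of the dispatch search loop; stub types only.
from typing import Dict, Optional


class CompilationError(Exception):
    pass


class MacroNamespaceStub:
    def __init__(self, macros: Dict[str, str]):
        self._macros = macros

    def get_from_package(self, package: Optional[str], name: str) -> Optional[str]:
        return self._macros.get(name)


def dispatch(namespace: MacroNamespaceStub, macro_name: str, adapter_type: str):
    attempts = []
    # try "<adapter>__<macro>" first, then fall back to "default__<macro>"
    for prefix in (adapter_type, "default"):
        search_name = f"{prefix}__{macro_name}"
        attempts.append(search_name)
        macro = namespace.get_from_package(None, search_name)
        if macro is not None:
            return macro
    searched = ", ".join(repr(a) for a in attempts)
    raise CompilationError(
        f"In dispatch: No macro named '{macro_name}' found\n    Searched for: {searched}"
    )


ns = MacroNamespaceStub({"default__current_timestamp": "<macro>"})
print(dispatch(ns, "current_timestamp", "postgres"))  # <macro>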
@@ -215,12 +224,12 @@ class BaseRefResolver(BaseResolver):

     def validate_args(self, name: str, package: Optional[str]):
         if not isinstance(name, str):
-            raise CompilationException(
+            raise CompilationError(
                 f"The name argument to ref() must be a string, got {type(name)}"
             )

         if package is not None and not isinstance(package, str):
-            raise CompilationException(
+            raise CompilationError(
                 f"The package argument to ref() must be a string or None, got {type(package)}"
             )
@@ -233,7 +242,7 @@ class BaseRefResolver(BaseResolver):
         elif len(args) == 2:
             package, name = args
         else:
-            ref_invalid_args(self.model, args)
+            raise RefArgsError(node=self.model, args=args)
         self.validate_args(name, package)
         return self.resolve(name, package)
@@ -245,21 +254,19 @@ class BaseSourceResolver(BaseResolver):

     def validate_args(self, source_name: str, table_name: str):
         if not isinstance(source_name, str):
-            raise CompilationException(
+            raise CompilationError(
                 f"The source name (first) argument to source() must be a "
                 f"string, got {type(source_name)}"
             )
         if not isinstance(table_name, str):
-            raise CompilationException(
+            raise CompilationError(
                 f"The table name (second) argument to source() must be a "
                 f"string, got {type(table_name)}"
             )

     def __call__(self, *args: str) -> RelationProxy:
         if len(args) != 2:
-            raise_compiler_error(
-                f"source() takes exactly two arguments ({len(args)} given)", self.model
-            )
+            raise NumberSourceArgsError(args, node=self.model)
         self.validate_args(args[0], args[1])
         return self.resolve(args[0], args[1])
@@ -276,12 +283,12 @@ class BaseMetricResolver(BaseResolver):

     def validate_args(self, name: str, package: Optional[str]):
         if not isinstance(name, str):
-            raise CompilationException(
+            raise CompilationError(
                 f"The name argument to metric() must be a string, got {type(name)}"
             )

         if package is not None and not isinstance(package, str):
-            raise CompilationException(
+            raise CompilationError(
                 f"The package argument to metric() must be a string or None, got {type(package)}"
             )
@@ -294,7 +301,7 @@ class BaseMetricResolver(BaseResolver):
         elif len(args) == 2:
             package, name = args
         else:
-            metric_invalid_args(self.model, args)
+            raise MetricArgsError(node=self.model, args=args)
         self.validate_args(name, package)
         return self.resolve(name, package)
@@ -315,12 +322,7 @@ class ParseConfigObject(Config):
             if oldkey in config:
                 newkey = oldkey.replace("_", "-")
                 if newkey in config:
-                    raise_compiler_error(
-                        'Invalid config, has conflicting keys "{}" and "{}"'.format(
-                            oldkey, newkey
-                        ),
-                        self.model,
-                    )
+                    raise ConflictingConfigKeysError(oldkey, newkey, node=self.model)
                 config[newkey] = config.pop(oldkey)
         return config
@@ -330,14 +332,14 @@ class ParseConfigObject(Config):
         elif len(args) == 0 and len(kwargs) > 0:
             opts = kwargs
         else:
-            raise_compiler_error("Invalid inline model config", self.model)
+            raise InlineModelConfigError(node=self.model)

         opts = self._transform_config(opts)

         # it's ok to have a parse context with no context config, but you must
         # not call it!
         if self.context_config is None:
-            raise RuntimeException("At parse time, did not receive a context config")
+            raise DbtRuntimeError("At parse time, did not receive a context config")
         self.context_config.add_config_call(opts)
         return ""
@@ -378,7 +380,7 @@ class RuntimeConfigObject(Config):
         else:
             result = self.model.config.get(name, default)
         if result is _MISSING:
-            missing_config(self.model, name)
+            raise MissingConfigError(unique_id=self.model.unique_id, name=name)
         return result

     def require(self, name, validator=None):
@@ -400,20 +402,14 @@ class RuntimeConfigObject(Config):
     def persist_relation_docs(self) -> bool:
         persist_docs = self.get("persist_docs", default={})
         if not isinstance(persist_docs, dict):
-            raise_compiler_error(
-                f"Invalid value provided for 'persist_docs'. Expected dict "
-                f"but received {type(persist_docs)}"
-            )
+            raise PersistDocsValueTypeError(persist_docs)

         return persist_docs.get("relation", False)

     def persist_column_docs(self) -> bool:
         persist_docs = self.get("persist_docs", default={})
         if not isinstance(persist_docs, dict):
-            raise_compiler_error(
-                f"Invalid value provided for 'persist_docs'. Expected dict "
-                f"but received {type(persist_docs)}"
-            )
+            raise PersistDocsValueTypeError(persist_docs)

         return persist_docs.get("columns", False)
@@ -472,7 +468,7 @@ class RuntimeRefResolver(BaseRefResolver):
         )

         if target_model is None or isinstance(target_model, Disabled):
-            target_not_found(
+            raise TargetNotFoundError(
                 node=self.model,
                 target_name=target_name,
                 target_kind="node",
@@ -494,7 +490,7 @@ class RuntimeRefResolver(BaseRefResolver):
     ) -> None:
         if resolved.unique_id not in self.model.depends_on.nodes:
             args = self._repack_args(target_name, target_package)
-            ref_bad_context(self.model, args)
+            raise RefBadContextError(node=self.model, args=args)


 class OperationRefResolver(RuntimeRefResolver):
@@ -510,12 +506,7 @@ class OperationRefResolver(RuntimeRefResolver):
         if target_model.is_ephemeral_model:
             # In operations, we can't ref() ephemeral nodes, because
             # Macros do not support set_cte
-            raise_compiler_error(
-                "Operations can not ref() ephemeral nodes, but {} is ephemeral".format(
-                    target_model.name
-                ),
-                self.model,
-            )
+            raise OperationsCannotRefEphemeralNodesError(target_model.name, node=self.model)
         else:
             return super().create_relation(target_model, name)
@@ -538,7 +529,7 @@ class RuntimeSourceResolver(BaseSourceResolver):
         )

         if target_source is None or isinstance(target_source, Disabled):
-            target_not_found(
+            raise TargetNotFoundError(
                 node=self.model,
                 target_name=f"{source_name}.{table_name}",
                 target_kind="source",
@@ -565,7 +556,7 @@ class RuntimeMetricResolver(BaseMetricResolver):
         )

         if target_metric is None or isinstance(target_metric, Disabled):
-            target_not_found(
+            raise TargetNotFoundError(
                 node=self.model,
                 target_name=target_name,
                 target_kind="metric",
@@ -594,7 +585,7 @@ class ModelConfiguredVar(Var):
         if package_name != self._config.project_name:
             if package_name not in dependencies:
                 # I don't think this is actually reachable
-                raise_compiler_error(f"Node package named {package_name} not found!", self._node)
+                raise PackageNotInDepsError(package_name, node=self._node)
             yield dependencies[package_name]
         yield self._config
@@ -684,7 +675,7 @@ class ProviderContext(ManifestContext):
         context_config: Optional[ContextConfig],
     ) -> None:
         if provider is None:
-            raise InternalException(f"Invalid provider given to context: {provider}")
+            raise DbtInternalError(f"Invalid provider given to context: {provider}")
         # mypy appeasement - we know it'll be a RuntimeConfig
         self.config: RuntimeConfig
         self.model: Union[Macro, ManifestNode] = model
@@ -761,7 +752,7 @@ class ProviderContext(ManifestContext):
             return
         elif value == arg:
             return
-        raise ValidationException(
+        raise DbtValidationError(
            'Expected value "{}" to be one of {}'.format(value, ",".join(map(str, args)))
         )
@@ -777,7 +768,7 @@ class ProviderContext(ManifestContext):
     def write(self, payload: str) -> str:
         # macros/source defs aren't 'writeable'.
         if isinstance(self.model, (Macro, SourceDefinition)):
-            raise_compiler_error('cannot "write" macros or sources')
+            raise MacrosSourcesUnWriteableError(node=self.model)
         self.model.build_path = self.model.write_node(self.config.target_path, "run", payload)
         return ""
@@ -792,21 +783,19 @@ class ProviderContext(ManifestContext):
         try:
             return func(*args, **kwargs)
         except Exception:
-            raise_compiler_error(message_if_exception, self.model)
+            raise CompilationError(message_if_exception, self.model)

     @contextmember
     def load_agate_table(self) -> agate.Table:
         if not isinstance(self.model, SeedNode):
-            raise_compiler_error(
-                "can only load_agate_table for seeds (got a {})".format(self.model.resource_type)
-            )
+            raise LoadAgateTableNotSeedError(self.model.resource_type, node=self.model)
         assert self.model.root_path
         path = os.path.join(self.model.root_path, self.model.original_file_path)
         column_types = self.model.config.column_types
         try:
             table = agate_helper.from_csv(path, text_columns=column_types)
         except ValueError as e:
-            raise_compiler_error(str(e))
+            raise LoadAgateTableValueError(e, node=self.model)
         table.original_abspath = os.path.abspath(path)
         return table
@@ -1197,7 +1186,7 @@ class ProviderContext(ManifestContext):
             "https://docs.getdbt.com/reference/dbt-jinja-functions/dispatch)"
             " adapter_macro was called for: {macro_name}".format(macro_name=name)
         )
-        raise CompilationException(msg)
+        raise CompilationError(msg)

     @contextmember
     def env_var(self, var: str, default: Optional[str] = None) -> str:
@@ -1208,7 +1197,7 @@ class ProviderContext(ManifestContext):
         """
         return_value = None
         if var.startswith(SECRET_ENV_PREFIX):
-            disallow_secret_env_var(var)
+            raise SecretEnvVarLocationError(var)
         if var in os.environ:
             return_value = os.environ[var]
         elif default is not None:
@@ -1241,8 +1230,7 @@ class ProviderContext(ManifestContext):
                 source_file.env_vars.append(var)  # type: ignore[union-attr]
             return return_value
         else:
-            msg = f"Env var required but not provided: '{var}'"
-            raise_parsing_error(msg)
+            raise EnvVarMissingError(var)

     @contextproperty
     def selected_resources(self) -> List[str]:
@@ -1261,7 +1249,7 @@ class ProviderContext(ManifestContext):
             and self.context_macro_stack.call_stack[1] == "macro.dbt.statement"
             and "materialization" in self.context_macro_stack.call_stack[0]
         ):
-            raise RuntimeException(
+            raise DbtRuntimeError(
                 f"submit_python_job is not intended to be called here, at model {parsed_model['alias']}, with macro call_stack {self.context_macro_stack.call_stack}."
             )
         return self.adapter.submit_python_job(parsed_model, compiled_code)
@@ -1423,7 +1411,7 @@ def generate_runtime_macro_context(
 class ExposureRefResolver(BaseResolver):
     def __call__(self, *args) -> str:
         if len(args) not in (1, 2):
-            ref_invalid_args(self.model, args)
+            raise RefArgsError(node=self.model, args=args)
         self.model.refs.append(list(args))
         return ""
@@ -1431,9 +1419,7 @@ class ExposureRefResolver(BaseResolver):

 class ExposureSourceResolver(BaseResolver):
     def __call__(self, *args) -> str:
         if len(args) != 2:
-            raise_compiler_error(
-                f"source() takes exactly two arguments ({len(args)} given)", self.model
-            )
+            raise NumberSourceArgsError(args, node=self.model)
         self.model.sources.append(list(args))
         return ""
@@ -1441,7 +1427,7 @@ class ExposureSourceResolver(BaseResolver):

 class ExposureMetricResolver(BaseResolver):
     def __call__(self, *args) -> str:
         if len(args) not in (1, 2):
-            metric_invalid_args(self.model, args)
+            raise MetricArgsError(node=self.model, args=args)
         self.model.metrics.append(list(args))
         return ""
@@ -1483,14 +1469,14 @@ class MetricRefResolver(BaseResolver):
         elif len(args) == 2:
             package, name = args
         else:
-            ref_invalid_args(self.model, args)
+            raise RefArgsError(node=self.model, args=args)
         self.validate_args(name, package)
         self.model.refs.append(list(args))
         return ""

     def validate_args(self, name, package):
         if not isinstance(name, str):
-            raise ParsingException(
+            raise ParsingError(
                 f"In a metrics section in {self.model.original_file_path} "
                 "the name argument to ref() must be a string"
             )
@@ -1519,6 +1505,44 @@ def generate_parse_metrics(
     }
+
+
+class EntityRefResolver(BaseResolver):
+    def __call__(self, *args) -> str:
+        package = None
+        if len(args) == 1:
+            name = args[0]
+        elif len(args) == 2:
+            package, name = args
+        else:
+            raise RefArgsError(node=self.model, args=args)
+        self.validate_args(name, package)
+        self.model.refs.append(list(args))
+        return ""
+
+    def validate_args(self, name, package):
+        if not isinstance(name, str):
+            raise ParsingError(
+                f"In the entity associated with {self.model.original_file_path} "
+                "the name argument to ref() must be a string"
+            )
+
+
+def generate_parse_entities(
+    entity: Entity,
+    config: RuntimeConfig,
+    manifest: Manifest,
+    package_name: str,
+) -> Dict[str, Any]:
+    project = config.load_dependencies()[package_name]
+    return {
+        "ref": EntityRefResolver(
+            None,
+            entity,
+            project,
+            manifest,
+        ),
+    }


 # This class is currently used by the schema parser in order
 # to limit the number of macros in the context by using
 # the TestMacroNamespace
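The new EntityRefResolver above mirrors MetricRefResolver: at parse time it validates the ref() arguments and records them on the entity instead of resolving them. A runnable sketch of that parse-time behavior (stub types stand in for dbt's Entity and error classes):

# Parse-time sketch of EntityRefResolver; stub types only.
from typing import Any, List


class RefArgsError(Exception):
    pass


class EntityStub:
    def __init__(self) -> None:
        self.refs: List[List[Any]] = []


class EntityRefResolverSketch:
    def __init__(self, model: EntityStub) -> None:
        self.model = model

    def __call__(self, *args: str) -> str:
        if len(args) not in (1, 2):
            raise RefArgsError(f"ref() takes 1 or 2 arguments, got {args}")
        self.model.refs.append(list(args))  # record for later resolution
        return ""  # ref() renders to an empty string at parse time


entity = EntityStub()
ref = EntityRefResolverSketch(entity)
ref("dim_customers")
ref("other_package", "fct_orders")
print(entity.refs)  # [['dim_customers'], ['other_package', 'fct_orders']]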
@@ -1573,7 +1597,7 @@ class TestContext(ProviderContext):
     def env_var(self, var: str, default: Optional[str] = None) -> str:
         return_value = None
         if var.startswith(SECRET_ENV_PREFIX):
-            disallow_secret_env_var(var)
+            raise SecretEnvVarLocationError(var)
         if var in os.environ:
             return_value = os.environ[var]
         elif default is not None:
@@ -1599,8 +1623,7 @@ class TestContext(ProviderContext):
             source_file.add_env_var(var, yaml_key, name)  # type: ignore[union-attr]
             return return_value
         else:
-            msg = f"Env var required but not provided: '{var}'"
-            raise_parsing_error(msg)
+            raise EnvVarMissingError(var)


 def generate_test_context(
@@ -4,7 +4,7 @@ from typing import Any, Dict, Optional
 from .base import BaseContext, contextmember

 from dbt.constants import SECRET_ENV_PREFIX, DEFAULT_ENV_PLACEHOLDER
-from dbt.exceptions import raise_parsing_error
+from dbt.exceptions import EnvVarMissingError


 SECRET_PLACEHOLDER = "$$$DBT_SECRET_START$$${}$$$DBT_SECRET_END$$$"
@@ -50,8 +50,7 @@ class SecretContext(BaseContext):
             self.env_vars[var] = return_value if var in os.environ else DEFAULT_ENV_PLACEHOLDER
             return return_value
         else:
-            msg = f"Env var required but not provided: '{var}'"
-            raise_parsing_error(msg)
+            raise EnvVarMissingError(var)


 def generate_secret_context(cli_vars: Dict[str, Any]) -> Dict[str, Any]:
Some files were not shown because too many files have changed in this diff.