Compare commits

10 Commits

Author  SHA1  Message  Date
Emily Rockman  734b6429c7  update to use Docs object  2022-07-27 13:34:23 -05:00
Benoit Perigaud  a75b2c0a90  Make docs a dataclass instead of a Dict  2022-07-26 19:38:22 +02:00
Benoit Perigaud  6b6ae22434  Merge branch 'feature/custom-node-colors-dbt_project' of github.com:dbt-labs/dbt-core into feature/custom-node-colors-dbt_project  2022-07-20 12:51:29 +02:00
Benoit Perigaud  287f443ec9  Make docs a Dict to avoid parsing errors  2022-07-20 12:44:01 +02:00
Benoit Perigaud  aea2c4a29b  Add node_color to Docs  2022-07-20 12:43:09 +02:00
Benoit Perigaud  21ffe31270  Handle when docs is both under docs and config.docs  2022-07-20 12:42:25 +02:00
Sung Won Chung  70c9074625  Merge branch 'main' of https://github.com/dbt-labs/dbt into feature/custom-node-colors-dbt_project  2022-07-19 14:27:18 -05:00
Benoit Perigaud  9fca33cb29  Add docs config and input validation  2022-06-21 09:10:15 +02:00
Benoit Perigaud  6360247d39  Remove node_color from the original docs config  2022-06-21 09:09:35 +02:00
Matt Winkler  f0fbb0e551  add Optional node_color config in Docs dataclass  2022-06-07 09:17:15 -06:00
957 changed files with 23882 additions and 52922 deletions

View File

@@ -1,14 +1,12 @@
[bumpversion]
current_version = 1.5.0a1
current_version = 1.3.0a1
parse = (?P<major>\d+)
\.(?P<minor>\d+)
\.(?P<patch>\d+)
((?P<prekind>a|b|rc)
(?P<pre>\d+) # pre-release version num
)(\.(?P<nightly>[a-z..0-9]+)
)?
serialize =
{major}.{minor}.{patch}{prekind}{pre}.{nightly}
{major}.{minor}.{patch}{prekind}{pre}
{major}.{minor}.{patch}
commit = False
@@ -26,8 +24,6 @@ values =
[bumpversion:part:pre]
first_value = 1
[bumpversion:part:nightly]
[bumpversion:file:core/setup.py]
[bumpversion:file:core/dbt/version.py]
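
For reference, the parse and serialize settings above round-trip a version string through named regex groups, with the serialize templates choosing which groups to emit. A minimal Python sketch of the parse side (reconstructed from the diff, so the optional-group placement may not match the committed config exactly):

```python
import re

# Reconstructed from the .bumpversion.cfg parse pattern shown in the diff.
VERSION_RE = re.compile(
    r"(?P<major>\d+)"
    r"\.(?P<minor>\d+)"
    r"\.(?P<patch>\d+)"
    r"((?P<prekind>a|b|rc)"
    r"(?P<pre>\d+))?"              # pre-release version num
    r"(\.(?P<nightly>[a-z.0-9]+))?"
)

for version in ("1.5.0a1", "1.3.0", "1.5.0a1.dev01272023"):
    parts = VERSION_RE.match(version).groupdict()
    print(version, "->", {k: v for k, v in parts.items() if v is not None})
```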

View File

@@ -3,8 +3,6 @@
For information on prior major and minor releases, see their changelogs:
* [1.4](https://github.com/dbt-labs/dbt-core/blob/1.4.latest/CHANGELOG.md)
* [1.3](https://github.com/dbt-labs/dbt-core/blob/1.3.latest/CHANGELOG.md)
* [1.2](https://github.com/dbt-labs/dbt-core/blob/1.2.latest/CHANGELOG.md)
* [1.1](https://github.com/dbt-labs/dbt-core/blob/1.1.latest/CHANGELOG.md)
* [1.0](https://github.com/dbt-labs/dbt-core/blob/1.0.latest/CHANGELOG.md)

View File

@@ -0,0 +1,8 @@
kind: Features
body: Add reusable function for retrying adapter connections. Utilize said function
to add retries for Postgres (and Redshift).
time: 2022-07-15T03:55:55.270637265+02:00
custom:
Author: tomasfarias
Issue: "5022"
PR: "5432"

View File

@@ -1,6 +0,0 @@
kind: Features
body: Have dbt debug spit out structured json logs with flags enabled.
time: 2023-01-07T00:31:57.516063-08:00
custom:
Author: versusfacit
Issue: "5353"

View File

@@ -1,6 +0,0 @@
kind: Features
body: add adapter_response to dbt test and freshness result
time: 2023-01-18T23:38:01.857342+08:00
custom:
Author: aezomz
Issue: "2964"

View File

@@ -1,6 +0,0 @@
kind: Features
body: Improve error message for packages missing `dbt_project.yml`
time: 2023-01-20T11:29:21.509967-07:00
custom:
Author: dbeatty10
Issue: "6663"

View File

@@ -1,6 +0,0 @@
kind: Features
body: Adjust makefile to have clearer instructions for CI env var changes.
time: 2023-01-26T15:47:16.887327-08:00
custom:
Author: versusfacit
Issue: "6689"

View File

@@ -1,6 +0,0 @@
kind: Features
body: Stand-alone Python module for PostgresColumn
time: 2023-01-27T16:28:12.212427-08:00
custom:
Author: nssalian
Issue: "6772"

View File

@@ -0,0 +1,7 @@
kind: Fixes
body: Rename try to strict for more intuitiveness
time: 2022-07-15T23:11:48.327928+12:00
custom:
Author: jeremyyeo
Issue: "5475"
PR: "5477"

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: Respect quoting config for dbt.ref(), dbt.source(), and dbt.this() in dbt-py models
time: 2023-01-16T12:36:45.63092+01:00
custom:
Author: jtcohen6
Issue: 6103 6619

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: Provide backward compatibility for `get_merge_sql` arguments
time: 2023-01-17T10:13:42.118336-06:00
custom:
Author: dave-connors-3
Issue: "6625"

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: Include adapter_response in NodeFinished run_result log event
time: 2023-01-24T11:58:37.74179-05:00
custom:
Author: gshank
Issue: "6703"

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: Sort cli vars before hashing for partial parsing
time: 2023-01-24T14:19:43.333628-05:00
custom:
Author: gshank
Issue: "6710"

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: '[Regression] exposure_content referenced incorrectly'
time: 2023-01-25T19:17:39.942081-05:00
custom:
Author: Mathyoub
Issue: "6738"

View File

@@ -1,6 +0,0 @@
kind: Under the Hood
body: Fix use of ConnectionReused logging event
time: 2023-01-13T13:25:13.023168-05:00
custom:
Author: gshank
Issue: "6168"

View File

@@ -1,6 +0,0 @@
kind: Under the Hood
body: Port docs tests to pytest
time: 2023-01-13T15:07:00.477038-05:00
custom:
Author: peterallenwebb
Issue: "6573"

View File

@@ -1,6 +0,0 @@
kind: Under the Hood
body: Update deprecated github action command
time: 2023-01-17T11:17:37.046095-06:00
custom:
Author: davidbloss
Issue: "6153"

View File

@@ -1,7 +0,0 @@
kind: Under the Hood
body: Replaced the EmptyLine event with a more general Formatting event, and added
a Note event.
time: 2023-01-20T17:22:54.45828-05:00
custom:
Author: peterallenwebb
Issue: "6481"

View File

@@ -1,6 +0,0 @@
kind: Under the Hood
body: Small optimization on manifest parsing benefitting large DAGs
time: 2023-01-22T21:52:35.549814+01:00
custom:
Author: boxysean
Issue: "6697"

View File

@@ -1,6 +0,0 @@
kind: Under the Hood
body: Revised and simplified various structured logging events
time: 2023-01-24T15:35:53.065356-05:00
custom:
Author: peterallenwebb
Issue: 6664 6665 6666

View File

@@ -1,6 +0,0 @@
kind: Under the Hood
body: ' Optimized GraphQueue to remove graph analysis bottleneck in large dags.'
time: 2023-01-26T13:59:39.518345-05:00
custom:
Author: peterallenwebb
Issue: "6759"

View File

@@ -1,6 +0,0 @@
kind: Under the Hood
body: '[CT-1841] Convert custom target test to Pytest'
time: 2023-01-26T16:47:41.198714-08:00
custom:
Author: aranke
Issue: "6638"

130
.changie.yaml Normal file → Executable file
View File

@@ -6,128 +6,56 @@ changelogPath: CHANGELOG.md
versionExt: md
versionFormat: '## dbt-core {{.Version}} - {{.Time.Format "January 02, 2006"}}'
kindFormat: '### {{.Kind}}'
changeFormat: |-
{{- $IssueList := list }}
{{- $changes := splitList " " $.Custom.Issue }}
{{- range $issueNbr := $changes }}
{{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/issues/nbr)" | replace "nbr" $issueNbr }}
{{- $IssueList = append $IssueList $changeLink }}
{{- end -}}
- {{.Body}} ({{ range $index, $element := $IssueList }}{{if $index}}, {{end}}{{$element}}{{end}})
changeFormat: '- {{.Body}} ([#{{.Custom.Issue}}](https://github.com/dbt-labs/dbt-core/issues/{{.Custom.Issue}}), [#{{.Custom.PR}}](https://github.com/dbt-labs/dbt-core/pull/{{.Custom.PR}}))'
kinds:
- label: Breaking Changes
- label: Features
- label: Fixes
- label: Docs
changeFormat: |-
{{- $IssueList := list }}
{{- $changes := splitList " " $.Custom.Issue }}
{{- range $issueNbr := $changes }}
{{- $changeLink := "[dbt-docs/#nbr](https://github.com/dbt-labs/dbt-docs/issues/nbr)" | replace "nbr" $issueNbr }}
{{- $IssueList = append $IssueList $changeLink }}
{{- end -}}
- {{.Body}} ({{ range $index, $element := $IssueList }}{{if $index}}, {{end}}{{$element}}{{end}})
- label: Under the Hood
- label: Dependencies
changeFormat: |-
{{- $PRList := list }}
{{- $changes := splitList " " $.Custom.PR }}
{{- range $pullrequest := $changes }}
{{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $pullrequest }}
{{- $PRList = append $PRList $changeLink }}
{{- end -}}
- {{.Body}} ({{ range $index, $element := $PRList }}{{if $index}}, {{end}}{{$element}}{{end}})
skipGlobalChoices: true
additionalChoices:
- key: Author
label: GitHub Username(s) (separated by a single space if multiple)
type: string
minLength: 3
- key: PR
label: GitHub Pull Request Number (separated by a single space if multiple)
type: string
minLength: 1
- label: Security
changeFormat: |-
{{- $PRList := list }}
{{- $changes := splitList " " $.Custom.PR }}
{{- range $pullrequest := $changes }}
{{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $pullrequest }}
{{- $PRList = append $PRList $changeLink }}
{{- end -}}
- {{.Body}} ({{ range $index, $element := $PRList }}{{if $index}}, {{end}}{{$element}}{{end}})
skipGlobalChoices: true
additionalChoices:
- key: Author
label: GitHub Username(s) (separated by a single space if multiple)
type: string
minLength: 3
- key: PR
label: GitHub Pull Request Number (separated by a single space if multiple)
type: string
minLength: 1
newlines:
afterChangelogHeader: 1
afterKind: 1
afterChangelogVersion: 1
beforeKind: 1
endOfVersion: 1
- label: Breaking Changes
- label: Features
- label: Fixes
- label: Docs
- label: Under the Hood
- label: Dependencies
- label: Security
custom:
- key: Author
label: GitHub Username(s) (separated by a single space if multiple)
type: string
minLength: 3
- key: Issue
label: GitHub Issue Number (separated by a single space if multiple)
type: string
minLength: 1
label: GitHub Issue Number
type: int
minLength: 4
- key: PR
label: GitHub Pull Request Number
type: int
minLength: 4
footerFormat: |
{{- $contributorDict := dict }}
{{- /* any names added to this list should be all lowercase for later matching purposes */}}
{{- $core_team := list "michelleark" "peterallenwebb" "emmyoop" "nathaniel-may" "gshank" "leahwicz" "chenyulinx" "stu-k" "iknox-fa" "versusfacit" "mcknight-42" "jtcohen6" "aranke" "dependabot[bot]" "snyk-bot" "colin-rogers-dbt" }}
{{- $core_team := list "emmyoop" "nathaniel-may" "gshank" "leahwicz" "chenyulinx" "stu-k" "iknox-fa" "versusfacit" "mcknight-42" "jtcohen6" "dependabot" }}
{{- range $change := .Changes }}
{{- $authorList := splitList " " $change.Custom.Author }}
{{- /* loop through all authors for a single changelog */}}
{{- /* loop through all authors for a PR */}}
{{- range $author := $authorList }}
{{- $authorLower := lower $author }}
{{- /* we only want to include non-core team contributors */}}
{{- if not (has $authorLower $core_team)}}
{{- $changeList := splitList " " $change.Custom.Author }}
{{- $IssueList := list }}
{{- $changeLink := $change.Kind }}
{{- if or (eq $change.Kind "Dependencies") (eq $change.Kind "Security") }}
{{- $changes := splitList " " $change.Custom.PR }}
{{- range $issueNbr := $changes }}
{{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $issueNbr }}
{{- $IssueList = append $IssueList $changeLink }}
{{- end -}}
{{- else }}
{{- $changes := splitList " " $change.Custom.Issue }}
{{- range $issueNbr := $changes }}
{{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/issues/nbr)" | replace "nbr" $issueNbr }}
{{- $IssueList = append $IssueList $changeLink }}
{{- end -}}
{{- end }}
{{- /* check if this contributor has other changes associated with them already */}}
{{- if hasKey $contributorDict $author }}
{{- $contributionList := get $contributorDict $author }}
{{- $contributionList = concat $contributionList $IssueList }}
{{- $contributorDict := set $contributorDict $author $contributionList }}
{{- else }}
{{- $contributionList := $IssueList }}
{{- $contributorDict := set $contributorDict $author $contributionList }}
{{- end }}
{{- end}}
{{- $pr := $change.Custom.PR }}
{{- /* check if this contributor has other PRs associated with them already */}}
{{- if hasKey $contributorDict $author }}
{{- $prList := get $contributorDict $author }}
{{- $prList = append $prList $pr }}
{{- $contributorDict := set $contributorDict $author $prList }}
{{- else }}
{{- $prList := list $change.Custom.PR }}
{{- $contributorDict := set $contributorDict $author $prList }}
{{- end }}
{{- end}}
{{- end}}
{{- end }}
{{- /* no indentation here for formatting so the final markdown doesn't have unneeded indentations */}}
{{- if $contributorDict}}
### Contributors
{{- range $k,$v := $contributorDict }}
- [@{{$k}}](https://github.com/{{$k}}) ({{ range $index, $element := $v }}{{if $index}}, {{end}}{{$element}}{{end}})
- [@{{$k}}](https://github.com/{{$k}}) ({{ range $index, $element := $v }}{{if $index}}, {{end}}[#{{$element}}](https://github.com/dbt-labs/dbt-core/pull/{{$element}}){{end}})
{{- end }}
{{- end }}
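
The new `changeFormat` and `footerFormat` templates above split the space-separated `Custom.Issue` (or `Custom.PR`) string and emit one markdown link per number. A rough Python equivalent of that link-building logic (a hypothetical helper, not part of changie):

```python
def format_change(body: str, numbers: str,
                  repo: str = "dbt-labs/dbt-core", kind: str = "issues") -> str:
    """Mimic the changie changeFormat template: one markdown link per
    space-separated issue/PR number in the entry's custom fields."""
    links = [f"[#{nbr}](https://github.com/{repo}/{kind}/{nbr})"
             for nbr in numbers.split(" ")]
    return f"- {body} ({', '.join(links)})"

# A multi-issue entry like the one shown earlier (Issue: 6664 6665 6666):
print(format_change("Revised and simplified various structured logging events",
                    "6664 6665 6666"))
```

Dependencies and Security entries link to pull requests instead (`kind="pull"`), which is why those kinds override `changeFormat` with a PR-based template, and `footerFormat` applies the same splitting to `Custom.Author` to credit multiple contributors per entry.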

View File

@@ -9,4 +9,4 @@ ignore =
E203 # makes Flake8 work like black
E741
E501 # long line checking is done in black
exclude = test/
exclude = test

2
.gitattributes vendored
View File

@@ -1,2 +0,0 @@
core/dbt/include/index.html binary
tests/functional/artifacts/data/state/*/manifest.json binary

60
.github/CODEOWNERS vendored
View File

@@ -16,57 +16,25 @@
# Changes to GitHub configurations including Actions
/.github/ @leahwicz
### LANGUAGE
# Language core modules
/core/dbt/config/ @dbt-labs/core-language
/core/dbt/context/ @dbt-labs/core-language
/core/dbt/contracts/ @dbt-labs/core-language
/core/dbt/deps/ @dbt-labs/core-language
/core/dbt/events/ @dbt-labs/core-language # structured logging
/core/dbt/parser/ @dbt-labs/core-language
# Language misc files
/core/dbt/dataclass_schema.py @dbt-labs/core-language
/core/dbt/hooks.py @dbt-labs/core-language
/core/dbt/node_types.py @dbt-labs/core-language
/core/dbt/semver.py @dbt-labs/core-language
### EXECUTION
/core/dbt/config/ @dbt-labs/core-language
/core/dbt/context/ @dbt-labs/core-language
/core/dbt/contracts/ @dbt-labs/core-language
/core/dbt/deps/ @dbt-labs/core-language
/core/dbt/parser/ @dbt-labs/core-language
# Execution core modules
/core/dbt/graph/ @dbt-labs/core-execution
/core/dbt/task/ @dbt-labs/core-execution
/core/dbt/events/ @dbt-labs/core-execution @dbt-labs/core-language # eventually remove language but they have knowledge here now
/core/dbt/graph/ @dbt-labs/core-execution
/core/dbt/task/ @dbt-labs/core-execution
# Execution misc files
/core/dbt/compilation.py @dbt-labs/core-execution
/core/dbt/flags.py @dbt-labs/core-execution
/core/dbt/lib.py @dbt-labs/core-execution
/core/dbt/main.py @dbt-labs/core-execution
/core/dbt/profiler.py @dbt-labs/core-execution
/core/dbt/selected_resources.py @dbt-labs/core-execution
/core/dbt/tracking.py @dbt-labs/core-execution
/core/dbt/version.py @dbt-labs/core-execution
# Adapter interface, scaffold, Postgres plugin
/core/dbt/adapters @dbt-labs/core-adapters
/core/scripts/create_adapter_plugin.py @dbt-labs/core-adapters
/plugins/ @dbt-labs/core-adapters
### ADAPTERS
# Adapter interface ("base" + "sql" adapter defaults, cache)
/core/dbt/adapters @dbt-labs/core-adapters
# Global project (default macros + materializations), starter project
/core/dbt/include @dbt-labs/core-adapters
# Postgres plugin
/plugins/ @dbt-labs/core-adapters
# Functional tests for adapter plugins
/tests/adapter @dbt-labs/core-adapters
### TESTS
# Overlapping ownership for vast majority of unit + functional tests
# Global project: default macros, including generic tests + materializations
/core/dbt/include/global_project @dbt-labs/core-execution @dbt-labs/core-adapters
# Perf regression testing framework
# This excludes the test project files itself since those aren't specific

View File

@@ -9,33 +9,23 @@ body:
Thanks for taking the time to fill out this bug report!
- type: checkboxes
attributes:
label: Is this a new bug in dbt-core?
description: >
In other words, is this an error, flaw, failure or fault in our software?
If this is a bug that broke existing functionality that used to work, please open a regression issue.
If this is a bug in an adapter plugin, please open an issue in the adapter's repository.
If this is a bug experienced while using dbt Cloud, please report to [support](mailto:support@getdbt.com).
If this is a request for help or troubleshooting code in your own dbt project, please join our [dbt Community Slack](https://www.getdbt.com/community/join-the-community/) or open a [Discussion question](https://github.com/dbt-labs/docs.getdbt.com/discussions).
Please search to see if an issue already exists for the bug you encountered.
label: Is there an existing issue for this?
description: Please search to see if an issue already exists for the bug you encountered.
options:
- label: I believe this is a new bug in dbt-core
required: true
- label: I have searched the existing issues, and I could not find an existing issue for this bug
- label: I have searched the existing issues
required: true
- type: textarea
attributes:
label: Current Behavior
description: A concise description of what you're experiencing.
validations:
required: true
required: false
- type: textarea
attributes:
label: Expected Behavior
description: A concise description of what you expected to happen.
validations:
required: true
required: false
- type: textarea
attributes:
label: Steps To Reproduce
@@ -46,7 +36,7 @@ body:
3. Run '...'
4. See error...
validations:
required: true
required: false
- type: textarea
id: logs
attributes:
@@ -62,8 +52,8 @@ body:
description: |
examples:
- **OS**: Ubuntu 20.04
- **Python**: 3.9.12 (`python3 --version`)
- **dbt-core**: 1.1.1 (`dbt --version`)
- **Python**: 3.7.2 (`python --version`)
- **dbt**: 0.21.0 (`dbt --version`)
value: |
- OS:
- Python:
@@ -74,15 +64,13 @@ body:
- type: dropdown
id: database
attributes:
label: Which database adapter are you using with dbt?
description: If the bug is specific to the database or adapter, please open the issue in that adapter's repository instead
label: What database are you using dbt with?
multiple: true
options:
- postgres
- redshift
- snowflake
- bigquery
- spark
- other (mention it in "Additional Context")
validations:
required: false

View File

@@ -1,14 +1,4 @@
blank_issues_enabled: false
contact_links:
- name: Ask the community for help
url: https://github.com/dbt-labs/docs.getdbt.com/discussions
about: Need help troubleshooting? Check out our guide on how to ask
- name: Contact dbt Cloud support
url: mailto:support@getdbt.com
about: Are you using dbt Cloud? Contact our support team for help!
- name: Participate in Discussions
url: https://github.com/dbt-labs/dbt-core/discussions
about: Do you have a Big Idea for dbt? Read open discussions, or start a new one
- name: Create an issue for dbt-redshift
url: https://github.com/dbt-labs/dbt-redshift/issues/new/choose
about: Report a bug or request a feature for dbt-redshift
@@ -18,6 +8,9 @@ contact_links:
- name: Create an issue for dbt-snowflake
url: https://github.com/dbt-labs/dbt-snowflake/issues/new/choose
about: Report a bug or request a feature for dbt-snowflake
- name: Create an issue for dbt-spark
url: https://github.com/dbt-labs/dbt-spark/issues/new/choose
about: Report a bug or request a feature for dbt-spark
- name: Ask a question or get support
url: https://docs.getdbt.com/docs/guides/getting-help
about: Ask a question or request support
- name: Questions on Stack Overflow
url: https://stackoverflow.com/questions/tagged/dbt
about: Look at questions and answers on Stack Overflow

View File

@@ -1,5 +1,5 @@
name: ✨ Feature
description: Propose a straightforward extension of dbt functionality
description: Suggest an idea for dbt
title: "[Feature] <title>"
labels: ["enhancement", "triage"]
body:
@@ -9,24 +9,18 @@ body:
Thanks for taking the time to fill out this feature request!
- type: checkboxes
attributes:
label: Is this your first time submitting a feature request?
description: >
We want to make sure that features are distinct and discoverable,
so that other members of the community can find them and offer their thoughts.
Issues are the right place to request straightforward extensions of existing dbt functionality.
For "big ideas" about future capabilities of dbt, we ask that you open a
[discussion](https://github.com/dbt-labs/dbt-core/discussions) in the "Ideas" category instead.
label: Is there an existing feature request for this?
description: Please search to see if an issue already exists for the feature you would like.
options:
- label: I have searched the existing issues
required: true
label: Is this your first time opening an issue?
options:
- label: I have read the [expectations for open source contributors](https://docs.getdbt.com/docs/contributing/oss-expectations)
required: true
- label: I have searched the existing issues, and I could not find an existing issue for this feature
required: true
- label: I am requesting a straightforward extension of existing dbt functionality, rather than a Big Idea better suited to a discussion
required: true
- type: textarea
attributes:
label: Describe the feature
label: Describe the Feature
description: A clear and concise description of what you want to happen.
validations:
required: true

View File

@@ -1,93 +0,0 @@
name: ☣️ Regression
description: Report a regression you've observed in a newer version of dbt
title: "[Regression] <title>"
labels: ["bug", "regression", "triage"]
body:
- type: markdown
attributes:
value: |
Thanks for taking the time to fill out this regression report!
- type: checkboxes
attributes:
label: Is this a regression in a recent version of dbt-core?
description: >
A regression is when documented functionality works as expected in an older version of dbt-core,
and no longer works after upgrading to a newer version of dbt-core
options:
- label: I believe this is a regression in dbt-core functionality
required: true
- label: I have searched the existing issues, and I could not find an existing issue for this regression
required: true
- type: textarea
attributes:
label: Current Behavior
description: A concise description of what you're experiencing.
validations:
required: true
- type: textarea
attributes:
label: Expected/Previous Behavior
description: A concise description of what you expected to happen.
validations:
required: true
- type: textarea
attributes:
label: Steps To Reproduce
description: Steps to reproduce the behavior.
placeholder: |
1. In this environment...
2. With this config...
3. Run '...'
4. See error...
validations:
required: true
- type: textarea
id: logs
attributes:
label: Relevant log output
description: |
If applicable, log output to help explain your problem.
render: shell
validations:
required: false
- type: textarea
attributes:
label: Environment
description: |
examples:
- **OS**: Ubuntu 20.04
- **Python**: 3.9.12 (`python3 --version`)
- **dbt-core (working version)**: 1.1.1 (`dbt --version`)
- **dbt-core (regression version)**: 1.2.0 (`dbt --version`)
value: |
- OS:
- Python:
- dbt (working version):
- dbt (regression version):
render: markdown
validations:
required: true
- type: dropdown
id: database
attributes:
label: Which database adapter are you using with dbt?
description: If the regression is specific to the database or adapter, please open the issue in that adapter's repository instead
multiple: true
options:
- postgres
- redshift
- snowflake
- bigquery
- spark
- other (mention it in "Additional Context")
validations:
required: false
- type: textarea
attributes:
label: Additional Context
description: |
Links? References? Anything that will give us more context about the issue you are encountering!
Tip: You can attach images or log files by clicking this area to highlight it and then dragging files in.
validations:
required: false

216
.github/_README.md vendored
View File

@@ -1,216 +0,0 @@
<!-- GitHub will publish this readme on the main repo page if the name is `README.md` so we've added the leading underscore to prevent this -->
<!-- Do not rename this file `README.md` -->
<!-- See https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-readmes -->
## What are GitHub Actions?
GitHub Actions are used for many different purposes. We use them to run tests in CI, validate PRs are in an expected state, and automate processes.
- [Overview of GitHub Actions](https://docs.github.com/en/actions/learn-github-actions/understanding-github-actions)
- [What's a workflow?](https://docs.github.com/en/actions/using-workflows/about-workflows)
- [GitHub Actions guides](https://docs.github.com/en/actions/guides)
___
## Where do actions and workflows live
We try to maintain actions that are shared across repositories in a single place so that necessary changes only need to be made once.
[dbt-labs/actions](https://github.com/dbt-labs/actions/) is the central repository of actions and workflows we use across repositories.
GitHub Actions also live locally within a repository. The workflows can be found at `.github/workflows` from the root of the repository. These should be specific to that code base.
Note: We are actively moving actions into the central Action repository so there is currently some duplication across repositories.
___
## Basics of Using Actions
### Viewing Output
- View the detailed action output for your PR in the **Checks** tab of the PR. This only shows the most recent run. You can also view high level **Checks** output at the bottom on the PR.
- View _all_ action output for a repository from the [**Actions**](https://github.com/dbt-labs/dbt-core/actions) tab. Workflow results last 1 year. Artifacts last 90 days, unless specified otherwise in individual workflows.
This view often shows what seem like duplicates of the same workflow. This occurs when files are renamed but the workflow name has not changed. These are in fact _not_ duplicates.
You can see the branch the workflow runs from in this view. It is listed in the table between the workflow name and the time/duration of the run. When blank, the workflow is running in the context of the `main` branch.
### How to view what workflow file is being referenced from a run
- When viewing the output of a specific workflow run, click the 3 dots at the top right of the display. There will be an option to `View workflow file`.
### How to manually run a workflow
- If a workflow has the `on: workflow_dispatch` trigger, it can be manually triggered
- From the [**Actions**](https://github.com/dbt-labs/dbt-core/actions) tab, find the workflow you want to run, select it, and fill in any inputs required. That's it!
### How to re-run jobs
- Some actions cannot be re-run in the GitHub UI, namely the Snyk checks and the CLA check. Snyk checks are re-run by closing and reopening the PR; you can retrigger the CLA check by commenting on the PR with `@cla-bot check`
___
## General Standards
### Permissions
- By default, workflows have read permissions in the repository for the contents scope only when no permissions are explicitly set.
- It is best practice to always define the permissions explicitly. This will allow actions to continue to work when the default permissions on the repository are changed. It also allows explicit grants of the least permissions possible.
- There are a lot of permissions available. [Read up on them](https://docs.github.com/en/actions/using-jobs/assigning-permissions-to-jobs) if you're unsure what to use.
```yaml
permissions:
contents: read
pull-requests: write
```
### Secrets
- When to use a [Personal Access Token (PAT)](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token) vs the [GITHUB_TOKEN](https://docs.github.com/en/actions/security-guides/automatic-token-authentication) generated for the action?
The `GITHUB_TOKEN` is used by default. In most cases it is sufficient for what you need.
If you expect the workflow to result in a commit that should retrigger workflows, you will need to use a Personal Access Token for the bot to commit the file. When using the GITHUB_TOKEN, the resulting commit will not trigger another GitHub Actions Workflow run. This is due to limitations set by GitHub. See [the docs](https://docs.github.com/en/actions/security-guides/automatic-token-authentication#using-the-github_token-in-a-workflow) for a more detailed explanation.
For example, we must use a PAT in our workflow to commit a new changelog yaml file for bot PRs. Once the file has been committed to the branch, it should retrigger the check to validate that a changelog exists on the PR. Otherwise, it would stay in a failed state since the check would never retrigger.
### Triggers
You can configure your workflows to run when specific activity on GitHub happens, at a scheduled time, or when an event outside of GitHub occurs. Read more details in the [GitHub docs](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows).
These triggers are under the `on` key of the workflow and more than one can be listed.
```yaml
on:
push:
branches:
- "main"
- "*.latest"
- "releases/*"
pull_request:
# catch when the PR is opened with the label or when the label is added
types: [opened, labeled]
workflow_dispatch:
```
Some triggers of note that we use:
- `push` - Runs your workflow when you push a commit or tag.
- `pull_request` - Runs your workflow when activity on a pull request in the workflow's repository occurs. Takes in a list of activity types (opened, labeled, etc) if appropriate.
- `pull_request_target` - Same as `pull_request` but runs in the context of the PR target branch.
- `workflow_call` - used with reusable workflows. Triggered by another workflow calling it.
- `workflow_dispatch` - Gives the ability to manually trigger a workflow from the GitHub API, GitHub CLI, or GitHub browser interface.
### Basic Formatting
- Add a description of what your workflow does at the top in this format
```
# **what?**
# Describe what the action does.
# **why?**
# Why does this action exist?
# **when?**
# How/when will it be triggered?
```
- Leave blank lines between steps and jobs
```yaml
jobs:
dependency_changelog:
runs-on: ubuntu-latest
steps:
- name: Get File Name Timestamp
id: filename_time
uses: nanzm/get-time-action@v1.1
with:
format: 'YYYYMMDD-HHmmss'
- name: Get File Content Timestamp
id: file_content_time
uses: nanzm/get-time-action@v1.1
with:
format: 'YYYY-MM-DDTHH:mm:ss.000000-05:00'
- name: Generate Filepath
id: fp
run: |
FILEPATH=.changes/unreleased/Dependencies-${{ steps.filename_time.outputs.time }}.yaml
echo "FILEPATH=$FILEPATH" >> $GITHUB_OUTPUT
```
- Print out all variables you will reference as the first step of a job. This allows for easier debugging. The first job should log all inputs. Subsequent jobs should reference outputs of other jobs, if present.
When possible, generate variables at the top of your workflow in a single place to reference later. This is not always strictly possible since you may generate a value to be used later mid-workflow.
Be sure to use quotes around these logs so special characters are not interpreted.
```yaml
job1:
- name: "[DEBUG] Print Variables"
run: |
echo "all variables defined as inputs"
echo "The last commit sha in the release: ${{ inputs.sha }}"
echo "The release version number: ${{ inputs.version_number }}"
echo "The changelog_path: ${{ inputs.changelog_path }}"
echo "The build_script_path: ${{ inputs.build_script_path }}"
echo "The s3_bucket_name: ${{ inputs.s3_bucket_name }}"
echo "The package_test_command: ${{ inputs.package_test_command }}"
# collect all the variables that need to be used in subsequent jobs
- name: Set Variables
id: variables
run: |
echo "important_path='performance/runner/Cargo.toml'" >> $GITHUB_OUTPUT
echo "release_id=${{github.event.inputs.release_id}}" >> $GITHUB_OUTPUT
echo "open_prs=${{github.event.inputs.open_prs}}" >> $GITHUB_OUTPUT
job2:
needs: [job1]
- name: "[DEBUG] Print Variables"
run: |
echo "all variables defined in job1 > Set Variables > outputs"
echo "important_path: ${{ needs.job1.outputs.important_path }}"
echo "release_id: ${{ needs.job1.outputs.release_id }}"
echo "open_prs: ${{ needs.job1.outputs.open_prs }}"
```
- When it's not obvious what something does, add a comment!
___
## Tips
### Context
- The [GitHub CLI](https://cli.github.com/) is available in the default runners
- Actions run in your context, i.e., an action from the marketplace that uses the GITHUB_TOKEN uses the GITHUB_TOKEN generated by your workflow run.
### Actions from the Marketplace
- Don't use external actions for things that can easily be accomplished manually.
- Always read through what an external action does before using it! Often an action in the GitHub Actions Marketplace can be replaced with a few lines in bash. This is much more maintainable (and won't change under us) and clear as to what's actually happening. It also prevents any surprises from third-party code we don't control.
- Pin actions _we don't control_ to tags.
### Connecting to AWS
- Authenticate with the aws managed workflow
```yaml
- name: Configure AWS credentials from Test account
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: us-east-1
```
- Then access with the aws command that comes installed on the action runner machines
```yaml
- name: Copy Artifacts from S3 via CLI
run: aws s3 cp ${{ env.s3_bucket }} . --recursive
```
### Testing
- Depending on what your action does, you may be able to use [`act`](https://github.com/nektos/act) to test the action locally. Some features of GitHub Actions do not work with `act`, among those are reusable workflows. If you can't use `act`, you'll have to push your changes up before being able to test. This can be slow.

View File

@@ -28,12 +28,11 @@ if __name__ == "__main__":
if package_request.status_code == 404:
if halt_on_missing:
sys.exit(1)
# everything is the latest if the package doesn't exist
github_output = os.environ.get("GITHUB_OUTPUT")
with open(github_output, "at", encoding="utf-8") as gh_output:
gh_output.write("latest=True")
gh_output.write("minor_latest=True")
sys.exit(0)
else:
# everything is the latest if the package doesn't exist
print(f"::set-output name=latest::{True}")
print(f"::set-output name=minor_latest::{True}")
sys.exit(0)
# TODO: verify package meta is "correct"
# https://github.com/dbt-labs/dbt-core/issues/4640
@@ -92,7 +91,5 @@ if __name__ == "__main__":
latest = is_latest(pre_rel, new_version, current_latest)
minor_latest = is_latest(pre_rel, new_version, current_minor_latest)
github_output = os.environ.get("GITHUB_OUTPUT")
with open(github_output, "at", encoding="utf-8") as gh_output:
gh_output.write(f"latest={latest}")
gh_output.write(f"minor_latest={minor_latest}")
print(f"::set-output name=latest::{latest}")
print(f"::set-output name=minor_latest::{minor_latest}")

View File

@@ -20,4 +20,4 @@ resolves #
- [ ] I have run this code in development and it appears to resolve the stated issue
- [ ] This PR includes tests, or tests are not required/relevant for this PR
- [ ] I have [opened an issue to add/update docs](https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose), or docs changes are not required/relevant for this PR
- [ ] I have run `changie new` to [create a changelog entry](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-a-changelog-entry)
- [ ] I have run `changie new` to [create a changelog entry](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#Adding-CHANGELOG-Entry)

View File

@@ -1,61 +0,0 @@
# **what?**
# When bots create a PR, this action will add a corresponding changie yaml file to that
# PR when a specific label is added.
#
# The file is created off a template:
#
# kind: <per action matrix>
# body: <PR title>
# time: <current timestamp>
# custom:
# Author: <PR User Login (generally the bot)>
# Issue: 4904
# PR: <PR number>
#
# **why?**
# Automate changelog generation for more visibility with automated bot PRs.
#
# **when?**
# Once a PR is created, the label can be added at creation time or afterwards. You can also
# manually trigger this by adding the appropriate label at any time.
#
# **how to add another bot?**
# Add the label and changie kind to the include matrix. That's it!
#
name: Bot Changelog
on:
pull_request:
# catch when the PR is opened with the label or when the label is added
types: [labeled]
permissions:
contents: write
pull-requests: read
jobs:
generate_changelog:
strategy:
matrix:
include:
- label: "dependencies"
changie_kind: "Dependencies"
- label: "snyk"
changie_kind: "Security"
runs-on: ubuntu-latest
steps:
- name: Create and commit changelog on bot PR
if: ${{ contains(github.event.pull_request.labels.*.name, matrix.label) }}
id: bot_changelog
uses: emmyoop/changie_bot@v1.0.1
with:
GITHUB_TOKEN: ${{ secrets.FISHTOWN_BOT_PAT }}
commit_author_name: "Github Build Bot"
commit_author_email: "<buildbot@fishtownanalytics.com>"
commit_message: "Add automated changelog yaml from template for bot PR"
changie_kind: ${{ matrix.changie_kind }}
label: ${{ matrix.label }}
custom_changelog_string: "custom:\n Author: ${{ github.event.pull_request.user.login }}\n PR: ${{ github.event.pull_request.number }}"

78
.github/workflows/changelog-check.yml vendored Normal file
View File

@@ -0,0 +1,78 @@
# **what?**
# Checks that a file has been committed under the /.changes directory
# as a new CHANGELOG entry. Cannot check for a specific filename as
# it is dynamically generated by change type and timestamp.
# This workflow should not require any secrets since it runs for PRs
# from forked repos.
# By default, secrets are not passed to workflows running from
# a forked repo.
# **why?**
# Ensure code change gets reflected in the CHANGELOG.
# **when?**
# This will run for all PRs going into main and *.latest. It will
# run when they are opened, reopened, when any label is added or removed
# and when new code is pushed to the branch. The action will then get
# skipped if the 'Skip Changelog' label is present on the PR.
name: Check Changelog Entry
on:
pull_request:
types: [opened, reopened, labeled, unlabeled, synchronize]
workflow_dispatch:
defaults:
run:
shell: bash
permissions:
contents: read
pull-requests: write
env:
changelog_comment: 'Thank you for your pull request! We could not find a changelog entry for this change. For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry).'
jobs:
changelog:
name: changelog
if: "!contains(github.event.pull_request.labels.*.name, 'Skip Changelog')"
runs-on: ubuntu-latest
steps:
- name: Check if changelog file was added
# https://github.com/marketplace/actions/paths-changes-filter
# For each filter, it sets output variable named by the filter to the text:
# 'true' - if any of changed files matches any of filter rules
# 'false' - if none of changed files matches any of filter rules
# also, returns:
# `changes` - JSON array with names of all filters matching any of the changed files
uses: dorny/paths-filter@v2
id: filter
with:
token: ${{ secrets.GITHUB_TOKEN }}
filters: |
changelog:
- added: '.changes/unreleased/**.yaml'
- name: Check if comment already exists
uses: peter-evans/find-comment@v1
id: changelog_comment
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: 'github-actions[bot]'
body-includes: ${{ env.changelog_comment }}
- name: Create PR comment if changelog entry is missing, required, and does not exist
if: |
steps.filter.outputs.changelog == 'false' &&
steps.changelog_comment.outputs.comment-body == ''
uses: peter-evans/create-or-update-comment@v1
with:
issue-number: ${{ github.event.pull_request.number }}
body: ${{ env.changelog_comment }}
- name: Fail job if changelog entry is missing and required
if: steps.filter.outputs.changelog == 'false'
uses: actions/github-script@v6
with:
script: core.setFailed('Changelog entry required to merge.')
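
The dorny/paths-filter rule above only asks whether the PR adds any file matching `.changes/unreleased/**.yaml`. A minimal Python sketch of that check (a hypothetical helper, not the action itself):

```python
from fnmatch import fnmatch

def has_changelog_entry(added_files: list[str]) -> bool:
    # Mirrors the filter rule: any file added under .changes/unreleased/
    # with a .yaml extension counts as a changelog entry.
    return any(path.startswith(".changes/unreleased/") and fnmatch(path, "*.yaml")
               for path in added_files)

print(has_changelog_entry(["core/dbt/main.py"]))                         # False
print(has_changelog_entry([".changes/unreleased/Fixes-20230124.yaml"]))  # True
```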

View File

@@ -1,40 +0,0 @@
# **what?**
# Checks that a file has been committed under the /.changes directory
# as a new CHANGELOG entry. Cannot check for a specific filename as
# it is dynamically generated by change type and timestamp.
# This workflow should not require any secrets since it runs for PRs
# from forked repos.
# By default, secrets are not passed to workflows running from
# a forked repo.
# **why?**
# Ensure code change gets reflected in the CHANGELOG.
# **when?**
# This will run for all PRs going into main and *.latest. It will
# run when they are opened, reopened, when any label is added or removed
# and when new code is pushed to the branch. The action will then get
# skipped if the 'Skip Changelog' label is present on the PR.
name: Check Changelog Entry
on:
pull_request:
types: [opened, reopened, labeled, unlabeled, synchronize]
workflow_dispatch:
defaults:
run:
shell: bash
permissions:
contents: read
pull-requests: write
jobs:
changelog:
uses: dbt-labs/actions/.github/workflows/changelog-existence.yml@main
with:
changelog_comment: 'Thank you for your pull request! We could not find a changelog entry for this change. For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry).'
skip_label: 'Skip Changelog'
secrets: inherit

View File

@@ -0,0 +1,114 @@
# **what?**
# When dependabot creates a PR, it always adds the `dependencies` label. This
# action will add a corresponding changie yaml file to that PR when that label is added.
# The file is created off a template:
#
# kind: Dependencies
# body: <PR title>
# time: <current timestamp>
# custom:
# Author: dependabot
# Issue: 4904
# PR: <PR number>
#
# **why?**
# Automate changelog generation for more visibility with automated dependency updates via dependabot.
# **when?**
# Once a PR is created and it has been correctly labeled with `dependencies`. The intended use
# is for the PRs created by dependabot. You can also manually trigger this by adding the
# `dependencies` label at any time.
name: Dependency Changelog
on:
pull_request:
# catch when the PR is opened with the label or when the label is added
types: [opened, labeled]
permissions:
contents: write
pull-requests: read
jobs:
dependency_changelog:
if: "contains(github.event.pull_request.labels.*.name, 'dependencies')"
runs-on: ubuntu-latest
steps:
# The timestamp determines the order in which changelog entries are listed in the final CHANGELOG.md file. Precision is not
# important here.
# The timestamp on the filename and the timestamp in the contents of the file have different expected formats.
- name: Get File Name Timestamp
id: filename_time
uses: nanzm/get-time-action@v1.1
with:
format: 'YYYYMMDD-HHmmss'
- name: Get File Content Timestamp
id: file_content_time
uses: nanzm/get-time-action@v1.1
with:
format: 'YYYY-MM-DDTHH:mm:ss.000000-05:00'
# changie expects files to be named in a specific pattern.
- name: Generate Filepath
id: fp
run: |
FILEPATH=.changes/unreleased/Dependencies-${{ steps.filename_time.outputs.time }}.yaml
echo "::set-output name=FILEPATH::$FILEPATH"
- name: Check if changelog file exists already
# if there's already a changelog entry, don't add another one!
# https://github.com/marketplace/actions/paths-changes-filter
# For each filter, it sets output variable named by the filter to the text:
# 'true' - if any of changed files matches any of filter rules
# 'false' - if none of changed files matches any of filter rules
# also, returns:
# `changes` - JSON array with names of all filters matching any of the changed files
uses: dorny/paths-filter@v2
id: changelog_check
with:
token: ${{ secrets.GITHUB_TOKEN }}
filters: |
exists:
- added: '.changes/unreleased/**.yaml'
- name: Checkout Branch
if: steps.changelog_check.outputs.exists == 'false'
uses: actions/checkout@v2
with:
# specifying the ref avoids checking out the repository in a detached state
ref: ${{ github.event.pull_request.head.ref }}
# If this is not set to false, Git push is performed with github.token and not the token
# configured using the env: GITHUB_TOKEN in commit step
persist-credentials: false
- name: Create file from template
if: steps.changelog_check.outputs.exists == 'false'
run: |
echo kind: Dependencies > "${{ steps.fp.outputs.FILEPATH }}"
echo 'body: "${{ github.event.pull_request.title }}"' >> "${{ steps.fp.outputs.FILEPATH }}"
echo time: "${{ steps.file_content_time.outputs.time }}" >> "${{ steps.fp.outputs.FILEPATH }}"
echo custom: >> "${{ steps.fp.outputs.FILEPATH }}"
echo ' Author: ${{ github.event.pull_request.user.login }}' >> "${{ steps.fp.outputs.FILEPATH }}"
echo ' Issue: "4904"' >> "${{ steps.fp.outputs.FILEPATH }}" # github.event.pull_request.issue for auto id?
echo ' PR: "${{ github.event.pull_request.number }}"' >> "${{ steps.fp.outputs.FILEPATH }}"
- name: Commit Changelog File
if: steps.changelog_check.outputs.exists == 'false'
uses: gr2m/create-or-update-pull-request-action@v1
env:
# When using the GITHUB_TOKEN, the resulting commit will not trigger another GitHub Actions
# Workflow run. This is due to limitations set by GitHub.
# See: https://docs.github.com/en/actions/security-guides/automatic-token-authentication#using-the-github_token-in-a-workflow
# When you use the repository's GITHUB_TOKEN to perform tasks on behalf of the GitHub Actions
# app, events triggered by the GITHUB_TOKEN will not create a new workflow run. This prevents
# you from accidentally creating recursive workflow runs. To get around this, use a Personal
# Access Token to commit changes.
GITHUB_TOKEN: ${{ secrets.FISHTOWN_BOT_PAT }}
with:
branch: ${{ github.event.pull_request.head.ref }}
# author expected in the format "Lorem J. Ipsum <lorem@example.com>"
author: "Github Build Bot <buildbot@fishtownanalytics.com>"
commit-message: "Add automated changelog yaml from template"
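
The `echo` steps above assemble the changie entry line by line. A Python sketch of the file those steps produce (the title, author, and PR number here are hypothetical stand-ins for the GitHub event context):

```python
from datetime import datetime, timezone, timedelta

# Stand-ins for github.event.pull_request.* values:
pr_title = "Bump some-dependency from 1.0 to 1.1"  # hypothetical
pr_author = "dependabot[bot]"
pr_number = 4321                                   # hypothetical

now = datetime.now(timezone(timedelta(hours=-5)))   # the workflow hardcodes -05:00
filename = f"Dependencies-{now:%Y%m%d-%H%M%S}.yaml"  # changie's expected naming pattern
entry = (
    "kind: Dependencies\n"
    f'body: "{pr_title}"\n'
    f"time: {now:%Y-%m-%dT%H:%M:%S}.000000-05:00\n"
    "custom:\n"
    f"  Author: {pr_author}\n"
    '  Issue: "4904"\n'                              # placeholder issue used by the workflow
    f'  PR: "{pr_number}"\n'
)
print(f".changes/unreleased/{filename}\n---\n{entry}")
```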

View File

@@ -1,165 +0,0 @@
# **what?**
# On push, if anything in core/dbt/docs or core/dbt/cli has been
# created or modified, regenerate the CLI API docs using sphinx.
# **why?**
# We watch for changes in core/dbt/cli because the CLI API docs rely on click
# and all supporting flags/params to be generated. We watch for changes in
# core/dbt/docs since any changes to sphinx configuration or any of the
# .rst files there could result in a differently built final index.html file.
# **when?**
# Whenever a change has been pushed to a branch, and only if there is a diff
# between the PR branch and main's core/dbt/cli and or core/dbt/docs dirs.
# TODO: add bot comment to PR informing contributor that the docs have been committed
# TODO: figure out why github action triggered pushes cause github to fail to report
# the status of jobs
name: Generate CLI API docs
on:
pull_request:
permissions:
contents: write
pull-requests: write
env:
CLI_DIR: ${{ github.workspace }}/core/dbt/cli
DOCS_DIR: ${{ github.workspace }}/core/dbt/docs
DOCS_BUILD_DIR: ${{ github.workspace }}/core/dbt/docs/build
jobs:
check_gen:
name: check if generation needed
runs-on: ubuntu-latest
if: ${{ github.event.pull_request.head.repo.fork == false }}
outputs:
cli_dir_changed: ${{ steps.check_cli.outputs.cli_dir_changed }}
docs_dir_changed: ${{ steps.check_docs.outputs.docs_dir_changed }}
steps:
- name: "[DEBUG] print variables"
run: |
echo "env.CLI_DIR: ${{ env.CLI_DIR }}"
echo "env.DOCS_BUILD_DIR: ${{ env.DOCS_BUILD_DIR }}"
echo "env.DOCS_DIR: ${{ env.DOCS_DIR }}"
- name: git checkout
uses: actions/checkout@v3
with:
fetch-depth: 0
ref: ${{ github.head_ref }}
- name: set shas
id: set_shas
run: |
THIS_SHA=$(git rev-parse @)
LAST_SHA=$(git rev-parse @~1)
echo "this sha: $THIS_SHA"
echo "last sha: $LAST_SHA"
echo "this_sha=$THIS_SHA" >> $GITHUB_OUTPUT
echo "last_sha=$LAST_SHA" >> $GITHUB_OUTPUT
- name: check for changes in core/dbt/cli
id: check_cli
run: |
CLI_DIR_CHANGES=$(git diff \
${{ steps.set_shas.outputs.last_sha }} \
${{ steps.set_shas.outputs.this_sha }} \
-- ${{ env.CLI_DIR }})
if [ -n "$CLI_DIR_CHANGES" ]; then
echo "changes found"
echo $CLI_DIR_CHANGES
echo "cli_dir_changed=true" >> $GITHUB_OUTPUT
exit 0
fi
echo "cli_dir_changed=false" >> $GITHUB_OUTPUT
echo "no changes found"
- name: check for changes in core/dbt/docs
id: check_docs
if: steps.check_cli.outputs.cli_dir_changed == 'false'
run: |
DOCS_DIR_CHANGES=$(git diff --name-only \
${{ steps.set_shas.outputs.last_sha }} \
${{ steps.set_shas.outputs.this_sha }} \
-- ${{ env.DOCS_DIR }} ':!${{ env.DOCS_BUILD_DIR }}')
DOCS_BUILD_DIR_CHANGES=$(git diff --name-only \
${{ steps.set_shas.outputs.last_sha }} \
${{ steps.set_shas.outputs.this_sha }} \
-- ${{ env.DOCS_BUILD_DIR }})
if [ -n "$DOCS_DIR_CHANGES" ] && [ -z "$DOCS_BUILD_DIR_CHANGES" ]; then
echo "changes found"
echo $DOCS_DIR_CHANGES
echo "docs_dir_changed=true" >> $GITHUB_OUTPUT
exit 0
fi
echo "docs_dir_changed=false" >> $GITHUB_OUTPUT
echo "no changes found"
gen_docs:
name: generate docs
runs-on: ubuntu-latest
needs: [check_gen]
if: |
needs.check_gen.outputs.cli_dir_changed == 'true'
|| needs.check_gen.outputs.docs_dir_changed == 'true'
steps:
- name: "[DEBUG] print variables"
run: |
echo "env.DOCS_DIR: ${{ env.DOCS_DIR }}"
echo "github head_ref: ${{ github.head_ref }}"
- name: git checkout
uses: actions/checkout@v3
with:
ref: ${{ github.head_ref }}
- name: install python
uses: actions/setup-python@v4.3.0
with:
python-version: 3.8
- name: install dev requirements
run: |
python3 -m venv env
source env/bin/activate
python -m pip install --upgrade pip
pip install -r requirements.txt -r dev-requirements.txt
- name: generate docs
run: |
source env/bin/activate
cd ${{ env.DOCS_DIR }}
echo "cleaning existing docs"
make clean
echo "creating docs"
make html
- name: debug
run: |
echo ">>>>> status"
git status
echo ">>>>> remotes"
git remote -v
echo ">>>>> branch"
git branch -v
echo ">>>>> log"
git log --pretty=oneline | head -5
- name: commit docs
run: |
git config user.name 'Github Build Bot'
git config user.email 'buildbot@fishtownanalytics.com'
git commit -am "Add generated CLI API docs"
git push -u origin ${{ github.head_ref }}

View File

@@ -15,9 +15,6 @@ on:
issues:
types: [closed, deleted, reopened]
# no special access is needed
permissions: read-all
jobs:
call-label-action:
uses: dbt-labs/jira-actions/.github/workflows/jira-transition.yml@main

View File

@@ -45,9 +45,7 @@ jobs:
uses: actions/checkout@v2
- name: Set up Python
uses: actions/setup-python@v4.3.0
with:
python-version: '3.8'
uses: actions/setup-python@v2
- name: Install python dependencies
run: |
@@ -73,7 +71,7 @@ jobs:
strategy:
fail-fast: false
matrix:
python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"]
python-version: ["3.7", "3.8", "3.9", "3.10"]
env:
TOXENV: "unit"
@@ -84,7 +82,7 @@ jobs:
uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4.3.0
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
@@ -101,9 +99,7 @@ jobs:
- name: Get current date
if: always()
id: date
run: |
CURRENT_DATE=$(date +'%Y-%m-%dT%H_%M_%S') # no colons allowed for artifacts
echo "date=$CURRENT_DATE" >> $GITHUB_OUTPUT
run: echo "::set-output name=date::$(date +'%Y-%m-%dT%H_%M_%S')" #no colons allowed for artifacts
- uses: actions/upload-artifact@v2
if: always()
@@ -120,8 +116,8 @@ jobs:
strategy:
fail-fast: false
matrix:
python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"]
os: [ubuntu-20.04]
python-version: ["3.7", "3.8", "3.9", "3.10"]
os: [ubuntu-latest]
include:
- python-version: 3.8
os: windows-latest
@@ -141,7 +137,7 @@ jobs:
uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4.3.0
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
@@ -170,9 +166,7 @@ jobs:
- name: Get current date
if: always()
id: date
run: |
CURRENT_DATE=$(date +'%Y-%m-%dT%H_%M_%S') # no colons allowed for artifacts
echo "date=$CURRENT_DATE" >> $GITHUB_OUTPUT
run: echo "::set-output name=date::$(date +'%Y_%m_%dT%H_%M_%S')" #no colons allowed for artifacts
- uses: actions/upload-artifact@v2
if: always()
@@ -196,9 +190,9 @@ jobs:
uses: actions/checkout@v2
- name: Set up Python
uses: actions/setup-python@v4.3.0
uses: actions/setup-python@v2
with:
python-version: '3.8'
python-version: 3.8
- name: Install python dependencies
run: |

View File

@@ -1,109 +0,0 @@
# **what?**
# Nightly releases to GitHub and PyPI. This workflow produces the following outcomes:
# - generate and validate data for the nightly release (commit SHA, version number, release branch);
# - pass data to the release workflow;
# - the nightly release will be pushed to GitHub as a draft release;
# - the nightly build will be pushed to test PyPI;
#
# **why?**
# Ensure an automated and tested release process for nightly builds
#
# **when?**
# This workflow runs on schedule or can be run manually on demand.
name: Nightly Test Release to GitHub and PyPI
on:
workflow_dispatch: # for manual triggering
schedule:
- cron: 0 9 * * *
permissions:
contents: write # this is the permission that allows creating a new release
defaults:
run:
shell: bash
env:
RELEASE_BRANCH: "main"
jobs:
aggregate-release-data:
runs-on: ubuntu-latest
outputs:
commit_sha: ${{ steps.resolve-commit-sha.outputs.release_commit }}
version_number: ${{ steps.nightly-release-version.outputs.number }}
release_branch: ${{ steps.release-branch.outputs.name }}
steps:
- name: "Checkout ${{ github.repository }} Branch ${{ env.RELEASE_BRANCH }}"
uses: actions/checkout@v3
with:
ref: ${{ env.RELEASE_BRANCH }}
- name: "Resolve Commit To Release"
id: resolve-commit-sha
run: |
commit_sha=$(git rev-parse HEAD)
echo "release_commit=$commit_sha" >> $GITHUB_OUTPUT
- name: "Get Current Version Number"
id: version-number-sources
run: |
current_version=`awk -F"current_version = " '{print $2}' .bumpversion.cfg | tr '\n' ' '`
echo "current_version=$current_version" >> $GITHUB_OUTPUT
- name: "Audit Version And Parse Into Parts"
id: semver
uses: dbt-labs/actions/parse-semver@v1.1.0
with:
version: ${{ steps.version-number-sources.outputs.current_version }}
- name: "Get Current Date"
id: current-date
run: echo "date=$(date +'%m%d%Y')" >> $GITHUB_OUTPUT
- name: "Generate Nightly Release Version Number"
id: nightly-release-version
run: |
number="${{ steps.semver.outputs.version }}.dev${{ steps.current-date.outputs.date }}+nightly"
echo "number=$number" >> $GITHUB_OUTPUT
- name: "Audit Nightly Release Version And Parse Into Parts"
uses: dbt-labs/actions/parse-semver@v1.1.0
with:
version: ${{ steps.nightly-release-version.outputs.number }}
- name: "Set Release Branch"
id: release-branch
run: |
echo "name=${{ env.RELEASE_BRANCH }}" >> $GITHUB_OUTPUT
log-outputs-aggregate-release-data:
runs-on: ubuntu-latest
needs: [aggregate-release-data]
steps:
- name: "[DEBUG] Log Outputs"
run: |
echo commit_sha : ${{ needs.aggregate-release-data.outputs.commit_sha }}
echo version_number: ${{ needs.aggregate-release-data.outputs.version_number }}
echo release_branch: ${{ needs.aggregate-release-data.outputs.release_branch }}
release-github-pypi:
needs: [aggregate-release-data]
uses: ./.github/workflows/release.yml
with:
sha: ${{ needs.aggregate-release-data.outputs.commit_sha }}
target_branch: ${{ needs.aggregate-release-data.outputs.release_branch }}
version_number: ${{ needs.aggregate-release-data.outputs.version_number }}
build_script_path: "scripts/build-dist.sh"
env_setup_script_path: "scripts/env-setup.sh"
s3_bucket_name: "core-team-artifacts"
package_test_command: "dbt --version"
test_run: true
nightly_release: true
secrets: inherit
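
The nightly version number assembled above follows a `<semver>.dev<MMDDYYYY>+nightly` scheme. A small Python sketch of the same construction (the current version value is a stand-in for what the workflow reads from .bumpversion.cfg):

```python
from datetime import date

current_version = "1.5.0a1"                   # stand-in for the .bumpversion.cfg value
stamp = date(2023, 1, 27).strftime("%m%d%Y")  # the workflow uses `date +'%m%d%Y'`
nightly_version = f"{current_version}.dev{stamp}+nightly"
print(nightly_version)  # 1.5.0a1.dev01272023+nightly
```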

View File

@@ -39,7 +39,7 @@ jobs:
max-parallel: 1
fail-fast: false
matrix:
branch: [1.0.latest, 1.1.latest, 1.2.latest, 1.3.latest, 1.4.latest, main]
branch: [1.0.latest, 1.1.latest, main]
steps:
- name: Call CI workflow for ${{ matrix.branch }} branch

View File

@@ -41,9 +41,9 @@ jobs:
id: version
run: |
IFS="." read -r MAJOR MINOR PATCH <<< ${{ github.event.inputs.version_number }}
echo "major=$MAJOR" >> $GITHUB_OUTPUT
echo "minor=$MINOR" >> $GITHUB_OUTPUT
echo "patch=$PATCH" >> $GITHUB_OUTPUT
echo "::set-output name=major::$MAJOR"
echo "::set-output name=minor::$MINOR"
echo "::set-output name=patch::$PATCH"
- name: Is pkg 'latest'
id: latest
@@ -70,10 +70,8 @@ jobs:
- name: Get docker build arg
id: build_arg
run: |
BUILD_ARG_NAME=$(echo ${{ github.event.inputs.package }} | sed 's/\-/_/g')
BUILD_ARG_VALUE=$(echo ${{ github.event.inputs.package }} | sed 's/postgres/core/g')
echo "build_arg_name=$BUILD_ARG_NAME" >> $GITHUB_OUTPUT
echo "build_arg_value=$BUILD_ARG_VALUE" >> $GITHUB_OUTPUT
echo "::set-output name=build_arg_name::"$(echo ${{ github.event.inputs.package }} | sed 's/\-/_/g')
echo "::set-output name=build_arg_value::"$(echo ${{ github.event.inputs.package }} | sed 's/postgres/core/g')
- name: Log in to the GHCR
uses: docker/login-action@v1

View File

@@ -1,230 +1,199 @@
# **what?**
# Release workflow provides the following steps:
# - checkout the given commit;
# - validate version in sources and changelog file for given version;
# - bump the version and generate a changelog if needed;
# - merge all changes to the target branch if needed;
# - run unit and integration tests against given commit;
# - build and package that SHA;
# - release it to GitHub and PyPI with that specific build;
#
# Take the given commit, run unit tests specifically on that sha, build and
# package it, and then release to GitHub and PyPi with that specific build
# **why?**
# Ensure an automated and tested release process
#
# **when?**
# This workflow can be run manually on demand or can be called by other workflows
name: Release to GitHub and PyPI
# **when?**
# This will only run manually with a given sha and version
name: Release to GitHub and PyPi
on:
workflow_dispatch:
inputs:
sha:
description: "The last commit sha in the release"
type: string
required: true
target_branch:
description: "The branch to release from"
type: string
required: true
description: 'The last commit sha in the release'
required: true
version_number:
description: "The release version number (i.e. 1.0.0b1)"
type: string
required: true
build_script_path:
description: "Build script path"
type: string
default: "scripts/build-dist.sh"
required: true
env_setup_script_path:
description: "Environment setup script path"
type: string
default: "scripts/env-setup.sh"
required: false
s3_bucket_name:
description: "AWS S3 bucket name"
type: string
default: "core-team-artifacts"
required: true
package_test_command:
description: "Package test command"
type: string
default: "dbt --version"
required: true
test_run:
description: "Test run (Publish release as draft)"
type: boolean
default: true
required: false
nightly_release:
description: "Nightly release to dev environment"
type: boolean
default: false
required: false
workflow_call:
inputs:
sha:
description: "The last commit sha in the release"
type: string
required: true
target_branch:
description: "The branch to release from"
type: string
required: true
version_number:
description: "The release version number (i.e. 1.0.0b1)"
type: string
required: true
build_script_path:
description: "Build script path"
type: string
default: "scripts/build-dist.sh"
required: true
env_setup_script_path:
description: "Environment setup script path"
type: string
default: "scripts/env-setup.sh"
required: false
s3_bucket_name:
description: "AWS S3 bucket name"
type: string
default: "core-team-artifacts"
required: true
package_test_command:
description: "Package test command"
type: string
default: "dbt --version"
required: true
test_run:
description: "Test run (Publish release as draft)"
type: boolean
default: true
required: false
nightly_release:
description: "Nightly release to dev environment"
type: boolean
default: false
required: false
permissions:
contents: write # this is the permission that allows creating a new release
description: 'The release version number (i.e. 1.0.0b1)'
required: true
defaults:
run:
shell: bash
jobs:
log-inputs:
name: Log Inputs
unit:
name: Unit test
runs-on: ubuntu-latest
env:
TOXENV: "unit"
steps:
- name: "[DEBUG] Print Variables"
- name: Check out the repository
uses: actions/checkout@v2
with:
persist-credentials: false
ref: ${{ github.event.inputs.sha }}
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: 3.8
- name: Install python dependencies
run: |
echo The last commit sha in the release: ${{ inputs.sha }}
echo The branch to release from: ${{ inputs.target_branch }}
echo The release version number: ${{ inputs.version_number }}
echo Build script path: ${{ inputs.build_script_path }}
echo Environment setup script path: ${{ inputs.env_setup_script_path }}
echo AWS S3 bucket name: ${{ inputs.s3_bucket_name }}
echo Package test command: ${{ inputs.package_test_command }}
echo Test run: ${{ inputs.test_run }}
echo Nightly release: ${{ inputs.nightly_release }}
pip install --user --upgrade pip
pip install tox
pip --version
tox --version
bump-version-generate-changelog:
name: Bump package version, Generate changelog
- name: Run tox
run: tox
uses: dbt-labs/dbt-release/.github/workflows/release-prep.yml@main
with:
sha: ${{ inputs.sha }}
version_number: ${{ inputs.version_number }}
target_branch: ${{ inputs.target_branch }}
env_setup_script_path: ${{ inputs.env_setup_script_path }}
test_run: ${{ inputs.test_run }}
nightly_release: ${{ inputs.nightly_release }}
secrets:
FISHTOWN_BOT_PAT: ${{ secrets.FISHTOWN_BOT_PAT }}
log-outputs-bump-version-generate-changelog:
name: "[Log output] Bump package version, Generate changelog"
if: ${{ !failure() && !cancelled() }}
needs: [bump-version-generate-changelog]
build:
name: build packages
runs-on: ubuntu-latest
steps:
- name: Print variables
- name: Check out the repository
uses: actions/checkout@v2
with:
persist-credentials: false
ref: ${{ github.event.inputs.sha }}
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: 3.8
- name: Install python dependencies
run: |
echo Final SHA : ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
echo Changelog path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}
pip install --user --upgrade pip
pip install --upgrade setuptools wheel twine check-wheel-contents
pip --version
build-test-package:
name: Build, Test, Package
if: ${{ !failure() && !cancelled() }}
needs: [bump-version-generate-changelog]
- name: Build distributions
run: ./scripts/build-dist.sh
uses: dbt-labs/dbt-release/.github/workflows/build.yml@main
- name: Show distributions
run: ls -lh dist/
with:
sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
version_number: ${{ inputs.version_number }}
changelog_path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}
build_script_path: ${{ inputs.build_script_path }}
s3_bucket_name: ${{ inputs.s3_bucket_name }}
package_test_command: ${{ inputs.package_test_command }}
test_run: ${{ inputs.test_run }}
nightly_release: ${{ inputs.nightly_release }}
- name: Check distribution descriptions
run: |
twine check dist/*
secrets:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
- name: Check wheel contents
run: |
check-wheel-contents dist/*.whl --ignore W007,W008
- uses: actions/upload-artifact@v2
with:
name: dist
path: |
dist/
!dist/dbt-${{github.event.inputs.version_number}}.tar.gz
test-build:
name: verify packages
needs: [build, unit]
runs-on: ubuntu-latest
steps:
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: 3.8
- name: Install python dependencies
run: |
pip install --user --upgrade pip
pip install --upgrade wheel
pip --version
- uses: actions/download-artifact@v2
with:
name: dist
path: dist/
- name: Show distributions
run: ls -lh dist/
- name: Install wheel distributions
run: |
find ./dist/*.whl -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/
- name: Check wheel distributions
run: |
dbt --version
- name: Install source distributions
run: |
find ./dist/*.gz -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/
- name: Check source distributions
run: |
dbt --version
github-release:
name: GitHub Release
if: ${{ !failure() && !cancelled() }}
needs: [bump-version-generate-changelog, build-test-package]
needs: test-build
uses: dbt-labs/dbt-release/.github/workflows/github-release.yml@main
runs-on: ubuntu-latest
with:
sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
version_number: ${{ inputs.version_number }}
changelog_path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}
test_run: ${{ inputs.test_run }}
steps:
- uses: actions/download-artifact@v2
with:
name: dist
path: '.'
# Need to set an output variable because env variables can't be taken as input
# This is needed for the next step with releasing to GitHub
- name: Find release type
id: release_type
env:
IS_PRERELEASE: ${{ contains(github.event.inputs.version_number, 'rc') || contains(github.event.inputs.version_number, 'b') }}
run: |
echo ::set-output name=isPrerelease::$IS_PRERELEASE
- name: Creating GitHub Release
uses: softprops/action-gh-release@v1
with:
name: dbt-core v${{github.event.inputs.version_number}}
tag_name: v${{github.event.inputs.version_number}}
prerelease: ${{ steps.release_type.outputs.isPrerelease }}
target_commitish: ${{github.event.inputs.sha}}
body: |
[Release notes](https://github.com/dbt-labs/dbt-core/blob/main/CHANGELOG.md)
files: |
dbt_postgres-${{github.event.inputs.version_number}}-py3-none-any.whl
dbt_core-${{github.event.inputs.version_number}}-py3-none-any.whl
dbt-postgres-${{github.event.inputs.version_number}}.tar.gz
dbt-core-${{github.event.inputs.version_number}}.tar.gz
pypi-release:
name: PyPI Release
name: Pypi release
needs: [github-release]
runs-on: ubuntu-latest
uses: dbt-labs/dbt-release/.github/workflows/pypi-release.yml@main
needs: github-release
with:
version_number: ${{ inputs.version_number }}
test_run: ${{ inputs.test_run }}
environment: PypiProd
steps:
- uses: actions/download-artifact@v2
with:
name: dist
path: 'dist'
secrets:
PYPI_API_TOKEN: ${{ secrets.PYPI_API_TOKEN }}
TEST_PYPI_API_TOKEN: ${{ secrets.TEST_PYPI_API_TOKEN }}
slack-notification:
name: Slack Notification
if: ${{ failure() && (!inputs.test_run || inputs.nightly_release) }}
needs:
[
bump-version-generate-changelog,
build-test-package,
github-release,
pypi-release,
]
uses: dbt-labs/dbt-release/.github/workflows/slack-post-notification.yml@main
with:
status: "failure"
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_DEV_CORE_ALERTS }}
- name: Publish distribution to PyPI
uses: pypa/gh-action-pypi-publish@v1.4.2
with:
password: ${{ secrets.PYPI_API_TOKEN }}
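One detail worth calling out from the old job above: the `IS_PRERELEASE` expression is a plain substring test. Restated as a sketch:

```python
# Same logic as `contains(version_number, 'rc') || contains(version_number, 'b')`
# in the "Find release type" step: rc/beta versions publish as prereleases.
def is_prerelease(version_number: str) -> bool:
    return "rc" in version_number or "b" in version_number

assert is_prerelease("1.0.0b1")
assert is_prerelease("1.0.0rc2")
assert not is_prerelease("1.0.0")
```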


@@ -21,9 +21,6 @@ on:
- "*.latest"
- "releases/*"
# no special access is needed
permissions: read-all
env:
LATEST_SCHEMA_PATH: ${{ github.workspace }}/new_schemas
SCHEMA_DIFF_ARTIFACT: ${{ github.workspace }}//schema_schanges.txt


@@ -3,10 +3,15 @@ on:
schedule:
- cron: "30 1 * * *"
permissions:
issues: write
pull-requests: write
jobs:
stale:
uses: dbt-labs/actions/.github/workflows/stale-bot-matrix.yml@main
runs-on: ubuntu-latest
steps:
# pinned at v4 (https://github.com/actions/stale/releases/tag/v4.0.0)
- uses: actions/stale@cdf15f641adb27a71842045a94023bef6945e3aa
with:
stale-issue-message: "This issue has been marked as Stale because it has been open for 180 days with no activity. If you would like the issue to remain open, please remove the stale label or comment on the issue, or it will be closed in 7 days."
stale-pr-message: "This PR has been marked as Stale because it has been open for 180 days with no activity. If you would like the PR to remain open, please remove the stale label or comment on the PR, or it will be closed in 7 days."
close-issue-message: "Although we are closing this issue as stale, it's not gone forever. Issues can be reopened if there is renewed community interest; add a comment to notify the maintainers."
# mark issues/PRs stale when they haven't seen activity in 180 days
days-before-stale: 180


@@ -22,7 +22,7 @@ jobs:
# run the performance measurements on the current or default branch
test-schema:
name: Test Log Schema
runs-on: ubuntu-20.04
runs-on: ubuntu-latest
env:
# turns warnings into errors
RUSTFLAGS: "-D warnings"
@@ -46,6 +46,12 @@ jobs:
with:
python-version: "3.8"
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- name: Install python dependencies
run: |
pip install --user --upgrade pip
@@ -63,3 +69,10 @@ jobs:
# we actually care if these pass, because the normal test run doesn't usually include many json log outputs
- name: Run integration tests
run: tox -e integration -- -nauto
# apply our schema tests to every log event from the previous step
# skips any output that isn't valid json
- uses: actions-rs/cargo@v1
with:
command: run
args: --manifest-path test/interop/log_parsing/Cargo.toml
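Per the comment above, the interop step "skips any output that isn't valid json". A minimal filter with that behavior (our sketch, not the Rust parser's code):

```python
import json

def json_events(lines):
    # Yield parsed log events, silently skipping any line that is not
    # valid JSON, matching the behavior described in the comment above.
    for line in lines:
        try:
            yield json.loads(line)
        except json.JSONDecodeError:
            continue
```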


@@ -1,15 +1,18 @@
# **what?**
# This workflow will take the new version number to bump to. With that
# This workflow will take a version number and a dry run flag. With that
# it will run versionbump to update the version number everywhere in the
# code base and then run changie to create the corresponding changelog.
# A PR will be created with the changes that can be reviewed before committing.
# code base and then generate an update Docker requirements file. If this
# is a dry run, a draft PR will open with the changes. If this isn't a dry
# run, the changes will be committed to the branch this is run on.
# **why?**
# This is to aid in releasing dbt and making sure we have updated
# the version in all places and generated the changelog.
# the versions and Docker requirements in all places.
# **when?**
# This is triggered manually
# This is triggered either manually OR
# from the repository_dispatch event "version-bump" which is sent from
# the dbt-release repo Action
name: Version Bump
@@ -17,25 +20,35 @@ on:
workflow_dispatch:
inputs:
version_number:
description: 'The version number to bump to (ex. 1.2.0, 1.3.0b1)'
description: 'The version number to bump to'
required: true
permissions:
contents: write
pull-requests: write
is_dry_run:
description: 'Creates a draft PR to allow testing instead of committing to a branch'
required: true
default: 'true'
repository_dispatch:
types: [version-bump]
jobs:
bump:
runs-on: ubuntu-latest
steps:
- name: "[DEBUG] Print Variables"
run: |
echo "all variables defined as inputs"
echo The version_number: ${{ github.event.inputs.version_number }}
- name: Check out the repository
uses: actions/checkout@v2
- name: Set version and dry run values
id: variables
env:
VERSION_NUMBER: "${{ github.event.client_payload.version_number == '' && github.event.inputs.version_number || github.event.client_payload.version_number }}"
IS_DRY_RUN: "${{ github.event.client_payload.is_dry_run == '' && github.event.inputs.is_dry_run || github.event.client_payload.is_dry_run }}"
run: |
echo Repository dispatch event version: ${{ github.event.client_payload.version_number }}
echo Repository dispatch event dry run: ${{ github.event.client_payload.is_dry_run }}
echo Workflow dispatch event version: ${{ github.event.inputs.version_number }}
echo Workflow dispatch event dry run: ${{ github.event.inputs.is_dry_run }}
echo ::set-output name=VERSION_NUMBER::$VERSION_NUMBER
echo ::set-output name=IS_DRY_RUN::$IS_DRY_RUN
- uses: actions/setup-python@v2
with:
python-version: "3.8"
@@ -46,80 +59,53 @@ jobs:
source env/bin/activate
pip install --upgrade pip
- name: Add Homebrew to PATH
run: |
echo "/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin" >> $GITHUB_PATH
- name: Install Homebrew packages
run: |
brew install pre-commit
brew tap miniscruff/changie https://github.com/miniscruff/changie
brew install changie
- name: Audit Version and Parse Into Parts
id: semver
uses: dbt-labs/actions/parse-semver@v1
with:
version: ${{ github.event.inputs.version_number }}
- name: Set branch value
id: variables
run: |
echo "BRANCH_NAME=prep-release/${{ github.event.inputs.version_number }}_$GITHUB_RUN_ID" >> $GITHUB_OUTPUT
- name: Create PR branch
if: ${{ steps.variables.outputs.IS_DRY_RUN == 'true' }}
run: |
git checkout -b ${{ steps.variables.outputs.BRANCH_NAME }}
git push origin ${{ steps.variables.outputs.BRANCH_NAME }}
git branch --set-upstream-to=origin/${{ steps.variables.outputs.BRANCH_NAME }} ${{ steps.variables.outputs.BRANCH_NAME }}
git checkout -b bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_$GITHUB_RUN_ID
git push origin bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_$GITHUB_RUN_ID
git branch --set-upstream-to=origin/bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_$GITHUB_RUN_ID bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_$GITHUB_RUN_ID
# - name: Generate Docker requirements
# run: |
# source env/bin/activate
# pip install -r requirements.txt
# pip freeze -l > docker/requirements/requirements.txt
# git status
- name: Bump version
run: |
source env/bin/activate
pip install -r dev-requirements.txt
env/bin/bumpversion --allow-dirty --new-version ${{ github.event.inputs.version_number }} major
env/bin/bumpversion --allow-dirty --new-version ${{steps.variables.outputs.VERSION_NUMBER}} major
git status
- name: Run changie
run: |
if [[ ${{ steps.semver.outputs.is-pre-release }} -eq 1 ]]
then
changie batch ${{ steps.semver.outputs.base-version }} --move-dir '${{ steps.semver.outputs.base-version }}' --prerelease '${{ steps.semver.outputs.pre-release }}'
else
changie batch ${{ steps.semver.outputs.base-version }} --include '${{ steps.semver.outputs.base-version }}' --remove-prereleases
fi
changie merge
git status
# this step will fail on whitespace errors but also correct them
- name: Remove trailing whitespace
continue-on-error: true
run: |
pre-commit run trailing-whitespace --files .bumpversion.cfg CHANGELOG.md .changes/*
git status
# this step will fail on newline errors but also correct them
- name: Removing extra newlines
continue-on-error: true
run: |
pre-commit run end-of-file-fixer --files .bumpversion.cfg CHANGELOG.md .changes/*
git status
- name: Commit version bump to branch
- name: Commit version bump directly
uses: EndBug/add-and-commit@v7
if: ${{ steps.variables.outputs.IS_DRY_RUN == 'false' }}
with:
author_name: 'Github Build Bot'
author_email: 'buildbot@fishtownanalytics.com'
message: 'Bumping version to ${{ github.event.inputs.version_number }} and generate CHANGELOG'
branch: '${{ steps.variables.outputs.BRANCH_NAME }}'
push: 'origin origin/${{ steps.variables.outputs.BRANCH_NAME }}'
message: 'Bumping version to ${{steps.variables.outputs.VERSION_NUMBER}}'
- name: Commit version bump to branch
uses: EndBug/add-and-commit@v7
if: ${{ steps.variables.outputs.IS_DRY_RUN == 'true' }}
with:
author_name: 'Github Build Bot'
author_email: 'buildbot@fishtownanalytics.com'
message: 'Bumping version to ${{steps.variables.outputs.VERSION_NUMBER}}'
branch: 'bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_${{GITHUB.RUN_ID}}'
push: 'origin origin/bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_${{GITHUB.RUN_ID}}'
- name: Create Pull Request
uses: peter-evans/create-pull-request@v3
if: ${{ steps.variables.outputs.IS_DRY_RUN == 'true' }}
with:
author: 'Github Build Bot <buildbot@fishtownanalytics.com>'
draft: true
base: ${{github.ref}}
title: 'Bumping version to ${{ github.event.inputs.version_number }} and generate changelog'
branch: '${{ steps.variables.outputs.BRANCH_NAME }}'
title: 'Bumping version to ${{steps.variables.outputs.VERSION_NUMBER}}'
branch: 'bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_${{GITHUB.RUN_ID}}'
labels: |
Skip Changelog
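The `Run changie` step above branches on whether the parsed version carries a pre-release part. A sketch of the argument selection, assuming parse-semver has already split e.g. `1.3.0b1` into base `1.3.0` and pre-release `b1`:

```python
from typing import List, Optional

# Mirrors the shell if/else around `changie batch`; illustrative only.
def changie_batch_args(base: str, prerelease: Optional[str]) -> List[str]:
    if prerelease:
        return ["batch", base, "--move-dir", base, "--prerelease", prerelease]
    return ["batch", base, "--include", base, "--remove-prereleases"]
```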

.gitignore

@@ -11,7 +11,6 @@ __pycache__/
env*/
dbt_env/
build/
!core/dbt/docs/build
develop-eggs/
dist/
downloads/
@@ -25,8 +24,7 @@ var/
*.egg-info/
.installed.cfg
*.egg
.mypy_cache/
.dmypy.json
*.mypy_cache/
logs/
# PyInstaller
@@ -51,7 +49,6 @@ coverage.xml
*,cover
.hypothesis/
test.env
makefile.test.env
*.pytest_cache/
@@ -98,7 +95,3 @@ venv/
# vscode
.vscode/
*.code-workspace
# poetry
poetry.lock


@@ -2,11 +2,11 @@
# Eventually the hooks described here will be run as tests before merging each PR.
# TODO: remove global exclusion of tests when testing overhaul is complete
exclude: ^(test/|core/dbt/docs/build/)
exclude: ^test/
# Force all unspecified python hooks to run python 3.8
default_language_version:
python: python3
python: python3.8
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
@@ -24,13 +24,18 @@ repos:
rev: 22.3.0
hooks:
- id: black
args:
- "--line-length=99"
- "--target-version=py38"
- id: black
alias: black-check
stages: [manual]
args:
- "--line-length=99"
- "--target-version=py38"
- "--check"
- "--diff"
- repo: https://github.com/pycqa/flake8
- repo: https://gitlab.com/pycqa/flake8
rev: 4.0.1
hooks:
- id: flake8


@@ -5,13 +5,12 @@
- "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
- Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)
## Previous Releases
For information on prior major and minor releases, see their changelogs:
* [1.4](https://github.com/dbt-labs/dbt-core/blob/1.4.latest/CHANGELOG.md)
* [1.3](https://github.com/dbt-labs/dbt-core/blob/1.3.latest/CHANGELOG.md)
* [1.2](https://github.com/dbt-labs/dbt-core/blob/1.2.latest/CHANGELOG.md)
* [1.1](https://github.com/dbt-labs/dbt-core/blob/1.1.latest/CHANGELOG.md)
* [1.0](https://github.com/dbt-labs/dbt-core/blob/1.0.latest/CHANGELOG.md)


@@ -7,9 +7,7 @@
3. [Setting up an environment](#setting-up-an-environment)
4. [Running `dbt` in development](#running-dbt-core-in-development)
5. [Testing dbt-core](#testing)
6. [Debugging](#debugging)
7. [Adding a changelog entry](#adding-a-changelog-entry)
8. [Submitting a Pull Request](#submitting-a-pull-request)
6. [Submitting a Pull Request](#submitting-a-pull-request)
## About this document
@@ -23,8 +21,7 @@ If you get stuck, we're happy to help! Drop us a line in the `#dbt-core-developm
- **Adapters:** Is your issue or proposed code change related to a specific [database adapter](https://docs.getdbt.com/docs/available-adapters)? If so, please open issues, PRs, and discussions in that adapter's repository instead. The sole exception is Postgres; the `dbt-postgres` plugin lives in this repository (`dbt-core`).
- **CLA:** Please note that anyone contributing code to `dbt-core` must sign the [Contributor License Agreement](https://docs.getdbt.com/docs/contributor-license-agreements). If you are unable to sign the CLA, the `dbt-core` maintainers will unfortunately be unable to merge any of your Pull Requests. We welcome you to participate in discussions, open issues, and comment on existing ones.
- **Branches:** All pull requests from community contributors should target the `main` branch (default). If the change is needed as a patch for a minor version of dbt that has already been released (or is already a release candidate), a maintainer will backport the changes in your PR to the relevant "latest" release branch (`1.0.latest`, `1.1.latest`, ...). If an issue fix applies to a release branch, that fix should be first committed to the development branch and then to the release branch (rarely release-branch fixes may not apply to `main`).
- **Releases**: Before releasing a new minor version of Core, we prepare a series of alphas and release candidates to allow users (especially employees of dbt Labs!) to test the new version in live environments. This is an important quality assurance step, as it exposes the new code to a wide variety of complicated deployments and can surface bugs before official release. Releases are accessible via pip, homebrew, and dbt Cloud.
- **Branches:** All pull requests from community contributors should target the `main` branch (default). If the change is needed as a patch for a minor version of dbt that has already been released (or is already a release candidate), a maintainer will backport the changes in your PR to the relevant "latest" release branch (`1.0.latest`, `1.1.latest`, ...)
## Getting the code
@@ -44,9 +41,7 @@ If you are not a member of the `dbt-labs` GitHub organization, you can contribut
### dbt Labs contributors
If you are a member of the `dbt-labs` GitHub organization, you will have push access to the `dbt-core` repo. Rather than forking `dbt-core` to make your changes, just clone the repository, check out a new branch, and push directly to that branch. Branch names should be fixed by `CT-XXX/` where:
* CT stands for 'core team'
* XXX stands for a JIRA ticket number
If you are a member of the `dbt-labs` GitHub organization, you will have push access to the `dbt-core` repo. Rather than forking `dbt-core` to make your changes, just clone the repository, check out a new branch, and push directly to that branch.
## Setting up an environment
@@ -56,7 +51,7 @@ There are some tools that will be helpful to you in developing locally. While th
These are the tools used in `dbt-core` development and testing:
- [`tox`](https://tox.readthedocs.io/en/latest/) to manage virtualenvs across python versions. We currently target the latest patch releases for Python 3.7, 3.8, 3.9, 3.10 and 3.11
- [`tox`](https://tox.readthedocs.io/en/latest/) to manage virtualenvs across python versions. We currently target the latest patch releases for Python 3.7, 3.8, 3.9, and 3.10
- [`pytest`](https://docs.pytest.org/en/latest/) to define, discover, and run tests
- [`flake8`](https://flake8.pycqa.org/en/latest/) for code linting
- [`black`](https://github.com/psf/black) for code formatting
@@ -96,15 +91,12 @@ brew install postgresql
### Installation
First make sure that you set up your `virtualenv` as described in [Setting up an environment](#setting-up-an-environment). Also ensure you have the latest version of pip installed with `pip install --upgrade pip`. Next, install `dbt-core` (and its dependencies):
First make sure that you set up your `virtualenv` as described in [Setting up an environment](#setting-up-an-environment). Also ensure you have the latest version of pip installed with `pip install --upgrade pip`. Next, install `dbt-core` (and its dependencies) with:
```sh
make dev
```
or, alternatively:
```sh
# or
pip install -r dev-requirements.txt -r editable-requirements.txt
pre-commit install
```
When installed in this way, any changes you make to your local copy of the source code will be reflected immediately in your next `dbt` run.
@@ -159,11 +151,11 @@ Check out the other targets in the Makefile to see other commonly used test
suites.
#### `pre-commit`
[`pre-commit`](https://pre-commit.com) takes care of running all code-checks for formatting and linting. Run `make dev` to install `pre-commit` in your local environment (we recommend running this command with a python virtual environment active). This command installs several pip executables including black, mypy, and flake8. Once this is done you can use any of the linter-based make targets as well as a git pre-commit hook that will ensure proper formatting and linting.
[`pre-commit`](https://pre-commit.com) takes care of running all code-checks for formatting and linting. Run `make dev` to install `pre-commit` in your local environment. Once this is done you can use any of the linter-based make targets as well as a git pre-commit hook that will ensure proper formatting and linting.
#### `tox`
[`tox`](https://tox.readthedocs.io/en/latest/) takes care of managing virtualenvs and installing dependencies in order to run tests. You can also run tests in parallel, for example, you can run unit tests for Python 3.7, Python 3.8, Python 3.9, Python 3.10 and Python 3.11 checks in parallel with `tox -p`. Also, you can run unit tests for specific python versions with `tox -e py37`. The configuration for these tests is located in `tox.ini`.
[`tox`](https://tox.readthedocs.io/en/latest/) takes care of managing virtualenvs and installing dependencies in order to run tests. You can also run tests in parallel, for example, you can run unit tests for Python 3.7, Python 3.8, Python 3.9, and Python 3.10 checks in parallel with `tox -p`. Also, you can run unit tests for specific python versions with `tox -e py37`. The configuration for these tests is located in `tox.ini`.
#### `pytest`
@@ -182,52 +174,20 @@ python3 -m pytest tests/functional/sources
> See [pytest usage docs](https://docs.pytest.org/en/6.2.x/usage.html) for an overview of useful command-line options.
### Unit, Integration, Functional?
Here are some general rules for adding tests:
* unit tests (`test/unit` & `tests/unit`) don't need to access a database; "pure Python" tests should be written as unit tests
* functional tests (`test/integration` & `tests/functional`) cover anything that interacts with a database, namely adapter
* *everything in* `test/*` *is being steadily migrated to* `tests/*`
## Debugging
1. The logs for a `dbt run` have stack traces and other information for debugging errors (in `logs/dbt.log` in your project directory).
2. Try using a debugger, like `ipdb`. For pytest: `--pdb --pdbcls=IPython.terminal.debugger:pdb`
3. Sometimes, it's easier to debug on a single thread: `dbt --single-threaded run`
4. To make print statements from Jinja macros: `{{ log(msg, info=true) }}`
5. You can also add `{{ debug() }}` statements, which will drop you into some auto-generated code that the macro wrote.
6. The dbt “artifacts” are written out to the target directory of your dbt project. They are in unformatted json, which can be hard to read. Format them with:
> python -m json.tool target/run_results.json > run_results.json
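The same formatting as step 6, done programmatically (paths assume you run it from a dbt project directory):

```python
import json
from pathlib import Path

# Equivalent in effect to `python -m json.tool target/run_results.json`:
# parse the artifact and rewrite it with human-readable indentation.
artifact = json.loads(Path("target/run_results.json").read_text())
Path("run_results.json").write_text(json.dumps(artifact, indent=4))
```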
### Assorted development tips
* Append `# type: ignore` to the end of a line if you need to disable `mypy` on that line.
* Sometimes flake8 complains about lines that are actually fine, in which case you can put a comment on the line such as: # noqa or # noqa: ANNN, where ANNN is the error code that flake8 issues.
* To collect output for `CProfile`, run dbt with the `-r` option and the name of an output file, i.e. `dbt -r dbt.cprof run`. If you just want to profile parsing, you can do: `dbt -r dbt.cprof parse`. `pip` install `snakeviz` to view the output. Run `snakeviz dbt.cprof` and output will be rendered in a browser window.
## Adding or modifying a CHANGELOG Entry
## Adding CHANGELOG Entry
We use [changie](https://changie.dev) to generate `CHANGELOG` entries. **Note:** Do not edit the `CHANGELOG.md` directly. Your modifications will be lost.
Follow the steps to [install `changie`](https://changie.dev/guide/installation/) for your system.
Once changie is installed and your PR is created for a new feature, simply run the following command and changie will walk you through the process of creating a changelog entry:
```shell
changie new
```
Commit the file that's created and your changelog entry is complete!
If you are contributing to a feature already in progress, you will modify the changie yaml file in dbt/.changes/unreleased/ related to your change. If you need help finding this file, please ask within the discussion for the pull request!
Once changie is installed and your PR is created, simply run `changie new` and changie will walk you through the process of creating a changelog entry. Commit the file that's created and your changelog entry is complete!
You don't need to worry about which `dbt-core` version your change will go into. Just create the changelog entry with `changie`, and open your PR against the `main` branch. All merged changes will be included in the next minor version of `dbt-core`. The Core maintainers _may_ choose to "backport" specific changes in order to patch older minor versions. In that case, a maintainer will take care of that backport after merging your PR, before releasing the new version of `dbt-core`.
## Submitting a Pull Request
Code can be merged into the current development branch `main` by opening a pull request. A `dbt-core` maintainer will review your PR. They may suggest code revision for style or clarity, or request that you add unit or integration test(s). These are good things! We believe that, with a little bit of help, anyone can contribute high-quality code.
A `dbt-core` maintainer will review your PR. They may suggest code revision for style or clarity, or request that you add unit or integration test(s). These are good things! We believe that, with a little bit of help, anyone can contribute high-quality code.
Automated tests run via GitHub Actions. If you're a first-time contributor, all tests (including code checks and unit tests) will require a maintainer to approve. Changes in the `dbt-core` repository trigger integration tests against Postgres. dbt Labs also provides CI environments in which to test changes to other adapters, triggered by PRs in those adapters' repositories, as well as periodic maintenance checks of each adapter in concert with the latest `dbt-core` code changes.
Once all tests are passing and your PR has been approved, a `dbt-core` maintainer will merge your changes into the active development branch. And that's it! Happy developing :tada:
Sometimes, the contributor license agreement auto-check bot doesn't find a user's entry in its roster. If you need to force a rerun, add `@cla-bot check` in a comment on the pull request.


@@ -49,9 +49,6 @@ RUN apt-get update \
python3.10 \
python3.10-dev \
python3.10-venv \
python3.11 \
python3.11-dev \
python3.11-venv \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*


@@ -6,37 +6,11 @@ ifeq ($(USE_DOCKER),true)
DOCKER_CMD := docker-compose run --rm test
endif
#
# To override CI_flags, create a file at this repo's root dir named `makefile.test.env`. Fill it
# with any ENV_VAR overrides required by your test environment, e.g.
# DBT_TEST_USER_1=user
# LOG_DIR="dir with a space in it"
#
# Warn: Restrict each line to one variable only.
#
ifeq (./makefile.test.env,$(wildcard ./makefile.test.env))
include ./makefile.test.env
endif
CI_FLAGS =\
DBT_TEST_USER_1=$(if $(DBT_TEST_USER_1),$(DBT_TEST_USER_1),dbt_test_user_1)\
DBT_TEST_USER_2=$(if $(DBT_TEST_USER_2),$(DBT_TEST_USER_2),dbt_test_user_2)\
DBT_TEST_USER_3=$(if $(DBT_TEST_USER_3),$(DBT_TEST_USER_3),dbt_test_user_3)\
RUSTFLAGS=$(if $(RUSTFLAGS),$(RUSTFLAGS),"-D warnings")\
LOG_DIR=$(if $(LOG_DIR),$(LOG_DIR),./logs)\
DBT_LOG_FORMAT=$(if $(DBT_LOG_FORMAT),$(DBT_LOG_FORMAT),json)
.PHONY: dev_req
dev_req: ## Installs dbt-* packages in develop mode along with only development dependencies.
.PHONY: dev
dev: ## Installs dbt-* packages in develop mode along with development dependencies.
@\
pip install -r dev-requirements.txt -r editable-requirements.txt
.PHONY: dev
dev: dev_req ## Installs dbt-* packages in develop mode along with development dependencies and pre-commit.
@\
pre-commit install
.PHONY: mypy
mypy: .env ## Runs mypy against staged changes for static type checking.
@\
@@ -74,20 +48,13 @@ test: .env ## Runs unit tests with py and code checks against staged changes.
.PHONY: integration
integration: .env ## Runs postgres integration tests with py-integration
@\
$(CI_FLAGS) $(DOCKER_CMD) tox -e py-integration -- -nauto
$(DOCKER_CMD) tox -e py-integration -- -nauto
.PHONY: integration-fail-fast
integration-fail-fast: .env ## Runs postgres integration tests with py-integration in "fail fast" mode.
@\
$(DOCKER_CMD) tox -e py-integration -- -x -nauto
.PHONY: interop
interop: clean
@\
mkdir $(LOG_DIR) && \
$(CI_FLAGS) $(DOCKER_CMD) tox -e py-integration -- -nauto && \
LOG_DIR=$(LOG_DIR) cargo run --manifest-path test/interop/log_parsing/Cargo.toml
.PHONY: setup-db
setup-db: ## Setup Postgres database with docker-compose for system testing.
@\
@@ -109,7 +76,6 @@ endif
clean: ## Resets development environment.
@echo 'cleaning repo...'
@rm -f .coverage
@rm -f .coverage.*
@rm -rf .eggs/
@rm -f .env
@rm -rf .tox/
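Note the `$(if $(VAR),$(VAR),default)` pattern in `CI_FLAGS` above: each variable keeps the caller's value when set and falls back to a default otherwise (e.g. when no `makefile.test.env` override exists). The same fallback, sketched in Python:

```python
import os

# Each flag uses the environment's value when present, else the default,
# matching the Makefile's $(if $(VAR),$(VAR),default) construct.
defaults = {
    "DBT_TEST_USER_1": "dbt_test_user_1",
    "DBT_LOG_FORMAT": "json",
    "LOG_DIR": "./logs",
}
ci_flags = {name: os.environ.get(name, default) for name, default in defaults.items()}
print(ci_flags)
```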


@@ -1,2 +1 @@
recursive-include dbt/include *.py *.sql *.yml *.html *.md .gitkeep .gitignore
include dbt/py.typed


@@ -2,59 +2,50 @@
## The following are individual files in this directory.
### compilation.py
### constants.py
### dataclass_schema.py
### deprecations.py
### exceptions.py
### flags.py
### helper_types.py
### hooks.py
### lib.py
### links.py
### logger.py
### main.py
### node_types.py
### profiler.py
### selected_resources.py
### semver.py
### tracking.py
### version.py
### lib.py
### node_types.py
### helper_types.py
### links.py
### semver.py
### ui.py
### compilation.py
### dataclass_schema.py
### exceptions.py
### hooks.py
### logger.py
### profiler.py
### utils.py
### version.py
## The subdirectories will be documented in a README in the subdirectory
* adapters
* cli
* clients
* config
* context
* contracts
* deps
* docs
* events
* graph
* include
* parser
* adapters
* context
* deps
* graph
* task
* tests
* clients
* events


@@ -1,10 +0,0 @@
## Base adapters
### impl.py
The class `SQLAdapter` in [base/impl.py](https://github.com/dbt-labs/dbt-core/blob/main/core/dbt/adapters/base/impl.py) is a (mostly) abstract object that adapter objects inherit from. The base class scaffolds out methods that every adapter project usually should implement for smooth communication between dbt and the database.
Some target databases require more or fewer methods--it all depends on what the warehouse's feature set is.
Look into the class for function-level comments.


@@ -10,5 +10,5 @@ from dbt.adapters.base.relation import ( # noqa
SchemaSearchMap,
)
from dbt.adapters.base.column import Column # noqa
from dbt.adapters.base.impl import AdapterConfig, BaseAdapter, PythonJobHelper # noqa
from dbt.adapters.base.impl import AdapterConfig, BaseAdapter # noqa
from dbt.adapters.base.plugin import AdapterPlugin # noqa


@@ -2,7 +2,7 @@ from dataclasses import dataclass
import re
from typing import Dict, ClassVar, Any, Optional
from dbt.exceptions import DbtRuntimeError
from dbt.exceptions import RuntimeException
@dataclass
@@ -12,7 +12,6 @@ class Column:
"TIMESTAMP": "TIMESTAMP",
"FLOAT": "FLOAT",
"INTEGER": "INT",
"BOOLEAN": "BOOLEAN",
}
column: str
dtype: str
@@ -85,7 +84,7 @@ class Column:
def string_size(self) -> int:
if not self.is_string():
raise DbtRuntimeError("Called string_size() on non-string field!")
raise RuntimeException("Called string_size() on non-string field!")
if self.dtype == "text" or self.char_size is None:
# char_size should never be None. Handle it reasonably just in case
@@ -124,7 +123,7 @@ class Column:
def from_description(cls, name: str, raw_data_type: str) -> "Column":
match = re.match(r"([^(]+)(\([^)]+\))?", raw_data_type)
if match is None:
raise DbtRuntimeError(f'Could not interpret data type "{raw_data_type}"')
raise RuntimeException(f'Could not interpret data type "{raw_data_type}"')
data_type, size_info = match.groups()
char_size = None
numeric_precision = None
@@ -137,7 +136,7 @@ class Column:
try:
char_size = int(parts[0])
except ValueError:
raise DbtRuntimeError(
raise RuntimeException(
f'Could not interpret data_type "{raw_data_type}": '
f'could not convert "{parts[0]}" to an integer'
)
@@ -145,14 +144,14 @@ class Column:
try:
numeric_precision = int(parts[0])
except ValueError:
raise DbtRuntimeError(
raise RuntimeException(
f'Could not interpret data_type "{raw_data_type}": '
f'could not convert "{parts[0]}" to an integer'
)
try:
numeric_scale = int(parts[1])
except ValueError:
raise DbtRuntimeError(
raise RuntimeException(
f'Could not interpret data_type "{raw_data_type}": '
f'could not convert "{parts[1]}" to an integer'
)
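For context on the hunks above: `from_description` splits a raw type such as `numeric(12,2)` into a name plus an optional parenthesized size, and the renamed exceptions fire when the size parts are not integers. A quick sketch of the happy path:

```python
import re

# Same pattern as from_description: a type name, then an optional "(...)".
match = re.match(r"([^(]+)(\([^)]+\))?", "numeric(12,2)")
assert match is not None
data_type, size_info = match.groups()      # "numeric", "(12,2)"
parts = size_info.strip("()").split(",")   # ["12", "2"]
precision, scale = int(parts[0]), int(parts[1])
```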


@@ -2,7 +2,6 @@ import abc
import os
from time import sleep
import sys
import traceback
# multiprocessing.RLock is a function returning this type
from multiprocessing.synchronize import RLock
@@ -41,16 +40,14 @@ from dbt.events.functions import fire_event
from dbt.events.types import (
NewConnection,
ConnectionReused,
ConnectionLeftOpenInCleanup,
ConnectionLeftOpen,
ConnectionClosedInCleanup,
ConnectionLeftOpen2,
ConnectionClosed,
ConnectionClosed2,
Rollback,
RollbackFailed,
)
from dbt.events.contextvars import get_node_info
from dbt import flags
from dbt.utils import cast_to_str
SleepTime = Union[int, float] # As taken by time.sleep.
AdapterHandle = Any # Adapter connection handle objects can be any class.
@@ -91,13 +88,13 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
key = self.get_thread_identifier()
with self.lock:
if key not in self.thread_connections:
raise dbt.exceptions.InvalidConnectionError(key, list(self.thread_connections))
raise dbt.exceptions.InvalidConnectionException(key, list(self.thread_connections))
return self.thread_connections[key]
def set_thread_connection(self, conn: Connection) -> None:
key = self.get_thread_identifier()
if key in self.thread_connections:
raise dbt.exceptions.DbtInternalError(
raise dbt.exceptions.InternalException(
"In set_thread_connection, existing connection exists for {}"
)
self.thread_connections[key] = conn
@@ -137,49 +134,47 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
:return: A context manager that handles exceptions raised by the
underlying database.
"""
raise dbt.exceptions.NotImplementedError(
raise dbt.exceptions.NotImplementedException(
"`exception_handler` is not implemented for this adapter!"
)
def set_connection_name(self, name: Optional[str] = None) -> Connection:
"""Called by 'acquire_connection' in BaseAdapter, which is called by
'connection_named', called by 'connection_for(node)'.
Creates a connection for this thread if one doesn't already
exist, and will rename an existing connection."""
conn_name: str
if name is None:
# if a name isn't specified, we'll re-use a single handle
# named 'master'
conn_name = "master"
else:
if not isinstance(name, str):
raise dbt.exceptions.CompilerException(
f"For connection name, got {name} - not a string!"
)
assert isinstance(name, str)
conn_name = name
conn_name: str = "master" if name is None else name
# Get a connection for this thread
conn = self.get_if_exists()
if conn and conn.name == conn_name and conn.state == "open":
# Found a connection and nothing to do, so just return it
return conn
if conn is None:
# Create a new connection
conn = Connection(
type=Identifier(self.TYPE),
name=conn_name,
name=None,
state=ConnectionState.INIT,
transaction_open=False,
handle=None,
credentials=self.profile.credentials,
)
conn.handle = LazyHandle(self.open)
# Add the connection to thread_connections for this thread
self.set_thread_connection(conn)
fire_event(
NewConnection(conn_name=conn_name, conn_type=self.TYPE, node_info=get_node_info())
)
else: # existing connection either wasn't open or didn't have the right name
if conn.state != "open":
conn.handle = LazyHandle(self.open)
if conn.name != conn_name:
orig_conn_name: str = conn.name or ""
conn.name = conn_name
fire_event(ConnectionReused(orig_conn_name=orig_conn_name, conn_name=conn_name))
if conn.name == conn_name and conn.state == "open":
return conn
fire_event(NewConnection(conn_name=conn_name, conn_type=self.TYPE))
if conn.state == "open":
fire_event(ConnectionReused(conn_name=conn_name))
else:
conn.handle = LazyHandle(self.open)
conn.name = conn_name
return conn
@classmethod
@@ -211,7 +206,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
connect should trigger a retry.
:type retryable_exceptions: Iterable[Type[Exception]]
:param int retry_limit: How many times to retry the call to connect. If this limit
is exceeded before a successful call, a FailedToConnectError will be raised.
is exceeded before a successful call, a FailedToConnectException will be raised.
Must be non-negative.
:param retry_timeout: Time to wait between attempts to connect. Can also take a
Callable that takes the number of attempts so far, beginning at 0, and returns an int
@@ -220,14 +215,14 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
:param int _attempts: Parameter used to keep track of the number of attempts in calling the
connect function across recursive calls. Passed as an argument to retry_timeout if it
is a Callable. This parameter should not be set by the initial caller.
:raises dbt.exceptions.FailedToConnectError: Upon exhausting all retry attempts without
:raises dbt.exceptions.FailedToConnectException: Upon exhausting all retry attempts without
successfully acquiring a handle.
:return: The given connection with its appropriate state and handle attributes set
depending on whether we successfully acquired a handle or not.
"""
timeout = retry_timeout(_attempts) if callable(retry_timeout) else retry_timeout
if timeout < 0:
raise dbt.exceptions.FailedToConnectError(
raise dbt.exceptions.FailedToConnectException(
"retry_timeout cannot be negative or return a negative time."
)
@@ -235,7 +230,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
# This guard is not perfect; others may add to the recursion limit (e.g. built-ins).
connection.handle = None
connection.state = ConnectionState.FAIL
raise dbt.exceptions.FailedToConnectError("retry_limit cannot be negative")
raise dbt.exceptions.FailedToConnectException("retry_limit cannot be negative")
try:
connection.handle = connect()
@@ -246,7 +241,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
if retry_limit <= 0:
connection.handle = None
connection.state = ConnectionState.FAIL
raise dbt.exceptions.FailedToConnectError(str(e))
raise dbt.exceptions.FailedToConnectException(str(e))
logger.debug(
f"Got a retryable error when attempting to open a {cls.TYPE} connection.\n"
@@ -268,12 +263,12 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
except Exception as e:
connection.handle = None
connection.state = ConnectionState.FAIL
raise dbt.exceptions.FailedToConnectError(str(e))
raise dbt.exceptions.FailedToConnectException(str(e))
@abc.abstractmethod
def cancel_open(self) -> Optional[List[str]]:
"""Cancel all open connections on the adapter. (passable)"""
raise dbt.exceptions.NotImplementedError(
raise dbt.exceptions.NotImplementedException(
"`cancel_open` is not implemented for this adapter!"
)
@@ -288,7 +283,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
This should be thread-safe, or hold the lock if necessary. The given
connection should not be in either in_use or available.
"""
raise dbt.exceptions.NotImplementedError("`open` is not implemented for this adapter!")
raise dbt.exceptions.NotImplementedException("`open` is not implemented for this adapter!")
def release(self) -> None:
with self.lock:
@@ -309,9 +304,9 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
with self.lock:
for connection in self.thread_connections.values():
if connection.state not in {"closed", "init"}:
fire_event(ConnectionLeftOpenInCleanup(conn_name=cast_to_str(connection.name)))
fire_event(ConnectionLeftOpen(conn_name=connection.name))
else:
fire_event(ConnectionClosedInCleanup(conn_name=cast_to_str(connection.name)))
fire_event(ConnectionClosed(conn_name=connection.name))
self.close(connection)
# garbage collect these connections
@@ -320,12 +315,16 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
@abc.abstractmethod
def begin(self) -> None:
"""Begin a transaction. (passable)"""
raise dbt.exceptions.NotImplementedError("`begin` is not implemented for this adapter!")
raise dbt.exceptions.NotImplementedException(
"`begin` is not implemented for this adapter!"
)
@abc.abstractmethod
def commit(self) -> None:
"""Commit a transaction. (passable)"""
raise dbt.exceptions.NotImplementedError("`commit` is not implemented for this adapter!")
raise dbt.exceptions.NotImplementedException(
"`commit` is not implemented for this adapter!"
)
@classmethod
def _rollback_handle(cls, connection: Connection) -> None:
@@ -333,40 +332,28 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
try:
connection.handle.rollback()
except Exception:
fire_event(
RollbackFailed(
conn_name=cast_to_str(connection.name),
exc_info=traceback.format_exc(),
node_info=get_node_info(),
)
)
fire_event(RollbackFailed(conn_name=connection.name))
@classmethod
def _close_handle(cls, connection: Connection) -> None:
"""Perform the actual close operation."""
# On windows, sometimes connection handles don't have a close() attr.
if hasattr(connection.handle, "close"):
fire_event(
ConnectionClosed(conn_name=cast_to_str(connection.name), node_info=get_node_info())
)
fire_event(ConnectionClosed2(conn_name=connection.name))
connection.handle.close()
else:
fire_event(
ConnectionLeftOpen(
conn_name=cast_to_str(connection.name), node_info=get_node_info()
)
)
fire_event(ConnectionLeftOpen2(conn_name=connection.name))
@classmethod
def _rollback(cls, connection: Connection) -> None:
"""Roll back the given connection."""
if connection.transaction_open is False:
raise dbt.exceptions.DbtInternalError(
raise dbt.exceptions.InternalException(
f"Tried to rollback transaction on connection "
f'"{connection.name}", but it does not have one open!'
)
fire_event(Rollback(conn_name=cast_to_str(connection.name), node_info=get_node_info()))
fire_event(Rollback(conn_name=connection.name))
cls._rollback_handle(connection)
connection.transaction_open = False
@@ -378,7 +365,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
return connection
if connection.transaction_open and connection.handle:
fire_event(Rollback(conn_name=cast_to_str(connection.name), node_info=get_node_info()))
fire_event(Rollback(conn_name=connection.name))
cls._rollback_handle(connection)
connection.transaction_open = False
@@ -411,4 +398,6 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
:return: A tuple of the query status and results (empty if fetch=False).
:rtype: Tuple[AdapterResponse, agate.Table]
"""
raise dbt.exceptions.NotImplementedError("`execute` is not implemented for this adapter!")
raise dbt.exceptions.NotImplementedException(
"`execute` is not implemented for this adapter!"
)
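As the retry docstring above notes, `retry_timeout` may be a callable receiving the number of attempts so far (starting at 0) and returning the seconds to wait. One such policy, sketched for illustration (not the adapter's default):

```python
# An exponential backoff of the shape the retry helper accepts for
# retry_timeout: attempt 0 waits 1s, attempt 1 waits 2s, and so on.
def exponential_backoff(attempt: int) -> int:
    return 2 ** attempt
```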


@@ -2,7 +2,6 @@ import abc
from concurrent.futures import as_completed, Future
from contextlib import contextmanager
from datetime import datetime
import time
from itertools import chain
from typing import (
Optional,
@@ -15,26 +14,21 @@ from typing import (
List,
Mapping,
Iterator,
Union,
Set,
)
import agate
import pytz
from dbt.exceptions import (
DbtInternalError,
MacroArgTypeError,
MacroResultError,
QuoteConfigTypeError,
NotImplementedError,
NullRelationCacheAttemptedError,
NullRelationDropAttemptedError,
RelationReturnedMultipleResultsError,
RenameToNoneAttemptedError,
DbtRuntimeError,
SnapshotTargetIncompleteError,
SnapshotTargetNotSnapshotTableError,
UnexpectedNullError,
UnexpectedNonTimestampError,
raise_database_error,
raise_compiler_error,
invalid_type_error,
get_relation_returned_multiple_results,
InternalException,
NotImplementedException,
RuntimeException,
)
from dbt.adapters.protocol import (
@@ -43,17 +37,13 @@ from dbt.adapters.protocol import (
)
from dbt.clients.agate_helper import empty_table, merge_tables, table_from_rows
from dbt.clients.jinja import MacroGenerator
from dbt.contracts.graph.compiled import CompileResultNode, CompiledSeedNode
from dbt.contracts.graph.manifest import Manifest, MacroManifest
from dbt.contracts.graph.nodes import ResultNode
from dbt.events.functions import fire_event, warn_or_error
from dbt.events.types import (
CacheMiss,
ListRelations,
CodeExecution,
CodeExecutionStatus,
CatalogGenerationError,
)
from dbt.utils import filter_null_values, executor, cast_to_str, AttrDict
from dbt.contracts.graph.parsed import ParsedSeedNode
from dbt.exceptions import warn_or_error
from dbt.events.functions import fire_event
from dbt.events.types import CacheMiss, ListRelations
from dbt.utils import filter_null_values, executor
from dbt.adapters.base.connections import Connection, AdapterResponse
from dbt.adapters.base.meta import AdapterMeta, available
@@ -64,8 +54,10 @@ from dbt.adapters.base.relation import (
SchemaSearchMap,
)
from dbt.adapters.base import Column as BaseColumn
from dbt.adapters.base import Credentials
from dbt.adapters.cache import RelationsCache, _make_ref_key_msg
from dbt.adapters.cache import RelationsCache, _make_key
SeedModel = Union[ParsedSeedNode, CompiledSeedNode]
GET_CATALOG_MACRO_NAME = "get_catalog"
@@ -74,7 +66,7 @@ FRESHNESS_MACRO_NAME = "collect_freshness"
def _expect_row_value(key: str, row: agate.Row):
if key not in row.keys():
raise DbtInternalError(
raise InternalException(
'Got a row without "{}" column, columns: {}'.format(key, row.keys())
)
return row[key]
@@ -103,10 +95,18 @@ def _utc(dt: Optional[datetime], source: BaseRelation, field_name: str) -> datet
assume the datetime is already for UTC and add the timezone.
"""
if dt is None:
raise UnexpectedNullError(field_name, source)
raise raise_database_error(
"Expected a non-null value when querying field '{}' of table "
" {} but received value 'null' instead".format(field_name, source)
)
elif not hasattr(dt, "tzinfo"):
raise UnexpectedNonTimestampError(field_name, source, dt)
raise raise_database_error(
"Expected a timestamp value when querying field '{}' of table "
"{} but received value of type '{}' instead".format(
field_name, source, type(dt).__name__
)
)
elif dt.tzinfo:
return dt.astimezone(pytz.UTC)
@@ -121,35 +121,6 @@ def _relation_name(rel: Optional[BaseRelation]) -> str:
return str(rel)
def log_code_execution(code_execution_function):
# decorator to log code and execution time
if code_execution_function.__name__ != "submit_python_job":
raise ValueError("this should be only used to log submit_python_job now")
def execution_with_log(*args):
self = args[0]
connection_name = self.connections.get_thread_connection().name
fire_event(CodeExecution(conn_name=connection_name, code_content=args[2]))
start_time = time.time()
response = code_execution_function(*args)
fire_event(
CodeExecutionStatus(
status=response._message, elapsed=round((time.time() - start_time), 2)
)
)
return response
return execution_with_log
class PythonJobHelper:
def __init__(self, parsed_model: Dict, credential: Credentials) -> None:
raise NotImplementedError("PythonJobHelper is not implemented yet")
def submit(self, compiled_code: str) -> Any:
raise NotImplementedError("PythonJobHelper submit function is not implemented yet")
class BaseAdapter(metaclass=AdapterMeta):
"""The BaseAdapter provides an abstract base class for adapters.
@@ -236,7 +207,9 @@ class BaseAdapter(metaclass=AdapterMeta):
return conn.name
@contextmanager
def connection_named(self, name: str, node: Optional[ResultNode] = None) -> Iterator[None]:
def connection_named(
self, name: str, node: Optional[CompileResultNode] = None
) -> Iterator[None]:
try:
if self.connections.query_header is not None:
self.connections.query_header.set(name, node)
@@ -248,7 +221,7 @@ class BaseAdapter(metaclass=AdapterMeta):
self.connections.query_header.reset()
@contextmanager
def connection_for(self, node: ResultNode) -> Iterator[None]:
def connection_for(self, node: CompileResultNode) -> Iterator[None]:
with self.connection_named(node.unique_id, node):
yield
@@ -311,9 +284,7 @@ class BaseAdapter(metaclass=AdapterMeta):
from dbt.parser.manifest import ManifestLoader
manifest = ManifestLoader.load_macros(
self.config,
self.connections.set_query_header,
base_macros_only=base_macros_only,
self.config, self.connections.set_query_header, base_macros_only=base_macros_only
)
# TODO CT-211
self._macro_manifest_lazy = manifest # type: ignore[assignment]
@@ -332,11 +303,7 @@ class BaseAdapter(metaclass=AdapterMeta):
if (database, schema) not in self.cache:
fire_event(
CacheMiss(
conn_name=self.nice_connection_name(),
database=cast_to_str(database),
schema=schema,
)
CacheMiss(conn_name=self.nice_connection_name(), database=database, schema=schema)
)
return False
else:
@@ -363,7 +330,7 @@ class BaseAdapter(metaclass=AdapterMeta):
lowercase strings.
"""
info_schema_name_map = SchemaSearchMap()
nodes: Iterator[ResultNode] = chain(
nodes: Iterator[CompileResultNode] = chain(
[
node
for node in manifest.nodes.values()
@@ -414,10 +381,7 @@ class BaseAdapter(metaclass=AdapterMeta):
self.cache.update_schemas(cache_update)
def set_relations_cache(
self,
manifest: Manifest,
clear: bool = False,
required_schemas: Set[BaseRelation] = None,
self, manifest: Manifest, clear: bool = False, required_schemas: Set[BaseRelation] = None
) -> None:
"""Run a query that gets a populated cache of the relations in the
database and set the cache on this adapter.
@@ -432,7 +396,7 @@ class BaseAdapter(metaclass=AdapterMeta):
"""Cache a new relation in dbt. It will show up in `list relations`."""
if relation is None:
name = self.nice_connection_name()
raise NullRelationCacheAttemptedError(name)
raise_compiler_error("Attempted to cache a null relation for {}".format(name))
self.cache.add(relation)
# so jinja doesn't render things
return ""
@@ -444,7 +408,7 @@ class BaseAdapter(metaclass=AdapterMeta):
"""
if relation is None:
name = self.nice_connection_name()
raise NullRelationDropAttemptedError(name)
raise_compiler_error("Attempted to drop a null relation for {}".format(name))
self.cache.drop(relation)
return ""
@@ -461,7 +425,9 @@ class BaseAdapter(metaclass=AdapterMeta):
name = self.nice_connection_name()
src_name = _relation_name(from_relation)
dst_name = _relation_name(to_relation)
raise RenameToNoneAttemptedError(src_name, dst_name, name)
raise_compiler_error(
"Attempted to rename {} to {} for {}".format(src_name, dst_name, name)
)
self.cache.rename(from_relation, to_relation)
return ""
@@ -473,12 +439,12 @@ class BaseAdapter(metaclass=AdapterMeta):
@abc.abstractmethod
def date_function(cls) -> str:
"""Get the date function used by this adapter's database."""
raise NotImplementedError("`date_function` is not implemented for this adapter!")
raise NotImplementedException("`date_function` is not implemented for this adapter!")
@classmethod
@abc.abstractmethod
def is_cancelable(cls) -> bool:
raise NotImplementedError("`is_cancelable` is not implemented for this adapter!")
raise NotImplementedException("`is_cancelable` is not implemented for this adapter!")
###
# Abstract methods about schemas
@@ -486,7 +452,7 @@ class BaseAdapter(metaclass=AdapterMeta):
@abc.abstractmethod
def list_schemas(self, database: str) -> List[str]:
"""Get a list of existing schemas in database"""
raise NotImplementedError("`list_schemas` is not implemented for this adapter!")
raise NotImplementedException("`list_schemas` is not implemented for this adapter!")
@available.parse(lambda *a, **k: False)
def check_schema_exists(self, database: str, schema: str) -> bool:
@@ -509,13 +475,13 @@ class BaseAdapter(metaclass=AdapterMeta):
*Implementors must call self.cache.drop() to preserve cache state!*
"""
raise NotImplementedError("`drop_relation` is not implemented for this adapter!")
raise NotImplementedException("`drop_relation` is not implemented for this adapter!")
@abc.abstractmethod
@available.parse_none
def truncate_relation(self, relation: BaseRelation) -> None:
"""Truncate the given relation."""
raise NotImplementedError("`truncate_relation` is not implemented for this adapter!")
raise NotImplementedException("`truncate_relation` is not implemented for this adapter!")
@abc.abstractmethod
@available.parse_none
@@ -524,13 +490,15 @@ class BaseAdapter(metaclass=AdapterMeta):
Implementors must call self.cache.rename() to preserve cache state.
"""
raise NotImplementedError("`rename_relation` is not implemented for this adapter!")
raise NotImplementedException("`rename_relation` is not implemented for this adapter!")
@abc.abstractmethod
@available.parse_list
def get_columns_in_relation(self, relation: BaseRelation) -> List[BaseColumn]:
"""Get a list of the columns in the given Relation."""
raise NotImplementedError("`get_columns_in_relation` is not implemented for this adapter!")
raise NotImplementedException(
"`get_columns_in_relation` is not implemented for this adapter!"
)
@available.deprecated("get_columns_in_relation", lambda *a, **k: [])
def get_columns_in_table(self, schema: str, identifier: str) -> List[BaseColumn]:
@@ -552,7 +520,7 @@ class BaseAdapter(metaclass=AdapterMeta):
:param self.Relation current: A relation that currently exists in the
database with columns of unspecified types.
"""
raise NotImplementedError(
raise NotImplementedException(
"`expand_target_column_types` is not implemented for this adapter!"
)
@@ -567,8 +535,8 @@ class BaseAdapter(metaclass=AdapterMeta):
:return: The relations in schema
:rtype: List[self.Relation]
"""
raise NotImplementedError(
"`list_relations_without_caching` is not implemented for this adapter!"
raise NotImplementedException(
"`list_relations_without_caching` is not implemented for this " "adapter!"
)
###
@@ -609,7 +577,7 @@ class BaseAdapter(metaclass=AdapterMeta):
to_relation.
"""
if not isinstance(from_relation, self.Relation):
raise MacroArgTypeError(
invalid_type_error(
method_name="get_missing_columns",
arg_name="from_relation",
got_value=from_relation,
@@ -617,7 +585,7 @@ class BaseAdapter(metaclass=AdapterMeta):
)
if not isinstance(to_relation, self.Relation):
raise MacroArgTypeError(
invalid_type_error(
method_name="get_missing_columns",
arg_name="to_relation",
got_value=to_relation,
@@ -638,11 +606,11 @@ class BaseAdapter(metaclass=AdapterMeta):
expected columns.
:param Relation relation: The relation to check
:raises InvalidMacroArgType: If the columns are
:raises CompilationException: If the columns are
incorrect.
"""
if not isinstance(relation, self.Relation):
raise MacroArgTypeError(
invalid_type_error(
method_name="valid_snapshot_target",
arg_name="relation",
got_value=relation,
@@ -663,16 +631,24 @@ class BaseAdapter(metaclass=AdapterMeta):
if missing:
if extra:
raise SnapshotTargetIncompleteError(extra, missing)
msg = (
'Snapshot target has ("{}") but not ("{}") - is it an '
"unmigrated previous version archive?".format(
'", "'.join(extra), '", "'.join(missing)
)
)
else:
raise SnapshotTargetNotSnapshotTableError(missing)
msg = 'Snapshot target is not a snapshot table (missing "{}")'.format(
'", "'.join(missing)
)
raise_compiler_error(msg)
@available.parse_none
def expand_target_column_types(
self, from_relation: BaseRelation, to_relation: BaseRelation
) -> None:
if not isinstance(from_relation, self.Relation):
raise MacroArgTypeError(
invalid_type_error(
method_name="expand_target_column_types",
arg_name="from_relation",
got_value=from_relation,
@@ -680,7 +656,7 @@ class BaseAdapter(metaclass=AdapterMeta):
)
if not isinstance(to_relation, self.Relation):
raise MacroArgTypeError(
invalid_type_error(
method_name="expand_target_column_types",
arg_name="to_relation",
got_value=to_relation,
@@ -694,10 +670,7 @@ class BaseAdapter(metaclass=AdapterMeta):
return self.cache.get_relations(database, schema)
schema_relation = self.Relation.create(
database=database,
schema=schema,
identifier="",
quote_policy=self.config.quoting,
database=database, schema=schema, identifier="", quote_policy=self.config.quoting
).without_identifier()
# we can't build the relations cache because we don't have a
@@ -705,9 +678,7 @@ class BaseAdapter(metaclass=AdapterMeta):
relations = self.list_relations_without_caching(schema_relation)
fire_event(
ListRelations(
database=cast_to_str(database),
schema=schema,
relations=[_make_ref_key_msg(x) for x in relations],
database=database, schema=schema, relations=[_make_key(x) for x in relations]
)
)
@@ -762,7 +733,7 @@ class BaseAdapter(metaclass=AdapterMeta):
"schema": schema,
"database": database,
}
raise RelationReturnedMultipleResultsError(kwargs, matches)
get_relation_returned_multiple_results(kwargs, matches)
elif matches:
return matches[0]
@@ -784,20 +755,20 @@ class BaseAdapter(metaclass=AdapterMeta):
@available.parse_none
def create_schema(self, relation: BaseRelation):
"""Create the given schema if it does not exist."""
raise NotImplementedError("`create_schema` is not implemented for this adapter!")
raise NotImplementedException("`create_schema` is not implemented for this adapter!")
@abc.abstractmethod
@available.parse_none
def drop_schema(self, relation: BaseRelation):
"""Drop the given schema (and everything in it) if it exists."""
raise NotImplementedError("`drop_schema` is not implemented for this adapter!")
raise NotImplementedException("`drop_schema` is not implemented for this adapter!")
@available
@classmethod
@abc.abstractmethod
def quote(cls, identifier: str) -> str:
"""Quote the given identifier, as appropriate for the database."""
raise NotImplementedError("`quote` is not implemented for this adapter!")
raise NotImplementedException("`quote` is not implemented for this adapter!")
@available
def quote_as_configured(self, identifier: str, quote_key: str) -> str:
@@ -826,7 +797,10 @@ class BaseAdapter(metaclass=AdapterMeta):
elif quote_config is None:
pass
else:
raise QuoteConfigTypeError(quote_config)
raise_compiler_error(
f'The seed configuration value of "quote_columns" has an '
f"invalid type {type(quote_config)}"
)
if quote_columns:
return self.quote(column)
@@ -847,7 +821,7 @@ class BaseAdapter(metaclass=AdapterMeta):
:param col_idx: The index into the agate table for the column.
:return: The name of the type in the database
"""
raise NotImplementedError("`convert_text_type` is not implemented for this adapter!")
raise NotImplementedException("`convert_text_type` is not implemented for this adapter!")
@classmethod
@abc.abstractmethod
@@ -859,7 +833,7 @@ class BaseAdapter(metaclass=AdapterMeta):
:param col_idx: The index into the agate table for the column.
:return: The name of the type in the database
"""
raise NotImplementedError("`convert_number_type` is not implemented for this adapter!")
raise NotImplementedException("`convert_number_type` is not implemented for this adapter!")
@classmethod
@abc.abstractmethod
@@ -871,7 +845,9 @@ class BaseAdapter(metaclass=AdapterMeta):
:param col_idx: The index into the agate table for the column.
:return: The name of the type in the database
"""
raise NotImplementedError("`convert_boolean_type` is not implemented for this adapter!")
raise NotImplementedException(
"`convert_boolean_type` is not implemented for this adapter!"
)
@classmethod
@abc.abstractmethod
@@ -883,7 +859,9 @@ class BaseAdapter(metaclass=AdapterMeta):
:param col_idx: The index into the agate table for the column.
:return: The name of the type in the database
"""
raise NotImplementedError("`convert_datetime_type` is not implemented for this adapter!")
raise NotImplementedException(
"`convert_datetime_type` is not implemented for this adapter!"
)
@classmethod
@abc.abstractmethod
@@ -895,7 +873,7 @@ class BaseAdapter(metaclass=AdapterMeta):
:param col_idx: The index into the agate table for the column.
:return: The name of the type in the database
"""
raise NotImplementedError("`convert_date_type` is not implemented for this adapter!")
raise NotImplementedException("`convert_date_type` is not implemented for this adapter!")
@classmethod
@abc.abstractmethod
@@ -907,7 +885,7 @@ class BaseAdapter(metaclass=AdapterMeta):
:param col_idx: The index into the agate table for the column.
:return: The name of the type in the database
"""
raise NotImplementedError("`convert_time_type` is not implemented for this adapter!")
raise NotImplementedException("`convert_time_type` is not implemented for this adapter!")
@available
@classmethod
@@ -942,7 +920,7 @@ class BaseAdapter(metaclass=AdapterMeta):
context_override: Optional[Dict[str, Any]] = None,
kwargs: Dict[str, Any] = None,
text_only_columns: Optional[Iterable[str]] = None,
) -> AttrDict:
) -> agate.Table:
"""Look macro_name up in the manifest and execute its results.
:param macro_name: The name of the macro to execute.
@@ -974,7 +952,7 @@ class BaseAdapter(metaclass=AdapterMeta):
else:
package_name = 'the "{}" package'.format(project)
raise DbtRuntimeError(
raise RuntimeException(
'dbt could not find a macro with the name "{}" in {}'.format(
macro_name, package_name
)
@@ -1027,7 +1005,7 @@ class BaseAdapter(metaclass=AdapterMeta):
manifest=manifest,
)
results = self._catalog_filter_table(table, manifest) # type: ignore[arg-type]
results = self._catalog_filter_table(table, manifest)
return results
def get_catalog(self, manifest: Manifest) -> Tuple[agate.Table, List[Exception]]:
@@ -1059,7 +1037,7 @@ class BaseAdapter(metaclass=AdapterMeta):
loaded_at_field: str,
filter: Optional[str],
manifest: Optional[Manifest] = None,
) -> Tuple[AdapterResponse, Dict[str, Any]]:
) -> Dict[str, Any]:
"""Calculate the freshness of sources in dbt, and return it"""
kwargs: Dict[str, Any] = {
"source": source,
@@ -1068,12 +1046,15 @@ class BaseAdapter(metaclass=AdapterMeta):
}
# run the macro
result = self.execute_macro(FRESHNESS_MACRO_NAME, kwargs=kwargs, manifest=manifest)
adapter_response, table = result.response, result.table # type: ignore[attr-defined]
table = self.execute_macro(FRESHNESS_MACRO_NAME, kwargs=kwargs, manifest=manifest)
# now we have a 1-row table of the maximum `loaded_at_field` value and
# the current time according to the db.
if len(table) != 1 or len(table[0]) != 2:
raise MacroResultError(FRESHNESS_MACRO_NAME, table)
raise_compiler_error(
'Got an invalid result from "{}" macro: {}'.format(
FRESHNESS_MACRO_NAME, [tuple(r) for r in table]
)
)
if table[0][0] is None:
# no records in the table, so really the max_loaded_at was
# infinitely long ago. Just call it 0:00 January 1 year UTC
@@ -1083,12 +1064,11 @@ class BaseAdapter(metaclass=AdapterMeta):
snapshotted_at = _utc(table[0][1], source, loaded_at_field)
age = (snapshotted_at - max_loaded_at).total_seconds()
freshness = {
return {
"max_loaded_at": max_loaded_at,
"snapshotted_at": snapshotted_at,
"age": age,
}
return adapter_response, freshness
def pre_model_hook(self, config: Mapping[str, Any]) -> Any:
"""A hook for running some operation before the model materialization
@@ -1151,7 +1131,7 @@ class BaseAdapter(metaclass=AdapterMeta):
elif location == "prepend":
return f"'{value}' || {add_to}"
else:
raise DbtRuntimeError(f'Got an unexpected location value of "{location}"')
raise RuntimeException(f'Got an unexpected location value of "{location}"')
def get_rows_different_sql(
self,
@@ -1182,74 +1162,6 @@ class BaseAdapter(metaclass=AdapterMeta):
return sql
@property
def python_submission_helpers(self) -> Dict[str, Type[PythonJobHelper]]:
raise NotImplementedError("python_submission_helpers is not specified")
@property
def default_python_submission_method(self) -> str:
raise NotImplementedError("default_python_submission_method is not specified")
@log_code_execution
def submit_python_job(self, parsed_model: dict, compiled_code: str) -> AdapterResponse:
submission_method = parsed_model["config"].get(
"submission_method", self.default_python_submission_method
)
if submission_method not in self.python_submission_helpers:
raise NotImplementedError(
"Submission method {} is not supported for current adapter".format(
submission_method
)
)
job_helper = self.python_submission_helpers[submission_method](
parsed_model, self.connections.profile.credentials
)
submission_result = job_helper.submit(compiled_code)
# process submission result to generate adapter response
return self.generate_python_submission_response(submission_result)
def generate_python_submission_response(self, submission_result: Any) -> AdapterResponse:
raise NotImplementedError(
"Your adapter need to implement generate_python_submission_response"
)
def valid_incremental_strategies(self):
"""The set of standard builtin strategies which this adapter supports out-of-the-box.
Not used to validate custom strategies defined by end users.
"""
return ["append"]
def builtin_incremental_strategies(self):
return ["append", "delete+insert", "merge", "insert_overwrite"]
@available.parse_none
def get_incremental_strategy_macro(self, model_context, strategy: str):
# Construct macro_name from strategy name
if strategy is None:
strategy = "default"
# validate strategies for this adapter
valid_strategies = self.valid_incremental_strategies()
valid_strategies.append("default")
builtin_strategies = self.builtin_incremental_strategies()
if strategy in builtin_strategies and strategy not in valid_strategies:
raise DbtRuntimeError(
f"The incremental strategy '{strategy}' is not valid for this adapter"
)
strategy = strategy.replace("+", "_")
macro_name = f"get_incremental_{strategy}_sql"
# The model_context should have MacroGenerator callable objects for all macros
if macro_name not in model_context:
raise DbtRuntimeError(
'dbt could not find an incremental strategy macro with the name "{}" in {}'.format(
macro_name, self.config.project_name
)
)
# This returns a callable macro
return model_context[macro_name]
COLUMNS_EQUAL_SQL = """
with diff_count as (
@@ -1297,7 +1209,7 @@ def catch_as_completed(
elif isinstance(exc, KeyboardInterrupt) or not isinstance(exc, Exception):
raise exc
else:
warn_or_error(CatalogGenerationError(exc=str(exc)))
warn_or_error(f"Encountered an error while generating catalog: {str(exc)}")
# exc is not None, derives from Exception, and isn't ctrl+c
exceptions.append(exc)
return merge_tables(tables), exceptions
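The incremental-strategy resolution removed in the hunks above is plain string manipulation before any macro runs: validate the strategy, swap `+` for `_`, and look up `get_incremental_{strategy}_sql` in the model context. A minimal sketch of the same flow, assuming a plain dict standing in for dbt's real model context:

```python
# Minimal sketch of the strategy -> macro resolution above.
# `model_context` is a hypothetical dict stand-in, not dbt's real context.
from typing import Optional


def resolve_incremental_macro(model_context: dict, strategy: Optional[str]):
    if strategy is None:
        strategy = "default"
    valid_strategies = ["append", "default"]  # valid_incremental_strategies() + "default"
    builtin_strategies = ["append", "delete+insert", "merge", "insert_overwrite"]
    if strategy in builtin_strategies and strategy not in valid_strategies:
        raise RuntimeError(f"The incremental strategy '{strategy}' is not valid for this adapter")
    macro_name = f"get_incremental_{strategy.replace('+', '_')}_sql"
    if macro_name not in model_context:
        raise RuntimeError(f'could not find an incremental strategy macro named "{macro_name}"')
    return model_context[macro_name]  # a callable macro


ctx = {"get_incremental_append_sql": lambda: "insert into target select * from tmp"}
print(resolve_incremental_macro(ctx, "append")())
```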


@@ -1,7 +1,7 @@
from typing import List, Optional, Type
from dbt.adapters.base import Credentials
from dbt.exceptions import CompilationError
from dbt.exceptions import CompilationException
from dbt.adapters.protocol import AdapterProtocol
@@ -11,7 +11,7 @@ def project_name_from_path(include_path: str) -> str:
partial = Project.partial_load(include_path)
if partial.project_name is None:
raise CompilationError(f"Invalid project at {include_path}: name not set!")
raise CompilationException(f"Invalid project at {include_path}: name not set!")
return partial.project_name


@@ -5,9 +5,9 @@ from dbt.clients.jinja import QueryStringGenerator
from dbt.context.manifest import generate_query_header_context
from dbt.contracts.connection import AdapterRequiredConfig, QueryComment
from dbt.contracts.graph.nodes import ResultNode
from dbt.contracts.graph.compiled import CompileResultNode
from dbt.contracts.graph.manifest import Manifest
from dbt.exceptions import DbtRuntimeError
from dbt.exceptions import RuntimeException
class NodeWrapper:
@@ -48,7 +48,7 @@ class _QueryComment(local):
if isinstance(comment, str) and "*/" in comment:
# tell the user "no" so they don't hurt themselves by writing
# garbage
raise DbtRuntimeError(f'query comment contains illegal value "*/": {comment}')
raise RuntimeException(f'query comment contains illegal value "*/": {comment}')
self.query_comment = comment
self.append = append
@@ -90,7 +90,7 @@ class MacroQueryStringSetter:
def reset(self):
self.set("master", None)
def set(self, name: str, node: Optional[ResultNode]):
def set(self, name: str, node: Optional[CompileResultNode]):
wrapped: Optional[NodeWrapper] = None
if node is not None:
wrapped = NodeWrapper(node)
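The `*/` check above matters because the comment is wrapped in `/* ... */` before being attached to the query, so a user-supplied `*/` would terminate the comment early. A minimal sketch of the failure mode, assuming a simple wrapper rather than dbt's actual formatting code:

```python
def wrap_comment(comment: str) -> str:
    # Hypothetical wrapper; the real check lives in _QueryComment.set above.
    if isinstance(comment, str) and "*/" in comment:
        raise RuntimeError(f'query comment contains illegal value "*/": {comment}')
    return f"/* {comment} */"


print(wrap_comment("run_id=42"))  # /* run_id=42 */
# wrap_comment("x */ drop table y; /*") raises instead of producing
# "/* x */ drop table y; /* */" -- a smuggled extra statement.
```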


@@ -1,8 +1,9 @@
from collections.abc import Hashable
from dataclasses import dataclass, field
from typing import Optional, TypeVar, Any, Type, Dict, Iterator, Tuple, Set
from dataclasses import dataclass
from typing import Optional, TypeVar, Any, Type, Dict, Union, Iterator, Tuple, Set
from dbt.contracts.graph.nodes import SourceDefinition, ManifestNode, ResultNode, ParsedNode
from dbt.contracts.graph.compiled import CompiledNode
from dbt.contracts.graph.parsed import ParsedSourceDefinition, ParsedNode
from dbt.contracts.relation import (
RelationType,
ComponentName,
@@ -11,11 +12,7 @@ from dbt.contracts.relation import (
Policy,
Path,
)
from dbt.exceptions import (
ApproximateMatchError,
DbtInternalError,
MultipleDatabasesNotAllowedError,
)
from dbt.exceptions import InternalException
from dbt.node_types import NodeType
from dbt.utils import filter_null_values, deep_merge, classproperty
@@ -30,10 +27,8 @@ class BaseRelation(FakeAPIObject, Hashable):
path: Path
type: Optional[RelationType] = None
quote_character: str = '"'
# Python 3.11 requires that these use default_factory instead of simple default
# ValueError: mutable default <class 'dbt.contracts.relation.Policy'> for field include_policy is not allowed: use default_factory
include_policy: Policy = field(default_factory=lambda: Policy())
quote_policy: Policy = field(default_factory=lambda: Policy())
include_policy: Policy = Policy()
quote_policy: Policy = Policy()
dbt_created: bool = False
def _is_exactish_match(self, field: ComponentName, value: str) -> bool:
@@ -44,9 +39,9 @@ class BaseRelation(FakeAPIObject, Hashable):
@classmethod
def _get_field_named(cls, field_name):
for f, _ in cls._get_fields():
if f.name == field_name:
return f
for field, _ in cls._get_fields():
if field.name == field_name:
return field
# this should be unreachable
raise ValueError(f"BaseRelation has no {field_name} field!")
@@ -57,11 +52,11 @@ class BaseRelation(FakeAPIObject, Hashable):
@classmethod
def get_default_quote_policy(cls) -> Policy:
return cls._get_field_named("quote_policy").default_factory()
return cls._get_field_named("quote_policy").default
@classmethod
def get_default_include_policy(cls) -> Policy:
return cls._get_field_named("include_policy").default_factory()
return cls._get_field_named("include_policy").default
def get(self, key, default=None):
"""Override `.get` to return a metadata object so we don't break
@@ -87,7 +82,7 @@ class BaseRelation(FakeAPIObject, Hashable):
if not search:
# nothing was passed in
raise dbt.exceptions.DbtRuntimeError(
raise dbt.exceptions.RuntimeException(
"Tried to match relation, but no search path was passed!"
)
@@ -104,7 +99,7 @@ class BaseRelation(FakeAPIObject, Hashable):
if approximate_match and not exact_match:
target = self.create(database=database, schema=schema, identifier=identifier)
raise ApproximateMatchError(target, self)
dbt.exceptions.approximate_relation_match(target, self)
return exact_match
@@ -189,7 +184,7 @@ class BaseRelation(FakeAPIObject, Hashable):
)
@classmethod
def create_from_source(cls: Type[Self], source: SourceDefinition, **kwargs: Any) -> Self:
def create_from_source(cls: Type[Self], source: ParsedSourceDefinition, **kwargs: Any) -> Self:
source_quoting = source.quoting.to_dict(omit_none=True)
source_quoting.pop("column", None)
quote_policy = deep_merge(
@@ -214,7 +209,7 @@ class BaseRelation(FakeAPIObject, Hashable):
def create_ephemeral_from_node(
cls: Type[Self],
config: HasQuoting,
node: ManifestNode,
node: Union[ParsedNode, CompiledNode],
) -> Self:
# Note that ephemeral models are based on the name.
identifier = cls.add_ephemeral_prefix(node.name)
@@ -227,7 +222,7 @@ class BaseRelation(FakeAPIObject, Hashable):
def create_from_node(
cls: Type[Self],
config: HasQuoting,
node: ManifestNode,
node: Union[ParsedNode, CompiledNode],
quote_policy: Optional[Dict[str, bool]] = None,
**kwargs: Any,
) -> Self:
@@ -248,20 +243,20 @@ class BaseRelation(FakeAPIObject, Hashable):
def create_from(
cls: Type[Self],
config: HasQuoting,
node: ResultNode,
node: Union[CompiledNode, ParsedNode, ParsedSourceDefinition],
**kwargs: Any,
) -> Self:
if node.resource_type == NodeType.Source:
if not isinstance(node, SourceDefinition):
raise DbtInternalError(
"type mismatch, expected SourceDefinition but got {}".format(type(node))
if not isinstance(node, ParsedSourceDefinition):
raise InternalException(
"type mismatch, expected ParsedSourceDefinition but got {}".format(type(node))
)
return cls.create_from_source(node, **kwargs)
else:
# Can't use ManifestNode here because of parameterized generics
if not isinstance(node, (ParsedNode)):
raise DbtInternalError(
f"type mismatch, expected ManifestNode but got {type(node)}"
if not isinstance(node, (ParsedNode, CompiledNode)):
raise InternalException(
"type mismatch, expected ParsedNode or CompiledNode but "
"got {}".format(type(node))
)
return cls.create_from_node(config, node, **kwargs)
@@ -358,7 +353,7 @@ class InformationSchema(BaseRelation):
def __post_init__(self):
if not isinstance(self.information_schema_view, (type(None), str)):
raise dbt.exceptions.CompilationError(
raise dbt.exceptions.CompilationException(
"Got an invalid name: {}".format(self.information_schema_view)
)
@@ -442,7 +437,7 @@ class SchemaSearchMap(Dict[InformationSchema, Set[Optional[str]]]):
if not allow_multiple_databases:
seen = {r.database.lower() for r in self if r.database}
if len(seen) > 1:
raise MultipleDatabasesNotAllowedError(seen)
dbt.exceptions.raise_compiler_error(str(seen))
for information_schema_name, schema in self.search():
path = {"database": information_schema_name.database, "schema": schema}


@@ -2,23 +2,26 @@ import threading
from copy import deepcopy
from typing import Any, Dict, Iterable, List, Optional, Set, Tuple
from dbt.adapters.reference_keys import (
_make_ref_key,
_make_ref_key_msg,
_make_msg_from_ref_key,
_ReferenceKey,
from dbt.adapters.reference_keys import _make_key, _ReferenceKey
import dbt.exceptions
from dbt.events.functions import fire_event
from dbt.events.types import (
AddLink,
AddRelation,
DropCascade,
DropMissingRelation,
DropRelation,
DumpAfterAddGraph,
DumpAfterRenameSchema,
DumpBeforeAddGraph,
DumpBeforeRenameSchema,
RenameSchema,
TemporaryRelation,
UncachedRelation,
UpdateReference,
)
from dbt.exceptions import (
DependentLinkNotCachedError,
NewNameAlreadyInCacheError,
NoneRelationFoundError,
ReferencedLinkNotCachedError,
TruncatedModelNameCausedCollisionError,
)
from dbt.events.functions import fire_event, fire_event_if
from dbt.events.types import CacheAction, CacheDumpGraph
import dbt.flags as flags
from dbt.utils import lowercase
from dbt.helper_types import Lazy
def dot_separated(key: _ReferenceKey) -> str:
@@ -78,7 +81,7 @@ class _CachedRelation:
:return _ReferenceKey: A key for this relation.
"""
return _make_ref_key(self)
return _make_key(self)
def add_reference(self, referrer: "_CachedRelation"):
"""Add a reference from referrer to self, indicating that if this node
@@ -141,7 +144,11 @@ class _CachedRelation:
:raises InternalError: If the new key already exists.
"""
if new_key in self.referenced_by:
raise NewNameAlreadyInCacheError(old_key, new_key)
dbt.exceptions.raise_cache_inconsistent(
'in rename of "{}" -> "{}", new name is in the cache already'.format(
old_key, new_key
)
)
if old_key not in self.referenced_by:
return
@@ -257,17 +264,21 @@ class RelationsCache:
if referenced is None:
return
if referenced is None:
raise ReferencedLinkNotCachedError(referenced_key)
dbt.exceptions.raise_cache_inconsistent(
"in add_link, referenced link key {} not in cache!".format(referenced_key)
)
dependent = self.relations.get(dependent_key)
if dependent is None:
raise DependentLinkNotCachedError(dependent_key)
dbt.exceptions.raise_cache_inconsistent(
"in add_link, dependent link key {} not in cache!".format(dependent_key)
)
assert dependent is not None # we just raised!
referenced.add_reference(dependent)
# This is called in plugins/postgres/dbt/adapters/postgres/impl.py
# TODO: Is this dead code? I can't seem to find it grepping the codebase.
def add_link(self, referenced, dependent):
"""Add a link between two relations to the database. If either relation
does not exist, it will be added as an "external" relation.
@@ -282,18 +293,13 @@ class RelationsCache:
:param BaseRelation dependent: The dependent model.
:raises InternalError: If either entry does not exist.
"""
ref_key = _make_ref_key(referenced)
dep_key = _make_ref_key(dependent)
ref_key = _make_key(referenced)
dep_key = _make_key(dependent)
if (ref_key.database, ref_key.schema) not in self:
# if we have not cached the referenced schema at all, we must be
# referring to a table outside our control. There's no need to make
# a link - we will never drop the referenced relation during a run.
fire_event(
CacheAction(
ref_key=_make_msg_from_ref_key(ref_key),
ref_key_2=_make_msg_from_ref_key(dep_key),
)
)
fire_event(UncachedRelation(dep_key=dep_key, ref_key=ref_key))
return
if ref_key not in self.relations:
# Insert a dummy "external" relation.
@@ -303,13 +309,7 @@ class RelationsCache:
# Insert a dummy "external" relation.
dependent = dependent.replace(type=referenced.External)
self.add(dependent)
fire_event(
CacheAction(
action="add_link",
ref_key=_make_msg_from_ref_key(dep_key),
ref_key_2=_make_msg_from_ref_key(ref_key),
)
)
fire_event(AddLink(dep_key=dep_key, ref_key=ref_key))
with self.lock:
self._add_link(ref_key, dep_key)
@@ -320,18 +320,12 @@ class RelationsCache:
:param BaseRelation relation: The underlying relation.
"""
cached = _CachedRelation(relation)
fire_event_if(
flags.LOG_CACHE_EVENTS,
lambda: CacheDumpGraph(before_after="before", action="adding", dump=self.dump_graph()),
)
fire_event(CacheAction(action="add_relation", ref_key=_make_ref_key_msg(cached)))
fire_event(AddRelation(relation=_make_key(cached)))
fire_event(DumpBeforeAddGraph(dump=Lazy.defer(lambda: self.dump_graph())))
with self.lock:
self._setdefault(cached)
fire_event_if(
flags.LOG_CACHE_EVENTS,
lambda: CacheDumpGraph(before_after="after", action="adding", dump=self.dump_graph()),
)
fire_event(DumpAfterAddGraph(dump=Lazy.defer(lambda: self.dump_graph())))
def _remove_refs(self, keys):
"""Removes all references to all entries in keys. This does not
@@ -346,6 +340,19 @@ class RelationsCache:
for cached in self.relations.values():
cached.release_references(keys)
def _drop_cascade_relation(self, dropped_key):
"""Drop the given relation and cascade it appropriately to all
dependent relations.
:param _CachedRelation dropped: An existing _CachedRelation to drop.
"""
if dropped_key not in self.relations:
fire_event(DropMissingRelation(relation=dropped_key))
return
consequences = self.relations[dropped_key].collect_consequences()
fire_event(DropCascade(dropped=dropped_key, consequences=consequences))
self._remove_refs(consequences)
def drop(self, relation):
"""Drop the named relation and cascade it appropriately to all
dependent relations.
@@ -357,22 +364,10 @@ class RelationsCache:
:param str schema: The schema of the relation to drop.
:param str identifier: The identifier of the relation to drop.
"""
dropped_key = _make_ref_key(relation)
dropped_key_msg = _make_ref_key_msg(relation)
fire_event(CacheAction(action="drop_relation", ref_key=dropped_key_msg))
dropped_key = _make_key(relation)
fire_event(DropRelation(dropped=dropped_key))
with self.lock:
if dropped_key not in self.relations:
fire_event(CacheAction(action="drop_missing_relation", ref_key=dropped_key_msg))
return
consequences = self.relations[dropped_key].collect_consequences()
# convert from a list of _ReferenceKeys to a list of ReferenceKeyMsgs
consequence_msgs = [_make_msg_from_ref_key(key) for key in consequences]
fire_event(
CacheAction(
action="drop_cascade", ref_key=dropped_key_msg, ref_list=consequence_msgs
)
)
self._remove_refs(consequences)
self._drop_cascade_relation(dropped_key)
def _rename_relation(self, old_key, new_relation):
"""Rename a relation named old_key to new_key, updating references.
@@ -388,20 +383,14 @@ class RelationsCache:
relation = self.relations.pop(old_key)
new_key = new_relation.key()
# relation has to rename its innards, so it needs the _CachedRelation.
# relaton has to rename its innards, so it needs the _CachedRelation.
relation.rename(new_relation)
# update all the relations that refer to it
for cached in self.relations.values():
if cached.is_referenced_by(old_key):
fire_event(
CacheAction(
action="update_reference",
ref_key=_make_ref_key_msg(old_key),
ref_key_2=_make_ref_key_msg(new_key),
ref_key_3=_make_ref_key_msg(cached.key()),
)
UpdateReference(old_key=old_key, new_key=new_key, cached_key=cached.key())
)
cached.rename_key(old_key, new_key)
self.relations[new_key] = relation
@@ -424,14 +413,14 @@ class RelationsCache:
:raises InternalError: If the new key is already present.
"""
if new_key in self.relations:
# Tell user when collision caused by model names truncated during
# materialization.
raise TruncatedModelNameCausedCollisionError(new_key, self.relations)
dbt.exceptions.raise_cache_inconsistent(
"in rename, new key {} already in cache: {}".format(
new_key, list(self.relations.keys())
)
)
if old_key not in self.relations:
fire_event(
CacheAction(action="temporary_relation", ref_key=_make_msg_from_ref_key(old_key))
)
fire_event(TemporaryRelation(key=old_key))
return False
return True
@@ -447,20 +436,11 @@ class RelationsCache:
:param BaseRelation new: The new relation name information.
:raises InternalError: If the new key is already present.
"""
old_key = _make_ref_key(old)
new_key = _make_ref_key(new)
fire_event(
CacheAction(
action="rename_relation",
ref_key=_make_msg_from_ref_key(old_key),
ref_key_2=_make_msg_from_ref_key(new),
)
)
old_key = _make_key(old)
new_key = _make_key(new)
fire_event(RenameSchema(old_key=old_key, new_key=new_key))
fire_event_if(
flags.LOG_CACHE_EVENTS,
lambda: CacheDumpGraph(before_after="before", action="rename", dump=self.dump_graph()),
)
fire_event(DumpBeforeRenameSchema(dump=Lazy.defer(lambda: self.dump_graph())))
with self.lock:
if self._check_rename_constraints(old_key, new_key):
@@ -468,10 +448,7 @@ class RelationsCache:
else:
self._setdefault(_CachedRelation(new))
fire_event_if(
flags.LOG_CACHE_EVENTS,
lambda: CacheDumpGraph(before_after="after", action="rename", dump=self.dump_graph()),
)
fire_event(DumpAfterRenameSchema(dump=Lazy.defer(lambda: self.dump_graph())))
def get_relations(self, database: Optional[str], schema: Optional[str]) -> List[Any]:
"""Case-insensitively yield all relations matching the given schema.
@@ -490,7 +467,9 @@ class RelationsCache:
]
if None in results:
raise NoneRelationFoundError()
dbt.exceptions.raise_cache_inconsistent(
"in get_relations, a None relation was found in the cache!"
)
return results
def clear(self):
@@ -517,6 +496,6 @@ class RelationsCache:
"""
for relation in to_remove:
# it may have been cascaded out already
drop_key = _make_ref_key(relation)
drop_key = _make_key(relation)
if drop_key in self.relations:
self.drop(drop_key)
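The rename path above enforces two constraints: renaming onto a key that already exists is a hard cache-inconsistency error (often caused by truncated model names colliding), while renaming from a key that was never cached is treated as a temporary relation and skipped. A compressed sketch of that check, with plain strings standing in for `_ReferenceKey`s:

```python
def check_rename_constraints(relations: dict, old_key: str, new_key: str) -> bool:
    # Hypothetical simplification of RelationsCache._check_rename_constraints.
    if new_key in relations:
        raise RuntimeError(f"in rename, new key {new_key} already in cache: {list(relations)}")
    if old_key not in relations:
        return False  # temporary relation that was never cached; nothing to rename
    return True


cache = {"db.schema.old_model": object()}
print(check_rename_constraints(cache, "db.schema.old_model", "db.schema.new_model"))  # True
print(check_rename_constraints(cache, "db.schema.tmp_model", "db.schema.other"))      # False
```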


@@ -1,18 +1,23 @@
import threading
import traceback
from contextlib import contextmanager
from importlib import import_module
from pathlib import Path
from typing import Any, Dict, List, Optional, Set, Type
from importlib import import_module
from typing import Type, Dict, Any, List, Optional, Set
from dbt.adapters.base.plugin import AdapterPlugin
from dbt.adapters.protocol import AdapterConfig, AdapterProtocol, RelationProtocol
from dbt.contracts.connection import AdapterRequiredConfig, Credentials
from dbt.exceptions import RuntimeException, InternalException
from dbt.include.global_project import (
PACKAGE_PATH as GLOBAL_PROJECT_PATH,
PROJECT_NAME as GLOBAL_PROJECT_NAME,
)
from dbt.events.functions import fire_event
from dbt.events.types import AdapterImportError, PluginLoadError
from dbt.exceptions import DbtInternalError, DbtRuntimeError
from dbt.include.global_project import PACKAGE_PATH as GLOBAL_PROJECT_PATH
from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME
from dbt.contracts.connection import Credentials, AdapterRequiredConfig
from dbt.adapters.protocol import (
AdapterProtocol,
AdapterConfig,
RelationProtocol,
)
from dbt.adapters.base.plugin import AdapterPlugin
Adapter = AdapterProtocol
@@ -34,7 +39,7 @@ class AdapterContainer:
names = ", ".join(self.plugins.keys())
message = f"Invalid adapter type {name}! Must be one of {names}"
raise DbtRuntimeError(message)
raise RuntimeException(message)
def get_adapter_class_by_name(self, name: str) -> Type[Adapter]:
plugin = self.get_plugin_by_name(name)
@@ -59,18 +64,18 @@ class AdapterContainer:
# if we failed to import the target module in particular, inform
# the user about it via a runtime error
if exc.name == "dbt.adapters." + name:
fire_event(AdapterImportError(exc=str(exc)))
raise DbtRuntimeError(f"Could not find adapter type {name}!")
fire_event(AdapterImportError(exc=exc))
raise RuntimeException(f"Could not find adapter type {name}!")
# otherwise, the error had to have come from some underlying
# library. Log the stack trace.
fire_event(PluginLoadError(exc_info=traceback.format_exc()))
fire_event(PluginLoadError())
raise
plugin: AdapterPlugin = mod.Plugin
plugin_type = plugin.adapter.type()
if plugin_type != name:
raise DbtRuntimeError(
raise RuntimeException(
f"Expected to find adapter with type named {name}, got "
f"adapter with type {plugin_type}"
)
@@ -132,7 +137,7 @@ class AdapterContainer:
try:
plugin = self.plugins[plugin_name]
except KeyError:
raise DbtInternalError(f"No plugin found for {plugin_name}") from None
raise InternalException(f"No plugin found for {plugin_name}") from None
plugins.append(plugin)
seen.add(plugin_name)
for dep in plugin.dependencies:
@@ -151,7 +156,7 @@ class AdapterContainer:
try:
path = self.packages[package_name]
except KeyError:
raise DbtInternalError(f"No internal package listing found for {package_name}")
raise InternalException(f"No internal package listing found for {package_name}")
paths.append(path)
return paths
@@ -212,12 +217,3 @@ def get_adapter_package_names(name: Optional[str]) -> List[str]:
def get_adapter_type_names(name: Optional[str]) -> List[str]:
return FACTORY.get_adapter_type_names(name)
@contextmanager
def adapter_management():
reset_adapters()
try:
yield
finally:
cleanup_connections()
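The `adapter_management` context manager removed above is a reset-then-always-cleanup wrapper around a command's lifetime. A minimal sketch of the same pattern, with hypothetical hook functions in place of dbt's module-level `reset_adapters`/`cleanup_connections`:

```python
from contextlib import contextmanager


def reset_adapters():  # hypothetical stand-in
    print("resetting adapter registry")


def cleanup_connections():  # hypothetical stand-in
    print("closing open connections")


@contextmanager
def adapter_management():
    reset_adapters()
    try:
        yield
    finally:
        # runs even if the command body raises
        cleanup_connections()


with adapter_management():
    print("running a dbt command")
```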


@@ -8,6 +8,7 @@ from typing import (
Generic,
TypeVar,
Tuple,
Union,
Dict,
Any,
)
@@ -16,7 +17,8 @@ from typing_extensions import Protocol
import agate
from dbt.contracts.connection import Connection, AdapterRequiredConfig, AdapterResponse
from dbt.contracts.graph.nodes import ResultNode, ManifestNode
from dbt.contracts.graph.compiled import CompiledNode, ManifestNode, NonSourceCompiledNode
from dbt.contracts.graph.parsed import ParsedNode, ParsedSourceDefinition
from dbt.contracts.graph.model_config import BaseConfig
from dbt.contracts.graph.manifest import Manifest
from dbt.contracts.relation import Policy, HasQuoting
@@ -46,7 +48,11 @@ class RelationProtocol(Protocol):
...
@classmethod
def create_from(cls: Type[Self], config: HasQuoting, node: ResultNode) -> Self:
def create_from(
cls: Type[Self],
config: HasQuoting,
node: Union[CompiledNode, ParsedNode, ParsedSourceDefinition],
) -> Self:
...
@@ -59,7 +65,7 @@ class CompilerProtocol(Protocol):
node: ManifestNode,
manifest: Manifest,
extra_context: Optional[Dict[str, Any]] = None,
) -> ManifestNode:
) -> NonSourceCompiledNode:
...
@@ -82,7 +88,7 @@ class AdapterProtocol( # type: ignore[misc]
],
):
# N.B. Technically these are ClassVars, but mypy doesn't support putting type vars in a
# ClassVar due to the restrictiveness of PEP-526
# ClassVar due to the restirctiveness of PEP-526
# See: https://github.com/python/mypy/issues/5144
AdapterSpecificConfigs: Type[AdapterConfig_T]
Column: Type[Column_T]
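These `Protocol` classes are structural types: an adapter or relation satisfies them by shape alone, with no inheritance required. A tiny sketch of the idea, using a much smaller protocol than dbt's:

```python
from typing import Protocol


class RendersSQL(Protocol):  # hypothetical, far smaller than RelationProtocol
    def render(self) -> str:
        ...


class PostgresRelation:  # no inheritance from the protocol
    def render(self) -> str:
        return '"analytics"."public"."my_model"'


def show(rel: RendersSQL) -> None:
    print(rel.render())


show(PostgresRelation())  # type-checks structurally and runs
```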


@@ -2,7 +2,6 @@
from collections import namedtuple
from typing import Any, Optional
from dbt.events.proto_types import ReferenceKeyMsg
_ReferenceKey = namedtuple("_ReferenceKey", "database schema identifier")
@@ -15,12 +14,7 @@ def lowercase(value: Optional[str]) -> Optional[str]:
return value.lower()
# For backwards compatibility. New code should use _make_ref_key
def _make_key(relation: Any) -> _ReferenceKey:
return _make_ref_key(relation)
def _make_ref_key(relation: Any) -> _ReferenceKey:
"""Make _ReferenceKeys with lowercase values for the cache so we don't have
to keep track of quoting
"""
@@ -28,13 +22,3 @@ def _make_ref_key(relation: Any) -> _ReferenceKey:
return _ReferenceKey(
lowercase(relation.database), lowercase(relation.schema), lowercase(relation.identifier)
)
def _make_ref_key_msg(relation: Any):
return _make_msg_from_ref_key(_make_ref_key(relation))
def _make_msg_from_ref_key(ref_key: _ReferenceKey) -> ReferenceKeyMsg:
return ReferenceKeyMsg(
database=ref_key.database, schema=ref_key.schema, identifier=ref_key.identifier
)
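Both `_make_key` (kept as a backwards-compatibility alias in one version) and `_make_ref_key` lowercase every component, so cache lookups never have to care how a relation was quoted. A short sketch of that normalization, with a hypothetical relation class:

```python
from collections import namedtuple
from typing import Any, Optional

_ReferenceKey = namedtuple("_ReferenceKey", "database schema identifier")


def lowercase(value: Optional[str]) -> Optional[str]:
    return None if value is None else value.lower()


def _make_ref_key(relation: Any) -> _ReferenceKey:
    # lowercase values for the cache so we don't have to track quoting
    return _ReferenceKey(
        lowercase(relation.database), lowercase(relation.schema), lowercase(relation.identifier)
    )


class FakeRelation:  # hypothetical stand-in for a BaseRelation
    database, schema, identifier = "Analytics", "PUBLIC", "My_Model"


print(_make_ref_key(FakeRelation()))
# _ReferenceKey(database='analytics', schema='public', identifier='my_model')
```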


@@ -10,8 +10,6 @@ from dbt.adapters.base import BaseConnectionManager
from dbt.contracts.connection import Connection, ConnectionState, AdapterResponse
from dbt.events.functions import fire_event
from dbt.events.types import ConnectionUsed, SQLQuery, SQLCommit, SQLQueryStatus
from dbt.events.contextvars import get_node_info
from dbt.utils import cast_to_str
class SQLConnectionManager(BaseConnectionManager):
@@ -27,7 +25,9 @@ class SQLConnectionManager(BaseConnectionManager):
@abc.abstractmethod
def cancel(self, connection: Connection):
"""Cancel the given connection."""
raise dbt.exceptions.NotImplementedError("`cancel` is not implemented for this adapter!")
raise dbt.exceptions.NotImplementedException(
"`cancel` is not implemented for this adapter!"
)
def cancel_open(self) -> List[str]:
names = []
@@ -55,13 +55,7 @@ class SQLConnectionManager(BaseConnectionManager):
connection = self.get_thread_connection()
if auto_begin and connection.transaction_open is False:
self.begin()
fire_event(
ConnectionUsed(
conn_type=self.TYPE,
conn_name=cast_to_str(connection.name),
node_info=get_node_info(),
)
)
fire_event(ConnectionUsed(conn_type=self.TYPE, conn_name=connection.name))
with self.exception_handler(sql):
if abridge_sql_log:
@@ -69,11 +63,7 @@ class SQLConnectionManager(BaseConnectionManager):
else:
log_sql = sql
fire_event(
SQLQuery(
conn_name=cast_to_str(connection.name), sql=log_sql, node_info=get_node_info()
)
)
fire_event(SQLQuery(conn_name=connection.name, sql=log_sql))
pre = time.time()
cursor = connection.handle.cursor()
@@ -81,9 +71,7 @@ class SQLConnectionManager(BaseConnectionManager):
fire_event(
SQLQueryStatus(
status=str(self.get_response(cursor)),
elapsed=round((time.time() - pre)),
node_info=get_node_info(),
status=str(self.get_response(cursor)), elapsed=round((time.time() - pre), 2)
)
)
@@ -93,7 +81,7 @@ class SQLConnectionManager(BaseConnectionManager):
@abc.abstractmethod
def get_response(cls, cursor: Any) -> AdapterResponse:
"""Get the status of the cursor."""
raise dbt.exceptions.NotImplementedError(
raise dbt.exceptions.NotImplementedException(
"`get_response` is not implemented for this adapter!"
)
@@ -149,7 +137,7 @@ class SQLConnectionManager(BaseConnectionManager):
def begin(self):
connection = self.get_thread_connection()
if connection.transaction_open is True:
raise dbt.exceptions.DbtInternalError(
raise dbt.exceptions.InternalException(
'Tried to begin a new transaction on connection "{}", but '
"it already had one open!".format(connection.name)
)
@@ -162,12 +150,12 @@ class SQLConnectionManager(BaseConnectionManager):
def commit(self):
connection = self.get_thread_connection()
if connection.transaction_open is False:
raise dbt.exceptions.DbtInternalError(
raise dbt.exceptions.InternalException(
'Tried to commit transaction on connection "{}", but '
"it does not have one open!".format(connection.name)
)
fire_event(SQLCommit(conn_name=connection.name, node_info=get_node_info()))
fire_event(SQLCommit(conn_name=connection.name))
self.add_commit_query()
connection.transaction_open = False
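The `begin`/`commit` guards above are symmetric checks on a single `transaction_open` flag. A condensed sketch of the pattern, with a bare class standing in for dbt's `Connection` contract:

```python
class Connection:  # hypothetical minimal stand-in
    def __init__(self, name: str):
        self.name = name
        self.transaction_open = False


def begin(conn: Connection) -> None:
    if conn.transaction_open:
        raise RuntimeError(
            f'Tried to begin a new transaction on connection "{conn.name}", '
            "but it already had one open!"
        )
    conn.transaction_open = True


def commit(conn: Connection) -> None:
    if not conn.transaction_open:
        raise RuntimeError(
            f'Tried to commit transaction on connection "{conn.name}", '
            "but it does not have one open!"
        )
    conn.transaction_open = False


c = Connection("master")
begin(c)
commit(c)  # fine; a second commit(c) without begin(c) would raise
```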


@@ -1,10 +1,11 @@
import agate
from typing import Any, Optional, Tuple, Type, List
import dbt.clients.agate_helper
from dbt.contracts.connection import Connection
from dbt.exceptions import RelationTypeNullError
import dbt.exceptions
from dbt.adapters.base import BaseAdapter, available
from dbt.adapters.cache import _make_ref_key_msg
from dbt.adapters.cache import _make_key
from dbt.adapters.sql import SQLConnectionManager
from dbt.events.functions import fire_event
from dbt.events.types import ColTypeChange, SchemaCreation, SchemaDrop
@@ -109,7 +110,7 @@ class SQLAdapter(BaseAdapter):
ColTypeChange(
orig_type=target_column.data_type,
new_type=new_type,
table=_make_ref_key_msg(current),
table=_make_key(current),
)
)
@@ -131,7 +132,9 @@ class SQLAdapter(BaseAdapter):
def drop_relation(self, relation):
if relation.type is None:
raise RelationTypeNullError(relation)
dbt.exceptions.raise_compiler_error(
"Tried to drop relation {}, but its type is null.".format(relation)
)
self.cache_dropped(relation)
self.execute_macro(DROP_RELATION_MACRO_NAME, kwargs={"relation": relation})
@@ -152,7 +155,7 @@ class SQLAdapter(BaseAdapter):
def create_schema(self, relation: BaseRelation) -> None:
relation = relation.without_identifier()
fire_event(SchemaCreation(relation=_make_ref_key_msg(relation)))
fire_event(SchemaCreation(relation=_make_key(relation)))
kwargs = {
"relation": relation,
}
@@ -163,7 +166,7 @@ class SQLAdapter(BaseAdapter):
def drop_schema(self, relation: BaseRelation) -> None:
relation = relation.without_identifier()
fire_event(SchemaDrop(relation=_make_ref_key_msg(relation)))
fire_event(SchemaDrop(relation=_make_key(relation)))
kwargs = {
"relation": relation,
}


@@ -1 +0,0 @@
TODO


@@ -1,44 +0,0 @@
# TODO Move this to /core/dbt/flags.py when we're ready to break things
import os
from dataclasses import dataclass
from multiprocessing import get_context
from pprint import pformat as pf
from click import get_current_context
if os.name != "nt":
# https://bugs.python.org/issue41567
import multiprocessing.popen_spawn_posix # type: ignore # noqa: F401
@dataclass(frozen=True)
class Flags:
def __init__(self, ctx=None) -> None:
if ctx is None:
ctx = get_current_context()
def assign_params(ctx):
"""Recursively adds all click params to flag object"""
for param_name, param_value in ctx.params.items():
# N.B. You have to use the base MRO method (object.__setattr__) to set attributes
# when using frozen dataclasses.
# https://docs.python.org/3/library/dataclasses.html#frozen-instances
if hasattr(self, param_name):
raise Exception(f"Duplicate flag names found in click command: {param_name}")
object.__setattr__(self, param_name.upper(), param_value)
if ctx.parent:
assign_params(ctx.parent)
assign_params(ctx)
# Hard coded flags
object.__setattr__(self, "WHICH", ctx.info_name)
object.__setattr__(self, "MP_CONTEXT", get_context("spawn"))
# Support console DO NOT TRACK initiative
if os.getenv("DO_NOT_TRACK", "").lower() in ("1", "t", "true", "y", "yes"):
object.__setattr__(self, "ANONYMOUS_USAGE_STATS", False)
def __str__(self) -> str:
return str(pf(self.__dict__))
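Because `Flags` is a frozen dataclass, ordinary attribute assignment raises `FrozenInstanceError`; the `object.__setattr__` calls above are the documented escape hatch. A minimal demonstration:

```python
from dataclasses import dataclass


@dataclass(frozen=True)
class Flags:
    pass


flags = Flags()
# flags.DEBUG = True  # would raise dataclasses.FrozenInstanceError
object.__setattr__(flags, "DEBUG", True)  # bypasses the frozen guard
print(flags.DEBUG)  # True
```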


@@ -1,412 +0,0 @@
import inspect # This is temporary for RAT-ing
from copy import copy
from pprint import pformat as pf # This is temporary for RAT-ing
import click
from dbt.adapters.factory import adapter_management
from dbt.cli import params as p
from dbt.cli.flags import Flags
from dbt.profiler import profiler
def cli_runner():
# Alias "list" to "ls"
ls = copy(cli.commands["list"])
ls.hidden = True
cli.add_command(ls, "ls")
# Run the cli
cli()
# dbt
@click.group(
context_settings={"help_option_names": ["-h", "--help"]},
invoke_without_command=True,
no_args_is_help=True,
epilog="Specify one of these sub-commands and you can find more help from there.",
)
@click.pass_context
@p.anonymous_usage_stats
@p.cache_selected_only
@p.debug
@p.enable_legacy_logger
@p.fail_fast
@p.log_cache_events
@p.log_format
@p.macro_debugging
@p.partial_parse
@p.print
@p.printer_width
@p.quiet
@p.record_timing_info
@p.static_parser
@p.use_colors
@p.use_experimental_parser
@p.version
@p.version_check
@p.warn_error
@p.warn_error_options
@p.write_json
def cli(ctx, **kwargs):
"""An ELT tool for managing your SQL transformations and data models.
For more documentation on these commands, visit: docs.getdbt.com
"""
incomplete_flags = Flags()
# Profiling
if incomplete_flags.RECORD_TIMING_INFO:
ctx.with_resource(profiler(enable=True, outfile=incomplete_flags.RECORD_TIMING_INFO))
# Adapter management
ctx.with_resource(adapter_management())
# Version info
if incomplete_flags.VERSION:
click.echo(f"`version` called\n ctx.params: {pf(ctx.params)}")
return
else:
del ctx.params["version"]
# dbt build
@cli.command("build")
@click.pass_context
@p.defer
@p.exclude
@p.fail_fast
@p.full_refresh
@p.indirect_selection
@p.log_path
@p.models
@p.profile
@p.profiles_dir
@p.project_dir
@p.selector
@p.show
@p.state
@p.store_failures
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
def build(ctx, **kwargs):
"""Run all Seeds, Models, Snapshots, and tests in DAG order"""
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
# dbt clean
@cli.command("clean")
@click.pass_context
@p.profile
@p.profiles_dir
@p.project_dir
@p.target
@p.vars
def clean(ctx, **kwargs):
"""Delete all folders in the clean-targets list (usually the dbt_packages and target directories.)"""
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
# dbt docs
@cli.group()
@click.pass_context
def docs(ctx, **kwargs):
"""Generate or serve the documentation website for your project"""
# dbt docs generate
@docs.command("generate")
@click.pass_context
@p.compile_docs
@p.defer
@p.exclude
@p.log_path
@p.models
@p.profile
@p.profiles_dir
@p.project_dir
@p.selector
@p.state
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
def docs_generate(ctx, **kwargs):
"""Generate the documentation website for your project"""
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
# dbt docs serve
@docs.command("serve")
@click.pass_context
@p.browser
@p.port
@p.profile
@p.profiles_dir
@p.project_dir
@p.target
@p.vars
def docs_serve(ctx, **kwargs):
"""Serve the documentation website for your project"""
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
# dbt compile
@cli.command("compile")
@click.pass_context
@p.defer
@p.exclude
@p.full_refresh
@p.log_path
@p.models
@p.parse_only
@p.profile
@p.profiles_dir
@p.project_dir
@p.selector
@p.state
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
def compile(ctx, **kwargs):
"""Generates executable SQL from source, model, test, and analysis files. Compiled SQL files are written to the target/ directory."""
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
# dbt debug
@cli.command("debug")
@click.pass_context
@p.config_dir
@p.profile
@p.profiles_dir
@p.project_dir
@p.target
@p.vars
@p.version_check
def debug(ctx, **kwargs):
"""Show some helpful information about dbt for debugging. Not to be confused with the --debug option which increases verbosity."""
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
# dbt deps
@cli.command("deps")
@click.pass_context
@p.profile
@p.profiles_dir
@p.project_dir
@p.target
@p.vars
def deps(ctx, **kwargs):
"""Pull the most recent version of the dependencies listed in packages.yml"""
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
# dbt init
@cli.command("init")
@click.pass_context
@p.profile
@p.profiles_dir
@p.project_dir
@p.skip_profile_setup
@p.target
@p.vars
def init(ctx, **kwargs):
"""Initialize a new DBT project."""
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
# dbt list
@cli.command("list")
@click.pass_context
@p.exclude
@p.indirect_selection
@p.models
@p.output
@p.output_keys
@p.profile
@p.profiles_dir
@p.project_dir
@p.resource_type
@p.selector
@p.state
@p.target
@p.vars
def list(ctx, **kwargs):
"""List the resources in your project"""
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
# dbt parse
@cli.command("parse")
@click.pass_context
@p.compile_parse
@p.log_path
@p.profile
@p.profiles_dir
@p.project_dir
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
@p.write_manifest
def parse(ctx, **kwargs):
"""Parses the project and provides information on performance"""
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
# dbt run
@cli.command("run")
@click.pass_context
@p.defer
@p.exclude
@p.fail_fast
@p.full_refresh
@p.log_path
@p.models
@p.profile
@p.profiles_dir
@p.project_dir
@p.selector
@p.state
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
def run(ctx, **kwargs):
"""Compile SQL and execute against the current target database."""
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
# dbt run operation
@cli.command("run-operation")
@click.pass_context
@p.args
@p.profile
@p.profiles_dir
@p.project_dir
@p.target
@p.vars
def run_operation(ctx, **kwargs):
"""Run the named macro with any supplied arguments."""
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
# dbt seed
@cli.command("seed")
@click.pass_context
@p.exclude
@p.full_refresh
@p.log_path
@p.models
@p.profile
@p.profiles_dir
@p.project_dir
@p.selector
@p.show
@p.state
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
def seed(ctx, **kwargs):
"""Load data from csv files into your data warehouse."""
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
# dbt snapshot
@cli.command("snapshot")
@click.pass_context
@p.defer
@p.exclude
@p.models
@p.profile
@p.profiles_dir
@p.project_dir
@p.selector
@p.state
@p.target
@p.threads
@p.vars
def snapshot(ctx, **kwargs):
"""Execute snapshots defined in your project"""
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
# dbt source
@cli.group()
@click.pass_context
def source(ctx, **kwargs):
"""Manage your project's sources"""
# dbt source freshness
@source.command("freshness")
@click.pass_context
@p.exclude
@p.models
@p.output_path # TODO: Is this ok to re-use? We have three different output params, how much can we consolidate?
@p.profile
@p.profiles_dir
@p.project_dir
@p.selector
@p.state
@p.target
@p.threads
@p.vars
def freshness(ctx, **kwargs):
"""Snapshots the current freshness of the project's sources"""
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
# dbt test
@cli.command("test")
@click.pass_context
@p.defer
@p.exclude
@p.fail_fast
@p.indirect_selection
@p.log_path
@p.models
@p.profile
@p.profiles_dir
@p.project_dir
@p.selector
@p.state
@p.store_failures
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
def test(ctx, **kwargs):
"""Runs tests on data in deployed models. Run this after `dbt run`"""
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
# Support running as a module
if __name__ == "__main__":
cli_runner()
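Every command above follows the same shape: stack reusable option decorators from `params` onto a `click` command, then materialize a `Flags` object from the click context. A stripped-down sketch of that pattern with one hypothetical option:

```python
import click

debug = click.option("--debug/--no-debug", help="Display debug logging.")  # reusable, like p.debug


@click.group()
def cli():
    """Toy dbt-style CLI."""


@cli.command("run")
@click.pass_context
@debug
def run(ctx, **kwargs):
    # ctx.params is what dbt's Flags(ctx) walks (including parent contexts)
    click.echo(f"`run` called with params: {ctx.params}")


if __name__ == "__main__":
    cli()
```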


@@ -1,48 +0,0 @@
from click import ParamType
import yaml
from dbt.helper_types import WarnErrorOptions
class YAML(ParamType):
"""The Click YAML type. Converts YAML strings into objects."""
name = "YAML"
def convert(self, value, param, ctx):
# assume non-string values are a problem
if not isinstance(value, str):
self.fail(f"Cannot load YAML from type {type(value)}", param, ctx)
try:
return yaml.load(value, Loader=yaml.Loader)
except yaml.parser.ParserError:
self.fail(f"String '{value}' is not valid YAML", param, ctx)
class WarnErrorOptionsType(YAML):
"""The Click WarnErrorOptions type. Converts YAML strings into objects."""
name = "WarnErrorOptionsType"
def convert(self, value, param, ctx):
include_exclude = super().convert(value, param, ctx)
return WarnErrorOptions(
include=include_exclude.get("include", []), exclude=include_exclude.get("exclude", [])
)
class Truthy(ParamType):
"""The Click Truthy type. Converts strings into a "truthy" type"""
name = "TRUTHY"
def convert(self, value, param, ctx):
# assume non-string / non-None values are a problem
if not isinstance(value, (str, type(None))):
self.fail(f"Cannot load TRUTHY from type {type(value)}", param, ctx)
if value is None or value.lower() in ("0", "false", "f"):
return None
else:
return value
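The `YAML` param type above turns an inline YAML string into a Python object before the command body ever sees it. A short usage sketch pairing it with an `--args`-style option (as defined later in this diff):

```python
import click
import yaml


class YAML(click.ParamType):
    name = "YAML"

    def convert(self, value, param, ctx):
        if not isinstance(value, str):
            self.fail(f"Cannot load YAML from type {type(value)}", param, ctx)
        try:
            return yaml.load(value, Loader=yaml.Loader)
        except yaml.parser.ParserError:
            self.fail(f"String '{value}' is not valid YAML", param, ctx)


@click.command()
@click.option("--args", type=YAML())
def run_operation(args):
    click.echo(f"parsed: {args!r}")


# $ python cli.py --args '{my_variable: my_value}'
# parsed: {'my_variable': 'my_value'}
if __name__ == "__main__":
    run_operation()
```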


@@ -1,389 +0,0 @@
from pathlib import Path, PurePath
import click
from dbt.cli.option_types import YAML, WarnErrorOptionsType
from dbt.cli.resolvers import default_project_dir, default_profiles_dir
# TODO: The name (reflected in flags) is a correction!
# The original name was `SEND_ANONYMOUS_USAGE_STATS` and used an env var called "DBT_SEND_ANONYMOUS_USAGE_STATS"
# Both of which break existing naming conventions (doesn't match param flag).
# This will need to be fixed before use in the main codebase and communicated as a change to the community!
anonymous_usage_stats = click.option(
"--anonymous-usage-stats/--no-anonymous-usage-stats",
envvar="DBT_ANONYMOUS_USAGE_STATS",
help="Send anonymous usage stats to dbt Labs.",
default=True,
)
args = click.option(
"--args",
envvar=None,
help="Supply arguments to the macro. This dictionary will be mapped to the keyword arguments defined in the selected macro. This argument should be a YAML string, eg. '{my_variable: my_value}'",
type=YAML(),
)
browser = click.option(
"--browser/--no-browser",
envvar=None,
help="Wether or not to open a local web browser after starting the server",
default=True,
)
cache_selected_only = click.option(
"--cache-selected-only/--no-cache-selected-only",
envvar="DBT_CACHE_SELECTED_ONLY",
help="Pre cache database objects relevant to selected resource only.",
)
compile_docs = click.option(
"--compile/--no-compile",
envvar=None,
help="Wether or not to run 'dbt compile' as part of docs generation",
default=True,
)
compile_parse = click.option(
"--compile/--no-compile",
envvar=None,
help="TODO: No help text currently available",
default=True,
)
config_dir = click.option(
"--config-dir",
envvar=None,
help="If specified, DBT will show path information for this project",
type=click.STRING,
)
debug = click.option(
"--debug/--no-debug",
"-d/ ",
envvar="DBT_DEBUG",
help="Display debug logging during dbt execution. Useful for debugging and making bug reports.",
)
# TODO: The env var and name (reflected in flags) are corrections!
# The original name was `DEFER_MODE` and used an env var called "DBT_DEFER_TO_STATE"
# Both of which break existing naming conventions.
# This will need to be fixed before use in the main codebase and communicated as a change to the community!
defer = click.option(
"--defer/--no-defer",
envvar="DBT_DEFER",
help="If set, defer to the state variable for resolving unselected nodes.",
)
enable_legacy_logger = click.option(
"--enable-legacy-logger/--no-enable-legacy-logger",
envvar="DBT_ENABLE_LEGACY_LOGGER",
hidden=True,
)
exclude = click.option("--exclude", envvar=None, help="Specify the nodes to exclude.")
fail_fast = click.option(
"--fail-fast/--no-fail-fast",
"-x/ ",
envvar="DBT_FAIL_FAST",
help="Stop execution on first failure.",
)
full_refresh = click.option(
"--full-refresh",
"-f",
envvar="DBT_FULL_REFRESH",
help="If specified, dbt will drop incremental models and fully-recalculate the incremental table from the model definition.",
is_flag=True,
)
indirect_selection = click.option(
"--indirect-selection",
envvar="DBT_INDIRECT_SELECTION",
help="Select all tests that are adjacent to selected resources, even if they those resources have been explicitly selected.",
type=click.Choice(["eager", "cautious"], case_sensitive=False),
default="eager",
)
log_cache_events = click.option(
"--log-cache-events/--no-log-cache-events",
help="Enable verbose adapter cache logging.",
envvar="DBT_LOG_CACHE_EVENTS",
)
log_format = click.option(
"--log-format",
envvar="DBT_LOG_FORMAT",
help="Specify the log format, overriding the command's default.",
type=click.Choice(["text", "json", "default"], case_sensitive=False),
default="default",
)
log_path = click.option(
"--log-path",
envvar="DBT_LOG_PATH",
help="Configure the 'log-path'. Only applies this setting for the current run. Overrides the 'DBT_LOG_PATH' if it is set.",
type=click.Path(),
)
macro_debugging = click.option(
"--macro-debugging/--no-macro-debugging",
envvar="DBT_MACRO_DEBUGGING",
hidden=True,
)
models = click.option(
"-m",
"-s",
"models",
envvar=None,
help="Specify the nodes to include.",
multiple=True,
)
output = click.option(
"--output",
envvar=None,
help="TODO: No current help text",
type=click.Choice(["json", "name", "path", "selector"], case_sensitive=False),
default="name",
)
output_keys = click.option(
"--output-keys", envvar=None, help="TODO: No current help text", type=click.STRING
)
output_path = click.option(
"--output",
"-o",
envvar=None,
help="Specify the output path for the json report. By default, outputs to 'target/sources.json'",
type=click.Path(file_okay=True, dir_okay=False, writable=True),
default=PurePath.joinpath(Path.cwd(), "target/sources.json"),
)
parse_only = click.option(
"--parse-only",
envvar=None,
help="TODO: No help text currently available",
is_flag=True,
)
partial_parse = click.option(
"--partial-parse/--no-partial-parse",
envvar="DBT_PARTIAL_PARSE",
help="Allow for partial parsing by looking for and writing to a pickle file in the target directory. This overrides the user configuration file.",
default=True,
)
port = click.option(
"--port",
envvar=None,
help="Specify the port number for the docs server",
default=8080,
type=click.INT,
)
# TODO: The env var and name (reflected in flags) are corrections!
# The original name was `NO_PRINT` and used the env var `DBT_NO_PRINT`.
# Both of which break existing naming conventions.
# This will need to be fixed before use in the main codebase and communicated as a change to the community!
print = click.option(
"--print/--no-print",
envvar="DBT_PRINT",
help="Output all {{ print() }} macro calls.",
default=True,
)
printer_width = click.option(
"--printer-width",
envvar="DBT_PRINTER_WIDTH",
help="Sets the width of terminal output",
type=click.INT,
default=80,
)
profile = click.option(
"--profile",
envvar=None,
help="Which profile to load. Overrides setting in dbt_project.yml.",
)
profiles_dir = click.option(
"--profiles-dir",
envvar="DBT_PROFILES_DIR",
help="Which directory to look in for the profiles.yml file. If not set, dbt will look in the current working directory first, then HOME/.dbt/",
default=default_profiles_dir(),
type=click.Path(exists=True),
)
project_dir = click.option(
"--project-dir",
envvar=None,
help="Which directory to look in for the dbt_project.yml file. Default is the current working directory and its parents.",
default=default_project_dir(),
type=click.Path(exists=True),
)
quiet = click.option(
"--quiet/--no-quiet",
envvar="DBT_QUIET",
help="Suppress all non-error logging to stdout. Does not affect {{ print() }} macro calls.",
)
record_timing_info = click.option(
"--record-timing-info",
"-r",
envvar=None,
help="When this option is passed, dbt will output low-level timing stats to the specified file. Example: `--record-timing-info output.profile`",
type=click.Path(exists=False),
)
resource_type = click.option(
"--resource-type",
envvar=None,
help="TODO: No current help text",
type=click.Choice(
[
"metric",
"source",
"analysis",
"model",
"test",
"exposure",
"snapshot",
"seed",
"default",
"all",
],
case_sensitive=False,
),
default="default",
)
selector = click.option(
"--selector", envvar=None, help="The selector name to use, as defined in selectors.yml"
)
show = click.option(
"--show", envvar=None, help="Show a sample of the loaded data in the terminal", is_flag=True
)
skip_profile_setup = click.option(
"--skip-profile-setup", "-s", envvar=None, help="Skip interactive profile setup.", is_flag=True
)
# TODO: The env var and name (reflected in flags) are corrections!
# The original name was `ARTIFACT_STATE_PATH` and used the env var `DBT_ARTIFACT_STATE_PATH`.
# Both of which break existing naming conventions.
# This will need to be fixed before use in the main codebase and communicated as a change to the community!
state = click.option(
"--state",
envvar="DBT_STATE",
help="If set, use the given directory as the source for json files to compare with this project.",
type=click.Path(
dir_okay=True,
exists=True,
file_okay=False,
readable=True,
resolve_path=True,
),
)
static_parser = click.option(
"--static-parser/--no-static-parser",
envvar="DBT_STATIC_PARSER",
help="Use the static parser.",
default=True,
)
store_failures = click.option(
"--store-failures",
envvar="DBT_STORE_FAILURES",
help="Store test results (failing rows) in the database",
is_flag=True,
)
target = click.option(
"--target", "-t", envvar=None, help="Which target to load for the given profile"
)
target_path = click.option(
"--target-path",
envvar="DBT_TARGET_PATH",
help="Configure the 'target-path'. Only applies this setting for the current run. Overrides the 'DBT_TARGET_PATH' if it is set.",
type=click.Path(),
)
threads = click.option(
"--threads",
envvar=None,
help="Specify number of threads to use while executing models. Overrides settings in profiles.yml.",
default=1,
type=click.INT,
)
use_colors = click.option(
"--use-colors/--no-use-colors",
envvar="DBT_USE_COLORS",
help="Output is colorized by default and may also be set in a profile or at the command line.",
default=True,
)
use_experimental_parser = click.option(
"--use-experimental-parser/--no-use-experimental-parser",
envvar="DBT_USE_EXPERIMENTAL_PARSER",
help="Enable experimental parsing features.",
)
vars = click.option(
"--vars",
envvar=None,
help="Supply variables to the project. This argument overrides variables defined in your dbt_project.yml file. This argument should be a YAML string, eg. '{my_variable: my_value}'",
type=YAML(),
)
version = click.option(
"--version",
envvar=None,
help="Show version information",
is_flag=True,
)
version_check = click.option(
"--version-check/--no-version-check",
envvar="DBT_VERSION_CHECK",
help="Ensure dbt's version matches the one specified in the dbt_project.yml file ('require-dbt-version')",
default=True,
)
warn_error = click.option(
"--warn-error",
envvar="DBT_WARN_ERROR",
help="If dbt would normally warn, instead raise an exception. Examples include --select that selects nothing, deprecations, configurations with no associated models, invalid test configurations, and missing sources/refs in tests.",
default=None,
flag_value=True,
)
warn_error_options = click.option(
"--warn-error-options",
envvar="DBT_WARN_ERROR_OPTIONS",
default=None,
help="""If dbt would normally warn, instead raise an exception based on include/exclude configuration. Examples include --select that selects nothing, deprecations, configurations with no associated models, invalid test configurations,
and missing sources/refs in tests. This argument should be a YAML string, with keys 'include' or 'exclude'. e.g. '{"include": "all", "exclude": ["NoNodesForSelectionCriteria"]}'"""
type=WarnErrorOptionsType(),
)
write_json = click.option(
"--write-json/--no-write-json",
envvar="DBT_WRITE_JSON",
help="Writing the manifest and run_results.json files to disk",
default=True,
)
write_manifest = click.option(
"--write-manifest/--no-write-manifest",
envvar=None,
help="TODO: No help text currently available",
default=True,
)
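
Each of the names above is bound to the return value of `click.option(...)`, which is a reusable decorator, so a command composes its flag surface by stacking these module-level objects. A hypothetical usage sketch (the `run` command and its body below are illustrative only, not part of this changeset):

```
import click

# Reuse the shared option objects defined above; click.option() returns a
# plain decorator, so one flag definition can decorate many commands.
@click.command()
@fail_fast
@threads
@profiles_dir
def run(fail_fast, threads, profiles_dir):
    # Click injects each parsed value under the option's Python name.
    click.echo(f"threads={threads}, fail_fast={fail_fast}, profiles_dir={profiles_dir}")
```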

View File

@@ -1,11 +0,0 @@
from pathlib import Path
def default_project_dir():
paths = list(Path.cwd().parents)
paths.insert(0, Path.cwd())
return next((x for x in paths if (x / "dbt_project.yml").exists()), Path.cwd())
def default_profiles_dir():
return Path.cwd() if (Path.cwd() / "profiles.yml").exists() else Path.home() / ".dbt"

View File

@@ -1,19 +1 @@
# Clients README
### Jinja
#### How are materializations defined
Model materializations are kept in `core/dbt/include/global_project/macros/materializations/models/`. Materializations are defined using syntax that isn't part of the Jinja standard library. These tags are referenced internally, and materializations can be overridden in user projects when users have specific needs.
```
-- Pseudocode for arguments
{% materialization <name>, <target name := one_of{default, adapter}> %}
{% endmaterialization %}
```
These blocks are referred to as Jinja extensions. Extensions are defined as part of the accepted Jinja code encapsulated within a dbt project. This includes system code used internally by dbt and user space (i.e. user-defined) macros. Extensions exist to help Jinja users create reusable code blocks or abstract objects--for us, materializations are a great use-case since we pass these around as arguments within dbt system code.
The code that defines this extension is a class, `MaterializationExtension`, together with a `parse` routine. That code lives in [clients/jinja.py](https://github.com/dbt-labs/dbt-core/blob/main/core/dbt/clients/jinja.py). The routine
enables Jinja to parse (i.e. recognize) the unique comma-separated arg structure our `materialization` tags exhibit (e.g. the `table, default` pair sketched in the pseudocode above).
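
For rough orientation, a minimal, hypothetical extension (greatly simplified, not dbt's `MaterializationExtension`) has the same shape: a `tags` set that registers the tag name, and a `parse` routine that consumes the tag's tokens and returns an AST node.

```
import jinja2.ext
from jinja2 import nodes

# Toy extension: 'tags' registers the tag name; parse() consumes the
# tag's tokens and returns an AST node for Jinja to compile.
class GreetExtension(jinja2.ext.Extension):
    tags = {"greet"}

    def parse(self, parser):
        lineno = next(parser.stream).lineno           # consume the 'greet' token
        name = parser.parse_expression()              # e.g. {% greet "world" %}
        body = parser.parse_statements(("name:endgreet",), drop_needle=True)
        call = self.call_method("_render", [name])
        return nodes.CallBlock(call, [], [], body).set_lineno(lineno)

    def _render(self, name, caller):
        return f"hello {name}: {caller()}"

# usage: jinja2.Environment(extensions=[GreetExtension]) makes
# {% greet "world" %}...{% endgreet %} parseable.
```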

View File

@@ -1,15 +1,7 @@
import re
from collections import namedtuple
from dbt.exceptions import (
BlockDefinitionNotAtTopError,
DbtInternalError,
MissingCloseTagError,
MissingControlFlowStartTagError,
NestedTagsError,
UnexpectedControlFlowEndTagError,
UnexpectedMacroEOFError,
)
import dbt.exceptions
def regex(pat):
@@ -147,7 +139,10 @@ class TagIterator:
def _expect_match(self, expected_name, *patterns, **kwargs):
match = self._first_match(*patterns, **kwargs)
if match is None:
raise UnexpectedMacroEOFError(expected_name, self.data[self.pos :])
msg = 'unexpected EOF, expected {}, got "{}"'.format(
expected_name, self.data[self.pos :]
)
dbt.exceptions.raise_compiler_error(msg)
return match
def handle_expr(self, match):
@@ -261,7 +256,7 @@ class TagIterator:
elif block_type_name is not None:
yield self.handle_tag(match)
else:
raise DbtInternalError(
raise dbt.exceptions.InternalException(
"Invalid regex match in next_block, expected block start, "
"expr start, or comment start"
)
@@ -270,6 +265,13 @@ class TagIterator:
return self.find_tags()
duplicate_tags = (
"Got nested tags: {outer.block_type_name} (started at {outer.start}) did "
"not have a matching {{% end{outer.block_type_name} %}} before a "
"subsequent {inner.block_type_name} was found (started at {inner.start})"
)
_CONTROL_FLOW_TAGS = {
"if": "endif",
"for": "endfor",
@@ -317,16 +319,33 @@ class BlockIterator:
found = self.stack.pop()
else:
expected = _CONTROL_FLOW_END_TAGS[tag.block_type_name]
raise UnexpectedControlFlowEndTagError(tag, expected, self.tag_parser)
dbt.exceptions.raise_compiler_error(
(
"Got an unexpected control flow end tag, got {} but "
"never saw a preceeding {} (@ {})"
).format(tag.block_type_name, expected, self.tag_parser.linepos(tag.start))
)
expected = _CONTROL_FLOW_TAGS[found]
if expected != tag.block_type_name:
raise MissingControlFlowStartTagError(tag, expected, self.tag_parser)
dbt.exceptions.raise_compiler_error(
(
"Got an unexpected control flow end tag, got {} but "
"expected {} next (@ {})"
).format(tag.block_type_name, expected, self.tag_parser.linepos(tag.start))
)
if tag.block_type_name in allowed_blocks:
if self.stack:
raise BlockDefinitionNotAtTopError(self.tag_parser, tag.start)
dbt.exceptions.raise_compiler_error(
(
"Got a block definition inside control flow at {}. "
"All dbt block definitions must be at the top level"
).format(self.tag_parser.linepos(tag.start))
)
if self.current is not None:
raise NestedTagsError(outer=self.current, inner=tag)
dbt.exceptions.raise_compiler_error(
duplicate_tags.format(outer=self.current, inner=tag)
)
if collect_raw_data:
raw_data = self.data[self.last_position : tag.start]
self.last_position = tag.start
@@ -347,7 +366,11 @@ class BlockIterator:
if self.current:
linecount = self.data[: self.current.end].count("\n") + 1
raise MissingCloseTagError(self.current.block_type_name, linecount)
dbt.exceptions.raise_compiler_error(
(
"Reached EOF without finding a close tag for " "{} (searched from line {})"
).format(self.current.block_type_name, linecount)
)
if collect_raw_data:
raw_data = self.data[self.last_position :]

View File

@@ -7,7 +7,7 @@ import json
import dbt.utils
from typing import Iterable, List, Dict, Union, Optional, Any
from dbt.exceptions import DbtRuntimeError
from dbt.exceptions import RuntimeException
BOM = BOM_UTF8.decode("utf-8") # '\ufeff'
@@ -168,7 +168,7 @@ class ColumnTypeBuilder(Dict[str, NullableAgateType]):
return
elif not isinstance(value, type(existing_type)):
# actual type mismatch!
raise DbtRuntimeError(
raise RuntimeException(
f"Tables contain columns with the same names ({key}), "
f"but different types ({value} vs {existing_type})"
)

View File

@@ -14,10 +14,10 @@ from dbt.events.types import (
)
from dbt.exceptions import (
CommandResultError,
GitCheckoutError,
GitCloningError,
UnknownGitCloningProblemError,
DbtRuntimeError,
RuntimeException,
bad_package_spec,
raise_git_cloning_error,
raise_git_cloning_problem,
)
from packaging import version
@@ -27,6 +27,16 @@ def _is_commit(revision: str) -> bool:
return bool(re.match(r"\b[0-9a-f]{40}\b", revision))
def _raise_git_cloning_error(repo, revision, error):
stderr = error.stderr.strip()
if "usage: git" in stderr:
stderr = stderr.split("\nusage: git")[0]
if re.match("fatal: destination path '(.+)' already exists", stderr):
raise_git_cloning_error(error)
bad_package_spec(repo, revision, stderr)
def clone(repo, cwd, dirname=None, remove_git_dir=False, revision=None, subdirectory=None):
has_revision = revision is not None
is_commit = _is_commit(revision or "")
@@ -54,7 +64,7 @@ def clone(repo, cwd, dirname=None, remove_git_dir=False, revision=None, subdirec
try:
result = run_cmd(cwd, clone_cmd, env={"LC_ALL": "C"})
except CommandResultError as exc:
raise GitCloningError(repo, revision, exc)
_raise_git_cloning_error(repo, revision, exc)
if subdirectory:
cwd_subdir = os.path.join(cwd, dirname or "")
@@ -62,7 +72,7 @@ def clone(repo, cwd, dirname=None, remove_git_dir=False, revision=None, subdirec
try:
run_cmd(cwd_subdir, clone_cmd_subdir)
except CommandResultError as exc:
raise GitCloningError(repo, revision, exc)
_raise_git_cloning_error(repo, revision, exc)
if remove_git_dir:
rmdir(os.path.join(dirname, ".git"))
@@ -105,7 +115,8 @@ def checkout(cwd, repo, revision=None):
try:
return _checkout(cwd, repo, revision)
except CommandResultError as exc:
raise GitCheckoutError(repo=repo, revision=revision, error=exc)
stderr = exc.stderr.strip()
bad_package_spec(repo, revision, stderr)
def get_current_sha(cwd):
@@ -134,7 +145,7 @@ def clone_and_checkout(
err = exc.stderr
exists = re.match("fatal: destination path '(.+)' already exists", err)
if not exists:
raise UnknownGitCloningProblemError(repo)
raise_git_cloning_problem(repo)
directory = None
start_sha = None
@@ -144,7 +155,7 @@ def clone_and_checkout(
else:
matches = re.match("Cloning into '(.+)'", err.decode("utf-8"))
if matches is None:
raise DbtRuntimeError(f'Error cloning {repo} - never saw "Cloning into ..." from git')
raise RuntimeException(f'Error cloning {repo} - never saw "Cloning into ..." from git')
directory = matches.group(1)
fire_event(GitProgressPullingNewDependency(dir=directory))
full_path = os.path.join(cwd, directory)

View File

@@ -25,26 +25,18 @@ from dbt.utils import (
)
from dbt.clients._jinja_blocks import BlockIterator, BlockData, BlockTag
from dbt.contracts.graph.nodes import GenericTestNode
from dbt.contracts.graph.compiled import CompiledGenericTestNode
from dbt.contracts.graph.parsed import ParsedGenericTestNode
from dbt.exceptions import (
CaughtMacroError,
CaughtMacroErrorWithNodeError,
CompilationError,
DbtInternalError,
MaterializationArgError,
JinjaRenderingError,
InternalException,
raise_compiler_error,
CompilationException,
invalid_materialization_argument,
MacroReturn,
MaterializtionMacroNotUsedError,
NoSupportedLanguagesFoundError,
UndefinedCompilationError,
UndefinedMacroError,
JinjaRenderingException,
UndefinedMacroException,
)
from dbt import flags
from dbt.node_types import ModelLanguage
SUPPORTED_LANG_ARG = jinja2.nodes.Name("supported_languages", "param")
def _linecache_inject(source, write):
@@ -161,15 +153,15 @@ def quoted_native_concat(nodes):
except (ValueError, SyntaxError, MemoryError):
result = raw
if isinstance(raw, BoolMarker) and not isinstance(result, bool):
raise JinjaRenderingError(f"Could not convert value '{raw!s}' into type 'bool'")
raise JinjaRenderingException(f"Could not convert value '{raw!s}' into type 'bool'")
if isinstance(raw, NumberMarker) and not _is_number(result):
raise JinjaRenderingError(f"Could not convert value '{raw!s}' into type 'number'")
raise JinjaRenderingException(f"Could not convert value '{raw!s}' into type 'number'")
return result
class NativeSandboxTemplate(jinja2.nativetypes.NativeTemplate): # mypy: ignore
environment_class = NativeSandboxEnvironment # type: ignore
environment_class = NativeSandboxEnvironment
def render(self, *args, **kwargs):
"""Render the template to produce a native Python type. If the
@@ -241,12 +233,12 @@ class BaseMacroGenerator:
try:
yield
except (TypeError, jinja2.exceptions.TemplateRuntimeError) as e:
raise CaughtMacroError(e)
raise_compiler_error(str(e))
def call_macro(self, *args, **kwargs):
# called from __call__ methods
if self.context is None:
raise DbtInternalError("Context is still None in call_macro!")
raise InternalException("Context is still None in call_macro!")
assert self.context is not None
macro = self.get_macro()
@@ -273,7 +265,7 @@ class MacroStack(threading.local):
def pop(self, name):
got = self.call_stack.pop()
if got != name:
raise DbtInternalError(f"popped {got}, expected {name}")
raise InternalException(f"popped {got}, expected {name}")
class MacroGenerator(BaseMacroGenerator):
@@ -300,8 +292,8 @@ class MacroGenerator(BaseMacroGenerator):
try:
yield
except (TypeError, jinja2.exceptions.TemplateRuntimeError) as e:
raise CaughtMacroErrorWithNodeError(exc=e, node=self.macro)
except CompilationError as e:
raise_compiler_error(str(e), self.macro)
except CompilationException as e:
e.stack.append(self.macro)
raise e
@@ -309,13 +301,13 @@ class MacroGenerator(BaseMacroGenerator):
@contextmanager
def track_call(self):
# This is only called from __call__
if self.stack is None:
if self.stack is None or self.node is None:
yield
else:
unique_id = self.macro.unique_id
depth = self.stack.depth
# only mark depth=0 as a dependency; when creating this dependency we don't pass in stack
if depth == 0 and self.node:
# only mark depth=0 as a dependency
if depth == 0:
self.node.depends_on.add_macro(unique_id)
self.stack.push(unique_id)
try:
@@ -372,19 +364,8 @@ class MaterializationExtension(jinja2.ext.Extension):
value = parser.parse_expression()
adapter_name = value.value
elif target.name == "supported_languages":
target.set_ctx("param")
node.args.append(target)
parser.stream.expect("assign")
languages = parser.parse_expression()
node.defaults.append(languages)
else:
raise MaterializationArgError(materialization_name, target.name)
if SUPPORTED_LANG_ARG not in node.args:
node.args.append(SUPPORTED_LANG_ARG)
node.defaults.append(jinja2.nodes.List([jinja2.nodes.Const("sql")]))
invalid_materialization_argument(materialization_name, target.name)
node.name = get_materialization_macro_name(materialization_name, adapter_name)
@@ -455,7 +436,7 @@ def create_undefined(node=None):
return self
def __reduce__(self):
raise UndefinedCompilationError(name=self.name, node=node)
raise_compiler_error(f"{self.name} is undefined", node=node)
return Undefined
@@ -513,10 +494,10 @@ def catch_jinja(node=None) -> Iterator[None]:
yield
except jinja2.exceptions.TemplateSyntaxError as e:
e.translated = False
raise CompilationError(str(e), node) from e
raise CompilationException(str(e), node) from e
except jinja2.exceptions.UndefinedError as e:
raise UndefinedMacroError(str(e), node) from e
except CompilationError as exc:
raise UndefinedMacroException(str(e), node) from e
except CompilationException as exc:
exc.add_node(node)
raise
@@ -623,7 +604,7 @@ GENERIC_TEST_KWARGS_NAME = "_dbt_generic_test_kwargs"
def add_rendered_test_kwargs(
context: Dict[str, Any],
node: GenericTestNode,
node: Union[ParsedGenericTestNode, CompiledGenericTestNode],
capture_macros: bool = False,
) -> None:
"""Render each of the test kwargs in the given context using the native
@@ -651,21 +632,3 @@ def add_rendered_test_kwargs(
# when the test node was created in _parse_generic_test.
kwargs = deep_map_render(_convert_function, node.test_metadata.kwargs)
context[GENERIC_TEST_KWARGS_NAME] = kwargs
def get_supported_languages(node: jinja2.nodes.Macro) -> List[ModelLanguage]:
if "materialization" not in node.name:
raise MaterializtionMacroNotUsedError(node=node)
no_kwargs = not node.defaults
no_langs_found = SUPPORTED_LANG_ARG not in node.args
if no_kwargs or no_langs_found:
raise NoSupportedLanguagesFoundError(node=node)
lang_idx = node.args.index(SUPPORTED_LANG_ARG)
# indexing defaults from the end
# since supported_languages is a kwarg, and kwargs are always after args
return [
ModelLanguage[item.value] for item in node.defaults[-(len(node.args) - lang_idx)].items
]

View File

@@ -1,6 +1,6 @@
import jinja2
from dbt.clients.jinja import get_environment
from dbt.exceptions import MacroNamespaceNotStringError, MacroNameNotStringError
from dbt.exceptions import raise_compiler_error
def statically_extract_macro_calls(string, ctx, db_wrapper=None):
@@ -15,7 +15,7 @@ def statically_extract_macro_calls(string, ctx, db_wrapper=None):
if hasattr(func_call, "node") and hasattr(func_call.node, "name"):
func_name = func_call.node.name
else:
# func_call for dbt.current_timestamp macro
# func_call for dbt_utils.current_timestamp macro
# Call(
# node=Getattr(
# node=Name(
@@ -117,14 +117,20 @@ def statically_parse_adapter_dispatch(func_call, ctx, db_wrapper):
func_name = kwarg.value.value
possible_macro_calls.append(func_name)
else:
raise MacroNameNotStringError(kwarg_value=kwarg.value.value)
raise_compiler_error(
f"The macro_name parameter ({kwarg.value.value}) "
"to adapter.dispatch was not a string"
)
elif kwarg.key == "macro_namespace":
# This will remain to enable static resolution
kwarg_type = type(kwarg.value).__name__
if kwarg_type == "Const":
macro_namespace = kwarg.value.value
else:
raise MacroNamespaceNotStringError(kwarg_type)
raise_compiler_error(
"The macro_namespace parameter to adapter.dispatch "
f"is a {kwarg_type}, not a string"
)
# positional arguments
if packages_arg:

View File

@@ -3,9 +3,9 @@ from typing import Any, Dict, List
import requests
from dbt.events.functions import fire_event
from dbt.events.types import (
RegistryProgressGETRequest,
RegistryProgressMakingGETRequest,
RegistryProgressGETResponse,
RegistryIndexProgressGETRequest,
RegistryIndexProgressMakingGETRequest,
RegistryIndexProgressGETResponse,
RegistryResponseUnexpectedType,
RegistryResponseMissingTopKeys,
@@ -14,7 +14,6 @@ from dbt.events.types import (
)
from dbt.utils import memoized, _connection_exception_retry as connection_exception_retry
from dbt import deprecations
from dbt import semver
import os
if os.getenv("DBT_PACKAGE_HUB_URL"):
@@ -38,7 +37,7 @@ def _get_with_retries(package_name, registry_base_url=None):
def _get(package_name, registry_base_url=None):
url = _get_url(package_name, registry_base_url)
fire_event(RegistryProgressGETRequest(url=url))
fire_event(RegistryProgressMakingGETRequest(url=url))
# all exceptions from requests get caught in the retry logic so no need to wrap this here
resp = requests.get(url, timeout=30)
fire_event(RegistryProgressGETResponse(url=url, resp_code=resp.status_code))
@@ -126,43 +125,16 @@ def package_version(package_name, version, registry_base_url=None) -> Dict[str,
return response[version]
def is_compatible_version(package_spec, dbt_version) -> bool:
require_dbt_version = package_spec.get("require_dbt_version")
if not require_dbt_version:
# if version requirements are missing or empty, assume any version is compatible
return True
else:
# determine whether dbt_version satisfies this package's require-dbt-version config
if not isinstance(require_dbt_version, list):
require_dbt_version = [require_dbt_version]
supported_versions = [
semver.VersionSpecifier.from_version_string(v) for v in require_dbt_version
]
return semver.versions_compatible(dbt_version, *supported_versions)
def get_compatible_versions(package_name, dbt_version, should_version_check) -> List["str"]:
def get_available_versions(package_name) -> List["str"]:
# returns a list of all available versions of a package
response = package(package_name)
# if the user doesn't care about installing compatible versions, just return them all
if not should_version_check:
return list(response)
# otherwise, only return versions that are compatible with the installed version of dbt-core
else:
compatible_versions = [
pkg_version
for pkg_version, info in response.items()
if is_compatible_version(info, dbt_version)
]
return compatible_versions
return list(response)
def _get_index(registry_base_url=None):
url = _get_url("index", registry_base_url)
fire_event(RegistryIndexProgressGETRequest(url=url))
fire_event(RegistryIndexProgressMakingGETRequest(url=url))
# all exceptions from requests get caught in the retry logic so no need to wrap this here
resp = requests.get(url, timeout=30)
fire_event(RegistryIndexProgressGETResponse(url=url, resp_code=resp.status_code))

View File

@@ -12,15 +12,14 @@ import tarfile
import requests
import stat
from typing import Type, NoReturn, List, Optional, Dict, Any, Tuple, Callable, Union
from pathspec import PathSpec # type: ignore
from dbt.events.functions import fire_event
from dbt.events.types import (
SystemErrorRetrievingModTime,
SystemCouldNotWrite,
SystemExecutingCmd,
SystemStdOut,
SystemStdErr,
SystemStdOutMsg,
SystemStdErrMsg,
SystemReportReturnCode,
)
import dbt.exceptions
@@ -37,7 +36,6 @@ def find_matching(
root_path: str,
relative_paths_to_search: List[str],
file_pattern: str,
ignore_spec: Optional[PathSpec] = None,
) -> List[Dict[str, Any]]:
"""
Given an absolute `root_path`, a list of relative paths to that
@@ -59,30 +57,19 @@ def find_matching(
reobj = re.compile(regex, re.IGNORECASE)
for relative_path_to_search in relative_paths_to_search:
# potential speedup for ignore_spec
# if ignore_spec.matches(relative_path_to_search):
# continue
absolute_path_to_search = os.path.join(root_path, relative_path_to_search)
walk_results = os.walk(absolute_path_to_search)
for current_path, subdirectories, local_files in walk_results:
# potential speedup for ignore_spec
# relative_dir = os.path.relpath(current_path, root_path) + os.sep
# if ignore_spec.match(relative_dir):
# continue
for local_file in local_files:
absolute_path = os.path.join(current_path, local_file)
relative_path = os.path.relpath(absolute_path, absolute_path_to_search)
relative_path_to_root = os.path.join(relative_path_to_search, relative_path)
modification_time = 0.0
try:
modification_time = os.path.getmtime(absolute_path)
except OSError:
fire_event(SystemErrorRetrievingModTime(path=absolute_path))
if reobj.match(local_file) and (
not ignore_spec or not ignore_spec.match_file(relative_path_to_root)
):
if reobj.match(local_file):
matching.append(
{
"searched_path": relative_path_to_search,
@@ -144,8 +131,7 @@ def make_symlink(source: str, link_path: str) -> None:
Create a symlink at `link_path` referring to `source`.
"""
if not supports_symlinks():
# TODO: why not import these at top?
raise dbt.exceptions.SymbolicLinkError()
dbt.exceptions.system_error("create a symbolic link")
os.symlink(source, link_path)
@@ -178,7 +164,7 @@ def write_file(path: str, contents: str = "") -> bool:
reason = "Path was possibly too long"
# all our hard work and the path was still too long. Log and
# continue.
fire_event(SystemCouldNotWrite(path=path, reason=reason, exc=str(exc)))
fire_event(SystemCouldNotWrite(path=path, reason=reason, exc=exc))
else:
raise
return True
@@ -412,7 +398,7 @@ def _interpret_oserror(exc: OSError, cwd: str, cmd: List[str]) -> NoReturn:
_handle_posix_error(exc, cwd, cmd)
# this should not be reachable, raise _something_ at least!
raise dbt.exceptions.DbtInternalError(
raise dbt.exceptions.InternalException(
"Unhandled exception in _interpret_oserror: {}".format(exc)
)
@@ -441,8 +427,8 @@ def run_cmd(cwd: str, cmd: List[str], env: Optional[Dict[str, Any]] = None) -> T
except OSError as exc:
_interpret_oserror(exc, cwd, cmd)
fire_event(SystemStdOut(bmsg=out))
fire_event(SystemStdErr(bmsg=err))
fire_event(SystemStdOutMsg(bmsg=out))
fire_event(SystemStdErrMsg(bmsg=err))
if proc.returncode != 0:
fire_event(SystemReportReturnCode(returncode=proc.returncode))

View File

@@ -60,4 +60,4 @@ def load_yaml_text(contents, path=None):
else:
error = str(e)
raise dbt.exceptions.DbtValidationError(error)
raise dbt.exceptions.ValidationException(error)

View File

@@ -1,43 +1,49 @@
import argparse
import networkx as nx # type: ignore
import os
from collections import defaultdict
from typing import List, Dict, Any, Tuple, cast, Optional
import networkx as nx # type: ignore
import pickle
import sqlparse
from collections import defaultdict
from typing import List, Dict, Any, Tuple, Optional
from dbt import flags
from dbt.adapters.factory import get_adapter
from dbt.clients import jinja
from dbt.clients.system import make_directory
from dbt.context.providers import generate_runtime_model_context
from dbt.contracts.graph.manifest import Manifest, UniqueID
from dbt.contracts.graph.nodes import (
ManifestNode,
ManifestSQLNode,
GenericTestNode,
from dbt.contracts.graph.compiled import (
COMPILED_TYPES,
CompiledGenericTestNode,
GraphMemberNode,
InjectedCTE,
SeedNode,
ManifestNode,
NonSourceCompiledNode,
)
from dbt.contracts.graph.parsed import ParsedNode
from dbt.exceptions import (
GraphDependencyNotFoundError,
DbtInternalError,
DbtRuntimeError,
dependency_not_found,
InternalException,
RuntimeException,
)
from dbt.graph import Graph
from dbt.events.functions import fire_event
from dbt.events.types import FoundStats, WritingInjectedSQLForNode
from dbt.events.contextvars import get_node_info
from dbt.node_types import NodeType, ModelLanguage
from dbt.events.types import FoundStats, CompilingNode, WritingInjectedSQLForNode
from dbt.node_types import NodeType
from dbt.events.format import pluralize
import dbt.tracking
import dbt.task.list as list_task
graph_file_name = "graph.gpickle"
def _compiled_type_for(model: ParsedNode):
if type(model) not in COMPILED_TYPES:
raise InternalException(
f"Asked to compile {type(model)} node, but it has no compiled form"
)
return COMPILED_TYPES[type(model)]
def print_compile_stats(stats):
names = {
NodeType.Model: "model",
@@ -170,15 +176,14 @@ class Compiler:
# a dict for jinja rendering of SQL
def _create_node_context(
self,
node: ManifestSQLNode,
node: NonSourceCompiledNode,
manifest: Manifest,
extra_context: Dict[str, Any],
) -> Dict[str, Any]:
context = generate_runtime_model_context(node, self.config, manifest)
context.update(extra_context)
if isinstance(node, GenericTestNode):
if isinstance(node, CompiledGenericTestNode):
# for test nodes, add a special keyword args value to the context
jinja.add_rendered_test_kwargs(context, node)
@@ -189,6 +194,14 @@ class Compiler:
relation_cls = adapter.Relation
return relation_cls.add_ephemeral_prefix(name)
def _get_relation_name(self, node: ParsedNode):
relation_name = None
if node.is_relational and not node.is_ephemeral_model:
adapter = get_adapter(self.config)
relation_cls = adapter.Relation
relation_name = str(relation_cls.create_from(self.config, node))
return relation_name
def _inject_ctes_into_sql(self, sql: str, ctes: List[InjectedCTE]) -> str:
"""
`ctes` is a list of InjectedCTEs like:
@@ -247,10 +260,10 @@ class Compiler:
def _recursively_prepend_ctes(
self,
model: ManifestSQLNode,
model: NonSourceCompiledNode,
manifest: Manifest,
extra_context: Optional[Dict[str, Any]],
) -> Tuple[ManifestSQLNode, List[InjectedCTE]]:
) -> Tuple[NonSourceCompiledNode, List[InjectedCTE]]:
"""This method is called by the 'compile_node' method. Starting
from the node that it is passed in, it will recursively call
itself using the 'extra_ctes'. The 'ephemeral' models do
@@ -258,15 +271,14 @@ class Compiler:
are rolled up into the models that refer to them by
inserting CTEs into the SQL.
"""
if model.compiled_code is None:
raise DbtRuntimeError("Cannot inject ctes into an unparsed node", model)
if model.compiled_sql is None:
raise RuntimeException("Cannot inject ctes into an unparsed node", model)
if model.extra_ctes_injected:
return (model, model.extra_ctes)
# Just to make it plain that nothing is actually injected for this case
if not model.extra_ctes:
if not isinstance(model, SeedNode):
model.extra_ctes_injected = True
model.extra_ctes_injected = True
manifest.update_node(model)
return (model, model.extra_ctes)
@@ -280,19 +292,20 @@ class Compiler:
# ephemeral model.
for cte in model.extra_ctes:
if cte.id not in manifest.nodes:
raise DbtInternalError(
raise InternalException(
f"During compilation, found a cte reference that "
f"could not be resolved: {cte.id}"
)
cte_model = manifest.nodes[cte.id]
assert not isinstance(cte_model, SeedNode)
if not cte_model.is_ephemeral_model:
raise DbtInternalError(f"{cte.id} is not ephemeral")
raise InternalException(f"{cte.id} is not ephemeral")
# This model has already been compiled, so it's been
# through here before
if getattr(cte_model, "compiled", False):
assert isinstance(cte_model, tuple(COMPILED_TYPES.values()))
cte_model = cast(NonSourceCompiledNode, cte_model)
new_prepended_ctes = cte_model.extra_ctes
# if the cte_model isn't compiled, i.e. first time here
@@ -311,69 +324,64 @@ class Compiler:
_extend_prepended_ctes(prepended_ctes, new_prepended_ctes)
new_cte_name = self.add_ephemeral_prefix(cte_model.name)
rendered_sql = cte_model._pre_injected_sql or cte_model.compiled_code
rendered_sql = cte_model._pre_injected_sql or cte_model.compiled_sql
sql = f" {new_cte_name} as (\n{rendered_sql}\n)"
_add_prepended_cte(prepended_ctes, InjectedCTE(id=cte.id, sql=sql))
injected_sql = self._inject_ctes_into_sql(
model.compiled_code,
model.compiled_sql,
prepended_ctes,
)
model._pre_injected_sql = model.compiled_code
model.compiled_code = injected_sql
model._pre_injected_sql = model.compiled_sql
model.compiled_sql = injected_sql
model.extra_ctes_injected = True
model.extra_ctes = prepended_ctes
model.validate(model.to_dict(omit_none=True))
manifest.update_node(model)
return model, prepended_ctes
# Sets compiled fields in the ManifestSQLNode passed in,
# creates a compiled_node from the ManifestNode passed in,
# creates a "context" dictionary for jinja rendering,
# and then renders the "compiled_code" using the node, the
# raw_code and the context.
# and then renders the "compiled_sql" using the node, the
# raw_sql and the context.
def _compile_node(
self,
node: ManifestSQLNode,
node: ManifestNode,
manifest: Manifest,
extra_context: Optional[Dict[str, Any]] = None,
) -> ManifestSQLNode:
) -> NonSourceCompiledNode:
if extra_context is None:
extra_context = {}
fire_event(CompilingNode(unique_id=node.unique_id))
data = node.to_dict(omit_none=True)
data.update(
{
"compiled": False,
"compiled_code": None,
"compiled_sql": None,
"extra_ctes_injected": False,
"extra_ctes": [],
}
)
compiled_node = _compiled_type_for(node).from_dict(data)
if node.language == ModelLanguage.python:
context = self._create_node_context(node, manifest, extra_context)
context = self._create_node_context(compiled_node, manifest, extra_context)
postfix = jinja.get_rendered(
"{{ py_script_postfix(model) }}",
context,
node,
)
# we should NOT jinja render the python model's 'raw code'
node.compiled_code = f"{node.raw_code}\n\n{postfix}"
compiled_node.compiled_sql = jinja.get_rendered(
node.raw_sql,
context,
node,
)
else:
context = self._create_node_context(node, manifest, extra_context)
node.compiled_code = jinja.get_rendered(
node.raw_code,
context,
node,
)
compiled_node.relation_name = self._get_relation_name(node)
node.compiled = True
compiled_node.compiled = True
return node
return compiled_node
def write_graph_file(self, linker: Linker, manifest: Manifest):
filename = graph_file_name
@@ -392,7 +400,7 @@ class Compiler:
elif dependency in manifest.metrics:
linker.dependency(node.unique_id, (manifest.metrics[dependency].unique_id))
else:
raise GraphDependencyNotFoundError(node, dependency)
dependency_not_found(node, dependency)
def link_graph(self, linker: Linker, manifest: Manifest, add_test_edges: bool = False):
for source in manifest.sources.values():
@@ -475,38 +483,29 @@ class Compiler:
if write:
self.write_graph_file(linker, manifest)
# Do not print these for ListTasks
if not (
self.config.args.__class__ == argparse.Namespace
and self.config.args.cls == list_task.ListTask
):
print_compile_stats(stats)
print_compile_stats(stats)
return Graph(linker.graph)
# writes the "compiled_code" into the target/compiled directory
def _write_node(self, node: ManifestSQLNode) -> ManifestSQLNode:
if not node.extra_ctes_injected or node.resource_type in (
NodeType.Snapshot,
NodeType.Seed,
):
# writes the "compiled_sql" into the target/compiled directory
def _write_node(self, node: NonSourceCompiledNode) -> ManifestNode:
if not node.extra_ctes_injected or node.resource_type == NodeType.Snapshot:
return node
fire_event(WritingInjectedSQLForNode(node_info=get_node_info()))
fire_event(WritingInjectedSQLForNode(unique_id=node.unique_id))
if node.compiled_code:
if node.compiled_sql:
node.compiled_path = node.write_node(
self.config.target_path, "compiled", node.compiled_code
self.config.target_path, "compiled", node.compiled_sql
)
return node
def compile_node(
self,
node: ManifestSQLNode,
node: ManifestNode,
manifest: Manifest,
extra_context: Optional[Dict[str, Any]] = None,
write: bool = True,
) -> ManifestSQLNode:
) -> NonSourceCompiledNode:
"""This is the main entry point into this code. It's called by
CompileRunner.compile, GenericRPCRunner.compile, and
RunTask.get_hook_sql. It calls '_compile_node' to convert

View File

@@ -9,14 +9,12 @@ from dbt.clients.system import load_file_contents
from dbt.clients.yaml_helper import load_yaml_text
from dbt.contracts.connection import Credentials, HasCredentials
from dbt.contracts.project import ProfileConfig, UserConfig
from dbt.exceptions import (
CompilationError,
DbtProfileError,
DbtProjectError,
DbtValidationError,
DbtRuntimeError,
ProfileConfigError,
)
from dbt.exceptions import CompilationException
from dbt.exceptions import DbtProfileError
from dbt.exceptions import DbtProjectError
from dbt.exceptions import ValidationException
from dbt.exceptions import RuntimeException
from dbt.exceptions import validator_error_message
from dbt.events.types import MissingProfileTarget
from dbt.events.functions import fire_event
from dbt.utils import coerce_dict_str
@@ -25,6 +23,8 @@ from .renderer import ProfileRenderer
DEFAULT_THREADS = 1
DEFAULT_PROFILES_DIR = os.path.join(os.path.expanduser("~"), ".dbt")
INVALID_PROFILE_MESSAGE = """
dbt encountered an error while trying to read your profiles.yml file.
@@ -44,7 +44,7 @@ defined in your profiles.yml file. You can find profiles.yml here:
{profiles_file}/profiles.yml
""".format(
profiles_file=flags.DEFAULT_PROFILES_DIR
profiles_file=DEFAULT_PROFILES_DIR
)
@@ -60,9 +60,9 @@ def read_profile(profiles_dir: str) -> Dict[str, Any]:
msg = f"The profiles.yml file at {path} is empty"
raise DbtProfileError(INVALID_PROFILE_MESSAGE.format(error_string=msg))
return yaml_content
except DbtValidationError as e:
except ValidationException as e:
msg = INVALID_PROFILE_MESSAGE.format(error_string=e)
raise DbtValidationError(msg) from e
raise ValidationException(msg) from e
return {}
@@ -75,7 +75,7 @@ def read_user_config(directory: str) -> UserConfig:
if user_config is not None:
UserConfig.validate(user_config)
return UserConfig.from_dict(user_config)
except (DbtRuntimeError, ValidationError):
except (RuntimeException, ValidationError):
pass
return UserConfig()
@@ -158,7 +158,7 @@ class Profile(HasCredentials):
dct = self.to_profile_info(serialize_credentials=True)
ProfileConfig.validate(dct)
except ValidationError as exc:
raise ProfileConfigError(exc) from exc
raise DbtProfileError(validator_error_message(exc)) from exc
@staticmethod
def _credentials_from_profile(
@@ -182,8 +182,8 @@ class Profile(HasCredentials):
data = cls.translate_aliases(profile)
cls.validate(data)
credentials = cls.from_dict(data)
except (DbtRuntimeError, ValidationError) as e:
msg = str(e) if isinstance(e, DbtRuntimeError) else e.message
except (RuntimeException, ValidationError) as e:
msg = str(e) if isinstance(e, RuntimeException) else e.message
raise DbtProfileError(
'Credentials in profile "{}", target "{}" invalid: {}'.format(
profile_name, target_name, msg
@@ -299,7 +299,7 @@ class Profile(HasCredentials):
try:
profile_data = renderer.render_data(raw_profile_data)
except CompilationError as exc:
except CompilationException as exc:
raise DbtProfileError(str(exc)) from exc
return target_name, profile_data

View File

@@ -16,19 +16,19 @@ import hashlib
import os
from dbt import flags, deprecations
from dbt.clients.system import path_exists, resolve_path_from_base, load_file_contents
from dbt.clients.system import resolve_path_from_base
from dbt.clients.system import path_exists
from dbt.clients.system import load_file_contents
from dbt.clients.yaml_helper import load_yaml_text
from dbt.contracts.connection import QueryComment
from dbt.exceptions import (
DbtProjectError,
SemverError,
ProjectContractBrokenError,
ProjectContractError,
DbtRuntimeError,
)
from dbt.exceptions import DbtProjectError
from dbt.exceptions import SemverException
from dbt.exceptions import validator_error_message
from dbt.exceptions import RuntimeException
from dbt.graph import SelectionSpec
from dbt.helper_types import NoValue
from dbt.semver import VersionSpecifier, versions_compatible
from dbt.semver import VersionSpecifier
from dbt.semver import versions_compatible
from dbt.version import get_installed_version
from dbt.utils import MultiDict
from dbt.node_types import NodeType
@@ -75,11 +75,6 @@ Validator Error:
{error}
"""
MISSING_DBT_PROJECT_ERROR = """\
No dbt_project.yml found at expected path {path}
Verify that each entry within packages.yml (and their transitive dependencies) contains a file named dbt_project.yml
"""
@runtime_checkable
class IsFQNResource(Protocol):
@@ -168,7 +163,9 @@ def _raw_project_from(project_root: str) -> Dict[str, Any]:
# get the project.yml contents
if not path_exists(project_yaml_filepath):
raise DbtProjectError(MISSING_DBT_PROJECT_ERROR.format(path=project_yaml_filepath))
raise DbtProjectError(
"no dbt_project.yml found at expected path {}".format(project_yaml_filepath)
)
project_dict = _load_yaml(project_yaml_filepath)
@@ -222,7 +219,7 @@ def _get_required_version(
try:
dbt_version = _parse_versions(dbt_raw_version)
except SemverError as e:
except SemverException as e:
raise DbtProjectError(str(e)) from e
if verify_version:
@@ -251,7 +248,7 @@ class PartialProject(RenderComponents):
project_name: Optional[str] = field(
metadata=dict(
description=(
"The name of the project. This should always be set and will not be rendered"
"The name of the project. This should always be set and will not " "be rendered"
)
)
)
@@ -328,7 +325,7 @@ class PartialProject(RenderComponents):
ProjectContract.validate(rendered.project_dict)
cfg = ProjectContract.from_dict(rendered.project_dict)
except ValidationError as e:
raise ProjectContractError(e) from e
raise DbtProjectError(validator_error_message(e)) from e
# name/version are required in the Project definition, so we can assume
# they are present
name = cfg.name
@@ -383,8 +380,6 @@ class PartialProject(RenderComponents):
snapshots: Dict[str, Any]
sources: Dict[str, Any]
tests: Dict[str, Any]
metrics: Dict[str, Any]
exposures: Dict[str, Any]
vars_value: VarProvider
dispatch = cfg.dispatch
@@ -393,8 +388,6 @@ class PartialProject(RenderComponents):
snapshots = cfg.snapshots
sources = cfg.sources
tests = cfg.tests
metrics = cfg.metrics
exposures = cfg.exposures
if cfg.vars is None:
vars_dict: Dict[str, Any] = {}
else:
@@ -448,8 +441,6 @@ class PartialProject(RenderComponents):
query_comment=query_comment,
sources=sources,
tests=tests,
metrics=metrics,
exposures=exposures,
vars=vars_value,
config_version=cfg.config_version,
unrendered=unrendered,
@@ -552,8 +543,6 @@ class Project:
snapshots: Dict[str, Any]
sources: Dict[str, Any]
tests: Dict[str, Any]
metrics: Dict[str, Any]
exposures: Dict[str, Any]
vars: VarProvider
dbt_version: List[VersionSpecifier]
packages: Dict[str, Any]
@@ -626,8 +615,6 @@ class Project:
"snapshots": self.snapshots,
"sources": self.sources,
"tests": self.tests,
"metrics": self.metrics,
"exposures": self.exposures,
"vars": self.vars.to_dict(),
"require-dbt-version": [v.to_version_string() for v in self.dbt_version],
"config-version": self.config_version,
@@ -645,7 +632,7 @@ class Project:
try:
ProjectContract.validate(self.to_project_config())
except ValidationError as e:
raise ProjectContractBrokenError(e) from e
raise DbtProjectError(validator_error_message(e)) from e
@classmethod
def partial_load(cls, project_root: str, *, verify_version: bool = False) -> PartialProject:
@@ -670,8 +657,8 @@ class Project:
def get_selector(self, name: str) -> Union[SelectionSpec, bool]:
if name not in self.selectors:
raise DbtRuntimeError(
f"Could not find selector named {name}, expected one of {list(self.selectors)}"
raise RuntimeException(
f"Could not find selector named {name}, expected one of " f"{list(self.selectors)}"
)
return self.selectors[name]["definition"]

View File

@@ -3,13 +3,13 @@ import re
import os
from dbt.clients.jinja import get_rendered, catch_jinja
from dbt.constants import SECRET_ENV_PREFIX
from dbt.context.target import TargetContext
from dbt.context.secret import SecretContext, SECRET_PLACEHOLDER
from dbt.context.base import BaseContext
from dbt.contracts.connection import HasCredentials
from dbt.exceptions import DbtProjectError, CompilationError, RecursionError
from dbt.exceptions import DbtProjectError, CompilationException, RecursionException
from dbt.utils import deep_map_render
from dbt.logger import SECRET_ENV_PREFIX
Keypath = Tuple[Union[str, int], ...]
@@ -40,14 +40,14 @@ class BaseRenderer:
try:
with catch_jinja():
return get_rendered(value, self.context, native=True)
except CompilationError as exc:
except CompilationException as exc:
msg = f"Could not render {value}: {exc.msg}"
raise CompilationError(msg) from exc
raise CompilationException(msg) from exc
def render_data(self, data: Dict[str, Any]) -> Dict[str, Any]:
try:
return deep_map_render(self.render_entry, data)
except RecursionError:
except RecursionException:
raise DbtProjectError(
f"Cycle detected: {self.name} input has a reference to itself", project=data
)
@@ -159,8 +159,7 @@ class DbtProjectYamlRenderer(BaseRenderer):
if first in {"seeds", "models", "snapshots", "tests"}:
keypath_parts = {(k.lstrip("+ ") if isinstance(k, str) else k) for k in keypath}
# model-level hooks
late_rendered_hooks = {"pre-hook", "post-hook", "pre_hook", "post_hook"}
if keypath_parts.intersection(late_rendered_hooks):
if "pre-hook" in keypath_parts or "post-hook" in keypath_parts:
return False
return True

View File

@@ -3,42 +3,31 @@ import os
from copy import deepcopy
from dataclasses import dataclass, field
from pathlib import Path
from typing import (
Any,
Dict,
Iterable,
Iterator,
Mapping,
MutableSet,
Optional,
Tuple,
Type,
Union,
)
from typing import Dict, Any, Optional, Mapping, Iterator, Iterable, Tuple, List, MutableSet, Type
from .profile import Profile
from .project import Project
from .renderer import DbtProjectYamlRenderer, ProfileRenderer
from .utils import parse_cli_vars
from dbt import flags
from dbt.adapters.factory import get_include_paths, get_relation_class_by_name
from dbt.adapters.factory import get_relation_class_by_name, get_include_paths
from dbt.helper_types import FQNPath, PathSet, DictDefaultEmptyStr
from dbt.config.profile import read_user_config
from dbt.contracts.connection import AdapterRequiredConfig, Credentials
from dbt.contracts.graph.manifest import ManifestMetadata
from dbt.contracts.project import Configuration, UserConfig
from dbt.contracts.relation import ComponentName
from dbt.dataclass_schema import ValidationError
from dbt.exceptions import (
ConfigContractBrokenError,
DbtProjectError,
NonUniquePackageNameError,
DbtRuntimeError,
UninstalledPackagesFoundError,
)
from dbt.events.functions import warn_or_error
from dbt.events.types import UnusedResourceConfigPath
from dbt.helper_types import DictDefaultEmptyStr, FQNPath, PathSet
from dbt.ui import warning_tag
from .profile import Profile
from .project import Project, PartialProject
from .renderer import DbtProjectYamlRenderer, ProfileRenderer
from .utils import parse_cli_vars
from dbt.contracts.project import Configuration, UserConfig
from dbt.exceptions import (
RuntimeException,
DbtProjectError,
validator_error_message,
warn_or_error,
raise_compiler_error,
)
from dbt.dataclass_schema import ValidationError
def _project_quoting_dict(proj: Project, profile: Profile) -> Dict[ComponentName, bool]:
@@ -116,8 +105,6 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
query_comment=project.query_comment,
sources=project.sources,
tests=project.tests,
metrics=project.metrics,
exposures=project.exposures,
vars=project.vars,
config_version=project.config_version,
unrendered=project.unrendered,
@@ -187,7 +174,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
try:
Configuration.validate(self.serialize())
except ValidationError as e:
raise ConfigContractBrokenError(e) from e
raise DbtProjectError(validator_error_message(e)) from e
@classmethod
def _get_rendered_profile(
@@ -201,52 +188,28 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
@classmethod
def collect_parts(cls: Type["RuntimeConfig"], args: Any) -> Tuple[Project, Profile]:
cli_vars: Dict[str, Any] = parse_cli_vars(getattr(args, "vars", "{}"))
profile = cls.collect_profile(args=args)
project_renderer = DbtProjectYamlRenderer(profile, cli_vars)
project = cls.collect_project(args=args, project_renderer=project_renderer)
assert type(project) is Project
return (project, profile)
@classmethod
def collect_profile(
cls: Type["RuntimeConfig"], args: Any, profile_name: Optional[str] = None
) -> Profile:
cli_vars: Dict[str, Any] = parse_cli_vars(getattr(args, "vars", "{}"))
profile_renderer = ProfileRenderer(cli_vars)
# build the profile using the base renderer and the one fact we know
if profile_name is None:
# Note: only the named profile section is rendered here. The rest of the
# profile is ignored.
partial = cls.collect_project(args)
assert type(partial) is PartialProject
profile_name = partial.render_profile_name(profile_renderer)
profile = cls._get_rendered_profile(args, profile_renderer, profile_name)
# Save env_vars encountered in rendering for partial parsing
profile.profile_env_vars = profile_renderer.ctx_obj.env_vars
return profile
@classmethod
def collect_project(
cls: Type["RuntimeConfig"],
args: Any,
project_renderer: Optional[DbtProjectYamlRenderer] = None,
) -> Union[Project, PartialProject]:
# profile_name from the project
project_root = args.project_dir if args.project_dir else os.getcwd()
version_check = bool(flags.VERSION_CHECK)
partial = Project.partial_load(project_root, verify_version=version_check)
if project_renderer is None:
return partial
else:
project = partial.render(project_renderer)
project.project_env_vars = project_renderer.ctx_obj.env_vars
return project
# build the profile using the base renderer and the one fact we know
# Note: only the named profile section is rendered. The rest of the
# profile is ignored.
cli_vars: Dict[str, Any] = parse_cli_vars(getattr(args, "vars", "{}"))
profile_renderer = ProfileRenderer(cli_vars)
profile_name = partial.render_profile_name(profile_renderer)
profile = cls._get_rendered_profile(args, profile_renderer, profile_name)
# Save env_vars encountered in rendering for partial parsing
profile.profile_env_vars = profile_renderer.ctx_obj.env_vars
# get a new renderer using our target information and render the
# project
project_renderer = DbtProjectYamlRenderer(profile, cli_vars)
project = partial.render(project_renderer)
# Save env_vars encountered in rendering for partial parsing
project.project_env_vars = project_renderer.ctx_obj.env_vars
return (project, profile)
# Called in main.py, lib.py, task/base.py
@classmethod
@@ -258,7 +221,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
:param args: The arguments as parsed from the cli.
:raises DbtProjectError: If the project is invalid or missing.
:raises DbtProfileError: If the profile is invalid or missing.
:raises DbtValidationError: If the cli variables are invalid.
:raises ValidationException: If the cli variables are invalid.
"""
project, profile = cls.collect_parts(args)
@@ -311,15 +274,13 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
"snapshots": self._get_config_paths(self.snapshots),
"sources": self._get_config_paths(self.sources),
"tests": self._get_config_paths(self.tests),
"metrics": self._get_config_paths(self.metrics),
"exposures": self._get_config_paths(self.exposures),
}
def warn_for_unused_resource_config_paths(
def get_unused_resource_config_paths(
self,
resource_fqns: Mapping[str, PathSet],
disabled: PathSet,
) -> None:
) -> List[FQNPath]:
"""Return a list of lists of strings, where each inner list of strings
represents a type + FQN path of a resource configuration that is not
used.
@@ -333,13 +294,23 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
for config_path in config_paths:
if not _is_config_used(config_path, fqns):
resource_path = ".".join(i for i in ((resource_type,) + config_path))
unused_resource_config_paths.append(resource_path)
unused_resource_config_paths.append((resource_type,) + config_path)
return unused_resource_config_paths
if len(unused_resource_config_paths) == 0:
def warn_for_unused_resource_config_paths(
self,
resource_fqns: Mapping[str, PathSet],
disabled: PathSet,
) -> None:
unused = self.get_unused_resource_config_paths(resource_fqns, disabled)
if len(unused) == 0:
return
warn_or_error(UnusedResourceConfigPath(unused_config_paths=unused_resource_config_paths))
msg = UNUSED_RESOURCE_CONFIGURATION_PATH_MESSAGE.format(
len(unused), "\n".join("- {}".format(".".join(u)) for u in unused)
)
warn_or_error(msg, log_fmt=warning_tag("{}"))
def load_dependencies(self, base_only=False) -> Mapping[str, "RuntimeConfig"]:
if self.dependencies is None:
@@ -353,15 +324,22 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
count_packages_specified = len(self.packages.packages) # type: ignore
count_packages_installed = len(tuple(self._get_project_directories()))
if count_packages_specified > count_packages_installed:
raise UninstalledPackagesFoundError(
count_packages_specified,
count_packages_installed,
self.packages_install_path,
raise_compiler_error(
f"dbt found {count_packages_specified} package(s) "
f"specified in packages.yml, but only "
f"{count_packages_installed} package(s) installed "
f'in {self.packages_install_path}. Run "dbt deps" to '
f"install package dependencies."
)
project_paths = itertools.chain(internal_packages, self._get_project_directories())
for project_name, project in self.load_projects(project_paths):
if project_name in all_projects:
raise NonUniquePackageNameError(project_name)
raise_compiler_error(
f"dbt found more than one package with the name "
f'"{project_name}" included in this project. Package '
f"names must be unique in a project. Please rename "
f"one of these packages."
)
all_projects[project_name] = project
self.dependencies = all_projects
return self.dependencies
@@ -426,7 +404,7 @@ class UnsetProfile(Profile):
def __getattribute__(self, name):
if name in {"profile_name", "target_name", "threads"}:
raise DbtRuntimeError(f'Error: disallowed attribute "{name}" - no profile!')
raise RuntimeException(f'Error: disallowed attribute "{name}" - no profile!')
return Profile.__getattribute__(self, name)
@@ -453,7 +431,7 @@ class UnsetProfileConfig(RuntimeConfig):
def __getattribute__(self, name):
# Override __getattribute__ to check that the attribute isn't 'banned'.
if name in {"profile_name", "target_name"}:
raise DbtRuntimeError(f'Error: disallowed attribute "{name}" - no profile!')
raise RuntimeException(f'Error: disallowed attribute "{name}" - no profile!')
# avoid every attribute access triggering infinite recursion
return RuntimeConfig.__getattribute__(self, name)
@@ -499,8 +477,6 @@ class UnsetProfileConfig(RuntimeConfig):
"snapshots": self.snapshots,
"sources": self.sources,
"tests": self.tests,
"metrics": self.metrics,
"exposures": self.exposures,
"vars": self.vars.to_dict(),
"require-dbt-version": [v.to_version_string() for v in self.dbt_version],
"config-version": self.config_version,
@@ -561,8 +537,6 @@ class UnsetProfileConfig(RuntimeConfig):
query_comment=project.query_comment,
sources=project.sources,
tests=project.tests,
metrics=project.metrics,
exposures=project.exposures,
vars=project.vars,
config_version=project.config_version,
unrendered=project.unrendered,
@@ -602,13 +576,21 @@ class UnsetProfileConfig(RuntimeConfig):
:param args: The arguments as parsed from the cli.
:raises DbtProjectError: If the project is invalid or missing.
:raises DbtProfileError: If the profile is invalid or missing.
:raises DbtValidationError: If the cli variables are invalid.
:raises ValidationException: If the cli variables are invalid.
"""
project, profile = cls.collect_parts(args)
return cls.from_parts(project=project, profile=profile, args=args)
UNUSED_RESOURCE_CONFIGURATION_PATH_MESSAGE = """\
Configuration paths exist in your dbt_project.yml file which do not \
apply to any resources.
There are {} unused configuration paths:
{}
"""
def _is_config_used(path, fqns):
    # a config path counts as "used" when it is a prefix of at least one resource FQN
    if fqns:
        for fqn in fqns:
            if len(path) <= len(fqn) and fqn[: len(path)] == path:
                return True
    return False
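For intuition, with hypothetical FQNs: a configuration path matches only as a prefix of a resource's fully qualified name.

```
_is_config_used(("my_project", "staging"), [("my_project", "staging", "stg_orders")])  # True
_is_config_used(("my_project", "marts"), [("my_project", "staging", "stg_orders")])    # False
```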

View File

@@ -12,7 +12,7 @@ from dbt.clients.system import (
resolve_path_from_base,
)
from dbt.contracts.selection import SelectorFile
from dbt.exceptions import DbtSelectorsError, DbtRuntimeError
from dbt.exceptions import DbtSelectorsError, RuntimeException
from dbt.graph import parse_from_selectors_definition, SelectionSpec
from dbt.graph.selector_spec import SelectionCriteria
@@ -46,7 +46,7 @@ class SelectorConfig(Dict[str, Dict[str, Union[SelectionSpec, bool]]]):
f"yaml-selectors",
result_type="invalid_selector",
) from exc
except DbtRuntimeError as exc:
except RuntimeException as exc:
raise DbtSelectorsError(
f"Could not read selector file data: {exc}",
result_type="invalid_selector",
@@ -62,7 +62,7 @@ class SelectorConfig(Dict[str, Dict[str, Union[SelectionSpec, bool]]]):
) -> "SelectorConfig":
try:
rendered = renderer.render_data(data)
except (ValidationError, DbtRuntimeError) as exc:
except (ValidationError, RuntimeException) as exc:
raise DbtSelectorsError(
f"Could not render selector data: {exc}",
result_type="invalid_selector",
@@ -77,7 +77,7 @@ class SelectorConfig(Dict[str, Dict[str, Union[SelectionSpec, bool]]]):
) -> "SelectorConfig":
try:
data = load_yaml_text(load_file_contents(str(path)))
except (ValidationError, DbtRuntimeError) as exc:
except (ValidationError, RuntimeException) as exc:
raise DbtSelectorsError(
f"Could not read selector file: {exc}",
result_type="invalid_selector",

View File

@@ -8,24 +8,24 @@ from dbt.clients import yaml_helper
from dbt.config import Profile, Project, read_user_config
from dbt.config.renderer import DbtProjectYamlRenderer, ProfileRenderer
from dbt.events.functions import fire_event
from dbt.events.types import InvalidOptionYAML
from dbt.exceptions import DbtValidationError, OptionNotYamlDictError
from dbt.events.types import InvalidVarsYAML
from dbt.exceptions import ValidationException, raise_compiler_error
def parse_cli_vars(var_string: str) -> Dict[str, Any]:
return parse_cli_yaml_string(var_string, "vars")
def parse_cli_yaml_string(var_string: str, cli_option_name: str) -> Dict[str, Any]:
try:
cli_vars = yaml_helper.load_yaml_text(var_string)
var_type = type(cli_vars)
if var_type is dict:
return cli_vars
else:
raise OptionNotYamlDictError(var_type, cli_option_name)
except DbtValidationError:
fire_event(InvalidOptionYAML(option_name=cli_option_name))
type_name = var_type.__name__
raise_compiler_error(
"The --vars argument must be a YAML dictionary, but was "
"of type '{}'".format(type_name)
)
except ValidationException:
fire_event(InvalidVarsYAML())
raise
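A quick behavioral sketch of the parser above (values invented): the string must load as a YAML mapping; any other YAML type is rejected.

```
parse_cli_vars("{env: prod, threads: 4}")  # -> {"env": "prod", "threads": 4}
parse_cli_vars("[a, b]")                   # raises: --vars must be a YAML dictionary
```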

View File

@@ -1,10 +0,0 @@
SECRET_ENV_PREFIX = "DBT_ENV_SECRET_"
DEFAULT_ENV_PLACEHOLDER = "DBT_DEFAULT_PLACEHOLDER"
METADATA_ENV_PREFIX = "DBT_ENV_CUSTOM_ENV_"
MAXIMUM_SEED_SIZE = 1 * 1024 * 1024
MAXIMUM_SEED_SIZE_NAME = "1MB"
PIN_PACKAGE_URL = (
"https://docs.getdbt.com/docs/package-management#section-specifying-package-versions"
)

View File

@@ -2,7 +2,7 @@
Contexts are used for Jinja rendering. They include context methods, executable macros, and various settings that are available in Jinja.
The most common entrypoint to Jinja rendering in dbt is a method named `get_rendered`, which takes two arguments: templated code (string), and a context used to render it (dictionary).
The context is the bundle of information that is in "scope" when rendering Jinja-templated code. For instance, imagine a simple Jinja template:
```
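{# hypothetical example: ref() and var() below are resolved from the rendering context #}
select * from {{ ref('stg_orders') }}
where status = '{{ var("order_status", "shipped") }}'
```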

View File

@@ -4,22 +4,19 @@ from typing import Any, Dict, NoReturn, Optional, Mapping, Iterable, Set, List
from dbt import flags
from dbt import tracking
from dbt import utils
from dbt.clients.jinja import get_rendered
from dbt.clients.yaml_helper import yaml, safe_load, SafeLoader, Loader, Dumper # noqa: F401
from dbt.constants import SECRET_ENV_PREFIX, DEFAULT_ENV_PLACEHOLDER
from dbt.contracts.graph.nodes import Resource
from dbt.contracts.graph.compiled import CompiledResource
from dbt.exceptions import (
SecretEnvVarLocationError,
EnvVarMissingError,
CompilationException,
MacroReturn,
RequiredVarNotFoundError,
SetStrictWrongTypeError,
ZipStrictWrongTypeError,
raise_compiler_error,
raise_parsing_error,
disallow_secret_env_var,
)
from dbt.logger import SECRET_ENV_PREFIX
from dbt.events.functions import fire_event, get_invocation_id
from dbt.events.types import JinjaLogInfo, JinjaLogDebug
from dbt.events.contextvars import get_node_info
from dbt.events.types import MacroEventInfo, MacroEventDebug
from dbt.version import __version__ as dbt_version
# These modules are added to the context. Consider alternative
@@ -129,17 +126,18 @@ class ContextMeta(type):
class Var:
UndefinedVarError = "Required var '{}' not found in config:\nVars " "supplied to {} = {}"
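    # Sentinel default value: lets callers distinguish "no default supplied" from an explicit None.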
_VAR_NOTSET = object()
def __init__(
self,
context: Mapping[str, Any],
cli_vars: Mapping[str, Any],
node: Optional[Resource] = None,
node: Optional[CompiledResource] = None,
) -> None:
self._context: Mapping[str, Any] = context
self._cli_vars: Mapping[str, Any] = cli_vars
self._node: Optional[Resource] = node
self._node: Optional[CompiledResource] = node
self._merged: Mapping[str, Any] = self._generate_merged()
def _generate_merged(self) -> Mapping[str, Any]:
@@ -153,7 +151,10 @@ class Var:
return "<Configuration>"
def get_missing_var(self, var_name):
raise RequiredVarNotFoundError(var_name, self._merged, self._node)
dct = {k: self._merged[k] for k in self._merged}
pretty_vars = json.dumps(dct, sort_keys=True, indent=4)
msg = self.UndefinedVarError.format(var_name, self.node_name, pretty_vars)
raise_compiler_error(msg, self._node)
def has_var(self, var_name: str):
return var_name in self._merged
@@ -297,22 +298,18 @@ class BaseContext(metaclass=ContextMeta):
"""
return_value = None
if var.startswith(SECRET_ENV_PREFIX):
raise SecretEnvVarLocationError(var)
disallow_secret_env_var(var)
if var in os.environ:
return_value = os.environ[var]
elif default is not None:
return_value = default
if return_value is not None:
# If the environment variable is set from a default, store a string indicating
# that so we can skip partial parsing. Otherwise the file will be scheduled for
# reparsing. If the default changes, the file will have been updated and therefore
# will be scheduled for reparsing anyways.
self.env_vars[var] = return_value if var in os.environ else DEFAULT_ENV_PLACEHOLDER
self.env_vars[var] = return_value
return return_value
else:
raise EnvVarMissingError(var)
msg = f"Env var required but not provided: '{var}'"
raise_parsing_error(msg)
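A hedged sketch of the env_var contract implemented above (`ctx` and the variable names are assumed): secret-prefixed variables are rejected outright, set variables are returned and tracked, and a missing variable with no default is an error.

```
ctx.env_var("DBT_ENV_SECRET_TOKEN")   # raises: DBT_ENV_SECRET_ variables are disallowed here
ctx.env_var("TARGET_SCHEMA", "dev")   # -> "dev" when unset; recorded so parsing can be invalidated
ctx.env_var("NEVER_SET")              # raises an "env var required but not provided" error
```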
if os.environ.get("DBT_MACRO_DEBUGGING"):
@@ -493,7 +490,7 @@ class BaseContext(metaclass=ContextMeta):
try:
return set(value)
except TypeError as e:
raise SetStrictWrongTypeError(e)
raise CompilationException(e)
@contextmember("zip")
@staticmethod
@@ -537,7 +534,7 @@ class BaseContext(metaclass=ContextMeta):
try:
return zip(*args)
except TypeError as e:
raise ZipStrictWrongTypeError(e)
raise CompilationException(e)
@contextmember
@staticmethod
@@ -555,9 +552,9 @@ class BaseContext(metaclass=ContextMeta):
{% endmacro %}"
"""
if info:
fire_event(JinjaLogInfo(msg=msg, node_info=get_node_info()))
fire_event(MacroEventInfo(msg=msg))
else:
fire_event(JinjaLogDebug(msg=msg, node_info=get_node_info()))
fire_event(MacroEventDebug(msg=msg))
return ""
@contextproperty
@@ -634,8 +631,9 @@ class BaseContext(metaclass=ContextMeta):
{% endif %}
This supports all flags defined in flags submodule (core/dbt/flags.py)
TODO: Replace with object that provides read-only access to flag values
"""
return flags.get_flag_obj()
return flags
@contextmember
@staticmethod
@@ -684,19 +682,6 @@ class BaseContext(metaclass=ContextMeta):
dict_diff.update({k: dict_a[k]})
return dict_diff
@contextmember
@staticmethod
def local_md5(value: str) -> str:
"""Calculates an MD5 hash of the given string.
It's called "local_md5" to emphasize that it runs locally in dbt (in jinja context) and not an MD5 SQL command.
:param value: The value to hash
Usage:
{% set value_hash = local_md5("hello world") %}
"""
return utils.md5(value)
def generate_base_context(cli_vars: Dict[str, Any]) -> Dict[str, Any]:
ctx = BaseContext(cli_vars)

View File

@@ -1,14 +1,14 @@
import os
from typing import Any, Dict, Optional
from dbt.constants import SECRET_ENV_PREFIX, DEFAULT_ENV_PLACEHOLDER
from dbt.contracts.connection import AdapterRequiredConfig
from dbt.logger import SECRET_ENV_PREFIX
from dbt.node_types import NodeType
from dbt.utils import MultiDict
from dbt.context.base import contextproperty, contextmember, Var
from dbt.context.target import TargetContext
from dbt.exceptions import EnvVarMissingError, SecretEnvVarLocationError
from dbt.exceptions import raise_parsing_error, disallow_secret_env_var
class ConfiguredContext(TargetContext):
@@ -86,7 +86,7 @@ class SchemaYamlContext(ConfiguredContext):
def env_var(self, var: str, default: Optional[str] = None) -> str:
return_value = None
if var.startswith(SECRET_ENV_PREFIX):
raise SecretEnvVarLocationError(var)
disallow_secret_env_var(var)
if var in os.environ:
return_value = os.environ[var]
elif default is not None:
@@ -94,17 +94,11 @@ class SchemaYamlContext(ConfiguredContext):
if return_value is not None:
if self.schema_yaml_vars:
# If the environment variable is set from a default, store a string indicating
# that so we can skip partial parsing. Otherwise the file will be scheduled for
# reparsing. If the default changes, the file will have been updated and therefore
# will be scheduled for reparsing anyways.
self.schema_yaml_vars.env_vars[var] = (
return_value if var in os.environ else DEFAULT_ENV_PLACEHOLDER
)
self.schema_yaml_vars.env_vars[var] = return_value
return return_value
else:
raise EnvVarMissingError(var)
msg = f"Env var required but not provided: '{var}'"
raise_parsing_error(msg)
class MacroResolvingContext(ConfiguredContext):

View File

@@ -5,7 +5,7 @@ from typing import List, Iterator, Dict, Any, TypeVar, Generic
from dbt.config import RuntimeConfig, Project, IsFQNResource
from dbt.contracts.graph.model_config import BaseConfig, get_config_for, _listify
from dbt.exceptions import DbtInternalError
from dbt.exceptions import InternalException
from dbt.node_types import NodeType
from dbt.utils import fqn_search
@@ -43,12 +43,9 @@ class UnrenderedConfig(ConfigSource):
model_configs = unrendered.get("sources")
elif resource_type == NodeType.Test:
model_configs = unrendered.get("tests")
elif resource_type == NodeType.Metric:
model_configs = unrendered.get("metrics")
elif resource_type == NodeType.Exposure:
model_configs = unrendered.get("exposures")
else:
model_configs = unrendered.get("models")
if model_configs is None:
return {}
else:
@@ -68,10 +65,6 @@ class RenderedConfig(ConfigSource):
model_configs = self.project.sources
elif resource_type == NodeType.Test:
model_configs = self.project.tests
elif resource_type == NodeType.Metric:
model_configs = self.project.metrics
elif resource_type == NodeType.Exposure:
model_configs = self.project.exposures
else:
model_configs = self.project.models
return model_configs
@@ -89,7 +82,7 @@ class BaseContextConfigGenerator(Generic[T]):
return self._active_project
dependencies = self._active_project.load_dependencies()
if project_name not in dependencies:
raise DbtInternalError(
raise InternalException(
f"Project name {project_name} not found in dependencies "
f"(found {list(dependencies)})"
)
@@ -287,14 +280,14 @@ class ContextConfig:
elif k in BaseConfig.mergebehavior["update"]:
if not isinstance(v, dict):
raise DbtInternalError(f"expected dict, got {v}")
raise InternalException(f"expected dict, got {v}")
if k in config_call_dict and isinstance(config_call_dict[k], dict):
config_call_dict[k].update(v)
else:
config_call_dict[k] = v
elif k in BaseConfig.mergebehavior["dict_key_append"]:
if not isinstance(v, dict):
raise DbtInternalError(f"expected dict, got {v}")
raise InternalException(f"expected dict, got {v}")
if k in config_call_dict: # should always be a dict
for key, value in v.items():
extend = False

View File

@@ -1,12 +1,13 @@
from typing import Any, Dict, Union
from dbt.exceptions import (
DocTargetNotFoundError,
DocArgsError,
doc_invalid_args,
doc_target_not_found,
)
from dbt.config.runtime import RuntimeConfig
from dbt.contracts.graph.compiled import CompileResultNode
from dbt.contracts.graph.manifest import Manifest
from dbt.contracts.graph.nodes import Macro, ResultNode
from dbt.contracts.graph.parsed import ParsedMacro
from dbt.context.base import contextmember
from dbt.context.configured import SchemaYamlContext
@@ -16,7 +17,7 @@ class DocsRuntimeContext(SchemaYamlContext):
def __init__(
self,
config: RuntimeConfig,
node: Union[Macro, ResultNode],
node: Union[ParsedMacro, CompileResultNode],
manifest: Manifest,
current_project: str,
) -> None:
@@ -52,9 +53,9 @@ class DocsRuntimeContext(SchemaYamlContext):
elif len(args) == 2:
doc_package_name, doc_name = args
else:
raise DocArgsError(self.node, args)
doc_invalid_args(self.node, args)
# Documentation
# ParsedDocumentation
target_doc = self.manifest.resolve_doc(
doc_name,
doc_package_name,
@@ -68,9 +69,7 @@ class DocsRuntimeContext(SchemaYamlContext):
# TODO CT-211
source_file.add_node(self.node.unique_id) # type: ignore[union-attr]
else:
raise DocTargetNotFoundError(
node=self.node, target_doc_name=doc_name, target_doc_package=doc_package_name
)
doc_target_not_found(self.node, doc_name, doc_package_name)
return target_doc.block_contents

View File

@@ -1,144 +0,0 @@
import functools
from typing import NoReturn
from dbt.events.functions import warn_or_error
from dbt.events.helpers import env_secrets, scrub_secrets
from dbt.events.types import JinjaLogWarning
from dbt.exceptions import (
DbtRuntimeError,
MissingConfigError,
MissingMaterializationError,
MissingRelationError,
AmbiguousAliasError,
AmbiguousCatalogMatchError,
CacheInconsistencyError,
DataclassNotDictError,
CompilationError,
DbtDatabaseError,
DependencyNotFoundError,
DependencyError,
DuplicatePatchPathError,
DuplicateResourceNameError,
PropertyYMLError,
NotImplementedError,
RelationWrongTypeError,
)
def warn(msg, node=None):
warn_or_error(JinjaLogWarning(msg=msg), node=node)
return ""
def missing_config(model, name) -> NoReturn:
raise MissingConfigError(unique_id=model.unique_id, name=name)
def missing_materialization(model, adapter_type) -> NoReturn:
raise MissingMaterializationError(
materialization=model.config.materialized, adapter_type=adapter_type
)
def missing_relation(relation, model=None) -> NoReturn:
raise MissingRelationError(relation, model)
def raise_ambiguous_alias(node_1, node_2, duped_name=None) -> NoReturn:
raise AmbiguousAliasError(node_1, node_2, duped_name)
def raise_ambiguous_catalog_match(unique_id, match_1, match_2) -> NoReturn:
raise AmbiguousCatalogMatchError(unique_id, match_1, match_2)
def raise_cache_inconsistent(message) -> NoReturn:
raise CacheInconsistencyError(message)
def raise_dataclass_not_dict(obj) -> NoReturn:
raise DataclassNotDictError(obj)
def raise_compiler_error(msg, node=None) -> NoReturn:
raise CompilationError(msg, node)
def raise_database_error(msg, node=None) -> NoReturn:
raise DbtDatabaseError(msg, node)
def raise_dep_not_found(node, node_description, required_pkg) -> NoReturn:
raise DependencyNotFoundError(node, node_description, required_pkg)
def raise_dependency_error(msg) -> NoReturn:
raise DependencyError(scrub_secrets(msg, env_secrets()))
def raise_duplicate_patch_name(patch_1, existing_patch_path) -> NoReturn:
raise DuplicatePatchPathError(patch_1, existing_patch_path)
def raise_duplicate_resource_name(node_1, node_2) -> NoReturn:
raise DuplicateResourceNameError(node_1, node_2)
def raise_invalid_property_yml_version(path, issue) -> NoReturn:
raise PropertyYMLError(path, issue)
def raise_not_implemented(msg) -> NoReturn:
raise NotImplementedError(msg)
def relation_wrong_type(relation, expected_type, model=None) -> NoReturn:
raise RelationWrongTypeError(relation, expected_type, model)
# Update this when a new function should be added to the
# dbt context's `exceptions` key!
CONTEXT_EXPORTS = {
fn.__name__: fn
for fn in [
warn,
missing_config,
missing_materialization,
missing_relation,
raise_ambiguous_alias,
raise_ambiguous_catalog_match,
raise_cache_inconsistent,
raise_dataclass_not_dict,
raise_compiler_error,
raise_database_error,
raise_dep_not_found,
raise_dependency_error,
raise_duplicate_patch_name,
raise_duplicate_resource_name,
raise_invalid_property_yml_version,
raise_not_implemented,
relation_wrong_type,
]
}
# wraps context based exceptions in node info
def wrapper(model):
def wrap(func):
@functools.wraps(func)
def inner(*args, **kwargs):
try:
return func(*args, **kwargs)
except DbtRuntimeError as exc:
exc.add_node(model)
raise exc
return inner
return wrap
def wrapped_exports(model):
wrap = wrapper(model)
return {name: wrap(export) for name, export in CONTEXT_EXPORTS.items()}
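A small usage sketch (the `model` object is assumed): the point of the wrapper is that context exceptions get tagged with the calling node before they propagate.

```
exports = wrapped_exports(model)
try:
    exports["raise_compiler_error"]("invalid configuration")
except DbtRuntimeError as exc:
    # exc.add_node(model) has already run inside the wrapper,
    # so the error now points at the offending resource
    raise
```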

View File

@@ -1,10 +1,10 @@
from typing import Dict, MutableMapping, Optional
from dbt.contracts.graph.nodes import Macro
from dbt.exceptions import DuplicateMacroNameError, PackageNotFoundForMacroError
from dbt.contracts.graph.parsed import ParsedMacro
from dbt.exceptions import raise_duplicate_macro_name, raise_compiler_error
from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME
from dbt.clients.jinja import MacroGenerator
MacroNamespace = Dict[str, Macro]
MacroNamespace = Dict[str, ParsedMacro]
# This class builds the MacroResolver by adding macros
@@ -21,7 +21,7 @@ MacroNamespace = Dict[str, Macro]
class MacroResolver:
def __init__(
self,
macros: MutableMapping[str, Macro],
macros: MutableMapping[str, ParsedMacro],
root_project_name: str,
internal_package_names,
) -> None:
@@ -77,7 +77,7 @@ class MacroResolver:
def _add_macro_to(
self,
package_namespaces: Dict[str, MacroNamespace],
macro: Macro,
macro: ParsedMacro,
):
if macro.package_name in package_namespaces:
namespace = package_namespaces[macro.package_name]
@@ -86,10 +86,10 @@ class MacroResolver:
package_namespaces[macro.package_name] = namespace
if macro.name in namespace:
raise DuplicateMacroNameError(macro, macro, macro.package_name)
raise_duplicate_macro_name(macro, macro, macro.package_name)
package_namespaces[macro.package_name][macro.name] = macro
def add_macro(self, macro: Macro):
def add_macro(self, macro: ParsedMacro):
macro_name: str = macro.name
# internal macros (from plugins) will be processed separately from
@@ -109,15 +109,9 @@ class MacroResolver:
def get_macro(self, local_package, macro_name):
local_package_macros = {}
# If the macro is explicitly prefixed with an internal namespace
# (e.g. 'dbt.some_macro'), look there first
if local_package in self.internal_package_names:
local_package_macros = self.internal_packages[local_package]
# If the macro is explicitly prefixed with a different package name
# (e.g. 'dbt_utils.some_macro'), look there first
if local_package not in self.internal_package_names and local_package in self.packages:
local_package_macros = self.packages[local_package]
# First: search the specified package for this macro
# First: search the local packages for this macro
if macro_name in local_package_macros:
return local_package_macros[macro_name]
# Now look up in the standard search order
@@ -187,7 +181,7 @@ class TestMacroNamespace:
elif package_name in self.macro_resolver.packages:
macro = self.macro_resolver.packages[package_name].get(name)
else:
raise PackageNotFoundForMacroError(package_name)
raise_compiler_error(f"Could not find package '{package_name}'")
if not macro:
return None
macro_func = MacroGenerator(macro, self.ctx, self.node, self.thread_ctx)
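Putting the newer resolution logic above together, a usage sketch with assumed inputs (`manifest.macros` stands in for a real macro mapping):

```
resolver = MacroResolver(
    manifest.macros,                                   # assumed: mapping of unique_id -> macro
    root_project_name="my_project",
    internal_package_names={"dbt", "dbt_postgres"},
)
# an explicit package prefix is searched first, then dbt's standard macro search order
macro = resolver.get_macro("dbt_utils", "star")
```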

Some files were not shown because too many files have changed in this diff.