Compare commits

2 Commits

Author         SHA1        Message                                                                        Date
Emily Rockman  ad9aabca1d  fix merge conflict leftovers                                                   2022-11-29 09:25:42 -06:00
Emily Rockman  ea1304f8bb  WIP (squashed WIP commits, "added data to exception", "log all class props")  2022-11-29 08:17:39 -06:00

881 changed files with 43453 additions and 86940 deletions

@@ -1,19 +1,13 @@
[bumpversion]
current_version = 1.7.0a1
parse = (?P<major>[\d]+) # major version number
\.(?P<minor>[\d]+) # minor version number
\.(?P<patch>[\d]+) # patch version number
(?P<prerelease> # optional pre-release - ex: a1, b2, rc25
(?P<prekind>a|b|rc) # pre-release type
(?P<num>[\d]+) # pre-release version number
current_version = 1.4.0a1
parse = (?P<major>\d+)
\.(?P<minor>\d+)
\.(?P<patch>\d+)
((?P<prekind>a|b|rc)
(?P<pre>\d+) # pre-release version num
)?
( # optional nightly release indicator
\.(?P<nightly>dev[0-9]+) # ex: .dev02142023
)? # expected matches: `1.15.0`, `1.5.0a11`, `1.5.0a1.dev123`, `1.5.0.dev123457`, expected failures: `1`, `1.5`, `1.5.2-a1`, `text1.5.0`
serialize =
{major}.{minor}.{patch}{prekind}{num}.{nightly}
{major}.{minor}.{patch}.{nightly}
{major}.{minor}.{patch}{prekind}{num}
{major}.{minor}.{patch}{prekind}{pre}
{major}.{minor}.{patch}
commit = False
tag = False
@@ -27,11 +21,9 @@ values =
rc
final
[bumpversion:part:num]
[bumpversion:part:pre]
first_value = 1
[bumpversion:part:nightly]
[bumpversion:file:core/setup.py]
[bumpversion:file:core/dbt/version.py]

@@ -3,9 +3,6 @@
For information on prior major and minor releases, see their changelogs:
* [1.6](https://github.com/dbt-labs/dbt-core/blob/1.6.latest/CHANGELOG.md)
* [1.5](https://github.com/dbt-labs/dbt-core/blob/1.5.latest/CHANGELOG.md)
* [1.4](https://github.com/dbt-labs/dbt-core/blob/1.4.latest/CHANGELOG.md)
* [1.3](https://github.com/dbt-labs/dbt-core/blob/1.3.latest/CHANGELOG.md)
* [1.2](https://github.com/dbt-labs/dbt-core/blob/1.2.latest/CHANGELOG.md)
* [1.1](https://github.com/dbt-labs/dbt-core/blob/1.1.latest/CHANGELOG.md)

@@ -1,6 +0,0 @@
kind: Dependencies
body: Pin click<9 + sqlparse<0.5
time: 2023-07-19T12:37:43.716495+02:00
custom:
Author: jtcohen6
PR: "8146"

@@ -0,0 +1,7 @@
kind: "Dependency"
body: "Update pathspec requirement from ~=0.9.0 to >=0.9,<0.11 in /core"
time: 2022-09-23T00:06:46.00000Z
custom:
Author: dependabot[bot]
Issue: 4904
PR: 5917

@@ -0,0 +1,7 @@
kind: "Dependency"
body: "Bump black from 22.8.0 to 22.10.0"
time: 2022-10-07T00:08:48.00000Z
custom:
Author: dependabot[bot]
Issue: 4904
PR: 6019

@@ -0,0 +1,7 @@
kind: "Dependency"
body: "Update colorama requirement from <0.4.6,>=0.3.9 to >=0.3.9,<0.4.7 in /core"
time: 2022-10-26T00:09:10.00000Z
custom:
Author: dependabot[bot]
Issue: 4904
PR: 6144

@@ -0,0 +1,7 @@
kind: Docs
body: minor doc correction
time: 2022-09-08T15:41:57.689162-04:00
custom:
Author: andy-clapson
Issue: "5791"
PR: "5684"

@@ -0,0 +1,7 @@
kind: Docs
body: Generate API docs for new CLI interface
time: 2022-10-07T09:06:56.446078-05:00
custom:
Author: stu-k
Issue: "5528"
PR: "6022"

@@ -0,0 +1,6 @@
kind: Docs
time: 2022-10-17T17:14:11.715348-05:00
custom:
Author: paulbenschmidt
Issue: "5880"
PR: "324"

@@ -0,0 +1,7 @@
kind: Docs
body: Fix rendering of sample code for metrics
time: 2022-11-16T15:57:43.204201+01:00
custom:
Author: jtcohen6
Issue: "323"
PR: "346"

@@ -1,6 +0,0 @@
kind: Docs
body: Fix for column tests not rendering on quoted columns
time: 2023-05-31T11:54:19.687363-04:00
custom:
Author: drewbanin
Issue: "201"

@@ -1,6 +0,0 @@
kind: Docs
body: Remove static SQL codeblock for metrics
time: 2023-07-18T19:24:22.155323+02:00
custom:
Author: marcodamore
Issue: "436"

@@ -0,0 +1,8 @@
kind: Features
body: Added favor-state flag to optionally favor state nodes even if unselected node
exists
time: 2022-04-08T16:54:59.696564+01:00
custom:
Author: daniel-murray josephberni
Issue: "2968"
PR: "5859"

@@ -0,0 +1,7 @@
kind: Features
body: Proto logging messages
time: 2022-08-17T15:48:57.225267-04:00
custom:
Author: gshank
Issue: "5610"
PR: "5643"

@@ -0,0 +1,7 @@
kind: Features
body: Friendlier error messages when packages.yml is malformed
time: 2022-09-12T12:59:35.121188+01:00
custom:
Author: jared-rimmer
Issue: "5486"
PR: "5812"

@@ -0,0 +1,7 @@
kind: Features
body: Migrate dbt-utils current_timestamp macros into core + adapters
time: 2022-09-14T09:56:25.97818-07:00
custom:
Author: colin-rogers-dbt
Issue: "5521"
PR: "5838"

@@ -0,0 +1,7 @@
kind: Features
body: Allow partitions in external tables to be supplied as a list
time: 2022-09-25T21:16:51.051239654+02:00
custom:
Author: pgoslatara
Issue: "5929"
PR: "5930"

@@ -0,0 +1,7 @@
kind: Features
body: extend -f flag shorthand for seed command
time: 2022-10-03T11:07:05.381632-05:00
custom:
Author: dave-connors-3
Issue: "5990"
PR: "5991"

@@ -0,0 +1,8 @@
kind: Features
body: This pulls the profile name from args when constructing a RuntimeConfig in lib.py,
enabling the dbt-server to override the value that's in the dbt_project.yml
time: 2022-11-02T15:00:03.000805-05:00
custom:
Author: racheldaniel
Issue: "6201"
PR: "6202"

@@ -0,0 +1,7 @@
kind: Features
body: Added an md5 function to the base context
time: 2022-11-14T18:52:07.788593+02:00
custom:
Author: haritamar
Issue: "6246"
PR: "6247"

@@ -0,0 +1,7 @@
kind: Fixes
body: Account for disabled flags on models in schema files more completely
time: 2022-09-16T10:48:54.162273-05:00
custom:
Author: emmyoop
Issue: "3992"
PR: "5868"

@@ -0,0 +1,7 @@
kind: Fixes
body: Add validation of enabled config for metrics, exposures and sources
time: 2022-10-10T11:32:18.752322-05:00
custom:
Author: emmyoop
Issue: "6030"
PR: "6038"

@@ -0,0 +1,7 @@
kind: Fixes
body: check length of args of python model function before accessing it
time: 2022-10-11T16:07:15.464093-04:00
custom:
Author: chamini2
Issue: "6041"
PR: "6042"

@@ -0,0 +1,8 @@
kind: Fixes
body: Add functors to ensure event types with str-type attributes are initialized
to spec, even when provided non-str type params.
time: 2022-10-16T17:37:42.846683-07:00
custom:
Author: versusfacit
Issue: "5436"
PR: "5874"

@@ -0,0 +1,7 @@
kind: Fixes
body: Allow hooks to fail without halting execution flow
time: 2022-11-07T09:53:14.340257-06:00
custom:
Author: ChenyuLInx
Issue: "5625"
PR: "6059"

@@ -1,6 +0,0 @@
kind: Fixes
body: Enable converting deprecation warnings to errors
time: 2023-07-18T12:55:18.03914-04:00
custom:
Author: michelleark
Issue: "8130"

@@ -0,0 +1,7 @@
kind: Under the Hood
body: Put black config in explicit config
time: 2022-09-27T19:42:59.241433-07:00
custom:
Author: max-sixty
Issue: "5946"
PR: "5947"

@@ -0,0 +1,7 @@
kind: Under the Hood
body: Added flat_graph attribute to the Manifest class's deepcopy() coverage
time: 2022-09-29T13:44:06.275941-04:00
custom:
Author: peterallenwebb
Issue: "5809"
PR: "5975"

@@ -0,0 +1,7 @@
kind: Under the Hood
body: Add mypy configs so `mypy` passes from CLI
time: 2022-10-05T12:03:10.061263-07:00
custom:
Author: max-sixty
Issue: "5983"
PR: "5983"

@@ -0,0 +1,7 @@
kind: Under the Hood
body: Exception message cleanup.
time: 2022-10-07T09:46:27.682872-05:00
custom:
Author: emmyoop
Issue: "6023"
PR: "6024"

@@ -0,0 +1,7 @@
kind: Under the Hood
body: Add dmypy cache to gitignore
time: 2022-10-07T14:00:44.227644-07:00
custom:
Author: max-sixty
Issue: "6028"
PR: "5978"

@@ -0,0 +1,7 @@
kind: Under the Hood
body: Provide useful errors when the value of 'materialized' is invalid
time: 2022-10-13T18:19:12.167548-04:00
custom:
Author: peterallenwebb
Issue: "5229"
PR: "6025"

@@ -0,0 +1,7 @@
kind: Under the Hood
body: Fixed extra whitespace in strings introduced by black.
time: 2022-10-17T15:15:11.499246-05:00
custom:
Author: luke-bassett
Issue: "1350"
PR: "6086"

@@ -0,0 +1,7 @@
kind: Under the Hood
body: Clean up string formatting
time: 2022-10-17T15:58:44.676549-04:00
custom:
Author: eve-johns
Issue: "6068"
PR: "6082"

@@ -0,0 +1,7 @@
kind: Under the Hood
body: Remove the 'root_path' field from most nodes
time: 2022-10-28T10:48:37.687886-04:00
custom:
Author: gshank
Issue: "6171"
PR: "6172"

@@ -0,0 +1,7 @@
kind: Under the Hood
body: Combine certain logging events with different levels
time: 2022-10-28T11:03:44.887836-04:00
custom:
Author: gshank
Issue: "6173"
PR: "6174"

@@ -0,0 +1,7 @@
kind: Under the Hood
body: Convert threading tests to pytest
time: 2022-11-08T07:45:50.589147-06:00
custom:
Author: stu-k
Issue: "5942"
PR: "6226"

@@ -0,0 +1,7 @@
kind: Under the Hood
body: Convert postgres index tests to pytest
time: 2022-11-08T11:56:33.743042-06:00
custom:
Author: stu-k
Issue: "5770"
PR: "6228"

@@ -0,0 +1,7 @@
kind: Under the Hood
body: Convert use color tests to pytest
time: 2022-11-08T13:31:04.788547-06:00
custom:
Author: stu-k
Issue: "5771"
PR: "6230"

@@ -4,70 +4,21 @@ headerPath: header.tpl.md
versionHeaderPath: ""
changelogPath: CHANGELOG.md
versionExt: md
envPrefix: "CHANGIE_"
versionFormat: '## dbt-core {{.Version}} - {{.Time.Format "January 02, 2006"}}'
kindFormat: '### {{.Kind}}'
changeFormat: |-
{{- $IssueList := list }}
{{- $changes := splitList " " $.Custom.Issue }}
{{- range $issueNbr := $changes }}
{{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/issues/nbr)" | replace "nbr" $issueNbr }}
{{- $IssueList = append $IssueList $changeLink }}
{{- end -}}
- {{.Body}} ({{ range $index, $element := $IssueList }}{{if $index}}, {{end}}{{$element}}{{end}})
changeFormat: '- {{.Body}} ([#{{.Custom.Issue}}](https://github.com/dbt-labs/dbt-core/issues/{{.Custom.Issue}}), [#{{.Custom.PR}}](https://github.com/dbt-labs/dbt-core/pull/{{.Custom.PR}}))'
kinds:
- label: Breaking Changes
- label: Features
- label: Fixes
- label: Docs
changeFormat: |-
{{- $IssueList := list }}
{{- $changes := splitList " " $.Custom.Issue }}
{{- range $issueNbr := $changes }}
{{- $changeLink := "[dbt-docs/#nbr](https://github.com/dbt-labs/dbt-docs/issues/nbr)" | replace "nbr" $issueNbr }}
{{- $IssueList = append $IssueList $changeLink }}
{{- end -}}
- {{.Body}} ({{ range $index, $element := $IssueList }}{{if $index}}, {{end}}{{$element}}{{end}})
changeFormat: '- {{.Body}} ([dbt-docs/#{{.Custom.Issue}}](https://github.com/dbt-labs/dbt-docs/issues/{{.Custom.Issue}}), [dbt-docs/#{{.Custom.PR}}](https://github.com/dbt-labs/dbt-docs/pull/{{.Custom.PR}}))'
- label: Under the Hood
- label: Dependencies
changeFormat: |-
{{- $PRList := list }}
{{- $changes := splitList " " $.Custom.PR }}
{{- range $pullrequest := $changes }}
{{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $pullrequest }}
{{- $PRList = append $PRList $changeLink }}
{{- end -}}
- {{.Body}} ({{ range $index, $element := $PRList }}{{if $index}}, {{end}}{{$element}}{{end}})
skipGlobalChoices: true
additionalChoices:
- key: Author
label: GitHub Username(s) (separated by a single space if multiple)
type: string
minLength: 3
- key: PR
label: GitHub Pull Request Number (separated by a single space if multiple)
type: string
minLength: 1
changeFormat: '- {{.Body}} ({{if ne .Custom.Issue ""}}[#{{.Custom.Issue}}](https://github.com/dbt-labs/dbt-core/issues/{{.Custom.Issue}}), {{end}}[#{{.Custom.PR}}](https://github.com/dbt-labs/dbt-core/pull/{{.Custom.PR}}))'
- label: Security
changeFormat: |-
{{- $PRList := list }}
{{- $changes := splitList " " $.Custom.PR }}
{{- range $pullrequest := $changes }}
{{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $pullrequest }}
{{- $PRList = append $PRList $changeLink }}
{{- end -}}
- {{.Body}} ({{ range $index, $element := $PRList }}{{if $index}}, {{end}}{{$element}}{{end}})
skipGlobalChoices: true
additionalChoices:
- key: Author
label: GitHub Username(s) (separated by a single space if multiple)
type: string
minLength: 3
- key: PR
label: GitHub Pull Request Number (separated by a single space if multiple)
type: string
minLength: 1
changeFormat: '- {{.Body}} ({{if ne .Custom.Issue ""}}[#{{.Custom.Issue}}](https://github.com/dbt-labs/dbt-core/issues/{{.Custom.Issue}}), {{end}}[#{{.Custom.PR}}](https://github.com/dbt-labs/dbt-core/pull/{{.Custom.PR}}))'
newlines:
afterChangelogHeader: 1
@@ -82,53 +33,42 @@ custom:
type: string
minLength: 3
- key: Issue
label: GitHub Issue Number (separated by a single space if multiple)
type: string
minLength: 1
label: GitHub Issue Number
type: int
minInt: 1
- key: PR
label: GitHub Pull Request Number
type: int
minInt: 1
footerFormat: |
{{- $contributorDict := dict }}
{{- /* ensure all names in this list are all lowercase for later matching purposes */}}
{{- $core_team := splitList " " .Env.CORE_TEAM }}
{{- /* ensure we always skip snyk and dependabot in addition to the core team */}}
{{- $maintainers := list "dependabot[bot]" "snyk-bot"}}
{{- range $team_member := $core_team }}
{{- $team_member_lower := lower $team_member }}
{{- $maintainers = append $maintainers $team_member_lower }}
{{- end }}
{{- /* any names added to this list should be all lowercase for later matching purposes */}}
{{- $core_team := list "michelleark" "peterallenwebb" "emmyoop" "nathaniel-may" "gshank" "leahwicz" "chenyulinx" "stu-k" "iknox-fa" "versusfacit" "mcknight-42" "jtcohen6" "dependabot[bot]" "snyk-bot" "colin-rogers-dbt" }}
{{- range $change := .Changes }}
{{- $authorList := splitList " " $change.Custom.Author }}
{{- /* loop through all authors for a single changelog */}}
{{- /* loop through all authors for a PR */}}
{{- range $author := $authorList }}
{{- $authorLower := lower $author }}
{{- /* we only want to include non-core team contributors */}}
{{- if not (has $authorLower $maintainers)}}
{{- $changeList := splitList " " $change.Custom.Author }}
{{- $IssueList := list }}
{{- $changeLink := $change.Kind }}
{{- if or (eq $change.Kind "Dependencies") (eq $change.Kind "Security") }}
{{- $changes := splitList " " $change.Custom.PR }}
{{- range $issueNbr := $changes }}
{{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $issueNbr }}
{{- $IssueList = append $IssueList $changeLink }}
{{- end -}}
{{- else }}
{{- $changes := splitList " " $change.Custom.Issue }}
{{- range $issueNbr := $changes }}
{{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/issues/nbr)" | replace "nbr" $issueNbr }}
{{- $IssueList = append $IssueList $changeLink }}
{{- end -}}
{{- end }}
{{- /* check if this contributor has other changes associated with them already */}}
{{- if hasKey $contributorDict $author }}
{{- $contributionList := get $contributorDict $author }}
{{- $contributionList = concat $contributionList $IssueList }}
{{- $contributorDict := set $contributorDict $author $contributionList }}
{{- else }}
{{- $contributionList := $IssueList }}
{{- $contributorDict := set $contributorDict $author $contributionList }}
{{- end }}
{{- end}}
{{- if not (has $authorLower $core_team)}}
{{- /* Docs kind link back to dbt-docs instead of dbt-core PRs */}}
{{- $prLink := $change.Kind }}
{{- if eq $change.Kind "Docs" }}
{{- $prLink = "[dbt-docs/#pr](https://github.com/dbt-labs/dbt-docs/pull/pr)" | replace "pr" $change.Custom.PR }}
{{- else }}
{{- $prLink = "[#pr](https://github.com/dbt-labs/dbt-core/pull/pr)" | replace "pr" $change.Custom.PR }}
{{- end }}
{{- /* check if this contributor has other PRs associated with them already */}}
{{- if hasKey $contributorDict $author }}
{{- $prList := get $contributorDict $author }}
{{- $prList = append $prList $prLink }}
{{- $contributorDict := set $contributorDict $author $prList }}
{{- else }}
{{- $prList := list $prLink }}
{{- $contributorDict := set $contributorDict $author $prList }}
{{- end }}
{{- end}}
{{- end}}
{{- end }}
{{- /* no indentation here for formatting so the final markdown doesn't have unneeded indentations */}}

@@ -9,4 +9,4 @@ ignore =
E203 # makes Flake8 work like black
E741
E501 # long line checking is done in black
exclude = test/
exclude = test

.gitattributes

@@ -1,6 +0,0 @@
core/dbt/include/index.html binary
tests/functional/artifacts/data/state/*/manifest.json binary
core/dbt/docs/build/html/searchindex.js binary
core/dbt/docs/build/html/index.html binary
performance/runner/Cargo.lock binary
core/dbt/events/types_pb2.py binary

.github/CODEOWNERS

@@ -11,24 +11,44 @@
# As a default for areas with no assignment,
# the core team as a whole will be assigned
* @dbt-labs/core-team
* @dbt-labs/core
### OSS Tooling Guild
# Changes to GitHub configurations including Actions
/.github/ @leahwicz
/.github/ @dbt-labs/guild-oss-tooling
.bumpversion.cfg @dbt-labs/guild-oss-tooling
### LANGUAGE
.changie.yaml @dbt-labs/guild-oss-tooling
# Language core modules
/core/dbt/config/ @dbt-labs/core-language
/core/dbt/context/ @dbt-labs/core-language
/core/dbt/contracts/ @dbt-labs/core-language
/core/dbt/deps/ @dbt-labs/core-language
/core/dbt/events/ @dbt-labs/core-language # structured logging
/core/dbt/parser/ @dbt-labs/core-language
pre-commit-config.yaml @dbt-labs/guild-oss-tooling
pytest.ini @dbt-labs/guild-oss-tooling
tox.ini @dbt-labs/guild-oss-tooling
# Language misc files
/core/dbt/dataclass_schema.py @dbt-labs/core-language
/core/dbt/hooks.py @dbt-labs/core-language
/core/dbt/node_types.py @dbt-labs/core-language
/core/dbt/semver.py @dbt-labs/core-language
### EXECUTION
# Execution core modules
/core/dbt/graph/ @dbt-labs/core-execution
/core/dbt/task/ @dbt-labs/core-execution
# Execution misc files
/core/dbt/compilation.py @dbt-labs/core-execution
/core/dbt/flags.py @dbt-labs/core-execution
/core/dbt/lib.py @dbt-labs/core-execution
/core/dbt/main.py @dbt-labs/core-execution
/core/dbt/profiler.py @dbt-labs/core-execution
/core/dbt/selected_resources.py @dbt-labs/core-execution
/core/dbt/tracking.py @dbt-labs/core-execution
/core/dbt/version.py @dbt-labs/core-execution
pyproject.toml @dbt-labs/guild-oss-tooling
requirements.txt @dbt-labs/guild-oss-tooling
dev_requirements.txt @dbt-labs/guild-oss-tooling
/core/setup.py @dbt-labs/guild-oss-tooling
/core/MANIFEST.in @dbt-labs/guild-oss-tooling
### ADAPTERS
@@ -40,7 +60,6 @@ dev_requirements.txt @dbt-labs/guild-oss-tooling
# Postgres plugin
/plugins/ @dbt-labs/core-adapters
/plugins/postgres/setup.py @dbt-labs/core-adapters @dbt-labs/guild-oss-tooling
# Functional tests for adapter plugins
/tests/adapter @dbt-labs/core-adapters
@@ -52,9 +71,5 @@ dev_requirements.txt @dbt-labs/guild-oss-tooling
# Perf regression testing framework
# This excludes the test project files itself since those aren't specific
# framework changes (excluded by not setting an owner next to it- no owner)
/performance @nathaniel-may
/performance/projects
### ARTIFACTS
/schemas/dbt @dbt-labs/cloud-artifacts

.github/_README.md

@@ -63,12 +63,12 @@ permissions:
contents: read
pull-requests: write
```
### Secrets
- When to use a [Personal Access Token (PAT)](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token) vs the [GITHUB_TOKEN](https://docs.github.com/en/actions/security-guides/automatic-token-authentication) generated for the action?
The `GITHUB_TOKEN` is used by default. In most cases it is sufficient for what you need.
If you expect the workflow to result in a commit that should retrigger workflows, you will need to use a Personal Access Token for the bot to commit the file. When using the GITHUB_TOKEN, the resulting commit will not trigger another GitHub Actions Workflow run. This is due to limitations set by GitHub. See [the docs](https://docs.github.com/en/actions/security-guides/automatic-token-authentication#using-the-github_token-in-a-workflow) for a more detailed explanation.
For example, we must use a PAT in our workflow to commit a new changelog yaml file for bot PRs. Once the file has been committed to the branch, it should retrigger the check to validate that a changelog exists on the PR. Otherwise, it would stay in a failed state since the check would never retrigger.
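A minimal sketch of that pattern, assuming a job with write permissions (the step names are illustrative; the PAT secret name matches the `FISHTOWN_BOT_PAT` used elsewhere in this diff). Checking out with the PAT instead of the default `GITHUB_TOKEN` means the pushed commit retriggers downstream workflows:

```yaml
- name: Check out with a PAT so pushed commits retrigger checks
  uses: actions/checkout@v3
  with:
    token: ${{ secrets.FISHTOWN_BOT_PAT }}

- name: Commit and push a generated changelog yaml
  run: |
    git config user.name "Github Build Bot"
    git config user.email "buildbot@fishtownanalytics.com"
    git add .changes/unreleased/
    git commit -m "Add automated changelog yaml from template for bot PR"
    git push
```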
@@ -105,7 +105,7 @@ Some triggers of note that we use:
```
# **what?**
# Describe what the action does.
# **why?**
# Why does this action exist?
@@ -138,7 +138,7 @@ Some triggers of note that we use:
id: fp
run: |
FILEPATH=.changes/unreleased/Dependencies-${{ steps.filename_time.outputs.time }}.yaml
echo "FILEPATH=$FILEPATH" >> $GITHUB_OUTPUT
echo "::set-output name=FILEPATH::$FILEPATH"
```
- Print out all variables you will reference as the first step of a job. This allows for easier debugging. The first job should log all inputs. Subsequent jobs should reference outputs of other jobs, if present.
@@ -158,14 +158,14 @@ Some triggers of note that we use:
echo "The build_script_path: ${{ inputs.build_script_path }}"
echo "The s3_bucket_name: ${{ inputs.s3_bucket_name }}"
echo "The package_test_command: ${{ inputs.package_test_command }}"
# collect all the variables that need to be used in subsequent jobs
- name: Set Variables
id: variables
run: |
echo "important_path='performance/runner/Cargo.toml'" >> $GITHUB_OUTPUT
echo "release_id=${{github.event.inputs.release_id}}" >> $GITHUB_OUTPUT
echo "open_prs=${{github.event.inputs.open_prs}}" >> $GITHUB_OUTPUT
echo "::set-output name=important_path::'performance/runner/Cargo.toml'"
echo "::set-output name=release_id::${{github.event.inputs.release_id}}"
echo "::set-output name=open_prs::${{github.event.inputs.open_prs}}"
job2:
needs: [job1]
@@ -190,14 +190,14 @@ ___
### Actions from the Marketplace
- Don't use external actions for things that can easily be accomplished manually.
- Always read through what an external action does before using it! Often an action in the GitHub Actions Marketplace can be replaced with a few lines in bash. This is much more maintainable (and won't change under us) and clear as to what's actually happening. It also prevents any
- Pin actions _we don't control_ to tags (see the sketch below).
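A short sketch of the two pinning styles, using actions that appear later in this diff: pinning to a tag trusts the tag not to move, while pinning to a full commit SHA is immutable.

```yaml
# pinned to a tag
- uses: ravsamhq/notify-slack-action@v1

# pinned to an exact commit SHA (the commit tagged v4 of actions/stale)
- uses: actions/stale@cdf15f641adb27a71842045a94023bef6945e3aa
```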
### Connecting to AWS
- Authenticate with the aws managed workflow
```yaml
- name: Configure AWS credentials from Test account
uses: aws-actions/configure-aws-credentials@v2
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
@@ -208,7 +208,7 @@ ___
```yaml
- name: Copy Artifacts from S3 via CLI
run: aws s3 cp ${{ env.s3_bucket }} . --recursive
```
### Testing

@@ -35,7 +35,7 @@ jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v1
- name: Wrangle latest tag
id: is_latest
uses: ./.github/actions/latest-wrangler

@@ -13,7 +13,7 @@ jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v1
- name: Wrangle latest tag
id: is_latest
uses: ./.github/actions/latest-wrangler

@@ -28,12 +28,11 @@ if __name__ == "__main__":
if package_request.status_code == 404:
if halt_on_missing:
sys.exit(1)
# everything is the latest if the package doesn't exist
github_output = os.environ.get("GITHUB_OUTPUT")
with open(github_output, "at", encoding="utf-8") as gh_output:
gh_output.write("latest=True")
gh_output.write("minor_latest=True")
sys.exit(0)
else:
# everything is the latest if the package doesn't exist
print(f"::set-output name=latest::{True}")
print(f"::set-output name=minor_latest::{True}")
sys.exit(0)
# TODO: verify package meta is "correct"
# https://github.com/dbt-labs/dbt-core/issues/4640
@@ -92,7 +91,5 @@ if __name__ == "__main__":
latest = is_latest(pre_rel, new_version, current_latest)
minor_latest = is_latest(pre_rel, new_version, current_minor_latest)
github_output = os.environ.get("GITHUB_OUTPUT")
with open(github_output, "at", encoding="utf-8") as gh_output:
gh_output.write(f"latest={latest}")
gh_output.write(f"minor_latest={minor_latest}")
print(f"::set-output name=latest::{latest}")
print(f"::set-output name=minor_latest::{minor_latest}")

@@ -1,35 +1,23 @@
resolves #
[docs](https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose) dbt-labs/docs.getdbt.com/#
resolves #
<!---
Include the number of the issue addressed by this PR above if applicable.
PRs for code changes without an associated issue *will not be merged*.
See CONTRIBUTING.md for more information.
Include the number of the docs issue that was opened for this PR. If
this change has no user-facing implications, "N/A" suffices instead. New
docs tickets can be created by clicking the link above or by going to
https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose.
-->
### Problem
### Description
<!---
Describe the problem this PR is solving. What is the application state
before this PR is merged?
-->
### Solution
<!---
Describe the way this PR solves the above problem. Add as much detail as you
can to help reviewers understand your changes. Include any alternatives and
tradeoffs you considered.
Describe the Pull Request here. Add any references and info to help reviewers
understand your changes. Include any tradeoffs you considered.
-->
### Checklist
- [ ] I have read [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md) and understand what's expected of me
- [ ] I have run this code in development and it appears to resolve the stated issue
- [ ] I have read [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md) and understand what's expected of me
- [ ] I have signed the [CLA](https://docs.getdbt.com/docs/contributor-license-agreements)
- [ ] I have run this code in development and it appears to resolve the stated issue
- [ ] This PR includes tests, or tests are not required/relevant for this PR
- [ ] This PR has no interface changes (e.g. macros, cli, logs, json artifacts, config files, adapter interface, etc) or this PR has already received feedback and approval from Product or DX
- [ ] I have [opened an issue to add/update docs](https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose), or docs changes are not required/relevant for this PR
- [ ] I have run `changie new` to [create a changelog entry](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-a-changelog-entry)

@@ -35,6 +35,6 @@ jobs:
github.event.pull_request.merged
&& contains(github.event.label.name, 'backport')
steps:
- uses: tibdex/backport@v2.0.3
- uses: tibdex/backport@v2.0.2
with:
github_token: ${{ secrets.GITHUB_TOKEN }}

@@ -40,7 +40,7 @@ jobs:
matrix:
include:
- label: "dependencies"
changie_kind: "Dependencies"
changie_kind: "Dependency"
- label: "snyk"
changie_kind: "Security"
runs-on: ubuntu-latest
@@ -50,7 +50,7 @@ jobs:
- name: Create and commit changelog on bot PR
if: ${{ contains(github.event.pull_request.labels.*.name, matrix.label) }}
id: bot_changelog
uses: emmyoop/changie_bot@v1.1.0
uses: emmyoop/changie_bot@v1.0.1
with:
GITHUB_TOKEN: ${{ secrets.FISHTOWN_BOT_PAT }}
commit_author_name: "Github Build Bot"
@@ -58,4 +58,4 @@ jobs:
commit_message: "Add automated changelog yaml from template for bot PR"
changie_kind: ${{ matrix.changie_kind }}
label: ${{ matrix.label }}
custom_changelog_string: "custom:\n Author: ${{ github.event.pull_request.user.login }}\n PR: ${{ github.event.pull_request.number }}"
custom_changelog_string: "custom:\n Author: ${{ github.event.pull_request.user.login }}\n Issue: 4904\n PR: ${{ github.event.pull_request.number }}"

@@ -1,41 +0,0 @@
# **what?**
# Cuts a new `*.latest` branch
# Also cleans up all files in `.changes/unreleased` and `.changes/previous version` on
# `main` and bumps `main` to the input version.
# **why?**
# Generally reduces the workload of engineers and reduces error. Allow automation.
# **when?**
# This will run when called manually.
name: Cut new release branch
on:
workflow_dispatch:
inputs:
version_to_bump_main:
description: 'The alpha version main should bump to (ex. 1.6.0a1)'
required: true
new_branch_name:
description: 'The full name of the new branch (ex. 1.5.latest)'
required: true
defaults:
run:
shell: bash
permissions:
contents: write
jobs:
cut_branch:
name: "Cut branch and clean up main for dbt-core"
uses: dbt-labs/actions/.github/workflows/cut-release-branch.yml@main
with:
version_to_bump_main: ${{ inputs.version_to_bump_main }}
new_branch_name: ${{ inputs.new_branch_name }}
PR_title: "Cleanup main after cutting new ${{ inputs.new_branch_name }} branch"
PR_body: "All adapter PRs will fail CI until the dbt-core PR has been merged due to release version conflicts."
secrets:
FISHTOWN_BOT_PAT: ${{ secrets.FISHTOWN_BOT_PAT }}

@@ -18,8 +18,8 @@ permissions:
issues: write
jobs:
call-creation-action:
uses: dbt-labs/actions/.github/workflows/jira-creation-actions.yml@main
call-label-action:
uses: dbt-labs/jira-actions/.github/workflows/jira-creation.yml@main
secrets:
JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}

@@ -19,7 +19,7 @@ permissions:
jobs:
call-label-action:
uses: dbt-labs/actions/.github/workflows/jira-label-actions.yml@main
uses: dbt-labs/jira-actions/.github/workflows/jira-label.yml@main
secrets:
JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}

@@ -19,8 +19,8 @@ on:
permissions: read-all
jobs:
call-transition-action:
uses: dbt-labs/actions/.github/workflows/jira-transition-actions.yml@main
call-label-action:
uses: dbt-labs/jira-actions/.github/workflows/jira-transition.yml@main
secrets:
JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}

@@ -42,10 +42,10 @@ jobs:
steps:
- name: Check out the repository
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Set up Python
uses: actions/setup-python@v4
uses: actions/setup-python@v4.3.0
with:
python-version: '3.8'
@@ -53,8 +53,12 @@ jobs:
run: |
python -m pip install --user --upgrade pip
python -m pip --version
make dev
python -m pip install pre-commit
pre-commit --version
python -m pip install mypy==0.942
mypy --version
python -m pip install -r requirements.txt
python -m pip install -r dev-requirements.txt
dbt --version
- name: Run pre-commit hooks
@@ -69,17 +73,18 @@ jobs:
strategy:
fail-fast: false
matrix:
python-version: ["3.8", "3.9", "3.10", "3.11"]
python-version: ["3.7", "3.8", "3.9", "3.10"]
env:
TOXENV: "unit"
PYTEST_ADDOPTS: "-v --color=yes --csv unit_results.csv"
steps:
- name: Check out the repository
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
uses: actions/setup-python@v4.3.0
with:
python-version: ${{ matrix.python-version }}
@@ -96,27 +101,25 @@ jobs:
- name: Get current date
if: always()
id: date
run: |
CURRENT_DATE=$(date +'%Y-%m-%dT%H_%M_%S') # no colons allowed for artifacts
echo "date=$CURRENT_DATE" >> $GITHUB_OUTPUT
run: echo "::set-output name=date::$(date +'%Y-%m-%dT%H_%M_%S')" #no colons allowed for artifacts
- name: Upload Unit Test Coverage to Codecov
if: ${{ matrix.python-version == '3.11' }}
uses: codecov/codecov-action@v3
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
- uses: actions/upload-artifact@v2
if: always()
with:
name: unit_results_${{ matrix.python-version }}-${{ steps.date.outputs.date }}.csv
path: unit_results.csv
integration:
name: integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}
runs-on: ${{ matrix.os }}
timeout-minutes: 60
timeout-minutes: 45
strategy:
fail-fast: false
matrix:
python-version: ["3.8", "3.9", "3.10", "3.11"]
os: [ubuntu-20.04]
python-version: ["3.7", "3.8", "3.9", "3.10"]
os: [ubuntu-latest]
include:
- python-version: 3.8
os: windows-latest
@@ -125,22 +128,18 @@ jobs:
env:
TOXENV: integration
PYTEST_ADDOPTS: "-v --color=yes -n4 --csv integration_results.csv"
DBT_INVOCATION_ENV: github-actions
DBT_TEST_USER_1: dbt_test_user_1
DBT_TEST_USER_2: dbt_test_user_2
DBT_TEST_USER_3: dbt_test_user_3
DD_CIVISIBILITY_AGENTLESS_ENABLED: true
DD_API_KEY: ${{ secrets.DATADOG_API_KEY }}
DD_SITE: datadoghq.com
DD_ENV: ci
DD_SERVICE: ${{ github.event.repository.name }}
steps:
- name: Check out the repository
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
uses: actions/setup-python@v4.3.0
with:
python-version: ${{ matrix.python-version }}
@@ -164,26 +163,24 @@ jobs:
tox --version
- name: Run tests
run: tox -- --ddtrace
run: tox
- name: Get current date
if: always()
id: date
run: |
CURRENT_DATE=$(date +'%Y-%m-%dT%H_%M_%S') # no colons allowed for artifacts
echo "date=$CURRENT_DATE" >> $GITHUB_OUTPUT
run: echo "::set-output name=date::$(date +'%Y_%m_%dT%H_%M_%S')" #no colons allowed for artifacts
- uses: actions/upload-artifact@v3
- uses: actions/upload-artifact@v2
if: always()
with:
name: logs_${{ matrix.python-version }}_${{ matrix.os }}_${{ steps.date.outputs.date }}
path: ./logs
- name: Upload Integration Test Coverage to Codecov
if: ${{ matrix.python-version == '3.11' }}
uses: codecov/codecov-action@v3
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
- uses: actions/upload-artifact@v2
if: always()
with:
name: integration_results_${{ matrix.python-version }}_${{ matrix.os }}_${{ steps.date.outputs.date }}.csv
path: integration_results.csv
build:
name: build packages
@@ -192,10 +189,10 @@ jobs:
steps:
- name: Check out the repository
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Set up Python
uses: actions/setup-python@v4
uses: actions/setup-python@v4.3.0
with:
python-version: '3.8'

@@ -1,265 +0,0 @@
# **what?**
# This workflow models the performance characteristics of a point in time in dbt.
# It runs specific dbt commands on committed projects multiple times to create and
# commit information about the distribution to the current branch. For more information
# see the readme in the performance module at /performance/README.md.
#
# **why?**
# When developing new features, we can take quick performance samples and compare
# them against the committed baseline measurements produced by this workflow to detect
# some performance regressions at development time before they reach users.
#
# **when?**
# This is only run once directly after each release (for non-prereleases). If for some
# reason the results of a run are not satisfactory, it can also be triggered manually.
name: Model Performance Characteristics
on:
# runs after non-prereleases are published.
release:
types: [released]
# run manually from the actions tab
workflow_dispatch:
inputs:
release_id:
description: 'dbt version to model (must be non-prerelease in Pypi)'
type: string
required: true
env:
RUNNER_CACHE_PATH: performance/runner/target/release/runner
# both jobs need to write
permissions:
contents: write
pull-requests: write
jobs:
set-variables:
name: Setting Variables
runs-on: ubuntu-latest
outputs:
cache_key: ${{ steps.variables.outputs.cache_key }}
release_id: ${{ steps.semver.outputs.base-version }}
release_branch: ${{ steps.variables.outputs.release_branch }}
steps:
# explicitly checkout the performance runner from main regardless of which
# version we are modeling.
- name: Checkout
uses: actions/checkout@v3
with:
ref: main
- name: Parse version into parts
id: semver
uses: dbt-labs/actions/parse-semver@v1
with:
version: ${{ github.event.inputs.release_id || github.event.release.tag_name }}
# collect all the variables that need to be used in subsequent jobs
- name: Set variables
id: variables
run: |
# create a cache key that will be used in the next job. without this the
# next job would have to checkout from main and hash the files itself.
echo "cache_key=${{ runner.os }}-${{ hashFiles('performance/runner/Cargo.toml')}}-${{ hashFiles('performance/runner/src/*') }}" >> $GITHUB_OUTPUT
branch_name="${{steps.semver.outputs.major}}.${{steps.semver.outputs.minor}}.latest"
echo "release_branch=$branch_name" >> $GITHUB_OUTPUT
echo "release branch is inferred to be ${branch_name}"
latest-runner:
name: Build or Fetch Runner
runs-on: ubuntu-latest
needs: [set-variables]
env:
RUSTFLAGS: "-D warnings"
steps:
- name: '[DEBUG] print variables'
run: |
echo "all variables defined in set-variables"
echo "cache_key: ${{ needs.set-variables.outputs.cache_key }}"
echo "release_id: ${{ needs.set-variables.outputs.release_id }}"
echo "release_branch: ${{ needs.set-variables.outputs.release_branch }}"
# explicitly checkout the performance runner from main regardless of which
# version we are modeling.
- name: Checkout
uses: actions/checkout@v3
with:
ref: main
# attempts to access a previously cached runner
- uses: actions/cache@v3
id: cache
with:
path: ${{ env.RUNNER_CACHE_PATH }}
key: ${{ needs.set-variables.outputs.cache_key }}
- name: Fetch Rust Toolchain
if: steps.cache.outputs.cache-hit != 'true'
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- name: Add fmt
if: steps.cache.outputs.cache-hit != 'true'
run: rustup component add rustfmt
- name: Cargo fmt
if: steps.cache.outputs.cache-hit != 'true'
uses: actions-rs/cargo@v1
with:
command: fmt
args: --manifest-path performance/runner/Cargo.toml --all -- --check
- name: Test
if: steps.cache.outputs.cache-hit != 'true'
uses: actions-rs/cargo@v1
with:
command: test
args: --manifest-path performance/runner/Cargo.toml
- name: Build (optimized)
if: steps.cache.outputs.cache-hit != 'true'
uses: actions-rs/cargo@v1
with:
command: build
args: --release --manifest-path performance/runner/Cargo.toml
# the cache action automatically caches this binary at the end of the job
model:
# depends on `latest-runner` as a separate job so that failures in this job do not prevent
# a successfully tested and built binary from being cached.
needs: [set-variables, latest-runner]
name: Model a release
runs-on: ubuntu-latest
steps:
- name: '[DEBUG] print variables'
run: |
echo "all variables defined in set-variables"
echo "cache_key: ${{ needs.set-variables.outputs.cache_key }}"
echo "release_id: ${{ needs.set-variables.outputs.release_id }}"
echo "release_branch: ${{ needs.set-variables.outputs.release_branch }}"
- name: Setup Python
uses: actions/setup-python@v4
with:
python-version: "3.8"
- name: Install dbt
run: pip install dbt-postgres==${{ needs.set-variables.outputs.release_id }}
- name: Install Hyperfine
run: wget https://github.com/sharkdp/hyperfine/releases/download/v1.11.0/hyperfine_1.11.0_amd64.deb && sudo dpkg -i hyperfine_1.11.0_amd64.deb
# explicitly checkout main to get the latest project definitions
- name: Checkout
uses: actions/checkout@v3
with:
ref: main
# this was built in the previous job so it will be there.
- name: Fetch Runner
uses: actions/cache@v3
id: cache
with:
path: ${{ env.RUNNER_CACHE_PATH }}
key: ${{ needs.set-variables.outputs.cache_key }}
- name: Move Runner
run: mv performance/runner/target/release/runner performance/app
- name: Change Runner Permissions
run: chmod +x ./performance/app
- name: '[DEBUG] ls baseline directory before run'
run: ls -R performance/baselines/
# `${{ github.workspace }}` is used to pass the absolute path
- name: Create directories
run: |
mkdir ${{ github.workspace }}/performance/tmp/
mkdir -p performance/baselines/${{ needs.set-variables.outputs.release_id }}/
# Run modeling with taking 20 samples
- name: Run Measurement
run: |
performance/app model -v ${{ needs.set-variables.outputs.release_id }} -b ${{ github.workspace }}/performance/baselines/ -p ${{ github.workspace }}/performance/projects/ -t ${{ github.workspace }}/performance/tmp/ -n 20
- name: '[DEBUG] ls baseline directory after run'
run: ls -R performance/baselines/
- uses: actions/upload-artifact@v3
with:
name: baseline
path: performance/baselines/${{ needs.set-variables.outputs.release_id }}/
create-pr:
name: Open PR for ${{ matrix.base-branch }}
# depends on `model` as a separate job so that the baseline can be committed to more than one branch
# i.e. release branch and main
needs: [set-variables, latest-runner, model]
runs-on: ubuntu-latest
strategy:
matrix:
include:
- base-branch: refs/heads/main
target-branch: performance-bot/main_${{ needs.set-variables.outputs.release_id }}_${{GITHUB.RUN_ID}}
- base-branch: refs/heads/${{ needs.set-variables.outputs.release_branch }}
target-branch: performance-bot/release_${{ needs.set-variables.outputs.release_id }}_${{GITHUB.RUN_ID}}
steps:
- name: '[DEBUG] print variables'
run: |
echo "all variables defined in set-variables"
echo "cache_key: ${{ needs.set-variables.outputs.cache_key }}"
echo "release_id: ${{ needs.set-variables.outputs.release_id }}"
echo "release_branch: ${{ needs.set-variables.outputs.release_branch }}"
- name: Checkout
uses: actions/checkout@v3
with:
ref: ${{ matrix.base-branch }}
- name: Create PR branch
run: |
git checkout -b ${{ matrix.target-branch }}
git push origin ${{ matrix.target-branch }}
git branch --set-upstream-to=origin/${{ matrix.target-branch }} ${{ matrix.target-branch }}
- uses: actions/download-artifact@v3
with:
name: baseline
path: performance/baselines/${{ needs.set-variables.outputs.release_id }}
- name: '[DEBUG] ls baselines after artifact download'
run: ls -R performance/baselines/
- name: Commit baseline
uses: EndBug/add-and-commit@v9
with:
add: 'performance/baselines/*'
author_name: 'Github Build Bot'
author_email: 'buildbot@fishtownanalytics.com'
message: 'adding performance baseline for ${{ needs.set-variables.outputs.release_id }}'
push: 'origin origin/${{ matrix.target-branch }}'
- name: Create Pull Request
uses: peter-evans/create-pull-request@v5
with:
author: 'Github Build Bot <buildbot@fishtownanalytics.com>'
base: ${{ matrix.base-branch }}
branch: '${{ matrix.target-branch }}'
title: 'Adding performance modeling for ${{needs.set-variables.outputs.release_id}} to ${{ matrix.base-branch }}'
body: 'Committing perf results for tracking for the ${{needs.set-variables.outputs.release_id}}'
labels: |
Skip Changelog
Performance

@@ -1,109 +0,0 @@
# **what?**
# Nightly releases to GitHub and PyPI. This workflow produces the following outcome:
# - generate and validate data for nightly release (commit SHA, version number, release branch);
# - pass data to release workflow;
# - nightly release will be pushed to GitHub as a draft release;
# - nightly build will be pushed to test PyPI;
#
# **why?**
# Ensure an automated and tested release process for nightly builds
#
# **when?**
# This workflow runs on schedule or can be run manually on demand.
name: Nightly Test Release to GitHub and PyPI
on:
workflow_dispatch: # for manual triggering
schedule:
- cron: 0 9 * * *
permissions:
contents: write # this is the permission that allows creating a new release
defaults:
run:
shell: bash
env:
RELEASE_BRANCH: "main"
jobs:
aggregate-release-data:
runs-on: ubuntu-latest
outputs:
commit_sha: ${{ steps.resolve-commit-sha.outputs.release_commit }}
version_number: ${{ steps.nightly-release-version.outputs.number }}
release_branch: ${{ steps.release-branch.outputs.name }}
steps:
- name: "Checkout ${{ github.repository }} Branch ${{ env.RELEASE_BRANCH }}"
uses: actions/checkout@v3
with:
ref: ${{ env.RELEASE_BRANCH }}
- name: "Resolve Commit To Release"
id: resolve-commit-sha
run: |
commit_sha=$(git rev-parse HEAD)
echo "release_commit=$commit_sha" >> $GITHUB_OUTPUT
- name: "Get Current Version Number"
id: version-number-sources
run: |
current_version=`awk -F"current_version = " '{print $2}' .bumpversion.cfg | tr '\n' ' '`
echo "current_version=$current_version" >> $GITHUB_OUTPUT
- name: "Audit Version And Parse Into Parts"
id: semver
uses: dbt-labs/actions/parse-semver@v1.1.0
with:
version: ${{ steps.version-number-sources.outputs.current_version }}
- name: "Get Current Date"
id: current-date
run: echo "date=$(date +'%m%d%Y')" >> $GITHUB_OUTPUT
- name: "Generate Nightly Release Version Number"
id: nightly-release-version
run: |
number="${{ steps.semver.outputs.version }}.dev${{ steps.current-date.outputs.date }}"
echo "number=$number" >> $GITHUB_OUTPUT
- name: "Audit Nightly Release Version And Parse Into Parts"
uses: dbt-labs/actions/parse-semver@v1.1.0
with:
version: ${{ steps.nightly-release-version.outputs.number }}
- name: "Set Release Branch"
id: release-branch
run: |
echo "name=${{ env.RELEASE_BRANCH }}" >> $GITHUB_OUTPUT
log-outputs-aggregate-release-data:
runs-on: ubuntu-latest
needs: [aggregate-release-data]
steps:
- name: "[DEBUG] Log Outputs"
run: |
echo commit_sha : ${{ needs.aggregate-release-data.outputs.commit_sha }}
echo version_number: ${{ needs.aggregate-release-data.outputs.version_number }}
echo release_branch: ${{ needs.aggregate-release-data.outputs.release_branch }}
release-github-pypi:
needs: [aggregate-release-data]
uses: ./.github/workflows/release.yml
with:
sha: ${{ needs.aggregate-release-data.outputs.commit_sha }}
target_branch: ${{ needs.aggregate-release-data.outputs.release_branch }}
version_number: ${{ needs.aggregate-release-data.outputs.version_number }}
build_script_path: "scripts/build-dist.sh"
env_setup_script_path: "scripts/env-setup.sh"
s3_bucket_name: "core-team-artifacts"
package_test_command: "dbt --version"
test_run: true
nightly_release: true
secrets: inherit

@@ -1,7 +1,11 @@
# **what?**
# The purpose of this workflow is to trigger CI to run for each
# release branch and main branch on a regular cadence. If the CI workflow
# fails for a branch, it will post to #dev-core-alerts to raise awareness.
# fails for a branch, it will post to dev-core-alerts to raise awareness.
# The 'aurelien-baudet/workflow-dispatch' Action triggers the existing
# CI workflow file on the given branch to run so that even if we change the
# CI workflow file in the future, the one that is tailored for the given
# release branch will be used.
# **why?**
# Ensures release branches and main are always shippable and not broken.
@@ -24,8 +28,35 @@ on:
permissions: read-all
jobs:
run_tests:
uses: dbt-labs/actions/.github/workflows/release-branch-tests.yml@main
with:
workflows_to_run: '["main.yml"]'
secrets: inherit
kick-off-ci:
name: Kick-off CI
runs-on: ubuntu-latest
strategy:
# must run CI 1 branch at a time b/c the workflow-dispatch Action polls for
# latest run for results and it gets confused when we kick off multiple runs
# at once. There is a race condition so we will just run in sequential order.
max-parallel: 1
fail-fast: false
matrix:
branch: [1.0.latest, 1.1.latest, 1.2.latest, 1.3.latest, main]
steps:
- name: Call CI workflow for ${{ matrix.branch }} branch
id: trigger-step
uses: aurelien-baudet/workflow-dispatch@v2.1.1
with:
workflow: main.yml
ref: ${{ matrix.branch }}
token: ${{ secrets.FISHTOWN_BOT_PAT }}
- name: Post failure to Slack
uses: ravsamhq/notify-slack-action@v1
if: ${{ always() && !contains(steps.trigger-step.outputs.workflow-conclusion,'success') }}
with:
status: ${{ job.status }}
notification_title: 'dbt-core scheduled run of "${{ matrix.branch }}" branch not successful'
message_format: ':x: CI on branch "${{ matrix.branch }}" ${{ steps.trigger-step.outputs.workflow-conclusion }}'
footer: 'Linked failed CI run ${{ steps.trigger-step.outputs.workflow-url }}'
env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_DEV_CORE_ALERTS }}

@@ -36,14 +36,14 @@ jobs:
latest: ${{ steps.latest.outputs.latest }}
minor_latest: ${{ steps.latest.outputs.minor_latest }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v1
- name: Split version
id: version
run: |
IFS="." read -r MAJOR MINOR PATCH <<< ${{ github.event.inputs.version_number }}
echo "major=$MAJOR" >> $GITHUB_OUTPUT
echo "minor=$MINOR" >> $GITHUB_OUTPUT
echo "patch=$PATCH" >> $GITHUB_OUTPUT
echo "::set-output name=major::$MAJOR"
echo "::set-output name=minor::$MINOR"
echo "::set-output name=patch::$PATCH"
- name: Is pkg 'latest'
id: latest
@@ -60,7 +60,7 @@ jobs:
needs: [get_version_meta]
steps:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v1
build_and_push:
name: Build images and push to GHCR
@@ -70,20 +70,18 @@ jobs:
- name: Get docker build arg
id: build_arg
run: |
BUILD_ARG_NAME=$(echo ${{ github.event.inputs.package }} | sed 's/\-/_/g')
BUILD_ARG_VALUE=$(echo ${{ github.event.inputs.package }} | sed 's/postgres/core/g')
echo "build_arg_name=$BUILD_ARG_NAME" >> $GITHUB_OUTPUT
echo "build_arg_value=$BUILD_ARG_VALUE" >> $GITHUB_OUTPUT
echo "::set-output name=build_arg_name::"$(echo ${{ github.event.inputs.package }} | sed 's/\-/_/g')
echo "::set-output name=build_arg_value::"$(echo ${{ github.event.inputs.package }} | sed 's/postgres/core/g')
- name: Log in to the GHCR
uses: docker/login-action@v2
uses: docker/login-action@v1
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push MAJOR.MINOR.PATCH tag
uses: docker/build-push-action@v4
uses: docker/build-push-action@v2
with:
file: docker/Dockerfile
push: True
@@ -94,7 +92,7 @@ jobs:
ghcr.io/dbt-labs/${{ github.event.inputs.package }}:${{ github.event.inputs.version_number }}
- name: Build and push MINOR.latest tag
uses: docker/build-push-action@v4
uses: docker/build-push-action@v2
if: ${{ needs.get_version_meta.outputs.minor_latest == 'True' }}
with:
file: docker/Dockerfile
@@ -106,7 +104,7 @@ jobs:
ghcr.io/dbt-labs/${{ github.event.inputs.package }}:${{ needs.get_version_meta.outputs.major }}.${{ needs.get_version_meta.outputs.minor }}.latest
- name: Build and push latest tag
uses: docker/build-push-action@v4
uses: docker/build-push-action@v2
if: ${{ needs.get_version_meta.outputs.latest == 'True' }}
with:
file: docker/Dockerfile

@@ -1,110 +1,24 @@
# **what?**
# Release workflow provides the following steps:
# - checkout the given commit;
# - validate version in sources and changelog file for given version;
# - bump the version and generate a changelog if needed;
# - merge all changes to the target branch if needed;
# - run unit and integration tests against given commit;
# - build and package that SHA;
# - release it to GitHub and PyPI with that specific build;
#
# Take the given commit, run unit tests specifically on that sha, build and
# package it, and then release to GitHub and PyPi with that specific build
# **why?**
# Ensure an automated and tested release process
#
# **when?**
# This workflow can be run manually on demand or can be called by other workflows
name: Release to GitHub and PyPI
# **when?**
# This will only run manually with a given sha and version
name: Release to GitHub and PyPi
on:
workflow_dispatch:
inputs:
sha:
description: "The last commit sha in the release"
type: string
required: true
target_branch:
description: "The branch to release from"
type: string
required: true
description: 'The last commit sha in the release'
required: true
version_number:
description: "The release version number (i.e. 1.0.0b1)"
type: string
required: true
build_script_path:
description: "Build script path"
type: string
default: "scripts/build-dist.sh"
required: true
env_setup_script_path:
description: "Environment setup script path"
type: string
default: "scripts/env-setup.sh"
required: false
s3_bucket_name:
description: "AWS S3 bucket name"
type: string
default: "core-team-artifacts"
required: true
package_test_command:
description: "Package test command"
type: string
default: "dbt --version"
required: true
test_run:
description: "Test run (Publish release as draft)"
type: boolean
default: true
required: false
nightly_release:
description: "Nightly release to dev environment"
type: boolean
default: false
required: false
workflow_call:
inputs:
sha:
description: "The last commit sha in the release"
type: string
required: true
target_branch:
description: "The branch to release from"
type: string
required: true
version_number:
description: "The release version number (i.e. 1.0.0b1)"
type: string
required: true
build_script_path:
description: "Build script path"
type: string
default: "scripts/build-dist.sh"
required: true
env_setup_script_path:
description: "Environment setup script path"
type: string
default: "scripts/env-setup.sh"
required: false
s3_bucket_name:
description: "AWS S3 bucket name"
type: string
default: "core-team-artifacts"
required: true
package_test_command:
description: "Package test command"
type: string
default: "dbt --version"
required: true
test_run:
description: "Test run (Publish release as draft)"
type: boolean
default: true
required: false
nightly_release:
description: "Nightly release to dev environment"
type: boolean
default: false
required: false
description: 'The release version number (i.e. 1.0.0b1)'
required: true
permissions:
contents: write # this is the permission that allows creating a new release
@@ -114,116 +28,175 @@ defaults:
shell: bash
jobs:
log-inputs:
name: Log Inputs
unit:
name: Unit test
runs-on: ubuntu-latest
env:
TOXENV: "unit"
steps:
- name: "[DEBUG] Print Variables"
- name: Check out the repository
uses: actions/checkout@v2
with:
persist-credentials: false
ref: ${{ github.event.inputs.sha }}
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: 3.8
- name: Install python dependencies
run: |
echo The last commit sha in the release: ${{ inputs.sha }}
echo The branch to release from: ${{ inputs.target_branch }}
echo The release version number: ${{ inputs.version_number }}
echo Build script path: ${{ inputs.build_script_path }}
echo Environment setup script path: ${{ inputs.env_setup_script_path }}
echo AWS S3 bucket name: ${{ inputs.s3_bucket_name }}
echo Package test command: ${{ inputs.package_test_command }}
echo Test run: ${{ inputs.test_run }}
echo Nightly release: ${{ inputs.nightly_release }}
pip install --user --upgrade pip
pip install tox
pip --version
tox --version
bump-version-generate-changelog:
name: Bump package version, Generate changelog
- name: Run tox
run: tox
uses: dbt-labs/dbt-release/.github/workflows/release-prep.yml@main
with:
sha: ${{ inputs.sha }}
version_number: ${{ inputs.version_number }}
target_branch: ${{ inputs.target_branch }}
env_setup_script_path: ${{ inputs.env_setup_script_path }}
test_run: ${{ inputs.test_run }}
nightly_release: ${{ inputs.nightly_release }}
secrets: inherit
log-outputs-bump-version-generate-changelog:
name: "[Log output] Bump package version, Generate changelog"
if: ${{ !failure() && !cancelled() }}
needs: [bump-version-generate-changelog]
build:
name: build packages
runs-on: ubuntu-latest
steps:
- name: Print variables
- name: Check out the repository
uses: actions/checkout@v2
with:
persist-credentials: false
ref: ${{ github.event.inputs.sha }}
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: 3.8
- name: Install python dependencies
run: |
echo Final SHA : ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
echo Changelog path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}
pip install --user --upgrade pip
pip install --upgrade setuptools wheel twine check-wheel-contents
pip --version
build-test-package:
name: Build, Test, Package
if: ${{ !failure() && !cancelled() }}
needs: [bump-version-generate-changelog]
- name: Build distributions
run: ./scripts/build-dist.sh
uses: dbt-labs/dbt-release/.github/workflows/build.yml@main
- name: Show distributions
run: ls -lh dist/
with:
sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
version_number: ${{ inputs.version_number }}
changelog_path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}
build_script_path: ${{ inputs.build_script_path }}
s3_bucket_name: ${{ inputs.s3_bucket_name }}
package_test_command: ${{ inputs.package_test_command }}
test_run: ${{ inputs.test_run }}
nightly_release: ${{ inputs.nightly_release }}
- name: Check distribution descriptions
run: |
twine check dist/*
secrets:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
- name: Check wheel contents
run: |
check-wheel-contents dist/*.whl --ignore W007,W008
- uses: actions/upload-artifact@v2
with:
name: dist
path: |
dist/
!dist/dbt-${{github.event.inputs.version_number}}.tar.gz
test-build:
name: verify packages
needs: [build, unit]
runs-on: ubuntu-latest
steps:
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: 3.8
- name: Install python dependencies
run: |
pip install --user --upgrade pip
pip install --upgrade wheel
pip --version
- uses: actions/download-artifact@v2
with:
name: dist
path: dist/
- name: Show distributions
run: ls -lh dist/
- name: Install wheel distributions
run: |
find ./dist/*.whl -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/
- name: Check wheel distributions
run: |
dbt --version
- name: Install source distributions
run: |
find ./dist/*.gz -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/
- name: Check source distributions
run: |
dbt --version
github-release:
name: GitHub Release
if: ${{ !failure() && !cancelled() }}
needs: [bump-version-generate-changelog, build-test-package]
needs: test-build
uses: dbt-labs/dbt-release/.github/workflows/github-release.yml@main
runs-on: ubuntu-latest
with:
sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
version_number: ${{ inputs.version_number }}
changelog_path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}
test_run: ${{ inputs.test_run }}
steps:
- uses: actions/download-artifact@v2
with:
name: dist
path: '.'
# Need to set an output variable because env variables can't be taken as input
# This is needed for the next step with releasing to GitHub
- name: Find release type
id: release_type
env:
IS_PRERELEASE: ${{ contains(github.event.inputs.version_number, 'rc') || contains(github.event.inputs.version_number, 'b') }}
run: |
echo ::set-output name=isPrerelease::$IS_PRERELEASE
- name: Creating GitHub Release
uses: softprops/action-gh-release@v1
with:
name: dbt-core v${{github.event.inputs.version_number}}
tag_name: v${{github.event.inputs.version_number}}
prerelease: ${{ steps.release_type.outputs.isPrerelease }}
target_commitish: ${{github.event.inputs.sha}}
body: |
[Release notes](https://github.com/dbt-labs/dbt-core/blob/main/CHANGELOG.md)
files: |
dbt_postgres-${{github.event.inputs.version_number}}-py3-none-any.whl
dbt_core-${{github.event.inputs.version_number}}-py3-none-any.whl
dbt-postgres-${{github.event.inputs.version_number}}.tar.gz
dbt-core-${{github.event.inputs.version_number}}.tar.gz
pypi-release:
name: PyPI Release
name: Pypi release
needs: [github-release]
runs-on: ubuntu-latest
uses: dbt-labs/dbt-release/.github/workflows/pypi-release.yml@main
needs: github-release
with:
version_number: ${{ inputs.version_number }}
test_run: ${{ inputs.test_run }}
environment: PypiProd
steps:
- uses: actions/download-artifact@v2
with:
name: dist
path: 'dist'
secrets:
PYPI_API_TOKEN: ${{ secrets.PYPI_API_TOKEN }}
TEST_PYPI_API_TOKEN: ${{ secrets.TEST_PYPI_API_TOKEN }}
slack-notification:
name: Slack Notification
if: ${{ failure() && (!inputs.test_run || inputs.nightly_release) }}
needs:
[
bump-version-generate-changelog,
build-test-package,
github-release,
pypi-release,
]
uses: dbt-labs/dbt-release/.github/workflows/slack-post-notification.yml@main
with:
status: "failure"
secrets:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_DEV_CORE_ALERTS }}
- name: Publish distribution to PyPI
uses: pypa/gh-action-pypi-publish@v1.4.2
with:
password: ${{ secrets.PYPI_API_TOKEN }}

View File

@@ -37,17 +37,17 @@ jobs:
steps:
- name: Set up Python
uses: actions/setup-python@v4
uses: actions/setup-python@v2
with:
python-version: 3.8
- name: Checkout dbt repo
uses: actions/checkout@v3
uses: actions/checkout@v2.3.4
with:
path: ${{ env.DBT_REPO_DIRECTORY }}
- name: Checkout schemas.getdbt.com repo
uses: actions/checkout@v3
uses: actions/checkout@v2.3.4
with:
repository: dbt-labs/schemas.getdbt.com
ref: 'main'
@@ -83,7 +83,7 @@ jobs:
fi
- name: Upload schema diff
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v2.2.4
if: ${{ failure() }}
with:
name: 'schema_changes.txt'

View File

@@ -9,4 +9,13 @@ permissions:
jobs:
stale:
uses: dbt-labs/actions/.github/workflows/stale-bot-matrix.yml@main
runs-on: ubuntu-latest
steps:
# pinned at v4 (https://github.com/actions/stale/releases/tag/v4.0.0)
- uses: actions/stale@cdf15f641adb27a71842045a94023bef6945e3aa
with:
stale-issue-message: "This issue has been marked as Stale because it has been open for 180 days with no activity. If you would like the issue to remain open, please remove the stale label or comment on the issue, or it will be closed in 7 days."
stale-pr-message: "This PR has been marked as Stale because it has been open for 180 days with no activity. If you would like the PR to remain open, please remove the stale label or comment on the PR, or it will be closed in 7 days."
close-issue-message: "Although we are closing this issue as stale, it's not gone forever. Issues can be reopened if there is renewed community interest; add a comment to notify the maintainers."
# mark issues/PRs stale when they haven't seen activity in 180 days
days-before-stale: 180

View File

@@ -22,7 +22,7 @@ jobs:
# run the performance measurements on the current or default branch
test-schema:
name: Test Log Schema
runs-on: ubuntu-20.04
runs-on: ubuntu-latest
env:
# turns warnings into errors
RUSTFLAGS: "-D warnings"
@@ -30,8 +30,6 @@ jobs:
LOG_DIR: "/home/runner/work/dbt-core/dbt-core/logs"
# tells integration tests to output into json format
DBT_LOG_FORMAT: "json"
# tell eventmgr to convert logging events into bytes
DBT_TEST_BINARY_SERIALIZATION: "true"
# Additional test users
DBT_TEST_USER_1: dbt_test_user_1
DBT_TEST_USER_2: dbt_test_user_2
@@ -39,12 +37,12 @@ jobs:
steps:
- name: checkout dev
uses: actions/checkout@v3
uses: actions/checkout@v2
with:
persist-credentials: false
- name: Setup Python
uses: actions/setup-python@v4
uses: actions/setup-python@v2.2.2
with:
python-version: "3.8"

View File

@@ -1,155 +0,0 @@
# **what?**
# This workflow runs the test(s) at the input path a given number of times to determine whether they are flaky. You can test with any supported OS/Python combination.
# Runs are split into 10 batches so more test iterations can complete faster.
# **why?**
# To check whether a test is flaky, or whether a previously flaky test has been fixed. This allows easy testing across supported Python versions and OS combinations.
# **when?**
# This is triggered manually from dbt-core.
name: Flaky Tester
on:
workflow_dispatch:
inputs:
branch:
description: 'Branch to check out'
type: string
required: true
default: 'main'
test_path:
description: 'Path to single test to run (ex: tests/functional/retry/test_retry.py::TestRetry::test_fail_fast)'
type: string
required: true
default: 'tests/functional/...'
python_version:
description: 'Version of Python to Test Against'
type: choice
options:
- '3.8'
- '3.9'
- '3.10'
- '3.11'
os:
description: 'OS to run test in'
type: choice
options:
- 'ubuntu-latest'
- 'macos-latest'
- 'windows-latest'
num_runs_per_batch:
description: 'Max number of times to run the test per batch. We always run 10 batches.'
type: number
required: true
default: '50'
permissions: read-all
defaults:
run:
shell: bash
jobs:
debug:
runs-on: ubuntu-latest
steps:
- name: "[DEBUG] Output Inputs"
run: |
echo "Branch: ${{ inputs.branch }}"
echo "test_path: ${{ inputs.test_path }}"
echo "python_version: ${{ inputs.python_version }}"
echo "os: ${{ inputs.os }}"
echo "num_runs_per_batch: ${{ inputs.num_runs_per_batch }}"
pytest:
runs-on: ${{ inputs.os }}
strategy:
# run all batches, even if one fails. This informs how flaky the test may be.
fail-fast: false
# using a matrix to speed up the jobs since the matrix will run in parallel when runners are available
matrix:
batch: ["1", "2", "3", "4", "5", "6", "7", "8", "9", "10"]
env:
PYTEST_ADDOPTS: "-v --color=yes -n4 --csv integration_results.csv"
DBT_TEST_USER_1: dbt_test_user_1
DBT_TEST_USER_2: dbt_test_user_2
DBT_TEST_USER_3: dbt_test_user_3
DD_CIVISIBILITY_AGENTLESS_ENABLED: true
DD_API_KEY: ${{ secrets.DATADOG_API_KEY }}
DD_SITE: datadoghq.com
DD_ENV: ci
DD_SERVICE: ${{ github.event.repository.name }}
steps:
- name: "Checkout code"
uses: actions/checkout@v3
with:
ref: ${{ inputs.branch }}
- name: "Setup Python"
uses: actions/setup-python@v4
with:
python-version: "${{ inputs.python_version }}"
- name: "Setup Dev Environment"
run: make dev
- name: "Set up postgres (linux)"
if: inputs.os == 'ubuntu-latest'
run: make setup-db
# mac and windows don't use make due to limitations with docker with those runners in GitHub
- name: "Set up postgres (macos)"
if: inputs.os == 'macos-latest'
uses: ./.github/actions/setup-postgres-macos
- name: "Set up postgres (windows)"
if: inputs.os == 'windows-latest'
uses: ./.github/actions/setup-postgres-windows
- name: "Test Command"
id: command
run: |
test_command="python -m pytest ${{ inputs.test_path }}"
echo "test_command=$test_command" >> $GITHUB_OUTPUT
- name: "Run test ${{ inputs.num_runs_per_batch }} times"
id: pytest
run: |
set +e
for ((i=1; i<=${{ inputs.num_runs_per_batch }}; i++))
do
echo "Running pytest iteration $i..."
python -m pytest --ddtrace ${{ inputs.test_path }}
exit_code=$?
if [[ $exit_code -eq 0 ]]; then
success=$((success + 1))
echo "Iteration $i: Success"
else
failure=$((failure + 1))
echo "Iteration $i: Failure"
fi
echo
echo "==========================="
echo "Successful runs: $success"
echo "Failed runs: $failure"
echo "==========================="
echo
done
echo "failure=$failure" >> $GITHUB_OUTPUT
- name: "Success and Failure Summary: ${{ inputs.os }}/Python ${{ inputs.python_version }}"
run: |
echo "Batch: ${{ matrix.batch }}"
echo "Successful runs: ${{ steps.pytest.outputs.success }}"
echo "Failed runs: ${{ steps.pytest.outputs.failure }}"
- name: "Error for Failures"
if: ${{ steps.pytest.outputs.failure }}
run: |
echo "Batch ${{ matrix.batch }} failed ${{ steps.pytest.outputs.failure }} of ${{ inputs.num_runs_per_batch }} tests"
exit 1
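The batch loop above counts successes and failures by inspecting pytest's exit code on every iteration. For prototyping the same flakiness check locally, here is a minimal Python sketch of one batch; the test path and run count are placeholder values, and `subprocess` return codes stand in for the shell's `$?`:

```python
import subprocess
import sys

def run_flaky_check(test_path: str, num_runs: int) -> int:
    """Run one test repeatedly and return how many runs failed."""
    success = failure = 0
    for i in range(1, num_runs + 1):
        print(f"Running pytest iteration {i}...")
        # Mirrors `python -m pytest <path>`; a non-zero return code is a failure.
        result = subprocess.run([sys.executable, "-m", "pytest", test_path])
        if result.returncode == 0:
            success += 1
        else:
            failure += 1
        print(f"Successful runs: {success} | Failed runs: {failure}")
    return failure

if __name__ == "__main__":
    # Hypothetical invocation; adjust the path and count for your project.
    sys.exit(1 if run_flaky_check("tests/functional/sources", 50) else 0)
```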

View File

@@ -24,8 +24,10 @@ permissions:
jobs:
triage_label:
if: contains(github.event.issue.labels.*.name, 'awaiting_response')
uses: dbt-labs/actions/.github/workflows/swap-labels.yml@main
with:
add_label: "triage"
remove_label: "awaiting_response"
secrets: inherit
runs-on: ubuntu-latest
steps:
- name: initial labeling
uses: andymckay/labeler@master
with:
add-labels: "triage"
remove-labels: "awaiting_response"

View File

@@ -20,9 +20,106 @@ on:
description: 'The version number to bump to (ex. 1.2.0, 1.3.0b1)'
required: true
permissions:
contents: write
pull-requests: write
jobs:
version_bump_and_changie:
uses: dbt-labs/actions/.github/workflows/version-bump.yml@main
with:
version_number: ${{ inputs.version_number }}
secrets: inherit # ok since what we are calling is internally maintained
bump:
runs-on: ubuntu-latest
steps:
- name: "[DEBUG] Print Variables"
run: |
echo "all variables defined as inputs"
echo The version_number: ${{ github.event.inputs.version_number }}
- name: Check out the repository
uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: "3.8"
- name: Install python dependencies
run: |
python3 -m venv env
source env/bin/activate
pip install --upgrade pip
- name: Add Homebrew to PATH
run: |
echo "/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin" >> $GITHUB_PATH
- name: Install Homebrew packages
run: |
brew install pre-commit
brew tap miniscruff/changie https://github.com/miniscruff/changie
brew install changie
- name: Audit Version and Parse Into Parts
id: semver
uses: dbt-labs/actions/parse-semver@v1
with:
version: ${{ github.event.inputs.version_number }}
- name: Set branch value
id: variables
run: |
echo "::set-output name=BRANCH_NAME::prep-release/${{ github.event.inputs.version_number }}_$GITHUB_RUN_ID"
- name: Create PR branch
run: |
git checkout -b ${{ steps.variables.outputs.BRANCH_NAME }}
git push origin ${{ steps.variables.outputs.BRANCH_NAME }}
git branch --set-upstream-to=origin/${{ steps.variables.outputs.BRANCH_NAME }} ${{ steps.variables.outputs.BRANCH_NAME }}
- name: Bump version
run: |
source env/bin/activate
pip install -r dev-requirements.txt
env/bin/bumpversion --allow-dirty --new-version ${{ github.event.inputs.version_number }} major
git status
- name: Run changie
run: |
if [[ ${{ steps.semver.outputs.is-pre-release }} -eq 1 ]]
then
changie batch ${{ steps.semver.outputs.base-version }} --move-dir '${{ steps.semver.outputs.base-version }}' --prerelease '${{ steps.semver.outputs.pre-release }}'
else
changie batch ${{ steps.semver.outputs.base-version }} --include '${{ steps.semver.outputs.base-version }}' --remove-prereleases
fi
changie merge
git status
# this step will fail on whitespace errors but also correct them
- name: Remove trailing whitespace
continue-on-error: true
run: |
pre-commit run trailing-whitespace --files .bumpversion.cfg CHANGELOG.md .changes/*
git status
# this step will fail on newline errors but also correct them
- name: Removing extra newlines
continue-on-error: true
run: |
pre-commit run end-of-file-fixer --files .bumpversion.cfg CHANGELOG.md .changes/*
git status
- name: Commit version bump to branch
uses: EndBug/add-and-commit@v7
with:
author_name: 'Github Build Bot'
author_email: 'buildbot@fishtownanalytics.com'
message: 'Bumping version to ${{ github.event.inputs.version_number }} and generating CHANGELOG'
branch: '${{ steps.variables.outputs.BRANCH_NAME }}'
push: 'origin origin/${{ steps.variables.outputs.BRANCH_NAME }}'
- name: Create Pull Request
uses: peter-evans/create-pull-request@v3
with:
author: 'Github Build Bot <buildbot@fishtownanalytics.com>'
base: ${{github.ref}}
title: 'Bumping version to ${{ github.event.inputs.version_number }} and generating changelog'
branch: '${{ steps.variables.outputs.BRANCH_NAME }}'
labels: |
Skip Changelog
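The `parse-semver` action invoked above splits the input version into a base version plus an optional pre-release tag, and that split decides which `changie batch` branch runs. A rough Python equivalent of the split, assuming the same `1.2.0` / `1.3.0b1` style inputs the workflow accepts (the regex and output keys are illustrative, not the action's actual implementation):

```python
import re

VERSION_PATTERN = re.compile(
    r"^(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"
    r"(?:(?P<prekind>a|b|rc)(?P<prenum>\d+))?$"
)

def parse_semver(version: str) -> dict:
    """Split a version like '1.3.0b1' into base and pre-release parts."""
    match = VERSION_PATTERN.match(version)
    if match is None:
        raise ValueError(f"Could not parse version: {version!r}")
    parts = match.groupdict()
    base = "{major}.{minor}.{patch}".format(**parts)
    pre = f"{parts['prekind']}{parts['prenum']}" if parts["prekind"] else None
    return {"base-version": base, "pre-release": pre, "is-pre-release": pre is not None}

print(parse_semver("1.3.0b1"))
# {'base-version': '1.3.0', 'pre-release': 'b1', 'is-pre-release': True}
```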

.gitignore
View File

@@ -11,8 +11,6 @@ __pycache__/
env*/
dbt_env/
build/
!tests/functional/build
!core/dbt/docs/build
develop-eggs/
dist/
downloads/
@@ -29,8 +27,6 @@ var/
.mypy_cache/
.dmypy.json
logs/
.user.yml
profiles.yml
# PyInstaller
# Usually these files are written by a python script from a template
@@ -54,7 +50,6 @@ coverage.xml
*,cover
.hypothesis/
test.env
makefile.test.env
*.pytest_cache/

View File

@@ -1,7 +1,8 @@
# Configuration for pre-commit hooks (see https://pre-commit.com/).
# Eventually the hooks described here will be run as tests before merging each PR.
exclude: ^(core/dbt/docs/build/|core/dbt/events/types_pb2.py)
# TODO: remove global exclusion of tests when testing overhaul is complete
exclude: ^test/
# Force all unspecified python hooks to run python 3.8
default_language_version:
@@ -37,7 +38,7 @@ repos:
alias: flake8-check
stages: [manual]
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.3.0
rev: v0.942
hooks:
- id: mypy
# N.B.: Mypy is... a bit fragile.

View File

@@ -5,14 +5,12 @@
- "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
- Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)
## Previous Releases
For information on prior major and minor releases, see their changelogs:
* [1.6](https://github.com/dbt-labs/dbt-core/blob/1.6.latest/CHANGELOG.md)
* [1.5](https://github.com/dbt-labs/dbt-core/blob/1.5.latest/CHANGELOG.md)
* [1.4](https://github.com/dbt-labs/dbt-core/blob/1.4.latest/CHANGELOG.md)
* [1.3](https://github.com/dbt-labs/dbt-core/blob/1.3.latest/CHANGELOG.md)
* [1.2](https://github.com/dbt-labs/dbt-core/blob/1.2.latest/CHANGELOG.md)
* [1.1](https://github.com/dbt-labs/dbt-core/blob/1.1.latest/CHANGELOG.md)

View File

@@ -5,10 +5,10 @@
1. [About this document](#about-this-document)
2. [Getting the code](#getting-the-code)
3. [Setting up an environment](#setting-up-an-environment)
4. [Running dbt-core in development](#running-dbt-core-in-development)
4. [Running `dbt` in development](#running-dbt-core-in-development)
5. [Testing dbt-core](#testing)
6. [Debugging](#debugging)
7. [Adding or modifying a changelog entry](#adding-or-modifying-a-changelog-entry)
7. [Adding a changelog entry](#adding-a-changelog-entry)
8. [Submitting a Pull Request](#submitting-a-pull-request)
## About this document
@@ -56,7 +56,7 @@ There are some tools that will be helpful to you in developing locally. While th
These are the tools used in `dbt-core` development and testing:
- [`tox`](https://tox.readthedocs.io/en/latest/) to manage virtualenvs across python versions. We currently target the latest patch releases for Python 3.8, 3.9, 3.10 and 3.11
- [`tox`](https://tox.readthedocs.io/en/latest/) to manage virtualenvs across python versions. We currently target the latest patch releases for Python 3.7, 3.8, 3.9, and 3.10
- [`pytest`](https://docs.pytest.org/en/latest/) to define, discover, and run tests
- [`flake8`](https://flake8.pycqa.org/en/latest/) for code linting
- [`black`](https://github.com/psf/black) for code formatting
@@ -96,15 +96,12 @@ brew install postgresql
### Installation
First make sure that you set up your `virtualenv` as described in [Setting up an environment](#setting-up-an-environment). Also ensure you have the latest version of pip installed with `pip install --upgrade pip`. Next, install `dbt-core` (and its dependencies):
First make sure that you set up your `virtualenv` as described in [Setting up an environment](#setting-up-an-environment). Also ensure you have the latest version of pip installed with `pip install --upgrade pip`. Next, install `dbt-core` (and its dependencies) with:
```sh
make dev
```
or, alternatively:
```sh
# or
pip install -r dev-requirements.txt -r editable-requirements.txt
pre-commit install
```
When installed in this way, any changes you make to your local copy of the source code will be reflected immediately in your next `dbt` run.
@@ -113,7 +110,7 @@ When installed in this way, any changes you make to your local copy of the sourc
With your virtualenv activated, the `dbt` script should point back to the source code you've cloned on your machine. You can verify this by running `which dbt`. This command should show you a path to an executable in your virtualenv.
Configure your [profile](https://docs.getdbt.com/docs/configure-your-profile) as necessary to connect to your target databases. It may be a good idea to add a new profile pointing to a local Postgres instance, or a specific test sandbox within your data warehouse if appropriate. Make sure to create a profile before running integration tests.
Configure your [profile](https://docs.getdbt.com/docs/configure-your-profile) as necessary to connect to your target databases. It may be a good idea to add a new profile pointing to a local Postgres instance, or a specific test sandbox within your data warehouse if appropriate.
## Testing
@@ -163,7 +160,7 @@ suites.
#### `tox`
[`tox`](https://tox.readthedocs.io/en/latest/) takes care of managing virtualenvs and installing dependencies in order to run tests. You can also run tests in parallel; for example, you can run unit tests for Python 3.8, Python 3.9, Python 3.10, and Python 3.11 in parallel with `tox -p`. You can also run unit tests for specific Python versions with `tox -e py38`. The configuration for these tests is located in `tox.ini`.
[`tox`](https://tox.readthedocs.io/en/latest/) takes care of managing virtualenvs and installing dependencies in order to run tests. You can also run tests in parallel; for example, you can run unit tests for Python 3.7, Python 3.8, Python 3.9, and Python 3.10 in parallel with `tox -p`. You can also run unit tests for specific Python versions with `tox -e py37`. The configuration for these tests is located in `tox.ini`.
#### `pytest`
@@ -171,10 +168,12 @@ Finally, you can also run a specific test or group of tests using [`pytest`](htt
```sh
# run all unit tests in a file
python3 -m pytest tests/unit/test_graph.py
python3 -m pytest test/unit/test_graph.py
# run a specific unit test
python3 -m pytest tests/unit/test_graph.py::GraphTest::test__dependency_list
# run specific Postgres functional tests
python3 -m pytest test/unit/test_graph.py::GraphTest::test__dependency_list
# run specific Postgres integration tests (old way)
python3 -m pytest -m profile_postgres test/integration/074_postgres_unlogged_table_tests
# run specific Postgres integration tests (new way)
python3 -m pytest tests/functional/sources
```
@@ -183,8 +182,9 @@ python3 -m pytest tests/functional/sources
### Unit, Integration, Functional?
Here are some general rules for adding tests:
* unit tests (`tests/unit`) don't need to access a database; "pure Python" tests should be written as unit tests
* functional tests (`tests/functional`) cover anything that interacts with a database, namely adapter
* unit tests (`test/unit` & `tests/unit`) don't need to access a database; "pure Python" tests should be written as unit tests
* functional tests (`test/integration` & `tests/functional`) cover anything that interacts with a database, namely adapter
* *everything in* `test/*` *is being steadily migrated to* `tests/*`
## Debugging
@@ -201,21 +201,13 @@ Here are some general rules for adding tests:
* Sometimes flake8 complains about lines that are actually fine, in which case you can put a comment on the line such as: # noqa or # noqa: ANNN, where ANNN is the error code that flake8 issues.
* To collect output for `CProfile`, run dbt with the `-r` option and the name of an output file, i.e. `dbt -r dbt.cprof run`. If you just want to profile parsing, you can do: `dbt -r dbt.cprof parse`. `pip` install `snakeviz` to view the output. Run `snakeviz dbt.cprof` and output will be rendered in a browser window.
## Adding or modifying a CHANGELOG Entry
## Adding a CHANGELOG Entry
We use [changie](https://changie.dev) to generate `CHANGELOG` entries. **Note:** Do not edit the `CHANGELOG.md` directly. Your modifications will be lost.
Follow the steps to [install `changie`](https://changie.dev/guide/installation/) for your system.
Once changie is installed and your PR is created for a new feature, simply run the following command and changie will walk you through the process of creating a changelog entry:
```shell
changie new
```
Commit the file that's created and your changelog entry is complete!
If you are contributing to a feature already in progress, you will modify the changie yaml file in dbt/.changes/unreleased/ related to your change. If you need help finding this file, please ask within the discussion for the pull request!
Once changie is installed and your PR is created, simply run `changie new` and changie will walk you through the process of creating a changelog entry. Commit the file that's created and your changelog entry is complete!
You don't need to worry about which `dbt-core` version your change will go into. Just create the changelog entry with `changie`, and open your PR against the `main` branch. All merged changes will be included in the next minor version of `dbt-core`. The Core maintainers _may_ choose to "backport" specific changes in order to patch older minor versions. In that case, a maintainer will take care of that backport after merging your PR, before releasing the new version of `dbt-core`.

View File

@@ -9,7 +9,7 @@ ENV DEBIAN_FRONTEND noninteractive
RUN apt-get update \
&& apt-get install -y --no-install-recommends \
software-properties-common gpg-agent \
software-properties-common \
&& add-apt-repository ppa:git-core/ppa -y \
&& apt-get dist-upgrade -y \
&& apt-get install -y --no-install-recommends \
@@ -30,9 +30,16 @@ RUN apt-get update \
unixodbc-dev \
&& add-apt-repository ppa:deadsnakes/ppa \
&& apt-get install -y \
python-is-python3 \
python-dev-is-python3 \
python \
python-dev \
python3-pip \
python3.6 \
python3.6-dev \
python3-pip \
python3.6-venv \
python3.7 \
python3.7-dev \
python3.7-venv \
python3.8 \
python3.8-dev \
python3.8-venv \
@@ -42,9 +49,6 @@ RUN apt-get update \
python3.10 \
python3.10-dev \
python3.10-venv \
python3.11 \
python3.11-dev \
python3.11-venv \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*

View File

@@ -6,41 +6,23 @@ ifeq ($(USE_DOCKER),true)
DOCKER_CMD := docker-compose run --rm test
endif
#
# To override CI_flags, create a file at this repo's root dir named `makefile.test.env`. Fill it
# with any ENV_VAR overrides required by your test environment, e.g.
# DBT_TEST_USER_1=user
# LOG_DIR="dir with a space in it"
#
# Warning: restrict each line to one variable only.
#
ifeq (./makefile.test.env,$(wildcard ./makefile.test.env))
include ./makefile.test.env
endif
LOGS_DIR := ./logs
# Optional flag to invoke tests using our CI env.
# But we always want these active for structured
# log testing.
CI_FLAGS =\
DBT_TEST_USER_1=$(if $(DBT_TEST_USER_1),$(DBT_TEST_USER_1),dbt_test_user_1)\
DBT_TEST_USER_2=$(if $(DBT_TEST_USER_2),$(DBT_TEST_USER_2),dbt_test_user_2)\
DBT_TEST_USER_3=$(if $(DBT_TEST_USER_3),$(DBT_TEST_USER_3),dbt_test_user_3)\
RUSTFLAGS=$(if $(RUSTFLAGS),$(RUSTFLAGS),"-D warnings")\
LOG_DIR=$(if $(LOG_DIR),$(LOG_DIR),./logs)\
DBT_LOG_FORMAT=$(if $(DBT_LOG_FORMAT),$(DBT_LOG_FORMAT),json)
.PHONY: dev_req
dev_req: ## Installs dbt-* packages in develop mode along with only development dependencies.
@\
pip install -r dev-requirements.txt
pip install -r editable-requirements.txt
DBT_TEST_USER_1=dbt_test_user_1\
DBT_TEST_USER_2=dbt_test_user_2\
DBT_TEST_USER_3=dbt_test_user_3\
RUSTFLAGS="-D warnings"\
LOG_DIR=./logs\
DBT_LOG_FORMAT=json
.PHONY: dev
dev: dev_req ## Installs dbt-* packages in develop mode along with development dependencies and pre-commit.
dev: ## Installs dbt-* packages in develop mode along with development dependencies.
@\
pre-commit install
.PHONY: proto_types
proto_types: ## generates google protobuf python file from types.proto
protoc -I=./core/dbt/events --python_out=./core/dbt/events ./core/dbt/events/types.proto
pip install -r dev-requirements.txt -r editable-requirements.txt
.PHONY: mypy
mypy: .env ## Runs mypy against staged changes for static type checking.
@@ -79,7 +61,7 @@ test: .env ## Runs unit tests with py and code checks against staged changes.
.PHONY: integration
integration: .env ## Runs postgres integration tests with py-integration
@\
$(CI_FLAGS) $(DOCKER_CMD) tox -e py-integration -- -nauto
$(if $(USE_CI_FLAGS), $(CI_FLAGS)) $(DOCKER_CMD) tox -e py-integration -- -nauto
.PHONY: integration-fail-fast
integration-fail-fast: .env ## Runs postgres integration tests with py-integration in "fail fast" mode.
@@ -89,9 +71,9 @@ integration-fail-fast: .env ## Runs postgres integration tests with py-integrati
.PHONY: interop
interop: clean
@\
mkdir $(LOG_DIR) && \
mkdir $(LOGS_DIR) && \
$(CI_FLAGS) $(DOCKER_CMD) tox -e py-integration -- -nauto && \
LOG_DIR=$(LOG_DIR) cargo run --manifest-path test/interop/log_parsing/Cargo.toml
LOG_DIR=$(LOGS_DIR) cargo run --manifest-path test/interop/log_parsing/Cargo.toml
.PHONY: setup-db
setup-db: ## Setup Postgres database with docker-compose for system testing.

View File

@@ -21,7 +21,7 @@ These select statements, or "models", form a dbt project. Models frequently buil
## Getting started
- [Install dbt](https://docs.getdbt.com/docs/get-started/installation)
- [Install dbt](https://docs.getdbt.com/docs/installation)
- Read the [introduction](https://docs.getdbt.com/docs/introduction/) and [viewpoint](https://docs.getdbt.com/docs/about/viewpoint/)
## Join the dbt Community

View File

@@ -2,59 +2,50 @@
## The following are individual files in this directory.
### compilation.py
### constants.py
### dataclass_schema.py
### deprecations.py
### exceptions.py
### flags.py
### helper_types.py
### hooks.py
### lib.py
### links.py
### logger.py
### main.py
### node_types.py
### profiler.py
### selected_resources.py
### semver.py
### tracking.py
### version.py
### lib.py
### node_types.py
### helper_types.py
### links.py
### semver.py
### ui.py
### compilation.py
### dataclass_schema.py
### exceptions.py
### hooks.py
### logger.py
### profiler.py
### utils.py
### version.py
## The subdirectories will be documented in a README in the subdirectory
* adapters
* cli
* clients
* config
* context
* contracts
* deps
* docs
* events
* graph
* include
* parser
* adapters
* context
* deps
* graph
* task
* tests
* clients
* events

View File

@@ -1,19 +1,14 @@
# these are all just exports, #noqa them so flake8 will be happy
# TODO: Should we still include this in the `adapters` namespace?
from dbt.contracts.connection import Credentials # noqa: F401
from dbt.adapters.base.meta import available # noqa: F401
from dbt.adapters.base.connections import BaseConnectionManager # noqa: F401
from dbt.adapters.base.relation import ( # noqa: F401
from dbt.contracts.connection import Credentials # noqa
from dbt.adapters.base.meta import available # noqa
from dbt.adapters.base.connections import BaseConnectionManager # noqa
from dbt.adapters.base.relation import ( # noqa
BaseRelation,
RelationType,
SchemaSearchMap,
)
from dbt.adapters.base.column import Column # noqa: F401
from dbt.adapters.base.impl import ( # noqa: F401
AdapterConfig,
BaseAdapter,
PythonJobHelper,
ConstraintSupport,
)
from dbt.adapters.base.plugin import AdapterPlugin # noqa: F401
from dbt.adapters.base.column import Column # noqa
from dbt.adapters.base.impl import AdapterConfig, BaseAdapter, PythonJobHelper # noqa
from dbt.adapters.base.plugin import AdapterPlugin # noqa
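The `# noqa: F401` markers in the hunk above silence flake8's "imported but unused" check (F401) on names that exist purely as re-exports. A tiny self-contained illustration of the convention, using stdlib names as stand-ins for the dbt modules:

```python
# Hypothetical __init__.py for a package that re-exports part of its API.
# The imports are "unused" in this file, so each line suppresses F401.
from os.path import join  # noqa: F401
from os.path import split  # noqa: F401
```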

View File

@@ -2,7 +2,7 @@ from dataclasses import dataclass
import re
from typing import Dict, ClassVar, Any, Optional
from dbt.exceptions import DbtRuntimeError
from dbt.exceptions import RuntimeException
@dataclass
@@ -60,7 +60,6 @@ class Column:
"float",
"double precision",
"float8",
"double",
]
def is_integer(self) -> bool:
@@ -86,7 +85,7 @@ class Column:
def string_size(self) -> int:
if not self.is_string():
raise DbtRuntimeError("Called string_size() on non-string field!")
raise RuntimeException("Called string_size() on non-string field!")
if self.dtype == "text" or self.char_size is None:
# char_size should never be None. Handle it reasonably just in case
@@ -125,7 +124,7 @@ class Column:
def from_description(cls, name: str, raw_data_type: str) -> "Column":
match = re.match(r"([^(]+)(\([^)]+\))?", raw_data_type)
if match is None:
raise DbtRuntimeError(f'Could not interpret data type "{raw_data_type}"')
raise RuntimeException(f'Could not interpret data type "{raw_data_type}"')
data_type, size_info = match.groups()
char_size = None
numeric_precision = None
@@ -138,7 +137,7 @@ class Column:
try:
char_size = int(parts[0])
except ValueError:
raise DbtRuntimeError(
raise RuntimeException(
f'Could not interpret data_type "{raw_data_type}": '
f'could not convert "{parts[0]}" to an integer'
)
@@ -146,14 +145,14 @@ class Column:
try:
numeric_precision = int(parts[0])
except ValueError:
raise DbtRuntimeError(
raise RuntimeException(
f'Could not interpret data_type "{raw_data_type}": '
f'could not convert "{parts[0]}" to an integer'
)
try:
numeric_scale = int(parts[1])
except ValueError:
raise DbtRuntimeError(
raise RuntimeException(
f'Could not interpret data_type "{raw_data_type}": '
f'could not convert "{parts[1]}" to an integer'
)
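`from_description` does all of its splitting with one regex, `([^(]+)(\([^)]+\))?`, separating a raw type such as `character varying(255)` or `numeric(10,2)` into a type name and an optional size suffix. A standalone sketch of that parse, using the same regex but simplified error handling:

```python
import re
from typing import Optional, Tuple

def split_data_type(raw: str) -> Tuple[str, Optional[str]]:
    """Split 'numeric(10,2)' into ('numeric', '(10,2)')."""
    match = re.match(r"([^(]+)(\([^)]+\))?", raw)
    if match is None:
        raise ValueError(f'Could not interpret data type "{raw}"')
    data_type, size_info = match.groups()
    return data_type, size_info

print(split_data_type("character varying(255)"))  # ('character varying', '(255)')
print(split_data_type("numeric(10,2)"))           # ('numeric', '(10,2)')
```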

View File

@@ -48,7 +48,6 @@ from dbt.events.types import (
Rollback,
RollbackFailed,
)
from dbt.events.contextvars import get_node_info
from dbt import flags
from dbt.utils import cast_to_str
@@ -91,13 +90,13 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
key = self.get_thread_identifier()
with self.lock:
if key not in self.thread_connections:
raise dbt.exceptions.InvalidConnectionError(key, list(self.thread_connections))
raise dbt.exceptions.InvalidConnectionException(key, list(self.thread_connections))
return self.thread_connections[key]
def set_thread_connection(self, conn: Connection) -> None:
key = self.get_thread_identifier()
if key in self.thread_connections:
raise dbt.exceptions.DbtInternalError(
raise dbt.exceptions.InternalException(
"In set_thread_connection, existing connection exists for {}"
)
self.thread_connections[key] = conn
@@ -137,49 +136,47 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
:return: A context manager that handles exceptions raised by the
underlying database.
"""
raise dbt.exceptions.NotImplementedError(
raise dbt.exceptions.NotImplementedException(
"`exception_handler` is not implemented for this adapter!"
)
def set_connection_name(self, name: Optional[str] = None) -> Connection:
"""Called by 'acquire_connection' in BaseAdapter, which is called by
'connection_named', called by 'connection_for(node)'.
Creates a connection for this thread if one doesn't already
exist, and will rename an existing connection."""
conn_name: str
if name is None:
# if a name isn't specified, we'll re-use a single handle
# named 'master'
conn_name = "master"
else:
if not isinstance(name, str):
raise dbt.exceptions.CompilerException(
f"For connection name, got {name} - not a string!"
)
assert isinstance(name, str)
conn_name = name
conn_name: str = "master" if name is None else name
# Get a connection for this thread
conn = self.get_if_exists()
if conn and conn.name == conn_name and conn.state == "open":
# Found a connection and nothing to do, so just return it
return conn
if conn is None:
# Create a new connection
conn = Connection(
type=Identifier(self.TYPE),
name=conn_name,
name=None,
state=ConnectionState.INIT,
transaction_open=False,
handle=None,
credentials=self.profile.credentials,
)
conn.handle = LazyHandle(self.open)
# Add the connection to thread_connections for this thread
self.set_thread_connection(conn)
fire_event(
NewConnection(conn_name=conn_name, conn_type=self.TYPE, node_info=get_node_info())
)
else: # existing connection either wasn't open or didn't have the right name
if conn.state != "open":
conn.handle = LazyHandle(self.open)
if conn.name != conn_name:
orig_conn_name: str = conn.name or ""
conn.name = conn_name
fire_event(ConnectionReused(orig_conn_name=orig_conn_name, conn_name=conn_name))
if conn.name == conn_name and conn.state == "open":
return conn
fire_event(NewConnection(conn_name=conn_name, conn_type=self.TYPE))
if conn.state == "open":
fire_event(ConnectionReused(conn_name=conn_name))
else:
conn.handle = LazyHandle(self.open)
conn.name = conn_name
return conn
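Both the old and new bodies of `set_connection_name` implement the same idea: connections are keyed by thread, a missing connection is created lazily, and an existing connection is renamed (and reopened if stale) rather than replaced. A stripped-down sketch of that pattern, with dbt's `LazyHandle` and event machinery omitted and plain dicts standing in for `Connection` objects:

```python
import threading

class ConnectionPool:
    """One named connection per thread, created lazily and renamed on reuse."""

    def __init__(self):
        self._connections = {}
        self._lock = threading.Lock()

    def set_connection_name(self, name=None):
        conn_name = "master" if name is None else name
        key = threading.get_ident()
        with self._lock:
            conn = self._connections.get(key)
            if conn is None:
                # No connection for this thread yet: create and register one.
                conn = {"name": conn_name, "state": "open"}
                self._connections[key] = conn
            elif conn["name"] != conn_name:
                # Reuse this thread's existing connection under the new name.
                conn["name"] = conn_name
            return conn

pool = ConnectionPool()
print(pool.set_connection_name("model.my_model"))  # {'name': 'model.my_model', 'state': 'open'}
```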
@classmethod
@@ -211,7 +208,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
connect should trigger a retry.
:type retryable_exceptions: Iterable[Type[Exception]]
:param int retry_limit: How many times to retry the call to connect. If this limit
is exceeded before a successful call, a FailedToConnectError will be raised.
is exceeded before a successful call, a FailedToConnectException will be raised.
Must be non-negative.
:param retry_timeout: Time to wait between attempts to connect. Can also take a
Callable that takes the number of attempts so far, beginning at 0, and returns an int
@@ -220,14 +217,14 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
:param int _attempts: Parameter used to keep track of the number of attempts in calling the
connect function across recursive calls. Passed as an argument to retry_timeout if it
is a Callable. This parameter should not be set by the initial caller.
:raises dbt.exceptions.FailedToConnectError: Upon exhausting all retry attempts without
:raises dbt.exceptions.FailedToConnectException: Upon exhausting all retry attempts without
successfully acquiring a handle.
:return: The given connection with its appropriate state and handle attributes set
depending on whether we successfully acquired a handle or not.
"""
timeout = retry_timeout(_attempts) if callable(retry_timeout) else retry_timeout
if timeout < 0:
raise dbt.exceptions.FailedToConnectError(
raise dbt.exceptions.FailedToConnectException(
"retry_timeout cannot be negative or return a negative time."
)
@@ -235,7 +232,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
# This guard is not perfect; others may add to the recursion limit (e.g. built-ins).
connection.handle = None
connection.state = ConnectionState.FAIL
raise dbt.exceptions.FailedToConnectError("retry_limit cannot be negative")
raise dbt.exceptions.FailedToConnectException("retry_limit cannot be negative")
try:
connection.handle = connect()
@@ -246,7 +243,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
if retry_limit <= 0:
connection.handle = None
connection.state = ConnectionState.FAIL
raise dbt.exceptions.FailedToConnectError(str(e))
raise dbt.exceptions.FailedToConnectException(str(e))
logger.debug(
f"Got a retryable error when attempting to open a {cls.TYPE} connection.\n"
@@ -268,12 +265,12 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
except Exception as e:
connection.handle = None
connection.state = ConnectionState.FAIL
raise dbt.exceptions.FailedToConnectError(str(e))
raise dbt.exceptions.FailedToConnectException(str(e))
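The surrounding docstring pins down `retry_connection`'s contract: `retry_timeout` may be a number or a callable of the attempt count, retries stop once `retry_limit` is exhausted, and every terminal error surfaces as a failed-to-connect exception. A minimal iterative sketch of that contract (the real method recurses and mutates a `Connection` object; the exception class here is a stand-in):

```python
import time
from typing import Callable, Iterable, Type, Union

class FailedToConnect(Exception):
    """Stand-in for dbt's failed-to-connect exception."""

def retry_connection(
    connect: Callable[[], object],
    retryable_exceptions: Iterable[Type[Exception]],
    retry_limit: int = 1,
    retry_timeout: Union[float, Callable[[int], float]] = 1.0,
):
    if retry_limit < 0:
        raise FailedToConnect("retry_limit cannot be negative")
    retryable = tuple(retryable_exceptions)
    for attempt in range(retry_limit + 1):
        timeout = retry_timeout(attempt) if callable(retry_timeout) else retry_timeout
        if timeout < 0:
            raise FailedToConnect("retry_timeout cannot be negative or return a negative time.")
        try:
            return connect()
        except retryable as e:
            if attempt >= retry_limit:
                raise FailedToConnect(str(e))
            time.sleep(timeout)  # wait, then retry
        except Exception as e:
            raise FailedToConnect(str(e))  # non-retryable errors fail immediately
```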
@abc.abstractmethod
def cancel_open(self) -> Optional[List[str]]:
"""Cancel all open connections on the adapter. (passable)"""
raise dbt.exceptions.NotImplementedError(
raise dbt.exceptions.NotImplementedException(
"`cancel_open` is not implemented for this adapter!"
)
@@ -288,7 +285,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
This should be thread-safe, or hold the lock if necessary. The given
connection should not be in either in_use or available.
"""
raise dbt.exceptions.NotImplementedError("`open` is not implemented for this adapter!")
raise dbt.exceptions.NotImplementedException("`open` is not implemented for this adapter!")
def release(self) -> None:
with self.lock:
@@ -320,12 +317,16 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
@abc.abstractmethod
def begin(self) -> None:
"""Begin a transaction. (passable)"""
raise dbt.exceptions.NotImplementedError("`begin` is not implemented for this adapter!")
raise dbt.exceptions.NotImplementedException(
"`begin` is not implemented for this adapter!"
)
@abc.abstractmethod
def commit(self) -> None:
"""Commit a transaction. (passable)"""
raise dbt.exceptions.NotImplementedError("`commit` is not implemented for this adapter!")
raise dbt.exceptions.NotImplementedException(
"`commit` is not implemented for this adapter!"
)
@classmethod
def _rollback_handle(cls, connection: Connection) -> None:
@@ -335,9 +336,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
except Exception:
fire_event(
RollbackFailed(
conn_name=cast_to_str(connection.name),
exc_info=traceback.format_exc(),
node_info=get_node_info(),
conn_name=cast_to_str(connection.name), exc_info=traceback.format_exc()
)
)
@@ -346,27 +345,21 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
"""Perform the actual close operation."""
# On windows, sometimes connection handles don't have a close() attr.
if hasattr(connection.handle, "close"):
fire_event(
ConnectionClosed(conn_name=cast_to_str(connection.name), node_info=get_node_info())
)
fire_event(ConnectionClosed(conn_name=cast_to_str(connection.name)))
connection.handle.close()
else:
fire_event(
ConnectionLeftOpen(
conn_name=cast_to_str(connection.name), node_info=get_node_info()
)
)
fire_event(ConnectionLeftOpen(conn_name=cast_to_str(connection.name)))
@classmethod
def _rollback(cls, connection: Connection) -> None:
"""Roll back the given connection."""
if connection.transaction_open is False:
raise dbt.exceptions.DbtInternalError(
raise dbt.exceptions.InternalException(
f"Tried to rollback transaction on connection "
f'"{connection.name}", but it does not have one open!'
)
fire_event(Rollback(conn_name=cast_to_str(connection.name), node_info=get_node_info()))
fire_event(Rollback(conn_name=cast_to_str(connection.name)))
cls._rollback_handle(connection)
connection.transaction_open = False
@@ -378,7 +371,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
return connection
if connection.transaction_open and connection.handle:
fire_event(Rollback(conn_name=cast_to_str(connection.name), node_info=get_node_info()))
fire_event(Rollback(conn_name=cast_to_str(connection.name)))
cls._rollback_handle(connection)
connection.transaction_open = False
@@ -411,4 +404,6 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
:return: A tuple of the query status and results (empty if fetch=False).
:rtype: Tuple[AdapterResponse, agate.Table]
"""
raise dbt.exceptions.NotImplementedError("`execute` is not implemented for this adapter!")
raise dbt.exceptions.NotImplementedException(
"`execute` is not implemented for this adapter!"
)

View File

@@ -2,52 +2,45 @@ import abc
from concurrent.futures import as_completed, Future
from contextlib import contextmanager
from datetime import datetime
from enum import Enum
import time
from itertools import chain
from typing import (
Any,
Optional,
Tuple,
Callable,
Dict,
Iterable,
Iterator,
Type,
Dict,
Any,
List,
Mapping,
Optional,
Set,
Tuple,
Type,
Iterator,
Union,
Set,
)
from dbt.contracts.graph.nodes import ColumnLevelConstraint, ConstraintType, ModelLevelConstraint
import agate
import pytz
from dbt.exceptions import (
DbtInternalError,
DbtRuntimeError,
DbtValidationError,
MacroArgTypeError,
MacroResultError,
NotImplementedError,
NullRelationCacheAttemptedError,
NullRelationDropAttemptedError,
QuoteConfigTypeError,
RelationReturnedMultipleResultsError,
RenameToNoneAttemptedError,
SnapshotTargetIncompleteError,
SnapshotTargetNotSnapshotTableError,
UnexpectedNonTimestampError,
UnexpectedNullError,
raise_database_error,
raise_compiler_error,
invalid_type_error,
get_relation_returned_multiple_results,
InternalException,
NotImplementedException,
RuntimeException,
)
from dbt.adapters.protocol import AdapterConfig, ConnectionManagerProtocol
from dbt.adapters.protocol import (
AdapterConfig,
ConnectionManagerProtocol,
)
from dbt.clients.agate_helper import empty_table, merge_tables, table_from_rows
from dbt.clients.jinja import MacroGenerator
from dbt.contracts.graph.compiled import CompileResultNode, CompiledSeedNode
from dbt.contracts.graph.manifest import Manifest, MacroManifest
from dbt.contracts.graph.nodes import ResultNode
from dbt.contracts.graph.parsed import ParsedSeedNode
from dbt.events.functions import fire_event, warn_or_error
from dbt.events.types import (
CacheMiss,
@@ -55,10 +48,8 @@ from dbt.events.types import (
CodeExecution,
CodeExecutionStatus,
CatalogGenerationError,
ConstraintNotSupported,
ConstraintNotEnforced,
)
from dbt.utils import filter_null_values, executor, cast_to_str, AttrDict
from dbt.utils import filter_null_values, executor, cast_to_str
from dbt.adapters.base.connections import Connection, AdapterResponse
from dbt.adapters.base.meta import AdapterMeta, available
@@ -70,22 +61,19 @@ from dbt.adapters.base.relation import (
)
from dbt.adapters.base import Column as BaseColumn
from dbt.adapters.base import Credentials
from dbt.adapters.cache import RelationsCache, _make_ref_key_dict
from dbt import deprecations
from dbt.adapters.cache import RelationsCache, _make_ref_key_msg
SeedModel = Union[ParsedSeedNode, CompiledSeedNode]
GET_CATALOG_MACRO_NAME = "get_catalog"
FRESHNESS_MACRO_NAME = "collect_freshness"
class ConstraintSupport(str, Enum):
ENFORCED = "enforced"
NOT_ENFORCED = "not_enforced"
NOT_SUPPORTED = "not_supported"
def _expect_row_value(key: str, row: agate.Row):
if key not in row.keys():
raise DbtInternalError(
raise InternalException(
'Got a row without "{}" column, columns: {}'.format(key, row.keys())
)
return row[key]
@@ -114,10 +102,18 @@ def _utc(dt: Optional[datetime], source: BaseRelation, field_name: str) -> datet
assume the datetime is already for UTC and add the timezone.
"""
if dt is None:
raise UnexpectedNullError(field_name, source)
raise raise_database_error(
"Expected a non-null value when querying field '{}' of table "
" {} but received value 'null' instead".format(field_name, source)
)
elif not hasattr(dt, "tzinfo"):
raise UnexpectedNonTimestampError(field_name, source, dt)
raise raise_database_error(
"Expected a timestamp value when querying field '{}' of table "
"{} but received value of type '{}' instead".format(
field_name, source, type(dt).__name__
)
)
elif dt.tzinfo:
return dt.astimezone(pytz.UTC)
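`_utc`'s normalization rule is small but easy to get wrong: null values and non-timestamps are errors, aware datetimes are converted to UTC, and naive datetimes are assumed to already be in UTC. In isolation (using the stdlib `timezone` in place of `pytz`), the happy path looks like:

```python
from datetime import datetime, timezone

def to_utc(dt: datetime) -> datetime:
    """Normalize a datetime to UTC, assuming naive values are already UTC."""
    if dt.tzinfo is not None:
        return dt.astimezone(timezone.utc)
    return dt.replace(tzinfo=timezone.utc)

print(to_utc(datetime(2022, 11, 29, 9, 25)))  # 2022-11-29 09:25:00+00:00
```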
@@ -187,7 +183,6 @@ class BaseAdapter(metaclass=AdapterMeta):
- truncate_relation
- rename_relation
- get_columns_in_relation
- get_column_schema_from_query
- expand_column_types
- list_relations_without_caching
- is_cancelable
@@ -214,14 +209,6 @@ class BaseAdapter(metaclass=AdapterMeta):
# for use in materializations
AdapterSpecificConfigs: Type[AdapterConfig] = AdapterConfig
CONSTRAINT_SUPPORT = {
ConstraintType.check: ConstraintSupport.NOT_SUPPORTED,
ConstraintType.not_null: ConstraintSupport.ENFORCED,
ConstraintType.unique: ConstraintSupport.NOT_ENFORCED,
ConstraintType.primary_key: ConstraintSupport.NOT_ENFORCED,
ConstraintType.foreign_key: ConstraintSupport.ENFORCED,
}
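The `CONSTRAINT_SUPPORT` mapping lets an adapter declare, per constraint type, whether the database enforces a constraint, accepts it without enforcing it, or rejects it outright. A hypothetical consumer of such a mapping (string keys stand in for dbt's `ConstraintType` enum) might warn or raise accordingly:

```python
from enum import Enum

class ConstraintSupport(str, Enum):
    ENFORCED = "enforced"
    NOT_ENFORCED = "not_enforced"
    NOT_SUPPORTED = "not_supported"

# Hypothetical per-adapter declaration, mirroring the mapping above.
CONSTRAINT_SUPPORT = {
    "check": ConstraintSupport.NOT_SUPPORTED,
    "not_null": ConstraintSupport.ENFORCED,
    "unique": ConstraintSupport.NOT_ENFORCED,
}

def check_constraint(constraint_type: str) -> None:
    support = CONSTRAINT_SUPPORT.get(constraint_type)
    if support is ConstraintSupport.NOT_SUPPORTED:
        raise ValueError(f"constraint {constraint_type!r} is not supported")
    if support is ConstraintSupport.NOT_ENFORCED:
        print(f"warning: {constraint_type!r} is accepted but not enforced")

check_constraint("unique")  # warning: 'unique' is accepted but not enforced
```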
def __init__(self, config):
self.config = config
self.cache = RelationsCache()
@@ -256,7 +243,9 @@ class BaseAdapter(metaclass=AdapterMeta):
return conn.name
@contextmanager
def connection_named(self, name: str, node: Optional[ResultNode] = None) -> Iterator[None]:
def connection_named(
self, name: str, node: Optional[CompileResultNode] = None
) -> Iterator[None]:
try:
if self.connections.query_header is not None:
self.connections.query_header.set(name, node)
@@ -268,13 +257,13 @@ class BaseAdapter(metaclass=AdapterMeta):
self.connections.query_header.reset()
@contextmanager
def connection_for(self, node: ResultNode) -> Iterator[None]:
def connection_for(self, node: CompileResultNode) -> Iterator[None]:
with self.connection_named(node.unique_id, node):
yield
@available.parse(lambda *a, **k: ("", empty_table()))
def execute(
self, sql: str, auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None
self, sql: str, auto_begin: bool = False, fetch: bool = False
) -> Tuple[AdapterResponse, agate.Table]:
"""Execute the given SQL. This is a thin wrapper around
ConnectionManager.execute.
@@ -283,35 +272,10 @@ class BaseAdapter(metaclass=AdapterMeta):
:param bool auto_begin: If set, and dbt is not currently inside a
transaction, automatically begin one.
:param bool fetch: If set, fetch results.
:param Optional[int] limit: If set, only fetch n number of rows
:return: A tuple of the query status and results (empty if fetch=False).
:rtype: Tuple[AdapterResponse, agate.Table]
"""
return self.connections.execute(sql=sql, auto_begin=auto_begin, fetch=fetch, limit=limit)
def validate_sql(self, sql: str) -> AdapterResponse:
"""Submit the given SQL to the engine for validation, but not execution.
This should throw an appropriate exception if the input SQL is invalid, although
in practice that will generally be handled by delegating to an existing method
for execution and allowing the error handler to take care of the rest.
:param str sql: The sql to validate
"""
raise NotImplementedError("`validate_sql` is not implemented for this adapter!")
@available.parse(lambda *a, **k: [])
def get_column_schema_from_query(self, sql: str) -> List[BaseColumn]:
"""Get a list of the Columns with names and data types from the given sql."""
_, cursor = self.connections.add_select_query(sql)
columns = [
self.Column.create(
column_name, self.connections.data_type_code_to_name(column_type_code)
)
# https://peps.python.org/pep-0249/#description
for column_name, column_type_code, *_ in cursor.description
]
return columns
return self.connections.execute(sql=sql, auto_begin=auto_begin, fetch=fetch)
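`get_column_schema_from_query` leans on PEP 249's `cursor.description`, whose entries begin with `(name, type_code, ...)`. The same extraction can be demonstrated against any DB-API driver; here with stdlib `sqlite3`, which reports no type codes (they come back as `None`), whereas warehouse drivers populate them:

```python
import sqlite3

conn = sqlite3.connect(":memory:")
cursor = conn.execute("SELECT 1 AS id, 'a' AS label")

# Each description entry is a 7-tuple; only the first two fields matter here.
columns = [(name, type_code) for name, type_code, *_ in cursor.description]
print(columns)  # [('id', None), ('label', None)]
```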
@available.parse(lambda *a, **k: ("", empty_table()))
def get_partitions_metadata(self, table: str) -> Tuple[agate.Table]:
@@ -395,7 +359,7 @@ class BaseAdapter(metaclass=AdapterMeta):
return {
self.Relation.create_from(self.config, node).without_identifier()
for node in manifest.nodes.values()
if (node.is_relational and not node.is_ephemeral_model and not node.is_external_node)
if (node.is_relational and not node.is_ephemeral_model)
}
def _get_catalog_schemas(self, manifest: Manifest) -> SchemaSearchMap:
@@ -408,7 +372,7 @@ class BaseAdapter(metaclass=AdapterMeta):
lowercase strings.
"""
info_schema_name_map = SchemaSearchMap()
nodes: Iterator[ResultNode] = chain(
nodes: Iterator[CompileResultNode] = chain(
[
node
for node in manifest.nodes.values()
@@ -426,7 +390,7 @@ class BaseAdapter(metaclass=AdapterMeta):
return info_schema_name_map
def _relations_cache_for_schemas(
self, manifest: Manifest, cache_schemas: Optional[Set[BaseRelation]] = None
self, manifest: Manifest, cache_schemas: Set[BaseRelation] = None
) -> None:
"""Populate the relations cache for the given schemas. Returns an
iterable of the schemas populated, as strings.
@@ -462,7 +426,7 @@ class BaseAdapter(metaclass=AdapterMeta):
self,
manifest: Manifest,
clear: bool = False,
required_schemas: Optional[Set[BaseRelation]] = None,
required_schemas: Set[BaseRelation] = None,
) -> None:
"""Run a query that gets a populated cache of the relations in the
database and set the cache on this adapter.
@@ -477,7 +441,7 @@ class BaseAdapter(metaclass=AdapterMeta):
"""Cache a new relation in dbt. It will show up in `list relations`."""
if relation is None:
name = self.nice_connection_name()
raise NullRelationCacheAttemptedError(name)
raise_compiler_error("Attempted to cache a null relation for {}".format(name))
self.cache.add(relation)
# so jinja doesn't render things
return ""
@@ -489,7 +453,7 @@ class BaseAdapter(metaclass=AdapterMeta):
"""
if relation is None:
name = self.nice_connection_name()
raise NullRelationDropAttemptedError(name)
raise_compiler_error("Attempted to drop a null relation for {}".format(name))
self.cache.drop(relation)
return ""
@@ -506,7 +470,9 @@ class BaseAdapter(metaclass=AdapterMeta):
name = self.nice_connection_name()
src_name = _relation_name(from_relation)
dst_name = _relation_name(to_relation)
raise RenameToNoneAttemptedError(src_name, dst_name, name)
raise_compiler_error(
"Attempted to rename {} to {} for {}".format(src_name, dst_name, name)
)
self.cache.rename(from_relation, to_relation)
return ""
@@ -518,12 +484,12 @@ class BaseAdapter(metaclass=AdapterMeta):
@abc.abstractmethod
def date_function(cls) -> str:
"""Get the date function used by this adapter's database."""
raise NotImplementedError("`date_function` is not implemented for this adapter!")
raise NotImplementedException("`date_function` is not implemented for this adapter!")
@classmethod
@abc.abstractmethod
def is_cancelable(cls) -> bool:
raise NotImplementedError("`is_cancelable` is not implemented for this adapter!")
raise NotImplementedException("`is_cancelable` is not implemented for this adapter!")
###
# Abstract methods about schemas
@@ -531,7 +497,7 @@ class BaseAdapter(metaclass=AdapterMeta):
@abc.abstractmethod
def list_schemas(self, database: str) -> List[str]:
"""Get a list of existing schemas in database"""
raise NotImplementedError("`list_schemas` is not implemented for this adapter!")
raise NotImplementedException("`list_schemas` is not implemented for this adapter!")
@available.parse(lambda *a, **k: False)
def check_schema_exists(self, database: str, schema: str) -> bool:
@@ -554,13 +520,13 @@ class BaseAdapter(metaclass=AdapterMeta):
*Implementors must call self.cache.drop() to preserve cache state!*
"""
raise NotImplementedError("`drop_relation` is not implemented for this adapter!")
raise NotImplementedException("`drop_relation` is not implemented for this adapter!")
@abc.abstractmethod
@available.parse_none
def truncate_relation(self, relation: BaseRelation) -> None:
"""Truncate the given relation."""
raise NotImplementedError("`truncate_relation` is not implemented for this adapter!")
raise NotImplementedException("`truncate_relation` is not implemented for this adapter!")
@abc.abstractmethod
@available.parse_none
@@ -569,13 +535,15 @@ class BaseAdapter(metaclass=AdapterMeta):
Implementors must call self.cache.rename() to preserve cache state.
"""
raise NotImplementedError("`rename_relation` is not implemented for this adapter!")
raise NotImplementedException("`rename_relation` is not implemented for this adapter!")
@abc.abstractmethod
@available.parse_list
def get_columns_in_relation(self, relation: BaseRelation) -> List[BaseColumn]:
"""Get a list of the columns in the given Relation."""
raise NotImplementedError("`get_columns_in_relation` is not implemented for this adapter!")
raise NotImplementedException(
"`get_columns_in_relation` is not implemented for this adapter!"
)
@available.deprecated("get_columns_in_relation", lambda *a, **k: [])
def get_columns_in_table(self, schema: str, identifier: str) -> List[BaseColumn]:
@@ -597,7 +565,7 @@ class BaseAdapter(metaclass=AdapterMeta):
:param self.Relation current: A relation that currently exists in the
database with columns of unspecified types.
"""
raise NotImplementedError(
raise NotImplementedException(
"`expand_target_column_types` is not implemented for this adapter!"
)
@@ -612,7 +580,7 @@ class BaseAdapter(metaclass=AdapterMeta):
:return: The relations in schema
:rtype: List[self.Relation]
"""
raise NotImplementedError(
raise NotImplementedException(
"`list_relations_without_caching` is not implemented for this adapter!"
)
@@ -654,7 +622,7 @@ class BaseAdapter(metaclass=AdapterMeta):
to_relation.
"""
if not isinstance(from_relation, self.Relation):
raise MacroArgTypeError(
invalid_type_error(
method_name="get_missing_columns",
arg_name="from_relation",
got_value=from_relation,
@@ -662,7 +630,7 @@ class BaseAdapter(metaclass=AdapterMeta):
)
if not isinstance(to_relation, self.Relation):
raise MacroArgTypeError(
invalid_type_error(
method_name="get_missing_columns",
arg_name="to_relation",
got_value=to_relation,
@@ -683,11 +651,11 @@ class BaseAdapter(metaclass=AdapterMeta):
expected columns.
:param Relation relation: The relation to check
:raises InvalidMacroArgType: If the columns are
:raises CompilationException: If the columns are
incorrect.
"""
if not isinstance(relation, self.Relation):
raise MacroArgTypeError(
invalid_type_error(
method_name="valid_snapshot_target",
arg_name="relation",
got_value=relation,
@@ -708,16 +676,24 @@ class BaseAdapter(metaclass=AdapterMeta):
if missing:
if extra:
raise SnapshotTargetIncompleteError(extra, missing)
msg = (
'Snapshot target has ("{}") but not ("{}") - is it an '
"unmigrated previous version archive?".format(
'", "'.join(extra), '", "'.join(missing)
)
)
else:
raise SnapshotTargetNotSnapshotTableError(missing)
msg = 'Snapshot target is not a snapshot table (missing "{}")'.format(
'", "'.join(missing)
)
raise_compiler_error(msg)
@available.parse_none
def expand_target_column_types(
self, from_relation: BaseRelation, to_relation: BaseRelation
) -> None:
if not isinstance(from_relation, self.Relation):
raise MacroArgTypeError(
invalid_type_error(
method_name="expand_target_column_types",
arg_name="from_relation",
got_value=from_relation,
@@ -725,7 +701,7 @@ class BaseAdapter(metaclass=AdapterMeta):
)
if not isinstance(to_relation, self.Relation):
raise MacroArgTypeError(
invalid_type_error(
method_name="expand_target_column_types",
arg_name="to_relation",
got_value=to_relation,
@@ -748,23 +724,11 @@ class BaseAdapter(metaclass=AdapterMeta):
# we can't build the relations cache because we don't have a
# manifest so we can't run any operations.
relations = self.list_relations_without_caching(schema_relation)
# if the cache is already populated, add this schema in
# otherwise, skip updating the cache and just ignore
if self.cache:
for relation in relations:
self.cache.add(relation)
if not relations:
# it's possible that there were no relations in some schemas. We want
# to insert the schemas we query into the cache's `.schemas` attribute
# so we can check it later
self.cache.update_schemas([(database, schema)])
fire_event(
ListRelations(
database=cast_to_str(database),
schema=schema,
relations=[_make_ref_key_dict(x) for x in relations],
relations=[_make_ref_key_msg(x) for x in relations],
)
)
@@ -796,6 +760,7 @@ class BaseAdapter(metaclass=AdapterMeta):
schema: str,
identifier: str,
) -> List[BaseRelation]:
matches = []
search = self._make_match_kwargs(database, schema, identifier)
@@ -818,7 +783,7 @@ class BaseAdapter(metaclass=AdapterMeta):
"schema": schema,
"database": database,
}
raise RelationReturnedMultipleResultsError(kwargs, matches)
get_relation_returned_multiple_results(kwargs, matches)
elif matches:
return matches[0]
@@ -840,20 +805,20 @@ class BaseAdapter(metaclass=AdapterMeta):
@available.parse_none
def create_schema(self, relation: BaseRelation):
"""Create the given schema if it does not exist."""
raise NotImplementedError("`create_schema` is not implemented for this adapter!")
raise NotImplementedException("`create_schema` is not implemented for this adapter!")
@abc.abstractmethod
@available.parse_none
def drop_schema(self, relation: BaseRelation):
"""Drop the given schema (and everything in it) if it exists."""
raise NotImplementedError("`drop_schema` is not implemented for this adapter!")
raise NotImplementedException("`drop_schema` is not implemented for this adapter!")
@available
@classmethod
@abc.abstractmethod
def quote(cls, identifier: str) -> str:
"""Quote the given identifier, as appropriate for the database."""
raise NotImplementedError("`quote` is not implemented for this adapter!")
raise NotImplementedException("`quote` is not implemented for this adapter!")
@available
def quote_as_configured(self, identifier: str, quote_key: str) -> str:
@@ -882,7 +847,10 @@ class BaseAdapter(metaclass=AdapterMeta):
elif quote_config is None:
pass
else:
raise QuoteConfigTypeError(quote_config)
raise_compiler_error(
f'The seed configuration value of "quote_columns" has an '
f"invalid type {type(quote_config)}"
)
if quote_columns:
return self.quote(column)
@@ -903,7 +871,7 @@ class BaseAdapter(metaclass=AdapterMeta):
:param col_idx: The index into the agate table for the column.
:return: The name of the type in the database
"""
raise NotImplementedError("`convert_text_type` is not implemented for this adapter!")
raise NotImplementedException("`convert_text_type` is not implemented for this adapter!")
@classmethod
@abc.abstractmethod
@@ -915,7 +883,7 @@ class BaseAdapter(metaclass=AdapterMeta):
:param col_idx: The index into the agate table for the column.
:return: The name of the type in the database
"""
raise NotImplementedError("`convert_number_type` is not implemented for this adapter!")
raise NotImplementedException("`convert_number_type` is not implemented for this adapter!")
@classmethod
@abc.abstractmethod
@@ -927,7 +895,9 @@ class BaseAdapter(metaclass=AdapterMeta):
:param col_idx: The index into the agate table for the column.
:return: The name of the type in the database
"""
raise NotImplementedError("`convert_boolean_type` is not implemented for this adapter!")
raise NotImplementedException(
"`convert_boolean_type` is not implemented for this adapter!"
)
@classmethod
@abc.abstractmethod
@@ -939,7 +909,9 @@ class BaseAdapter(metaclass=AdapterMeta):
:param col_idx: The index into the agate table for the column.
:return: The name of the type in the database
"""
raise NotImplementedError("`convert_datetime_type` is not implemented for this adapter!")
raise NotImplementedException(
"`convert_datetime_type` is not implemented for this adapter!"
)
@classmethod
@abc.abstractmethod
@@ -951,7 +923,7 @@ class BaseAdapter(metaclass=AdapterMeta):
:param col_idx: The index into the agate table for the column.
:return: The name of the type in the database
"""
raise NotImplementedError("`convert_date_type` is not implemented for this adapter!")
raise NotImplementedException("`convert_date_type` is not implemented for this adapter!")
@classmethod
@abc.abstractmethod
@@ -963,7 +935,7 @@ class BaseAdapter(metaclass=AdapterMeta):
:param col_idx: The index into the agate table for the column.
:return: The name of the type in the database
"""
raise NotImplementedError("`convert_time_type` is not implemented for this adapter!")
raise NotImplementedException("`convert_time_type` is not implemented for this adapter!")
@available
@classmethod
@@ -996,9 +968,9 @@ class BaseAdapter(metaclass=AdapterMeta):
manifest: Optional[Manifest] = None,
project: Optional[str] = None,
context_override: Optional[Dict[str, Any]] = None,
kwargs: Optional[Dict[str, Any]] = None,
kwargs: Dict[str, Any] = None,
text_only_columns: Optional[Iterable[str]] = None,
) -> AttrDict:
) -> agate.Table:
"""Look macro_name up in the manifest and execute its results.
:param macro_name: The name of the macro to execute.
@@ -1030,7 +1002,7 @@ class BaseAdapter(metaclass=AdapterMeta):
else:
package_name = 'the "{}" package'.format(project)
raise DbtRuntimeError(
raise RuntimeException(
'dbt could not find a macro with the name "{}" in {}'.format(
macro_name, package_name
)
@@ -1073,6 +1045,7 @@ class BaseAdapter(metaclass=AdapterMeta):
schemas: Set[str],
manifest: Manifest,
) -> agate.Table:
kwargs = {"information_schema": information_schema, "schemas": schemas}
table = self.execute_macro(
GET_CATALOG_MACRO_NAME,
@@ -1082,7 +1055,7 @@ class BaseAdapter(metaclass=AdapterMeta):
manifest=manifest,
)
results = self._catalog_filter_table(table, manifest) # type: ignore[arg-type]
results = self._catalog_filter_table(table, manifest)
return results
def get_catalog(self, manifest: Manifest) -> Tuple[agate.Table, List[Exception]]:
@@ -1114,7 +1087,7 @@ class BaseAdapter(metaclass=AdapterMeta):
loaded_at_field: str,
filter: Optional[str],
manifest: Optional[Manifest] = None,
) -> Tuple[Optional[AdapterResponse], Dict[str, Any]]:
) -> Dict[str, Any]:
"""Calculate the freshness of sources in dbt, and return it"""
kwargs: Dict[str, Any] = {
"source": source,
@@ -1123,23 +1096,15 @@ class BaseAdapter(metaclass=AdapterMeta):
}
# run the macro
# in older versions of dbt-core, the 'collect_freshness' macro returned the table of results directly
# starting in v1.5, by default, we return both the table and the adapter response (metadata about the query)
result: Union[
AttrDict, # current: contains AdapterResponse + agate.Table
agate.Table, # previous: just table
]
result = self.execute_macro(FRESHNESS_MACRO_NAME, kwargs=kwargs, manifest=manifest)
if isinstance(result, agate.Table):
deprecations.warn("collect-freshness-return-signature")
adapter_response = None
table = result
else:
adapter_response, table = result.response, result.table # type: ignore[attr-defined]
table = self.execute_macro(FRESHNESS_MACRO_NAME, kwargs=kwargs, manifest=manifest)
# now we have a 1-row table of the maximum `loaded_at_field` value and
# the current time according to the db.
if len(table) != 1 or len(table[0]) != 2:
raise MacroResultError(FRESHNESS_MACRO_NAME, table)
raise_compiler_error(
'Got an invalid result from "{}" macro: {}'.format(
FRESHNESS_MACRO_NAME, [tuple(r) for r in table]
)
)
if table[0][0] is None:
# no records in the table, so really the max_loaded_at was
# infinitely long ago. Just call it 0:00 January 1 year UTC
@@ -1149,12 +1114,11 @@ class BaseAdapter(metaclass=AdapterMeta):
snapshotted_at = _utc(table[0][1], source, loaded_at_field)
age = (snapshotted_at - max_loaded_at).total_seconds()
freshness = {
return {
"max_loaded_at": max_loaded_at,
"snapshotted_at": snapshotted_at,
"age": age,
}
return adapter_response, freshness
def pre_model_hook(self, config: Mapping[str, Any]) -> Any:
"""A hook for running some operation before the model materialization
@@ -1217,7 +1181,7 @@ class BaseAdapter(metaclass=AdapterMeta):
elif location == "prepend":
return f"'{value}' || {add_to}"
else:
raise DbtRuntimeError(f'Got an unexpected location value of "{location}"')
raise RuntimeException(f'Got an unexpected location value of "{location}"')
def get_rows_different_sql(
self,
@@ -1275,7 +1239,7 @@ class BaseAdapter(metaclass=AdapterMeta):
return self.generate_python_submission_response(submission_result)
def generate_python_submission_response(self, submission_result: Any) -> AdapterResponse:
raise NotImplementedError(
raise NotImplementedException(
"Your adapter need to implement generate_python_submission_response"
)
@@ -1299,7 +1263,7 @@ class BaseAdapter(metaclass=AdapterMeta):
valid_strategies.append("default")
builtin_strategies = self.builtin_incremental_strategies()
if strategy in builtin_strategies and strategy not in valid_strategies:
raise DbtRuntimeError(
raise RuntimeException(
f"The incremental strategy '{strategy}' is not valid for this adapter"
)
@@ -1307,7 +1271,7 @@ class BaseAdapter(metaclass=AdapterMeta):
macro_name = f"get_incremental_{strategy}_sql"
# The model_context should have MacroGenerator callable objects for all macros
if macro_name not in model_context:
raise DbtRuntimeError(
raise RuntimeException(
'dbt could not find an incremental strategy macro with the name "{}" in {}'.format(
macro_name, self.config.project_name
)
@@ -1316,119 +1280,6 @@ class BaseAdapter(metaclass=AdapterMeta):
# This returns a callable macro
return model_context[macro_name]
@classmethod
def _parse_column_constraint(cls, raw_constraint: Dict[str, Any]) -> ColumnLevelConstraint:
try:
ColumnLevelConstraint.validate(raw_constraint)
return ColumnLevelConstraint.from_dict(raw_constraint)
except Exception:
raise DbtValidationError(f"Could not parse constraint: {raw_constraint}")
@classmethod
def render_column_constraint(cls, constraint: ColumnLevelConstraint) -> Optional[str]:
"""Render the given constraint as DDL text. Should be overriden by adapters which need custom constraint
rendering."""
constraint_expression = constraint.expression or ""
rendered_column_constraint = None
if constraint.type == ConstraintType.check and constraint_expression:
rendered_column_constraint = f"check ({constraint_expression})"
elif constraint.type == ConstraintType.not_null:
rendered_column_constraint = f"not null {constraint_expression}"
elif constraint.type == ConstraintType.unique:
rendered_column_constraint = f"unique {constraint_expression}"
elif constraint.type == ConstraintType.primary_key:
rendered_column_constraint = f"primary key {constraint_expression}"
elif constraint.type == ConstraintType.foreign_key and constraint_expression:
rendered_column_constraint = f"references {constraint_expression}"
elif constraint.type == ConstraintType.custom and constraint_expression:
rendered_column_constraint = constraint_expression
if rendered_column_constraint:
rendered_column_constraint = rendered_column_constraint.strip()
return rendered_column_constraint
@available
@classmethod
def render_raw_columns_constraints(cls, raw_columns: Dict[str, Dict[str, Any]]) -> List:
rendered_column_constraints = []
for v in raw_columns.values():
col_name = cls.quote(v["name"]) if v.get("quote") else v["name"]
rendered_column_constraint = [f"{col_name} {v['data_type']}"]
for con in v.get("constraints", None):
constraint = cls._parse_column_constraint(con)
c = cls.process_parsed_constraint(constraint, cls.render_column_constraint)
if c is not None:
rendered_column_constraint.append(c)
rendered_column_constraints.append(" ".join(rendered_column_constraint))
return rendered_column_constraints
@classmethod
def process_parsed_constraint(
cls, parsed_constraint: Union[ColumnLevelConstraint, ModelLevelConstraint], render_func
) -> Optional[str]:
if (
parsed_constraint.warn_unsupported
and cls.CONSTRAINT_SUPPORT[parsed_constraint.type] == ConstraintSupport.NOT_SUPPORTED
):
warn_or_error(
ConstraintNotSupported(constraint=parsed_constraint.type.value, adapter=cls.type())
)
if (
parsed_constraint.warn_unenforced
and cls.CONSTRAINT_SUPPORT[parsed_constraint.type] == ConstraintSupport.NOT_ENFORCED
):
warn_or_error(
ConstraintNotEnforced(constraint=parsed_constraint.type.value, adapter=cls.type())
)
if cls.CONSTRAINT_SUPPORT[parsed_constraint.type] != ConstraintSupport.NOT_SUPPORTED:
return render_func(parsed_constraint)
return None
@classmethod
def _parse_model_constraint(cls, raw_constraint: Dict[str, Any]) -> ModelLevelConstraint:
try:
ModelLevelConstraint.validate(raw_constraint)
c = ModelLevelConstraint.from_dict(raw_constraint)
return c
except Exception:
raise DbtValidationError(f"Could not parse constraint: {raw_constraint}")
@available
@classmethod
def render_raw_model_constraints(cls, raw_constraints: List[Dict[str, Any]]) -> List[str]:
return [c for c in map(cls.render_raw_model_constraint, raw_constraints) if c is not None]
@classmethod
def render_raw_model_constraint(cls, raw_constraint: Dict[str, Any]) -> Optional[str]:
constraint = cls._parse_model_constraint(raw_constraint)
return cls.process_parsed_constraint(constraint, cls.render_model_constraint)
@classmethod
def render_model_constraint(cls, constraint: ModelLevelConstraint) -> Optional[str]:
"""Render the given constraint as DDL text. Should be overriden by adapters which need custom constraint
rendering."""
constraint_prefix = f"constraint {constraint.name} " if constraint.name else ""
column_list = ", ".join(constraint.columns)
if constraint.type == ConstraintType.check and constraint.expression:
return f"{constraint_prefix}check ({constraint.expression})"
elif constraint.type == ConstraintType.unique:
constraint_expression = f" {constraint.expression}" if constraint.expression else ""
return f"{constraint_prefix}unique{constraint_expression} ({column_list})"
elif constraint.type == ConstraintType.primary_key:
constraint_expression = f" {constraint.expression}" if constraint.expression else ""
return f"{constraint_prefix}primary key{constraint_expression} ({column_list})"
elif constraint.type == ConstraintType.foreign_key and constraint.expression:
return f"{constraint_prefix}foreign key ({column_list}) references {constraint.expression}"
elif constraint.type == ConstraintType.custom and constraint.expression:
return f"{constraint_prefix}{constraint.expression}"
else:
return None
COLUMNS_EQUAL_SQL = """
with diff_count as (
@@ -1462,6 +1313,7 @@ join diff_count using (id)
def catch_as_completed(
futures, # typing: List[Future[agate.Table]]
) -> Tuple[agate.Table, List[Exception]]:
# catalogs: agate.Table = agate.Table(rows=[])
tables: List[agate.Table] = []
exceptions: List[Exception] = []

View File

@@ -1,17 +1,17 @@
from typing import List, Optional, Type
from dbt.adapters.base import Credentials
from dbt.exceptions import CompilationError
from dbt.exceptions import CompilationException
from dbt.adapters.protocol import AdapterProtocol
def project_name_from_path(include_path: str) -> str:
# avoid an import cycle
from dbt.config.project import PartialProject
from dbt.config.project import Project
partial = PartialProject.from_project_root(include_path)
partial = Project.partial_load(include_path)
if partial.project_name is None:
raise CompilationError(f"Invalid project at {include_path}: name not set!")
raise CompilationException(f"Invalid project at {include_path}: name not set!")
return partial.project_name

View File

@@ -5,9 +5,9 @@ from dbt.clients.jinja import QueryStringGenerator
from dbt.context.manifest import generate_query_header_context
from dbt.contracts.connection import AdapterRequiredConfig, QueryComment
from dbt.contracts.graph.nodes import ResultNode
from dbt.contracts.graph.compiled import CompileResultNode
from dbt.contracts.graph.manifest import Manifest
from dbt.exceptions import DbtRuntimeError
from dbt.exceptions import RuntimeException
class NodeWrapper:
@@ -48,7 +48,7 @@ class _QueryComment(local):
if isinstance(comment, str) and "*/" in comment:
# tell the user "no" so they don't hurt themselves by writing
# garbage
raise DbtRuntimeError(f'query comment contains illegal value "*/": {comment}')
raise RuntimeException(f'query comment contains illegal value "*/": {comment}')
self.query_comment = comment
self.append = append
@@ -90,7 +90,7 @@ class MacroQueryStringSetter:
def reset(self):
self.set("master", None)
def set(self, name: str, node: Optional[ResultNode]):
def set(self, name: str, node: Optional[CompileResultNode]):
wrapped: Optional[NodeWrapper] = None
if node is not None:
wrapped = NodeWrapper(node)

View File

@@ -1,8 +1,9 @@
from collections.abc import Hashable
from dataclasses import dataclass, field
from typing import Optional, TypeVar, Any, Type, Dict, Iterator, Tuple, Set
from dataclasses import dataclass
from typing import Optional, TypeVar, Any, Type, Dict, Union, Iterator, Tuple, Set
from dbt.contracts.graph.nodes import SourceDefinition, ManifestNode, ResultNode, ParsedNode
from dbt.contracts.graph.compiled import CompiledNode
from dbt.contracts.graph.parsed import ParsedSourceDefinition, ParsedNode
from dbt.contracts.relation import (
RelationType,
ComponentName,
@@ -11,11 +12,7 @@ from dbt.contracts.relation import (
Policy,
Path,
)
from dbt.exceptions import (
ApproximateMatchError,
DbtInternalError,
MultipleDatabasesNotAllowedError,
)
from dbt.exceptions import InternalException
from dbt.node_types import NodeType
from dbt.utils import filter_null_values, deep_merge, classproperty
@@ -30,10 +27,8 @@ class BaseRelation(FakeAPIObject, Hashable):
path: Path
type: Optional[RelationType] = None
quote_character: str = '"'
# Python 3.11 requires that these use default_factory instead of simple default
# ValueError: mutable default <class 'dbt.contracts.relation.Policy'> for field include_policy is not allowed: use default_factory
include_policy: Policy = field(default_factory=lambda: Policy())
quote_policy: Policy = field(default_factory=lambda: Policy())
include_policy: Policy = Policy()
quote_policy: Policy = Policy()
dbt_created: bool = False
def _is_exactish_match(self, field: ComponentName, value: str) -> bool:
@@ -44,9 +39,9 @@ class BaseRelation(FakeAPIObject, Hashable):
@classmethod
def _get_field_named(cls, field_name):
for f, _ in cls._get_fields():
if f.name == field_name:
return f
for field, _ in cls._get_fields():
if field.name == field_name:
return field
# this should be unreachable
raise ValueError(f"BaseRelation has no {field_name} field!")
@@ -57,11 +52,11 @@ class BaseRelation(FakeAPIObject, Hashable):
@classmethod
def get_default_quote_policy(cls) -> Policy:
return cls._get_field_named("quote_policy").default_factory()
return cls._get_field_named("quote_policy").default
@classmethod
def get_default_include_policy(cls) -> Policy:
return cls._get_field_named("include_policy").default_factory()
return cls._get_field_named("include_policy").default
def get(self, key, default=None):
"""Override `.get` to return a metadata object so we don't break
@@ -87,7 +82,7 @@ class BaseRelation(FakeAPIObject, Hashable):
if not search:
# nothing was passed in
raise dbt.exceptions.DbtRuntimeError(
raise dbt.exceptions.RuntimeException(
"Tried to match relation, but no search path was passed!"
)
@@ -104,7 +99,7 @@ class BaseRelation(FakeAPIObject, Hashable):
if approximate_match and not exact_match:
target = self.create(database=database, schema=schema, identifier=identifier)
raise ApproximateMatchError(target, self)
dbt.exceptions.approximate_relation_match(target, self)
return exact_match
@@ -189,7 +184,7 @@ class BaseRelation(FakeAPIObject, Hashable):
)
@classmethod
def create_from_source(cls: Type[Self], source: SourceDefinition, **kwargs: Any) -> Self:
def create_from_source(cls: Type[Self], source: ParsedSourceDefinition, **kwargs: Any) -> Self:
source_quoting = source.quoting.to_dict(omit_none=True)
source_quoting.pop("column", None)
quote_policy = deep_merge(
@@ -214,7 +209,7 @@ class BaseRelation(FakeAPIObject, Hashable):
def create_ephemeral_from_node(
cls: Type[Self],
config: HasQuoting,
node: ManifestNode,
node: Union[ParsedNode, CompiledNode],
) -> Self:
# Note that ephemeral models are based on the name.
identifier = cls.add_ephemeral_prefix(node.name)
@@ -227,7 +222,7 @@ class BaseRelation(FakeAPIObject, Hashable):
def create_from_node(
cls: Type[Self],
config: HasQuoting,
node,
node: Union[ParsedNode, CompiledNode],
quote_policy: Optional[Dict[str, bool]] = None,
**kwargs: Any,
) -> Self:
@@ -248,20 +243,20 @@ class BaseRelation(FakeAPIObject, Hashable):
def create_from(
cls: Type[Self],
config: HasQuoting,
node: ResultNode,
node: Union[CompiledNode, ParsedNode, ParsedSourceDefinition],
**kwargs: Any,
) -> Self:
if node.resource_type == NodeType.Source:
if not isinstance(node, SourceDefinition):
raise DbtInternalError(
"type mismatch, expected SourceDefinition but got {}".format(type(node))
if not isinstance(node, ParsedSourceDefinition):
raise InternalException(
"type mismatch, expected ParsedSourceDefinition but got {}".format(type(node))
)
return cls.create_from_source(node, **kwargs)
else:
# Can't use ManifestNode here because of parameterized generics
if not isinstance(node, (ParsedNode)):
raise DbtInternalError(
f"type mismatch, expected ManifestNode but got {type(node)}"
if not isinstance(node, (ParsedNode, CompiledNode)):
raise InternalException(
"type mismatch, expected ParsedNode or CompiledNode but "
"got {}".format(type(node))
)
return cls.create_from_node(config, node, **kwargs)
@@ -328,10 +323,6 @@ class BaseRelation(FakeAPIObject, Hashable):
def is_view(self) -> bool:
return self.type == RelationType.View
@property
def is_materialized_view(self) -> bool:
return self.type == RelationType.MaterializedView
@classproperty
def Table(cls) -> str:
return str(RelationType.Table)
@@ -348,10 +339,6 @@ class BaseRelation(FakeAPIObject, Hashable):
def External(cls) -> str:
return str(RelationType.External)
@classproperty
def MaterializedView(cls) -> str:
return str(RelationType.MaterializedView)
@classproperty
def get_relation_type(cls) -> Type[RelationType]:
return RelationType
@@ -366,7 +353,7 @@ class InformationSchema(BaseRelation):
def __post_init__(self):
if not isinstance(self.information_schema_view, (type(None), str)):
raise dbt.exceptions.CompilationError(
raise dbt.exceptions.CompilationException(
"Got an invalid name: {}".format(self.information_schema_view)
)
@@ -450,7 +437,7 @@ class SchemaSearchMap(Dict[InformationSchema, Set[Optional[str]]]):
if not allow_multiple_databases:
seen = {r.database.lower() for r in self if r.database}
if len(seen) > 1:
raise MultipleDatabasesNotAllowedError(seen)
dbt.exceptions.raise_compiler_error(str(seen))
for information_schema_name, schema in self.search():
path = {"database": information_schema_name.database, "schema": schema}

View File

@@ -1,22 +1,32 @@
import re
import threading
from copy import deepcopy
from typing import Any, Dict, Iterable, List, Optional, Set, Tuple
from dbt.adapters.reference_keys import (
_make_ref_key,
_make_ref_key_dict,
_make_ref_key_msg,
_make_msg_from_ref_key,
_ReferenceKey,
)
from dbt.exceptions import (
DependentLinkNotCachedError,
NewNameAlreadyInCacheError,
NoneRelationFoundError,
ReferencedLinkNotCachedError,
TruncatedModelNameCausedCollisionError,
)
import dbt.exceptions
from dbt.events.functions import fire_event, fire_event_if
from dbt.events.types import CacheAction, CacheDumpGraph
from dbt.flags import get_flags
from dbt.events.types import (
AddLink,
AddRelation,
DropCascade,
DropMissingRelation,
DropRelation,
DumpAfterAddGraph,
DumpAfterRenameSchema,
DumpBeforeAddGraph,
DumpBeforeRenameSchema,
RenameSchema,
TemporaryRelation,
UncachedRelation,
UpdateReference,
)
import dbt.flags as flags
from dbt.utils import lowercase
@@ -140,7 +150,11 @@ class _CachedRelation:
:raises InternalError: If the new key already exists.
"""
if new_key in self.referenced_by:
raise NewNameAlreadyInCacheError(old_key, new_key)
dbt.exceptions.raise_cache_inconsistent(
'in rename of "{}" -> "{}", new name is in the cache already'.format(
old_key, new_key
)
)
if old_key not in self.referenced_by:
return
@@ -229,7 +243,7 @@ class RelationsCache:
# self.relations or any cache entry's referenced_by during iteration
# it's a runtime error!
with self.lock:
return {dot_separated(k): str(v.dump_graph_entry()) for k, v in self.relations.items()}
return {dot_separated(k): v.dump_graph_entry() for k, v in self.relations.items()}
def _setdefault(self, relation: _CachedRelation):
"""Add a relation to the cache, or return it if it already exists.
@@ -256,17 +270,21 @@ class RelationsCache:
if referenced is None:
return
if referenced is None:
raise ReferencedLinkNotCachedError(referenced_key)
dbt.exceptions.raise_cache_inconsistent(
"in add_link, referenced link key {} not in cache!".format(referenced_key)
)
dependent = self.relations.get(dependent_key)
if dependent is None:
raise DependentLinkNotCachedError(dependent_key)
dbt.exceptions.raise_cache_inconsistent(
"in add_link, dependent link key {} not in cache!".format(dependent_key)
)
assert dependent is not None # we just raised!
referenced.add_reference(dependent)
# This is called in plugins/postgres/dbt/adapters/postgres/impl.py
# TODO: Is this dead code? I can't seem to find it grepping the codebase.
def add_link(self, referenced, dependent):
"""Add a link between two relations to the database. If either relation
does not exist, it will be added as an "external" relation.
@@ -288,9 +306,9 @@ class RelationsCache:
# referring to a table outside our control. There's no need to make
# a link - we will never drop the referenced relation during a run.
fire_event(
CacheAction(
ref_key=ref_key._asdict(),
ref_key_2=dep_key._asdict(),
UncachedRelation(
dep_key=_make_msg_from_ref_key(dep_key),
ref_key=_make_msg_from_ref_key(ref_key),
)
)
return
@@ -303,10 +321,8 @@ class RelationsCache:
dependent = dependent.replace(type=referenced.External)
self.add(dependent)
fire_event(
CacheAction(
action="add_link",
ref_key=dep_key._asdict(),
ref_key_2=ref_key._asdict(),
AddLink(
dep_key=_make_msg_from_ref_key(dep_key), ref_key=_make_msg_from_ref_key(ref_key)
)
)
with self.lock:
@@ -318,20 +334,13 @@ class RelationsCache:
:param BaseRelation relation: The underlying relation.
"""
flags = get_flags()
cached = _CachedRelation(relation)
fire_event_if(
flags.LOG_CACHE_EVENTS,
lambda: CacheDumpGraph(before_after="before", action="adding", dump=self.dump_graph()),
)
fire_event(CacheAction(action="add_relation", ref_key=_make_ref_key_dict(cached)))
fire_event(AddRelation(relation=_make_ref_key_msg(cached)))
fire_event_if(flags.LOG_CACHE_EVENTS, lambda: DumpBeforeAddGraph(dump=self.dump_graph()))
with self.lock:
self._setdefault(cached)
fire_event_if(
flags.LOG_CACHE_EVENTS,
lambda: CacheDumpGraph(before_after="after", action="adding", dump=self.dump_graph()),
)
fire_event_if(flags.LOG_CACHE_EVENTS, lambda: DumpAfterAddGraph(dump=self.dump_graph()))
def _remove_refs(self, keys):
"""Removes all references to all entries in keys. This does not
@@ -358,20 +367,17 @@ class RelationsCache:
:param str identifier: The identifier of the relation to drop.
"""
dropped_key = _make_ref_key(relation)
dropped_key_msg = _make_ref_key_dict(relation)
fire_event(CacheAction(action="drop_relation", ref_key=dropped_key_msg))
dropped_key_msg = _make_ref_key_msg(relation)
fire_event(DropRelation(dropped=dropped_key_msg))
with self.lock:
if dropped_key not in self.relations:
fire_event(CacheAction(action="drop_missing_relation", ref_key=dropped_key_msg))
fire_event(DropMissingRelation(relation=dropped_key_msg))
return
consequences = self.relations[dropped_key].collect_consequences()
# convert from a list of _ReferenceKeys to a list of ReferenceKeyMsgs
consequence_msgs = [key._asdict() for key in consequences]
fire_event(
CacheAction(
action="drop_cascade", ref_key=dropped_key_msg, ref_list=consequence_msgs
)
)
consequence_msgs = [_make_msg_from_ref_key(key) for key in consequences]
fire_event(DropCascade(dropped=dropped_key_msg, consequences=consequence_msgs))
self._remove_refs(consequences)
def _rename_relation(self, old_key, new_relation):
@@ -394,14 +400,12 @@ class RelationsCache:
for cached in self.relations.values():
if cached.is_referenced_by(old_key):
fire_event(
CacheAction(
action="update_reference",
ref_key=_make_ref_key_dict(old_key),
ref_key_2=_make_ref_key_dict(new_key),
ref_key_3=_make_ref_key_dict(cached.key()),
UpdateReference(
old_key=_make_ref_key_msg(old_key),
new_key=_make_ref_key_msg(new_key),
cached_key=_make_ref_key_msg(cached.key()),
)
)
cached.rename_key(old_key, new_key)
self.relations[new_key] = relation
@@ -426,10 +430,27 @@ class RelationsCache:
if new_key in self.relations:
# Tell user when collision caused by model names truncated during
# materialization.
raise TruncatedModelNameCausedCollisionError(new_key, self.relations)
match = re.search("__dbt_backup|__dbt_tmp$", new_key.identifier)
if match:
truncated_model_name_prefix = new_key.identifier[: match.start()]
message_addendum = (
"\n\nName collisions can occur when the length of two "
"models' names approach your database's builtin limit. "
"Try restructuring your project such that no two models "
"share the prefix '{}'.".format(truncated_model_name_prefix)
+ " Then, clean your warehouse of any removed models."
)
else:
message_addendum = ""
dbt.exceptions.raise_cache_inconsistent(
"in rename, new key {} already in cache: {}{}".format(
new_key, list(self.relations.keys()), message_addendum
)
)
if old_key not in self.relations:
fire_event(CacheAction(action="temporary_relation", ref_key=old_key._asdict()))
fire_event(TemporaryRelation(key=_make_msg_from_ref_key(old_key)))
return False
return True
@@ -448,16 +469,13 @@ class RelationsCache:
old_key = _make_ref_key(old)
new_key = _make_ref_key(new)
fire_event(
CacheAction(
action="rename_relation",
ref_key=old_key._asdict(),
ref_key_2=new_key._asdict(),
RenameSchema(
old_key=_make_msg_from_ref_key(old_key), new_key=_make_msg_from_ref_key(new)
)
)
flags = get_flags()
fire_event_if(
flags.LOG_CACHE_EVENTS,
lambda: CacheDumpGraph(before_after="before", action="rename", dump=self.dump_graph()),
flags.LOG_CACHE_EVENTS, lambda: DumpBeforeRenameSchema(dump=self.dump_graph())
)
with self.lock:
@@ -467,8 +485,7 @@ class RelationsCache:
self._setdefault(_CachedRelation(new))
fire_event_if(
flags.LOG_CACHE_EVENTS,
lambda: CacheDumpGraph(before_after="after", action="rename", dump=self.dump_graph()),
flags.LOG_CACHE_EVENTS, lambda: DumpAfterRenameSchema(dump=self.dump_graph())
)
def get_relations(self, database: Optional[str], schema: Optional[str]) -> List[Any]:
@@ -488,7 +505,9 @@ class RelationsCache:
]
if None in results:
raise NoneRelationFoundError()
dbt.exceptions.raise_cache_inconsistent(
"in get_relations, a None relation was found in the cache!"
)
return results
def clear(self):

View File

@@ -9,11 +9,10 @@ from dbt.adapters.base.plugin import AdapterPlugin
from dbt.adapters.protocol import AdapterConfig, AdapterProtocol, RelationProtocol
from dbt.contracts.connection import AdapterRequiredConfig, Credentials
from dbt.events.functions import fire_event
from dbt.events.types import AdapterImportError, PluginLoadError, AdapterRegistered
from dbt.exceptions import DbtInternalError, DbtRuntimeError
from dbt.events.types import AdapterImportError, PluginLoadError
from dbt.exceptions import InternalException, RuntimeException
from dbt.include.global_project import PACKAGE_PATH as GLOBAL_PROJECT_PATH
from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME
from dbt.semver import VersionSpecifier
Adapter = AdapterProtocol
@@ -35,7 +34,7 @@ class AdapterContainer:
names = ", ".join(self.plugins.keys())
message = f"Invalid adapter type {name}! Must be one of {names}"
raise DbtRuntimeError(message)
raise RuntimeException(message)
def get_adapter_class_by_name(self, name: str) -> Type[Adapter]:
plugin = self.get_plugin_by_name(name)
@@ -61,7 +60,7 @@ class AdapterContainer:
# the user about it via a runtime error
if exc.name == "dbt.adapters." + name:
fire_event(AdapterImportError(exc=str(exc)))
raise DbtRuntimeError(f"Could not find adapter type {name}!")
raise RuntimeException(f"Could not find adapter type {name}!")
# otherwise, the error had to have come from some underlying
# library. Log the stack trace.
@@ -71,7 +70,7 @@ class AdapterContainer:
plugin_type = plugin.adapter.type()
if plugin_type != name:
raise DbtRuntimeError(
raise RuntimeException(
f"Expected to find adapter with type named {name}, got "
f"adapter with type {plugin_type}"
)
@@ -90,13 +89,7 @@ class AdapterContainer:
def register_adapter(self, config: AdapterRequiredConfig) -> None:
adapter_name = config.credentials.type
adapter_type = self.get_adapter_class_by_name(adapter_name)
adapter_version = import_module(f".{adapter_name}.__version__", "dbt.adapters").version
adapter_version_specifier = VersionSpecifier.from_version_string(
adapter_version
).to_version_string()
fire_event(
AdapterRegistered(adapter_name=adapter_name, adapter_version=adapter_version_specifier)
)
with self.lock:
if adapter_name in self.adapters:
# this shouldn't really happen...
@@ -139,7 +132,7 @@ class AdapterContainer:
try:
plugin = self.plugins[plugin_name]
except KeyError:
raise DbtInternalError(f"No plugin found for {plugin_name}") from None
raise InternalException(f"No plugin found for {plugin_name}") from None
plugins.append(plugin)
seen.add(plugin_name)
for dep in plugin.dependencies:
@@ -158,16 +151,13 @@ class AdapterContainer:
try:
path = self.packages[package_name]
except KeyError:
raise DbtInternalError(f"No internal package listing found for {package_name}")
raise InternalException(f"No internal package listing found for {package_name}")
paths.append(path)
return paths
def get_adapter_type_names(self, name: Optional[str]) -> List[str]:
return [p.adapter.type() for p in self.get_adapter_plugins(name)]
def get_adapter_constraint_support(self, name: Optional[str]) -> List[str]:
return self.lookup_adapter(name).CONSTRAINT_SUPPORT # type: ignore
FACTORY: AdapterContainer = AdapterContainer()
@@ -224,10 +214,6 @@ def get_adapter_type_names(name: Optional[str]) -> List[str]:
return FACTORY.get_adapter_type_names(name)
def get_adapter_constraint_support(name: Optional[str]) -> List[str]:
return FACTORY.get_adapter_constraint_support(name)
@contextmanager
def adapter_management():
reset_adapters()

View File

@@ -8,6 +8,7 @@ from typing import (
Generic,
TypeVar,
Tuple,
Union,
Dict,
Any,
)
@@ -16,7 +17,8 @@ from typing_extensions import Protocol
import agate
from dbt.contracts.connection import Connection, AdapterRequiredConfig, AdapterResponse
from dbt.contracts.graph.nodes import ResultNode, ManifestNode
from dbt.contracts.graph.compiled import CompiledNode, ManifestNode, NonSourceCompiledNode
from dbt.contracts.graph.parsed import ParsedNode, ParsedSourceDefinition
from dbt.contracts.graph.model_config import BaseConfig
from dbt.contracts.graph.manifest import Manifest
from dbt.contracts.relation import Policy, HasQuoting
@@ -46,7 +48,11 @@ class RelationProtocol(Protocol):
...
@classmethod
def create_from(cls: Type[Self], config: HasQuoting, node: ResultNode) -> Self:
def create_from(
cls: Type[Self],
config: HasQuoting,
node: Union[CompiledNode, ParsedNode, ParsedSourceDefinition],
) -> Self:
...
@@ -59,7 +65,7 @@ class CompilerProtocol(Protocol):
node: ManifestNode,
manifest: Manifest,
extra_context: Optional[Dict[str, Any]] = None,
) -> ManifestNode:
) -> NonSourceCompiledNode:
...

View File

@@ -2,6 +2,7 @@
from collections import namedtuple
from typing import Any, Optional
from dbt.events.proto_types import ReferenceKeyMsg
_ReferenceKey = namedtuple("_ReferenceKey", "database schema identifier")
@@ -29,9 +30,11 @@ def _make_ref_key(relation: Any) -> _ReferenceKey:
)
def _make_ref_key_dict(relation: Any):
return {
"database": relation.database,
"schema": relation.schema,
"identifier": relation.identifier,
}
def _make_ref_key_msg(relation: Any):
return _make_msg_from_ref_key(_make_ref_key(relation))
def _make_msg_from_ref_key(ref_key: _ReferenceKey) -> ReferenceKeyMsg:
return ReferenceKeyMsg(
database=ref_key.database, schema=ref_key.schema, identifier=ref_key.identifier
)

View File

@@ -1,25 +0,0 @@
# RelationConfig
This package serves as an initial abstraction for managing the inspection of existing relations and determining
changes on those relations. It arose from the materialized view work and currently supports only
materialized views for Postgres and Redshift, as well as dynamic tables for Snowflake. There are three main
classes in this package.
## RelationConfigBase
This is a very small class that only has a `from_dict()` method and a default `NotImplementedError()`. At some
point this could be replaced by a more robust framework, like `mashumaro` or `pydantic`.
## RelationConfigChange
This class inherits from `RelationConfigBase`; however, it's best thought of as a separate class. The subclassing
merely points to the idea that both classes would likely inherit from the same class in a `mashumaro` or
`pydantic` implementation. This class is much more restricted in its attributes. It should really only
ever need an `action` and a `context`. This can be thought of as being analogous to a web request. You need to
know what you're doing (`action`: 'create' = GET, 'drop' = DELETE, etc.) and the information (`context`) needed
to make the change. In our scenarios, the context tends to be an instance of `RelationConfigBase` corresponding
to the new state.
## RelationConfigValidationMixin
This mixin provides optional validation mechanics that can be applied to either `RelationConfigBase` or
`RelationConfigChange` subclasses. A validation rule is a combination of a `validation_check`, something
that should evaluate to `True`, and an optional `validation_error`, an instance of `DbtRuntimeError`
that should be raised if the `validation_check` fails. While optional, it's recommended to provide the
`validation_error`, since it gives the end user a much clearer error message.
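To make the three classes concrete, here is a minimal sketch of how they compose. The `ExampleIndexConfig` class and its fields are hypothetical and exist only for illustration:
```python
from dataclasses import dataclass
from typing import Optional, Set

from dbt.adapters.relation_configs import (
    RelationConfigBase,
    RelationConfigValidationMixin,
    RelationConfigValidationRule,
)
from dbt.exceptions import DbtRuntimeError


@dataclass(frozen=True)
class ExampleIndexConfig(RelationConfigBase, RelationConfigValidationMixin):
    # hypothetical attributes, for illustration only
    name: Optional[str] = None
    unique: bool = False

    @property
    def validation_rules(self) -> Set[RelationConfigValidationRule]:
        # each rule pairs a check that should be True with an optional error
        return {
            RelationConfigValidationRule(
                validation_check=self.name is not None,
                validation_error=DbtRuntimeError("An index config requires a name."),
            ),
        }


# `from_dict` drops None values, so `unique` falls back to its dataclass default;
# validation runs in `__post_init__` and raises the error above if `name` is missing
index = ExampleIndexConfig.from_dict({"name": "index_a", "unique": None})
```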

View File

@@ -1,12 +0,0 @@
from dbt.adapters.relation_configs.config_base import ( # noqa: F401
RelationConfigBase,
RelationResults,
)
from dbt.adapters.relation_configs.config_change import ( # noqa: F401
RelationConfigChangeAction,
RelationConfigChange,
)
from dbt.adapters.relation_configs.config_validation import ( # noqa: F401
RelationConfigValidationMixin,
RelationConfigValidationRule,
)

View File

@@ -1,44 +0,0 @@
from dataclasses import dataclass
from typing import Union, Dict
import agate
from dbt.utils import filter_null_values
"""
This is what relation metadata from the database looks like. It's a dictionary because there will be
multiple grains of data for a single object. For example, a materialized view in Postgres has base-level information,
like its name, but it can also have multiple indexes, which need to be fetched with a separate query. It might look like this:
{
"base": agate.Row({"table_name": "table_abc", "query": "select * from table_def"})
"indexes": agate.Table("rows": [
agate.Row({"name": "index_a", "columns": ["column_a"], "type": "hash", "unique": False}),
agate.Row({"name": "index_b", "columns": ["time_dim_a"], "type": "btree", "unique": False}),
])
}
"""
RelationResults = Dict[str, Union[agate.Row, agate.Table]]
@dataclass(frozen=True)
class RelationConfigBase:
@classmethod
def from_dict(cls, kwargs_dict) -> "RelationConfigBase":
"""
This assumes the subclass of `RelationConfigBase` is flat, in the sense that no attribute is
itself another subclass of `RelationConfigBase`. If that's not the case, this should be overridden
to manually manage that complexity.
Args:
kwargs_dict: the dict representation of this instance
Returns: the `RelationConfigBase` representation associated with the provided dict
"""
return cls(**filter_null_values(kwargs_dict)) # type: ignore
@classmethod
def _not_implemented_error(cls) -> NotImplementedError:
return NotImplementedError(
"This relation type has not been fully configured for this adapter."
)

View File

@@ -1,23 +0,0 @@
from abc import ABC, abstractmethod
from dataclasses import dataclass
from typing import Hashable
from dbt.adapters.relation_configs.config_base import RelationConfigBase
from dbt.dataclass_schema import StrEnum
class RelationConfigChangeAction(StrEnum):
alter = "alter"
create = "create"
drop = "drop"
@dataclass(frozen=True, eq=True, unsafe_hash=True)
class RelationConfigChange(RelationConfigBase, ABC):
action: RelationConfigChangeAction
context: Hashable # this is usually a RelationConfig, e.g. IndexConfig, but shouldn't be limited
@property
@abstractmethod
def requires_full_refresh(self) -> bool:
raise self._not_implemented_error()

View File

@@ -1,57 +0,0 @@
from dataclasses import dataclass
from typing import Set, Optional
from dbt.exceptions import DbtRuntimeError
@dataclass(frozen=True, eq=True, unsafe_hash=True)
class RelationConfigValidationRule:
validation_check: bool
validation_error: Optional[DbtRuntimeError]
@property
def default_error(self):
return DbtRuntimeError(
"There was a validation error in preparing this relation config."
"No additional context was provided by this adapter."
)
@dataclass(frozen=True)
class RelationConfigValidationMixin:
def __post_init__(self):
self.run_validation_rules()
@property
def validation_rules(self) -> Set[RelationConfigValidationRule]:
"""
A set of validation rules to run against the object upon creation.
A validation rule is a combination of a validation check (bool) and an optional error message.
This defaults to no validation rules if not implemented. It's recommended to override this with values,
but that may not always be necessary.
Returns: a set of validation rules
"""
return set()
def run_validation_rules(self):
for validation_rule in self.validation_rules:
try:
assert validation_rule.validation_check
except AssertionError:
if validation_rule.validation_error:
raise validation_rule.validation_error
else:
raise validation_rule.default_error
self.run_child_validation_rules()
def run_child_validation_rules(self):
for attr_value in vars(self).values():
if hasattr(attr_value, "validation_rules"):
attr_value.run_validation_rules()
if isinstance(attr_value, set):
for member in attr_value:
if hasattr(member, "validation_rules"):
member.run_validation_rules()

View File

@@ -1,6 +1,6 @@
import abc
import time
from typing import List, Optional, Tuple, Any, Iterable, Dict, Union
from typing import List, Optional, Tuple, Any, Iterable, Dict
import agate
@@ -10,7 +10,6 @@ from dbt.adapters.base import BaseConnectionManager
from dbt.contracts.connection import Connection, ConnectionState, AdapterResponse
from dbt.events.functions import fire_event
from dbt.events.types import ConnectionUsed, SQLQuery, SQLCommit, SQLQueryStatus
from dbt.events.contextvars import get_node_info
from dbt.utils import cast_to_str
@@ -27,7 +26,9 @@ class SQLConnectionManager(BaseConnectionManager):
@abc.abstractmethod
def cancel(self, connection: Connection):
"""Cancel the given connection."""
raise dbt.exceptions.NotImplementedError("`cancel` is not implemented for this adapter!")
raise dbt.exceptions.NotImplementedException(
"`cancel` is not implemented for this adapter!"
)
def cancel_open(self) -> List[str]:
names = []
@@ -55,13 +56,7 @@ class SQLConnectionManager(BaseConnectionManager):
connection = self.get_thread_connection()
if auto_begin and connection.transaction_open is False:
self.begin()
fire_event(
ConnectionUsed(
conn_type=self.TYPE,
conn_name=cast_to_str(connection.name),
node_info=get_node_info(),
)
)
fire_event(ConnectionUsed(conn_type=self.TYPE, conn_name=cast_to_str(connection.name)))
with self.exception_handler(sql):
if abridge_sql_log:
@@ -69,11 +64,7 @@ class SQLConnectionManager(BaseConnectionManager):
else:
log_sql = sql
fire_event(
SQLQuery(
conn_name=cast_to_str(connection.name), sql=log_sql, node_info=get_node_info()
)
)
fire_event(SQLQuery(conn_name=cast_to_str(connection.name), sql=log_sql))
pre = time.time()
cursor = connection.handle.cursor()
@@ -81,9 +72,7 @@ class SQLConnectionManager(BaseConnectionManager):
fire_event(
SQLQueryStatus(
status=str(self.get_response(cursor)),
elapsed=round((time.time() - pre)),
node_info=get_node_info(),
status=str(self.get_response(cursor)), elapsed=round((time.time() - pre), 2)
)
)
@@ -93,7 +82,7 @@ class SQLConnectionManager(BaseConnectionManager):
@abc.abstractmethod
def get_response(cls, cursor: Any) -> AdapterResponse:
"""Get the status of the cursor."""
raise dbt.exceptions.NotImplementedError(
raise dbt.exceptions.NotImplementedException(
"`get_response` is not implemented for this adapter!"
)
@@ -117,36 +106,25 @@ class SQLConnectionManager(BaseConnectionManager):
return [dict(zip(column_names, row)) for row in rows]
@classmethod
def get_result_from_cursor(cls, cursor: Any, limit: Optional[int]) -> agate.Table:
def get_result_from_cursor(cls, cursor: Any) -> agate.Table:
data: List[Any] = []
column_names: List[str] = []
if cursor.description is not None:
column_names = [col[0] for col in cursor.description]
if limit:
rows = cursor.fetchmany(limit)
else:
rows = cursor.fetchall()
rows = cursor.fetchall()
data = cls.process_results(column_names, rows)
return dbt.clients.agate_helper.table_from_data_flat(data, column_names)
@classmethod
def data_type_code_to_name(cls, type_code: Union[int, str]) -> str:
"""Get the string representation of the data type from the type_code."""
# https://peps.python.org/pep-0249/#type-objects
raise dbt.exceptions.NotImplementedError(
"`data_type_code_to_name` is not implemented for this adapter!"
)
def execute(
self, sql: str, auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None
self, sql: str, auto_begin: bool = False, fetch: bool = False
) -> Tuple[AdapterResponse, agate.Table]:
sql = self._add_query_comment(sql)
_, cursor = self.add_query(sql, auto_begin)
response = self.get_response(cursor)
if fetch:
table = self.get_result_from_cursor(cursor, limit)
table = self.get_result_from_cursor(cursor)
else:
table = dbt.clients.agate_helper.empty_table()
return response, table
@@ -157,14 +135,10 @@ class SQLConnectionManager(BaseConnectionManager):
def add_commit_query(self):
return self.add_query("COMMIT", auto_begin=False)
def add_select_query(self, sql: str) -> Tuple[Connection, Any]:
sql = self._add_query_comment(sql)
return self.add_query(sql, auto_begin=False)
def begin(self):
connection = self.get_thread_connection()
if connection.transaction_open is True:
raise dbt.exceptions.DbtInternalError(
raise dbt.exceptions.InternalException(
'Tried to begin a new transaction on connection "{}", but '
"it already had one open!".format(connection.name)
)
@@ -177,12 +151,12 @@ class SQLConnectionManager(BaseConnectionManager):
def commit(self):
connection = self.get_thread_connection()
if connection.transaction_open is False:
raise dbt.exceptions.DbtInternalError(
raise dbt.exceptions.InternalException(
'Tried to commit transaction on connection "{}", but '
"it does not have one open!".format(connection.name)
)
fire_event(SQLCommit(conn_name=connection.name, node_info=get_node_info()))
fire_event(SQLCommit(conn_name=connection.name))
self.add_commit_query()
connection.transaction_open = False

View File

@@ -1,10 +1,11 @@
import agate
from typing import Any, Optional, Tuple, Type, List
from dbt.contracts.connection import Connection, AdapterResponse
from dbt.exceptions import RelationTypeNullError
import dbt.clients.agate_helper
from dbt.contracts.connection import Connection
import dbt.exceptions
from dbt.adapters.base import BaseAdapter, available
from dbt.adapters.cache import _make_ref_key_dict
from dbt.adapters.cache import _make_ref_key_msg
from dbt.adapters.sql import SQLConnectionManager
from dbt.events.functions import fire_event
from dbt.events.types import ColTypeChange, SchemaCreation, SchemaDrop
@@ -22,7 +23,6 @@ RENAME_RELATION_MACRO_NAME = "rename_relation"
TRUNCATE_RELATION_MACRO_NAME = "truncate_relation"
DROP_RELATION_MACRO_NAME = "drop_relation"
ALTER_COLUMN_TYPE_MACRO_NAME = "alter_column_type"
VALIDATE_SQL_MACRO_NAME = "validate_sql"
class SQLAdapter(BaseAdapter):
@@ -110,7 +110,7 @@ class SQLAdapter(BaseAdapter):
ColTypeChange(
orig_type=target_column.data_type,
new_type=new_type,
table=_make_ref_key_dict(current),
table=_make_ref_key_msg(current),
)
)
@@ -132,7 +132,9 @@ class SQLAdapter(BaseAdapter):
def drop_relation(self, relation):
if relation.type is None:
raise RelationTypeNullError(relation)
dbt.exceptions.raise_compiler_error(
"Tried to drop relation {}, but its type is null.".format(relation)
)
self.cache_dropped(relation)
self.execute_macro(DROP_RELATION_MACRO_NAME, kwargs={"relation": relation})
@@ -153,7 +155,7 @@ class SQLAdapter(BaseAdapter):
def create_schema(self, relation: BaseRelation) -> None:
relation = relation.without_identifier()
fire_event(SchemaCreation(relation=_make_ref_key_dict(relation)))
fire_event(SchemaCreation(relation=_make_ref_key_msg(relation)))
kwargs = {
"relation": relation,
}
@@ -164,7 +166,7 @@ class SQLAdapter(BaseAdapter):
def drop_schema(self, relation: BaseRelation) -> None:
relation = relation.without_identifier()
fire_event(SchemaDrop(relation=_make_ref_key_dict(relation)))
fire_event(SchemaDrop(relation=_make_ref_key_msg(relation)))
kwargs = {
"relation": relation,
}
@@ -198,7 +200,6 @@ class SQLAdapter(BaseAdapter):
)
return relations
@classmethod
def quote(self, identifier):
return '"{}"'.format(identifier)
@@ -219,34 +220,6 @@ class SQLAdapter(BaseAdapter):
results = self.execute_macro(CHECK_SCHEMA_EXISTS_MACRO_NAME, kwargs=kwargs)
return results[0][0] > 0
def validate_sql(self, sql: str) -> AdapterResponse:
"""Submit the given SQL to the engine for validation, but not execution.
By default we simply prefix the query with the explain keyword and allow the
exceptions thrown by the underlying engine on invalid SQL inputs to bubble up
to the exception handler. For adjustments to the explain statement - such as
for adapters that have different mechanisms for hinting at query validation
or dry-run - callers may be able to override the validate_sql macro with
the addition of an <adapter>__validate_sql implementation.
:param sql str: The sql to validate
"""
kwargs = {
"sql": sql,
}
result = self.execute_macro(VALIDATE_SQL_MACRO_NAME, kwargs=kwargs)
# The statement macro always returns an AdapterResponse in the output AttrDict's
# `response` property, and we preserve the full payload in case we want to
# return fetched output for engines where explain plans are emitted as columnar
results. Any macro override that deviates from this behavior may encounter an
assertion error at runtime.
adapter_response = result.response # type: ignore[attr-defined]
assert isinstance(adapter_response, AdapterResponse), (
f"Expected AdapterResponse from validate_sql macro execution, "
f"got {type(adapter_response)}."
)
return adapter_response
# This is for use in the test suite
def run_sql_for_tests(self, sql, fetch, conn):
cursor = conn.handle.cursor()

View File

@@ -1,71 +1 @@
# Adding a new command
## `main.py`
Add the new command with all necessary decorators. Every command will need at minimum:
- a decorator for the click group it belongs to which also names the command
- the postflight decorator (must come before the other decorators from the `requires` module: decorators apply bottom-up, so listing it first makes it the outermost wrapper, letting it handle errors raised by the others)
- the preflight decorator
```py
@cli.command("my-new-command")
@requires.postflight
@requires.preflight
def my_new_command(ctx, **kwargs):
...
```
## `types.py`
Add an entry to the `Command` enum with your new command. Commands that are sub-commands should have entries
that represent their full command path (e.g. `source freshness -> SOURCE_FRESHNESS`, `docs serve -> DOCS_SERVE`).
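For illustration, the entry for the command above might look like the following sketch (the exact value is hypothetical; mirror the format of the existing entries):
```python
from enum import Enum


class Command(Enum):
    ...
    MY_NEW_COMMAND = "my-new-command"
```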
## `flags.py`
Add the new command to the dictionary within the `command_args` function.
# Exception Handling
## `requires.py`
### `postflight`
In the postflight decorator, the click command is invoked (i.e. `func(*args, **kwargs)`) and wrapped in a `try/except` block to handle any exceptions thrown.
Any exceptions thrown from `postflight` are wrapped by custom exceptions from the `dbt.cli.exceptions` module (i.e. `ResultExit`, `ExceptionExit`) to instruct click to complete execution with a particular exit code.
Some `dbt-core` handled exceptions have an attribute named `results` which contains results from running nodes (e.g. `FailFastError`). These are wrapped in the `ResultExit` exception to represent runs that have failed in a way that `dbt-core` expects.
If the invocation of the command does not throw any exceptions but does not succeed, `postflight` will still raise the `ResultExit` exception to make use of the exit code.
These exceptions produce an exit code of `1`.
Exceptions wrapped with `ExceptionExit` may be thrown by `dbt-core` intentionally (i.e. an exception that inherits from `dbt.exceptions.Exception`) or unintentionally (i.e. exceptions thrown by the python runtime). In either case these are considered errors that `dbt-core` did not expect and are treated as genuine exceptions.
These exceptions produce an exit code of `2`.
If no exceptions are thrown from invoking the command and the command succeeds, `postflight` will not raise any exceptions.
When no exceptions are raised, an exit code of `0` is produced.
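Putting the pieces above together, the control flow of `postflight` can be sketched roughly as follows. This is a simplified sketch, not the real decorator; it assumes the wrapped command returns a `(result, success)` tuple, and `FailFastError` stands in for any handled exception that carries `results`:
```python
from functools import wraps

from dbt.cli.exceptions import ExceptionExit, ResultExit
from dbt.exceptions import FailFastError


def postflight(func):
    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            result, success = func(*args, **kwargs)
        except FailFastError as e:
            # a handled exception carrying node results -> exit code 1
            raise ResultExit(e.results) from e
        except Exception as e:
            # anything dbt-core did not expect -> exit code 2
            raise ExceptionExit(e) from e
        if not success:
            # no exception, but the run did not succeed -> exit code 1
            raise ResultExit(result)
        # success -> exit code 0
        return result, success

    return wrapper
```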
## `main.py`
### `dbtRunner`
`dbtRunner` provides a programmatic interface for our click CLI and wraps the invocation of the click commands to handle any exceptions thrown.
`dbtRunner.invoke` should ideally only ever return an instantiated `dbtRunnerResult` which contains the following fields:
- `success`: A boolean representing whether the command invocation was successful
- `result`: The optional result of the command invoked. This attribute can have many types, please see the definition of `dbtRunnerResult` for more information
- `exception`: If an exception was thrown during command invocation it will be saved here, otherwise it will be `None`. Please note that the exceptions held in this attribute are not the exceptions thrown by `preflight` but instead the exceptions that `ResultExit` and `ExceptionExit` wrap
Programmatic exception handling might look like the following:
```python
res = dbtRunner().invoke(["run"])
if not res.success:
...
if type(res.exception) == SomeExceptionType:
...
```
## `dbt/tests/util.py`
### `run_dbt`
In many of our functional and integration tests, we want to be sure that an invocation of `dbt` raises a certain exception.
A common pattern for these assertions:
```python
class TestSomething:
def test_something(self, project):
with pytest.raises(SomeException):
run_dbt(["run"])
```
To allow these tests to assert that exceptions have been thrown, the `run_dbt` function will raise any exceptions it receives from the invocation of a `dbt` command.
TODO

View File

@@ -1 +0,0 @@
from .main import cli as dbt_cli # noqa

View File

@@ -1,16 +0,0 @@
import click
from typing import Optional
from dbt.cli.main import cli as dbt
def make_context(args, command=dbt) -> Optional[click.Context]:
try:
ctx = command.make_context(command.name, args)
except click.exceptions.Exit:
return None
ctx.invoked_subcommand = ctx.protected_args[0] if ctx.protected_args else None
ctx.obj = {}
return ctx

View File

@@ -1,43 +0,0 @@
from typing import Optional, IO
from click.exceptions import ClickException
from dbt.utils import ExitCodes
class DbtUsageException(Exception):
pass
class DbtInternalException(Exception):
pass
class CliException(ClickException):
"""The base exception class for our implementation of the click CLI.
The exit_code attribute is used by click to determine which exit code to produce
after an invocation."""
def __init__(self, exit_code: ExitCodes) -> None:
self.exit_code = exit_code.value
# the typing of _file is to satisfy the signature of ClickException.show
# overriding this method prevents click from printing any exceptions to stdout
def show(self, _file: Optional[IO] = None) -> None:
pass
class ResultExit(CliException):
"""This class wraps any exception that contains results while invoking dbt, or the
results of an invocation that did not succeed but did not throw any exceptions."""
def __init__(self, result) -> None:
super().__init__(ExitCodes.ModelError)
self.result = result
class ExceptionExit(CliException):
"""This class wraps any exception that does not contain results thrown while invoking dbt."""
def __init__(self, exception: Exception) -> None:
super().__init__(ExitCodes.UnhandledError)
self.exception = exception

View File

@@ -1,404 +1,44 @@
# TODO Move this to /core/dbt/flags.py when we're ready to break things
import os
import sys
from dataclasses import dataclass
from importlib import import_module
from multiprocessing import get_context
from pprint import pformat as pf
from typing import Any, Callable, Dict, List, Optional, Set, Union
from click import Context, get_current_context, Parameter
from click.core import Command as ClickCommand, Group, ParameterSource
from dbt.cli.exceptions import DbtUsageException
from dbt.cli.resolvers import default_log_path, default_project_dir
from dbt.cli.types import Command as CliCommand
from dbt.config.profile import read_user_config
from dbt.contracts.project import UserConfig
from dbt.exceptions import DbtInternalError
from dbt.deprecations import renamed_env_var
from dbt.helper_types import WarnErrorOptions
from click import get_current_context
if os.name != "nt":
# https://bugs.python.org/issue41567
import multiprocessing.popen_spawn_posix # type: ignore # noqa: F401
FLAGS_DEFAULTS = {
"INDIRECT_SELECTION": "eager",
"TARGET_PATH": None,
# Cli args without user_config or env var option.
"FULL_REFRESH": False,
"STRICT_MODE": False,
"STORE_FAILURES": False,
"INTROSPECT": True,
}
DEPRECATED_PARAMS = {
"deprecated_defer": "defer",
"deprecated_favor_state": "favor_state",
"deprecated_print": "print",
"deprecated_state": "state",
}
WHICH_KEY = "which"
def convert_config(config_name, config_value):
"""Convert the values from config and original set_from_args to the correct type."""
ret = config_value
if config_name.lower() == "warn_error_options" and type(config_value) == dict:
ret = WarnErrorOptions(
include=config_value.get("include", []), exclude=config_value.get("exclude", [])
)
return ret
def args_to_context(args: List[str]) -> Context:
"""Convert a list of args to a click context with proper hierarchy for dbt commands"""
from dbt.cli.main import cli
cli_ctx = cli.make_context(cli.name, args)
# Split args if they're a comma separated string.
if len(args) == 1 and "," in args[0]:
args = args[0].split(",")
sub_command_name, sub_command, args = cli.resolve_command(cli_ctx, args)
# Handle source and docs group.
if isinstance(sub_command, Group):
sub_command_name, sub_command, args = sub_command.resolve_command(cli_ctx, args)
assert isinstance(sub_command, ClickCommand)
sub_command_ctx = sub_command.make_context(sub_command_name, args)
sub_command_ctx.parent = cli_ctx
return sub_command_ctx
@dataclass(frozen=True)
class Flags:
"""Primary configuration artifact for running dbt"""
def __init__(
self, ctx: Optional[Context] = None, user_config: Optional[UserConfig] = None
) -> None:
# Set the default flags.
for key, value in FLAGS_DEFAULTS.items():
object.__setattr__(self, key, value)
def __init__(self, ctx=None) -> None:
if ctx is None:
ctx = get_current_context()
def _get_params_by_source(ctx: Context, source_type: ParameterSource):
"""Generates all params of a given source type."""
yield from [
name for name, source in ctx._parameter_source.items() if source is source_type
]
if ctx.parent:
yield from _get_params_by_source(ctx.parent, source_type)
# Ensure that any params sourced from the commandline are not present more than once.
# Click handles this exclusivity, but only at a per-subcommand level.
seen_params = []
for param in _get_params_by_source(ctx, ParameterSource.COMMANDLINE):
if param in seen_params:
raise DbtUsageException(
f"{param.lower()} was provided both before and after the subcommand, it can only be set either before or after.",
)
seen_params.append(param)
def _assign_params(
ctx: Context,
params_assigned_from_default: set,
deprecated_env_vars: Dict[str, Callable],
):
def assign_params(ctx):
"""Recursively adds all click params to flag object"""
for param_name, param_value in ctx.params.items():
# N.B. You have to use the base MRO method (object.__setattr__) to set attributes
# when using frozen dataclasses.
# https://docs.python.org/3/library/dataclasses.html#frozen-instances
# Handle deprecated env vars while still respecting old values
# e.g. DBT_NO_PRINT -> DBT_PRINT if DBT_NO_PRINT is set, it is
# respected over DBT_PRINT or --print.
new_name: Union[str, None] = None
if param_name in DEPRECATED_PARAMS:
# Deprecated env vars can only be set via env var.
# We use the deprecated option in click to serialize the value
# from the env var string.
param_source = ctx.get_parameter_source(param_name)
if param_source == ParameterSource.DEFAULT:
continue
elif param_source != ParameterSource.ENVIRONMENT:
raise DbtUsageException(
"Deprecated parameters can only be set via environment variables",
)
# Rename for clarity.
dep_name = param_name
new_name = DEPRECATED_PARAMS.get(dep_name)
try:
assert isinstance(new_name, str)
except AssertionError:
raise Exception(
f"No deprecated param name match in DEPRECATED_PARAMS from {dep_name} to {new_name}"
)
# Find param objects for their envvar name.
try:
dep_param = [x for x in ctx.command.params if x.name == dep_name][0]
new_param = [x for x in ctx.command.params if x.name == new_name][0]
except IndexError:
raise Exception(
f"No deprecated param name match in context from {dep_name} to {new_name}"
)
# Remove param from defaulted set since the deprecated
# value is not set from default, but from an env var.
if new_name in params_assigned_from_default:
params_assigned_from_default.remove(new_name)
# Add the deprecation warning function to the set.
assert isinstance(dep_param.envvar, str)
assert isinstance(new_param.envvar, str)
deprecated_env_vars[new_name] = renamed_env_var(
old_name=dep_param.envvar,
new_name=new_param.envvar,
)
# Set the flag value.
is_duplicate = hasattr(self, param_name.upper())
is_default = ctx.get_parameter_source(param_name) == ParameterSource.DEFAULT
flag_name = (new_name or param_name).upper()
if (is_duplicate and not is_default) or not is_duplicate:
object.__setattr__(self, flag_name, param_value)
# Track default assigned params.
if is_default:
params_assigned_from_default.add(param_name)
if hasattr(self, param_name):
raise Exception(f"Duplicate flag names found in click command: {param_name}")
object.__setattr__(self, param_name.upper(), param_value)
if ctx.parent:
_assign_params(ctx.parent, params_assigned_from_default, deprecated_env_vars)
assign_params(ctx.parent)
params_assigned_from_default = set() # type: Set[str]
deprecated_env_vars: Dict[str, Callable] = {}
_assign_params(ctx, params_assigned_from_default, deprecated_env_vars)
assign_params(ctx)
# Set deprecated_env_var_warnings to be fired later after events have been init.
object.__setattr__(
self, "deprecated_env_var_warnings", [x for x in deprecated_env_vars.values()]
)
# Get the invoked command flags.
invoked_subcommand_name = (
ctx.invoked_subcommand if hasattr(ctx, "invoked_subcommand") else None
)
if invoked_subcommand_name is not None:
invoked_subcommand = getattr(import_module("dbt.cli.main"), invoked_subcommand_name)
invoked_subcommand.allow_extra_args = True
invoked_subcommand.ignore_unknown_options = True
invoked_subcommand_ctx = invoked_subcommand.make_context(None, sys.argv)
_assign_params(
invoked_subcommand_ctx, params_assigned_from_default, deprecated_env_vars
)
if not user_config:
profiles_dir = getattr(self, "PROFILES_DIR", None)
user_config = read_user_config(profiles_dir) if profiles_dir else None
# Add entire invocation command to flags
object.__setattr__(self, "INVOCATION_COMMAND", "dbt " + " ".join(sys.argv[1:]))
# Overwrite default assignments with user config if available.
if user_config:
param_assigned_from_default_copy = params_assigned_from_default.copy()
for param_assigned_from_default in params_assigned_from_default:
user_config_param_value = getattr(user_config, param_assigned_from_default, None)
if user_config_param_value is not None:
object.__setattr__(
self,
param_assigned_from_default.upper(),
convert_config(param_assigned_from_default, user_config_param_value),
)
param_assigned_from_default_copy.remove(param_assigned_from_default)
params_assigned_from_default = param_assigned_from_default_copy
# Set hard coded flags.
object.__setattr__(self, "WHICH", invoked_subcommand_name or ctx.info_name)
# Hard coded flags
object.__setattr__(self, "WHICH", ctx.info_name)
object.__setattr__(self, "MP_CONTEXT", get_context("spawn"))
# Apply the lead/follow relationship between some parameters.
self._override_if_set("USE_COLORS", "USE_COLORS_FILE", params_assigned_from_default)
self._override_if_set("LOG_LEVEL", "LOG_LEVEL_FILE", params_assigned_from_default)
self._override_if_set("LOG_FORMAT", "LOG_FORMAT_FILE", params_assigned_from_default)
# Set default LOG_PATH from PROJECT_DIR, if available.
# Starting in v1.5, setting `log-path` in `dbt_project.yml` raises a deprecation warning;
# support for it may be removed in a future release.
if getattr(self, "LOG_PATH", None) is None:
project_dir = getattr(self, "PROJECT_DIR", default_project_dir())
version_check = getattr(self, "VERSION_CHECK", True)
object.__setattr__(self, "LOG_PATH", default_log_path(project_dir, version_check))
# Support console DO NOT TRACK initiative.
if os.getenv("DO_NOT_TRACK", "").lower() in ("1", "t", "true", "y", "yes"):
object.__setattr__(self, "SEND_ANONYMOUS_USAGE_STATS", False)
# Check mutual exclusivity once all flags are set.
self._assert_mutually_exclusive(
params_assigned_from_default, ["WARN_ERROR", "WARN_ERROR_OPTIONS"]
)
# Support lower cased access for legacy code.
params = set(
x for x in dir(self) if not callable(getattr(self, x)) and not x.startswith("__")
)
for param in params:
object.__setattr__(self, param.lower(), getattr(self, param))
# Support console DO NOT TRACK initiative
if os.getenv("DO_NOT_TRACK", "").lower() in ("1", "t", "true", "y", "yes"):
object.__setattr__(self, "ANONYMOUS_USAGE_STATS", False)
def __str__(self) -> str:
return str(pf(self.__dict__))
def _override_if_set(self, lead: str, follow: str, defaulted: Set[str]) -> None:
"""If the value of the lead parameter was set explicitly, apply the value to follow, unless follow was also set explicitly."""
if lead.lower() not in defaulted and follow.lower() in defaulted:
object.__setattr__(self, follow.upper(), getattr(self, lead.upper(), None))
def _assert_mutually_exclusive(
self, params_assigned_from_default: Set[str], group: List[str]
) -> None:
"""
Ensure no elements from group are simultaneously provided by a user, as inferred from params_assigned_from_default.
Raises click.UsageError if any two elements from group are simultaneously provided by a user.
"""
set_flag = None
for flag in group:
flag_set_by_user = flag.lower() not in params_assigned_from_default
if flag_set_by_user and set_flag:
raise DbtUsageException(
f"{flag.lower()}: not allowed with argument {set_flag.lower()}"
)
elif flag_set_by_user:
set_flag = flag
def fire_deprecations(self):
"""Fires events for deprecated env_var usage."""
[dep_fn() for dep_fn in self.deprecated_env_var_warnings]
# It is necessary to remove this attr from the class so it does
# not get pickled when written to disk as json.
object.__delattr__(self, "deprecated_env_var_warnings")
@classmethod
def from_dict(cls, command: CliCommand, args_dict: Dict[str, Any]) -> "Flags":
command_arg_list = command_params(command, args_dict)
ctx = args_to_context(command_arg_list)
flags = cls(ctx=ctx)
flags.fire_deprecations()
return flags
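A short sketch of building Flags without a live CLI invocation (the select and threads values are illustrative):

# Construct Flags for `dbt run` from a plain dict of params.
flags = Flags.from_dict(CliCommand.RUN, {"select": "my_model", "threads": 4})
assert flags.WHICH == "run"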
CommandParams = List[str]
def command_params(command: CliCommand, args_dict: Dict[str, Any]) -> CommandParams:
"""Given a command and a dict, returns a list of strings representing
the CLI params for that command. The order of this list is consistent with
which flags are expected at the parent level vs the command level.
e.g. fn("run", {"defer": True, "print": False}) -> ["--no-print", "run", "--defer"]
The result of this function can be passed in to the args_to_context function
to produce a click context to instantiate Flags with.
"""
cmd_args = set(command_args(command))
prnt_args = set(parent_args())
default_args = set([x.lower() for x in FLAGS_DEFAULTS.keys()])
res = command.to_list()
for k, v in args_dict.items():
k = k.lower()
# if a "which" value exists in the args dict, it should match the command provided
if k == WHICH_KEY:
if v != command.value:
raise DbtInternalError(
f"Command '{command.value}' does not match value of which: '{v}'"
)
continue
# param was assigned from defaults and should not be included
if k not in (cmd_args | prnt_args) - default_args:
continue
# if the param is in parent args, it should come before the arg name
# e.g. ["--print", "run"] vs ["run", "--print"]
add_fn = res.append
if k in prnt_args:
def add_fn(x):
res.insert(0, x)
spinal_cased = k.replace("_", "-")
if k == "macro" and command == CliCommand.RUN_OPERATION:
add_fn(v)
elif v in (None, False):
add_fn(f"--no-{spinal_cased}")
elif v is True:
add_fn(f"--{spinal_cased}")
else:
add_fn(f"--{spinal_cased}={v}")
return res
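For example, round-tripping a dict through command_params and back into Flags (a sketch; the param values are illustrative):

params = command_params(CliCommand.RUN, {"defer": True, "print": False})
# -> ["--no-print", "run", "--defer"]: parent-level flags precede the command
flags = Flags(args_to_context(params))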
ArgsList = List[str]
def parent_args() -> ArgsList:
"""Return a list representing the params the base click command takes."""
from dbt.cli.main import cli
return format_params(cli.params)
def command_args(command: CliCommand) -> ArgsList:
"""Given a command, return a list of strings representing the params
that command takes. This function only returns params assigned to a
specific command, not those of its parent command.
e.g. fn("run") -> ["defer", "favor_state", "exclude", ...]
"""
import dbt.cli.main as cli
CMD_DICT: Dict[CliCommand, ClickCommand] = {
CliCommand.BUILD: cli.build,
CliCommand.CLEAN: cli.clean,
CliCommand.CLONE: cli.clone,
CliCommand.COMPILE: cli.compile,
CliCommand.DOCS_GENERATE: cli.docs_generate,
CliCommand.DOCS_SERVE: cli.docs_serve,
CliCommand.DEBUG: cli.debug,
CliCommand.DEPS: cli.deps,
CliCommand.INIT: cli.init,
CliCommand.LIST: cli.list,
CliCommand.PARSE: cli.parse,
CliCommand.RUN: cli.run,
CliCommand.RUN_OPERATION: cli.run_operation,
CliCommand.SEED: cli.seed,
CliCommand.SHOW: cli.show,
CliCommand.SNAPSHOT: cli.snapshot,
CliCommand.SOURCE_FRESHNESS: cli.freshness,
CliCommand.TEST: cli.test,
CliCommand.RETRY: cli.retry,
}
click_cmd: Optional[ClickCommand] = CMD_DICT.get(command, None)
if click_cmd is None:
raise DbtInternalError(f"No command found for name '{command.name}'")
return format_params(click_cmd.params)
def format_params(params: List[Parameter]) -> ArgsList:
return [str(x.name) for x in params if not str(x.name).lower().startswith("deprecated_")]
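A quick sketch of enumerating a command's params with these helpers:

# Print every (non-deprecated) param name the `run` command accepts.
for name in command_args(CliCommand.RUN):
    print(name)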

View File

@@ -1,121 +1,22 @@
import inspect # This is temporary for RAT-ing
from copy import copy
from dataclasses import dataclass
from typing import Callable, List, Optional, Union
from pprint import pformat as pf # This is temporary for RAT-ing
import click
from click.exceptions import (
Exit as ClickExit,
BadOptionUsage,
NoSuchOption,
UsageError,
)
from dbt.cli import requires, params as p
from dbt.cli.exceptions import (
DbtInternalException,
DbtUsageException,
)
from dbt.contracts.graph.manifest import Manifest
from dbt.contracts.results import (
CatalogArtifact,
RunExecutionResult,
)
from dbt.events.base_types import EventMsg
from dbt.task.build import BuildTask
from dbt.task.clean import CleanTask
from dbt.task.clone import CloneTask
from dbt.task.compile import CompileTask
from dbt.task.debug import DebugTask
from dbt.task.deps import DepsTask
from dbt.task.freshness import FreshnessTask
from dbt.task.generate import GenerateTask
from dbt.task.init import InitTask
from dbt.task.list import ListTask
from dbt.task.retry import RetryTask
from dbt.task.run import RunTask
from dbt.task.run_operation import RunOperationTask
from dbt.task.seed import SeedTask
from dbt.task.serve import ServeTask
from dbt.task.show import ShowTask
from dbt.task.snapshot import SnapshotTask
from dbt.task.test import TestTask
from dbt.adapters.factory import adapter_management
from dbt.cli import params as p
from dbt.cli.flags import Flags
from dbt.profiler import profiler
@dataclass
class dbtRunnerResult:
"""Contains the result of an invocation of the dbtRunner"""
def cli_runner():
# Alias "list" to "ls"
ls = copy(cli.commands["list"])
ls.hidden = True
cli.add_command(ls, "ls")
success: bool
exception: Optional[BaseException] = None
result: Union[
bool, # debug
CatalogArtifact, # docs generate
List[str], # list/ls
Manifest, # parse
None, # clean, deps, init, source
RunExecutionResult, # build, compile, run, seed, snapshot, test, run-operation
] = None
# Programmatic invocation
class dbtRunner:
def __init__(
self,
manifest: Optional[Manifest] = None,
callbacks: Optional[List[Callable[[EventMsg], None]]] = None,
):
self.manifest = manifest
if callbacks is None:
callbacks = []
self.callbacks = callbacks
def invoke(self, args: List[str], **kwargs) -> dbtRunnerResult:
try:
dbt_ctx = cli.make_context(cli.name, args)
dbt_ctx.obj = {
"manifest": self.manifest,
"callbacks": self.callbacks,
}
for key, value in kwargs.items():
dbt_ctx.params[key] = value
# Hack to set parameter source to custom string
dbt_ctx.set_parameter_source(key, "kwargs") # type: ignore
result, success = cli.invoke(dbt_ctx)
return dbtRunnerResult(
result=result,
success=success,
)
except requires.ResultExit as e:
return dbtRunnerResult(
result=e.result,
success=False,
)
except requires.ExceptionExit as e:
return dbtRunnerResult(
exception=e.exception,
success=False,
)
except (BadOptionUsage, NoSuchOption, UsageError) as e:
return dbtRunnerResult(
exception=DbtUsageException(e.message),
success=False,
)
except ClickExit as e:
if e.exit_code == 0:
return dbtRunnerResult(success=True)
return dbtRunnerResult(
exception=DbtInternalException(f"unhandled exit code {e.exit_code}"),
success=False,
)
except BaseException as e:
return dbtRunnerResult(
exception=e,
success=False,
)
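A minimal usage sketch for programmatic invocation (assumes it runs inside a dbt project; the selector and the result fields shown are illustrative):

runner = dbtRunner()
res = runner.invoke(["run", "--select", "my_model"])
if res.success:
    for r in res.result.results:  # RunExecutionResult for `run`
        print(r.node.name, r.status)
elif res.exception is not None:
    raise res.exception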
# Run the cli
cli()
# dbt
@@ -126,86 +27,74 @@ class dbtRunner:
epilog="Specify one of these sub-commands and you can find more help from there.",
)
@click.pass_context
@p.anonymous_usage_stats
@p.cache_selected_only
@p.debug
@p.deprecated_print
@p.enable_legacy_logger
@p.event_buffer_size
@p.fail_fast
@p.log_cache_events
@p.log_format
@p.log_format_file
@p.log_level
@p.log_level_file
@p.log_path
@p.macro_debugging
@p.partial_parse
@p.partial_parse_file_path
@p.populate_cache
@p.print
@p.printer_width
@p.quiet
@p.record_timing_info
@p.send_anonymous_usage_stats
@p.single_threaded
@p.static_parser
@p.use_colors
@p.use_colors_file
@p.use_experimental_parser
@p.version
@p.version_check
@p.warn_error
@p.warn_error_options
@p.write_json
def cli(ctx, **kwargs):
"""An ELT tool for managing your SQL transformations and data models.
For more documentation on these commands, visit: docs.getdbt.com
"""
incomplete_flags = Flags()
# Profiling
if incomplete_flags.RECORD_TIMING_INFO:
ctx.with_resource(profiler(enable=True, outfile=incomplete_flags.RECORD_TIMING_INFO))
# Adapter management
ctx.with_resource(adapter_management())
# Version info
if incomplete_flags.VERSION:
click.echo(f"`version` called\n ctx.params: {pf(ctx.params)}")
return
else:
del ctx.params["version"]
# dbt build
@cli.command("build")
@click.pass_context
@p.defer
@p.deprecated_defer
@p.exclude
@p.fail_fast
@p.favor_state
@p.deprecated_favor_state
@p.full_refresh
@p.indirect_selection
@p.log_path
@p.models
@p.profile
@p.profiles_dir
@p.project_dir
@p.resource_type
@p.select
@p.selector
@p.show
@p.state
@p.defer_state
@p.deprecated_state
@p.store_failures
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def build(ctx, **kwargs):
"""Run all seeds, models, snapshots, and tests in DAG order"""
task = BuildTask(
ctx.obj["flags"],
ctx.obj["runtime_config"],
ctx.obj["manifest"],
)
results = task.run()
success = task.interpret_results(results)
return results, success
"""Run all Seeds, Models, Snapshots, and tests in DAG order"""
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
# dbt clean
@@ -215,19 +104,11 @@ def build(ctx, **kwargs):
@p.profiles_dir
@p.project_dir
@p.target
@p.target_path
@p.vars
@requires.postflight
@requires.preflight
@requires.unset_profile
@requires.project
def clean(ctx, **kwargs):
"""Delete all folders in the clean-targets list (usually the dbt_packages and target directories.)"""
task = CleanTask(ctx.obj["flags"], ctx.obj["project"])
results = task.run()
success = task.interpret_results(results)
return results, success
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
# dbt docs
@@ -242,41 +123,23 @@ def docs(ctx, **kwargs):
@click.pass_context
@p.compile_docs
@p.defer
@p.deprecated_defer
@p.exclude
@p.favor_state
@p.deprecated_favor_state
@p.log_path
@p.models
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.empty_catalog
@p.state
@p.defer_state
@p.deprecated_state
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest(write=False)
def docs_generate(ctx, **kwargs):
"""Generate the documentation website for your project"""
task = GenerateTask(
ctx.obj["flags"],
ctx.obj["runtime_config"],
ctx.obj["manifest"],
)
results = task.run()
success = task.interpret_results(results)
return results, success
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
# dbt docs serve
@@ -288,184 +151,81 @@ def docs_generate(ctx, **kwargs):
@p.profiles_dir
@p.project_dir
@p.target
@p.target_path
@p.vars
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
def docs_serve(ctx, **kwargs):
"""Serve the documentation website for your project"""
task = ServeTask(
ctx.obj["flags"],
ctx.obj["runtime_config"],
)
results = task.run()
success = task.interpret_results(results)
return results, success
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
# dbt compile
@cli.command("compile")
@click.pass_context
@p.defer
@p.deprecated_defer
@p.exclude
@p.favor_state
@p.deprecated_favor_state
@p.full_refresh
@p.show_output_format
@p.indirect_selection
@p.introspect
@p.log_path
@p.models
@p.parse_only
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.inline
@p.state
@p.defer_state
@p.deprecated_state
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def compile(ctx, **kwargs):
"""Generates executable SQL from source, model, test, and analysis files. Compiled SQL files are written to the
target/ directory."""
task = CompileTask(
ctx.obj["flags"],
ctx.obj["runtime_config"],
ctx.obj["manifest"],
)
results = task.run()
success = task.interpret_results(results)
return results, success
# dbt show
@cli.command("show")
@click.pass_context
@p.defer
@p.deprecated_defer
@p.exclude
@p.favor_state
@p.deprecated_favor_state
@p.full_refresh
@p.show_output_format
@p.show_limit
@p.indirect_selection
@p.introspect
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.inline
@p.state
@p.defer_state
@p.deprecated_state
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def show(ctx, **kwargs):
"""Generates executable SQL for a named resource or inline query, runs that SQL, and returns a preview of the
results. Does not materialize anything to the warehouse."""
task = ShowTask(
ctx.obj["flags"],
ctx.obj["runtime_config"],
ctx.obj["manifest"],
)
results = task.run()
success = task.interpret_results(results)
return results, success
"""Generates executable SQL from source, model, test, and analysis files. Compiled SQL files are written to the target/ directory."""
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
# dbt debug
@cli.command("debug")
@click.pass_context
@p.debug_connection
@p.config_dir
@p.profile
@p.profiles_dir_exists_false
@p.profiles_dir
@p.project_dir
@p.target
@p.vars
@p.version_check
@requires.postflight
@requires.preflight
def debug(ctx, **kwargs):
"""Show information on the current dbt environment and check dependencies, then test the database connection. Not to be confused with the --debug option which increases verbosity."""
task = DebugTask(
ctx.obj["flags"],
None,
)
results = task.run()
success = task.interpret_results(results)
return results, success
"""Show some helpful information about dbt for debugging. Not to be confused with the --debug option which increases verbosity."""
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
# dbt deps
@cli.command("deps")
@click.pass_context
@p.profile
@p.profiles_dir_exists_false
@p.profiles_dir
@p.project_dir
@p.target
@p.vars
@requires.postflight
@requires.preflight
@requires.unset_profile
@requires.project
def deps(ctx, **kwargs):
"""Pull the most recent version of the dependencies listed in packages.yml"""
task = DepsTask(ctx.obj["flags"], ctx.obj["project"])
results = task.run()
success = task.interpret_results(results)
return results, success
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
# dbt init
@cli.command("init")
@click.pass_context
# for backwards compatibility, accept 'project_name' as an optional positional argument
@click.argument("project_name", required=False)
@p.profile
@p.profiles_dir_exists_false
@p.profiles_dir
@p.project_dir
@p.skip_profile_setup
@p.target
@p.vars
@requires.postflight
@requires.preflight
def init(ctx, **kwargs):
"""Initialize a new dbt project."""
task = InitTask(ctx.obj["flags"], None)
results = task.run()
success = task.interpret_results(results)
return results, success
"""Initialize a new DBT project."""
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
# dbt list
@@ -480,42 +240,21 @@ def init(ctx, **kwargs):
@p.profiles_dir
@p.project_dir
@p.resource_type
@p.raw_select
@p.selector
@p.state
@p.defer_state
@p.deprecated_state
@p.target
@p.target_path
@p.vars
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def list(ctx, **kwargs):
"""List the resources in your project"""
task = ListTask(
ctx.obj["flags"],
ctx.obj["runtime_config"],
ctx.obj["manifest"],
)
results = task.run()
success = task.interpret_results(results)
return results, success
# Alias "list" to "ls"
ls = copy(cli.commands["list"])
ls.hidden = True
cli.add_command(ls, "ls")
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
# dbt parse
@cli.command("parse")
@click.pass_context
@p.compile_parse
@p.log_path
@p.profile
@p.profiles_dir
@p.project_dir
@@ -524,157 +263,51 @@ cli.add_command(ls, "ls")
@p.threads
@p.vars
@p.version_check
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest(write_perf_info=True)
@p.write_manifest
def parse(ctx, **kwargs):
"""Parses the project and provides information on performance"""
# manifest generation and writing happens in @requires.manifest
return ctx.obj["manifest"], True
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
# dbt run
@cli.command("run")
@click.pass_context
@p.defer
@p.deprecated_defer
@p.favor_state
@p.deprecated_favor_state
@p.exclude
@p.fail_fast
@p.full_refresh
@p.log_path
@p.models
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.state
@p.defer_state
@p.deprecated_state
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def run(ctx, **kwargs):
"""Compile SQL and execute against the current target database."""
task = RunTask(
ctx.obj["flags"],
ctx.obj["runtime_config"],
ctx.obj["manifest"],
)
results = task.run()
success = task.interpret_results(results)
return results, success
# dbt retry
@cli.command("retry")
@click.pass_context
@p.project_dir
@p.profiles_dir
@p.vars
@p.profile
@p.target
@p.state
@p.threads
@p.fail_fast
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def retry(ctx, **kwargs):
"""Retry the nodes that failed in the previous run."""
task = RetryTask(
ctx.obj["flags"],
ctx.obj["runtime_config"],
ctx.obj["manifest"],
)
results = task.run()
success = task.interpret_results(results)
return results, success
# dbt clone
@cli.command("clone")
@click.pass_context
@p.defer_state
@p.exclude
@p.full_refresh
@p.profile
@p.profiles_dir
@p.project_dir
@p.resource_type
@p.select
@p.selector
@p.state # required
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
@requires.postflight
def clone(ctx, **kwargs):
"""Create clones of selected nodes based on their location in the manifest provided to --state."""
task = CloneTask(
ctx.obj["flags"],
ctx.obj["runtime_config"],
ctx.obj["manifest"],
)
results = task.run()
success = task.interpret_results(results)
return results, success
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
# dbt run operation
@cli.command("run-operation")
@click.pass_context
@click.argument("macro")
@p.args
@p.profile
@p.profiles_dir
@p.project_dir
@p.target
@p.target_path
@p.threads
@p.vars
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def run_operation(ctx, **kwargs):
"""Run the named macro with any supplied arguments."""
task = RunOperationTask(
ctx.obj["flags"],
ctx.obj["runtime_config"],
ctx.obj["manifest"],
)
results = task.run()
success = task.interpret_results(results)
return results, success
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
# dbt seed
@@ -682,75 +315,43 @@ def run_operation(ctx, **kwargs):
@click.pass_context
@p.exclude
@p.full_refresh
@p.log_path
@p.models
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.show
@p.state
@p.defer_state
@p.deprecated_state
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def seed(ctx, **kwargs):
"""Load data from csv files into your data warehouse."""
task = SeedTask(
ctx.obj["flags"],
ctx.obj["runtime_config"],
ctx.obj["manifest"],
)
results = task.run()
success = task.interpret_results(results)
return results, success
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
# dbt snapshot
@cli.command("snapshot")
@click.pass_context
@p.defer
@p.deprecated_defer
@p.exclude
@p.favor_state
@p.deprecated_favor_state
@p.models
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.state
@p.defer_state
@p.deprecated_state
@p.target
@p.target_path
@p.threads
@p.vars
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def snapshot(ctx, **kwargs):
"""Execute snapshots defined in your project"""
task = SnapshotTask(
ctx.obj["flags"],
ctx.obj["runtime_config"],
ctx.obj["manifest"],
)
results = task.run()
success = task.interpret_results(results)
return results, success
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
# dbt source
@@ -764,87 +365,48 @@ def source(ctx, **kwargs):
@source.command("freshness")
@click.pass_context
@p.exclude
@p.models
@p.output_path # TODO: Is this ok to re-use? We have three different output params, how much can we consolidate?
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.state
@p.defer_state
@p.deprecated_state
@p.target
@p.target_path
@p.threads
@p.vars
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def freshness(ctx, **kwargs):
"""check the current freshness of the project's sources"""
task = FreshnessTask(
ctx.obj["flags"],
ctx.obj["runtime_config"],
ctx.obj["manifest"],
)
results = task.run()
success = task.interpret_results(results)
return results, success
# Alias "source freshness" to "snapshot-freshness"
snapshot_freshness = copy(cli.commands["source"].commands["freshness"]) # type: ignore
snapshot_freshness.hidden = True
cli.commands["source"].add_command(snapshot_freshness, "snapshot-freshness") # type: ignore
"""Snapshots the current freshness of the project's sources"""
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
# dbt test
@cli.command("test")
@click.pass_context
@p.defer
@p.deprecated_defer
@p.exclude
@p.fail_fast
@p.favor_state
@p.deprecated_favor_state
@p.indirect_selection
@p.log_path
@p.models
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.state
@p.defer_state
@p.deprecated_state
@p.store_failures
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def test(ctx, **kwargs):
"""Runs tests on data in deployed models. Run this after `dbt run`"""
task = TestTask(
ctx.obj["flags"],
ctx.obj["runtime_config"],
ctx.obj["manifest"],
)
results = task.run()
success = task.interpret_results(results)
return results, success
flags = Flags()
click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")
# Support running as a module
if __name__ == "__main__":
cli()
cli_runner()

View File

@@ -1,9 +1,5 @@
from click import ParamType, Choice
from dbt.config.utils import parse_cli_yaml_string
from dbt.exceptions import ValidationError, DbtValidationError, OptionNotYamlDictError
from dbt.helper_types import WarnErrorOptions
from click import ParamType
import yaml
class YAML(ParamType):
@@ -16,26 +12,11 @@ class YAML(ParamType):
if not isinstance(value, str):
self.fail(f"Cannot load YAML from type {type(value)}", param, ctx)
try:
param_option_name = param.opts[0] if param.opts else param.name
return parse_cli_yaml_string(value, param_option_name.strip("-"))
except (ValidationError, DbtValidationError, OptionNotYamlDictError):
return yaml.load(value, Loader=yaml.Loader)
except yaml.parser.ParserError:
self.fail(f"String '{value}' is not valid YAML", param, ctx)
class WarnErrorOptionsType(YAML):
"""The Click WarnErrorOptions type. Converts YAML strings into objects."""
name = "WarnErrorOptionsType"
def convert(self, value, param, ctx):
# this function is being used by param in click
include_exclude = super().convert(value, param, ctx)
return WarnErrorOptions(
include=include_exclude.get("include", []), exclude=include_exclude.get("exclude", [])
)
class Truthy(ParamType):
"""The Click Truthy type. Converts strings into a "truthy" type"""
@@ -50,13 +31,3 @@ class Truthy(ParamType):
return None
else:
return value
class ChoiceTuple(Choice):
name = "CHOICE_TUPLE"
def convert(self, value, param, ctx):
for value_item in value:
super().convert(value_item, param, ctx)
return value

View File

@@ -1,75 +0,0 @@
import click
import inspect
import typing as t
from click import Context
from dbt.cli.option_types import ChoiceTuple
# Implementation from: https://stackoverflow.com/a/48394004
# Note MultiOption options must be specified with type=tuple or type=ChoiceTuple (https://github.com/pallets/click/issues/2012)
class MultiOption(click.Option):
def __init__(self, *args, **kwargs):
self.save_other_options = kwargs.pop("save_other_options", True)
nargs = kwargs.pop("nargs", -1)
assert nargs == -1, "nargs, if set, must be -1 not {}".format(nargs)
super(MultiOption, self).__init__(*args, **kwargs)
self._previous_parser_process = None
self._eat_all_parser = None
# validate that multiple=True
multiple = kwargs.pop("multiple", None)
msg = f"MultiOption named `{self.name}` must have multiple=True (rather than {multiple})"
assert multiple, msg
# validate that type=tuple or type=ChoiceTuple
option_type = kwargs.pop("type", None)
msg = f"MultiOption named `{self.name}` must be tuple or ChoiceTuple (rather than {option_type})"
if inspect.isclass(option_type):
assert issubclass(option_type, tuple), msg
else:
assert isinstance(option_type, ChoiceTuple), msg
def add_to_parser(self, parser, ctx):
def parser_process(value, state):
# method to hook to the parser.process
done = False
value = [value]
if self.save_other_options:
# grab everything up to the next option
while state.rargs and not done:
for prefix in self._eat_all_parser.prefixes:
if state.rargs[0].startswith(prefix):
done = True
if not done:
value.append(state.rargs.pop(0))
else:
# grab everything remaining
value += state.rargs
state.rargs[:] = []
value = tuple(value)
# call the actual process
self._previous_parser_process(value, state)
retval = super(MultiOption, self).add_to_parser(parser, ctx)
for name in self.opts:
our_parser = parser._long_opt.get(name) or parser._short_opt.get(name)
if our_parser:
self._eat_all_parser = our_parser
self._previous_parser_process = our_parser.process
our_parser.process = parser_process
break
return retval
def type_cast_value(self, ctx: Context, value: t.Any) -> t.Any:
def flatten(data):
if isinstance(data, tuple):
for x in data:
yield from flatten(x)
else:
yield data
# there will be nested tuples to flatten when multiple=True
value = super(MultiOption, self).type_cast_value(ctx, value)
if value:
value = tuple(flatten(value))
return value
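A hedged sketch of attaching MultiOption to a command; as the assertions above enforce, it must be declared with multiple=True and a tuple type (the option name is illustrative):

@click.command()
@click.option(
    "--select",
    cls=MultiOption,
    multiple=True,
    type=tuple,
    help="Greedily consumes values up to the next option.",
)
def demo(select):
    # `demo --select a b c` yields select == ("a", "b", "c")
    click.echo(select)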

Some files were not shown because too many files have changed in this diff.