Compare commits


1 Commit

Author: Michelle Ark
SHA1: b29709b4d7
Message: add python-dev-tools to dev-requirements
Date: 2023-07-27 13:38:42 -04:00
171 changed files with 1493 additions and 9330 deletions


@@ -1,5 +1,5 @@
[bumpversion]
current_version = 1.7.0b1
current_version = 1.7.0a1
parse = (?P<major>[\d]+) # major version number
\.(?P<minor>[\d]+) # minor version number
\.(?P<patch>[\d]+) # patch version number
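
For orientation, the `parse` value above is an ordinary verbose regex with named groups; bumpversion compiles the multi-line value itself, and the full config also defines prerelease parts that fall outside this hunk. A minimal sketch of how the visible groups tokenize a version string:

```python
import re

# Rough reconstruction of the `parse` fragment shown above; the real
# .bumpversion.cfg has additional prerelease groups not visible here.
VERSION_PARSE = re.compile(
    r"""
    (?P<major>[\d]+)      # major version number
    \.(?P<minor>[\d]+)    # minor version number
    \.(?P<patch>[\d]+)    # patch version number
    """,
    re.VERBOSE,
)

print(VERSION_PARSE.match("1.7.0a1").groupdict())
# {'major': '1', 'minor': '7', 'patch': '0'}
```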


@@ -1,70 +0,0 @@
## dbt-core 1.7.0-b1 - August 17, 2023
### Breaking Changes
- Removed the FirstRunResultError and AfterFirstRunResultError event types, using the existing RunResultError in their place. ([#7963](https://github.com/dbt-labs/dbt-core/issues/7963))
### Features
- Enable re-population of metadata vars post-environment change during programmatic invocation ([#8010](https://github.com/dbt-labs/dbt-core/issues/8010))
- Added support to configure a delimiter for a seed file, defaults to comma ([#3990](https://github.com/dbt-labs/dbt-core/issues/3990))
- Allow specification of `create_metric: true` on measures ([#8125](https://github.com/dbt-labs/dbt-core/issues/8125))
### Fixes
- Copy dir during `dbt deps` if symlink fails ([#7428](https://github.com/dbt-labs/dbt-core/issues/7428), [#8223](https://github.com/dbt-labs/dbt-core/issues/8223))
- Fixed double-underline ([#5301](https://github.com/dbt-labs/dbt-core/issues/5301))
- Copy target_schema from config into snapshot node ([#6745](https://github.com/dbt-labs/dbt-core/issues/6745))
- Enable converting deprecation warnings to errors ([#8130](https://github.com/dbt-labs/dbt-core/issues/8130))
- Add status to Parse Inline Error ([#8173](https://github.com/dbt-labs/dbt-core/issues/8173))
- Ensure `warn_error_options` get serialized in `invocation_args_dict` ([#7694](https://github.com/dbt-labs/dbt-core/issues/7694))
- Stop detecting materialization macros based on macro name ([#6231](https://github.com/dbt-labs/dbt-core/issues/6231))
- Update `dbt deps` download retry logic to handle `EOFError` exceptions ([#6653](https://github.com/dbt-labs/dbt-core/issues/6653))
- Improve handling of CTE injection with ephemeral models ([#8213](https://github.com/dbt-labs/dbt-core/issues/8213))
- Fix unbound local variable error in `checked_agg_time_dimension_for_measure` ([#8230](https://github.com/dbt-labs/dbt-core/issues/8230))
- Ensure runtime errors are raised for graph runnable tasks (compile, show, run, etc) ([#8166](https://github.com/dbt-labs/dbt-core/issues/8166))
- Fix retry not working with log-file-max-bytes ([#8297](https://github.com/dbt-labs/dbt-core/issues/8297))
- Detect changes to model access, version, or latest_version in state:modified ([#8189](https://github.com/dbt-labs/dbt-core/issues/8189))
- Add connection status into list of statuses for dbt debug ([#8350](https://github.com/dbt-labs/dbt-core/issues/8350))
- fix fqn-selection for external versioned models ([#8374](https://github.com/dbt-labs/dbt-core/issues/8374))
- Fix: DbtInternalError after model that previously ref'd external model is deleted ([#8375](https://github.com/dbt-labs/dbt-core/issues/8375))
- Fix using list command with path selector and project-dir ([#8385](https://github.com/dbt-labs/dbt-core/issues/8385))
- Remedy performance regression by only writing run_results.json once. ([#8360](https://github.com/dbt-labs/dbt-core/issues/8360))
### Docs
- Corrected spelling of "Partiton" ([dbt-docs/#8100](https://github.com/dbt-labs/dbt-docs/issues/8100))
- Remove static SQL codeblock for metrics ([dbt-docs/#436](https://github.com/dbt-labs/dbt-docs/issues/436))
- fixed comment util.py ([dbt-docs/#None](https://github.com/dbt-labs/dbt-docs/issues/None))
- Display contract and column constraints on the model page ([dbt-docs/#433](https://github.com/dbt-labs/dbt-docs/issues/433))
- Display semantic model details in docs ([dbt-docs/#431](https://github.com/dbt-labs/dbt-docs/issues/431))
### Under the Hood
- Refactor flaky test pp_versioned_models ([#7781](https://github.com/dbt-labs/dbt-core/issues/7781))
- format exception from dbtPlugin.initialize ([#8152](https://github.com/dbt-labs/dbt-core/issues/8152))
- A way to control maxBytes for a single dbt.log file ([#8199](https://github.com/dbt-labs/dbt-core/issues/8199))
- Ref expressions with version can now be processed by the latest version of the high-performance dbt-extractor library. ([#7688](https://github.com/dbt-labs/dbt-core/issues/7688))
- Bump manifest schema version to v11, freeze manifest v10 ([#8333](https://github.com/dbt-labs/dbt-core/issues/8333))
- add tracking for plugin.get_nodes calls ([#8344](https://github.com/dbt-labs/dbt-core/issues/8344))
- add internal flag: --no-partial-parse-file-diff to inform whether to compute a file diff during partial parsing ([#8363](https://github.com/dbt-labs/dbt-core/issues/8363))
- Add return values to a number of functions for mypy ([#8389](https://github.com/dbt-labs/dbt-core/issues/8389))
- Fix mypy warnings for ManifestLoader.load() ([#8401](https://github.com/dbt-labs/dbt-core/issues/8401))
- Use python version 3.10.7 in Docker image. ([#8444](https://github.com/dbt-labs/dbt-core/issues/8444))
### Dependencies
- Bump mypy from 1.3.0 to 1.4.0 ([#7912](https://github.com/dbt-labs/dbt-core/pull/7912))
- Bump mypy from 1.4.0 to 1.4.1 ([#8219](https://github.com/dbt-labs/dbt-core/pull/8219))
- Update pin for click<9 ([#8232](https://github.com/dbt-labs/dbt-core/pull/8232))
- Add upper bound to sqlparse pin of <0.5 ([#8236](https://github.com/dbt-labs/dbt-core/pull/8236))
- Support dbt-semantic-interfaces 0.2.0 ([#8250](https://github.com/dbt-labs/dbt-core/pull/8250))
### Contributors
- [@anjutiwari](https://github.com/anjutiwari) ([#7428](https://github.com/dbt-labs/dbt-core/issues/7428), [#8223](https://github.com/dbt-labs/dbt-core/issues/8223))
- [@d-kaneshiro](https://github.com/d-kaneshiro) ([#None](https://github.com/dbt-labs/dbt-core/issues/None))
- [@gem7318](https://github.com/gem7318) ([#8010](https://github.com/dbt-labs/dbt-core/issues/8010))
- [@lllong33](https://github.com/lllong33) ([#5301](https://github.com/dbt-labs/dbt-core/issues/5301))
- [@marcodamore](https://github.com/marcodamore) ([#436](https://github.com/dbt-labs/dbt-core/issues/436))
- [@pgoslatara](https://github.com/pgoslatara) ([#8100](https://github.com/dbt-labs/dbt-core/issues/8100))
- [@ramonvermeulen](https://github.com/ramonvermeulen) ([#3990](https://github.com/dbt-labs/dbt-core/issues/3990))


@@ -1,7 +0,0 @@
kind: Breaking Changes
body: Removed the FirstRunResultError and AfterFirstRunResultError event types, using
the existing RunResultError in their place.
time: 2023-07-25T17:13:59.441682-04:00
custom:
Author: peterallenwebb
Issue: "7963"


@@ -1,6 +0,0 @@
kind: "Dependencies"
body: "Bump mypy from 1.4.0 to 1.4.1"
time: 2023-07-26T20:17:40.00000Z
custom:
Author: dependabot[bot]
PR: 8219


@@ -1,6 +0,0 @@
kind: Dependencies
body: Update pin for click<9
time: 2023-07-27T14:57:03.180458-05:00
custom:
Author: emmyoop
PR: "8232"


@@ -1,6 +0,0 @@
kind: Dependencies
body: Add upper bound to sqlparse pin of <0.5
time: 2023-07-27T14:57:26.40416-05:00
custom:
Author: emmyoop
PR: "8236"


@@ -1,6 +0,0 @@
kind: Dependencies
body: Support dbt-semantic-interfaces 0.2.0
time: 2023-07-28T13:52:27.207241-07:00
custom:
Author: QMalcolm
PR: "8250"


@@ -1,6 +0,0 @@
kind: Docs
body: fixed comment util.py
time: 2023-07-27T17:09:00.089237+09:00
custom:
Author: d-kaneshiro
Issue: None


@@ -1,6 +0,0 @@
kind: Docs
body: Display contract and column constraints on the model page
time: 2023-08-04T13:18:15.627005-05:00
custom:
Author: emmyoop
Issue: "433"


@@ -1,6 +0,0 @@
kind: Docs
body: Display semantic model details in docs
time: 2023-08-07T15:25:48.711627-05:00
custom:
Author: emmyoop
Issue: "431"


@@ -1,7 +0,0 @@
kind: Features
body: Enable re-population of metadata vars post-environment change during programmatic
invocation
time: 2023-07-02T12:28:13.416305-04:00
custom:
Author: gem7318
Issue: "8010"


@@ -1,6 +0,0 @@
kind: Features
body: Added support to configure a delimiter for a seed file, defaults to comma
time: 2023-07-14T20:24:45.513847165+02:00
custom:
Author: ramonvermeulen
Issue: "3990"


@@ -1,6 +0,0 @@
kind: Features
body: 'Allow specification of `create_metric: true` on measures'
time: 2023-08-03T15:18:24.351003-07:00
custom:
Author: QMalcolm
Issue: "8125"


@@ -1,6 +0,0 @@
kind: Fixes
body: Copy dir during `dbt deps` if symlink fails
time: 2023-04-24T21:07:34.336797+05:30
custom:
Author: anjutiwari
Issue: "7428 8223"


@@ -1,6 +0,0 @@
kind: Fixes
body: Copy target_schema from config into snapshot node
time: 2023-07-17T16:06:52.957724-04:00
custom:
Author: gshank
Issue: "6745"


@@ -1,6 +0,0 @@
kind: Fixes
body: Fix unbound local variable error in `checked_agg_time_dimension_for_measure`
time: 2023-07-27T12:58:30.673803-07:00
custom:
Author: QMalcolm
Issue: "8230"


@@ -1,7 +0,0 @@
kind: Fixes
body: Ensure runtime errors are raised for graph runnable tasks (compile, show, run,
etc)
time: 2023-07-28T11:56:20.863718-04:00
custom:
Author: michelleark
Issue: "8166"


@@ -1,6 +0,0 @@
kind: Fixes
body: Fix retry not working with log-file-max-bytes
time: 2023-08-02T14:15:56.306027-07:00
custom:
Author: ChenyuLInx
Issue: "8297"


@@ -1,6 +0,0 @@
kind: Fixes
body: Detect changes to model access, version, or latest_version in state:modified
time: 2023-08-06T22:23:19.166334-04:00
custom:
Author: michelleark
Issue: "8189"


@@ -1,6 +0,0 @@
kind: Fixes
body: Add connection status into list of statuses for dbt debug
time: 2023-08-10T18:48:59.221344+01:00
custom:
Author: aranke
Issue: "8350"


@@ -1,6 +0,0 @@
kind: Fixes
body: fix fqn-selection for external versioned models
time: 2023-08-11T20:41:44.725144-04:00
custom:
Author: michelleark
Issue: "8374"


@@ -1,7 +0,0 @@
kind: Fixes
body: 'Fix: DbtInternalError after model that previously ref''d external model is
deleted'
time: 2023-08-11T21:20:08.145554-04:00
custom:
Author: michelleark
Issue: "8375"


@@ -1,6 +0,0 @@
kind: Fixes
body: Fix using list command with path selector and project-dir
time: 2023-08-14T14:57:02.02816-04:00
custom:
Author: gshank
Issue: "8385"


@@ -1,6 +0,0 @@
kind: Fixes
body: Remedy performance regression by only writing run_results.json once.
time: 2023-08-15T10:44:44.836991-04:00
custom:
Author: peterallenwebb
Issue: "8360"


@@ -1,6 +0,0 @@
kind: Under the Hood
body: Use python version 3.10.7 in Docker image.
time: 2023-08-17T13:09:15.936349-05:00
custom:
Author: McKnight-42
Issue: "8444"


@@ -1,6 +0,0 @@
kind: Under the Hood
body: Bump manifest schema version to v11, freeze manifest v10
time: 2023-08-07T16:45:09.712744-04:00
custom:
Author: gshank
Issue: "8333"


@@ -1,6 +0,0 @@
kind: Under the Hood
body: add tracking for plugin.get_nodes calls
time: 2023-08-09T09:48:34.819445-04:00
custom:
Author: michelleark
Issue: "8344"


@@ -1,7 +0,0 @@
kind: Under the Hood
body: 'add internal flag: --no-partial-parse-file-diff to inform whether to compute
a file diff during partial parsing'
time: 2023-08-11T10:09:02.832241-04:00
custom:
Author: michelleark
Issue: "8363"


@@ -1,6 +0,0 @@
kind: Under the Hood
body: Add return values to a number of functions for mypy
time: 2023-08-15T17:03:07.895252-04:00
custom:
Author: gshank
Issue: "8389"


@@ -1,6 +0,0 @@
kind: Under the Hood
body: Fix mypy warnings for ManifestLoader.load()
time: 2023-08-17T13:45:48.937252-04:00
custom:
Author: gshank
Issue: "8401"


@@ -1,6 +0,0 @@
kind: Docs
body: Fix newline escapes and improve formatting in docker README
time: 2023-07-28T19:34:38.351042747+02:00
custom:
Author: jamezrin
Issue: "8211"


@@ -1,6 +0,0 @@
kind: Fixes
body: Ensure parsing does not break when `window_groupings` is not specified for `non_additive_dimension`
time: 2023-08-18T09:53:48.154848-07:00
custom:
Author: QMalcolm
Issue: "8453"


@@ -1,6 +0,0 @@
kind: Fixes
body: Turn breaking changes to contracted models into warnings for unversioned models
time: 2023-08-18T10:38:02.251286-05:00
custom:
Author: emmyoop
Issue: 8384 8282


@@ -1,7 +0,0 @@
kind: Fixes
body: fix ambiguous reference error for tests and versions when model name is duplicated across
packages
time: 2023-08-24T16:10:24.437362-04:00
custom:
Author: michelleark
Issue: "8327 8493"


@@ -1,7 +0,0 @@
kind: Under the Hood
body: 'Re-organize jinja macros: relation-specific in /macros/adapters/relations/<relation>,
relation agnostic in /macros/relations'
time: 2023-08-21T13:48:01.474731-04:00
custom:
Author: mikealfare
Issue: "8449"


@@ -1,6 +0,0 @@
kind: Under the Hood
body: Update typing to meet mypy standards
time: 2023-08-23T19:42:37.130694-04:00
custom:
Author: mikealfare
Issue: "8396"


@@ -1,7 +1,7 @@
name: 🛠️ Implementation
description: This is an implementation ticket intended for use by the maintainers of dbt-core
title: "[<project>] <title>"
labels: ["user docs"]
labels: ["user_docs"]
body:
- type: markdown
attributes:
@@ -11,7 +11,7 @@ body:
label: Housekeeping
description: >
A couple friendly reminders:
1. Remove the `user docs` label if the scope of this work does not require changes to https://docs.getdbt.com/docs: no end-user interface (e.g. yml spec, CLI, error messages, etc) or functional changes
1. Remove the `user_docs` label if the scope of this work does not require changes to https://docs.getdbt.com/docs: no end-user interface (e.g. yml spec, CLI, error messages, etc) or functional changes
2. Link any blocking issues in the "Blocked on" field under the "Core devs & maintainers" project.
options:
- label: I am a maintainer of dbt-core
@@ -25,29 +25,11 @@ body:
required: true
- type: textarea
attributes:
label: Acceptance criteria
label: Acceptance critera
description: |
What is the definition of done for this ticket? Include any relevant edge cases and/or test cases
validations:
required: true
- type: textarea
attributes:
label: Impact to Other Teams
description: |
Will this change impact other teams? Include details of the kinds of changes required (new tests, code changes, related tickets) and _add the relevant `Impact:[team]` label_.
placeholder: |
Example: This change impacts `dbt-redshift` because the tests will need to be modified. The `Impact:[Adapter]` label has been added.
validations:
required: true
- type: textarea
attributes:
label: Will backports be required?
description: |
Will this change need to be backported to previous versions? Add details, possible blockers to backporting and _add the relevant backport labels `backport 1.x.latest`_
placeholder: |
Example: Backport to 1.6.latest, 1.5.latest and 1.4.latest. Since 1.4 isn't using click, the backport may be complicated. The `backport 1.6.latest`, `backport 1.5.latest` and `backport 1.4.latest` labels have been added.
validations:
required: true
- type: textarea
attributes:
label: Context


@@ -2,8 +2,10 @@
# Checks that a file has been committed under the /.changes directory
# as a new CHANGELOG entry. Cannot check for a specific filename as
# it is dynamically generated by change type and timestamp.
# This workflow runs on pull_request_target because it requires
# secrets to post comments.
# This workflow should not require any secrets since it runs for PRs
# from forked repos.
# By default, secrets are not passed to workflows running from
# a forked repo.
# **why?**
# Ensure code change gets reflected in the CHANGELOG.
@@ -17,7 +19,7 @@
name: Check Changelog Entry
on:
pull_request_target:
pull_request:
types: [opened, reopened, labeled, unlabeled, synchronize]
workflow_dispatch:


@@ -1,37 +0,0 @@
# **what?**
# Open an issue in docs.getdbt.com when a PR is labeled `user docs`
# **why?**
# To reduce barriers for keeping docs up to date
# **when?**
# When a PR is labeled `user docs` and is merged. Runs on pull_request_target to run off the workflow already merged,
# not the workflow that existed on the PR branch. This allows old PRs to get comments.
name: Open issues in docs.getdbt.com repo when a PR is labeled
run-name: "Open an issue in docs.getdbt.com for PR #${{ github.event.pull_request.number }}"
on:
pull_request_target:
types: [labeled, closed]
defaults:
run:
shell: bash
permissions:
issues: write # opens new issues
pull-requests: write # comments on PRs
jobs:
open_issues:
if: contains( github.event.pull_request.labels.*.name, 'user docs') && github.event.pull_request.merged == true
uses: dbt-labs/actions/.github/workflows/open-issue-in-repo.yml@main
with:
issue_repository: "dbt-labs/docs.getdbt.com"
issue_title: "Docs Changes Needed from ${{ github.event.repository.name }} PR #${{ github.event.pull_request.number }}"
issue_labels: "content,improvement,dbt Core"
issue_body: "At a minimum, update body to include a link to the page on docs.getdbt.com requiring updates and what part(s) of the page you would like to see updated."
secrets: inherit


@@ -36,7 +36,7 @@ defaults:
# top-level adjustments can be made here
env:
# number of parallel processes to spawn for python integration testing
PYTHON_INTEGRATION_TEST_WORKERS: 5
PYTHON_INTEGRATION_TEST_WORKERS: ${{ vars.PYTHON_INTEGRATION_TEST_WORKERS }}
jobs:
code-quality:
@@ -108,9 +108,8 @@ jobs:
- name: Upload Unit Test Coverage to Codecov
if: ${{ matrix.python-version == '3.11' }}
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
flags: unit
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
integration-metadata:
name: integration test metadata generation
@@ -222,26 +221,17 @@ jobs:
- name: Upload Integration Test Coverage to Codecov
if: ${{ matrix.python-version == '3.11' }}
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
flags: integration
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
integration-report:
if: ${{ always() }}
name: Integration Test Suite
name: integration test suite
runs-on: ubuntu-latest
needs: integration
steps:
- name: "Integration Tests Failed"
if: ${{ contains(needs.integration.result, 'failure') || contains(needs.integration.result, 'cancelled') }}
# when this is true the next step won't execute
- name: "[Notification] Integration test suite passes"
run: |
echo "::notice title='Integration test suite failed'"
exit 1
- name: "Integration Tests Passed"
run: |
echo "::notice title='Integration test suite passed'"
echo "::notice title="Integration test suite passes""
build:
name: build packages


@@ -21,7 +21,7 @@ permissions: read-all
# top-level adjustments can be made here
env:
# number of parallel processes to spawn for python testing
PYTHON_INTEGRATION_TEST_WORKERS: 5
PYTHON_INTEGRATION_TEST_WORKERS: ${{ vars.PYTHON_INTEGRATION_TEST_WORKERS }}
jobs:
integration-metadata:


@@ -37,7 +37,7 @@ repos:
alias: flake8-check
stages: [manual]
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.4.1
rev: v1.4.0
hooks:
- id: mypy
# N.B.: Mypy is... a bit fragile.


@@ -5,78 +5,6 @@
- "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
- Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)
## dbt-core 1.7.0-b1 - August 17, 2023
### Breaking Changes
- Removed the FirstRunResultError and AfterFirstRunResultError event types, using the existing RunResultError in their place. ([#7963](https://github.com/dbt-labs/dbt-core/issues/7963))
### Features
- Enable re-population of metadata vars post-environment change during programmatic invocation ([#8010](https://github.com/dbt-labs/dbt-core/issues/8010))
- Added support to configure a delimiter for a seed file, defaults to comma ([#3990](https://github.com/dbt-labs/dbt-core/issues/3990))
- Allow specification of `create_metric: true` on measures ([#8125](https://github.com/dbt-labs/dbt-core/issues/8125))
### Fixes
- Copy dir during `dbt deps` if symlink fails ([#7428](https://github.com/dbt-labs/dbt-core/issues/7428), [#8223](https://github.com/dbt-labs/dbt-core/issues/8223))
- Fixed double-underline ([#5301](https://github.com/dbt-labs/dbt-core/issues/5301))
- Copy target_schema from config into snapshot node ([#6745](https://github.com/dbt-labs/dbt-core/issues/6745))
- Enable converting deprecation warnings to errors ([#8130](https://github.com/dbt-labs/dbt-core/issues/8130))
- Add status to Parse Inline Error ([#8173](https://github.com/dbt-labs/dbt-core/issues/8173))
- Ensure `warn_error_options` get serialized in `invocation_args_dict` ([#7694](https://github.com/dbt-labs/dbt-core/issues/7694))
- Stop detecting materialization macros based on macro name ([#6231](https://github.com/dbt-labs/dbt-core/issues/6231))
- Update `dbt deps` download retry logic to handle `EOFError` exceptions ([#6653](https://github.com/dbt-labs/dbt-core/issues/6653))
- Improve handling of CTE injection with ephemeral models ([#8213](https://github.com/dbt-labs/dbt-core/issues/8213))
- Fix unbound local variable error in `checked_agg_time_dimension_for_measure` ([#8230](https://github.com/dbt-labs/dbt-core/issues/8230))
- Ensure runtime errors are raised for graph runnable tasks (compile, show, run, etc) ([#8166](https://github.com/dbt-labs/dbt-core/issues/8166))
- Fix retry not working with log-file-max-bytes ([#8297](https://github.com/dbt-labs/dbt-core/issues/8297))
- Detect changes to model access, version, or latest_version in state:modified ([#8189](https://github.com/dbt-labs/dbt-core/issues/8189))
- Add connection status into list of statuses for dbt debug ([#8350](https://github.com/dbt-labs/dbt-core/issues/8350))
- fix fqn-selection for external versioned models ([#8374](https://github.com/dbt-labs/dbt-core/issues/8374))
- Fix: DbtInternalError after model that previously ref'd external model is deleted ([#8375](https://github.com/dbt-labs/dbt-core/issues/8375))
- Fix using list command with path selector and project-dir ([#8385](https://github.com/dbt-labs/dbt-core/issues/8385))
- Remedy performance regression by only writing run_results.json once. ([#8360](https://github.com/dbt-labs/dbt-core/issues/8360))
### Docs
- Corrected spelling of "Partiton" ([dbt-docs/#8100](https://github.com/dbt-labs/dbt-docs/issues/8100))
- Remove static SQL codeblock for metrics ([dbt-docs/#436](https://github.com/dbt-labs/dbt-docs/issues/436))
- fixed comment util.py ([dbt-docs/#None](https://github.com/dbt-labs/dbt-docs/issues/None))
- Display contract and column constraints on the model page ([dbt-docs/#433](https://github.com/dbt-labs/dbt-docs/issues/433))
- Display semantic model details in docs ([dbt-docs/#431](https://github.com/dbt-labs/dbt-docs/issues/431))
### Under the Hood
- Refactor flaky test pp_versioned_models ([#7781](https://github.com/dbt-labs/dbt-core/issues/7781))
- format exception from dbtPlugin.initialize ([#8152](https://github.com/dbt-labs/dbt-core/issues/8152))
- A way to control maxBytes for a single dbt.log file ([#8199](https://github.com/dbt-labs/dbt-core/issues/8199))
- Ref expressions with version can now be processed by the latest version of the high-performance dbt-extractor library. ([#7688](https://github.com/dbt-labs/dbt-core/issues/7688))
- Bump manifest schema version to v11, freeze manifest v10 ([#8333](https://github.com/dbt-labs/dbt-core/issues/8333))
- add tracking for plugin.get_nodes calls ([#8344](https://github.com/dbt-labs/dbt-core/issues/8344))
- add internal flag: --no-partial-parse-file-diff to inform whether to compute a file diff during partial parsing ([#8363](https://github.com/dbt-labs/dbt-core/issues/8363))
- Add return values to a number of functions for mypy ([#8389](https://github.com/dbt-labs/dbt-core/issues/8389))
- Fix mypy warnings for ManifestLoader.load() ([#8401](https://github.com/dbt-labs/dbt-core/issues/8401))
- Use python version 3.10.7 in Docker image. ([#8444](https://github.com/dbt-labs/dbt-core/issues/8444))
### Dependencies
- Bump mypy from 1.3.0 to 1.4.0 ([#7912](https://github.com/dbt-labs/dbt-core/pull/7912))
- Bump mypy from 1.4.0 to 1.4.1 ([#8219](https://github.com/dbt-labs/dbt-core/pull/8219))
- Update pin for click<9 ([#8232](https://github.com/dbt-labs/dbt-core/pull/8232))
- Add upper bound to sqlparse pin of <0.5 ([#8236](https://github.com/dbt-labs/dbt-core/pull/8236))
- Support dbt-semantic-interfaces 0.2.0 ([#8250](https://github.com/dbt-labs/dbt-core/pull/8250))
### Contributors
- [@anjutiwari](https://github.com/anjutiwari) ([#7428](https://github.com/dbt-labs/dbt-core/issues/7428), [#8223](https://github.com/dbt-labs/dbt-core/issues/8223))
- [@d-kaneshiro](https://github.com/d-kaneshiro) ([#None](https://github.com/dbt-labs/dbt-core/issues/None))
- [@gem7318](https://github.com/gem7318) ([#8010](https://github.com/dbt-labs/dbt-core/issues/8010))
- [@lllong33](https://github.com/lllong33) ([#5301](https://github.com/dbt-labs/dbt-core/issues/5301))
- [@marcodamore](https://github.com/marcodamore) ([#436](https://github.com/dbt-labs/dbt-core/issues/436))
- [@pgoslatara](https://github.com/pgoslatara) ([#8100](https://github.com/dbt-labs/dbt-core/issues/8100))
- [@ramonvermeulen](https://github.com/ramonvermeulen) ([#3990](https://github.com/dbt-labs/dbt-core/issues/3990))
## Previous Releases
For information on prior major and minor releases, see their changelogs:


@@ -1,9 +0,0 @@
ignore:
- ".github"
- ".changes"
coverage:
status:
project:
default:
target: auto
threshold: 0.01% # Reduce noise by ignoring rounding errors in coverage drops


@@ -400,7 +400,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
@abc.abstractmethod
def execute(
self, sql: str, auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None
self, sql: str, auto_begin: bool = False, fetch: bool = False
) -> Tuple[AdapterResponse, agate.Table]:
"""Execute the given SQL.
@@ -408,28 +408,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
:param bool auto_begin: If set, and dbt is not currently inside a
transaction, automatically begin one.
:param bool fetch: If set, fetch results.
:param int limit: If set, limits the result set
:return: A tuple of the query status and results (empty if fetch=False).
:rtype: Tuple[AdapterResponse, agate.Table]
"""
raise dbt.exceptions.NotImplementedError("`execute` is not implemented for this adapter!")
def add_select_query(self, sql: str) -> Tuple[Connection, Any]:
"""
This was added here because base.impl.BaseAdapter.get_column_schema_from_query expects it to be here.
That method wouldn't work unless the adapter used sql.impl.SQLAdapter, sql.connections.SQLConnectionManager
or defined this method on <Adapter>ConnectionManager before passing it in to <Adapter>Adapter.
See https://github.com/dbt-labs/dbt-core/issues/8396 for more information.
"""
raise dbt.exceptions.NotImplementedError(
"`add_select_query` is not implemented for this adapter!"
)
@classmethod
def data_type_code_to_name(cls, type_code: Union[int, str]) -> str:
"""Get the string representation of the data type from the type_code."""
# https://peps.python.org/pep-0249/#type-objects
raise dbt.exceptions.NotImplementedError(
"`data_type_code_to_name` is not implemented for this adapter!"
)


@@ -43,7 +43,7 @@ from dbt.exceptions import (
UnexpectedNullError,
)
from dbt.adapters.protocol import AdapterConfig
from dbt.adapters.protocol import AdapterConfig, ConnectionManagerProtocol
from dbt.clients.agate_helper import empty_table, merge_tables, table_from_rows
from dbt.clients.jinja import MacroGenerator
from dbt.contracts.graph.manifest import Manifest, MacroManifest
@@ -60,7 +60,7 @@ from dbt.events.types import (
)
from dbt.utils import filter_null_values, executor, cast_to_str, AttrDict
from dbt.adapters.base.connections import Connection, AdapterResponse, BaseConnectionManager
from dbt.adapters.base.connections import Connection, AdapterResponse
from dbt.adapters.base.meta import AdapterMeta, available
from dbt.adapters.base.relation import (
ComponentName,
@@ -208,7 +208,7 @@ class BaseAdapter(metaclass=AdapterMeta):
Relation: Type[BaseRelation] = BaseRelation
Column: Type[BaseColumn] = BaseColumn
ConnectionManager: Type[BaseConnectionManager]
ConnectionManager: Type[ConnectionManagerProtocol]
# A set of clobber config fields accepted by this adapter
# for use in materializations
@@ -315,21 +315,14 @@ class BaseAdapter(metaclass=AdapterMeta):
@available.parse(lambda *a, **k: ("", empty_table()))
def get_partitions_metadata(self, table: str) -> Tuple[agate.Table]:
"""
TODO: Can we move this to dbt-bigquery?
Obtain partitions metadata for a BigQuery partitioned table.
"""Obtain partitions metadata for a BigQuery partitioned table.
:param str table: a partitioned table id, in standard SQL format.
:param str table_id: a partitioned table id, in standard SQL format.
:return: a partition metadata tuple, as described in
https://cloud.google.com/bigquery/docs/creating-partitioned-tables#getting_partition_metadata_using_meta_tables.
:rtype: agate.Table
"""
if hasattr(self.connections, "get_partitions_metadata"):
return self.connections.get_partitions_metadata(table=table)
else:
raise NotImplementedError(
"`get_partitions_metadata` is not implemented for this adapter!"
)
###
# Methods that should never be overridden
@@ -460,9 +453,8 @@ class BaseAdapter(metaclass=AdapterMeta):
# it's possible that there were no relations in some schemas. We want
# to insert the schemas we query into the cache's `.schemas` attribute
# so we can check it later
cache_update: Set[Tuple[Optional[str], str]] = set()
cache_update: Set[Tuple[Optional[str], Optional[str]]] = set()
for relation in cache_schemas:
if relation.schema:
cache_update.add((relation.database, relation.schema))
self.cache.update_schemas(cache_update)


@@ -25,9 +25,9 @@ class _QueryComment(local):
- a source_name indicating what set the current thread's query comment
"""
def __init__(self, initial) -> None:
def __init__(self, initial):
self.query_comment: Optional[str] = initial
self.append: bool = False
self.append = False
def add(self, sql: str) -> str:
if not self.query_comment:


@@ -1,6 +1,6 @@
import abc
import time
from typing import List, Optional, Tuple, Any, Iterable, Dict
from typing import List, Optional, Tuple, Any, Iterable, Dict, Union
import agate
@@ -131,6 +131,14 @@ class SQLConnectionManager(BaseConnectionManager):
return dbt.clients.agate_helper.table_from_data_flat(data, column_names)
@classmethod
def data_type_code_to_name(cls, type_code: Union[int, str]) -> str:
"""Get the string representation of the data type from the type_code."""
# https://peps.python.org/pep-0249/#type-objects
raise dbt.exceptions.NotImplementedError(
"`data_type_code_to_name` is not implemented for this adapter!"
)
def execute(
self, sql: str, auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None
) -> Tuple[AdapterResponse, agate.Table]:
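
The `data_type_code_to_name` stub added to `SQLConnectionManager` in this hunk leaves the actual mapping to each adapter. As a rough, hypothetical illustration of the kind of override an adapter might ship (the codes below are invented; real ones come from the driver's PEP 249 `cursor.description`):

```python
from typing import Union

class ExampleConnectionManager:
    # Invented type codes for the sketch; a real driver defines its own.
    _TYPE_CODES = {16: "BOOLEAN", 23: "INTEGER", 25: "TEXT"}

    @classmethod
    def data_type_code_to_name(cls, type_code: Union[int, str]) -> str:
        # Some drivers already hand back a string name; pass it through.
        if isinstance(type_code, str):
            return type_code
        return cls._TYPE_CODES.get(type_code, f"unknown ({type_code!r})")

print(ExampleConnectionManager.data_type_code_to_name(23))  # INTEGER
```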


@@ -61,6 +61,7 @@ def args_to_context(args: List[str]) -> Context:
if len(args) == 1 and "," in args[0]:
args = args[0].split(",")
sub_command_name, sub_command, args = cli.resolve_command(cli_ctx, args)
# Handle source and docs group.
if isinstance(sub_command, Group):
sub_command_name, sub_command, args = sub_command.resolve_command(cli_ctx, args)
@@ -318,6 +319,7 @@ def command_params(command: CliCommand, args_dict: Dict[str, Any]) -> CommandPar
for k, v in args_dict.items():
k = k.lower()
# if a "which" value exists in the args dict, it should match the command provided
if k == WHICH_KEY:
if v != command.value:
@@ -342,8 +344,7 @@ def command_params(command: CliCommand, args_dict: Dict[str, Any]) -> CommandPar
if k == "macro" and command == CliCommand.RUN_OPERATION:
add_fn(v)
# None is a Singleton, False is a Flyweight, only one instance of each.
elif v is None or v is False:
elif v in (None, False):
add_fn(f"--no-{spinal_cased}")
elif v is True:
add_fn(f"--{spinal_cased}")


@@ -141,7 +141,6 @@ class dbtRunner:
@p.macro_debugging
@p.partial_parse
@p.partial_parse_file_path
@p.partial_parse_file_diff
@p.populate_cache
@p.print
@p.printer_width


@@ -257,14 +257,6 @@ partial_parse_file_path = click.option(
type=click.Path(exists=True, dir_okay=False, resolve_path=True),
)
partial_parse_file_diff = click.option(
"--partial-parse-file-diff/--no-partial-parse-file-diff",
envvar="DBT_PARTIAL_PARSE_FILE_DIFF",
help="Internal flag for whether to compute a file diff during partial parsing.",
hidden=True,
default=True,
)
populate_cache = click.option(
"--populate-cache/--no-populate-cache",
envvar="DBT_POPULATE_CACHE",
@@ -397,9 +389,9 @@ inline = click.option(
# Most CLI arguments should use the combined `select` option that aliases `--models` to `--select`.
# However, if you need to split out these separators (like `dbt ls`), use the `models` and `raw_select` options instead.
# See https://github.com/dbt-labs/dbt-core/pull/6774#issuecomment-1408476095 for more info.
models = click.option(*model_decls, **select_attrs) # type: ignore[arg-type]
raw_select = click.option(*select_decls, **select_attrs) # type: ignore[arg-type]
select = click.option(*select_decls, *model_decls, **select_attrs) # type: ignore[arg-type]
models = click.option(*model_decls, **select_attrs)
raw_select = click.option(*select_decls, **select_attrs)
select = click.option(*select_decls, *model_decls, **select_attrs)
selector = click.option(
"--selector",


@@ -9,6 +9,7 @@ from typing import Iterable, List, Dict, Union, Optional, Any
from dbt.exceptions import DbtRuntimeError
BOM = BOM_UTF8.decode("utf-8") # '\ufeff'
@@ -134,12 +135,12 @@ def as_matrix(table):
return [r.values() for r in table.rows.values()]
def from_csv(abspath, text_columns, delimiter=","):
def from_csv(abspath, text_columns):
type_tester = build_type_tester(text_columns=text_columns)
with open(abspath, encoding="utf-8") as fp:
if fp.read(1) != BOM:
fp.seek(0)
return agate.Table.from_csv(fp, column_types=type_tester, delimiter=delimiter)
return agate.Table.from_csv(fp, column_types=type_tester)
class _NullMarker:
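
For context on the `delimiter` argument stripped from `from_csv` here: `agate.Table.from_csv` forwards extra keyword arguments to Python's csv machinery, which is exactly how the removed call passed `delimiter` through. A minimal sketch of the call (the file name is illustrative only):

```python
import agate

# Parse a semicolon-delimited seed file; `delimiter` is forwarded by
# agate to the underlying csv reader. "seed.csv" is a made-up path.
with open("seed.csv", encoding="utf-8") as fp:
    table = agate.Table.from_csv(fp, delimiter=";")
```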


@@ -191,7 +191,7 @@ NativeSandboxEnvironment.template_class = NativeSandboxTemplate # type: ignore
class TemplateCache:
def __init__(self) -> None:
def __init__(self):
self.file_cache: Dict[str, jinja2.Template] = {}
def get_node_template(self, node) -> jinja2.Template:


@@ -40,7 +40,7 @@ class MacroResolver:
self._build_internal_packages_namespace()
self._build_macros_by_name()
def _build_internal_packages_namespace(self) -> None:
def _build_internal_packages_namespace(self):
# Iterate in reverse-order and overwrite: the packages that are first
# in the list are the ones we want to "win".
self.internal_packages_namespace: MacroNamespace = {}
@@ -56,7 +56,7 @@ class MacroResolver:
# root package namespace
# non-internal packages (that aren't local or root)
# dbt internal packages
def _build_macros_by_name(self) -> None:
def _build_macros_by_name(self):
macros_by_name = {}
# all internal packages (already in the right order)
@@ -78,7 +78,7 @@ class MacroResolver:
self,
package_namespaces: Dict[str, MacroNamespace],
macro: Macro,
) -> None:
):
if macro.package_name in package_namespaces:
namespace = package_namespaces[macro.package_name]
else:
@@ -89,7 +89,7 @@ class MacroResolver:
raise DuplicateMacroNameError(macro, macro, macro.package_name)
package_namespaces[macro.package_name][macro.name] = macro
def add_macro(self, macro: Macro) -> None:
def add_macro(self, macro: Macro):
macro_name: str = macro.name
# internal macros (from plugins) will be processed separately from
@@ -103,11 +103,11 @@ class MacroResolver:
if macro.package_name == self.root_project_name:
self.root_package_macros[macro_name] = macro
def add_macros(self) -> None:
def add_macros(self):
for macro in self.macros.values():
self.add_macro(macro)
def get_macro(self, local_package, macro_name) -> Optional[Macro]:
def get_macro(self, local_package, macro_name):
local_package_macros = {}
# If the macro is explicitly prefixed with an internal namespace
# (e.g. 'dbt.some_macro'), look there first
@@ -125,7 +125,7 @@ class MacroResolver:
return self.macros_by_name[macro_name]
return None
def get_macro_id(self, local_package, macro_name) -> Optional[str]:
def get_macro_id(self, local_package, macro_name):
macro = self.get_macro(local_package, macro_name)
if macro is None:
return None


@@ -865,9 +865,8 @@ class ProviderContext(ManifestContext):
assert self.model.root_path
path = os.path.join(self.model.root_path, self.model.original_file_path)
column_types = self.model.config.column_types
delimiter = self.model.config.delimiter
try:
table = agate_helper.from_csv(path, text_columns=column_types, delimiter=delimiter)
table = agate_helper.from_csv(path, text_columns=column_types)
except ValueError as e:
raise LoadAgateTableValueError(e, node=self.model)
table.original_abspath = os.path.abspath(path)


@@ -225,8 +225,6 @@ class SchemaSourceFile(BaseSourceFile):
sources: List[str] = field(default_factory=list)
exposures: List[str] = field(default_factory=list)
metrics: List[str] = field(default_factory=list)
# metrics generated from semantic_model measures
generated_metrics: List[str] = field(default_factory=list)
groups: List[str] = field(default_factory=list)
# node patches contain models, seeds, snapshots, analyses
ndp: List[str] = field(default_factory=list)


@@ -1331,13 +1331,10 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
self.exposures[exposure.unique_id] = exposure
source_file.exposures.append(exposure.unique_id)
def add_metric(self, source_file: SchemaSourceFile, metric: Metric, generated: bool = False):
def add_metric(self, source_file: SchemaSourceFile, metric: Metric):
_check_duplicates(metric, self.metrics)
self.metrics[metric.unique_id] = metric
if not generated:
source_file.metrics.append(metric.unique_id)
else:
source_file.generated_metrics.append(metric.unique_id)
def add_group(self, source_file: SchemaSourceFile, group: Group):
_check_duplicates(group, self.groups)
@@ -1425,7 +1422,7 @@ AnyManifest = Union[Manifest, MacroManifest]
@dataclass
@schema_version("manifest", 11)
@schema_version("manifest", 10)
class WritableManifest(ArtifactMixin):
nodes: Mapping[UniqueID, ManifestNode] = field(
metadata=dict(description=("The nodes defined in the dbt project and its dependencies"))
@@ -1489,7 +1486,6 @@ class WritableManifest(ArtifactMixin):
("manifest", 7),
("manifest", 8),
("manifest", 9),
("manifest", 10),
]
@classmethod
@@ -1497,7 +1493,7 @@ class WritableManifest(ArtifactMixin):
"""This overrides the "upgrade_schema_version" call in VersionedSchema (via
ArtifactMixin) to modify the dictionary passed in from earlier versions of the manifest."""
manifest_schema_version = get_manifest_schema_version(data)
if manifest_schema_version <= 10:
if manifest_schema_version <= 9:
data = upgrade_manifest_json(data, manifest_schema_version)
return cls.from_dict(data)


@@ -544,7 +544,6 @@ class NodeConfig(NodeAndTestConfig):
@dataclass
class SeedConfig(NodeConfig):
materialized: str = "seed"
delimiter: str = ","
quote_columns: Optional[bool] = None
@classmethod
@@ -620,8 +619,6 @@ class SnapshotConfig(EmptySnapshotConfig):
@classmethod
def validate(cls, data):
super().validate(data)
# Note: currently you can't just set these keys in schema.yml because this validation
# will fail when parsing the snapshot node.
if not data.get("strategy") or not data.get("unique_key") or not data.get("target_schema"):
raise ValidationError(
"Snapshots must be configured with a 'strategy', 'unique_key', "
@@ -652,7 +649,6 @@ class SnapshotConfig(EmptySnapshotConfig):
if data.get("materialized") and data.get("materialized") != "snapshot":
raise ValidationError("A snapshot must have a materialized value of 'snapshot'")
# Called by "calculate_node_config_dict" in ContextConfigGenerator
def finalize_and_validate(self):
data = self.to_dict(omit_none=True)
self.validate(data)


@@ -29,11 +29,3 @@ class ModelNodeArgs:
unique_id = f"{unique_id}.v{self.version}"
return unique_id
@property
def fqn(self) -> List[str]:
fqn = [self.package_name, self.name]
if self.version:
fqn.append(f"v{self.version}")
return fqn


@@ -44,14 +44,12 @@ from dbt.events.types import (
SeedExceedsLimitSamePath,
SeedExceedsLimitAndPathChanged,
SeedExceedsLimitChecksumChanged,
UnversionedBreakingChange,
)
from dbt.events.contextvars import set_log_contextvars
from dbt.flags import get_flags
from dbt.node_types import ModelLanguage, NodeType, AccessType
from dbt_semantic_interfaces.call_parameter_sets import FilterCallParameterSets
from dbt_semantic_interfaces.references import (
EntityReference,
MeasureReference,
LinkableElementReference,
SemanticModelReference,
@@ -591,7 +589,7 @@ class ModelNode(CompiledNode):
name=args.name,
package_name=args.package_name,
unique_id=unique_id,
fqn=args.fqn,
fqn=[args.package_name, args.name],
version=args.version,
latest_version=args.latest_version,
relation_name=args.relation_name,
@@ -627,18 +625,6 @@ class ModelNode(CompiledNode):
def materialization_enforces_constraints(self) -> bool:
return self.config.materialized in ["table", "incremental"]
def same_contents(self, old, adapter_type) -> bool:
return super().same_contents(old, adapter_type) and self.same_ref_representation(old)
def same_ref_representation(self, old) -> bool:
return (
# Changing the latest_version may break downstream unpinned refs
self.latest_version == old.latest_version
# Changes to access or deprecation_date may lead to ref-related parsing errors
and self.access == old.access
and self.deprecation_date == old.deprecation_date
)
def build_contract_checksum(self):
# We don't need to construct the checksum if the model does not
# have contract enforced, because it won't be used.
@@ -683,11 +669,11 @@ class ModelNode(CompiledNode):
# These are the categories of breaking changes:
contract_enforced_disabled: bool = False
columns_removed: List[str] = []
column_type_changes: List[Dict[str, str]] = []
enforced_column_constraint_removed: List[
Dict[str, str]
] = [] # column_name, constraint_type
enforced_model_constraint_removed: List[Dict[str, Any]] = [] # constraint_type, columns
column_type_changes: List[Tuple[str, str, str]] = []
enforced_column_constraint_removed: List[Tuple[str, str]] = [] # column, constraint_type
enforced_model_constraint_removed: List[
Tuple[str, List[str]]
] = [] # constraint_type, columns
materialization_changed: List[str] = []
if old.contract.enforced is True and self.contract.enforced is False:
@@ -709,11 +695,11 @@ class ModelNode(CompiledNode):
# Has this column's data type changed?
elif old_value.data_type != self.columns[old_key].data_type:
column_type_changes.append(
{
"column_name": str(old_value.name),
"previous_column_type": str(old_value.data_type),
"current_column_type": str(self.columns[old_key].data_type),
}
(
str(old_value.name),
str(old_value.data_type),
str(self.columns[old_key].data_type),
)
)
# track if there are any column level constraints for the materialization check late
@@ -734,11 +720,7 @@ class ModelNode(CompiledNode):
and constraint_support[old_constraint.type] == ConstraintSupport.ENFORCED
):
enforced_column_constraint_removed.append(
{
"column_name": old_key,
"constraint_name": old_constraint.name,
"constraint_type": ConstraintType(old_constraint.type),
}
(old_key, str(old_constraint.type))
)
# Now compare the model level constraints
@@ -749,11 +731,7 @@ class ModelNode(CompiledNode):
and constraint_support[old_constraint.type] == ConstraintSupport.ENFORCED
):
enforced_model_constraint_removed.append(
{
"constraint_name": old_constraint.name,
"constraint_type": ConstraintType(old_constraint.type),
"columns": old_constraint.columns,
}
(str(old_constraint.type), old_constraint.columns)
)
# Check for relevant materialization changes.
@@ -767,8 +745,7 @@ class ModelNode(CompiledNode):
# If a column has been added, it will be missing in the old.columns, and present in self.columns
# That's a change (caught by the different checksums), but not a breaking change
# Did we find any changes that we consider breaking? If there's an enforced contract, that's
# a warning unless the model is versioned, then it's an error.
# Did we find any changes that we consider breaking? If so, that's an error
if (
contract_enforced_disabled
or columns_removed
@@ -777,77 +754,20 @@ class ModelNode(CompiledNode):
or enforced_column_constraint_removed
or materialization_changed
):
breaking_changes = []
if contract_enforced_disabled:
breaking_changes.append(
"Contract enforcement was removed: Previously, this model had an enforced contract. It is no longer configured to enforce its contract, and this is a breaking change."
)
if columns_removed:
columns_removed_str = "\n - ".join(columns_removed)
breaking_changes.append(f"Columns were removed: \n - {columns_removed_str}")
if column_type_changes:
column_type_changes_str = "\n - ".join(
[
f"{c['column_name']} ({c['previous_column_type']} -> {c['current_column_type']})"
for c in column_type_changes
]
)
breaking_changes.append(
f"Columns with data_type changes: \n - {column_type_changes_str}"
)
if enforced_column_constraint_removed:
column_constraint_changes_str = "\n - ".join(
[
f"'{c['constraint_name'] if c['constraint_name'] is not None else c['constraint_type']}' constraint on column {c['column_name']}"
for c in enforced_column_constraint_removed
]
)
breaking_changes.append(
f"Enforced column level constraints were removed: \n - {column_constraint_changes_str}"
)
if enforced_model_constraint_removed:
model_constraint_changes_str = "\n - ".join(
[
f"'{c['constraint_name'] if c['constraint_name'] is not None else c['constraint_type']}' constraint on columns {c['columns']}"
for c in enforced_model_constraint_removed
]
)
breaking_changes.append(
f"Enforced model level constraints were removed: \n - {model_constraint_changes_str}"
)
if materialization_changed:
materialization_changes_str = (
f"{materialization_changed[0]} -> {materialization_changed[1]}"
)
breaking_changes.append(
f"Materialization changed with enforced constraints: \n - {materialization_changes_str}"
)
if self.version is None:
warn_or_error(
UnversionedBreakingChange(
raise (
ContractBreakingChangeError(
contract_enforced_disabled=contract_enforced_disabled,
columns_removed=columns_removed,
column_type_changes=column_type_changes,
enforced_column_constraint_removed=enforced_column_constraint_removed,
enforced_model_constraint_removed=enforced_model_constraint_removed,
breaking_changes=breaking_changes,
model_name=self.name,
model_file_path=self.original_file_path,
),
node=self,
)
else:
raise (
ContractBreakingChangeError(
breaking_changes=breaking_changes,
materialization_changed=materialization_changed,
node=self,
)
)
# Otherwise, the contract has changed -- same_contract: False
# Otherwise, though we didn't find any *breaking* changes, the contract has still changed -- same_contract: False
else:
return False
@@ -1578,7 +1498,6 @@ class SemanticModel(GraphNode):
refs: List[RefArgs] = field(default_factory=list)
created_at: float = field(default_factory=lambda: time.time())
config: SemanticModelConfig = field(default_factory=SemanticModelConfig)
primary_entity: Optional[str] = None
@property
def entity_references(self) -> List[LinkableElementReference]:
@@ -1649,26 +1568,17 @@ class SemanticModel(GraphNode):
measure is not None
), f"No measure with name ({measure_reference.element_name}) in semantic_model with name ({self.name})"
default_agg_time_dimension = (
self.defaults.agg_time_dimension if self.defaults is not None else None
)
if self.defaults is not None:
default_agg_time_dimesion = self.defaults.agg_time_dimension
agg_time_dimension_name = measure.agg_time_dimension or default_agg_time_dimension
agg_time_dimension_name = measure.agg_time_dimension or default_agg_time_dimesion
assert agg_time_dimension_name is not None, (
f"Aggregation time dimension for measure {measure.name} on semantic model {self.name} is not set! "
"To fix this either specify a default `agg_time_dimension` for the semantic model or define an "
"`agg_time_dimension` on the measure directly."
f"Aggregation time dimension for measure {measure.name} is not set! This should either be set directly on "
f"the measure specification in the model, or else defaulted to the primary time dimension in the data "
f"source containing the measure."
)
return TimeDimensionReference(element_name=agg_time_dimension_name)
@property
def primary_entity_reference(self) -> Optional[EntityReference]:
return (
EntityReference(element_name=self.primary_entity)
if self.primary_entity is not None
else None
)
# ====================================
# Patches
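
One side of the `checked_agg_time_dimension_for_measure` hunk above binds the default name unconditionally, while the other binds it only inside the `if self.defaults is not None` branch (note the `dimesion` typo). The branch-only form is the unbound-local bug tracked in #8230, distilled here as a hypothetical reduction:

```python
def agg_time_dimension(defaults, measure_agg_time_dimension):
    if defaults is not None:
        default_agg_time_dimension = defaults
    # When defaults is None and the measure sets nothing, `or` must
    # evaluate its right operand, which was never bound.
    return measure_agg_time_dimension or default_agg_time_dimension

agg_time_dimension("ds", None)   # fine: the branch bound the name
agg_time_dimension(None, "ds")   # fine: `or` short-circuits
agg_time_dimension(None, None)   # raises UnboundLocalError
```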


@@ -220,7 +220,7 @@ class UnparsedModelUpdate(UnparsedNodeUpdate):
versions: Sequence[UnparsedVersion] = field(default_factory=list)
deprecation_date: Optional[datetime.datetime] = None
def __post_init__(self) -> None:
def __post_init__(self):
if self.latest_version:
version_values = [version.v for version in self.versions]
if self.latest_version not in version_values:
@@ -228,7 +228,7 @@ class UnparsedModelUpdate(UnparsedNodeUpdate):
f"latest_version: {self.latest_version} is not one of model '{self.name}' versions: {version_values} "
)
seen_versions = set()
seen_versions: set[str] = set()
for version in self.versions:
if str(version.v) in seen_versions:
raise ParsingError(
@@ -689,7 +689,7 @@ class UnparsedEntity(dbtClassMixin):
class UnparsedNonAdditiveDimension(dbtClassMixin):
name: str
window_choice: str # AggregationType enum
window_groupings: List[str] = field(default_factory=list)
window_groupings: List[str]
@dataclass
@@ -701,7 +701,6 @@ class UnparsedMeasure(dbtClassMixin):
agg_params: Optional[MeasureAggregationParameters] = None
non_additive_dimension: Optional[UnparsedNonAdditiveDimension] = None
agg_time_dimension: Optional[str] = None
create_metric: bool = False
@dataclass
@@ -729,7 +728,6 @@ class UnparsedSemanticModel(dbtClassMixin):
entities: List[UnparsedEntity] = field(default_factory=list)
measures: List[UnparsedMeasure] = field(default_factory=list)
dimensions: List[UnparsedDimension] = field(default_factory=list)
primary_entity: Optional[str] = None
def normalize_date(d: Optional[datetime.date]) -> Optional[datetime.datetime]:


@@ -258,13 +258,6 @@ class ArtifactMixin(VersionedSchema, Writable, Readable):
class Identifier(ValidatedStringMixin):
"""Our definition of a valid Identifier is the same as what's valid for an unquoted database table name.
That is:
1. It can contain a-z, A-Z, 0-9, and _
1. It cannot start with a number
"""
ValidationRegex = r"^[^\d\W]\w*$"
@classmethod
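
The `ValidationRegex` in the removed `Identifier` class is compact but worth decoding: `[^\d\W]` accepts any word character that is not a digit, so identifiers must start with a letter or underscore. A quick check of the rule as written (note that Python's `\w` is Unicode-aware by default):

```python
import re

IDENTIFIER = re.compile(r"^[^\d\W]\w*$")

print(bool(IDENTIFIER.match("my_table_1")))  # True
print(bool(IDENTIFIER.match("_staging")))    # True
print(bool(IDENTIFIER.match("1st_table")))   # False: starts with a digit
print(bool(IDENTIFIER.match("bad-name")))    # False: '-' is not \w
```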


@@ -51,15 +51,19 @@ class LocalPinnedPackage(LocalPackageMixin, PinnedPackage):
src_path = self.resolve_path(project)
dest_path = self.get_installation_path(project, renderer)
can_create_symlink = system.supports_symlinks()
if system.path_exists(dest_path):
if not system.path_is_symlink(dest_path):
system.rmdir(dest_path)
else:
system.remove_file(dest_path)
try:
if can_create_symlink:
fire_event(DepsCreatingLocalSymlink())
system.make_symlink(src_path, dest_path)
except OSError:
else:
fire_event(DepsSymlinkNotAvailable())
shutil.copytree(src_path, dest_path)
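
The two shapes of this install logic differ in how they decide between symlink and copy: one probes `supports_symlinks()` up front, the other attempts the symlink and falls back on `OSError`. The try/except shape, reduced to the standard library (a sketch; dbt routes these calls through its `system` module):

```python
import os
import shutil

def install_local_package(src_path: str, dest_path: str) -> None:
    # Clear out whatever is already at the destination.
    if os.path.lexists(dest_path):
        if os.path.islink(dest_path) or os.path.isfile(dest_path):
            os.remove(dest_path)
        else:
            shutil.rmtree(dest_path)
    try:
        # Symlinking can fail even where nominally supported
        # (e.g. Windows without the right privileges).
        os.symlink(src_path, dest_path)
    except OSError:
        shutil.copytree(src_path, dest_path)
```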


@@ -8,7 +8,7 @@ import logging
from logging.handlers import RotatingFileHandler
import threading
import traceback
from typing import Any, Callable, List, Optional, TextIO, Protocol
from typing import Any, Callable, List, Optional, TextIO
from uuid import uuid4
from dbt.events.format import timestamp_to_datetime_string
@@ -206,7 +206,7 @@ class EventManager:
for callback in self.callbacks:
callback(msg)
def add_logger(self, config: LoggerConfig) -> None:
def add_logger(self, config: LoggerConfig):
logger = (
_JsonLogger(self, config)
if config.line_format == LineFormat.Json
@@ -218,25 +218,3 @@ class EventManager:
def flush(self):
for logger in self.loggers:
logger.flush()
class IEventManager(Protocol):
callbacks: List[Callable[[EventMsg], None]]
invocation_id: str
def fire_event(self, e: BaseEvent, level: Optional[EventLevel] = None) -> None:
...
def add_logger(self, config: LoggerConfig) -> None:
...
class TestEventManager(IEventManager):
def __init__(self):
self.event_history = []
def fire_event(self, e: BaseEvent, level: Optional[EventLevel] = None) -> None:
self.event_history.append((e, level))
def add_logger(self, config: LoggerConfig) -> None:
raise NotImplementedError()


@@ -1,8 +1,8 @@
from dbt.constants import METADATA_ENV_PREFIX
from dbt.events.base_types import BaseEvent, EventLevel, EventMsg
from dbt.events.eventmgr import EventManager, LoggerConfig, LineFormat, NoFilter, IEventManager
from dbt.events.eventmgr import EventManager, LoggerConfig, LineFormat, NoFilter
from dbt.events.helpers import env_secrets, scrub_secrets
from dbt.events.types import Note
from dbt.events.types import Formatting, Note
from dbt.flags import get_flags, ENABLE_LEGACY_LOGGER
from dbt.logger import GLOBAL_LOGGER, make_log_dir_if_missing
from functools import partial
@@ -115,7 +115,9 @@ def _stdout_filter(
line_format: LineFormat,
msg: EventMsg,
) -> bool:
return msg.info.name not in ["CacheAction", "CacheDumpGraph"] or log_cache_events
return (msg.info.name not in ["CacheAction", "CacheDumpGraph"] or log_cache_events) and not (
line_format == LineFormat.Json and type(msg.data) == Formatting
)
def _get_logfile_config(
@@ -138,8 +140,10 @@ def _get_logfile_config(
def _logfile_filter(log_cache_events: bool, line_format: LineFormat, msg: EventMsg) -> bool:
return msg.info.code not in nofile_codes and not (
msg.info.name in ["CacheAction", "CacheDumpGraph"] and not log_cache_events
return (
msg.info.code not in nofile_codes
and not (msg.info.name in ["CacheAction", "CacheDumpGraph"] and not log_cache_events)
and not (line_format == LineFormat.Json and type(msg.data) == Formatting)
)
@@ -178,7 +182,7 @@ def cleanup_event_logger():
# Since dbt-rpc does not do its own log setup, and since some events can
# currently fire before logs can be configured by setup_event_logger(), we
# create a default configuration with default settings and no file output.
EVENT_MANAGER: IEventManager = EventManager()
EVENT_MANAGER: EventManager = EventManager()
EVENT_MANAGER.add_logger(
_get_logbook_log_config(False, True, False, False) # type: ignore
if ENABLE_LEGACY_LOGGER
@@ -269,7 +273,7 @@ def fire_event(e: BaseEvent, level: Optional[EventLevel] = None) -> None:
def get_metadata_vars() -> Dict[str, str]:
global metadata_vars
if not metadata_vars:
if metadata_vars is None:
metadata_vars = {
k[len(METADATA_ENV_PREFIX) :]: v
for k, v in os.environ.items()
@@ -291,8 +295,3 @@ def set_invocation_id() -> None:
# This is primarily for setting the invocation_id for separate
# commands in the dbt servers. It shouldn't be necessary for the CLI.
EVENT_MANAGER.invocation_id = str(uuid.uuid4())
def ctx_set_event_manager(event_manager: IEventManager):
global EVENT_MANAGER
EVENT_MANAGER = event_manager
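
One small semantic shift in this file is easy to miss: `if not metadata_vars:` rebuilds the cache when it is None or an empty dict, which is what lets programmatic invocations re-populate metadata vars after the environment changes (#8010), while `if metadata_vars is None:` caches the first, possibly empty, result forever. In plain Python:

```python
print(not None)    # True  -- unpopulated cache is rebuilt either way
print(not {})      # True  -- empty cache rebuilt under `not ...`
print({} is None)  # False -- empty cache kept under `is None`
```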


@@ -66,27 +66,6 @@ message ReferenceKeyMsg {
string identifier = 3;
}
//ColumnType
message ColumnType {
string column_name = 1;
string previous_column_type = 2;
string current_column_type = 3;
}
// ColumnConstraint
message ColumnConstraint {
string column_name = 1;
string constraint_name = 2;
string constraint_type = 3;
}
// ModelConstraint
message ModelConstraint {
string constraint_name = 1;
string constraint_type = 2;
repeated string columns = 3;
}
// GenericMessage, used for deserializing only
message GenericMessage {
EventInfo info = 1;
@@ -1269,24 +1248,6 @@ message SemanticValidationFailureMsg {
SemanticValidationFailure data = 2;
}
// I071
message UnversionedBreakingChange {
repeated string breaking_changes = 1;
string model_name = 2;
string model_file_path = 3;
bool contract_enforced_disabled = 4;
repeated string columns_removed = 5;
repeated ColumnType column_type_changes = 6;
repeated ColumnConstraint enforced_column_constraint_removed = 7;
repeated ModelConstraint enforced_model_constraint_removed = 8;
repeated string materialization_changed = 9;
}
message UnversionedBreakingChangeMsg {
EventInfo info = 1;
UnversionedBreakingChange data = 2;
}
// M - Deps generation
@@ -2284,7 +2245,25 @@ message CheckNodeTestFailureMsg {
CheckNodeTestFailure data = 2;
}
// Skipped Z028, Z029
// Z028
message FirstRunResultError {
string msg = 1;
}
message FirstRunResultErrorMsg {
EventInfo info = 1;
FirstRunResultError data = 2;
}
// Z029
message AfterFirstRunResultError {
string msg = 1;
}
message AfterFirstRunResultErrorMsg {
EventInfo info = 1;
AfterFirstRunResultError data = 2;
}
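Each event here follows the file's envelope convention: a bare payload message (`FirstRunResultError`) paired with a `*Msg` wrapper that adds `EventInfo` metadata. A rough Python analogue of that pairing, using hypothetical simplified types rather than the generated protobuf classes:

from dataclasses import dataclass

@dataclass
class EventInfo:
    name: str
    code: str

@dataclass
class FirstRunResultError:
    msg: str

@dataclass
class FirstRunResultErrorMsg:
    info: EventInfo            # shared metadata envelope
    data: FirstRunResultError  # event-specific payload

evt = FirstRunResultErrorMsg(
    EventInfo(name="FirstRunResultError", code="Z028"),
    FirstRunResultError(msg="model failed"),
)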
// Z030
message EndOfRunSummary {

View File

@@ -1233,20 +1233,6 @@ class SemanticValidationFailure(WarnLevel):
return self.msg
class UnversionedBreakingChange(WarnLevel):
def code(self):
return "I071"
def message(self) -> str:
reasons = "\n - ".join(self.breaking_changes)
return (
f"Breaking change to contracted, unversioned model {self.model_name} ({self.model_file_path})"
"\nWhile comparing to previous project state, dbt detected a breaking change to an unversioned model."
f"\n - {reasons}\n"
)
# =======================================================
# M - Deps generation
# =======================================================
@@ -2185,7 +2171,25 @@ class CheckNodeTestFailure(InfoLevel):
return f" See test failures:\n {border}\n {msg}\n {border}"
# Skipped Z028, Z029
# FirstRunResultError and AfterFirstRunResultError are just splitting the message from the result
# object into multiple log lines
# TODO: is this really needed? See printer.py
class FirstRunResultError(ErrorLevel):
def code(self):
return "Z028"
def message(self) -> str:
return yellow(self.msg)
class AfterFirstRunResultError(ErrorLevel):
def code(self):
return "Z029"
def message(self) -> str:
return self.msg
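As the re-added comment says, these two events exist only to split one result message across several log lines, with the lead line rendered in yellow by `FirstRunResultError.message()`. A hedged sketch of how a caller such as printer.py might fire them (assumes dbt's `fire_event` helper and a `result.message` attribute; the exact call sites are not shown here):

from dbt.events.functions import fire_event
from dbt.events.types import AfterFirstRunResultError, FirstRunResultError

def print_run_result_error(result) -> None:
    first, *rest = str(result.message).split("\n")
    fire_event(FirstRunResultError(msg=first))          # yellow lead line
    for line in rest:
        fire_event(AfterFirstRunResultError(msg=line))  # plain continuations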
class EndOfRunSummary(InfoLevel):

File diff suppressed because one or more lines are too long

View File

@@ -3,7 +3,7 @@ import json
import re
import io
import agate
from typing import Any, Dict, List, Mapping, Optional, Union
from typing import Any, Dict, List, Mapping, Optional, Tuple, Union
from dbt.dataclass_schema import ValidationError
from dbt.events.helpers import env_secrets, scrub_secrets
@@ -213,22 +213,67 @@ class ContractBreakingChangeError(DbtRuntimeError):
def __init__(
self,
breaking_changes: List[str],
contract_enforced_disabled: bool,
columns_removed: List[str],
column_type_changes: List[Tuple[str, str, str]],
enforced_column_constraint_removed: List[Tuple[str, str]],
enforced_model_constraint_removed: List[Tuple[str, List[str]]],
materialization_changed: List[str],
node=None,
):
self.breaking_changes = breaking_changes
self.contract_enforced_disabled = contract_enforced_disabled
self.columns_removed = columns_removed
self.column_type_changes = column_type_changes
self.enforced_column_constraint_removed = enforced_column_constraint_removed
self.enforced_model_constraint_removed = enforced_model_constraint_removed
self.materialization_changed = materialization_changed
super().__init__(self.message(), node)
@property
def type(self):
return "Breaking change to contract"
return "Breaking Change to Contract"
def message(self):
reasons = "\n - ".join(self.breaking_changes)
breaking_changes = []
if self.contract_enforced_disabled:
breaking_changes.append("The contract's enforcement has been disabled.")
if self.columns_removed:
columns_removed_str = "\n - ".join(self.columns_removed)
breaking_changes.append(f"Columns were removed: \n - {columns_removed_str}")
if self.column_type_changes:
column_type_changes_str = "\n - ".join(
[f"{c[0]} ({c[1]} -> {c[2]})" for c in self.column_type_changes]
)
breaking_changes.append(
f"Columns with data_type changes: \n - {column_type_changes_str}"
)
if self.enforced_column_constraint_removed:
column_constraint_changes_str = "\n - ".join(
[f"{c[0]} ({c[1]})" for c in self.enforced_column_constraint_removed]
)
breaking_changes.append(
f"Enforced column level constraints were removed: \n - {column_constraint_changes_str}"
)
if self.enforced_model_constraint_removed:
model_constraint_changes_str = "\n - ".join(
[f"{c[0]} -> {c[1]}" for c in self.enforced_model_constraint_removed]
)
breaking_changes.append(
f"Enforced model level constraints were removed: \n - {model_constraint_changes_str}"
)
if self.materialization_changed:
materialization_changes_str = (
f"{self.materialization_changed[0]} -> {self.materialization_changed[1]}"
)
breaking_changes.append(
f"Materialization changed with enforced constraints: \n - {materialization_changes_str}"
)
reasons = "\n\n".join(breaking_changes)
return (
"While comparing to previous project state, dbt detected a breaking change to an enforced contract."
f"\n - {reasons}\n"
f"\n\n{reasons}\n\n"
"Consider making an additive (non-breaking) change instead, if possible.\n"
"Otherwise, create a new model version: https://docs.getdbt.com/docs/collaborate/govern/model-versions"
)
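Since every argument is now a typed field, the error can be constructed directly from the contract comparison, and `message()` assembles one bulleted section per populated field. A sketch with made-up model details, following the constructor shown above:

# Illustrative values only; field names follow the constructor above.
err = ContractBreakingChangeError(
    contract_enforced_disabled=False,
    columns_removed=["legacy_id"],
    column_type_changes=[("id", "int", "text")],
    enforced_column_constraint_removed=[("id", "not_null")],
    enforced_model_constraint_removed=[("primary_key", ["id"])],
    materialization_changed=["table", "view"],
)
# err.message() would include, e.g.:
#   Columns were removed:
#    - legacy_id
#   Columns with data_type changes:
#    - id (int -> text)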
@@ -441,7 +486,7 @@ class InvalidConnectionError(DbtRuntimeError):
self.thread_id = thread_id
self.known = known
super().__init__(
msg=f"connection never acquired for thread {self.thread_id}, have {self.known}"
msg="connection never acquired for thread {self.thread_id}, have {self.known}"
)

View File

@@ -103,7 +103,7 @@ SelectorTarget = Union[SourceDefinition, ManifestNode, Exposure, Metric]
class SelectorMethod(metaclass=abc.ABCMeta):
def __init__(
self, manifest: Manifest, previous_state: Optional[PreviousState], arguments: List[str]
) -> None:
):
self.manifest: Manifest = manifest
self.previous_state = previous_state
self.arguments: List[str] = arguments
@@ -467,7 +467,7 @@ class TestTypeSelectorMethod(SelectorMethod):
class StateSelectorMethod(SelectorMethod):
def __init__(self, *args, **kwargs) -> None:
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.modified_macros: Optional[List[str]] = None

View File

@@ -0,0 +1,44 @@
{% macro drop_relation(relation) -%}
{{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}
{% endmacro %}
{% macro default__drop_relation(relation) -%}
{% call statement('drop_relation', auto_begin=False) -%}
{%- if relation.is_table -%}
{{- drop_table(relation) -}}
{%- elif relation.is_view -%}
{{- drop_view(relation) -}}
{%- elif relation.is_materialized_view -%}
{{- drop_materialized_view(relation) -}}
{%- else -%}
drop {{ relation.type }} if exists {{ relation }} cascade
{%- endif -%}
{%- endcall %}
{% endmacro %}
{% macro drop_table(relation) -%}
{{ return(adapter.dispatch('drop_table', 'dbt')(relation)) }}
{%- endmacro %}
{% macro default__drop_table(relation) -%}
drop table if exists {{ relation }} cascade
{%- endmacro %}
{% macro drop_view(relation) -%}
{{ return(adapter.dispatch('drop_view', 'dbt')(relation)) }}
{%- endmacro %}
{% macro default__drop_view(relation) -%}
drop view if exists {{ relation }} cascade
{%- endmacro %}
{% macro drop_materialized_view(relation) -%}
{{ return(adapter.dispatch('drop_materialized_view', 'dbt')(relation)) }}
{%- endmacro %}
{% macro default__drop_materialized_view(relation) -%}
drop materialized view if exists {{ relation }} cascade
{%- endmacro %}
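Because every macro here routes through `adapter.dispatch('<name>', 'dbt')`, an adapter can override any default by defining `<adapter>__drop_table` and friends; otherwise the `default__` implementation runs. A hedged sketch of driving the dispatched entry point from adapter-side Python, assuming dbt's `BaseAdapter.execute_macro` API and a relation object already in hand:

def drop_via_macro(adapter, relation) -> None:
    # Jinja-side dispatch resolves to e.g. postgres__drop_relation when the
    # active adapter defines it, falling back to default__drop_relation.
    adapter.execute_macro("drop_relation", kwargs={"relation": relation})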

View File

@@ -43,6 +43,18 @@
{% endmacro %}
{% macro rename_relation(from_relation, to_relation) -%}
{{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}
{% endmacro %}
{% macro default__rename_relation(from_relation, to_relation) -%}
{% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}
{% call statement('rename_relation') -%}
alter table {{ from_relation }} rename to {{ target_name }}
{%- endcall %}
{% endmacro %}
{% macro get_or_create_relation(database, schema, identifier, type) -%}
{{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}
{% endmacro %}

View File

@@ -1,13 +0,0 @@
{# /*
This was already implemented. Instead of creating a new macro that aligns with the standard,
this was reused and the default was maintained. This gets called by `drop_relation`, which
actually executes the drop, and `get_drop_sql`, which returns the template.
*/ #}
{% macro drop_materialized_view(relation) -%}
{{ return(adapter.dispatch('drop_materialized_view', 'dbt')(relation)) }}
{%- endmacro %}
{% macro default__drop_materialized_view(relation) -%}
drop materialized view if exists {{ relation }} cascade
{%- endmacro %}

View File

@@ -1,13 +0,0 @@
{# /*
This was already implemented. Instead of creating a new macro that aligns with the standard,
this was reused and the default was maintained. This gets called by `drop_relation`, which
actually executes the drop, and `get_drop_sql`, which returns the template.
*/ #}
{% macro drop_table(relation) -%}
{{ return(adapter.dispatch('drop_table', 'dbt')(relation)) }}
{%- endmacro %}
{% macro default__drop_table(relation) -%}
drop table if exists {{ relation }} cascade
{%- endmacro %}

View File

@@ -1,13 +0,0 @@
{# /*
This was already implemented. Instead of creating a new macro that aligns with the standard,
this was reused and the default was maintained. This gets called by `drop_relation`, which
actually executes the drop, and `get_drop_sql`, which returns the template.
*/ #}
{% macro drop_view(relation) -%}
{{ return(adapter.dispatch('drop_view', 'dbt')(relation)) }}
{%- endmacro %}
{% macro default__drop_view(relation) -%}
drop view if exists {{ relation }} cascade
{%- endmacro %}

View File

@@ -5,7 +5,5 @@
{% macro default__get_create_materialized_view_as_sql(relation, sql) -%}
{{ exceptions.raise_compiler_error(
"`get_create_materialized_view_as_sql` has not been implemented for this adapter."
) }}
{{ exceptions.raise_compiler_error("Materialized views have not been implemented for this adapter.") }}
{% endmacro %}

View File

@@ -5,5 +5,5 @@
{% macro default__refresh_materialized_view(relation) %}
{{ exceptions.raise_compiler_error("`refresh_materialized_view` has not been implemented for this adapter.") }}
{{ exceptions.raise_compiler_error("Materialized views have not been implemented for this adapter.") }}
{% endmacro %}

View File

@@ -1,8 +1,3 @@
{# /*
This only exists for backwards compatibility for 1.6.0. In later versions, the general `get_replace_sql`
macro is called as replace is inherently not limited to a single relation (it takes in two relations).
*/ #}
{% macro get_replace_materialized_view_as_sql(relation, sql, existing_relation, backup_relation, intermediate_relation) %}
{{- log('Applying REPLACE to: ' ~ relation) -}}
{{- adapter.dispatch('get_replace_materialized_view_as_sql', 'dbt')(relation, sql, existing_relation, backup_relation, intermediate_relation) -}}

View File

@@ -1,17 +0,0 @@
{% macro drop_relation(relation) -%}
{{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}
{% endmacro %}
{% macro default__drop_relation(relation) -%}
{% call statement('drop_relation', auto_begin=False) -%}
{%- if relation.is_table -%}
{{- drop_table(relation) -}}
{%- elif relation.is_view -%}
{{- drop_view(relation) -}}
{%- elif relation.is_materialized_view -%}
{{- drop_materialized_view(relation) -}}
{%- else -%}
drop {{ relation.type }} if exists {{ relation }} cascade
{%- endif -%}
{%- endcall %}
{% endmacro %}

View File

@@ -1,10 +0,0 @@
{% macro rename_relation(from_relation, to_relation) -%}
{{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}
{% endmacro %}
{% macro default__rename_relation(from_relation, to_relation) -%}
{% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}
{% call statement('rename_relation') -%}
alter table {{ from_relation }} rename to {{ target_name }}
{%- endcall %}
{% endmacro %}

File diff suppressed because one or more lines are too long

View File

@@ -4,8 +4,8 @@ from dbt.dataclass_schema import StrEnum
class AccessType(StrEnum):
Private = "private"
Protected = "protected"
Private = "private"
Public = "public"
@classmethod

View File

@@ -102,7 +102,8 @@ class RelationUpdate:
self.package_updaters = package_updaters
self.component = component
def __call__(self, parsed_node: Any, override: Optional[str]) -> None:
def __call__(self, parsed_node: Any, config_dict: Dict[str, Any]) -> None:
override = config_dict.get(self.component)
if parsed_node.package_name in self.package_updaters:
new_value = self.package_updaters[parsed_node.package_name](override, parsed_node)
else:
@@ -279,19 +280,9 @@ class ConfiguredParser(
def update_parsed_node_relation_names(
self, parsed_node: IntermediateNode, config_dict: Dict[str, Any]
) -> None:
# These call the RelationUpdate callable to go through generate_name macros
self._update_node_database(parsed_node, config_dict.get("database"))
self._update_node_schema(parsed_node, config_dict.get("schema"))
self._update_node_alias(parsed_node, config_dict.get("alias"))
# Snapshot nodes use special "target_database" and "target_schema" fields for some reason
if parsed_node.resource_type == NodeType.Snapshot:
if "target_database" in config_dict and config_dict["target_database"]:
parsed_node.database = config_dict["target_database"]
if "target_schema" in config_dict and config_dict["target_schema"]:
parsed_node.schema = config_dict["target_schema"]
self._update_node_database(parsed_node, config_dict)
self._update_node_schema(parsed_node, config_dict)
self._update_node_alias(parsed_node, config_dict)
self._update_node_relation_name(parsed_node)
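Each `RelationUpdate` is constructed with a `component` name, so handing it the whole `config_dict` lets the callable fetch its own override before running the relevant `generate_*_name` logic. A minimal sketch of the pattern with a hypothetical simplified constructor (the real class also keys updaters by package):

from typing import Any, Callable, Dict, Optional

class RelationUpdate:
    def __init__(
        self, component: str, updater: Callable[[Optional[str], Any], str]
    ) -> None:
        self.component = component
        self.updater = updater  # stands in for a generate_*_name macro

    def __call__(self, parsed_node: Any, config_dict: Dict[str, Any]) -> None:
        # Pull this component's override out of the node config, then let
        # the updater choose the final value.
        override = config_dict.get(self.component)
        setattr(parsed_node, self.component, self.updater(override, parsed_node))

update_schema = RelationUpdate(
    "schema", lambda override, node: override or "analytics"
)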
def update_parsed_node_config(
@@ -358,7 +349,7 @@ class ConfiguredParser(
# do this once before we parse the node database/schema/alias, so
# parsed_node.config is what it would be if they did nothing
self.update_parsed_node_config_dict(parsed_node, config_dict)
# This updates the node database/schema/alias/relation_name
# This updates the node database/schema/alias
self.update_parsed_node_relation_names(parsed_node, config_dict)
# tests don't have hooks

View File

@@ -177,10 +177,10 @@ class GenericTestBlock(TestBlock[Testable], Generic[Testable]):
class ParserRef:
"""A helper object to hold parse-time references."""
def __init__(self) -> None:
def __init__(self):
self.column_info: Dict[str, ColumnInfo] = {}
def _add(self, column: HasColumnProps) -> None:
def _add(self, column: HasColumnProps):
tags: List[str] = []
tags.extend(getattr(column, "tags", ()))
quote: Optional[bool]

View File

@@ -79,7 +79,6 @@ from dbt.parser.read_files import (
load_source_file,
FileDiff,
ReadFilesFromDiff,
ReadFiles,
)
from dbt.parser.partial import PartialParsing, special_override_macros
from dbt.contracts.graph.manifest import (
@@ -123,7 +122,7 @@ from dbt.parser.sources import SourcePatcher
from dbt.version import __version__
from dbt.dataclass_schema import StrEnum, dbtClassMixin
from dbt import plugins
from dbt.plugins import get_plugin_manager
from dbt_semantic_interfaces.enum_extension import assert_values_exhausted
from dbt_semantic_interfaces.type_enums import MetricType
@@ -260,7 +259,7 @@ class ManifestLoader:
# We need to know if we're actually partially parsing. It could
# have been enabled, but not happening because of some issue.
self.partially_parsing = False
self.partial_parser: Optional[PartialParsing] = None
self.partial_parser = None
# This is a saved manifest from a previous run that's used for partial parsing
self.saved_manifest: Optional[Manifest] = self.read_manifest_for_partial_parse()
@@ -285,17 +284,8 @@ class ManifestLoader:
adapter.clear_macro_manifest()
macro_hook = adapter.connections.set_query_header
flags = get_flags()
if not flags.PARTIAL_PARSE_FILE_DIFF:
file_diff = FileDiff.from_dict(
{
"deleted": [],
"changed": [],
"added": [],
}
)
# Hack to test file_diffs
elif os.environ.get("DBT_PP_FILE_DIFF_TEST"):
if os.environ.get("DBT_PP_FILE_DIFF_TEST"):
file_diff_path = "file_diff.json"
if path_exists(file_diff_path):
file_diff_dct = read_json(file_diff_path)
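The `DBT_PP_FILE_DIFF_TEST` hook expects `file_diff.json` to deserialize via `FileDiff.from_dict`, i.e. the same three-key shape used elsewhere in this function. A sketch of producing such a file (keys match the dict literal above; the per-entry schema for non-empty lists is assumed, not shown here):

import json

file_diff_dct = {
    "deleted": [],
    "changed": [],
    "added": [],
}
with open("file_diff.json", "w") as f:
    json.dump(file_diff_dct, f)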
@@ -332,7 +322,7 @@ class ManifestLoader:
return manifest
# This is where the main action happens
def load(self) -> Manifest:
def load(self):
start_read_files = time.perf_counter()
# This updates the "files" dictionary in self.manifest, and creates
@@ -341,7 +331,6 @@ class ManifestLoader:
# of parsers to lists of file strings. The file strings are
# used to get the SourceFiles from the manifest files.
saved_files = self.saved_manifest.files if self.saved_manifest else {}
file_reader: Optional[ReadFiles] = None
if self.file_diff:
# We're getting files from a file diff
file_reader = ReadFilesFromDiff(
@@ -405,7 +394,7 @@ class ManifestLoader:
}
# get file info for local logs
parse_file_type: str = ""
parse_file_type = None
file_id = self.partial_parser.processing_file
if file_id:
source_file = None
@@ -486,7 +475,7 @@ class ManifestLoader:
self.manifest.rebuild_disabled_lookup()
# Load yaml files
parser_types = [SchemaParser] # type: ignore
parser_types = [SchemaParser]
for project in self.all_projects.values():
if project.project_name not in project_parser_files:
continue
@@ -514,7 +503,6 @@ class ManifestLoader:
self.manifest.selectors = self.root_project.manifest_selectors
# inject any available external nodes
self.manifest.build_parent_and_child_maps()
external_nodes_modified = self.inject_external_nodes()
if external_nodes_modified:
self.manifest.rebuild_ref_lookup()
@@ -763,7 +751,7 @@ class ManifestLoader:
manifest_nodes_modified = True
# Inject any newly-available external nodes
pm = plugins.get_plugin_manager(self.root_project.project_name)
pm = get_plugin_manager(self.root_project.project_name)
plugin_model_nodes = pm.get_nodes().models
for node_arg in plugin_model_nodes.values():
node = ModelNode.from_args(node_arg)
@@ -1064,7 +1052,7 @@ class ManifestLoader:
# Takes references in 'refs' array of nodes and exposures, finds the target
# node, and updates 'depends_on.nodes' with the unique id
def process_refs(self, current_project: str, dependencies: Optional[Mapping[str, Project]]):
def process_refs(self, current_project: str, dependencies: Optional[Dict[str, Project]]):
for node in self.manifest.nodes.values():
if node.created_at < self.started_at:
continue
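The annotation swap from `Mapping` back to `Dict` is stricter than it looks: `Mapping[str, Project]` accepts any read-only mapping, while `Dict[str, Project]` demands a plain mutable dict. A sketch of the difference (`Project` is a stand-in class here):

from types import MappingProxyType
from typing import Dict, Mapping

class Project: ...

def takes_mapping(deps: Mapping[str, Project]) -> None: ...
def takes_dict(deps: Dict[str, Project]) -> None: ...

frozen = MappingProxyType({"jaffle_shop": Project()})
takes_mapping(frozen)  # fine: MappingProxyType satisfies Mapping
takes_dict(frozen)     # runs, but a strict type checker rejects it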

Some files were not shown because too many files have changed in this diff.