mirror of https://github.com/dbt-labs/dbt-core
synced 2025-12-19 14:01:28 +00:00

Compare commits: 1 commit (gha-postgr...update-ind)

| Author | SHA1 | Date |
|---|---|---|
|  | 1552eccb05 |  |
@@ -1,6 +0,0 @@
-kind: Breaking Changes
-body: Fix changing the current working directory when using dbt deps, clean and init.
-time: 2023-12-06T19:24:42.575372+09:00
-custom:
-  Author: rariyama
-  Issue: "8997"
@@ -1,7 +0,0 @@
-kind: Dependencies
-body: Increase supported version range for dbt-semantic-interfaces. Needed to support
-  custom calendar features.
-time: 2024-08-20T13:19:09.015225-07:00
-custom:
-  Author: courtneyholcomb
-  Issue: "9265"

@@ -1,6 +0,0 @@
-kind: Docs
-body: Enable display of unit tests
-time: 2024-03-11T14:03:44.490834-04:00
-custom:
-  Author: gshank
-  Issue: "501"

@@ -1,6 +0,0 @@
-kind: Docs
-body: Unit tests not rendering
-time: 2024-05-01T02:10:50.987412+02:00
-custom:
-  Author: aranke
-  Issue: "506"

@@ -1,6 +0,0 @@
-kind: Docs
-body: Add support for Saved Query node
-time: 2024-05-16T22:30:36.206492-07:00
-custom:
-  Author: ChenyuLInx
-  Issue: "486"

.changes/unreleased/Docs-20240522-174713.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Docs
+body: Fix rendering docs with saved queries
+time: 2024-05-22T17:47:13.414938-04:00
+custom:
+  Author: ChenyuLInx michelleark
+  Issue: "10168"

@@ -1,6 +0,0 @@
-kind: Docs
-body: Fix npm security vulnerabilities as of June 2024
-time: 2024-06-13T15:10:48.301989+01:00
-custom:
-  Author: aranke
-  Issue: "513"
@@ -1,7 +0,0 @@
-kind: Features
-body: "Maximally parallelize dbt clone
-  in clone command"
-time: 2024-05-22T00:03:09.765977-04:00
-custom:
-  Author: michelleark
-  Issue: "7914"
@@ -1,6 +0,0 @@
-kind: Features
-body: Add --host flag to dbt docs serve, defaulting to '127.0.0.1'
-time: 2024-05-27T12:44:05.040843-04:00
-custom:
-  Author: michelleark
-  Issue: "10229"
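This `--host` flag also appears below in the `docs serve` command definition in `core/dbt/cli/main.py` (the `@p.host` decorator). A minimal sketch of exercising it through dbt's programmatic entry point; `dbtRunner` is dbt-core's documented Python API, while the host and port values here are illustrative:

```python
from dbt.cli.main import dbtRunner

# Serve generated docs; --host defaults to '127.0.0.1' per the changelog
# entry above, so exposing the site on another interface is an explicit choice.
# Note: `docs serve` blocks while the HTTP server is running.
res = dbtRunner().invoke(["docs", "serve", "--host", "127.0.0.1", "--port", "8080"])
print(res.success)
```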
@@ -1,6 +0,0 @@
-kind: Features
-body: Update data_test to accept arbitrary config options
-time: 2024-05-31T15:08:16.431966-05:00
-custom:
-  Author: McKnight-42
-  Issue: "10197"

@@ -1,6 +0,0 @@
-kind: Features
-body: add pre_model and post_model hook calls to data and unit tests to be able to provide extra config options
-time: 2024-06-06T11:23:34.758675-05:00
-custom:
-  Author: McKnight-42
-  Issue: "10198"

@@ -1,6 +0,0 @@
-kind: Features
-body: add --empty value to jinja context as flags.EMPTY
-time: 2024-06-17T10:39:48.275801-04:00
-custom:
-  Author: michelleark
-  Issue: "10317"

@@ -1,6 +0,0 @@
-kind: Features
-body: Warning message for snapshot timestamp data types
-time: 2024-06-21T14:16:35.717637-04:00
-custom:
-  Author: gshank
-  Issue: "10234"

@@ -1,6 +0,0 @@
-kind: Features
-body: Support cumulative_type_params & sub-daily granularities in semantic manifest.
-time: 2024-06-25T09:51:07.983248-07:00
-custom:
-  Author: courtneyholcomb
-  Issue: "10360"

@@ -1,6 +0,0 @@
-kind: Features
-body: Add time_granularity to metric spec.
-time: 2024-06-27T16:29:53.500917-07:00
-custom:
-  Author: courtneyholcomb
-  Issue: "10376"

@@ -1,6 +0,0 @@
-kind: Features
-body: Support standard schema/database fields for snapshots
-time: 2024-07-12T21:45:46.06011-04:00
-custom:
-  Author: gshank
-  Issue: "10301"

@@ -1,6 +0,0 @@
-kind: Features
-body: Support ref and source in foreign key constraint expressions, bump dbt-common minimum to 1.6
-time: 2024-07-19T16:18:41.434278-04:00
-custom:
-  Author: michelleark
-  Issue: "8062"

@@ -1,6 +0,0 @@
-kind: Features
-body: Support new semantic layer time spine configs to enable sub-daily granularity.
-time: 2024-07-22T20:22:38.258249-07:00
-custom:
-  Author: courtneyholcomb
-  Issue: "10475"

@@ -1,6 +0,0 @@
-kind: Features
-body: Add support for behavior flags
-time: 2024-08-29T13:53:20.16122-04:00
-custom:
-  Author: mikealfare
-  Issue: "10618"
@@ -1,6 +0,0 @@
-kind: Fixes
-body: Convert "Skipping model due to fail_fast" message to DEBUG level
-time: 2024-01-13T07:36:15.836294-00:00
-custom:
-  Author: scottgigante,nevdelap
-  Issue: "8774"

@@ -1,7 +0,0 @@
-kind: Fixes
-body: 'Fix: Order-insensitive unit test equality assertion for expected/actual with
-  multiple nulls'
-time: 2024-05-22T18:28:55.91733-04:00
-custom:
-  Author: michelleark
-  Issue: "10167"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Renaming or removing a contracted model should raise a BreakingChange warning/error
-time: 2024-05-23T20:42:51.033946-04:00
-custom:
-  Author: michelleark
-  Issue: "10116"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: prefer disabled project nodes to external node
-time: 2024-05-24T13:11:35.440443-04:00
-custom:
-  Author: michelleark
-  Issue: "10224"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Fix issues with selectors and inline nodes
-time: 2024-06-05T11:16:52.187667-04:00
-custom:
-  Author: gshank
-  Issue: 8943 9269

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Fix snapshot config to work in yaml files
-time: 2024-06-07T13:46:48.383215-04:00
-custom:
-  Author: gshank
-  Issue: "4000"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Improve handling of error when loading schema file list
-time: 2024-06-10T13:21:30.963371-04:00
-custom:
-  Author: gshank
-  Issue: "10284"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Use model alias for the CTE identifier generated during ephemeral materialization
-time: 2024-06-10T20:05:22.510814008Z
-custom:
-  Author: jeancochrane
-  Issue: "5273"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Saved Query node fail during skip
-time: 2024-06-12T12:42:56.329073-07:00
-custom:
-  Author: ChenyuLInx
-  Issue: "10029"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Implement state:modified for saved queries
-time: 2024-06-12T15:21:39.851426-04:00
-custom:
-  Author: gshank
-  Issue: "10294"
@@ -1,6 +0,0 @@
-kind: Fixes
-body: Don't warn on `unit_test` config paths that are properly used
-time: 2024-06-13T18:31:17.486497-07:00
-custom:
-  Author: QMalcolm
-  Issue: "10311"
@@ -1,6 +0,0 @@
-kind: Fixes
-body: Fix setting `silence` of `warn_error_options` via `dbt_project.yaml` flags
-time: 2024-06-24T17:17:29.464865-07:00
-custom:
-  Author: QMalcolm
-  Issue: "10160"
@@ -1,7 +0,0 @@
-kind: Fixes
-body: Attempt to provide test fixture tables with all values to set types correctly
-  for comparison with source tables
-time: 2024-06-25T17:17:37.514619-07:00
-custom:
-  Author: versusfacit
-  Issue: "10365"
@@ -1,6 +0,0 @@
-kind: Fixes
-body: Limit data_tests deprecation to root_project
-time: 2024-06-27T15:44:48.579869-04:00
-custom:
-  Author: gshank
-  Issue: "9835"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: CLI flags should take precedence over env var flags
-time: 2024-07-09T17:24:40.918977-04:00
-custom:
-  Author: gshank
-  Issue: "10304"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Fix typing for artifact schemas
-time: 2024-07-14T10:02:54.452099+09:00
-custom:
-  Author: nakamichiworks
-  Issue: "10442"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Fix over deletion of generated_metrics in partial parsing
-time: 2024-07-16T13:37:03.49651-04:00
-custom:
-  Author: gshank
-  Issue: "10450"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Fix error constructing warn_error_options
-time: 2024-07-16T17:14:27.837171-04:00
-custom:
-  Author: gshank
-  Issue: "10452"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Do not update varchar column definitions if a contract exists
-time: 2024-07-28T22:14:21.67712-04:00
-custom:
-  Author: gshank
-  Issue: "10362"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: fix all_constraints access, disabled node parsing of non-uniquely named resources
-time: 2024-07-31T09:51:52.751135-04:00
-custom:
-  Author: michelleark gshank
-  Issue: "10509"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Propagate measure label when using create_metrics
-time: 2024-08-06T17:21:10.265494-07:00
-custom:
-  Author: aliceliu
-  Issue: "10536"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: respect --quiet and --warn-error-options for flag deprecations
-time: 2024-08-06T19:48:43.399453-04:00
-custom:
-  Author: michelleark
-  Issue: "10105"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Fix state:modified check for exports
-time: 2024-08-13T15:42:35.471685-07:00
-custom:
-  Author: aliceliu
-  Issue: "10138"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Filter out empty nodes after graph selection to support consistent selection of nodes that depend on upstream public models
-time: 2024-08-16T14:08:07.426235-07:00
-custom:
-  Author: jtcohen6
-  Issue: "8987"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Late render pre- and post-hooks configs in properties / schema YAML files
-time: 2024-08-24T21:09:03.252733-06:00
-custom:
-  Author: dbeatty10
-  Issue: "10603"

@@ -1,7 +0,0 @@
-kind: Fixes
-body: Allow the use of env_var function in certain macros in which it was previously
-  unavailable.
-time: 2024-08-29T10:57:01.160613-04:00
-custom:
-  Author: peterallenwebb
-  Issue: "10609"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: 'Remove deprecation for tests: to data_tests: change'
-time: 2024-09-05T18:02:48.086421-04:00
-custom:
-  Author: gshank
-  Issue: "10564"

@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Enable record filtering by type.
-time: 2024-05-29T10:28:14.547624-05:00
-custom:
-  Author: emmyoop
-  Issue: "10240"

@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Remove IntermediateSnapshotNode
-time: 2024-06-18T14:06:52.618602-04:00
-custom:
-  Author: gshank
-  Issue: "10326"

@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Additional logging for skipped ephemeral models
-time: 2024-07-01T13:17:50.827788-04:00
-custom:
-  Author: gshank
-  Issue: "10389"

@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: bump black to 24.3.0
-time: 2024-07-16T18:48:59.651834-04:00
-custom:
-  Author: michelleark
-  Issue: "10454"

@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: generate protos with protoc version 5.26.1
-time: 2024-07-16T20:57:03.332448-04:00
-custom:
-  Author: michelleark
-  Issue: "10457"

@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Move from minimal-snowplow-tracker fork back to snowplow-tracker
-time: 2024-08-06T15:54:06.422444-04:00
-custom:
-  Author: peterallenwebb
-  Issue: "8409"

@@ -1,7 +0,0 @@
-kind: Under the Hood
-body: Add group info to RunResultError, RunResultFailure, RunResultWarning log lines
-time: 2024-08-07T15:56:52.171199-05:00
-custom:
-  Author: aranke
-  Issue: ""
-  JiraID: "364"

@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Improve speed of tree traversal when finding children, increasing build speed for some selectors
-time: 2024-08-09T13:02:34.759905-07:00
-custom:
-  Author: ttusing
-  Issue: "10434"

@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Add test for sources tables with quotes
-time: 2024-08-21T09:55:16.038101-04:00
-custom:
-  Author: gshank
-  Issue: "10582"

@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Additional type hints for `core/dbt/version.py`
-time: 2024-08-27T10:50:14.047859-05:00
-custom:
-  Author: QMalcolm
-  Issue: "10612"

@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Fix typing issues in core/dbt/contracts/sql.py
-time: 2024-08-27T11:31:23.749912-05:00
-custom:
-  Author: QMalcolm
-  Issue: "10614"

@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Fix type errors in `dbt/core/task/clean.py`
-time: 2024-08-27T11:48:10.438173-05:00
-custom:
-  Author: QMalcolm
-  Issue: "10616"
.flake8 (1 line changed)
@@ -7,7 +7,6 @@ ignore =
     W503 # makes Flake8 work like black
    W504
     E203 # makes Flake8 work like black
-    E704 # makes Flake8 work like black
     E741
     E501 # long line checking is done in black
 exclude = test/
.github/ISSUE_TEMPLATE/code-docs.yml (deleted, 18 lines)
@@ -1,18 +0,0 @@
-name: 📄 Code docs
-description: Report an issue for markdown files within this repo, such as README, ARCHITECTURE, etc.
-title: "[Code docs] <title>"
-labels: ["triage"]
-body:
-  - type: markdown
-    attributes:
-      value: |
-        Thanks for taking the time to fill out this code docs issue!
-  - type: textarea
-    attributes:
-      label: Please describe the issue and your proposals.
-      description: |
-        Links? References? Anything that will give us more context about the issue you are encountering!
-
-        Tip: You can attach images by clicking this area to highlight it and then dragging files in.
-    validations:
-      required: false
.github/ISSUE_TEMPLATE/config.yml (3 lines changed)
@@ -1,8 +1,5 @@
 blank_issues_enabled: false
 contact_links:
-  - name: Documentation
-    url: https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose
-    about: Problems and issues with dbt product documentation hosted on docs.getdbt.com. Issues for markdown files within this repo, such as README, should be opened using the "Code docs" template.
   - name: Ask the community for help
     url: https://github.com/dbt-labs/docs.getdbt.com/discussions
     about: Need help troubleshooting? Check out our guide on how to ask
@@ -5,6 +5,7 @@ runs:
   steps:
     - shell: bash
       run: |
+        brew services start postgresql
         echo "Check PostgreSQL service is running"
         i=10
         COMMAND='pg_isready'
@@ -2,8 +2,6 @@ name: "Set up postgres (windows)"
 description: "Set up postgres service on windows vm for dbt integration tests"
 runs:
   using: "composite"
-  env:
-    PQ_LIB_DIR: 'C:\Program Files\PostgreSQL\16\lib'
   steps:
     - shell: pwsh
       run: |
.github/pull_request_template.md (14 lines changed)
@@ -1,7 +1,7 @@
-Resolves #
+resolves #

 <!---
-  Include the number of the issue addressed by this PR above, if applicable.
+  Include the number of the issue addressed by this PR above if applicable.
   PRs for code changes without an associated issue *will not be merged*.
   See CONTRIBUTING.md for more information.

@@ -26,8 +26,8 @@ Resolves #

 ### Checklist

-- [ ] I have read [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md) and understand what's expected of me.
-- [ ] I have run this code in development, and it appears to resolve the stated issue.
-- [ ] This PR includes tests, or tests are not required or relevant for this PR.
-- [ ] This PR has no interface changes (e.g., macros, CLI, logs, JSON artifacts, config files, adapter interface, etc.) or this PR has already received feedback and approval from Product or DX.
-- [ ] This PR includes [type annotations](https://docs.python.org/3/library/typing.html) for new and modified functions.
+- [ ] I have read [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md) and understand what's expected of me
+- [ ] I have run this code in development and it appears to resolve the stated issue
+- [ ] This PR includes tests, or tests are not required/relevant for this PR
+- [ ] This PR has no interface changes (e.g. macros, cli, logs, json artifacts, config files, adapter interface, etc) or this PR has already received feedback and approval from Product or DX
+- [ ] This PR includes [type annotations](https://docs.python.org/3/library/typing.html) for new and modified functions
.github/workflows/check-artifact-changes.yml (2 lines changed)
@@ -32,7 +32,7 @@ jobs:
         run: |
           echo "CI failure: Artifact changes checked in core/dbt/artifacts directory."
           echo "Files changed: ${{ steps.check_artifact_changes.outputs.artifacts_changed_files }}"
-          echo "To bypass this check, confirm that the change is not breaking (https://github.com/dbt-labs/dbt-core/blob/main/core/dbt/artifacts/README.md#breaking-changes) and add the 'artifact_minor_upgrade' label to the PR. Modifications and additions to all fields require updates to https://github.com/dbt-labs/dbt-jsonschema."
+          echo "To bypass this check, confirm that the change is not breaking (https://github.com/dbt-labs/dbt-core/blob/main/core/dbt/artifacts/README.md#breaking-changes) and add the 'artifact_minor_upgrade' label to the PR."
           exit 1

       - name: CI check passed
.github/workflows/main.yml (13 lines changed)
@@ -186,29 +186,17 @@ jobs:
       with:
         python-version: ${{ matrix.python-version }}

-      - name: Install postgres 16
-        uses: ikalnytskyi/action-setup-postgres@v6
-        with:
-          postgres-version: "16"
-        id: postgres
-
       - name: Set up postgres (linux)
         if: runner.os == 'Linux'
         uses: ./.github/actions/setup-postgres-linux
-        env:
-          CONNECTION_STR: ${{ steps.postgres.outputs.connection-uri }}

       - name: Set up postgres (macos)
         if: runner.os == 'macOS'
        uses: ./.github/actions/setup-postgres-macos
-        env:
-          CONNECTION_STR: ${{ steps.postgres.outputs.connection-uri }}

       - name: Set up postgres (windows)
         if: runner.os == 'Windows'
         uses: ./.github/actions/setup-postgres-windows
-        env:
-          CONNECTION_STR: ${{ steps.postgres.outputs.connection-uri }}

       - name: Install python tools
         run: |

@@ -225,7 +213,6 @@ jobs:
           command: tox -- --ddtrace
           env:
             PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}
-            CONNECTION_STR: ${{ steps.postgres.outputs.connection-uri }}

       - name: Get current date
         if: always()
.github/workflows/release.yml (21 lines changed)
@@ -247,24 +247,3 @@ jobs:

     secrets:
       SLACK_WEBHOOK_URL: ${{ secrets.SLACK_DEV_CORE_ALERTS }}
-
-  testing-slack-notification:
-    # sends notifications to #slackbot-test
-    name: Testing - Slack Notification
-    if: ${{ failure() && inputs.test_run && !inputs.nightly_release }}
-
-    needs:
-      [
-        bump-version-generate-changelog,
-        build-test-package,
-        github-release,
-        pypi-release,
-        docker-release,
-      ]
-
-    uses: dbt-labs/dbt-release/.github/workflows/slack-post-notification.yml@main
-    with:
-      status: "failure"
-
-    secrets:
-      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_TESTING_WEBHOOK_URL }}
@@ -1,4 +1,4 @@
 [settings]
 profile=black
 extend_skip_glob=.github/*,third-party-stubs/*,scripts/*
-known_first_party=dbt,dbt_adapters,dbt_common,dbt_extractor,dbt_semantic_interfaces
+known_first_party=dbt,dbt_adapters,dbt_common,dbt_extractor,dbt_semantic_interface
@@ -15,19 +15,16 @@ repos:
       args: [--unsafe]
     - id: check-json
     - id: end-of-file-fixer
-      exclude: schemas/dbt/manifest/
     - id: trailing-whitespace
       exclude_types:
         - "markdown"
     - id: check-case-conflict
 - repo: https://github.com/pycqa/isort
-  # rev must match what's in dev-requirements.txt
-  rev: 5.13.2
+  rev: 5.12.0
   hooks:
     - id: isort
 - repo: https://github.com/psf/black
-  # rev must match what's in dev-requirements.txt
-  rev: 24.3.0
+  rev: 22.3.0
   hooks:
     - id: black
     - id: black

@@ -37,7 +34,6 @@ repos:
       - "--check"
       - "--diff"
 - repo: https://github.com/pycqa/flake8
-  # rev must match what's in dev-requirements.txt
   rev: 4.0.1
   hooks:
     - id: flake8

@@ -45,7 +41,6 @@ repos:
     alias: flake8-check
     stages: [manual]
 - repo: https://github.com/pre-commit/mirrors-mypy
-  # rev must match what's in dev-requirements.txt
   rev: v1.4.1
   hooks:
     - id: mypy
@@ -170,9 +170,9 @@ Finally, you can also run a specific test or group of tests using [`pytest`](htt

 ```sh
 # run all unit tests in a file
-python3 -m pytest tests/unit/test_invocation_id.py
+python3 -m pytest tests/unit/test_base_column.py
 # run a specific unit test
-python3 -m pytest tests/unit/test_invocation_id.py::TestInvocationId::test_invocation_id
+python3 -m pytest tests/unit/test_base_column.py::TestNumericType::test__numeric_type
 # run specific Postgres functional tests
 python3 -m pytest tests/functional/sources
 ```
Makefile (4 lines changed)
@@ -144,7 +144,3 @@ help: ## Show this help message.
 	@echo
 	@echo 'options:'
 	@echo 'use USE_DOCKER=true to run target in a docker container'
-
-.PHONY: json_schema
-json_schema: ## Update generated JSON schema using code changes.
-	scripts/collect-artifact-schema.py --path schemas
codecov.yml (26 lines changed)
@@ -1,7 +1,6 @@
 ignore:
   - ".github"
   - ".changes"
-
 coverage:
   status:
     project:

@@ -12,28 +11,3 @@ coverage:
       default:
         target: auto
         threshold: 80%
-
-comment:
-  layout: "header, diff, flags, components" # show component info in the PR comment
-
-component_management:
-  default_rules: # default rules that will be inherited by all components
-    statuses:
-      - type: project # in this case every component that doesn't have a status defined will have a project type one
-        target: auto
-        threshold: 0.1%
-      - type: patch
-        target: 80%
-  individual_components:
-    - component_id: unittests
-      name: "Unit Tests"
-      flag_regexes:
-        - "unit"
-      statuses:
-        - type: patch
-          target: 80%
-          threshold: 5%
-    - component_id: integrationtests
-      name: "Integration Tests"
-      flag_regexes:
-        - "integration"
@@ -29,10 +29,6 @@ All existing resources are defined under `dbt/artifacts/resources/v1`.

 ## Making changes to dbt/artifacts

-### All changes
-
-All changes to any fields will require a manual update to [dbt-jsonschema](https://github.com/dbt-labs/dbt-jsonschema) to ensure live checking continues to work.
-
 ### Non-breaking changes

 Freely make incremental, non-breaking changes in-place to the latest major version of any artifact (minor or patch bumps). The only changes that are fully forward and backward compatible are:
@@ -38,7 +38,6 @@ from dbt.artifacts.resources.v1.macro import Macro, MacroArgument, MacroDependsO
 from dbt.artifacts.resources.v1.metric import (
     ConstantPropertyInput,
     ConversionTypeParams,
-    CumulativeTypeParams,
     Metric,
     MetricConfig,
     MetricInput,

@@ -46,7 +45,7 @@ from dbt.artifacts.resources.v1.metric import (
     MetricTimeWindow,
     MetricTypeParams,
 )
-from dbt.artifacts.resources.v1.model import Model, ModelConfig, TimeSpine
+from dbt.artifacts.resources.v1.model import Model, ModelConfig
 from dbt.artifacts.resources.v1.owner import Owner
 from dbt.artifacts.resources.v1.saved_query import (
     Export,
@@ -10,7 +10,6 @@ from dbt_common.contracts.config.properties import AdditionalPropertiesMixin
 from dbt_common.contracts.constraints import ColumnLevelConstraint
 from dbt_common.contracts.util import Mergeable
 from dbt_common.dataclass_schema import ExtensibleDbtClassMixin, dbtClassMixin
-from dbt_semantic_interfaces.type_enums import TimeGranularity

 NodeVersion = Union[str, float]

@@ -67,7 +66,6 @@ class ColumnInfo(AdditionalPropertiesMixin, ExtensibleDbtClassMixin):
     quote: Optional[bool] = None
     tags: List[str] = field(default_factory=list)
     _extra: Dict[str, Any] = field(default_factory=dict)
-    granularity: Optional[TimeGranularity] = None


 @dataclass
@@ -2,6 +2,13 @@ import time
 from dataclasses import dataclass, field
 from typing import Any, Dict, List, Literal, Optional

+from dbt_semantic_interfaces.references import MeasureReference, MetricReference
+from dbt_semantic_interfaces.type_enums import (
+    ConversionCalculationType,
+    MetricType,
+    TimeGranularity,
+)
+
 from dbt.artifacts.resources.base import GraphResource
 from dbt.artifacts.resources.types import NodeType
 from dbt.artifacts.resources.v1.components import DependsOn, RefArgs

@@ -11,13 +18,6 @@ from dbt.artifacts.resources.v1.semantic_layer_components import (
 )
 from dbt_common.contracts.config.base import BaseConfig, CompareBehavior, MergeBehavior
 from dbt_common.dataclass_schema import dbtClassMixin
-from dbt_semantic_interfaces.references import MeasureReference, MetricReference
-from dbt_semantic_interfaces.type_enums import (
-    ConversionCalculationType,
-    MetricType,
-    PeriodAggregation,
-    TimeGranularity,
-)

 """
 The following classes are dataclasses which are used to construct the Metric

@@ -80,13 +80,6 @@ class ConversionTypeParams(dbtClassMixin):
     constant_properties: Optional[List[ConstantPropertyInput]] = None


-@dataclass
-class CumulativeTypeParams(dbtClassMixin):
-    window: Optional[MetricTimeWindow] = None
-    grain_to_date: Optional[TimeGranularity] = None
-    period_agg: PeriodAggregation = PeriodAggregation.FIRST
-
-
 @dataclass
 class MetricTypeParams(dbtClassMixin):
     measure: Optional[MetricInputMeasure] = None
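For reference, the `CumulativeTypeParams` dataclass removed above is constructed like any other of these resource dataclasses. A minimal sketch, assuming the module path `dbt.artifacts.resources.v1.metric` implied by the surrounding hunks; the field values are illustrative:

```python
from dbt_semantic_interfaces.type_enums import PeriodAggregation, TimeGranularity

from dbt.artifacts.resources.v1.metric import CumulativeTypeParams  # path assumed

# Every field is optional except period_agg, which defaults to PeriodAggregation.FIRST.
params = CumulativeTypeParams(
    grain_to_date=TimeGranularity.MONTH,  # illustrative granularity
    period_agg=PeriodAggregation.FIRST,
)
```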
@@ -98,7 +91,6 @@ class MetricTypeParams(dbtClassMixin):
     grain_to_date: Optional[TimeGranularity] = None
     metrics: Optional[List[MetricInput]] = None
     conversion_type_params: Optional[ConversionTypeParams] = None
-    cumulative_type_params: Optional[CumulativeTypeParams] = None


 @dataclass

@@ -121,7 +113,6 @@ class Metric(GraphResource):
     type_params: MetricTypeParams
     filter: Optional[WhereFilterIntersection] = None
     metadata: Optional[SourceFileMetadata] = None
-    time_granularity: Optional[TimeGranularity] = None
     resource_type: Literal[NodeType.Metric]
     meta: Dict[str, Any] = field(default_factory=dict, metadata=MergeBehavior.Update.meta())
     tags: List[str] = field(default_factory=list)
@@ -11,7 +11,6 @@ from dbt.artifacts.resources.v1.components import (
 from dbt.artifacts.resources.v1.config import NodeConfig
 from dbt_common.contracts.config.base import MergeBehavior
 from dbt_common.contracts.constraints import ModelLevelConstraint
-from dbt_common.dataclass_schema import dbtClassMixin


 @dataclass

@@ -22,11 +21,6 @@ class ModelConfig(NodeConfig):
     )


-@dataclass
-class TimeSpine(dbtClassMixin):
-    standard_granularity_column: str
-
-
 @dataclass
 class Model(CompiledResource):
     resource_type: Literal[NodeType.Model]

@@ -38,7 +32,6 @@ class Model(CompiledResource):
     deprecation_date: Optional[datetime] = None
     defer_relation: Optional[DeferRelation] = None
     primary_key: List[str] = field(default_factory=list)
-    time_spine: Optional[TimeSpine] = None

     def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None):
         dct = super().__post_serialize__(dct, context)
@@ -4,6 +4,10 @@ import time
 from dataclasses import dataclass, field
 from typing import Any, Dict, List, Literal, Optional

+from dbt_semantic_interfaces.type_enums.export_destination_type import (
+    ExportDestinationType,
+)
+
 from dbt.artifacts.resources.base import GraphResource
 from dbt.artifacts.resources.types import NodeType
 from dbt.artifacts.resources.v1.components import DependsOn, RefArgs

@@ -13,9 +17,6 @@ from dbt.artifacts.resources.v1.semantic_layer_components import (
 )
 from dbt_common.contracts.config.base import BaseConfig, CompareBehavior, MergeBehavior
 from dbt_common.dataclass_schema import dbtClassMixin
-from dbt_semantic_interfaces.type_enums.export_destination_type import (
-    ExportDestinationType,
-)


 @dataclass

@@ -34,7 +35,6 @@ class Export(dbtClassMixin):

     name: str
     config: ExportConfig
-    unrendered_config: Dict[str, str] = field(default_factory=dict)


 @dataclass
@@ -1,12 +1,13 @@
 from dataclasses import dataclass
 from typing import List, Sequence, Tuple

-from dbt_common.dataclass_schema import dbtClassMixin
 from dbt_semantic_interfaces.call_parameter_sets import FilterCallParameterSets
 from dbt_semantic_interfaces.parsing.where_filter.where_filter_parser import (
     WhereFilterParser,
 )

+from dbt_common.dataclass_schema import dbtClassMixin
+

 @dataclass
 class WhereFilter(dbtClassMixin):
@@ -2,11 +2,6 @@ import time
 from dataclasses import dataclass, field
 from typing import Any, Dict, List, Optional, Sequence

-from dbt.artifacts.resources import SourceFileMetadata
-from dbt.artifacts.resources.base import GraphResource
-from dbt.artifacts.resources.v1.components import DependsOn, RefArgs
-from dbt_common.contracts.config.base import BaseConfig, CompareBehavior, MergeBehavior
-from dbt_common.dataclass_schema import dbtClassMixin
 from dbt_semantic_interfaces.references import (
     DimensionReference,
     EntityReference,

@@ -22,6 +17,12 @@ from dbt_semantic_interfaces.type_enums import (
     TimeGranularity,
 )

+from dbt.artifacts.resources import SourceFileMetadata
+from dbt.artifacts.resources.base import GraphResource
+from dbt.artifacts.resources.v1.components import DependsOn, RefArgs
+from dbt_common.contracts.config.base import BaseConfig, CompareBehavior, MergeBehavior
+from dbt_common.dataclass_schema import dbtClassMixin
+
 """
 The classes in this file are dataclasses which are used to construct the Semantic
 Model node in dbt-core. Additionally, these classes need to at a minimum support
@@ -18,34 +18,39 @@ class SnapshotConfig(NodeConfig):
     # Not using Optional because of serialization issues with a Union of str and List[str]
     check_cols: Union[str, List[str], None] = None

-    def final_validate(self):
-        if not self.strategy or not self.unique_key:
+    @classmethod
+    def validate(cls, data):
+        super().validate(data)
+        # Note: currently you can't just set these keys in schema.yml because this validation
+        # will fail when parsing the snapshot node.
+        if not data.get("strategy") or not data.get("unique_key") or not data.get("target_schema"):
             raise ValidationError(
-                "Snapshots must be configured with a 'strategy' and 'unique_key'."
+                "Snapshots must be configured with a 'strategy', 'unique_key', "
+                "and 'target_schema'."
             )
-        if self.strategy == "check":
-            if not self.check_cols:
+        if data.get("strategy") == "check":
+            if not data.get("check_cols"):
                 raise ValidationError(
                     "A snapshot configured with the check strategy must "
                     "specify a check_cols configuration."
                 )
-            if isinstance(self.check_cols, str) and self.check_cols != "all":
+            if isinstance(data["check_cols"], str) and data["check_cols"] != "all":
                 raise ValidationError(
-                    f"Invalid value for 'check_cols': {self.check_cols}. "
+                    f"Invalid value for 'check_cols': {data['check_cols']}. "
                     "Expected 'all' or a list of strings."
                 )
-        elif self.strategy == "timestamp":
-            if not self.updated_at:
+        elif data.get("strategy") == "timestamp":
+            if not data.get("updated_at"):
                 raise ValidationError(
                     "A snapshot configured with the timestamp strategy "
                     "must specify an updated_at configuration."
                 )
-            if self.check_cols:
+            if data.get("check_cols"):
                 raise ValidationError("A 'timestamp' snapshot should not have 'check_cols'")
         # If the strategy is not 'check' or 'timestamp' it's a custom strategy,
         # formerly supported with GenericSnapshotConfig

-        if self.materialized and self.materialized != "snapshot":
+        if data.get("materialized") and data.get("materialized") != "snapshot":
             raise ValidationError("A snapshot must have a materialized value of 'snapshot'")

     # Called by "calculate_node_config_dict" in ContextConfigGenerator
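The rewritten hook validates the raw config dict before the snapshot node is parsed, so all three required keys must be present up front. A minimal sketch of a payload that satisfies the checks above, assuming the module path `dbt.artifacts.resources.v1.snapshot`; the values are illustrative, and `super().validate(data)` may enforce additional schema rules not shown in this hunk:

```python
from dbt.artifacts.resources.v1.snapshot import SnapshotConfig  # path assumed

# A timestamp-strategy snapshot: 'strategy', 'unique_key', and 'target_schema'
# are always required; the timestamp strategy additionally requires
# 'updated_at' and must not set 'check_cols'.
SnapshotConfig.validate(
    {
        "strategy": "timestamp",
        "unique_key": "id",
        "target_schema": "snapshots",
        "updated_at": "updated_at",
    }
)
```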
@@ -77,11 +77,8 @@ class BaseArtifactMetadata(dbtClassMixin):
 # remote-compile-result
 # remote-execution-result
 # remote-run-result
-S = TypeVar("S", bound="VersionedSchema")


 def schema_version(name: str, version: int):
-    def inner(cls: Type[S]):
+    def inner(cls: Type[VersionedSchema]):
         cls.dbt_schema_version = SchemaVersion(
             name=name,
             version=version,
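`schema_version` is a class decorator that stamps a `SchemaVersion` onto the decorated artifact class. A hypothetical usage sketch; the artifact name, version number, and import path are illustrative:

```python
from dbt.artifacts.schemas.base import VersionedSchema, schema_version  # path assumed


@schema_version("my-artifact", 1)
class MyArtifact(VersionedSchema):
    # After decoration, MyArtifact.dbt_schema_version is
    # SchemaVersion(name="my-artifact", version=1).
    pass
```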
@@ -1,11 +1,2 @@
 # alias to latest
 from dbt.artifacts.schemas.catalog.v1.catalog import *  # noqa
-from dbt_common.contracts.metadata import (
-    CatalogKey,
-    CatalogTable,
-    ColumnMap,
-    ColumnMetadata,
-    StatsDict,
-    StatsItem,
-    TableMetadata,
-)
@@ -1,18 +1,71 @@
 from dataclasses import dataclass, field
 from datetime import datetime
-from typing import Any, Dict, List, Optional, Union
+from typing import Any, Dict, List, NamedTuple, Optional, Union

 from dbt.artifacts.schemas.base import (
     ArtifactMixin,
     BaseArtifactMetadata,
     schema_version,
 )
-from dbt_common.contracts.metadata import CatalogTable
 from dbt_common.dataclass_schema import dbtClassMixin
+from dbt_common.utils.formatting import lowercase

 Primitive = Union[bool, str, float, None]
 PrimitiveDict = Dict[str, Primitive]

+CatalogKey = NamedTuple(
+    "CatalogKey", [("database", Optional[str]), ("schema", str), ("name", str)]
+)
+
+
+@dataclass
+class StatsItem(dbtClassMixin):
+    id: str
+    label: str
+    value: Primitive
+    include: bool
+    description: Optional[str] = None
+
+
+StatsDict = Dict[str, StatsItem]
+
+
+@dataclass
+class ColumnMetadata(dbtClassMixin):
+    type: str
+    index: int
+    name: str
+    comment: Optional[str] = None
+
+
+ColumnMap = Dict[str, ColumnMetadata]
+
+
+@dataclass
+class TableMetadata(dbtClassMixin):
+    type: str
+    schema: str
+    name: str
+    database: Optional[str] = None
+    comment: Optional[str] = None
+    owner: Optional[str] = None
+
+
+@dataclass
+class CatalogTable(dbtClassMixin):
+    metadata: TableMetadata
+    columns: ColumnMap
+    stats: StatsDict
+    # the same table with two unique IDs will just be listed two times
+    unique_id: Optional[str] = None
+
+    def key(self) -> CatalogKey:
+        return CatalogKey(
+            lowercase(self.metadata.database),
+            self.metadata.schema.lower(),
+            self.metadata.name.lower(),
+        )
+
+
 @dataclass
 class CatalogMetadata(BaseArtifactMetadata):
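A short usage sketch of the relocated dataclasses; the import path follows the `catalog.v1.catalog` module re-exported above, and the values are illustrative. `key()` lowercases database, schema, and name, which keeps catalog lookups case-insensitive:

```python
from dbt.artifacts.schemas.catalog.v1.catalog import CatalogTable, TableMetadata

table = CatalogTable(
    metadata=TableMetadata(type="BASE TABLE", schema="Analytics", name="Orders", database="PROD"),
    columns={},  # ColumnMap: column name -> ColumnMetadata
    stats={},    # StatsDict: stat id -> StatsItem
    unique_id="model.my_project.orders",
)
print(table.key())  # CatalogKey(database='prod', schema='analytics', name='orders')
```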
@@ -158,8 +158,7 @@ class RunResultsArtifact(ExecutionResult, ArtifactMixin):
     @classmethod
     def upgrade_schema_version(cls, data):
         """This overrides the "upgrade_schema_version" call in VersionedSchema (via
-        ArtifactMixin) to modify the dictionary passed in from earlier versions of the run_results.
-        """
+        ArtifactMixin) to modify the dictionary passed in from earlier versions of the run_results."""
         run_results_schema_version = get_artifact_schema_version(data)
         # If less than the current version (v5), preprocess contents to match latest schema version
         if run_results_schema_version <= 5:
@@ -1,10 +1,7 @@
-from typing import IO, List, Optional, Union
+from typing import IO, Optional

 from click.exceptions import ClickException

-from dbt.artifacts.schemas.catalog import CatalogArtifact
-from dbt.contracts.graph.manifest import Manifest
-from dbt.contracts.results import RunExecutionResult
 from dbt.utils import ExitCodes


@@ -26,7 +23,7 @@ class CliException(ClickException):

     # the typing of _file is to satisfy the signature of ClickException.show
     # overriding this method prevents click from printing any exceptions to stdout
-    def show(self, _file: Optional[IO] = None) -> None:  # type: ignore[type-arg]
+    def show(self, _file: Optional[IO] = None) -> None:
         pass


@@ -34,17 +31,7 @@ class ResultExit(CliException):
     """This class wraps any exception that contains results while invoking dbt, or the
     results of an invocation that did not succeed but did not throw any exceptions."""

-    def __init__(
-        self,
-        result: Union[
-            bool,  # debug
-            CatalogArtifact,  # docs generate
-            List[str],  # list/ls
-            Manifest,  # parse
-            None,  # clean, deps, init, source
-            RunExecutionResult,  # build, compile, run, seed, snapshot, test, run-operation
-        ] = None,
-    ) -> None:
+    def __init__(self, result) -> None:
         super().__init__(ExitCodes.ModelError)
         self.result = result
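The deleted `Union` annotation doubled as documentation for what `result` holds per command. The same shapes are observable through dbt's programmatic entry point; a minimal sketch (`dbtRunner` and its result object are dbt-core's documented Python API):

```python
from dbt.cli.main import dbtRunner

res = dbtRunner().invoke(["list"])
if res.success:
    # For list/ls the payload is a List[str], matching the removed union above.
    for resource in res.result:
        print(resource)
```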
@@ -15,7 +15,7 @@ from dbt.cli.resolvers import default_log_path, default_project_dir
 from dbt.cli.types import Command as CliCommand
 from dbt.config.project import read_project_flags
 from dbt.contracts.project import ProjectFlags
-from dbt.deprecations import fire_buffered_deprecations, renamed_env_var
+from dbt.deprecations import renamed_env_var
 from dbt.events import ALL_EVENT_NAMES
 from dbt_common import ui
 from dbt_common.clients import jinja

@@ -57,7 +57,6 @@ def convert_config(config_name, config_value):
         ret = WarnErrorOptions(
             include=config_value.get("include", []),
             exclude=config_value.get("exclude", []),
-            silence=config_value.get("silence", []),
             valid_error_names=ALL_EVENT_NAMES,
         )
         return ret

@@ -92,8 +91,6 @@ class Flags:
         # Set the default flags.
         for key, value in FLAGS_DEFAULTS.items():
             object.__setattr__(self, key, value)
-        # Use to handle duplicate params in _assign_params
-        flags_defaults_list = list(FLAGS_DEFAULTS.keys())

         if ctx is None:
             ctx = get_current_context()

@@ -175,29 +172,13 @@ class Flags:
                         old_name=dep_param.envvar,
                         new_name=new_param.envvar,
                     )
-            # end deprecated_params

             # Set the flag value.
-            is_duplicate = (
-                hasattr(self, param_name.upper())
-                and param_name.upper() not in flags_defaults_list
-            )
-            # First time through, set as though FLAGS_DEFAULTS hasn't been set, so not a duplicate.
-            # Subsequent pass (to process "parent" params) should be treated as duplicates.
-            if param_name.upper() in flags_defaults_list:
-                flags_defaults_list.remove(param_name.upper())
-            # Note: the following determines whether parameter came from click default,
-            # not from FLAGS_DEFAULTS in __init__.
+            is_duplicate = hasattr(self, param_name.upper())
             is_default = ctx.get_parameter_source(param_name) == ParameterSource.DEFAULT
-            is_envvar = ctx.get_parameter_source(param_name) == ParameterSource.ENVIRONMENT

             flag_name = (new_name or param_name).upper()

-            # envvar flags are assigned in either parent or child context if there
-            # isn't an overriding cli command flag.
-            # If the flag has been encountered as a child cli flag, we don't
-            # want to overwrite with parent envvar, since the commandline flag takes precedence.
-            if (is_duplicate and not (is_default or is_envvar)) or not is_duplicate:
+            if (is_duplicate and not is_default) or not is_duplicate:
                 object.__setattr__(self, flag_name, param_value)

             # Track default assigned params.
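The precedence logic above hinges on click's `Context.get_parameter_source`, which reports whether a value came from the command line, an environment variable, or the default. A standalone sketch (the option name is illustrative) showing the three sources the deleted `is_default`/`is_envvar` checks distinguish:

```python
import click
from click.core import ParameterSource


@click.command()
@click.option("--log-level", default="info", envvar="LOG_LEVEL")
@click.pass_context
def cli(ctx: click.Context, log_level: str) -> None:
    # COMMANDLINE beats ENVIRONMENT beats DEFAULT: the same ordering the
    # is_duplicate / is_default / is_envvar checks above enforce for dbt flags.
    source = ctx.get_parameter_source("log_level")
    click.echo(f"log_level={log_level} (source={source})")


if __name__ == "__main__":
    cli()
```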
@@ -308,10 +289,6 @@ class Flags:
                 params_assigned_from_default, ["WARN_ERROR", "WARN_ERROR_OPTIONS"]
             )

-            # Handle arguments mutually exclusive with INLINE
-            self._assert_mutually_exclusive(params_assigned_from_default, ["SELECT", "INLINE"])
-            self._assert_mutually_exclusive(params_assigned_from_default, ["SELECTOR", "INLINE"])
-
         # Support lower cased access for legacy code.
         params = set(
             x for x in dir(self) if not callable(getattr(self, x)) and not x.startswith("__")

@@ -338,9 +315,7 @@ class Flags:
         """
         set_flag = None
         for flag in group:
-            flag_set_by_user = (
-                hasattr(self, flag) and flag.lower() not in params_assigned_from_default
-            )
+            flag_set_by_user = flag.lower() not in params_assigned_from_default
             if flag_set_by_user and set_flag:
                 raise DbtUsageException(
                     f"{flag.lower()}: not allowed with argument {set_flag.lower()}"

@@ -355,8 +330,6 @@ class Flags:
         # not get pickled when written to disk as json.
         object.__delattr__(self, "deprecated_env_var_warnings")

-        fire_buffered_deprecations()
-
     @classmethod
     def from_dict(cls, command: CliCommand, args_dict: Dict[str, Any]) -> "Flags":
         command_arg_list = command_params(command, args_dict)
@@ -218,9 +218,10 @@ def clean(ctx, **kwargs):
|
|||||||
"""Delete all folders in the clean-targets list (usually the dbt_packages and target directories.)"""
|
"""Delete all folders in the clean-targets list (usually the dbt_packages and target directories.)"""
|
||||||
from dbt.task.clean import CleanTask
|
from dbt.task.clean import CleanTask
|
||||||
|
|
||||||
with CleanTask(ctx.obj["flags"], ctx.obj["project"]) as task:
|
task = CleanTask(ctx.obj["flags"], ctx.obj["project"])
|
||||||
results = task.run()
|
|
||||||
success = task.interpret_results(results)
|
results = task.run()
|
||||||
|
success = task.interpret_results(results)
|
||||||
return results, success
|
return results, success
|
||||||
|
|
||||||
|
|
||||||
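The left-hand version runs the task inside a with block, which assumes the task classes implement the context-manager protocol. A minimal sketch of that pattern (BaseTask and EchoTask are hypothetical; the real classes live in dbt.task.* and their signatures may differ):

class BaseTask:
    def __enter__(self):
        # acquire resources (connections, temp dirs) before run() is called
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # always runs, even when run() raises; returning False re-raises
        return False

    def run(self):
        raise NotImplementedError


class EchoTask(BaseTask):
    def run(self):
        return "ok"


with EchoTask() as task:
    results = task.run()
print(results)  # -> ok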
@@ -273,7 +274,6 @@ def docs_generate(ctx, **kwargs):
 @click.pass_context
 @global_flags
 @p.browser
-@p.host
 @p.port
 @p.profiles_dir
 @p.project_dir
@@ -436,9 +436,9 @@ def deps(ctx, **kwargs):
             message=f"Version is required in --add-package when a package when source is {flags.SOURCE}",
             option_name="--add-package",
         )
-    with DepsTask(flags, ctx.obj["project"]) as task:
-        results = task.run()
-        success = task.interpret_results(results)
+    task = DepsTask(flags, ctx.obj["project"])
+    results = task.run()
+    success = task.interpret_results(results)
     return results, success


@@ -458,9 +458,10 @@ def init(ctx, **kwargs):
     """Initialize a new dbt project."""
     from dbt.task.init import InitTask

-    with InitTask(ctx.obj["flags"]) as task:
-        results = task.run()
-        success = task.interpret_results(results)
+    task = InitTask(ctx.obj["flags"])
+
+    results = task.run()
+    success = task.interpret_results(results)
     return results, success


@@ -1,6 +1,6 @@
 from click import Choice, ParamType

-from dbt.config.utils import normalize_warn_error_options, parse_cli_yaml_string
+from dbt.config.utils import exclusive_primary_alt_value_setting, parse_cli_yaml_string
 from dbt.events import ALL_EVENT_NAMES
 from dbt.exceptions import OptionNotYamlDictError, ValidationError
 from dbt_common.exceptions import DbtValidationError
@@ -51,7 +51,12 @@ class WarnErrorOptionsType(YAML):
     def convert(self, value, param, ctx):
         # this function is being used by param in click
         include_exclude = super().convert(value, param, ctx)
-        normalize_warn_error_options(include_exclude)
+        exclusive_primary_alt_value_setting(
+            include_exclude, "include", "error", "warn_error_options"
+        )
+        exclusive_primary_alt_value_setting(
+            include_exclude, "exclude", "warn", "warn_error_options"
+        )

         return WarnErrorOptions(
             include=include_exclude.get("include", []),
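Both normalize_warn_error_options and the pair of exclusive_primary_alt_value_setting calls collapse the error/warn aliases into include/exclude. A toy re-implementation of that collapsing (not dbt's code; the event name is chosen for illustration):

def collapse_alias(options, primary, alt):
    # refuse ambiguous input, then fold the alias into the primary key
    if primary in options and alt in options:
        raise ValueError(f"Only `{alt}` or `{primary}` can be specified, not both")
    if alt in options:
        options[primary] = options.pop(alt)

opts = {"error": ["NoNodesForSelectionCriteria"], "warn": []}
collapse_alias(opts, "include", "error")
collapse_alias(opts, "exclude", "warn")
print(opts)  # -> {'include': ['NoNodesForSelectionCriteria'], 'exclude': []}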
@@ -135,14 +135,6 @@ full_refresh = click.option(
     is_flag=True,
 )

-host = click.option(
-    "--host",
-    envvar="DBT_HOST",
-    help="host to serve dbt docs on",
-    type=click.STRING,
-    default="127.0.0.1",
-)
-
 indirect_selection = click.option(
     "--indirect-selection",
     envvar="DBT_INDIRECT_SELECTION",
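For orientation: this params module defines each option once at module level and reuses it as a decorator across commands (which is how @p.host was attached to docs serve in the earlier hunk). A self-contained sketch of that pattern with a made-up option:

import click

# defined once, applied to any number of commands
verbose = click.option("--verbose", envvar="MYAPP_VERBOSE", is_flag=True)

@click.command()
@verbose
def serve(verbose):
    click.echo(f"verbose={verbose}")

if __name__ == "__main__":
    serve()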
@@ -41,13 +41,7 @@ from dbt_common.events.functions import LOG_VERSION, fire_event
 from dbt_common.events.helpers import get_json_string_utcnow
 from dbt_common.exceptions import DbtBaseException as DbtException
 from dbt_common.invocation import reset_invocation_id
-from dbt_common.record import (
-    Recorder,
-    RecorderMode,
-    get_record_mode_from_env,
-    get_record_types_from_dict,
-    get_record_types_from_env,
-)
+from dbt_common.record import Recorder, RecorderMode, get_record_mode_from_env
 from dbt_common.utils import cast_dict_to_dict_of_strings


@@ -107,23 +101,13 @@ def preflight(func):

 def setup_record_replay():
     rec_mode = get_record_mode_from_env()
-    rec_types = get_record_types_from_env()

     recorder: Optional[Recorder] = None
     if rec_mode == RecorderMode.REPLAY:
-        previous_recording_path = os.environ.get("DBT_RECORDER_FILE_PATH")
-        recorder = Recorder(
-            RecorderMode.REPLAY, types=rec_types, previous_recording_path=previous_recording_path
-        )
-    elif rec_mode == RecorderMode.DIFF:
-        previous_recording_path = os.environ.get("DBT_RECORDER_FILE_PATH")
-        # ensure types match the previous recording
-        types = get_record_types_from_dict(previous_recording_path)
-        recorder = Recorder(
-            RecorderMode.DIFF, types=types, previous_recording_path=previous_recording_path
-        )
+        recording_path = os.environ["DBT_REPLAY"]
+        recorder = Recorder(RecorderMode.REPLAY, recording_path)
     elif rec_mode == RecorderMode.RECORD:
-        recorder = Recorder(RecorderMode.RECORD, types=rec_types)
+        recorder = Recorder(RecorderMode.RECORD)

     get_invocation_context().recorder = recorder

@@ -132,10 +116,7 @@ def tear_down_record_replay():
     recorder = get_invocation_context().recorder
     if recorder is not None:
         if recorder.mode == RecorderMode.RECORD:
-            recorder.write()
-        if recorder.mode == RecorderMode.DIFF:
-            recorder.write()
-            recorder.write_diffs(diff_file_name="recording_diffs.json")
+            recorder.write("recording.json")
         elif recorder.mode == RecorderMode.REPLAY:
             recorder.write_diffs("replay_diffs.json")

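For context, a toy version of the record/replay idea these hunks configure. This is not dbt_common's Recorder API (whose signatures differ even between the two sides of this diff): it just records intercepted results to a JSON file, then replays them instead of re-executing.

import json

class ToyRecorder:
    def __init__(self, mode, path):
        self.mode, self.path, self.log = mode, path, []
        if mode == "replay":
            with open(path) as f:
                self.log = json.load(f)

    def intercept(self, func, *args):
        if self.mode == "replay":
            return self.log.pop(0)  # reuse the recorded result
        result = func(*args)
        self.log.append(result)  # remember it for later replay
        return result

    def write(self):
        if self.mode == "record":
            with open(self.path, "w") as f:
                json.dump(self.log, f)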
@@ -179,11 +160,9 @@ def postflight(func):
                     process_in_blocks=rusage.ru_inblock,
                     process_out_blocks=rusage.ru_oublock,
                 ),
-                (
-                    EventLevel.INFO
-                    if "flags" in ctx.obj and ctx.obj["flags"].SHOW_RESOURCE_REPORT
-                    else None
-                ),
+                EventLevel.INFO
+                if "flags" in ctx.obj and ctx.obj["flags"].SHOW_RESOURCE_REPORT
+                else None,
             )

             fire_event(

@@ -1,13 +1,11 @@
-from typing import Any, Dict, List, Optional, Union
+from typing import Any, Dict, Optional

 import jinja2

-from dbt.artifacts.resources import RefArgs
-from dbt.exceptions import MacroNamespaceNotStringError, ParsingError
+from dbt.exceptions import MacroNamespaceNotStringError
 from dbt_common.clients.jinja import get_environment
 from dbt_common.exceptions.macros import MacroNameNotStringError
 from dbt_common.tests import test_caching_enabled
-from dbt_extractor import ExtractionError, py_extract_from_source  # type: ignore

 _TESTING_MACRO_CACHE: Optional[Dict[str, Any]] = {}

@@ -155,39 +153,3 @@ def statically_parse_adapter_dispatch(func_call, ctx, db_wrapper):
             possible_macro_calls.append(f"{package_name}.{func_name}")

     return possible_macro_calls
-
-
-def statically_parse_ref_or_source(expression: str) -> Union[RefArgs, List[str]]:
-    """
-    Returns a RefArgs or List[str] object, corresponding to ref or source respectively, given an input jinja expression.
-
-    input: str representing how input node is referenced in tested model sql
-      * examples:
-        - "ref('my_model_a')"
-        - "ref('my_model_a', version=3)"
-        - "ref('package', 'my_model_a', version=3)"
-        - "source('my_source_schema', 'my_source_name')"
-
-    If input is not a well-formed jinja ref or source expression, a ParsingError is raised.
-    """
-    ref_or_source: Union[RefArgs, List[str]]
-
-    try:
-        statically_parsed = py_extract_from_source(f"{{{{ {expression} }}}}")
-    except ExtractionError:
-        raise ParsingError(f"Invalid jinja expression: {expression}")
-
-    if statically_parsed.get("refs"):
-        raw_ref = list(statically_parsed["refs"])[0]
-        ref_or_source = RefArgs(
-            package=raw_ref.get("package"),
-            name=raw_ref.get("name"),
-            version=raw_ref.get("version"),
-        )
-    elif statically_parsed.get("sources"):
-        source_name, source_table_name = list(statically_parsed["sources"])[0]
-        ref_or_source = [source_name, source_table_name]
-    else:
-        raise ParsingError(f"Invalid ref or source expression: {expression}")
-
-    return ref_or_source
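A usage sketch of the removed helper's core dependency, based only on how the code above consumes it; the exact result shape is inferred from those .get() calls, so treat it as an assumption:

from dbt_extractor import py_extract_from_source  # type: ignore

parsed = py_extract_from_source("{{ ref('my_model_a', version=3) }}")
# the removed code reads parsed["refs"] / parsed["sources"]; each ref appears
# to expose optional "package", "name", and "version" keys
for raw_ref in parsed.get("refs", []):
    print(raw_ref.get("package"), raw_ref.get("name"), raw_ref.get("version"))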
@@ -21,7 +21,6 @@ from dbt.contracts.graph.nodes import (
     InjectedCTE,
     ManifestNode,
     ManifestSQLNode,
-    ModelNode,
     SeedNode,
     UnitTestDefinition,
     UnitTestNode,
@@ -30,15 +29,12 @@ from dbt.events.types import FoundStats, WritingInjectedSQLForNode
 from dbt.exceptions import (
     DbtInternalError,
     DbtRuntimeError,
-    ForeignKeyConstraintToSyntaxError,
     GraphDependencyNotFoundError,
-    ParsingError,
 )
 from dbt.flags import get_flags
 from dbt.graph import Graph
 from dbt.node_types import ModelLanguage, NodeType
 from dbt_common.clients.system import make_directory
-from dbt_common.contracts.constraints import ConstraintType
 from dbt_common.events.contextvars import get_node_info
 from dbt_common.events.format import pluralize
 from dbt_common.events.functions import fire_event
@@ -375,7 +371,7 @@ class Compiler:

             _extend_prepended_ctes(prepended_ctes, new_prepended_ctes)

-            new_cte_name = self.add_ephemeral_prefix(cte_model.identifier)
+            new_cte_name = self.add_ephemeral_prefix(cte_model.name)
             rendered_sql = cte_model._pre_injected_sql or cte_model.compiled_code
             sql = f" {new_cte_name} as (\n{rendered_sql}\n)"

@@ -441,31 +437,8 @@ class Compiler:
             relation_name = str(relation_cls.create_from(self.config, node))
             node.relation_name = relation_name

-        # Compile 'ref' and 'source' expressions in foreign key constraints
-        if isinstance(node, ModelNode):
-            for constraint in node.all_constraints:
-                if constraint.type == ConstraintType.foreign_key and constraint.to:
-                    constraint.to = self._compile_relation_for_foreign_key_constraint_to(
-                        manifest, node, constraint.to
-                    )
-
         return node

-    def _compile_relation_for_foreign_key_constraint_to(
-        self, manifest: Manifest, node: ManifestSQLNode, to_expression: str
-    ) -> str:
-        try:
-            foreign_key_node = manifest.find_node_from_ref_or_source(to_expression)
-        except ParsingError:
-            raise ForeignKeyConstraintToSyntaxError(node, to_expression)
-
-        if not foreign_key_node:
-            raise GraphDependencyNotFoundError(node, to_expression)
-
-        adapter = get_adapter(self.config)
-        relation_name = str(adapter.Relation.create_from(self.config, foreign_key_node))
-        return relation_name
-
     # This method doesn't actually "compile" any of the nodes. That is done by the
     # "compile_node" method. This creates a Linker and builds the networkx graph,
     # writes out the graph.gpickle file, and prints the stats, returning a Graph object.
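For context on the removed hook: it rewrote the to expression of a foreign-key constraint into a concrete relation name at compile time. A hypothetical before/after of the data it touched (the shapes are inferred from the code above; the rendered name is an illustrative value only):

# constraint config as a model might declare it (normally written in yaml)
constraint = {"type": "foreign_key", "columns": ["customer_id"], "to": "ref('customers')"}

# after the removed hook ran, constraint["to"] held a rendered relation name:
constraint["to"] = '"analytics"."prod"."customers"'  # illustrative value only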
@@ -547,8 +520,6 @@ class Compiler:
         the node's raw_code into compiled_code, and then calls the
         recursive method to "prepend" the ctes.
         """
-        # REVIEW: UnitTestDefinition shouldn't be possible here because of the
-        # type of node, and it is likewise an invalid return type.
         if isinstance(node, UnitTestDefinition):
             return node

@@ -10,7 +10,7 @@ from dbt import deprecations
 from dbt.adapters.contracts.connection import QueryComment
 from dbt.clients.yaml_helper import load_yaml_text
 from dbt.config.selectors import SelectorDict
-from dbt.config.utils import normalize_warn_error_options
+from dbt.config.utils import exclusive_primary_alt_value_setting
 from dbt.constants import (
     DBT_PROJECT_FILE_NAME,
     DEPENDENCIES_FILE_NAME,
@@ -480,7 +480,6 @@ class PartialProject(RenderComponents):
                 rendered.selectors_dict["selectors"]
             )
         dbt_cloud = cfg.dbt_cloud
-        flags: Dict[str, Any] = cfg.flags

         project = Project(
             project_name=name,
@@ -525,7 +524,6 @@ class PartialProject(RenderComponents):
             project_env_vars=project_env_vars,
             restrict_access=cfg.restrict_access,
             dbt_cloud=dbt_cloud,
-            flags=flags,
         )
         # sanity check - this means an internal issue
         project.validate()
@@ -570,6 +568,11 @@ class PartialProject(RenderComponents):
         ) = package_and_project_data_from_root(project_root)
         selectors_dict = selector_data_from_root(project_root)

+        if "flags" in project_dict:
+            # We don't want to include "flags" in the Project,
+            # it goes in ProjectFlags
+            project_dict.pop("flags")
+
         return cls.from_dicts(
             project_root=project_root,
             project_dict=project_dict,
@@ -642,7 +645,6 @@ class Project:
     project_env_vars: Dict[str, Any]
     restrict_access: bool
     dbt_cloud: Dict[str, Any]
-    flags: Dict[str, Any]

     @property
     def all_source_paths(self) -> List[str]:
@@ -722,7 +724,6 @@ class Project:
                 "require-dbt-version": [v.to_version_string() for v in self.dbt_version],
                 "restrict-access": self.restrict_access,
                 "dbt-cloud": self.dbt_cloud,
-                "flags": self.flags,
             }
         )
         if self.query_comment:
@@ -820,15 +821,20 @@ def read_project_flags(project_dir: str, profiles_dir: str) -> ProjectFlags:

         if profile_project_flags:
             # This can't use WARN_ERROR or WARN_ERROR_OPTIONS because they're in
-            # the config that we're loading. Uses special "buffer" method and fired after flags are initialized in preflight.
-            deprecations.buffer("project-flags-moved")
+            # the config that we're loading. Uses special "warn" method.
+            deprecations.warn("project-flags-moved")
             project_flags = profile_project_flags

     if project_flags is not None:
         # handle collapsing `include` and `error` as well as collapsing `exclude` and `warn`
         # for warn_error_options
-        warn_error_options = project_flags.get("warn_error_options", {})
-        normalize_warn_error_options(warn_error_options)
+        warn_error_options = project_flags.get("warn_error_options")
+        exclusive_primary_alt_value_setting(
+            warn_error_options, "include", "error", "warn_error_options"
+        )
+        exclusive_primary_alt_value_setting(
+            warn_error_options, "exclude", "warn", "warn_error_options"
+        )

         ProjectFlags.validate(project_flags)
         return ProjectFlags.from_dict(project_flags)

@@ -193,7 +193,6 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
             log_cache_events=log_cache_events,
             dependencies=dependencies,
             dbt_cloud=project.dbt_cloud,
-            flags=project.flags,
         )

     # Called by 'load_projects' in this class
@@ -291,9 +290,9 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
             project_name=self.project_name,
             project_id=self.hashed_name(),
             user_id=tracking.active_user.id if tracking.active_user else None,
-            send_anonymous_usage_stats=(
-                get_flags().SEND_ANONYMOUS_USAGE_STATS if tracking.active_user else None
-            ),
+            send_anonymous_usage_stats=get_flags().SEND_ANONYMOUS_USAGE_STATS
+            if tracking.active_user
+            else None,
             adapter_type=self.credentials.type,
         )

@@ -49,18 +49,5 @@ def exclusive_primary_alt_value_setting(
                 f"Only `{alt}` or `{primary}` can be specified{where}, not both"
             )

-    if alt in dictionary:
-        alt_value = dictionary.pop(alt)
-        dictionary[primary] = alt_value
-
-
-def normalize_warn_error_options(warn_error_options: Dict[str, Any]) -> None:
-    exclusive_primary_alt_value_setting(
-        warn_error_options, "include", "error", "warn_error_options"
-    )
-    exclusive_primary_alt_value_setting(
-        warn_error_options, "exclude", "warn", "warn_error_options"
-    )
-    for key in ("include", "exclude", "silence"):
-        if key in warn_error_options and warn_error_options[key] is None:
-            warn_error_options[key] = []
+    if alt_options:
+        dictionary[primary] = alt_options
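One subtle difference between the two versions above: the left-hand `if alt in dictionary:` moves the alias even when its value is falsy, while the right-hand `if alt_options:` drops falsy values (assuming alt_options was popped from the dictionary earlier in the function, which is outside this hunk). A small demo of that gap:

def collapse_left(d, primary, alt):
    if alt in d:                      # key presence is enough
        d[primary] = d.pop(alt)

def collapse_right(d, primary, alt):
    alt_options = d.pop(alt, None)    # assumed pop location
    if alt_options:                   # truthiness check drops [] / None
        d[primary] = alt_options

a, b = {"warn": []}, {"warn": []}
collapse_left(a, "exclude", "warn")
collapse_right(b, "exclude", "warn")
print(a, b)  # -> {'exclude': []} {}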
@@ -1,5 +1,3 @@
-from dbt_semantic_interfaces.type_enums import TimeGranularity
-
 DEFAULT_ENV_PLACEHOLDER = "DBT_DEFAULT_PLACEHOLDER"

 SECRET_PLACEHOLDER = "$$$DBT_SECRET_START$$${}$$$DBT_SECRET_END$$$"
@@ -17,8 +15,5 @@ DEPENDENCIES_FILE_NAME = "dependencies.yml"
 PACKAGE_LOCK_FILE_NAME = "package-lock.yml"
 MANIFEST_FILE_NAME = "manifest.json"
 SEMANTIC_MANIFEST_FILE_NAME = "semantic_manifest.json"
-LEGACY_TIME_SPINE_MODEL_NAME = "metricflow_time_spine"
-LEGACY_TIME_SPINE_GRANULARITY = TimeGranularity.DAY
-MINIMUM_REQUIRED_TIME_SPINE_GRANULARITY = TimeGranularity.DAY
 PARTIAL_PARSE_FILE_NAME = "partial_parse.msgpack"
 PACKAGE_LOCK_HASH_KEY = "sha1_hash"

@@ -27,7 +27,8 @@ class ConfigSource:
     def __init__(self, project):
         self.project = project

-    def get_config_dict(self, resource_type: NodeType): ...
+    def get_config_dict(self, resource_type: NodeType):
+        ...


 class UnrenderedConfig(ConfigSource):
@@ -129,12 +130,12 @@ class BaseContextConfigGenerator(Generic[T]):
         return self._project_configs(self._active_project, fqn, resource_type)

     @abstractmethod
-    def _update_from_config(
-        self, result: T, partial: Dict[str, Any], validate: bool = False
-    ) -> T: ...
+    def _update_from_config(self, result: T, partial: Dict[str, Any], validate: bool = False) -> T:
+        ...

     @abstractmethod
-    def initial_result(self, resource_type: NodeType, base: bool) -> T: ...
+    def initial_result(self, resource_type: NodeType, base: bool) -> T:
+        ...

     def calculate_node_config(
         self,
@@ -180,7 +181,8 @@ class BaseContextConfigGenerator(Generic[T]):
         project_name: str,
         base: bool,
         patch_config_dict: Optional[Dict[str, Any]] = None,
-    ) -> Dict[str, Any]: ...
+    ) -> Dict[str, Any]:
+        ...


 class ContextConfigGenerator(BaseContextConfigGenerator[C]):

@@ -8,7 +8,7 @@ from dbt.adapters.exceptions import (
     RelationWrongTypeError,
 )
 from dbt.adapters.exceptions.cache import CacheInconsistencyError
-from dbt.events.types import JinjaLogWarning, SnapshotTimestampWarning
+from dbt.events.types import JinjaLogWarning
 from dbt.exceptions import (
     AmbiguousAliasError,
     AmbiguousCatalogMatchError,
@@ -116,17 +116,6 @@ def raise_fail_fast_error(msg, node=None) -> NoReturn:
     raise FailFastError(msg, node=node)


-def warn_snapshot_timestamp_data_types(
-    snapshot_time_data_type: str, updated_at_data_type: str
-) -> None:
-    warn_or_error(
-        SnapshotTimestampWarning(
-            snapshot_time_data_type=snapshot_time_data_type,
-            updated_at_data_type=updated_at_data_type,
-        )
-    )
-
-
 # Update this when a new function should be added to the
 # dbt context's `exceptions` key!
 CONTEXT_EXPORTS = {
@@ -152,7 +141,6 @@ CONTEXT_EXPORTS = {
         raise_contract_error,
         column_type_missing,
         raise_fail_fast_error,
-        warn_snapshot_timestamp_data_types,
     ]
 }


@@ -239,7 +239,8 @@ class BaseRefResolver(BaseResolver):
     @abc.abstractmethod
     def resolve(
         self, name: str, package: Optional[str] = None, version: Optional[NodeVersion] = None
-    ) -> RelationProxy: ...
+    ) -> RelationProxy:
+        ...

     def _repack_args(
         self, name: str, package: Optional[str], version: Optional[NodeVersion]
@@ -305,7 +306,8 @@ class BaseSourceResolver(BaseResolver):

 class BaseMetricResolver(BaseResolver):
     @abc.abstractmethod
-    def resolve(self, name: str, package: Optional[str] = None) -> MetricReference: ...
+    def resolve(self, name: str, package: Optional[str] = None) -> MetricReference:
+        ...

     def _repack_args(self, name: str, package: Optional[str]) -> List[str]:
         if package is None:
@@ -339,7 +341,8 @@ class BaseMetricResolver(BaseResolver):


 class Config(Protocol):
-    def __init__(self, model, context_config: Optional[ContextConfig]): ...
+    def __init__(self, model, context_config: Optional[ContextConfig]):
+        ...


 # Implementation of "config(..)" calls in models
@@ -974,8 +977,7 @@ class ProviderContext(ManifestContext):
             table = agate_helper.from_csv(path, text_columns=column_types, delimiter=delimiter)
         except ValueError as e:
             raise LoadAgateTableValueError(e, node=self.model)
-        # this is used by some adapters
-        table.original_abspath = os.path.abspath(path)  # type: ignore
+        table.original_abspath = os.path.abspath(path)
         return table

     @contextproperty()
Some files were not shown because too many files have changed in this diff.