Mirror of https://github.com/dbt-labs/dbt-core
Synced 2025-12-22 05:01:28 +00:00
Compare commits (74 commits)
update-ind...qmalcolm--
| SHA1 |
|---|
| 8f9bb6f47b |
| 21a46332f1 |
| ff2726c3b5 |
| 014444dc18 |
| 25c2042dc9 |
| 0a160fc27a |
| c598741262 |
| f9c2b9398f |
| cab6dabbc7 |
| e1621ebc54 |
| cd90d4493c |
| 560d151dcd |
| 229c537748 |
| 79ad0a3243 |
| c668846404 |
| c4958de166 |
| 33161a3035 |
| 471b816dcd |
| bef2d20c21 |
| 2a26fabfdf |
| 4c7d922a6d |
| b03291548a |
| a7af3b3831 |
| 6e4564ab05 |
| 1aeff2c58f |
| 601fee0d5f |
| 88b8b10df1 |
| 4ea0e1007c |
| a309283a7c |
| b10fa79ae8 |
| 37e2725038 |
| 37fd299ad0 |
| a94027acea |
| b59c9075e2 |
| c215697a02 |
| d936a630c1 |
| 11ee2b9c42 |
| 64c59476f4 |
| 2bae05b8ed |
| ca163c3d6e |
| 9a796aa202 |
| 51ff85bb2d |
| d389ff1450 |
| 4415731da4 |
| 0fdc83af9d |
| 71a8a41104 |
| da19d7ba9f |
| 1475abb1cb |
| 27b2f965dd |
| 100352d6b4 |
| 8ee8b2560a |
| d4a6482091 |
| 8639290108 |
| e699f5d042 |
| e977b3eee5 |
| c5be8e2a93 |
| bff116dbed |
| 4df120e40e |
| e53420c1d0 |
| 88ccc8a447 |
| a98059967d |
| b680c7ae95 |
| a677abd5e8 |
| 8c850b58cb |
| a34267f54b |
| 155482851a |
| 81386a7a43 |
| d8e38c1a1d |
| 3e37d77780 |
| e0783c2922 |
| c2d4643f9d |
| 84456f50f6 |
| fb10bb4aea |
| 366d4ad04a |
.changes/unreleased/Docs-20240311-140344.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Docs
+body: Enable display of unit tests
+time: 2024-03-11T14:03:44.490834-04:00
+custom:
+  Author: gshank
+  Issue: "501"

.changes/unreleased/Docs-20240501-021050.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Docs
+body: Unit tests not rendering
+time: 2024-05-01T02:10:50.987412+02:00
+custom:
+  Author: aranke
+  Issue: "506"

.changes/unreleased/Docs-20240516-223036.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Docs
+body: Add support for Saved Query node
+time: 2024-05-16T22:30:36.206492-07:00
+custom:
+  Author: ChenyuLInx
+  Issue: "486"

.changes/unreleased/Docs-20240613-151048.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Docs
+body: Fix npm security vulnerabilities as of June 2024
+time: 2024-06-13T15:10:48.301989+01:00
+custom:
+  Author: aranke
+  Issue: "513"
.changes/unreleased/Features-20240522-000309.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Features
+body: Maximally parallelize dbt clone
+  in clone command"
+time: 2024-05-22T00:03:09.765977-04:00
+custom:
+  Author: michelleark
+  Issue: "7914"

.changes/unreleased/Features-20240527-124405.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Features
+body: Add --host flag to dbt docs serve, defaulting to '127.0.0.1'
+time: 2024-05-27T12:44:05.040843-04:00
+custom:
+  Author: michelleark
+  Issue: "10229"

.changes/unreleased/Features-20240531-150816.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Features
+body: Update data_test to accept arbitrary config options
+time: 2024-05-31T15:08:16.431966-05:00
+custom:
+  Author: McKnight-42
+  Issue: "10197"

.changes/unreleased/Features-20240606-112334.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Features
+body: add pre_model and post_model hook calls to data and unit tests to be able to provide extra config options
+time: 2024-06-06T11:23:34.758675-05:00
+custom:
+  Author: McKnight-42
+  Issue: "10198"

.changes/unreleased/Features-20240617-103948.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Features
+body: add --empty value to jinja context as flags.EMPTY
+time: 2024-06-17T10:39:48.275801-04:00
+custom:
+  Author: michelleark
+  Issue: "10317"

.changes/unreleased/Features-20240625-095107.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Features
+body: Support cumulative_type_params & sub-daily granularities in semantic manifest.
+time: 2024-06-25T09:51:07.983248-07:00
+custom:
+  Author: courtneyholcomb
+  Issue: "10360"

.changes/unreleased/Features-20240627-162953.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Features
+body: Add time_granularity to metric spec.
+time: 2024-06-27T16:29:53.500917-07:00
+custom:
+  Author: courtneyholcomb
+  Issue: "10376"

.changes/unreleased/Features-20240712-214546.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Features
+body: Support standard schema/database fields for snapshots
+time: 2024-07-12T21:45:46.06011-04:00
+custom:
+  Author: gshank
+  Issue: "10301"

.changes/unreleased/Features-20240719-161841.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Features
+body: Support ref and source in foreign key constraint expressions, bump dbt-common minimum to 1.6
+time: 2024-07-19T16:18:41.434278-04:00
+custom:
+  Author: michelleark
+  Issue: "8062"

.changes/unreleased/Features-20240722-202238.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Features
+body: Support new semantic layer time spine configs to enable sub-daily granularity.
+time: 2024-07-22T20:22:38.258249-07:00
+custom:
+  Author: courtneyholcomb
+  Issue: "10475"
.changes/unreleased/Fixes-20240113-073615.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Convert "Skipping model due to fail_fast" message to DEBUG level
+time: 2024-01-13T07:36:15.836294-00:00
+custom:
+  Author: scottgigante,nevdelap
+  Issue: "8774"

.changes/unreleased/Fixes-20240522-182855.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Fixes
+body: 'Fix: Order-insensitive unit test equality assertion for expected/actual with
+  multiple nulls'
+time: 2024-05-22T18:28:55.91733-04:00
+custom:
+  Author: michelleark
+  Issue: "10167"

.changes/unreleased/Fixes-20240523-204251.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Renaming or removing a contracted model should raise a BreakingChange warning/error
+time: 2024-05-23T20:42:51.033946-04:00
+custom:
+  Author: michelleark
+  Issue: "10116"

.changes/unreleased/Fixes-20240524-131135.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: prefer disabled project nodes to external node
+time: 2024-05-24T13:11:35.440443-04:00
+custom:
+  Author: michelleark
+  Issue: "10224"

.changes/unreleased/Fixes-20240605-111652.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix issues with selectors and inline nodes
+time: 2024-06-05T11:16:52.187667-04:00
+custom:
+  Author: gshank
+  Issue: 8943 9269

.changes/unreleased/Fixes-20240607-134648.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix snapshot config to work in yaml files
+time: 2024-06-07T13:46:48.383215-04:00
+custom:
+  Author: gshank
+  Issue: "4000"

.changes/unreleased/Fixes-20240610-132130.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Improve handling of error when loading schema file list
+time: 2024-06-10T13:21:30.963371-04:00
+custom:
+  Author: gshank
+  Issue: "10284"

.changes/unreleased/Fixes-20240612-124256.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Saved Query node fail during skip
+time: 2024-06-12T12:42:56.329073-07:00
+custom:
+  Author: ChenyuLInx
+  Issue: "10029"

.changes/unreleased/Fixes-20240612-152139.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Implement state:modified for saved queries
+time: 2024-06-12T15:21:39.851426-04:00
+custom:
+  Author: gshank
+  Issue: "10294"

.changes/unreleased/Fixes-20240613-183117.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: DOn't warn on `unit_test` config paths that are properly used
+time: 2024-06-13T18:31:17.486497-07:00
+custom:
+  Author: QMalcolm
+  Issue: "10311"

.changes/unreleased/Fixes-20240624-171729.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix setting `silence` of `warn_error_options` via `dbt_project.yaml` flags
+time: 2024-06-24T17:17:29.464865-07:00
+custom:
+  Author: QMalcolm
+  Issue: "10160"

.changes/unreleased/Fixes-20240625-171737.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Fixes
+body: Attempt to provide test fixture tables with all values to set types correctly
+  for comparisong with source tables
+time: 2024-06-25T17:17:37.514619-07:00
+custom:
+  Author: versusfacit
+  Issue: "10365"

.changes/unreleased/Fixes-20240627-154448.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Limit data_tests deprecation to root_project
+time: 2024-06-27T15:44:48.579869-04:00
+custom:
+  Author: gshank
+  Issue: "9835"

.changes/unreleased/Fixes-20240709-172440.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: CLI flags should take precedence over env var flags
+time: 2024-07-09T17:24:40.918977-04:00
+custom:
+  Author: gshank
+  Issue: "10304"

.changes/unreleased/Fixes-20240714-100254.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix typing for artifact schemas
+time: 2024-07-14T10:02:54.452099+09:00
+custom:
+  Author: nakamichiworks
+  Issue: "10442"

.changes/unreleased/Fixes-20240716-133703.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix over deletion of generated_metrics in partial parsing
+time: 2024-07-16T13:37:03.49651-04:00
+custom:
+  Author: gshank
+  Issue: "10450"

.changes/unreleased/Fixes-20240716-171427.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix error constructing warn_error_options
+time: 2024-07-16T17:14:27.837171-04:00
+custom:
+  Author: gshank
+  Issue: "10452"

.changes/unreleased/Fixes-20240731-095152.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: fix all_constraints access, disabled node parsing of non-uniquely named resources
+time: 2024-07-31T09:51:52.751135-04:00
+custom:
+  Author: michelleark gshank
+  Issue: "10509"
.changes/unreleased/Under the Hood-20240529-102814.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Enable record filtering by type.
+time: 2024-05-29T10:28:14.547624-05:00
+custom:
+  Author: emmyoop
+  Issue: "10240"

.changes/unreleased/Under the Hood-20240618-140652.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Remove IntermediateSnapshotNode
+time: 2024-06-18T14:06:52.618602-04:00
+custom:
+  Author: gshank
+  Issue: "10326"

.changes/unreleased/Under the Hood-20240701-131750.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Additional logging for skipped ephemeral models
+time: 2024-07-01T13:17:50.827788-04:00
+custom:
+  Author: gshank
+  Issue: "10389"

.changes/unreleased/Under the Hood-20240716-184859.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: bump black to 24.3.0
+time: 2024-07-16T18:48:59.651834-04:00
+custom:
+  Author: michelleark
+  Issue: "10454"

.changes/unreleased/Under the Hood-20240716-205703.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: generate protos with protoc version 5.26.1
+time: 2024-07-16T20:57:03.332448-04:00
+custom:
+  Author: michelleark
+  Issue: "10457"
.flake8 (1 changed line)
@@ -7,6 +7,7 @@ ignore =
     W503 # makes Flake8 work like black
     W504
     E203 # makes Flake8 work like black
+    E704 # makes Flake8 work like black
    E741
    E501 # long line checking is done in black
exclude = test/
.github/ISSUE_TEMPLATE/code-docs.yml (new file)
@@ -0,0 +1,18 @@
+name: 📄 Code docs
+description: Report an issue for markdown files within this repo, such as README, ARCHITECTURE, etc.
+title: "[Code docs] <title>"
+labels: ["triage"]
+body:
+  - type: markdown
+    attributes:
+      value: |
+        Thanks for taking the time to fill out this code docs issue!
+  - type: textarea
+    attributes:
+      label: Please describe the issue and your proposals.
+      description: |
+        Links? References? Anything that will give us more context about the issue you are encountering!
+
+        Tip: You can attach images by clicking this area to highlight it and then dragging files in.
+    validations:
+      required: false
.github/ISSUE_TEMPLATE/config.yml (3 added lines)
@@ -1,5 +1,8 @@
 blank_issues_enabled: false
 contact_links:
+  - name: Documentation
+    url: https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose
+    about: Problems and issues with dbt product documentation hosted on docs.getdbt.com. Issues for markdown files within this repo, such as README, should be opened using the "Code docs" template.
   - name: Ask the community for help
     url: https://github.com/dbt-labs/docs.getdbt.com/discussions
     about: Need help troubleshooting? Check out our guide on how to ask
.github/pull_request_template.md (14 changed lines)
@@ -1,7 +1,7 @@
-resolves #
+Resolves #

 <!---
-  Include the number of the issue addressed by this PR above if applicable.
+  Include the number of the issue addressed by this PR above, if applicable.
   PRs for code changes without an associated issue *will not be merged*.
   See CONTRIBUTING.md for more information.

@@ -26,8 +26,8 @@ resolves #

 ### Checklist

-- [ ] I have read [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md) and understand what's expected of me
+- [ ] I have read [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md) and understand what's expected of me.
-- [ ] I have run this code in development and it appears to resolve the stated issue
+- [ ] I have run this code in development, and it appears to resolve the stated issue.
-- [ ] This PR includes tests, or tests are not required/relevant for this PR
+- [ ] This PR includes tests, or tests are not required or relevant for this PR.
-- [ ] This PR has no interface changes (e.g. macros, cli, logs, json artifacts, config files, adapter interface, etc) or this PR has already received feedback and approval from Product or DX
+- [ ] This PR has no interface changes (e.g., macros, CLI, logs, JSON artifacts, config files, adapter interface, etc.) or this PR has already received feedback and approval from Product or DX.
-- [ ] This PR includes [type annotations](https://docs.python.org/3/library/typing.html) for new and modified functions
+- [ ] This PR includes [type annotations](https://docs.python.org/3/library/typing.html) for new and modified functions.
.github/workflows/check-artifact-changes.yml (2 changed lines)
@@ -32,7 +32,7 @@ jobs:
        run: |
          echo "CI failure: Artifact changes checked in core/dbt/artifacts directory."
          echo "Files changed: ${{ steps.check_artifact_changes.outputs.artifacts_changed_files }}"
-         echo "To bypass this check, confirm that the change is not breaking (https://github.com/dbt-labs/dbt-core/blob/main/core/dbt/artifacts/README.md#breaking-changes) and add the 'artifact_minor_upgrade' label to the PR."
+         echo "To bypass this check, confirm that the change is not breaking (https://github.com/dbt-labs/dbt-core/blob/main/core/dbt/artifacts/README.md#breaking-changes) and add the 'artifact_minor_upgrade' label to the PR. Modifications and additions to all fields require updates to https://github.com/dbt-labs/dbt-jsonschema."
          exit 1

      - name: CI check passed
.github/workflows/release.yml (21 added lines)
@@ -247,3 +247,24 @@ jobs:

    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_DEV_CORE_ALERTS }}
+
+  testing-slack-notification:
+    # sends notifications to #slackbot-test
+    name: Testing - Slack Notification
+    if: ${{ failure() && inputs.test_run && !inputs.nightly_release }}
+
+    needs:
+      [
+        bump-version-generate-changelog,
+        build-test-package,
+        github-release,
+        pypi-release,
+        docker-release,
+      ]
+
+    uses: dbt-labs/dbt-release/.github/workflows/slack-post-notification.yml@main
+    with:
+      status: "failure"
+
+    secrets:
+      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_TESTING_WEBHOOK_URL }}
@@ -1,4 +1,4 @@
 [settings]
 profile=black
 extend_skip_glob=.github/*,third-party-stubs/*,scripts/*
-known_first_party=dbt,dbt_adapters,dbt_common,dbt_extractor,dbt_semantic_interface
+known_first_party=dbt,dbt_adapters,dbt_common,dbt_extractor,dbt_semantic_interfaces
@@ -15,16 +15,19 @@ repos:
     args: [--unsafe]
   - id: check-json
   - id: end-of-file-fixer
+    exclude: schemas/dbt/manifest/
   - id: trailing-whitespace
     exclude_types:
       - "markdown"
   - id: check-case-conflict
 - repo: https://github.com/pycqa/isort
-  rev: 5.12.0
+  # rev must match what's in dev-requirements.txt
+  rev: 5.13.2
   hooks:
   - id: isort
 - repo: https://github.com/psf/black
-  rev: 22.3.0
+  # rev must match what's in dev-requirements.txt
+  rev: 24.3.0
   hooks:
   - id: black
   - id: black
@@ -34,6 +37,7 @@ repos:
     - "--check"
     - "--diff"
 - repo: https://github.com/pycqa/flake8
+  # rev must match what's in dev-requirements.txt
   rev: 4.0.1
   hooks:
   - id: flake8
@@ -41,6 +45,7 @@ repos:
     alias: flake8-check
     stages: [manual]
 - repo: https://github.com/pre-commit/mirrors-mypy
+  # rev must match what's in dev-requirements.txt
   rev: v1.4.1
   hooks:
   - id: mypy
@@ -170,9 +170,9 @@ Finally, you can also run a specific test or group of tests using [`pytest`](htt

 ```sh
 # run all unit tests in a file
-python3 -m pytest tests/unit/test_base_column.py
+python3 -m pytest tests/unit/test_invocation_id.py
 # run a specific unit test
-python3 -m pytest tests/unit/test_base_column.py::TestNumericType::test__numeric_type
+python3 -m pytest tests/unit/test_invocation_id.py::TestInvocationId::test_invocation_id
 # run specific Postgres functional tests
 python3 -m pytest tests/functional/sources
 ```
Makefile (4 added lines)
@@ -144,3 +144,7 @@ help: ## Show this help message.
	@echo
	@echo 'options:'
	@echo 'use USE_DOCKER=true to run target in a docker container'
+
+.PHONY: json_schema
+json_schema: ## Update generated JSON schema using code changes.
+	scripts/collect-artifact-schema.py --path schemas
codecov.yml (26 changed lines)
@@ -1,6 +1,7 @@
 ignore:
   - ".github"
   - ".changes"
+
 coverage:
   status:
     project:
@@ -11,3 +12,28 @@ coverage:
       default:
         target: auto
         threshold: 80%
+
+comment:
+  layout: "header, diff, flags, components" # show component info in the PR comment
+
+component_management:
+  default_rules: # default rules that will be inherited by all components
+    statuses:
+      - type: project # in this case every component that doens't have a status defined will have a project type one
+        target: auto
+        threshold: 0.1%
+      - type: patch
+        target: 80%
+  individual_components:
+    - component_id: unittests
+      name: "Unit Tests"
+      flag_regexes:
+        - "unit"
+      statuses:
+        - type: patch
+          target: 80%
+          threshold: 5%
+    - component_id: integrationtests
+      name: "Integration Tests"
+      flag_regexes:
+        - "integration"
@@ -29,6 +29,10 @@ All existing resources are defined under `dbt/artifacts/resources/v1`.

 ## Making changes to dbt/artifacts

+### All changes
+
+All changes to any fields will require a manual update to [dbt-jsonschema](https://github.com/dbt-labs/dbt-jsonschema) to ensure live checking continues to work.
+
 ### Non-breaking changes

 Freely make incremental, non-breaking changes in-place to the latest major version of any artifact (minor or patch bumps). The only changes that are fully forward and backward compatible are:
@@ -38,6 +38,7 @@ from dbt.artifacts.resources.v1.macro import Macro, MacroArgument, MacroDependsOn
 from dbt.artifacts.resources.v1.metric import (
     ConstantPropertyInput,
     ConversionTypeParams,
+    CumulativeTypeParams,
     Metric,
     MetricConfig,
     MetricInput,
@@ -45,7 +46,7 @@ from dbt.artifacts.resources.v1.metric import (
     MetricTimeWindow,
     MetricTypeParams,
 )
-from dbt.artifacts.resources.v1.model import Model, ModelConfig
+from dbt.artifacts.resources.v1.model import Model, ModelConfig, TimeSpine
 from dbt.artifacts.resources.v1.owner import Owner
 from dbt.artifacts.resources.v1.saved_query import (
     Export,
@@ -10,6 +10,7 @@ from dbt_common.contracts.config.properties import AdditionalPropertiesMixin
 from dbt_common.contracts.constraints import ColumnLevelConstraint
 from dbt_common.contracts.util import Mergeable
 from dbt_common.dataclass_schema import ExtensibleDbtClassMixin, dbtClassMixin
+from dbt_semantic_interfaces.type_enums import TimeGranularity

 NodeVersion = Union[str, float]

@@ -66,6 +67,7 @@ class ColumnInfo(AdditionalPropertiesMixin, ExtensibleDbtClassMixin):
     quote: Optional[bool] = None
     tags: List[str] = field(default_factory=list)
     _extra: Dict[str, Any] = field(default_factory=dict)
+    granularity: Optional[TimeGranularity] = None


 @dataclass
@@ -2,13 +2,6 @@ import time
 from dataclasses import dataclass, field
 from typing import Any, Dict, List, Literal, Optional

-from dbt_semantic_interfaces.references import MeasureReference, MetricReference
-from dbt_semantic_interfaces.type_enums import (
-    ConversionCalculationType,
-    MetricType,
-    TimeGranularity,
-)
-
 from dbt.artifacts.resources.base import GraphResource
 from dbt.artifacts.resources.types import NodeType
 from dbt.artifacts.resources.v1.components import DependsOn, RefArgs
@@ -18,6 +11,13 @@ from dbt.artifacts.resources.v1.semantic_layer_components import (
 )
 from dbt_common.contracts.config.base import BaseConfig, CompareBehavior, MergeBehavior
 from dbt_common.dataclass_schema import dbtClassMixin
+from dbt_semantic_interfaces.references import MeasureReference, MetricReference
+from dbt_semantic_interfaces.type_enums import (
+    ConversionCalculationType,
+    MetricType,
+    PeriodAggregation,
+    TimeGranularity,
+)

 """
 The following classes are dataclasses which are used to construct the Metric
@@ -80,6 +80,13 @@ class ConversionTypeParams(dbtClassMixin):
     constant_properties: Optional[List[ConstantPropertyInput]] = None


+@dataclass
+class CumulativeTypeParams(dbtClassMixin):
+    window: Optional[MetricTimeWindow] = None
+    grain_to_date: Optional[TimeGranularity] = None
+    period_agg: PeriodAggregation = PeriodAggregation.FIRST
+
+
 @dataclass
 class MetricTypeParams(dbtClassMixin):
     measure: Optional[MetricInputMeasure] = None
@@ -91,6 +98,7 @@ class MetricTypeParams(dbtClassMixin):
     grain_to_date: Optional[TimeGranularity] = None
     metrics: Optional[List[MetricInput]] = None
     conversion_type_params: Optional[ConversionTypeParams] = None
+    cumulative_type_params: Optional[CumulativeTypeParams] = None


 @dataclass
@@ -113,6 +121,7 @@ class Metric(GraphResource):
     type_params: MetricTypeParams
     filter: Optional[WhereFilterIntersection] = None
     metadata: Optional[SourceFileMetadata] = None
+    time_granularity: Optional[TimeGranularity] = None
     resource_type: Literal[NodeType.Metric]
     meta: Dict[str, Any] = field(default_factory=dict, metadata=MergeBehavior.Update.meta())
     tags: List[str] = field(default_factory=list)
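For orientation, here is a minimal sketch of how the new cumulative-metric pieces fit together, built only from the dataclasses shown in the hunks above (illustrative usage, not part of the changeset; import paths follow the modules referenced in this diff):

```python
from dbt.artifacts.resources.v1.metric import CumulativeTypeParams, MetricTypeParams
from dbt_semantic_interfaces.type_enums import PeriodAggregation, TimeGranularity

# A cumulative metric that accumulates from the start of the month; period_agg
# defaults to PeriodAggregation.FIRST per the new dataclass, shown explicitly here.
type_params = MetricTypeParams(
    cumulative_type_params=CumulativeTypeParams(
        grain_to_date=TimeGranularity.MONTH,
        period_agg=PeriodAggregation.FIRST,
    )
)
print(type_params.cumulative_type_params)
```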
@@ -11,6 +11,7 @@ from dbt.artifacts.resources.v1.components import (
 from dbt.artifacts.resources.v1.config import NodeConfig
 from dbt_common.contracts.config.base import MergeBehavior
 from dbt_common.contracts.constraints import ModelLevelConstraint
+from dbt_common.dataclass_schema import dbtClassMixin


 @dataclass
@@ -21,6 +22,11 @@ class ModelConfig(NodeConfig):
     )


+@dataclass
+class TimeSpine(dbtClassMixin):
+    standard_granularity_column: str
+
+
 @dataclass
 class Model(CompiledResource):
     resource_type: Literal[NodeType.Model]
@@ -32,6 +38,7 @@ class Model(CompiledResource):
     deprecation_date: Optional[datetime] = None
     defer_relation: Optional[DeferRelation] = None
     primary_key: List[str] = field(default_factory=list)
+    time_spine: Optional[TimeSpine] = None

     def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None):
         dct = super().__post_serialize__(dct, context)
@@ -4,10 +4,6 @@ import time
 from dataclasses import dataclass, field
 from typing import Any, Dict, List, Literal, Optional

-from dbt_semantic_interfaces.type_enums.export_destination_type import (
-    ExportDestinationType,
-)
-
 from dbt.artifacts.resources.base import GraphResource
 from dbt.artifacts.resources.types import NodeType
 from dbt.artifacts.resources.v1.components import DependsOn, RefArgs
@@ -17,6 +13,9 @@ from dbt.artifacts.resources.v1.semantic_layer_components import (
 )
 from dbt_common.contracts.config.base import BaseConfig, CompareBehavior, MergeBehavior
 from dbt_common.dataclass_schema import dbtClassMixin
+from dbt_semantic_interfaces.type_enums.export_destination_type import (
+    ExportDestinationType,
+)


 @dataclass
@@ -1,13 +1,12 @@
 from dataclasses import dataclass
 from typing import List, Sequence, Tuple

+from dbt_common.dataclass_schema import dbtClassMixin
 from dbt_semantic_interfaces.call_parameter_sets import FilterCallParameterSets
 from dbt_semantic_interfaces.parsing.where_filter.where_filter_parser import (
     WhereFilterParser,
 )

-from dbt_common.dataclass_schema import dbtClassMixin
-

 @dataclass
 class WhereFilter(dbtClassMixin):
@@ -2,6 +2,11 @@ import time
 from dataclasses import dataclass, field
 from typing import Any, Dict, List, Optional, Sequence

+from dbt.artifacts.resources import SourceFileMetadata
+from dbt.artifacts.resources.base import GraphResource
+from dbt.artifacts.resources.v1.components import DependsOn, RefArgs
+from dbt_common.contracts.config.base import BaseConfig, CompareBehavior, MergeBehavior
+from dbt_common.dataclass_schema import dbtClassMixin
 from dbt_semantic_interfaces.references import (
     DimensionReference,
     EntityReference,
@@ -17,12 +22,6 @@ from dbt_semantic_interfaces.type_enums import (
     TimeGranularity,
 )

-from dbt.artifacts.resources import SourceFileMetadata
-from dbt.artifacts.resources.base import GraphResource
-from dbt.artifacts.resources.v1.components import DependsOn, RefArgs
-from dbt_common.contracts.config.base import BaseConfig, CompareBehavior, MergeBehavior
-from dbt_common.dataclass_schema import dbtClassMixin
-
 """
 The classes in this file are dataclasses which are used to construct the Semantic
 Model node in dbt-core. Additionally, these classes need to at a minimum support
@@ -18,39 +18,34 @@ class SnapshotConfig(NodeConfig):
     # Not using Optional because of serialization issues with a Union of str and List[str]
     check_cols: Union[str, List[str], None] = None

-    @classmethod
-    def validate(cls, data):
-        super().validate(data)
-        # Note: currently you can't just set these keys in schema.yml because this validation
-        # will fail when parsing the snapshot node.
-        if not data.get("strategy") or not data.get("unique_key") or not data.get("target_schema"):
+    def final_validate(self):
+        if not self.strategy or not self.unique_key:
             raise ValidationError(
-                "Snapshots must be configured with a 'strategy', 'unique_key', "
-                "and 'target_schema'."
+                "Snapshots must be configured with a 'strategy' and 'unique_key'."
             )
-        if data.get("strategy") == "check":
-            if not data.get("check_cols"):
+        if self.strategy == "check":
+            if not self.check_cols:
                 raise ValidationError(
                     "A snapshot configured with the check strategy must "
                     "specify a check_cols configuration."
                 )
-            if isinstance(data["check_cols"], str) and data["check_cols"] != "all":
+            if isinstance(self.check_cols, str) and self.check_cols != "all":
                 raise ValidationError(
-                    f"Invalid value for 'check_cols': {data['check_cols']}. "
+                    f"Invalid value for 'check_cols': {self.check_cols}. "
                     "Expected 'all' or a list of strings."
                 )
-        elif data.get("strategy") == "timestamp":
-            if not data.get("updated_at"):
+        elif self.strategy == "timestamp":
+            if not self.updated_at:
                 raise ValidationError(
                     "A snapshot configured with the timestamp strategy "
                     "must specify an updated_at configuration."
                 )
-            if data.get("check_cols"):
+            if self.check_cols:
                 raise ValidationError("A 'timestamp' snapshot should not have 'check_cols'")
         # If the strategy is not 'check' or 'timestamp' it's a custom strategy,
         # formerly supported with GenericSnapshotConfig

-        if data.get("materialized") and data.get("materialized") != "snapshot":
+        if self.materialized and self.materialized != "snapshot":
             raise ValidationError("A snapshot must have a materialized value of 'snapshot'")

     # Called by "calculate_node_config_dict" in ContextConfigGenerator
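The practical effect of this hunk, together with the "Fix snapshot config to work in yaml files" changelog entry above: `target_schema` is no longer mandatory, and the checks now run on the resolved config object (`final_validate`) rather than on the raw dict at parse time, which is what lets snapshot configs defined in YAML pass. A standalone paraphrase of the rules now enforced (illustrative only; the real method lives on `SnapshotConfig`):

```python
# Illustrative stand-in for SnapshotConfig.final_validate; operates on a plain dict
# so it can run on its own. The real method reads the same fields off the config object.
def check_snapshot_config(cfg: dict) -> None:
    if not cfg.get("strategy") or not cfg.get("unique_key"):
        raise ValueError("Snapshots must be configured with a 'strategy' and 'unique_key'.")
    if cfg["strategy"] == "check":
        check_cols = cfg.get("check_cols")
        if not check_cols:
            raise ValueError("The check strategy requires a check_cols configuration.")
        if isinstance(check_cols, str) and check_cols != "all":
            raise ValueError("check_cols must be 'all' or a list of strings.")
    elif cfg["strategy"] == "timestamp":
        if not cfg.get("updated_at"):
            raise ValueError("The timestamp strategy requires an updated_at configuration.")
        if cfg.get("check_cols"):
            raise ValueError("A 'timestamp' snapshot should not have 'check_cols'.")
    if cfg.get("materialized", "snapshot") != "snapshot":
        raise ValueError("A snapshot must have a materialized value of 'snapshot'.")


check_snapshot_config({"strategy": "timestamp", "unique_key": "id", "updated_at": "updated_at"})
```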
@@ -77,8 +77,11 @@ class BaseArtifactMetadata(dbtClassMixin):
 # remote-compile-result
 # remote-execution-result
 # remote-run-result
+S = TypeVar("S", bound="VersionedSchema")
+
+
 def schema_version(name: str, version: int):
-    def inner(cls: Type[VersionedSchema]):
+    def inner(cls: Type[S]):
         cls.dbt_schema_version = SchemaVersion(
             name=name,
             version=version,
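This hunk (part of the "Fix typing for artifact schemas" fix) introduces a bound TypeVar so the decorator can preserve the concrete subclass type for type checkers instead of widening it to the base class. A self-contained sketch of the pattern; the real decorator attaches a SchemaVersion object rather than a string, and the class and artifact names below are hypothetical:

```python
from typing import Type, TypeVar


class VersionedSchema:  # stand-in for the real base class
    dbt_schema_version: str = ""


S = TypeVar("S", bound=VersionedSchema)


def schema_version(name: str, version: int):
    def inner(cls: Type[S]) -> Type[S]:
        # attach version metadata and hand back the same subclass type it received
        cls.dbt_schema_version = f"{name}/v{version}"
        return cls

    return inner


@schema_version("example-artifact", 1)  # hypothetical name/version for illustration
class ExampleArtifact(VersionedSchema):
    pass
```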
@@ -1,2 +1,11 @@
 # alias to latest
 from dbt.artifacts.schemas.catalog.v1.catalog import *  # noqa
+from dbt_common.contracts.metadata import (
+    CatalogKey,
+    CatalogTable,
+    ColumnMap,
+    ColumnMetadata,
+    StatsDict,
+    StatsItem,
+    TableMetadata,
+)
@@ -1,71 +1,18 @@
 from dataclasses import dataclass, field
 from datetime import datetime
-from typing import Any, Dict, List, NamedTuple, Optional, Union
+from typing import Any, Dict, List, Optional, Union

 from dbt.artifacts.schemas.base import (
     ArtifactMixin,
     BaseArtifactMetadata,
     schema_version,
 )
+from dbt_common.contracts.metadata import CatalogTable
 from dbt_common.dataclass_schema import dbtClassMixin
-from dbt_common.utils.formatting import lowercase

 Primitive = Union[bool, str, float, None]
 PrimitiveDict = Dict[str, Primitive]

-CatalogKey = NamedTuple(
-    "CatalogKey", [("database", Optional[str]), ("schema", str), ("name", str)]
-)
-
-
-@dataclass
-class StatsItem(dbtClassMixin):
-    id: str
-    label: str
-    value: Primitive
-    include: bool
-    description: Optional[str] = None
-
-
-StatsDict = Dict[str, StatsItem]
-
-
-@dataclass
-class ColumnMetadata(dbtClassMixin):
-    type: str
-    index: int
-    name: str
-    comment: Optional[str] = None
-
-
-ColumnMap = Dict[str, ColumnMetadata]
-
-
-@dataclass
-class TableMetadata(dbtClassMixin):
-    type: str
-    schema: str
-    name: str
-    database: Optional[str] = None
-    comment: Optional[str] = None
-    owner: Optional[str] = None
-
-
-@dataclass
-class CatalogTable(dbtClassMixin):
-    metadata: TableMetadata
-    columns: ColumnMap
-    stats: StatsDict
-    # the same table with two unique IDs will just be listed two times
-    unique_id: Optional[str] = None
-
-    def key(self) -> CatalogKey:
-        return CatalogKey(
-            lowercase(self.metadata.database),
-            self.metadata.schema.lower(),
-            self.metadata.name.lower(),
-        )
-

 @dataclass
 class CatalogMetadata(BaseArtifactMetadata):
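One consumer-facing consequence of the two catalog hunks above: the catalog primitives are now defined in dbt-common and re-exported from the dbt-core catalog package, so either import path resolves to the same classes. A small sketch, assuming dbt-core and dbt-common versions that include this change:

```python
from dbt.artifacts.schemas.catalog import CatalogKey as ReExportedCatalogKey
from dbt_common.contracts.metadata import CatalogKey

# The re-export and the defining module hand back the same class object.
assert ReExportedCatalogKey is CatalogKey

# CatalogKey keeps its NamedTuple shape: (database, schema, name).
key = CatalogKey(database="analytics", schema="public", name="orders")
print(key)
```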
@@ -158,7 +158,8 @@ class RunResultsArtifact(ExecutionResult, ArtifactMixin):
     @classmethod
     def upgrade_schema_version(cls, data):
         """This overrides the "upgrade_schema_version" call in VersionedSchema (via
-        ArtifactMixin) to modify the dictionary passed in from earlier versions of the run_results."""
+        ArtifactMixin) to modify the dictionary passed in from earlier versions of the run_results.
+        """
         run_results_schema_version = get_artifact_schema_version(data)
         # If less than the current version (v5), preprocess contents to match latest schema version
         if run_results_schema_version <= 5:
@@ -57,6 +57,7 @@ def convert_config(config_name, config_value):
         ret = WarnErrorOptions(
             include=config_value.get("include", []),
             exclude=config_value.get("exclude", []),
+            silence=config_value.get("silence", []),
             valid_error_names=ALL_EVENT_NAMES,
         )
     return ret
@@ -91,6 +92,8 @@ class Flags:
         # Set the default flags.
         for key, value in FLAGS_DEFAULTS.items():
             object.__setattr__(self, key, value)
+        # Use to handle duplicate params in _assign_params
+        flags_defaults_list = list(FLAGS_DEFAULTS.keys())

         if ctx is None:
             ctx = get_current_context()
@@ -172,13 +175,29 @@ class Flags:
                         old_name=dep_param.envvar,
                         new_name=new_param.envvar,
                     )
+                # end deprecated_params
+
                 # Set the flag value.
-                is_duplicate = hasattr(self, param_name.upper())
+                is_duplicate = (
+                    hasattr(self, param_name.upper())
+                    and param_name.upper() not in flags_defaults_list
+                )
+                # First time through, set as though FLAGS_DEFAULTS hasn't been set, so not a duplicate.
+                # Subsequent pass (to process "parent" params) should be treated as duplicates.
+                if param_name.upper() in flags_defaults_list:
+                    flags_defaults_list.remove(param_name.upper())
+                # Note: the following determines whether parameter came from click default,
+                # not from FLAGS_DEFAULTS in __init__.
                 is_default = ctx.get_parameter_source(param_name) == ParameterSource.DEFAULT
+                is_envvar = ctx.get_parameter_source(param_name) == ParameterSource.ENVIRONMENT

                 flag_name = (new_name or param_name).upper()

-                if (is_duplicate and not is_default) or not is_duplicate:
+                # envvar flags are assigned in either parent or child context if there
+                # isn't an overriding cli command flag.
+                # If the flag has been encountered as a child cli flag, we don't
+                # want to overwrite with parent envvar, since the commandline flag takes precedence.
+                if (is_duplicate and not (is_default or is_envvar)) or not is_duplicate:
                     object.__setattr__(self, flag_name, param_value)

                 # Track default assigned params.
@@ -289,6 +308,10 @@ class Flags:
             params_assigned_from_default, ["WARN_ERROR", "WARN_ERROR_OPTIONS"]
         )

+        # Handle arguments mutually exclusive with INLINE
+        self._assert_mutually_exclusive(params_assigned_from_default, ["SELECT", "INLINE"])
+        self._assert_mutually_exclusive(params_assigned_from_default, ["SELECTOR", "INLINE"])
+
         # Support lower cased access for legacy code.
         params = set(
             x for x in dir(self) if not callable(getattr(self, x)) and not x.startswith("__")
@@ -315,7 +338,9 @@
         """
         set_flag = None
         for flag in group:
-            flag_set_by_user = flag.lower() not in params_assigned_from_default
+            flag_set_by_user = (
+                hasattr(self, flag) and flag.lower() not in params_assigned_from_default
+            )
             if flag_set_by_user and set_flag:
                 raise DbtUsageException(
                     f"{flag.lower()}: not allowed with argument {set_flag.lower()}"
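The fix above leans on click's parameter-source tracking: a context can report whether each value came from the command line, an environment variable, or a default, which is what lets a child command's explicit CLI flag win over a parent-context env var. A small self-contained illustration of that click mechanism (not dbt's actual wiring; the option and env var names are hypothetical):

```python
import click
from click.core import ParameterSource


@click.command()
@click.option("--log-level", envvar="EXAMPLE_LOG_LEVEL", default="info")
def cmd(log_level: str) -> None:
    ctx = click.get_current_context()
    source = ctx.get_parameter_source("log_level")
    # COMMANDLINE beats ENVIRONMENT beats DEFAULT, mirroring the precedence enforced above.
    click.echo(f"log_level={log_level} (source={source.name})")


if __name__ == "__main__":
    cmd()
```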
@@ -274,6 +274,7 @@ def docs_generate(ctx, **kwargs):
 @click.pass_context
 @global_flags
 @p.browser
+@p.host
 @p.port
 @p.profiles_dir
 @p.project_dir
@@ -1,6 +1,6 @@
 from click import Choice, ParamType

-from dbt.config.utils import exclusive_primary_alt_value_setting, parse_cli_yaml_string
+from dbt.config.utils import normalize_warn_error_options, parse_cli_yaml_string
 from dbt.events import ALL_EVENT_NAMES
 from dbt.exceptions import OptionNotYamlDictError, ValidationError
 from dbt_common.exceptions import DbtValidationError
@@ -51,12 +51,7 @@ class WarnErrorOptionsType(YAML):
     def convert(self, value, param, ctx):
         # this function is being used by param in click
         include_exclude = super().convert(value, param, ctx)
-        exclusive_primary_alt_value_setting(
-            include_exclude, "include", "error", "warn_error_options"
-        )
-        exclusive_primary_alt_value_setting(
-            include_exclude, "exclude", "warn", "warn_error_options"
-        )
+        normalize_warn_error_options(include_exclude)

         return WarnErrorOptions(
             include=include_exclude.get("include", []),
@@ -135,6 +135,14 @@ full_refresh = click.option(
     is_flag=True,
 )

+host = click.option(
+    "--host",
+    envvar="DBT_HOST",
+    help="host to serve dbt docs on",
+    type=click.STRING,
+    default="127.0.0.1",
+)
+
 indirect_selection = click.option(
     "--indirect-selection",
     envvar="DBT_INDIRECT_SELECTION",
@@ -41,7 +41,13 @@ from dbt_common.events.functions import LOG_VERSION, fire_event
 from dbt_common.events.helpers import get_json_string_utcnow
 from dbt_common.exceptions import DbtBaseException as DbtException
 from dbt_common.invocation import reset_invocation_id
-from dbt_common.record import Recorder, RecorderMode, get_record_mode_from_env
+from dbt_common.record import (
+    Recorder,
+    RecorderMode,
+    get_record_mode_from_env,
+    get_record_types_from_dict,
+    get_record_types_from_env,
+)
 from dbt_common.utils import cast_dict_to_dict_of_strings


@@ -101,13 +107,23 @@ def preflight(func):

 def setup_record_replay():
     rec_mode = get_record_mode_from_env()
+    rec_types = get_record_types_from_env()

     recorder: Optional[Recorder] = None
     if rec_mode == RecorderMode.REPLAY:
-        recording_path = os.environ["DBT_REPLAY"]
-        recorder = Recorder(RecorderMode.REPLAY, recording_path)
+        previous_recording_path = os.environ.get("DBT_RECORDER_FILE_PATH")
+        recorder = Recorder(
+            RecorderMode.REPLAY, types=rec_types, previous_recording_path=previous_recording_path
+        )
+    elif rec_mode == RecorderMode.DIFF:
+        previous_recording_path = os.environ.get("DBT_RECORDER_FILE_PATH")
+        # ensure types match the previous recording
+        types = get_record_types_from_dict(previous_recording_path)
+        recorder = Recorder(
+            RecorderMode.DIFF, types=types, previous_recording_path=previous_recording_path
+        )
     elif rec_mode == RecorderMode.RECORD:
-        recorder = Recorder(RecorderMode.RECORD)
+        recorder = Recorder(RecorderMode.RECORD, types=rec_types)

     get_invocation_context().recorder = recorder

@@ -116,7 +132,10 @@ def tear_down_record_replay():
     recorder = get_invocation_context().recorder
     if recorder is not None:
         if recorder.mode == RecorderMode.RECORD:
-            recorder.write("recording.json")
+            recorder.write()
+        if recorder.mode == RecorderMode.DIFF:
+            recorder.write()
+            recorder.write_diffs(diff_file_name="recording_diffs.json")
         elif recorder.mode == RecorderMode.REPLAY:
             recorder.write_diffs("replay_diffs.json")

@@ -160,9 +179,11 @@ def postflight(func):
                     process_in_blocks=rusage.ru_inblock,
                     process_out_blocks=rusage.ru_oublock,
                 ),
+                (
                     EventLevel.INFO
                     if "flags" in ctx.obj and ctx.obj["flags"].SHOW_RESOURCE_REPORT
-                    else None,
+                    else None
+                ),
             )

             fire_event(
@@ -1,11 +1,13 @@
-from typing import Any, Dict, Optional
+from typing import Any, Dict, List, Optional, Union

 import jinja2

-from dbt.exceptions import MacroNamespaceNotStringError
+from dbt.artifacts.resources import RefArgs
+from dbt.exceptions import MacroNamespaceNotStringError, ParsingError
 from dbt_common.clients.jinja import get_environment
 from dbt_common.exceptions.macros import MacroNameNotStringError
 from dbt_common.tests import test_caching_enabled
+from dbt_extractor import ExtractionError, py_extract_from_source  # type: ignore

 _TESTING_MACRO_CACHE: Optional[Dict[str, Any]] = {}

@@ -153,3 +155,39 @@ def statically_parse_adapter_dispatch(func_call, ctx, db_wrapper):
             possible_macro_calls.append(f"{package_name}.{func_name}")

     return possible_macro_calls
+
+
+def statically_parse_ref_or_source(expression: str) -> Union[RefArgs, List[str]]:
+    """
+    Returns a RefArgs or List[str] object, corresponding to ref or source respectively, given an input jinja expression.
+
+    input: str representing how input node is referenced in tested model sql
+      * examples:
+        - "ref('my_model_a')"
+        - "ref('my_model_a', version=3)"
+        - "ref('package', 'my_model_a', version=3)"
+        - "source('my_source_schema', 'my_source_name')"
+
+    If input is not a well-formed jinja ref or source expression, a ParsingError is raised.
+    """
+    ref_or_source: Union[RefArgs, List[str]]
+
+    try:
+        statically_parsed = py_extract_from_source(f"{{{{ {expression} }}}}")
+    except ExtractionError:
+        raise ParsingError(f"Invalid jinja expression: {expression}")
+
+    if statically_parsed.get("refs"):
+        raw_ref = list(statically_parsed["refs"])[0]
+        ref_or_source = RefArgs(
+            package=raw_ref.get("package"),
+            name=raw_ref.get("name"),
+            version=raw_ref.get("version"),
+        )
+    elif statically_parsed.get("sources"):
+        source_name, source_table_name = list(statically_parsed["sources"])[0]
+        ref_or_source = [source_name, source_table_name]
+    else:
+        raise ParsingError(f"Invalid ref or source expression: {expression}")
+
+    return ref_or_source
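The new statically_parse_ref_or_source helper above is what the compiler and manifest hunks further down in this diff build on. A minimal usage sketch, assuming an environment where dbt.clients.jinja_static is importable; the example inputs come from the docstring and the return shapes from the function body:

    from dbt.clients.jinja_static import statically_parse_ref_or_source

    ref_args = statically_parse_ref_or_source("ref('my_model_a', version=3)")
    # a ref expression yields RefArgs(package=None, name='my_model_a', version=3)

    source_parts = statically_parse_ref_or_source("source('my_source_schema', 'my_source_name')")
    # a source expression yields a plain list: ['my_source_schema', 'my_source_name']
    # any other expression raises ParsingError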
@@ -21,6 +21,7 @@ from dbt.contracts.graph.nodes import (
     InjectedCTE,
     ManifestNode,
     ManifestSQLNode,
+    ModelNode,
     SeedNode,
     UnitTestDefinition,
     UnitTestNode,
@@ -29,12 +30,15 @@ from dbt.events.types import FoundStats, WritingInjectedSQLForNode
 from dbt.exceptions import (
     DbtInternalError,
     DbtRuntimeError,
+    ForeignKeyConstraintToSyntaxError,
     GraphDependencyNotFoundError,
+    ParsingError,
 )
 from dbt.flags import get_flags
 from dbt.graph import Graph
 from dbt.node_types import ModelLanguage, NodeType
 from dbt_common.clients.system import make_directory
+from dbt_common.contracts.constraints import ConstraintType
 from dbt_common.events.contextvars import get_node_info
 from dbt_common.events.format import pluralize
 from dbt_common.events.functions import fire_event
@@ -437,8 +441,31 @@ class Compiler:
             relation_name = str(relation_cls.create_from(self.config, node))
             node.relation_name = relation_name

+        # Compile 'ref' and 'source' expressions in foreign key constraints
+        if isinstance(node, ModelNode):
+            for constraint in node.all_constraints:
+                if constraint.type == ConstraintType.foreign_key and constraint.to:
+                    constraint.to = self._compile_relation_for_foreign_key_constraint_to(
+                        manifest, node, constraint.to
+                    )
+
         return node

+    def _compile_relation_for_foreign_key_constraint_to(
+        self, manifest: Manifest, node: ManifestSQLNode, to_expression: str
+    ) -> str:
+        try:
+            foreign_key_node = manifest.find_node_from_ref_or_source(to_expression)
+        except ParsingError:
+            raise ForeignKeyConstraintToSyntaxError(node, to_expression)
+
+        if not foreign_key_node:
+            raise GraphDependencyNotFoundError(node, to_expression)
+
+        adapter = get_adapter(self.config)
+        relation_name = str(adapter.Relation.create_from(self.config, foreign_key_node))
+        return relation_name
+
     # This method doesn't actually "compile" any of the nodes. That is done by the
     # "compile_node" method. This creates a Linker and builds the networkx graph,
     # writes out the graph.gpickle file, and prints the stats, returning a Graph object.
@@ -10,7 +10,7 @@ from dbt import deprecations
 from dbt.adapters.contracts.connection import QueryComment
 from dbt.clients.yaml_helper import load_yaml_text
 from dbt.config.selectors import SelectorDict
-from dbt.config.utils import exclusive_primary_alt_value_setting
+from dbt.config.utils import normalize_warn_error_options
 from dbt.constants import (
     DBT_PROJECT_FILE_NAME,
     DEPENDENCIES_FILE_NAME,
@@ -828,13 +828,8 @@ def read_project_flags(project_dir: str, profiles_dir: str) -> ProjectFlags:
         if project_flags is not None:
             # handle collapsing `include` and `error` as well as collapsing `exclude` and `warn`
             # for warn_error_options
-            warn_error_options = project_flags.get("warn_error_options")
-            exclusive_primary_alt_value_setting(
-                warn_error_options, "include", "error", "warn_error_options"
-            )
-            exclusive_primary_alt_value_setting(
-                warn_error_options, "exclude", "warn", "warn_error_options"
-            )
+            warn_error_options = project_flags.get("warn_error_options", {})
+            normalize_warn_error_options(warn_error_options)

             ProjectFlags.validate(project_flags)
             return ProjectFlags.from_dict(project_flags)
@@ -290,9 +290,9 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
             project_name=self.project_name,
             project_id=self.hashed_name(),
             user_id=tracking.active_user.id if tracking.active_user else None,
-            send_anonymous_usage_stats=get_flags().SEND_ANONYMOUS_USAGE_STATS
-            if tracking.active_user
-            else None,
+            send_anonymous_usage_stats=(
+                get_flags().SEND_ANONYMOUS_USAGE_STATS if tracking.active_user else None
+            ),
             adapter_type=self.credentials.type,
         )

@@ -49,5 +49,18 @@ def exclusive_primary_alt_value_setting(
             f"Only `{alt}` or `{primary}` can be specified{where}, not both"
         )

-    if alt_options:
-        dictionary[primary] = alt_options
+    if alt in dictionary:
+        alt_value = dictionary.pop(alt)
+        dictionary[primary] = alt_value
+
+
+def normalize_warn_error_options(warn_error_options: Dict[str, Any]) -> None:
+    exclusive_primary_alt_value_setting(
+        warn_error_options, "include", "error", "warn_error_options"
+    )
+    exclusive_primary_alt_value_setting(
+        warn_error_options, "exclude", "warn", "warn_error_options"
+    )
+    for key in ("include", "exclude", "silence"):
+        if key in warn_error_options and warn_error_options[key] is None:
+            warn_error_options[key] = []
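An illustrative sketch (not part of the diff) of what the new normalize_warn_error_options helper does to a warn_error_options dictionary, based on the function body above; the "Deprecations" value is only an arbitrary example:

    warn_error_options = {"error": ["Deprecations"], "warn": None, "silence": None}
    normalize_warn_error_options(warn_error_options)
    # "error" collapses into "include", "warn" into "exclude", and None values become []:
    # {"include": ["Deprecations"], "exclude": [], "silence": []}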
@@ -1,3 +1,5 @@
+from dbt_semantic_interfaces.type_enums import TimeGranularity
+
 DEFAULT_ENV_PLACEHOLDER = "DBT_DEFAULT_PLACEHOLDER"

 SECRET_PLACEHOLDER = "$$$DBT_SECRET_START$$${}$$$DBT_SECRET_END$$$"
@@ -15,5 +17,8 @@ DEPENDENCIES_FILE_NAME = "dependencies.yml"
 PACKAGE_LOCK_FILE_NAME = "package-lock.yml"
 MANIFEST_FILE_NAME = "manifest.json"
 SEMANTIC_MANIFEST_FILE_NAME = "semantic_manifest.json"
+LEGACY_TIME_SPINE_MODEL_NAME = "metricflow_time_spine"
+LEGACY_TIME_SPINE_GRANULARITY = TimeGranularity.DAY
+MINIMUM_REQUIRED_TIME_SPINE_GRANULARITY = TimeGranularity.DAY
 PARTIAL_PARSE_FILE_NAME = "partial_parse.msgpack"
 PACKAGE_LOCK_HASH_KEY = "sha1_hash"
@@ -27,8 +27,7 @@ class ConfigSource:
     def __init__(self, project):
         self.project = project

-    def get_config_dict(self, resource_type: NodeType):
-        ...
+    def get_config_dict(self, resource_type: NodeType): ...


 class UnrenderedConfig(ConfigSource):
@@ -130,12 +129,12 @@ class BaseContextConfigGenerator(Generic[T]):
         return self._project_configs(self._active_project, fqn, resource_type)

     @abstractmethod
-    def _update_from_config(self, result: T, partial: Dict[str, Any], validate: bool = False) -> T:
-        ...
+    def _update_from_config(
+        self, result: T, partial: Dict[str, Any], validate: bool = False
+    ) -> T: ...

     @abstractmethod
-    def initial_result(self, resource_type: NodeType, base: bool) -> T:
-        ...
+    def initial_result(self, resource_type: NodeType, base: bool) -> T: ...

     def calculate_node_config(
         self,
@@ -181,8 +180,7 @@ class BaseContextConfigGenerator(Generic[T]):
         project_name: str,
         base: bool,
         patch_config_dict: Optional[Dict[str, Any]] = None,
-    ) -> Dict[str, Any]:
-        ...
+    ) -> Dict[str, Any]: ...


 class ContextConfigGenerator(BaseContextConfigGenerator[C]):
@@ -239,8 +239,7 @@ class BaseRefResolver(BaseResolver):
     @abc.abstractmethod
     def resolve(
         self, name: str, package: Optional[str] = None, version: Optional[NodeVersion] = None
-    ) -> RelationProxy:
-        ...
+    ) -> RelationProxy: ...

     def _repack_args(
         self, name: str, package: Optional[str], version: Optional[NodeVersion]
@@ -306,8 +305,7 @@ class BaseSourceResolver(BaseResolver):

 class BaseMetricResolver(BaseResolver):
     @abc.abstractmethod
-    def resolve(self, name: str, package: Optional[str] = None) -> MetricReference:
-        ...
+    def resolve(self, name: str, package: Optional[str] = None) -> MetricReference: ...

     def _repack_args(self, name: str, package: Optional[str]) -> List[str]:
         if package is None:
@@ -341,8 +339,7 @@ class BaseMetricResolver(BaseResolver):


 class Config(Protocol):
-    def __init__(self, model, context_config: Optional[ContextConfig]):
-        ...
+    def __init__(self, model, context_config: Optional[ContextConfig]): ...


 # Implementation of "config(..)" calls in models
@@ -977,7 +974,7 @@ class ProviderContext(ManifestContext):
             table = agate_helper.from_csv(path, text_columns=column_types, delimiter=delimiter)
         except ValueError as e:
             raise LoadAgateTableValueError(e, node=self.model)
-        table.original_abspath = os.path.abspath(path)
         return table

     @contextproperty()
@@ -4,7 +4,7 @@
 ## Artifacts

 ### Generating JSON schemas
-A helper script, `sripts/collect-artifact-schema.py` is available to generate json schemas corresponding to versioned artifacts (`ArtifactMixin`s).
+A helper script, `scripts/collect-artifact-schema.py` is available to generate json schemas corresponding to versioned artifacts (`ArtifactMixin`s).

 This script is necessary to run when a new artifact schema version is created, or when changes are made to existing artifact versions, and writes json schema to `schema/dbt/<artifact>/v<version>.json`.

@@ -192,8 +192,13 @@ class SchemaSourceFile(BaseSourceFile):
     sources: List[str] = field(default_factory=list)
     exposures: List[str] = field(default_factory=list)
     metrics: List[str] = field(default_factory=list)
-    # metrics generated from semantic_model measures
+    # The following field will no longer be used. Leaving
+    # here to avoid breaking existing projects. To be removed
+    # later if possible.
     generated_metrics: List[str] = field(default_factory=list)
+    # metrics generated from semantic_model measures. The key is
+    # the name of the semantic_model, so that we can find it later.
+    metrics_from_measures: Dict[str, Any] = field(default_factory=dict)
     groups: List[str] = field(default_factory=list)
     # node patches contain models, seeds, snapshots, analyses
     ndp: List[str] = field(default_factory=list)
@@ -259,6 +264,40 @@ class SchemaSourceFile(BaseSourceFile):
             return self.data_tests[yaml_key][name]
         return []

+    def add_metrics_from_measures(self, semantic_model_name: str, metric_unique_id: str):
+        if self.generated_metrics:
+            # Probably not needed, but for safety sake, convert the
+            # old generated_metrics to metrics_from_measures.
+            self.fix_metrics_from_measures()
+        if semantic_model_name not in self.metrics_from_measures:
+            self.metrics_from_measures[semantic_model_name] = []
+        self.metrics_from_measures[semantic_model_name].append(metric_unique_id)
+
+    def fix_metrics_from_measures(self):
+        # Temporary method to fix up existing projects with a partial parse file.
+        # This should only be called if SchemaSourceFile in a msgpack
+        # pack manifest has an existing "generated_metrics" list, to turn it
+        # it into a "metrics_from_measures" dictionary, so that we can
+        # correctly partially parse.
+        # This code can be removed when "generated_metrics" is removed.
+        generated_metrics = self.generated_metrics
+        self.generated_metrics = []  # Should never be needed again
+        # For each metric_unique_id we loop through the semantic models
+        # looking for the name of the "measure" which generated the metric.
+        # When it's found, add it to "metrics_from_measures", with a key
+        # of the semantic_model name, and a list of metrics.
+        for metric_unique_id in generated_metrics:
+            parts = metric_unique_id.split(".")
+            # get the metric_name
+            metric_name = parts[-1]
+            if "semantic_models" in self.dict_from_yaml:
+                for sem_model in self.dict_from_yaml["semantic_models"]:
+                    if "measures" in sem_model:
+                        for measure in sem_model["measures"]:
+                            if measure["name"] == metric_name:
+                                self.add_metrics_from_measures(sem_model["name"], metric_unique_id)
+                                break
+
     def get_key_and_name_for_test(self, test_unique_id):
         yaml_key = None
         block_name = None
@@ -29,11 +29,13 @@ from dbt.adapters.exceptions import (
     DuplicateMacroInPackageError,
     DuplicateMaterializationNameError,
 )
+from dbt.adapters.factory import get_adapter_package_names

 # to preserve import paths
-from dbt.artifacts.resources import BaseResource, DeferRelation, NodeVersion
+from dbt.artifacts.resources import BaseResource, DeferRelation, NodeVersion, RefArgs
 from dbt.artifacts.resources.v1.config import NodeConfig
 from dbt.artifacts.schemas.manifest import ManifestMetadata, UniqueID, WritableManifest
+from dbt.clients.jinja_static import statically_parse_ref_or_source
 from dbt.contracts.files import (
     AnySourceFile,
     FileHash,
@@ -53,7 +55,6 @@ from dbt.contracts.graph.nodes import (
     ManifestNode,
     Metric,
     ModelNode,
-    ResultNode,
     SavedQuery,
     SeedNode,
     SemanticModel,
@@ -412,11 +413,11 @@ class DisabledLookup(dbtClassMixin):
         self.storage: Dict[str, Dict[PackageName, List[Any]]] = {}
         self.populate(manifest)

-    def populate(self, manifest):
+    def populate(self, manifest: "Manifest"):
         for node in list(chain.from_iterable(manifest.disabled.values())):
             self.add_node(node)

-    def add_node(self, node):
+    def add_node(self, node: GraphMemberNode) -> None:
         if node.search_name not in self.storage:
             self.storage[node.search_name] = {}
         if node.package_name not in self.storage[node.search_name]:
@@ -426,8 +427,12 @@ class DisabledLookup(dbtClassMixin):
     # This should return a list of disabled nodes. It's different from
     # the other Lookup functions in that it returns full nodes, not just unique_ids
     def find(
-        self, search_name, package: Optional[PackageName], version: Optional[NodeVersion] = None
-    ):
+        self,
+        search_name,
+        package: Optional[PackageName],
+        version: Optional[NodeVersion] = None,
+        resource_types: Optional[List[NodeType]] = None,
+    ) -> Optional[List[Any]]:
         if version:
             search_name = f"{search_name}.v{version}"

@@ -436,16 +441,29 @@

         pkg_dct: Mapping[PackageName, List[Any]] = self.storage[search_name]

+        nodes = []
         if package is None:
             if not pkg_dct:
                 return None
             else:
-                return next(iter(pkg_dct.values()))
+                nodes = next(iter(pkg_dct.values()))
         elif package in pkg_dct:
-            return pkg_dct[package]
+            nodes = pkg_dct[package]
         else:
             return None

+        if resource_types is None:
+            return nodes
+        else:
+            new_nodes = []
+            for node in nodes:
+                if node.resource_type in resource_types:
+                    new_nodes.append(node)
+            if not new_nodes:
+                return None
+            else:
+                return new_nodes
+

 class AnalysisLookup(RefableLookup):
     _lookup_types: ClassVar[set] = set([NodeType.Analysis])
@@ -720,9 +738,6 @@ class MacroMethods:
         filter: Optional[Callable[[MacroCandidate], bool]] = None,
     ) -> CandidateList:
         """Find macros by their name."""
-        # avoid an import cycle
-        from dbt.adapters.factory import get_adapter_package_names
-
         candidates: CandidateList = CandidateList()

         macros_by_name = self.get_macros_by_name()
@@ -988,6 +1003,7 @@ class Manifest(MacroMethods, dbtClassMixin):
             self.metrics.values(),
             self.semantic_models.values(),
             self.saved_queries.values(),
+            self.unit_tests.values(),
         )
         for resource in all_resources:
             resource_type_plural = resource.resource_type.pluralize()
@@ -1094,6 +1110,7 @@ class Manifest(MacroMethods, dbtClassMixin):
             metrics=cls._map_resources_to_map_nodes(writable_manifest.metrics),
             groups=cls._map_resources_to_map_nodes(writable_manifest.groups),
             semantic_models=cls._map_resources_to_map_nodes(writable_manifest.semantic_models),
+            saved_queries=cls._map_resources_to_map_nodes(writable_manifest.saved_queries),
             selectors={
                 selector_id: selector
                 for selector_id, selector in writable_manifest.selectors.items()
@@ -1295,7 +1312,12 @@ class Manifest(MacroMethods, dbtClassMixin):

         # it's possible that the node is disabled
         if disabled is None:
-            disabled = self.disabled_lookup.find(target_model_name, pkg, target_model_version)
+            disabled = self.disabled_lookup.find(
+                target_model_name,
+                pkg,
+                version=target_model_version,
+                resource_types=REFABLE_NODE_TYPES,
+            )

         if disabled:
             return Disabled(disabled[0])
@@ -1566,13 +1588,15 @@ class Manifest(MacroMethods, dbtClassMixin):
         self.exposures[exposure.unique_id] = exposure
         source_file.exposures.append(exposure.unique_id)

-    def add_metric(self, source_file: SchemaSourceFile, metric: Metric, generated: bool = False):
+    def add_metric(
+        self, source_file: SchemaSourceFile, metric: Metric, generated_from: Optional[str] = None
+    ):
         _check_duplicates(metric, self.metrics)
         self.metrics[metric.unique_id] = metric
-        if not generated:
+        if not generated_from:
             source_file.metrics.append(metric.unique_id)
         else:
-            source_file.generated_metrics.append(metric.unique_id)
+            source_file.add_metrics_from_measures(generated_from, metric.unique_id)

     def add_group(self, source_file: SchemaSourceFile, group: Group):
         _check_duplicates(group, self.groups)
@@ -1586,7 +1610,7 @@ class Manifest(MacroMethods, dbtClassMixin):
         else:
             self.disabled[node.unique_id] = [node]

-    def add_disabled(self, source_file: AnySourceFile, node: ResultNode, test_from=None):
+    def add_disabled(self, source_file: AnySourceFile, node: GraphMemberNode, test_from=None):
         self.add_disabled_nofile(node)
         if isinstance(source_file, SchemaSourceFile):
             if isinstance(node, GenericTestNode):
@@ -1634,6 +1658,22 @@ class Manifest(MacroMethods, dbtClassMixin):

     # end of methods formerly in ParseResult

+    def find_node_from_ref_or_source(
+        self, expression: str
+    ) -> Optional[Union[ModelNode, SourceDefinition]]:
+        ref_or_source = statically_parse_ref_or_source(expression)
+
+        node = None
+        if isinstance(ref_or_source, RefArgs):
+            node = self.ref_lookup.find(
+                ref_or_source.name, ref_or_source.package, ref_or_source.version, self
+            )
+        else:
+            source_name, source_table_name = ref_or_source[0], ref_or_source[1]
+            node = self.source_lookup.find(f"{source_name}.{source_table_name}", None, self)
+
+        return node
+
     # Provide support for copy.deepcopy() - we just need to avoid the lock!
     # pickle and deepcopy use this. It returns a callable object used to
     # create the initial version of the object and a tuple of arguments
@@ -1677,9 +1717,9 @@ class MacroManifest(MacroMethods):
         self.macros = macros
         self.metadata = ManifestMetadata(
             user_id=tracking.active_user.id if tracking.active_user else None,
-            send_anonymous_usage_stats=get_flags().SEND_ANONYMOUS_USAGE_STATS
-            if tracking.active_user
-            else None,
+            send_anonymous_usage_stats=(
+                get_flags().SEND_ANONYMOUS_USAGE_STATS if tracking.active_user else None
+            ),
         )
         # This is returned by the 'graph' context property
         # in the ProviderContext class.
@@ -1,8 +1,7 @@
 from typing import Any, Dict, Iterator, List

-from dbt_semantic_interfaces.type_enums import MetricType
-
 from dbt.contracts.graph.manifest import Manifest, Metric
+from dbt_semantic_interfaces.type_enums import MetricType

 DERIVED_METRICS = [MetricType.DERIVED, MetricType.RATIO]
 BASE_METRICS = [MetricType.SIMPLE, MetricType.CUMULATIVE, MetricType.CONVERSION]
@@ -39,12 +39,6 @@ class UnitTestNodeConfig(NodeConfig):
     expected_sql: Optional[str] = None


-@dataclass
-class EmptySnapshotConfig(NodeConfig):
-    materialized: str = "snapshot"
-    unique_key: Optional[str] = None  # override NodeConfig unique_key definition
-
-
 RESOURCE_TYPES: Dict[NodeType, Type[BaseConfig]] = {
     NodeType.Metric: MetricConfig,
     NodeType.SemanticModel: SemanticModelConfig,
@@ -62,7 +56,6 @@ RESOURCE_TYPES: Dict[NodeType, Type[BaseConfig]] = {
 # base resource types are like resource types, except nothing has mandatory
 # configs.
 BASE_RESOURCE_TYPES: Dict[NodeType, Type[BaseConfig]] = RESOURCE_TYPES.copy()
-BASE_RESOURCE_TYPES.update({NodeType.Snapshot: EmptySnapshotConfig})


 def get_config_for(resource_type: NodeType, base=False) -> Type[BaseConfig]:
@@ -19,6 +19,8 @@ from typing import (
 from mashumaro.types import SerializableType

 from dbt import deprecations
+from dbt.adapters.base import ConstraintSupport
+from dbt.adapters.factory import get_adapter_constraint_support
 from dbt.artifacts.resources import Analysis as AnalysisResource
 from dbt.artifacts.resources import (
     BaseResource,
@@ -56,8 +58,9 @@ from dbt.artifacts.resources import SingularTest as SingularTestResource
 from dbt.artifacts.resources import Snapshot as SnapshotResource
 from dbt.artifacts.resources import SourceDefinition as SourceDefinitionResource
 from dbt.artifacts.resources import SqlOperation as SqlOperationResource
+from dbt.artifacts.resources import TimeSpine
 from dbt.artifacts.resources import UnitTestDefinition as UnitTestDefinitionResource
-from dbt.contracts.graph.model_config import EmptySnapshotConfig, UnitTestNodeConfig
+from dbt.contracts.graph.model_config import UnitTestNodeConfig
 from dbt.contracts.graph.node_args import ModelNodeArgs
 from dbt.contracts.graph.unparsed import (
     HasYamlMetadata,
@@ -83,7 +86,11 @@ from dbt.node_types import (
     NodeType,
 )
 from dbt_common.clients.system import write_file
-from dbt_common.contracts.constraints import ConstraintType
+from dbt_common.contracts.constraints import (
+    ColumnLevelConstraint,
+    ConstraintType,
+    ModelLevelConstraint,
+)
 from dbt_common.events.contextvars import set_log_contextvars
 from dbt_common.events.functions import warn_or_error

@@ -469,6 +476,13 @@ class ModelNode(ModelResource, CompiledNode):
     def is_latest_version(self) -> bool:
         return self.version is not None and self.version == self.latest_version

+    @property
+    def is_past_deprecation_date(self) -> bool:
+        return (
+            self.deprecation_date is not None
+            and self.deprecation_date < datetime.now().astimezone()
+        )
+
     @property
     def search_name(self):
         if self.version is None:
@@ -480,6 +494,18 @@ class ModelNode(ModelResource, CompiledNode):
     def materialization_enforces_constraints(self) -> bool:
         return self.config.materialized in ["table", "incremental"]

+    @property
+    def all_constraints(self) -> List[Union[ModelLevelConstraint, ColumnLevelConstraint]]:
+        constraints: List[Union[ModelLevelConstraint, ColumnLevelConstraint]] = []
+        for model_level_constraint in self.constraints:
+            constraints.append(model_level_constraint)
+
+        for column in self.columns.values():
+            for column_level_constraint in column.constraints:
+                constraints.append(column_level_constraint)
+
+        return constraints
+
     def infer_primary_key(self, data_tests: List["GenericTestNode"]) -> List[str]:
         """
         Infers the columns that can be used as primary key of a model in the following order:
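A short sketch of how the new all_constraints property is consumed; this mirrors the compiler hunk earlier in this diff, and assumes `model` is a parsed ModelNode and that ConstraintType is imported from dbt_common.contracts.constraints:

    from dbt_common.contracts.constraints import ConstraintType

    foreign_keys = [
        constraint
        for constraint in model.all_constraints  # model-level plus column-level constraints
        if constraint.type == ConstraintType.foreign_key and constraint.to
    ]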
@@ -570,6 +596,42 @@ class ModelNode(ModelResource, CompiledNode):
         data = contract_state.encode("utf-8")
         self.contract.checksum = hashlib.new("sha256", data).hexdigest()

+    def same_contract_removed(self) -> bool:
+        """
+        self: the removed (deleted, renamed, or disabled) model node
+        """
+        # If the contract wasn't previously enforced, no contract change has occurred
+        if self.contract.enforced is False:
+            return True
+
+        # Removed node is past its deprecation_date, so deletion does not constitute a contract change
+        if self.is_past_deprecation_date:
+            return True
+
+        # Disabled, deleted, or renamed node with previously enforced contract.
+        if not self.config.enabled:
+            breaking_change = f"Contracted model '{self.unique_id}' was disabled."
+        else:
+            breaking_change = f"Contracted model '{self.unique_id}' was deleted or renamed."
+
+        if self.version is None:
+            warn_or_error(
+                UnversionedBreakingChange(
+                    breaking_changes=[breaking_change],
+                    model_name=self.name,
+                    model_file_path=self.original_file_path,
+                ),
+                node=self,
+            )
+            return False
+        else:
+            raise (
+                ContractBreakingChangeError(
+                    breaking_changes=[breaking_change],
+                    node=self,
+                )
+            )
+
     def same_contract(self, old, adapter_type=None) -> bool:
         # If the contract wasn't previously enforced:
         if old.contract.enforced is False and self.contract.enforced is False:
@@ -591,9 +653,9 @@ class ModelNode(ModelResource, CompiledNode):
         contract_enforced_disabled: bool = False
         columns_removed: List[str] = []
         column_type_changes: List[Dict[str, str]] = []
-        enforced_column_constraint_removed: List[
-            Dict[str, str]
-        ] = []  # column_name, constraint_type
+        enforced_column_constraint_removed: List[Dict[str, str]] = (
+            []
+        )  # column_name, constraint_type
         enforced_model_constraint_removed: List[Dict[str, Any]] = []  # constraint_type, columns
         materialization_changed: List[str] = []

@@ -601,10 +663,6 @@
             # Breaking change: the contract was previously enforced, and it no longer is
             contract_enforced_disabled = True

-        # TODO: this avoid the circular imports but isn't ideal
-        from dbt.adapters.base import ConstraintSupport
-        from dbt.adapters.factory import get_adapter_constraint_support
-
         constraint_support = get_adapter_constraint_support(adapter_type)
         column_constraints_exist = False

@@ -1000,19 +1058,6 @@ class UnitTestFileFixture(BaseNode):
 # ====================================


-@dataclass
-class IntermediateSnapshotNode(CompiledNode):
-    # at an intermediate stage in parsing, where we've built something better
-    # than an unparsed node for rendering in parse mode, it's pretty possible
-    # that we won't have critical snapshot-related information that is only
-    # defined in config blocks. To fix that, we have an intermediate type that
-    # uses a regular node config, which the snapshot parser will then convert
-    # into a full ParsedSnapshotNode after rendering. Note: it currently does
-    # not work to set snapshot config in schema files because of the validation.
-    resource_type: Literal[NodeType.Snapshot]
-    config: EmptySnapshotConfig = field(default_factory=EmptySnapshotConfig)
-
-
 @dataclass
 class SnapshotNode(SnapshotResource, CompiledNode):
     @classmethod
@@ -1092,7 +1137,7 @@ class UnpatchedSourceDefinition(BaseNode):
     def get_source_representation(self):
         return f'source("{self.source.name}", "{self.table.name}")'

-    def validate_data_tests(self):
+    def validate_data_tests(self, is_root_project: bool):
         """
         sources parse tests differently than models, so we need to do some validation
         here where it's done in the PatchParser for other nodes
@@ -1103,6 +1148,7 @@ class UnpatchedSourceDefinition(BaseNode):
                 "Invalid test config: cannot have both 'tests' and 'data_tests' defined"
             )
         if self.tests:
+            if is_root_project:
                 deprecations.warn(
                     "project-test-config",
                     deprecated_path="tests",
@@ -1118,6 +1164,7 @@ class UnpatchedSourceDefinition(BaseNode):
                     "Invalid test config: cannot have both 'tests' and 'data_tests' defined"
                 )
             if column.tests:
+                if is_root_project:
                     deprecations.warn(
                         "project-test-config",
                         deprecated_path="tests",
@@ -1140,7 +1187,6 @@ class UnpatchedSourceDefinition(BaseNode):
         return [] if self.table.columns is None else self.table.columns

     def get_tests(self) -> Iterator[Tuple[Dict[str, Any], Optional[UnparsedColumn]]]:
-        self.validate_data_tests()
         for data_test in self.data_tests:
             yield normalize_test(data_test), None

@@ -1521,12 +1567,11 @@ class SavedQuery(NodeInfoMixin, GraphNode, SavedQueryResource):
         return self.group == old.group

     def same_exports(self, old: "SavedQuery") -> bool:
-        # TODO: This isn't currently used in `same_contents` (nor called anywhere else)
         if len(self.exports) != len(old.exports):
             return False

         # exports should be in the same order, so we zip them for easy iteration
-        for (old_export, new_export) in zip(old.exports, self.exports):
+        for old_export, new_export in zip(old.exports, self.exports):
             if not (
                 old_export.name == new_export.name
                 and old_export.config.export_as == new_export.config.export_as
@@ -1551,6 +1596,7 @@ class SavedQuery(NodeInfoMixin, GraphNode, SavedQueryResource):
             and self.same_label(old)
             and self.same_config(old)
             and self.same_group(old)
+            and self.same_exports(old)
             and True
         )

@@ -1580,6 +1626,7 @@ class ParsedNodePatch(ParsedPatch):
     latest_version: Optional[NodeVersion]
     constraints: List[Dict[str, Any]]
     deprecation_date: Optional[datetime]
+    time_spine: Optional[TimeSpine] = None


 @dataclass
@@ -1,4 +1,19 @@
+from typing import List, Optional
+
+from dbt.constants import (
+    LEGACY_TIME_SPINE_GRANULARITY,
+    LEGACY_TIME_SPINE_MODEL_NAME,
+    MINIMUM_REQUIRED_TIME_SPINE_GRANULARITY,
+)
+from dbt.contracts.graph.manifest import Manifest
+from dbt.contracts.graph.nodes import ModelNode
+from dbt.events.types import SemanticValidationFailure
+from dbt.exceptions import ParsingError
+from dbt_common.clients.system import write_file
+from dbt_common.events.base_types import EventLevel
+from dbt_common.events.functions import fire_event
 from dbt_semantic_interfaces.implementations.metric import PydanticMetric
+from dbt_semantic_interfaces.implementations.node_relation import PydanticNodeRelation
 from dbt_semantic_interfaces.implementations.project_configuration import (
     PydanticProjectConfiguration,
 )
@@ -7,23 +22,21 @@ from dbt_semantic_interfaces.implementations.semantic_manifest import (
     PydanticSemanticManifest,
 )
 from dbt_semantic_interfaces.implementations.semantic_model import PydanticSemanticModel
+from dbt_semantic_interfaces.implementations.time_spine import (
+    PydanticTimeSpine,
+    PydanticTimeSpinePrimaryColumn,
+)
 from dbt_semantic_interfaces.implementations.time_spine_table_configuration import (
-    PydanticTimeSpineTableConfiguration,
+    PydanticTimeSpineTableConfiguration as LegacyTimeSpine,
 )
 from dbt_semantic_interfaces.type_enums import TimeGranularity
 from dbt_semantic_interfaces.validations.semantic_manifest_validator import (
     SemanticManifestValidator,
 )

-from dbt.events.types import SemanticValidationFailure
-from dbt.exceptions import ParsingError
-from dbt_common.clients.system import write_file
-from dbt_common.events.base_types import EventLevel
-from dbt_common.events.functions import fire_event


 class SemanticManifest:
-    def __init__(self, manifest) -> None:
+    def __init__(self, manifest: Manifest) -> None:
         self.manifest = manifest

     def validate(self) -> bool:
@@ -59,8 +72,50 @@ class SemanticManifest:
         write_file(file_path, json)

     def _get_pydantic_semantic_manifest(self) -> PydanticSemanticManifest:
+        pydantic_time_spines: List[PydanticTimeSpine] = []
+        minimum_time_spine_granularity: Optional[TimeGranularity] = None
+        for node in self.manifest.nodes.values():
+            if not (isinstance(node, ModelNode) and node.time_spine):
+                continue
+            time_spine = node.time_spine
+            standard_granularity_column = None
+            for column in node.columns.values():
+                if column.name == time_spine.standard_granularity_column:
+                    standard_granularity_column = column
+                    break
+            # Assertions needed for type checking
+            if not standard_granularity_column:
+                raise ParsingError(
+                    "Expected to find time spine standard granularity column in model columns, but did not. "
+                    "This should have been caught in YAML parsing."
+                )
+            if not standard_granularity_column.granularity:
+                raise ParsingError(
+                    "Expected to find granularity set for time spine standard granularity column, but did not. "
+                    "This should have been caught in YAML parsing."
+                )
+            pydantic_time_spine = PydanticTimeSpine(
+                node_relation=PydanticNodeRelation(
+                    alias=node.alias,
+                    schema_name=node.schema,
+                    database=node.database,
+                    relation_name=node.relation_name,
+                ),
+                primary_column=PydanticTimeSpinePrimaryColumn(
+                    name=time_spine.standard_granularity_column,
+                    time_granularity=standard_granularity_column.granularity,
+                ),
+            )
+            pydantic_time_spines.append(pydantic_time_spine)
+            if (
+                not minimum_time_spine_granularity
+                or standard_granularity_column.granularity.to_int()
+                < minimum_time_spine_granularity.to_int()
+            ):
+                minimum_time_spine_granularity = standard_granularity_column.granularity
+
         project_config = PydanticProjectConfiguration(
-            time_spine_table_configurations=[],
+            time_spine_table_configurations=[], time_spines=pydantic_time_spines
         )
         pydantic_semantic_manifest = PydanticSemanticManifest(
             metrics=[], semantic_models=[], project_configuration=project_config
@@ -79,25 +134,39 @@ class SemanticManifest:
                 PydanticSavedQuery.parse_obj(saved_query.to_dict())
             )

-        # Look for time-spine table model and create time spine table configuration
         if self.manifest.semantic_models:
-            # Get model for time_spine_table
-            time_spine_model_name = "metricflow_time_spine"
-            model = self.manifest.ref_lookup.find(time_spine_model_name, None, None, self.manifest)
-            if not model:
-                raise ParsingError(
-                    "The semantic layer requires a 'metricflow_time_spine' model in the project, but none was found. "
-                    "Guidance on creating this model can be found on our docs site ("
-                    "https://docs.getdbt.com/docs/build/metricflow-time-spine) "
-                )
-            # Create time_spine_table_config, set it in project_config, and add to semantic manifest
-            time_spine_table_config = PydanticTimeSpineTableConfiguration(
-                location=model.relation_name,
+            legacy_time_spine_model = self.manifest.ref_lookup.find(
+                LEGACY_TIME_SPINE_MODEL_NAME, None, None, self.manifest
+            )
+            if legacy_time_spine_model:
+                if (
+                    not minimum_time_spine_granularity
+                    or LEGACY_TIME_SPINE_GRANULARITY.to_int()
+                    < minimum_time_spine_granularity.to_int()
+                ):
+                    minimum_time_spine_granularity = LEGACY_TIME_SPINE_GRANULARITY

+            # If no time spines have been configured at DAY or smaller AND legacy time spine model does not exist, error.
+            if (
+                not minimum_time_spine_granularity
+                or minimum_time_spine_granularity.to_int()
+                > MINIMUM_REQUIRED_TIME_SPINE_GRANULARITY.to_int()
+            ):
+                raise ParsingError(
+                    "The semantic layer requires a time spine model with granularity DAY or smaller in the project, "
+                    "but none was found. Guidance on creating this model can be found on our docs site "
+                    "(https://docs.getdbt.com/docs/build/metricflow-time-spine)."  # TODO: update docs link when available!
+                )
+
+            # For backward compatibility: if legacy time spine exists, include it in the manifest.
+            if legacy_time_spine_model:
+                legacy_time_spine = LegacyTimeSpine(
+                    location=legacy_time_spine_model.relation_name,
                     column_name="date_day",
-                grain=TimeGranularity.DAY,
+                    grain=LEGACY_TIME_SPINE_GRANULARITY,
                 )
                 pydantic_semantic_manifest.project_configuration.time_spine_table_configurations = [
-                time_spine_table_config
+                    legacy_time_spine
                 ]

         return pydantic_semantic_manifest
@@ -4,8 +4,6 @@ from dataclasses import dataclass, field
 from pathlib import Path
 from typing import Any, Dict, List, Literal, Optional, Sequence, Union

-from dbt_semantic_interfaces.type_enums import ConversionCalculationType
-
 # trigger the PathEncoder
 import dbt_common.helper_types  # noqa:F401
 from dbt import deprecations
@@ -39,6 +37,10 @@ from dbt_common.dataclass_schema import (
     dbtClassMixin,
 )
 from dbt_common.exceptions import DbtInternalError
+from dbt_semantic_interfaces.type_enums import (
+    ConversionCalculationType,
+    PeriodAggregation,
+)


 @dataclass
@@ -114,6 +116,7 @@ class HasColumnAndTestProps(HasColumnProps):
 class UnparsedColumn(HasColumnAndTestProps):
     quote: Optional[bool] = None
     tags: List[str] = field(default_factory=list)
+    granularity: Optional[str] = None  # str is really a TimeGranularity Enum


 @dataclass
@@ -204,6 +207,11 @@ class UnparsedNodeUpdate(HasConfig, HasColumnTests, HasColumnAndTestProps, HasYa
     access: Optional[str] = None


+@dataclass
+class UnparsedTimeSpine(dbtClassMixin):
+    standard_granularity_column: str
+
+
 @dataclass
 class UnparsedModelUpdate(UnparsedNodeUpdate):
     quote_columns: Optional[bool] = None
@@ -211,6 +219,7 @@ class UnparsedModelUpdate(UnparsedNodeUpdate):
     latest_version: Optional[NodeVersion] = None
     versions: Sequence[UnparsedVersion] = field(default_factory=list)
     deprecation_date: Optional[datetime.datetime] = None
+    time_spine: Optional[UnparsedTimeSpine] = None

     def __post_init__(self) -> None:
         if self.latest_version:
@@ -232,6 +241,26 @@ class UnparsedModelUpdate(UnparsedNodeUpdate):

         self.deprecation_date = normalize_date(self.deprecation_date)

+        if self.time_spine:
+            columns = (
+                self.get_columns_for_version(self.latest_version)
+                if self.latest_version
+                else self.columns
+            )
+            column_names_to_columns = {column.name: column for column in columns}
+            if self.time_spine.standard_granularity_column not in column_names_to_columns:
+                raise ParsingError(
+                    f"Time spine standard granularity column must be defined on the model. Got invalid "
+                    f"column name '{self.time_spine.standard_granularity_column}' for model '{self.name}'. Valid names"
+                    f"{' for latest version' if self.latest_version else ''}: {list(column_names_to_columns.keys())}."
+                )
+            column = column_names_to_columns[self.time_spine.standard_granularity_column]
+            if not column.granularity:
+                raise ParsingError(
+                    f"Time spine standard granularity column must have a granularity defined. Please add one for "
+                    f"column '{self.time_spine.standard_granularity_column}' in model '{self.name}'."
|
||||||
|
)
|
||||||
|
|
||||||
def get_columns_for_version(self, version: NodeVersion) -> List[UnparsedColumn]:
|
def get_columns_for_version(self, version: NodeVersion) -> List[UnparsedColumn]:
|
||||||
if version not in self._version_map:
|
if version not in self._version_map:
|
||||||
raise DbtInternalError(
|
raise DbtInternalError(
|
||||||
@@ -532,6 +561,13 @@ class UnparsedConversionTypeParams(dbtClassMixin):
|
|||||||
constant_properties: Optional[List[ConstantPropertyInput]] = None
|
constant_properties: Optional[List[ConstantPropertyInput]] = None
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class UnparsedCumulativeTypeParams(dbtClassMixin):
|
||||||
|
window: Optional[str] = None
|
||||||
|
grain_to_date: Optional[str] = None
|
||||||
|
period_agg: str = PeriodAggregation.FIRST.value
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
class UnparsedMetricTypeParams(dbtClassMixin):
|
class UnparsedMetricTypeParams(dbtClassMixin):
|
||||||
measure: Optional[Union[UnparsedMetricInputMeasure, str]] = None
|
measure: Optional[Union[UnparsedMetricInputMeasure, str]] = None
|
||||||
@@ -542,6 +578,7 @@ class UnparsedMetricTypeParams(dbtClassMixin):
|
|||||||
grain_to_date: Optional[str] = None # str is really a TimeGranularity Enum
|
grain_to_date: Optional[str] = None # str is really a TimeGranularity Enum
|
||||||
metrics: Optional[List[Union[UnparsedMetricInput, str]]] = None
|
metrics: Optional[List[Union[UnparsedMetricInput, str]]] = None
|
||||||
conversion_type_params: Optional[UnparsedConversionTypeParams] = None
|
conversion_type_params: Optional[UnparsedConversionTypeParams] = None
|
||||||
|
cumulative_type_params: Optional[UnparsedCumulativeTypeParams] = None
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
@@ -553,6 +590,7 @@ class UnparsedMetric(dbtClassMixin):
|
|||||||
description: str = ""
|
description: str = ""
|
||||||
# Note: `Union` must be the outermost part of the type annotation for serialization to work properly.
|
# Note: `Union` must be the outermost part of the type annotation for serialization to work properly.
|
||||||
filter: Union[str, List[str], None] = None
|
filter: Union[str, List[str], None] = None
|
||||||
|
time_granularity: Optional[str] = None
|
||||||
# metadata: Optional[Unparsedetadata] = None # TODO
|
# metadata: Optional[Unparsedetadata] = None # TODO
|
||||||
meta: Dict[str, Any] = field(default_factory=dict)
|
meta: Dict[str, Any] = field(default_factory=dict)
|
||||||
tags: List[str] = field(default_factory=list)
|
tags: List[str] = field(default_factory=list)
|
||||||
|
|||||||
@@ -1903,7 +1903,19 @@ message EndOfRunSummaryMsg {
|
|||||||
EndOfRunSummary data = 2;
|
EndOfRunSummary data = 2;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Skipped Z031, Z032, Z033
|
// Skipped Z031, Z032
|
||||||
|
|
||||||
|
// Z033
|
||||||
|
message MarkSkippedChildren {
|
||||||
|
string unique_id = 1;
|
||||||
|
string status = 2;
|
||||||
|
RunResultMsg run_result = 3;
|
||||||
|
}
|
||||||
|
|
||||||
|
message MarkSkippedChildrenMsg {
|
||||||
|
CoreEventInfo info = 1;
|
||||||
|
MarkSkippedChildren data = 2;
|
||||||
|
}
|
||||||
|
|
||||||
// Z034
|
// Z034
|
||||||
message LogSkipBecauseError {
|
message LogSkipBecauseError {
|
||||||
@@ -1911,6 +1923,7 @@ message LogSkipBecauseError {
|
|||||||
string relation = 2;
|
string relation = 2;
|
||||||
int32 index = 3;
|
int32 index = 3;
|
||||||
int32 total = 4;
|
int32 total = 4;
|
||||||
|
string status = 5;
|
||||||
}
|
}
|
||||||
|
|
||||||
message LogSkipBecauseErrorMsg {
|
message LogSkipBecauseErrorMsg {
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
@@ -74,9 +74,7 @@ def setup_event_logger(flags, callbacks: List[Callable[[EventMsg], None]] = [])
|
|||||||
log_level = (
|
log_level = (
|
||||||
EventLevel.ERROR
|
EventLevel.ERROR
|
||||||
if flags.QUIET
|
if flags.QUIET
|
||||||
else EventLevel.DEBUG
|
else EventLevel.DEBUG if flags.DEBUG else EventLevel(flags.LOG_LEVEL)
|
||||||
if flags.DEBUG
|
|
||||||
else EventLevel(flags.LOG_LEVEL)
|
|
||||||
)
|
)
|
||||||
console_config = get_stdout_config(
|
console_config = get_stdout_config(
|
||||||
line_format,
|
line_format,
|
||||||
|
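For readability of the change above, a tiny standalone sketch (illustrative names only, not dbt's) showing that the flattened conditional expression picks the same level as the nested form it replaces, because Python's conditional expressions group right-to-left:

    quiet, debug, log_level = False, True, "info"
    nested = "error" if quiet else ("debug" if debug else log_level)
    flat = "error" if quiet else "debug" if debug else log_level
    assert nested == flat  # both evaluate to "debug"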
@@ -1856,7 +1856,21 @@ class EndOfRunSummary(InfoLevel):
return message


-# Skipped Z031, Z032, Z033
+# Skipped Z031, Z032


+class MarkSkippedChildren(DebugLevel):
+def code(self) -> str:
+return "Z033"
+
+def message(self) -> str:
+msg = (
+f"Marking all children of '{self.unique_id}' to be skipped "
+f"because of status '{self.status}'. "
+)
+if self.run_result.message:
+msg = msg + f" Reason: {self.run_result.message}."
+return msg
+
+
class LogSkipBecauseError(ErrorLevel):
@@ -1864,7 +1878,7 @@ class LogSkipBecauseError(ErrorLevel):
return "Z034"

def message(self) -> str:
-msg = f"SKIP relation {self.schema}.{self.relation} due to ephemeral model error"
+msg = f"SKIP relation {self.schema}.{self.relation} due to ephemeral model status '{self.status}'"
return format_fancy_output_line(
msg=msg, status=red("ERROR SKIP"), index=self.index, total=self.total
)
@@ -136,6 +136,18 @@ class GraphDependencyNotFoundError(CompilationError):
return msg


+class ForeignKeyConstraintToSyntaxError(CompilationError):
+def __init__(self, node, expression: str) -> None:
+self.expression = expression
+self.node = node
+super().__init__(msg=self.get_message())
+
+def get_message(self) -> str:
+msg = f"'{self.node.unique_id}' defines a foreign key constraint 'to' expression which is not valid 'ref' or 'source' syntax: {self.expression}."
+
+return msg
+
+
# client level exceptions


@@ -68,6 +68,7 @@ def get_flag_dict():
"target_path",
"log_path",
"invocation_command",
+"empty",
}
return {key: getattr(GLOBAL_FLAGS, key.upper(), None) for key in flag_attr}

@@ -25,8 +25,15 @@ class GraphQueue:
the same time, as there is an unlocked race!
"""

-def __init__(self, graph: nx.DiGraph, manifest: Manifest, selected: Set[UniqueId]) -> None:
-self.graph = graph
+def __init__(
+self,
+graph: nx.DiGraph,
+manifest: Manifest,
+selected: Set[UniqueId],
+preserve_edges: bool = True,
+) -> None:
+# 'create_empty_copy' returns a copy of the graph G with all of the edges removed, and leaves nodes intact.
+self.graph = graph if preserve_edges else nx.classes.function.create_empty_copy(graph)
self.manifest = manifest
self._selected = selected
# store the queue as a priority queue.
@@ -319,7 +319,7 @@ class NodeSelector(MethodManager):

return filtered_nodes

-def get_graph_queue(self, spec: SelectionSpec) -> GraphQueue:
+def get_graph_queue(self, spec: SelectionSpec, preserve_edges: bool = True) -> GraphQueue:
"""Returns a queue over nodes in the graph that tracks progress of
dependecies.
"""
@@ -330,7 +330,7 @@ class NodeSelector(MethodManager):
# Construct a new graph using the selected_nodes
new_graph = self.full_graph.get_subset_graph(selected_nodes)
# should we give a way here for consumers to mutate the graph?
-return GraphQueue(new_graph.graph, self.manifest, selected_nodes)
+return GraphQueue(new_graph.graph, self.manifest, selected_nodes, preserve_edges)


class ResourceTypeSelector(NodeSelector):
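For context on the GraphQueue change above, a minimal standalone sketch (plain networkx with toy node names; not part of the diff) of what passing preserve_edges=False does: create_empty_copy keeps the nodes but drops every dependency edge, so every selected node has no predecessors and can be queued at once:

    import networkx as nx

    # Toy dependency graph: model_b depends on model_a.
    graph = nx.DiGraph()
    graph.add_edge("model.proj.model_a", "model.proj.model_b")

    # With edges preserved, model_b only becomes available after model_a finishes.
    flattened = nx.classes.function.create_empty_copy(graph)
    print(list(graph.predecessors("model.proj.model_b")))      # ['model.proj.model_a']
    print(list(flattened.predecessors("model.proj.model_b")))  # []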
@@ -109,7 +109,7 @@ def is_selected_node(fqn: List[str], node_selector: str, is_versioned: bool) ->


SelectorTarget = Union[
-SourceDefinition, ManifestNode, Exposure, Metric, SemanticModel, UnitTestDefinition
+SourceDefinition, ManifestNode, Exposure, Metric, SemanticModel, UnitTestDefinition, SavedQuery
]


@@ -202,6 +202,7 @@ class SelectorMethod(metaclass=abc.ABCMeta):
self.metric_nodes(included_nodes),
self.unit_tests(included_nodes),
self.semantic_model_nodes(included_nodes),
+self.saved_query_nodes(included_nodes),
)

def configurable_nodes(
@@ -680,7 +681,8 @@ class StateSelectorMethod(SelectorMethod):
self, old: Optional[SelectorTarget], new: SelectorTarget, adapter_type: str
) -> bool:
if isinstance(
-new, (SourceDefinition, Exposure, Metric, SemanticModel, UnitTestDefinition)
+new,
+(SourceDefinition, Exposure, Metric, SemanticModel, UnitTestDefinition, SavedQuery),
):
# these all overwrite `same_contents`
different_contents = not new.same_contents(old)  # type: ignore
@@ -719,7 +721,9 @@ class StateSelectorMethod(SelectorMethod):
) -> Callable[[Optional[SelectorTarget], SelectorTarget], bool]:
# get a function that compares two selector target based on compare method provided
def check_modified_contract(old: Optional[SelectorTarget], new: SelectorTarget) -> bool:
-if hasattr(new, compare_method):
+if new is None and hasattr(old, compare_method + "_removed"):
+return getattr(old, compare_method + "_removed")()
+elif hasattr(new, compare_method):
# when old body does not exist or old and new are not the same
return not old or not getattr(new, compare_method)(old, adapter_type)  # type: ignore
else:
@@ -773,6 +777,8 @@ class StateSelectorMethod(SelectorMethod):
previous_node = SemanticModel.from_resource(manifest.semantic_models[unique_id])
elif unique_id in manifest.unit_tests:
previous_node = UnitTestDefinition.from_resource(manifest.unit_tests[unique_id])
+elif unique_id in manifest.saved_queries:
+previous_node = SavedQuery.from_resource(manifest.saved_queries[unique_id])

keyword_args = {}
if checker.__name__ in [
@@ -785,6 +791,22 @@ class StateSelectorMethod(SelectorMethod):
if checker(previous_node, node, **keyword_args):  # type: ignore
yield unique_id

+# checkers that can handle removed nodes
+if checker.__name__ in ["check_modified_contract"]:
+# ignore included_nodes, since those cannot contain removed nodes
+for previous_unique_id, previous_node in manifest.nodes.items():
+# detect removed (deleted, renamed, or disabled) nodes
+removed_node = None
+if previous_unique_id in self.manifest.disabled.keys():
+removed_node = self.manifest.disabled[previous_unique_id][0]
+elif previous_unique_id not in self.manifest.nodes.keys():
+removed_node = previous_node
+
+if removed_node:
+# do not yield -- removed nodes should never be selected for downstream execution
+# as they are not part of the current project's manifest.nodes
+checker(removed_node, None, **keyword_args)  # type: ignore


class ResultSelectorMethod(SelectorMethod):
def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[UniqueId]:
@@ -7,7 +7,7 @@ from dbt import hooks, utils
from dbt.adapters.factory import get_adapter  # noqa: F401
from dbt.artifacts.resources import Contract
from dbt.clients.jinja import MacroGenerator, get_rendered
-from dbt.config import Project, RuntimeConfig
+from dbt.config import RuntimeConfig
from dbt.context.context_config import ContextConfig
from dbt.context.providers import (
generate_generate_name_macro_context,
@@ -32,7 +32,6 @@ from dbt_common.dataclass_schema import ValidationError
FinalValue = TypeVar("FinalValue", bound=BaseNode)
IntermediateValue = TypeVar("IntermediateValue", bound=BaseNode)

-IntermediateNode = TypeVar("IntermediateNode", bound=Any)
FinalNode = TypeVar("FinalNode", bound=ManifestNode)


@@ -40,9 +39,9 @@ ConfiguredBlockType = TypeVar("ConfiguredBlockType", bound=FileBlock)


class BaseParser(Generic[FinalValue]):
-def __init__(self, project: Project, manifest: Manifest) -> None:
-self.project = project
-self.manifest = manifest
+def __init__(self, project: RuntimeConfig, manifest: Manifest) -> None:
+self.project: RuntimeConfig = project
+self.manifest: Manifest = manifest

@abc.abstractmethod
def parse_file(self, block: FileBlock) -> None:
@@ -64,7 +63,7 @@ class BaseParser(Generic[FinalValue]):
class Parser(BaseParser[FinalValue], Generic[FinalValue]):
def __init__(
self,
-project: Project,
+project: RuntimeConfig,
manifest: Manifest,
root_project: RuntimeConfig,
) -> None:
@@ -73,6 +72,7 @@ class Parser(BaseParser[FinalValue], Generic[FinalValue]):


class RelationUpdate:
+# "component" is database, schema or alias
def __init__(self, config: RuntimeConfig, manifest: Manifest, component: str) -> None:
default_macro = manifest.find_generate_macro_by_name(
component=component,
@@ -118,16 +118,17 @@ class RelationUpdate:

class ConfiguredParser(
Parser[FinalNode],
-Generic[ConfiguredBlockType, IntermediateNode, FinalNode],
+Generic[ConfiguredBlockType, FinalNode],
):
def __init__(
self,
-project: Project,
+project: RuntimeConfig,
manifest: Manifest,
root_project: RuntimeConfig,
) -> None:
super().__init__(project, manifest, root_project)

+# this sets callables from RelationUpdate
self._update_node_database = RelationUpdate(
manifest=manifest, config=root_project, component="database"
)
@@ -144,7 +145,7 @@ class ConfiguredParser(
pass

@abc.abstractmethod
-def parse_from_dict(self, dict, validate=True) -> IntermediateNode:
+def parse_from_dict(self, dict, validate=True) -> FinalNode:
pass

@abc.abstractproperty
@@ -208,7 +209,7 @@ class ConfiguredParser(
fqn: List[str],
name=None,
**kwargs,
-) -> IntermediateNode:
+) -> FinalNode:
"""Create the node that will be passed in to the parser context for
"rendering". Some information may be partial, as it'll be updated by
config() and any ref()/source() calls discovered during rendering.
@@ -253,10 +254,10 @@ class ConfiguredParser(
)
raise DictParseError(exc, node=node)

-def _context_for(self, parsed_node: IntermediateNode, config: ContextConfig) -> Dict[str, Any]:
+def _context_for(self, parsed_node: FinalNode, config: ContextConfig) -> Dict[str, Any]:
return generate_parser_model_context(parsed_node, self.root_project, self.manifest, config)

-def render_with_context(self, parsed_node: IntermediateNode, config: ContextConfig):
+def render_with_context(self, parsed_node: FinalNode, config: ContextConfig):
# Given the parsed node and a ContextConfig to use during parsing,
# render the node's sql with macro capture enabled.
# Note: this mutates the config object when config calls are rendered.
@@ -271,7 +272,7 @@ class ConfiguredParser(
# updating the config with new config passed in, then re-creating the
# config from the dict in the node.
def update_parsed_node_config_dict(
-self, parsed_node: IntermediateNode, config_dict: Dict[str, Any]
+self, parsed_node: FinalNode, config_dict: Dict[str, Any]
) -> None:
# Overwrite node config
final_config_dict = parsed_node.config.to_dict(omit_none=True)
@@ -281,7 +282,7 @@ class ConfiguredParser(
parsed_node.config = parsed_node.config.from_dict(final_config_dict)

def update_parsed_node_relation_names(
-self, parsed_node: IntermediateNode, config_dict: Dict[str, Any]
+self, parsed_node: FinalNode, config_dict: Dict[str, Any]
) -> None:

# These call the RelationUpdate callable to go through generate_name macros
@@ -289,7 +290,10 @@ class ConfiguredParser(
self._update_node_schema(parsed_node, config_dict.get("schema"))
self._update_node_alias(parsed_node, config_dict.get("alias"))

-# Snapshot nodes use special "target_database" and "target_schema" fields for some reason
+# Snapshot nodes use special "target_database" and "target_schema" fields
+# for backward compatibility
+# We have to do getattr here because saved_query parser calls this method with
+# Export object instead of a node.
if getattr(parsed_node, "resource_type", None) == NodeType.Snapshot:
if "target_database" in config_dict and config_dict["target_database"]:
parsed_node.database = config_dict["target_database"]
@@ -300,7 +304,7 @@ class ConfiguredParser(

def update_parsed_node_config(
self,
-parsed_node: IntermediateNode,
+parsed_node: FinalNode,
config: ContextConfig,
context=None,
patch_config_dict=None,
@@ -334,6 +338,7 @@ class ConfiguredParser(
# If we have access in the config, copy to node level
if parsed_node.resource_type == NodeType.Model and config_dict.get("access", None):
if AccessType.is_valid(config_dict["access"]):
+assert hasattr(parsed_node, "access")
parsed_node.access = AccessType(config_dict["access"])
else:
raise InvalidAccessTypeError(
@@ -360,6 +365,8 @@ class ConfiguredParser(
if "contract" in config_dict and config_dict["contract"]:
contract_dct = config_dict["contract"]
Contract.validate(contract_dct)
+# Seed node has contract config (from NodeConfig) but no contract in SeedNode
+if hasattr(parsed_node, "contract"):
parsed_node.contract = Contract.from_dict(contract_dct)

# unrendered_config is used to compare the original database/schema/alias
@@ -382,6 +389,7 @@ class ConfiguredParser(

# at this point, we've collected our hooks. Use the node context to
# render each hook and collect refs/sources
+assert hasattr(parsed_node.config, "pre_hook") and hasattr(parsed_node.config, "post_hook")
hooks = list(itertools.chain(parsed_node.config.pre_hook, parsed_node.config.post_hook))
# skip context rebuilding if there aren't any hooks
if not hooks:
@@ -413,7 +421,7 @@ class ConfiguredParser(
self._mangle_hooks(config_dict)
return config_dict

-def render_update(self, node: IntermediateNode, config: ContextConfig) -> None:
+def render_update(self, node: FinalNode, config: ContextConfig) -> None:
try:
context = self.render_with_context(node, config)
self.update_parsed_node_config(node, config, context=context)
@@ -440,9 +448,8 @@ class ConfiguredParser(
fqn=fqn,
)
self.render_update(node, config)
-result = self.transform(node)
-self.add_result_node(block, result)
-return result
+self.add_result_node(block, node)
+return node

def _update_node_relation_name(self, node: ManifestNode):
# Seed and Snapshot nodes and Models that are not ephemeral,
@@ -461,26 +468,18 @@ class ConfiguredParser(
def parse_file(self, file_block: FileBlock) -> None:
pass

-@abc.abstractmethod
-def transform(self, node: IntermediateNode) -> FinalNode:
+class SimpleParser(
+ConfiguredParser[ConfiguredBlockType, FinalNode],
+Generic[ConfiguredBlockType, FinalNode],
+):
pass


-class SimpleParser(
-ConfiguredParser[ConfiguredBlockType, FinalNode, FinalNode],
-Generic[ConfiguredBlockType, FinalNode],
-):
-def transform(self, node):
-return node
+class SQLParser(ConfiguredParser[FileBlock, FinalNode], Generic[FinalNode]):


-class SQLParser(
-ConfiguredParser[FileBlock, IntermediateNode, FinalNode], Generic[IntermediateNode, FinalNode]
-):
def parse_file(self, file_block: FileBlock) -> None:
self.parse_node(file_block)


-class SimpleSQLParser(SQLParser[FinalNode, FinalNode]):
-def transform(self, node):
-return node
+class SimpleSQLParser(SQLParser[FinalNode]):
+pass
@@ -18,6 +18,7 @@ from dbt.exceptions import ParsingError
from dbt.parser.search import FileBlock
from dbt_common.contracts.constraints import ColumnLevelConstraint, ConstraintType
from dbt_common.exceptions import DbtInternalError
+from dbt_semantic_interfaces.type_enums import TimeGranularity


def trimmed(inp: str) -> str:
@@ -185,13 +186,12 @@ class ParserRef:
self.column_info: Dict[str, ColumnInfo] = {}

def _add(self, column: HasColumnProps) -> None:
-tags: List[str] = []
-tags.extend(getattr(column, "tags", ()))
-quote: Optional[bool]
+tags: List[str] = getattr(column, "tags", [])
+quote: Optional[bool] = None
+granularity: Optional[TimeGranularity] = None
if isinstance(column, UnparsedColumn):
quote = column.quote
-else:
-quote = None
+granularity = TimeGranularity(column.granularity) if column.granularity else None

if any(
c
@@ -209,6 +209,7 @@ class ParserRef:
tags=tags,
quote=quote,
_extra=column.extra,
+granularity=granularity,
)

@classmethod
@@ -114,7 +114,8 @@ class TestBuilder(Generic[Testable]):
self.package_name: str = package_name
self.target: Testable = target
self.version: Optional[NodeVersion] = version
+self.render_ctx: Dict[str, Any] = render_ctx
+self.column_name: Optional[str] = column_name
self.args["model"] = self.build_model_str()

match = self.TEST_NAME_PATTERN.match(test_name)
@@ -125,39 +126,12 @@ class TestBuilder(Generic[Testable]):
self.name: str = groups["test_name"]
self.namespace: str = groups["test_namespace"]
self.config: Dict[str, Any] = {}
+# Process legacy args
+self.config.update(self._process_legacy_args())
+
-# This code removes keys identified as config args from the test entry
-# dictionary. The keys remaining in the 'args' dictionary will be
-# "kwargs", or keyword args that are passed to the test macro.
-# The "kwargs" are not rendered into strings until compilation time.
-# The "configs" are rendered here (since they were not rendered back
-# in the 'get_key_dicts' methods in the schema parsers).
-for key in self.CONFIG_ARGS:
-value = self.args.pop(key, None)
-# 'modifier' config could be either top level arg or in config
-if value and "config" in self.args and key in self.args["config"]:
-raise SameKeyNestedError()
-if not value and "config" in self.args:
-value = self.args["config"].pop(key, None)
-if isinstance(value, str):
-
-try:
-value = get_rendered(value, render_ctx, native=True)
-except UndefinedMacroError as e:
-
-raise CustomMacroPopulatingConfigValueError(
-target_name=self.target.name,
-column_name=column_name,
-name=self.name,
-key=key,
-err_msg=e.msg,
-)
-
-if value is not None:
-self.config[key] = value
-
+# Process config args if present
if "config" in self.args:
-del self.args["config"]
+self.config.update(self._render_values(self.args.pop("config", {})))

if self.namespace is not None:
self.package_name = self.namespace
@@ -182,6 +156,36 @@ class TestBuilder(Generic[Testable]):
if short_name != full_name and "alias" not in self.config:
self.config["alias"] = short_name

+def _process_legacy_args(self):
+config = {}
+for key in self.CONFIG_ARGS:
+value = self.args.pop(key, None)
+if value and "config" in self.args and key in self.args["config"]:
+raise SameKeyNestedError()
+if not value and "config" in self.args:
+value = self.args["config"].pop(key, None)
+config[key] = value
+
+return self._render_values(config)
+
+def _render_values(self, config: Dict[str, Any]) -> Dict[str, Any]:
+rendered_config = {}
+for key, value in config.items():
+if isinstance(value, str):
+try:
+value = get_rendered(value, self.render_ctx, native=True)
+except UndefinedMacroError as e:
+raise CustomMacroPopulatingConfigValueError(
+target_name=self.target.name,
+column_name=self.column_name,
+name=self.name,
+key=key,
+err_msg=e.msg,
+)
+if value is not None:
+rendered_config[key] = value
+return rendered_config
+
def _bad_type(self) -> TypeError:
return TypeError('invalid target type "{}"'.format(type(self.target)))

@@ -66,8 +66,6 @@ class HookSearcher(Iterable[HookBlock]):


class HookParser(SimpleParser[HookBlock, HookNode]):
-def transform(self, node):
-return node

# Hooks are only in the dbt_project.yml file for the project
def get_path(self) -> FilePath:
@@ -10,8 +10,6 @@ from itertools import chain
from typing import Any, Callable, Dict, List, Mapping, Optional, Set, Tuple, Type, Union

import msgpack
-from dbt_semantic_interfaces.enum_extension import assert_values_exhausted
-from dbt_semantic_interfaces.type_enums import MetricType

import dbt.deprecations
import dbt.exceptions
@@ -119,6 +117,8 @@ from dbt_common.events.functions import fire_event, get_invocation_id, warn_or_e
from dbt_common.events.types import Note
from dbt_common.exceptions.base import DbtValidationError
from dbt_common.helper_types import PathSet
+from dbt_semantic_interfaces.enum_extension import assert_values_exhausted
+from dbt_semantic_interfaces.type_enums import MetricType

PERF_INFO_FILE_NAME = "perf_info.json"

@@ -222,12 +222,12 @@ class ManifestLoader:
def __init__(
self,
root_project: RuntimeConfig,
-all_projects: Mapping[str, Project],
+all_projects: Mapping[str, RuntimeConfig],
macro_hook: Optional[Callable[[Manifest], Any]] = None,
file_diff: Optional[FileDiff] = None,
) -> None:
self.root_project: RuntimeConfig = root_project
-self.all_projects: Mapping[str, Project] = all_projects
+self.all_projects: Mapping[str, RuntimeConfig] = all_projects
self.file_diff = file_diff
self.manifest: Manifest = Manifest()
self.new_manifest = self.manifest
@@ -467,6 +467,7 @@ class ManifestLoader:
self.process_model_inferred_primary_keys()
self.check_valid_group_config()
self.check_valid_access_property()
+self.check_valid_snapshot_config()

semantic_manifest = SemanticManifest(self.manifest)
if not semantic_manifest.validate():
@@ -570,11 +571,7 @@ class ManifestLoader:

def check_for_model_deprecations(self):
for node in self.manifest.nodes.values():
-if isinstance(node, ModelNode):
-if (
-node.deprecation_date
-and node.deprecation_date < datetime.datetime.now().astimezone()
-):
+if isinstance(node, ModelNode) and node.is_past_deprecation_date:
warn_or_error(
DeprecatedModel(
model_name=node.name,
@@ -588,7 +585,7 @@ class ManifestLoader:
node.depends_on
for resolved_ref in resolved_model_refs:
if resolved_ref.deprecation_date:
-if resolved_ref.deprecation_date < datetime.datetime.now().astimezone():
+if resolved_ref.is_past_deprecation_date:
event_cls = DeprecatedReference
else:
event_cls = UpcomingReferenceDeprecation
@@ -672,7 +669,7 @@ class ManifestLoader:
# 'parser_types'
def parse_project(
self,
-project: Project,
+project: RuntimeConfig,
parser_files,
parser_types: List[Type[Parser]],
) -> None:
@@ -808,8 +805,12 @@ class ManifestLoader:
plugin_model_nodes = pm.get_nodes().models
for node_arg in plugin_model_nodes.values():
node = ModelNode.from_args(node_arg)
-# node may already exist from package or running project - in which case we should avoid clobbering it with an external node
-if node.unique_id not in self.manifest.nodes:
+# node may already exist from package or running project (even if it is disabled),
+# in which case we should avoid clobbering it with an external node
+if (
+node.unique_id not in self.manifest.nodes
+and node.unique_id not in self.manifest.disabled
+):
self.manifest.add_node_nofile(node)
manifest_nodes_modified = True

@@ -1345,6 +1346,16 @@ class ManifestLoader:
materialization=node.get_materialization(),
)

+def check_valid_snapshot_config(self):
+# Snapshot config can be set in either SQL files or yaml files,
+# so we need to validate afterward.
+for node in self.manifest.nodes.values():
+if node.resource_type != NodeType.Snapshot:
+continue
+if node.created_at < self.started_at:
+continue
+node.config.final_validate()
+
def write_perf_info(self, target_path: str):
path = os.path.join(target_path, PERF_INFO_FILE_NAME)
write_file(path, json.dumps(self._perf_info, cls=dbt.utils.JSONEncoder, indent=4))
|||||||
@@ -204,7 +204,7 @@ class ModelParser(SimpleSQLParser[ModelNode]):
|
|||||||
dbt_parser = PythonParseVisitor(node)
|
dbt_parser = PythonParseVisitor(node)
|
||||||
dbt_parser.visit(tree)
|
dbt_parser.visit(tree)
|
||||||
|
|
||||||
for (func, args, kwargs) in dbt_parser.dbt_function_calls:
|
for func, args, kwargs in dbt_parser.dbt_function_calls:
|
||||||
if func == "get":
|
if func == "get":
|
||||||
num_args = len(args)
|
num_args = len(args)
|
||||||
if num_args == 0:
|
if num_args == 0:
|
||||||
|
|||||||
@@ -968,13 +968,17 @@ class PartialParsing:
|
|||||||
elif unique_id in self.saved_manifest.disabled:
|
elif unique_id in self.saved_manifest.disabled:
|
||||||
self.delete_disabled(unique_id, schema_file.file_id)
|
self.delete_disabled(unique_id, schema_file.file_id)
|
||||||
|
|
||||||
metrics = schema_file.generated_metrics.copy()
|
if schema_file.generated_metrics:
|
||||||
for unique_id in metrics:
|
# If this partial parse file has an old "generated_metrics" list,
|
||||||
|
# call code to fix it up before processing.
|
||||||
|
schema_file.fix_metrics_from_measures()
|
||||||
|
if semantic_model_name in schema_file.metrics_from_measures:
|
||||||
|
for unique_id in schema_file.metrics_from_measures[semantic_model_name]:
|
||||||
if unique_id in self.saved_manifest.metrics:
|
if unique_id in self.saved_manifest.metrics:
|
||||||
self.saved_manifest.metrics.pop(unique_id)
|
self.saved_manifest.metrics.pop(unique_id)
|
||||||
schema_file.generated_metrics.remove(unique_id)
|
|
||||||
elif unique_id in self.saved_manifest.disabled:
|
elif unique_id in self.saved_manifest.disabled:
|
||||||
self.delete_disabled(unique_id, schema_file.file_id)
|
self.delete_disabled(unique_id, schema_file.file_id)
|
||||||
|
del schema_file.metrics_from_measures[semantic_model_name]
|
||||||
|
|
||||||
def delete_schema_unit_test(self, schema_file, unit_test_dict):
|
def delete_schema_unit_test(self, schema_file, unit_test_dict):
|
||||||
unit_test_name = unit_test_dict["name"]
|
unit_test_name = unit_test_dict["name"]
|
||||||
|
|||||||
@@ -1,16 +1,8 @@
|
|||||||
from typing import Any, Dict, List, Optional, Union
|
from typing import Any, Dict, List, Optional, Union
|
||||||
|
|
||||||
from dbt_semantic_interfaces.type_enums import (
|
|
||||||
AggregationType,
|
|
||||||
ConversionCalculationType,
|
|
||||||
DimensionType,
|
|
||||||
EntityType,
|
|
||||||
MetricType,
|
|
||||||
TimeGranularity,
|
|
||||||
)
|
|
||||||
|
|
||||||
from dbt.artifacts.resources import (
|
from dbt.artifacts.resources import (
|
||||||
ConversionTypeParams,
|
ConversionTypeParams,
|
||||||
|
CumulativeTypeParams,
|
||||||
Dimension,
|
Dimension,
|
||||||
DimensionTypeParams,
|
DimensionTypeParams,
|
||||||
Entity,
|
Entity,
|
||||||
@@ -39,9 +31,11 @@ from dbt.context.providers import (
|
|||||||
generate_parse_exposure,
|
generate_parse_exposure,
|
||||||
generate_parse_semantic_models,
|
generate_parse_semantic_models,
|
||||||
)
|
)
|
||||||
|
from dbt.contracts.files import SchemaSourceFile
|
||||||
from dbt.contracts.graph.nodes import Exposure, Group, Metric, SavedQuery, SemanticModel
|
from dbt.contracts.graph.nodes import Exposure, Group, Metric, SavedQuery, SemanticModel
|
||||||
from dbt.contracts.graph.unparsed import (
|
from dbt.contracts.graph.unparsed import (
|
||||||
UnparsedConversionTypeParams,
|
UnparsedConversionTypeParams,
|
||||||
|
UnparsedCumulativeTypeParams,
|
||||||
UnparsedDimension,
|
UnparsedDimension,
|
||||||
UnparsedDimensionTypeParams,
|
UnparsedDimensionTypeParams,
|
||||||
UnparsedEntity,
|
UnparsedEntity,
|
||||||
@@ -64,6 +58,15 @@ from dbt.parser.common import YamlBlock
|
|||||||
from dbt.parser.schemas import ParseResult, SchemaParser, YamlReader
|
from dbt.parser.schemas import ParseResult, SchemaParser, YamlReader
|
||||||
from dbt_common.dataclass_schema import ValidationError
|
from dbt_common.dataclass_schema import ValidationError
|
||||||
from dbt_common.exceptions import DbtInternalError
|
from dbt_common.exceptions import DbtInternalError
|
||||||
|
from dbt_semantic_interfaces.type_enums import (
|
||||||
|
AggregationType,
|
||||||
|
ConversionCalculationType,
|
||||||
|
DimensionType,
|
||||||
|
EntityType,
|
||||||
|
MetricType,
|
||||||
|
PeriodAggregation,
|
||||||
|
TimeGranularity,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def parse_where_filter(
|
def parse_where_filter(
|
||||||
@@ -83,7 +86,7 @@ class ExposureParser(YamlReader):
|
|||||||
self.schema_parser = schema_parser
|
self.schema_parser = schema_parser
|
||||||
self.yaml = yaml
|
self.yaml = yaml
|
||||||
|
|
||||||
def parse_exposure(self, unparsed: UnparsedExposure):
|
def parse_exposure(self, unparsed: UnparsedExposure) -> None:
|
||||||
package_name = self.project.project_name
|
package_name = self.project.project_name
|
||||||
unique_id = f"{NodeType.Exposure}.{package_name}.{unparsed.name}"
|
unique_id = f"{NodeType.Exposure}.{package_name}.{unparsed.name}"
|
||||||
path = self.yaml.path.relative_path
|
path = self.yaml.path.relative_path
|
||||||
@@ -141,6 +144,7 @@ class ExposureParser(YamlReader):
|
|||||||
get_rendered(depends_on_jinja, ctx, parsed, capture_macros=True)
|
get_rendered(depends_on_jinja, ctx, parsed, capture_macros=True)
|
||||||
# parsed now has a populated refs/sources/metrics
|
# parsed now has a populated refs/sources/metrics
|
||||||
|
|
||||||
|
assert isinstance(self.yaml.file, SchemaSourceFile)
|
||||||
if parsed.config.enabled:
|
if parsed.config.enabled:
|
||||||
self.manifest.add_exposure(self.yaml.file, parsed)
|
self.manifest.add_exposure(self.yaml.file, parsed)
|
||||||
else:
|
else:
|
||||||
@@ -169,7 +173,7 @@ class ExposureParser(YamlReader):
|
|||||||
patch_config_dict=precedence_configs,
|
patch_config_dict=precedence_configs,
|
||||||
)
|
)
|
||||||
|
|
||||||
def parse(self):
|
def parse(self) -> None:
|
||||||
for data in self.get_key_dicts():
|
for data in self.get_key_dicts():
|
||||||
try:
|
try:
|
||||||
UnparsedExposure.validate(data)
|
UnparsedExposure.validate(data)
|
||||||
@@ -221,9 +225,19 @@ class MetricParser(YamlReader):
|
|||||||
|
|
||||||
return input_measures
|
return input_measures
|
||||||
|
|
||||||
def _get_time_window(
|
def _get_period_agg(self, unparsed_period_agg: str) -> PeriodAggregation:
|
||||||
self,
|
return PeriodAggregation(unparsed_period_agg)
|
||||||
unparsed_window: Optional[str],
|
|
||||||
|
def _get_optional_grain_to_date(
|
||||||
|
self, unparsed_grain_to_date: Optional[str]
|
||||||
|
) -> Optional[TimeGranularity]:
|
||||||
|
if not unparsed_grain_to_date:
|
||||||
|
return None
|
||||||
|
|
||||||
|
return TimeGranularity(unparsed_grain_to_date)
|
||||||
|
|
||||||
|
def _get_optional_time_window(
|
||||||
|
self, unparsed_window: Optional[str]
|
||||||
) -> Optional[MetricTimeWindow]:
|
) -> Optional[MetricTimeWindow]:
|
||||||
if unparsed_window is not None:
|
if unparsed_window is not None:
|
||||||
parts = unparsed_window.split(" ")
|
parts = unparsed_window.split(" ")
|
||||||
@@ -277,7 +291,7 @@ class MetricParser(YamlReader):
|
|||||||
name=unparsed.name,
|
name=unparsed.name,
|
||||||
filter=parse_where_filter(unparsed.filter),
|
filter=parse_where_filter(unparsed.filter),
|
||||||
alias=unparsed.alias,
|
alias=unparsed.alias,
|
||||||
offset_window=self._get_time_window(unparsed.offset_window),
|
offset_window=self._get_optional_time_window(unparsed.offset_window),
|
||||||
offset_to_grain=offset_to_grain,
|
offset_to_grain=offset_to_grain,
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -311,11 +325,48 @@ class MetricParser(YamlReader):
|
|||||||
conversion_measure=self._get_input_measure(unparsed.conversion_measure),
|
conversion_measure=self._get_input_measure(unparsed.conversion_measure),
|
||||||
entity=unparsed.entity,
|
entity=unparsed.entity,
|
||||||
calculation=ConversionCalculationType(unparsed.calculation),
|
calculation=ConversionCalculationType(unparsed.calculation),
|
||||||
window=self._get_time_window(unparsed.window),
|
window=self._get_optional_time_window(unparsed.window),
|
||||||
constant_properties=unparsed.constant_properties,
|
constant_properties=unparsed.constant_properties,
|
||||||
)
|
)
|
||||||
|
|
||||||
def _get_metric_type_params(self, type_params: UnparsedMetricTypeParams) -> MetricTypeParams:
|
def _get_optional_cumulative_type_params(
|
||||||
|
self, unparsed_metric: UnparsedMetric
|
||||||
|
) -> Optional[CumulativeTypeParams]:
|
||||||
|
unparsed_type_params = unparsed_metric.type_params
|
||||||
|
if unparsed_metric.type.lower() == MetricType.CUMULATIVE.value:
|
||||||
|
if not unparsed_type_params.cumulative_type_params:
|
||||||
|
unparsed_type_params.cumulative_type_params = UnparsedCumulativeTypeParams()
|
||||||
|
|
||||||
|
if (
|
||||||
|
unparsed_type_params.window
|
||||||
|
and not unparsed_type_params.cumulative_type_params.window
|
||||||
|
):
|
||||||
|
unparsed_type_params.cumulative_type_params.window = unparsed_type_params.window
|
||||||
|
if (
|
||||||
|
unparsed_type_params.grain_to_date
|
||||||
|
and not unparsed_type_params.cumulative_type_params.grain_to_date
|
||||||
|
):
|
||||||
|
unparsed_type_params.cumulative_type_params.grain_to_date = (
|
||||||
|
unparsed_type_params.grain_to_date
|
||||||
|
)
|
||||||
|
|
||||||
|
return CumulativeTypeParams(
|
||||||
|
window=self._get_optional_time_window(
|
||||||
|
unparsed_type_params.cumulative_type_params.window
|
||||||
|
),
|
||||||
|
grain_to_date=self._get_optional_grain_to_date(
|
||||||
|
unparsed_type_params.cumulative_type_params.grain_to_date
|
||||||
|
),
|
||||||
|
period_agg=self._get_period_agg(
|
||||||
|
unparsed_type_params.cumulative_type_params.period_agg
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _get_metric_type_params(self, unparsed_metric: UnparsedMetric) -> MetricTypeParams:
|
||||||
|
type_params = unparsed_metric.type_params
|
||||||
|
|
||||||
grain_to_date: Optional[TimeGranularity] = None
|
grain_to_date: Optional[TimeGranularity] = None
|
||||||
if type_params.grain_to_date is not None:
|
if type_params.grain_to_date is not None:
|
||||||
grain_to_date = TimeGranularity(type_params.grain_to_date)
|
grain_to_date = TimeGranularity(type_params.grain_to_date)
|
||||||
@@ -325,17 +376,20 @@ class MetricParser(YamlReader):
|
|||||||
numerator=self._get_optional_metric_input(type_params.numerator),
|
numerator=self._get_optional_metric_input(type_params.numerator),
|
||||||
denominator=self._get_optional_metric_input(type_params.denominator),
|
denominator=self._get_optional_metric_input(type_params.denominator),
|
||||||
expr=str(type_params.expr) if type_params.expr is not None else None,
|
expr=str(type_params.expr) if type_params.expr is not None else None,
|
||||||
window=self._get_time_window(type_params.window),
|
window=self._get_optional_time_window(type_params.window),
|
||||||
grain_to_date=grain_to_date,
|
grain_to_date=grain_to_date,
|
||||||
metrics=self._get_metric_inputs(type_params.metrics),
|
metrics=self._get_metric_inputs(type_params.metrics),
|
||||||
conversion_type_params=self._get_optional_conversion_type_params(
|
conversion_type_params=self._get_optional_conversion_type_params(
|
||||||
type_params.conversion_type_params
|
type_params.conversion_type_params
|
||||||
)
|
),
|
||||||
|
cumulative_type_params=self._get_optional_cumulative_type_params(
|
||||||
|
unparsed_metric=unparsed_metric,
|
||||||
|
),
|
||||||
# input measures are calculated via metric processing post parsing
|
# input measures are calculated via metric processing post parsing
|
||||||
# input_measures=?,
|
# input_measures=?,
|
||||||
)
|
)
|
||||||
|
|
||||||
def parse_metric(self, unparsed: UnparsedMetric, generated: bool = False):
|
def parse_metric(self, unparsed: UnparsedMetric, generated_from: Optional[str] = None) -> None:
|
||||||
package_name = self.project.project_name
|
package_name = self.project.project_name
|
||||||
unique_id = f"{NodeType.Metric}.{package_name}.{unparsed.name}"
|
unique_id = f"{NodeType.Metric}.{package_name}.{unparsed.name}"
|
||||||
path = self.yaml.path.relative_path
|
path = self.yaml.path.relative_path
|
||||||
@@ -380,7 +434,10 @@ class MetricParser(YamlReader):
             description=unparsed.description,
             label=unparsed.label,
             type=MetricType(unparsed.type),
-            type_params=self._get_metric_type_params(unparsed.type_params),
+            type_params=self._get_metric_type_params(unparsed),
+            time_granularity=(
+                TimeGranularity(unparsed.time_granularity) if unparsed.time_granularity else None
+            ),
             filter=parse_where_filter(unparsed.filter),
             meta=unparsed.meta,
             tags=unparsed.tags,
@@ -390,8 +447,9 @@ class MetricParser(YamlReader):
         )

         # if the metric is disabled we do not want it included in the manifest, only in the disabled dict
+        assert isinstance(self.yaml.file, SchemaSourceFile)
         if parsed.config.enabled:
-            self.manifest.add_metric(self.yaml.file, parsed, generated)
+            self.manifest.add_metric(self.yaml.file, parsed, generated_from)
         else:
             self.manifest.add_disabled(self.yaml.file, parsed)

@@ -419,7 +477,7 @@ class MetricParser(YamlReader):
         )
         return config

-    def parse(self):
+    def parse(self) -> None:
         for data in self.get_key_dicts():
             try:
                 UnparsedMetric.validate(data)
@@ -436,7 +494,7 @@ class GroupParser(YamlReader):
         self.schema_parser = schema_parser
         self.yaml = yaml

-    def parse_group(self, unparsed: UnparsedGroup):
+    def parse_group(self, unparsed: UnparsedGroup) -> None:
         package_name = self.project.project_name
         unique_id = f"{NodeType.Group}.{package_name}.{unparsed.name}"
         path = self.yaml.path.relative_path
@@ -451,6 +509,7 @@ class GroupParser(YamlReader):
             owner=unparsed.owner,
         )

+        assert isinstance(self.yaml.file, SchemaSourceFile)
         self.manifest.add_group(self.yaml.file, parsed)

     def parse(self):
@@ -545,7 +604,12 @@ class SemanticModelParser(YamlReader):
             )
         return measures

-    def _create_metric(self, measure: UnparsedMeasure, enabled: bool) -> None:
+    def _create_metric(
+        self,
+        measure: UnparsedMeasure,
+        enabled: bool,
+        semantic_model_name: str,
+    ) -> None:
         unparsed_metric = UnparsedMetric(
             name=measure.name,
             label=measure.name,
@@ -556,7 +620,7 @@ class SemanticModelParser(YamlReader):
         )

         parser = MetricParser(self.schema_parser, yaml=self.yaml)
-        parser.parse_metric(unparsed=unparsed_metric, generated=True)
+        parser.parse_metric(unparsed=unparsed_metric, generated_from=semantic_model_name)

     def _generate_semantic_model_config(
         self, target: UnparsedSemanticModel, fqn: List[str], package_name: str, rendered: bool
@@ -583,7 +647,7 @@ class SemanticModelParser(YamlReader):

         return config

-    def parse_semantic_model(self, unparsed: UnparsedSemanticModel):
+    def parse_semantic_model(self, unparsed: UnparsedSemanticModel) -> None:
         package_name = self.project.project_name
         unique_id = f"{NodeType.SemanticModel}.{package_name}.{unparsed.name}"
         path = self.yaml.path.relative_path
@@ -643,6 +707,7 @@ class SemanticModelParser(YamlReader):

         # if the semantic model is disabled we do not want it included in the manifest,
         # only in the disabled dict
+        assert isinstance(self.yaml.file, SchemaSourceFile)
         if parsed.config.enabled:
             self.manifest.add_semantic_model(self.yaml.file, parsed)
         else:
@@ -651,9 +716,11 @@ class SemanticModelParser(YamlReader):
         # Create a metric for each measure with `create_metric = True`
         for measure in unparsed.measures:
             if measure.create_metric is True:
-                self._create_metric(measure=measure, enabled=parsed.config.enabled)
+                self._create_metric(
+                    measure=measure, enabled=parsed.config.enabled, semantic_model_name=parsed.name
+                )

-    def parse(self):
+    def parse(self) -> None:
         for data in self.get_key_dicts():
             try:
                 UnparsedSemanticModel.validate(data)
@@ -779,6 +846,7 @@ class SavedQueryParser(YamlReader):
             delattr(export, "relation_name")

         # Only add thes saved query if it's enabled, otherwise we track it with other diabled nodes
+        assert isinstance(self.yaml.file, SchemaSourceFile)
         if parsed.config.enabled:
             self.manifest.add_saved_query(self.yaml.file, parsed)
         else:
@@ -5,10 +5,15 @@ from dataclasses import dataclass, field
 from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, Type, TypeVar

 from dbt import deprecations
+from dbt.artifacts.resources import RefArgs
+from dbt.artifacts.resources.v1.model import TimeSpine
+from dbt.clients.jinja_static import statically_parse_ref_or_source
 from dbt.clients.yaml_helper import load_yaml_text
+from dbt.config import RuntimeConfig
 from dbt.context.configured import SchemaYamlVars, generate_schema_yml_context
 from dbt.context.context_config import ContextConfig
-from dbt.contracts.files import SchemaSourceFile
+from dbt.contracts.files import SchemaSourceFile, SourceFile
+from dbt.contracts.graph.manifest import Manifest
 from dbt.contracts.graph.nodes import (
     ModelNode,
     ParsedMacroPatch,
@@ -64,18 +69,20 @@ from dbt_common.events.functions import warn_or_error
 from dbt_common.exceptions import DbtValidationError
 from dbt_common.utils import deep_merge

-schema_file_keys = (
-    "models",
-    "seeds",
-    "snapshots",
-    "sources",
-    "macros",
-    "analyses",
-    "exposures",
-    "metrics",
-    "semantic_models",
-    "saved_queries",
-)
+schema_file_keys_to_resource_types = {
+    "models": NodeType.Model,
+    "seeds": NodeType.Seed,
+    "snapshots": NodeType.Snapshot,
+    "sources": NodeType.Source,
+    "macros": NodeType.Macro,
+    "analyses": NodeType.Analysis,
+    "exposures": NodeType.Exposure,
+    "metrics": NodeType.Metric,
+    "semantic_models": NodeType.SemanticModel,
+    "saved_queries": NodeType.SavedQuery,
+}
+
+schema_file_keys = list(schema_file_keys_to_resource_types.keys())


 # ===============================================================================
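The tuple of schema file keys becomes a mapping from each top-level schema YAML key to its NodeType, and schema_file_keys is now derived from that mapping. A hedged usage sketch, assuming dbt-core is installed and that the internal import paths dbt.parser.schemas and dbt.node_types are current:

    # Sketch: resolve the resource type for a schema-file section, which the
    # patch parsers below use to scope disabled-node lookups by node type.
    from dbt.node_types import NodeType
    from dbt.parser.schemas import schema_file_keys_to_resource_types

    resource_type = schema_file_keys_to_resource_types["seeds"]
    assert resource_type == NodeType.Seed
    # A patch under the `seeds:` key then only matches disabled seed nodes:
    #     manifest.disabled_lookup.find(name, package, resource_types=[resource_type])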
@@ -117,6 +124,11 @@ def yaml_from_file(source_file: SchemaSourceFile) -> Optional[Dict[str, Any]]:
     if contents is None:
         return contents

+    if not isinstance(contents, dict):
+        raise DbtValidationError(
+            f"Contents of file '{source_file.original_file_path}' are not valid. Dictionary expected."
+        )
+
     # When loaded_loaded_at_field is defined as None or null, it shows up in
     # the dict but when it is not defined, it does not show up in the dict
     # We need to capture this to be able to override source level settings later.
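The added isinstance check turns a schema file whose top level is not a mapping (for example, a file that starts with a list item) into an immediate DbtValidationError naming the file, instead of an obscure failure later in parsing. A standalone sketch of the same check, assuming PyYAML; the file contents are illustrative:

    import yaml  # PyYAML, only to show the shape of the parsed contents

    good = yaml.safe_load("version: 2\nmodels:\n  - name: my_model\n")
    bad = yaml.safe_load("- name: my_model\n")  # top level is a list, not a mapping

    for contents in (good, bad):
        if not isinstance(contents, dict):
            # yaml_from_file raises DbtValidationError here, naming the offending file
            print(f"would raise: dictionary expected, got {type(contents).__name__}")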
@@ -137,9 +149,9 @@ def yaml_from_file(source_file: SchemaSourceFile) -> Optional[Dict[str, Any]]:
 class SchemaParser(SimpleParser[YamlBlock, ModelNode]):
     def __init__(
         self,
-        project,
-        manifest,
-        root_project,
+        project: RuntimeConfig,
+        manifest: Manifest,
+        root_project: RuntimeConfig,
     ) -> None:
         super().__init__(project, manifest, root_project)

@@ -277,33 +289,33 @@ class ParseResult:
 # PatchParser, SemanticModelParser, SavedQueryParser, UnitTestParser
 class YamlReader(metaclass=ABCMeta):
     def __init__(self, schema_parser: SchemaParser, yaml: YamlBlock, key: str) -> None:
-        self.schema_parser = schema_parser
+        self.schema_parser: SchemaParser = schema_parser
         # key: models, seeds, snapshots, sources, macros,
         # analyses, exposures, unit_tests
-        self.key = key
-        self.yaml = yaml
-        self.schema_yaml_vars = SchemaYamlVars()
+        self.key: str = key
+        self.yaml: YamlBlock = yaml
+        self.schema_yaml_vars: SchemaYamlVars = SchemaYamlVars()
         self.render_ctx = generate_schema_yml_context(
             self.schema_parser.root_project,
             self.schema_parser.project.project_name,
             self.schema_yaml_vars,
         )
-        self.renderer = SchemaYamlRenderer(self.render_ctx, self.key)
+        self.renderer: SchemaYamlRenderer = SchemaYamlRenderer(self.render_ctx, self.key)

     @property
-    def manifest(self):
+    def manifest(self) -> Manifest:
         return self.schema_parser.manifest

     @property
-    def project(self):
+    def project(self) -> RuntimeConfig:
         return self.schema_parser.project

     @property
-    def default_database(self):
+    def default_database(self) -> str:
         return self.schema_parser.default_database

     @property
-    def root_project(self):
+    def root_project(self) -> RuntimeConfig:
         return self.schema_parser.root_project

     # for the different schema subparsers ('models', 'source', etc)
@@ -355,7 +367,7 @@ class YamlReader(metaclass=ABCMeta):
         return dct

     @abstractmethod
-    def parse(self) -> ParseResult:
+    def parse(self) -> Optional[ParseResult]:
         raise NotImplementedError("parse is abstract")


@@ -420,7 +432,9 @@ class SourceParser(YamlReader):
                 fqn=fqn,
                 name=f"{source.name}_{table.name}",
             )
-            self.manifest.add_source(self.yaml.file, source_def)
+            assert isinstance(self.yaml.file, SchemaSourceFile)
+            source_file: SchemaSourceFile = self.yaml.file
+            self.manifest.add_source(source_file, source_def)


 # This class has two subclasses: NodePatchParser and MacroPatchParser
@@ -510,7 +524,7 @@ class PatchParser(YamlReader, Generic[NonSourceTarget, Parsed]):

     # We want to raise an error if some attributes are in two places, and move them
     # from toplevel to config if necessary
-    def normalize_attribute(self, data, path, attribute):
+    def normalize_attribute(self, data, path, attribute) -> None:
         if attribute in data:
             if "config" in data and attribute in data["config"]:
                 raise ParsingError(
@@ -524,30 +538,37 @@ class PatchParser(YamlReader, Generic[NonSourceTarget, Parsed]):
                 data["config"] = {}
             data["config"][attribute] = data.pop(attribute)

-    def normalize_meta_attribute(self, data, path):
+    def normalize_meta_attribute(self, data, path) -> None:
         return self.normalize_attribute(data, path, "meta")

-    def normalize_docs_attribute(self, data, path):
+    def normalize_docs_attribute(self, data, path) -> None:
         return self.normalize_attribute(data, path, "docs")

-    def normalize_group_attribute(self, data, path):
+    def normalize_group_attribute(self, data, path) -> None:
         return self.normalize_attribute(data, path, "group")

-    def normalize_contract_attribute(self, data, path):
+    def normalize_contract_attribute(self, data, path) -> None:
         return self.normalize_attribute(data, path, "contract")

-    def normalize_access_attribute(self, data, path):
+    def normalize_access_attribute(self, data, path) -> None:
         return self.normalize_attribute(data, path, "access")

-    def validate_data_tests(self, data):
+    @property
+    def is_root_project(self) -> bool:
+        if self.root_project.project_name == self.project.project_name:
+            return True
+        return False
+
+    def validate_data_tests(self, data) -> None:
         # Rename 'tests' -> 'data_tests' at both model-level and column-level
         # Raise a validation error if the user has defined both names
-        def validate_and_rename(data):
+        def validate_and_rename(data, is_root_project: bool) -> None:
             if data.get("tests"):
                 if "tests" in data and "data_tests" in data:
                     raise ValidationError(
                         "Invalid test config: cannot have both 'tests' and 'data_tests' defined"
                     )
+                if is_root_project:
                     deprecations.warn(
                         "project-test-config",
                         deprecated_path="tests",
@@ -556,22 +577,22 @@ class PatchParser(YamlReader, Generic[NonSourceTarget, Parsed]):
                 data["data_tests"] = data.pop("tests")

         # model-level tests
-        validate_and_rename(data)
+        validate_and_rename(data, self.is_root_project)

         # column-level tests
         if data.get("columns"):
             for column in data["columns"]:
-                validate_and_rename(column)
+                validate_and_rename(column, self.is_root_project)

         # versioned models
         if data.get("versions"):
             for version in data["versions"]:
-                validate_and_rename(version)
+                validate_and_rename(version, self.is_root_project)
                 if version.get("columns"):
                     for column in version["columns"]:
-                        validate_and_rename(column)
+                        validate_and_rename(column, self.is_root_project)

-    def patch_node_config(self, node, patch):
+    def patch_node_config(self, node, patch) -> None:
         if "access" in patch.config:
             if AccessType.is_valid(patch.config["access"]):
                 patch.config["access"] = AccessType(patch.config["access"])
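validate_and_rename now threads is_root_project through, so the tests to data_tests deprecation warning only fires for the root project while the rename itself still happens everywhere. A simplified standalone sketch of that rename (the warning call is a stand-in for dbt's deprecations.warn; everything else mirrors the hunks above):

    import warnings

    def validate_and_rename(data: dict, is_root_project: bool) -> None:
        # Simplified stand-in for the nested helper in validate_data_tests.
        if data.get("tests"):
            if "tests" in data and "data_tests" in data:
                raise ValueError(
                    "Invalid test config: cannot have both 'tests' and 'data_tests' defined"
                )
            if is_root_project:
                warnings.warn("'tests' is deprecated; use 'data_tests'", DeprecationWarning)
            data["data_tests"] = data.pop("tests")

    model = {"name": "orders", "tests": ["unique"]}
    validate_and_rename(model, is_root_project=True)
    assert model == {"name": "orders", "data_tests": ["unique"]}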
@@ -601,9 +622,16 @@ class NodePatchParser(PatchParser[NodeTarget, ParsedNodePatch], Generic[NodeTarg
         # could possibly skip creating one. Leaving here for now for
         # code consistency.
         deprecation_date: Optional[datetime.datetime] = None
+        time_spine: Optional[TimeSpine] = None
         if isinstance(block.target, UnparsedModelUpdate):
             deprecation_date = block.target.deprecation_date
+            time_spine = (
+                TimeSpine(
+                    standard_granularity_column=block.target.time_spine.standard_granularity_column
+                )
+                if block.target.time_spine
+                else None
+            )
         patch = ParsedNodePatch(
             name=block.target.name,
             original_file_path=block.target.original_file_path,
@@ -619,6 +647,7 @@ class NodePatchParser(PatchParser[NodeTarget, ParsedNodePatch], Generic[NodeTarg
             latest_version=None,
             constraints=block.target.constraints,
             deprecation_date=deprecation_date,
+            time_spine=time_spine,
         )
         assert isinstance(self.yaml.file, SchemaSourceFile)
         source_file: SchemaSourceFile = self.yaml.file
@@ -651,7 +680,10 @@ class NodePatchParser(PatchParser[NodeTarget, ParsedNodePatch], Generic[NodeTarg
         # handle disabled nodes
         if unique_id is None:
             # Node might be disabled. Following call returns list of matching disabled nodes
-            found_nodes = self.manifest.disabled_lookup.find(patch.name, patch.package_name)
+            resource_type = schema_file_keys_to_resource_types[patch.yaml_key]
+            found_nodes = self.manifest.disabled_lookup.find(
+                patch.name, patch.package_name, resource_types=[resource_type]
+            )
             if found_nodes:
                 if len(found_nodes) > 1 and patch.config.get("enabled"):
                     # There are multiple disabled nodes for this model and the schema file wants to enable one.
@@ -701,7 +733,7 @@ class NodePatchParser(PatchParser[NodeTarget, ParsedNodePatch], Generic[NodeTarg

         self.patch_node_properties(node, patch)

-    def patch_node_properties(self, node, patch: "ParsedNodePatch"):
+    def patch_node_properties(self, node, patch: "ParsedNodePatch") -> None:
         """Given a ParsedNodePatch, add the new information to the node."""
         # explicitly pick out the parts to update so we don't inadvertently
         # step on the model name or anything
@@ -772,7 +804,7 @@ class ModelPatchParser(NodePatchParser[UnparsedModelUpdate]):
                     versioned_model_name, target.package_name, None
                 )

-                versioned_model_node = None
+                versioned_model_node: Optional[ModelNode] = None
                 add_node_nofile_fn: Callable

                 # If this is the latest version, it's allowed to define itself in a model file name that doesn't have a suffix
@@ -783,7 +815,9 @@ class ModelPatchParser(NodePatchParser[UnparsedModelUpdate]):

                 if versioned_model_unique_id is None:
                     # Node might be disabled. Following call returns list of matching disabled nodes
-                    found_nodes = self.manifest.disabled_lookup.find(versioned_model_name, None)
+                    found_nodes = self.manifest.disabled_lookup.find(
+                        versioned_model_name, None, resource_types=[NodeType.Model]
+                    )
                     if found_nodes:
                         if len(found_nodes) > 1 and target.config.get("enabled"):
                             # There are multiple disabled nodes for this model and the schema file wants to enable one.
@@ -796,12 +830,17 @@ class ModelPatchParser(NodePatchParser[UnparsedModelUpdate]):
                                 "in `dbt_project.yml` or in the sql files."
                             )
                             raise ParsingError(msg)
-                        versioned_model_node = self.manifest.disabled.pop(
-                            found_nodes[0].unique_id
-                        )[0]
+                        # We know that there's only one node in the disabled list because
+                        # otherwise we would have raised the error above
+                        found_node = found_nodes[0]
+                        self.manifest.disabled.pop(found_node.unique_id)
+                        assert isinstance(found_node, ModelNode)
+                        versioned_model_node = found_node
                         add_node_nofile_fn = self.manifest.add_disabled_nofile
                     else:
-                        versioned_model_node = self.manifest.nodes.pop(versioned_model_unique_id)
+                        found_node = self.manifest.nodes.pop(versioned_model_unique_id)
+                        assert isinstance(found_node, ModelNode)
+                        versioned_model_node = found_node
                         add_node_nofile_fn = self.manifest.add_node_nofile

                 if versioned_model_node is None:
@@ -820,12 +859,12 @@ class ModelPatchParser(NodePatchParser[UnparsedModelUpdate]):
                     f"model.{target.package_name}.{target.name}.{unparsed_version.formatted_v}"
                 )
                 # update source file.nodes with new unique_id
-                self.manifest.files[versioned_model_node.file_id].nodes.remove(
-                    versioned_model_node_unique_id_old
-                )
-                self.manifest.files[versioned_model_node.file_id].nodes.append(
-                    versioned_model_node.unique_id
-                )
+                model_source_file = self.manifest.files[versioned_model_node.file_id]
+                assert isinstance(model_source_file, SourceFile)
+                # because of incomplete test setup, check before removing
+                if versioned_model_node_unique_id_old in model_source_file.nodes:
+                    model_source_file.nodes.remove(versioned_model_node_unique_id_old)
+                model_source_file.nodes.append(versioned_model_node.unique_id)

                 # update versioned node fqn
                 versioned_model_node.fqn[-1] = target.name
@@ -877,8 +916,13 @@ class ModelPatchParser(NodePatchParser[UnparsedModelUpdate]):
     def _target_type(self) -> Type[UnparsedModelUpdate]:
         return UnparsedModelUpdate

-    def patch_node_properties(self, node, patch: "ParsedNodePatch"):
+    def patch_node_properties(self, node, patch: "ParsedNodePatch") -> None:
         super().patch_node_properties(node, patch)
+
+        # Remaining patch properties are only relevant to ModelNode objects
+        if not isinstance(node, ModelNode):
+            return
+
         node.version = patch.version
         node.latest_version = patch.latest_version
         node.deprecation_date = patch.deprecation_date
@@ -892,9 +936,10 @@ class ModelPatchParser(NodePatchParser[UnparsedModelUpdate]):
                 )
         # These two will have to be reapplied after config is built for versioned models
         self.patch_constraints(node, patch.constraints)
+        self.patch_time_spine(node, patch.time_spine)
         node.build_contract_checksum()

-    def patch_constraints(self, node, constraints):
+    def patch_constraints(self, node: ModelNode, constraints: List[Dict[str, Any]]) -> None:
         contract_config = node.config.get("contract")
         if contract_config.enforced is True:
             self._validate_constraint_prerequisites(node)
@@ -909,8 +954,33 @@ class ModelPatchParser(NodePatchParser[UnparsedModelUpdate]):

         self._validate_pk_constraints(node, constraints)
         node.constraints = [ModelLevelConstraint.from_dict(c) for c in constraints]
+        self._process_constraints_refs_and_sources(node)

-    def _validate_pk_constraints(self, model_node: ModelNode, constraints: List[Dict[str, Any]]):
+    def _process_constraints_refs_and_sources(self, model_node: ModelNode) -> None:
+        """
+        Populate model_node.refs and model_node.sources based on foreign-key constraint references,
+        whether defined at the model-level or column-level.
+        """
+        for constraint in model_node.all_constraints:
+            if constraint.type == ConstraintType.foreign_key and constraint.to:
+                try:
+                    ref_or_source = statically_parse_ref_or_source(constraint.to)
+                except ParsingError:
+                    raise ParsingError(
+                        f"Invalid 'ref' or 'source' syntax on foreign key constraint 'to' on model {model_node.name}: {constraint.to}."
+                    )
+
+                if isinstance(ref_or_source, RefArgs):
+                    model_node.refs.append(ref_or_source)
+                else:
+                    model_node.sources.append(ref_or_source)
+
+    def patch_time_spine(self, node: ModelNode, time_spine: Optional[TimeSpine]) -> None:
+        node.time_spine = time_spine
+
+    def _validate_pk_constraints(
+        self, model_node: ModelNode, constraints: List[Dict[str, Any]]
+    ) -> None:
         errors = []
         # check for primary key constraints defined at the column level
         pk_col: List[str] = []
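With _process_constraints_refs_and_sources, a foreign-key constraint whose `to` field uses ref() or source() syntax now contributes to the model's refs and sources, so the referenced node becomes a real dependency. A simplified sketch of the static parse, with a regex stand-in for dbt's statically_parse_ref_or_source (the constraint values below are illustrative):

    import re

    # Stand-in for statically_parse_ref_or_source: recognizes ref('name'),
    # ref('package', 'name'), and source('source_name', 'table_name').
    REF_RE = re.compile(r"^ref\(\s*'([^']+)'(?:\s*,\s*'([^']+)')?\s*\)$")
    SOURCE_RE = re.compile(r"^source\(\s*'([^']+)'\s*,\s*'([^']+)'\s*\)$")

    def parse_constraint_to(value: str):
        if m := REF_RE.match(value.strip()):
            return ("ref", m.group(2) or m.group(1))  # model name (package ignored here)
        if m := SOURCE_RE.match(value.strip()):
            return ("source", m.group(1), m.group(2))
        raise ValueError(f"Invalid 'ref' or 'source' syntax: {value}")

    print(parse_constraint_to("ref('orders')"))            # ('ref', 'orders')
    print(parse_constraint_to("source('raw', 'orders')"))  # ('source', 'raw', 'orders')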
@@ -943,7 +1013,7 @@ class ModelPatchParser(NodePatchParser[UnparsedModelUpdate]):
                 + "\n".join(errors)
             )

-    def _validate_constraint_prerequisites(self, model_node: ModelNode):
+    def _validate_constraint_prerequisites(self, model_node: ModelNode) -> None:
         column_warn_unsupported = [
             constraint.warn_unsupported
             for column in model_node.columns.values()
Some files were not shown because too many files have changed in this diff