forked from repo-mirrors/dbt-core
Compare commits
204 Commits
main...dependabot
f5c1a16d72
07748039ad
64b58ec628
1e713db2fa
6b7b1ad74b
3e31117ba1
451b745aea
d27232a946
b1705fb6f3
0e50851fa6
b75d5e701e
f8b1a6dcd1
9010537499
56d3c9318b
1fcce443ba
de03d6f44f
5db78ca6dd
ada9e63c13
69d19eb5fc
55bb3c304a
693564de40
04a3df7324
31d974f5eb
c1f64e216f
8fa6e037d0
e1c98e8123
9955ea760a
fdd0546700
45f21a7cda
f250b503d5
aa42ff8986
3c2fdfe735
303c63ccc8
17ec11ad30
65598f3dc6
240a6056fb
7cd8935b13
cd5d4be7ab
5a23894584
70ad9319d2
8873581c5a
1ffd059442
091ba5fe0b
6bbcce1f1c
0fff5760ff
f4988c62e3
2e6d4f493d
3e593600e0
87584c73b0
709bd11c71
f7f53732b2
32b8097a1f
36f1143c31
cf7a465338
465aa0c2fc
a0284edb6b
d2bfb4e215
38443640ce
86e0ad49aa
972eb23d03
f56c3868cf
66fc546766
c71d5f6665
6e0564a98b
99827ea220
0db83d0abd
98711cec75
4a8f9c181c
5165716e3d
65d428004a
14fc39a76f
8b4e2a138c
a11ee322ae
db8ca25da9
c264a7f2b9
da6f0a1bd7
c643a1d482
0f8f42639d
ec2cf9b561
c6b7655b65
3e80ad7cc7
1efad4e68e
f5e0a3b1b3
a64b5be25b
b31718a31f
f6d83c765c
5b3b22a2e9
a9b26d03ce
31cb5a9b72
e5dd4c57a6
e7a1c6c315
e355be6186
12850a36ec
010411fed3
f64a4883eb
2883933549
fe9c78eed8
a5ec58dab9
29a79557d5
35fc3fdda2
8931262fa2
85d31db1d4
d48476a08d
02f695b423
3c95db9c00
fec20ff914
de38bc9b0d
f4114130c9
e920053306
511ff8e0e9
0220941849
7594d42e02
bd08d13ddc
5095e8d1e8
a1958c1193
2a4da100ff
9c91ab27b1
3f56cbce5f
7cca8470e0
c82ceaaf39
e2e86b788c
6b747fe801
9e6facc4d1
5cd966cafa
47d5d99693
359b195d23
2a64b7365f
c6aeb4a291
5001e4f0e1
61648b5ed2
4aa5169212
729caf0d5e
f26d82217e
e264675db7
300aa09fc5
493008417c
906e07c1f2
6a954e2d24
3b724acc54
b0ca1256ae
9d7820c356
1fc193167d
d9f96a95c1
138a2acf84
88ada4aa31
77d8e3262a
94b6ae13b3
f7c4c3c9cc
71a93b0cd3
7bdf27af31
e60b41d9fa
2ba765d360
93e27548ce
aa89740311
aa306693a5
7041e5822f
a08255e4cb
2cde93bf63
f29836fcf3
7f32e42230
55e0df181f
588cbabe94
5f873da929
fdabe9534c
c0423707b0
48d9afa677
d71f309c1e
cb323ef78c
22bc1c374e
31881d2a3b
1dcdcd2f52
3de3b827bf
8a8857a85c
e4d5a4e777
b414ef2cc5
57e279cc1b
2eb1a5c3ea
dcc9a0ca29
892c545985
a8702b8374
1592987de8
710600546a
0bf38ce294
459d156e85
95c090bed0
f2222d2621
97ffc37405
bf18b59845
88e953e8aa
6076cf7114
a1757934ef
6c61cb7f7a
4b1f1c4029
7df04b0fe4
662101590d
fc6167a2ee
983cbb4f28
c9582c2323
03fdb4c157
afe25a99fe
e32b8a90ac
1472b86ee2
ff6745c795
fdfe03d561
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 1.10.0a1
+current_version = 1.11.0a1
 parse = (?P<major>[\d]+) # major version number
     \.(?P<minor>[\d]+) # minor version number
     \.(?P<patch>[\d]+) # patch version number
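The bump above is driven by the `parse` pattern, whose named groups decompose the version string. A minimal Python sketch of the visible portion (the prerelease groups that handle the `a1` suffix are truncated in the hunk and omitted here):

```python
import re

# The three capture groups shown in the hunk, compiled with re.VERBOSE so the
# inline comments are tolerated. This is a sketch for illustration only; the
# real config continues with prerelease groups not visible above.
parse = re.compile(
    r"""
    (?P<major>[\d]+)      # major version number
    \.(?P<minor>[\d]+)    # minor version number
    \.(?P<patch>[\d]+)    # patch version number
    """,
    re.VERBOSE,
)

match = parse.match("1.11.0a1")
assert match is not None
print(match.group("major", "minor", "patch"))  # ('1', '11', '0')
```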

@@ -3,6 +3,9 @@
 For information on prior major and minor releases, see their changelogs:

+* [1.10](https://github.com/dbt-labs/dbt-core/blob/1.10.latest/CHANGELOG.md)
+* [1.9](https://github.com/dbt-labs/dbt-core/blob/1.9.latest/CHANGELOG.md)
+* [1.8](https://github.com/dbt-labs/dbt-core/blob/1.8.latest/CHANGELOG.md)
 * [1.7](https://github.com/dbt-labs/dbt-core/blob/1.7.latest/CHANGELOG.md)
 * [1.6](https://github.com/dbt-labs/dbt-core/blob/1.6.latest/CHANGELOG.md)
 * [1.5](https://github.com/dbt-labs/dbt-core/blob/1.5.latest/CHANGELOG.md)

.changes/unreleased/Dependencies-20250616-144408.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Dependencies
+body: Bump minimum jsonschema version to `4.19.1`
+time: 2025-06-16T14:44:08.512306-05:00
+custom:
+  Author: QMalcolm
+  Issue: "11740"

.changes/unreleased/Dependencies-20250620-123600.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Dependencies
+body: Allow for either pydantic v1 and v2
+time: 2025-06-20T12:36:00.196384-05:00
+custom:
+  Author: QMalcolm
+  Issue: "11634"

.changes/unreleased/Dependencies-20250702-144720.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Dependencies
+body: Bump dbt-common minimum to 1.25.1
+time: 2025-07-02T14:47:20.772002-05:00
+custom:
+  Author: QMalcolm
+  Issue: "11789"

.changes/unreleased/Dependencies-20250709-132213.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Dependencies
+body: Upgrade to dbt-semantic-interfaces==0.9.0 for more robust saved query support.
+time: 2025-07-09T13:22:13.688162-07:00
+custom:
+  Author: courtneyholcomb
+  Issue: "11809"

.changes/unreleased/Dependencies-20250811-011224.yaml (new file)
@@ -0,0 +1,6 @@
+kind: "Dependencies"
+body: "Bump actions/download-artifact from 4 to 5"
+time: 2025-08-11T01:12:24.00000Z
+custom:
+  Author: dependabot[bot]
+  Issue: 11910

@@ -1,6 +0,0 @@
-kind: Features
-body: Add new hard_deletes="new_record" mode for snapshots.
-time: 2024-11-04T12:00:53.95191-05:00
-custom:
-  Author: peterallenwebb
-  Issue: "10235"

@@ -1,6 +0,0 @@
-kind: Features
-body: Add `batch` context object to model jinja context
-time: 2024-11-21T12:56:30.715473-06:00
-custom:
-  Author: QMalcolm
-  Issue: "11025"

.changes/unreleased/Features-20250529-085311.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Features
+body: Add file_format to catalog integration config
+time: 2025-05-29T08:53:11.64904-07:00
+custom:
+  Author: colin-rogers-dbt
+  Issue: "11695"

.changes/unreleased/Features-20250611-160217.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Features
+body: "11561"
+time: 2025-06-11T16:02:17.334525-04:00
+custom:
+  Author: michelleark
+  Issue: deprecate --models,--model, and -m flags

.changes/unreleased/Features-20250617-142516.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Features
+body: Update jsonschemas with builtin data test properties and exposure configs in
+  dbt_project.yml for more accurate deprecations
+time: 2025-06-17T14:25:16.976867-04:00
+custom:
+  Author: michelleark
+  Issue: "11335"

.changes/unreleased/Features-20250623-113130.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Features
+body: Support loaded_at_query and loaded_at_field on source and table configs
+time: 2025-06-23T11:31:30.897805-04:00
+custom:
+  Author: michelleark
+  Issue: "11659"

.changes/unreleased/Features-20250625-151818.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Features
+body: Begin validating configs from model sql files
+time: 2025-06-25T15:18:18.164-05:00
+custom:
+  Author: QMalcolm
+  Issue: "11727"

.changes/unreleased/Features-20250701-164957.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Features
+body: Deprecate `overrides` property for sources
+time: 2025-07-01T16:49:57.979871-05:00
+custom:
+  Author: peterallenwebb
+  Issue: "11566"

.changes/unreleased/Features-20250703-175341.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Features
+body: Create constrained namespace for dbt engine env vars
+time: 2025-07-03T17:53:41.406701-05:00
+custom:
+  Author: QMalcolm
+  Issue: "11340"

.changes/unreleased/Features-20250714-232524.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Features
+body: Gate jsonschema validations by adapter
+time: 2025-07-14T23:25:24.475471-05:00
+custom:
+  Author: QMalcolm
+  Issue: "11680"

.changes/unreleased/Features-20250721-173100.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Features
+body: Deprecate top-level argument properties in generic tests
+time: 2025-07-21T17:31:00.960402-04:00
+custom:
+  Author: michelleark
+  Issue: "11847"

.changes/unreleased/Features-20250728-115443.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Features
+body: Deprecate {{ modules.itertools }} usage
+time: 2025-07-28T11:54:43.28275-04:00
+custom:
+  Author: michelleark
+  Issue: "11725"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: dbt retry does not respect --threads
-time: 2024-08-22T12:21:32.358066+05:30
-custom:
-  Author: donjin-master
-  Issue: "10584"

@@ -1,6 +0,0 @@
-kind: Fixes
-body: Catch DbtRuntimeError for hooks
-time: 2024-11-21T18:17:39.753235Z
-custom:
-  Author: aranke
-  Issue: "11012"

.changes/unreleased/Fixes-20250528-092055.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Don't warn for metricflow_time_spine with non-day grain
+time: 2025-05-28T09:20:55.866514-07:00
+custom:
+  Author: courtneyholcomb
+  Issue: "11690"

.changes/unreleased/Fixes-20250530-005804.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix source freshness set via config to handle explicit nulls
+time: 2025-05-30T00:58:04.94133-05:00
+custom:
+  Author: QMalcolm
+  Issue: "11685"

.changes/unreleased/Fixes-20250605-110645.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Fixes
+body: Ensure build_after is present in model freshness in parsing, otherwise skip
+  freshness definition
+time: 2025-06-05T11:06:45.329942-07:00
+custom:
+  Author: QMalcolm
+  Issue: "11709"

.changes/unreleased/Fixes-20250609-175239.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Ensure source node `.freshness` is equal to node's `.config.freshness`
+time: 2025-06-09T17:52:39.978403-05:00
+custom:
+  Author: QMalcolm
+  Issue: "11717"

.changes/unreleased/Fixes-20250610-211241.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: ignore invalid model freshness configs in inline model configs
+time: 2025-06-10T21:12:41.972614-07:00
+custom:
+  Author: colin-rogers-dbt
+  Issue: "11728"

.changes/unreleased/Fixes-20250612-145159.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix store_failures hierarachical config parsing
+time: 2025-06-12T14:51:59.358498-04:00
+custom:
+  Author: michelleark
+  Issue: "10165"

.changes/unreleased/Fixes-20250616-085600.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: "Remove model freshness property support in favor of config level support"
+time: 2025-06-16T08:56:00.641553-05:00
+custom:
+  Author: QMalcolm
+  Issue: "11713"

.changes/unreleased/Fixes-20250624-091258.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Bump dbt-common to 1.25.0 to access WarnErrorOptionsV2
+time: 2025-06-24T09:12:58.904713-04:00
+custom:
+  Author: michelleark
+  Issue: "11755"

.changes/unreleased/Fixes-20250624-152047.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: ensure consistent casing in column names while processing user unit tests
+time: 2025-06-24T15:20:47.589212-07:00
+custom:
+  Author: colin-rogers-dbt
+  Issue: "11770"

.changes/unreleased/Fixes-20250707-103418.yaml (new file)
@@ -0,0 +1,7 @@
+kind: Fixes
+body: Update jsonschema definitions with nested config defs, cloud info, and dropping
+  source overrides
+time: 2025-07-07T10:34:18.982725-05:00
+custom:
+  Author: QMalcolm
+  Issue: N/A

.changes/unreleased/Fixes-20250710-170148.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Make `GenericJSONSchemaValidationDeprecation` a "preview" deprecation
+time: 2025-07-10T17:01:48.903582-05:00
+custom:
+  Author: QMalcolm
+  Issue: "11814"

.changes/unreleased/Fixes-20250711-095439.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Correct JSONSchema Semantic Layer node issues
+time: 2025-07-11T09:54:39.86192-05:00
+custom:
+  Author: MichelleArk
+  Issue: "11818"

.changes/unreleased/Fixes-20250714-125000.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Improve SL JSONSchema definitions
+time: 2025-07-14T12:50:00.3541-05:00
+custom:
+  Author: QMalcolm
+  Issue: N/A

.changes/unreleased/Fixes-20250714-184131.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: raise MissingPlusPrefixDeprecation instead of GenericJSONSchemaValidationDeprecation when config missing plus prefix in dbt_project.yml
+time: 2025-07-14T18:41:31.322137-04:00
+custom:
+  Author: michelleark
+  Issue: "11826"

.changes/unreleased/Fixes-20250716-164535.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Propagate config.meta and config.tags to top-level on source nodes
+time: 2025-07-16T16:45:35.683199-04:00
+custom:
+  Author: michelleark
+  Issue: "11839"

.changes/unreleased/Fixes-20250724-104410.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Safe handling of malformed config.tags on sources/tables
+time: 2025-07-24T10:44:10.828775-04:00
+custom:
+  Author: michelleark
+  Issue: "11855"

.changes/unreleased/Fixes-20250731-162142.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Raise PropertyMovedToConfigDeprecation instead of CustomTopLevelKeyDeprecation when additional attribute is a valid node config
+time: 2025-07-31T16:21:42.938703-04:00
+custom:
+  Author: michelleark
+  Issue: "11879"

.changes/unreleased/Fixes-20250804-074006.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Avoid redundant node patch removal during partial parsing
+time: 2025-08-04T07:40:06.993913-06:00
+custom:
+  Author: wircho
+  Issue: "11886"

.changes/unreleased/Fixes-20250804-074254.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Comply with strict `str` type when `block.contents` is `None`
+time: 2025-08-04T07:42:54.612616-06:00
+custom:
+  Author: wircho
+  Issue: "11492"

.changes/unreleased/Fixes-20250805-104309.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Remove duplicative PropertyMovedToConfigDeprecation for source freshness
+time: 2025-08-05T10:43:09.502585-04:00
+custom:
+  Author: michelleark
+  Issue: "11880"

.changes/unreleased/Under the Hood-20250527-162136.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Prevent overcounting PropertyMovedToConfigDeprecation for source freshness
+time: 2025-05-27T16:21:36.551426+01:00
+custom:
+  Author: aranke
+  Issue: "11660"

.changes/unreleased/Under the Hood-20250804-211346.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: call adapter.add_catalog_integration during parse_manifest
+time: 2025-08-04T21:13:46.077488-07:00
+custom:
+  Author: colin-rogers-dbt
+  Issue: "11889"

.github/ISSUE_TEMPLATE/bug-report.yml
@@ -61,7 +61,7 @@ body:
       label: Environment
       description: |
         examples:
-        - **OS**: Ubuntu 20.04
+        - **OS**: Ubuntu 24.04
         - **Python**: 3.9.12 (`python3 --version`)
         - **dbt-core**: 1.1.1 (`dbt --version`)
       value: |

.github/ISSUE_TEMPLATE/config.yml
@@ -12,15 +12,6 @@ contact_links:
   - name: Participate in Discussions
     url: https://github.com/dbt-labs/dbt-core/discussions
     about: Do you have a Big Idea for dbt? Read open discussions, or start a new one
-  - name: Create an issue for dbt-redshift
-    url: https://github.com/dbt-labs/dbt-redshift/issues/new/choose
-    about: Report a bug or request a feature for dbt-redshift
-  - name: Create an issue for dbt-bigquery
-    url: https://github.com/dbt-labs/dbt-bigquery/issues/new/choose
-    about: Report a bug or request a feature for dbt-bigquery
-  - name: Create an issue for dbt-snowflake
-    url: https://github.com/dbt-labs/dbt-snowflake/issues/new/choose
-    about: Report a bug or request a feature for dbt-snowflake
-  - name: Create an issue for dbt-spark
-    url: https://github.com/dbt-labs/dbt-spark/issues/new/choose
-    about: Report a bug or request a feature for dbt-spark
+  - name: Create an issue for adapters
+    url: https://github.com/dbt-labs/dbt-adapters/issues/new/choose
+    about: Report a bug or request a feature for an adapter

.github/ISSUE_TEMPLATE/regression-report.yml
@@ -55,7 +55,7 @@ body:
       label: Environment
       description: |
         examples:
-        - **OS**: Ubuntu 20.04
+        - **OS**: Ubuntu 24.04
         - **Python**: 3.9.12 (`python3 --version`)
         - **dbt-core (working version)**: 1.1.1 (`dbt --version`)
         - **dbt-core (regression version)**: 1.2.0 (`dbt --version`)

.github/_README.md
@@ -120,7 +120,7 @@ Some triggers of note that we use:
 ```yaml
 jobs:
   dependency_changelog:
-    runs-on: ubuntu-latest
+    runs-on: ${{ vars.UBUNTU_LATEST }}

     steps:
       - name: Get File Name Timestamp
@@ -188,6 +188,12 @@ ___
 - The [GitHub CLI](https://cli.github.com/) is available in the default runners
 - Actions run in your context. ie, using an action from the marketplace that uses the GITHUB_TOKEN uses the GITHUB_TOKEN generated by your workflow run.

+### Runners
+- We dynamically set runners based on repository vars. Admins can view repository vars and reset them. Current values are the following but are subject to change:
+  - `vars.UBUNTU_LATEST` -> `ubuntu-latest`
+  - `vars.WINDOWS_LATEST` -> `windows-latest`
+  - `vars.MACOS_LATEST` -> `macos-14`
+
 ### Actions from the Marketplace
 - Don’t use external actions for things that can easily be accomplished manually.
 - Always read through what an external action does before using it! Often an action in the GitHub Actions Marketplace can be replaced with a few lines in bash. This is much more maintainable (and won’t change under us) and clear as to what’s actually happening. It also prevents any

.github/actions/latest-wrangler/README.md
@@ -33,7 +33,7 @@ on:

 jobs:
   build:
-    runs-on: ubuntu-latest
+    runs-on: ${{ vars.UBUNTU_LATEST }}
     steps:
       - uses: actions/checkout@v3
       - name: Wrangle latest tag

@@ -3,24 +3,24 @@
 workflow_dispatch:
   inputs:
     package:
-      description: The package to publish
-      required: true
+      description: The package to publish
+      required: true
    version_number:
-      description: The version number
-      required: true
+      description: The version number
+      required: true

 jobs:
   build:
-    runs-on: ubuntu-latest
+    runs-on: ${{ vars.UBUNTU_LATEST }}
    steps:
-      - uses: actions/checkout@v3
-      - name: Wrangle latest tag
-        id: is_latest
-        uses: ./.github/actions/latest-wrangler
-        with:
-          package: ${{ github.event.inputs.package }}
-          new_version: ${{ github.event.inputs.new_version }}
-          gh_token: ${{ secrets.GITHUB_TOKEN }}
-      - name: Print the results
-        run: |
-          echo "Is it latest? Survey says: ${{ steps.is_latest.outputs.latest }} !"
+      - uses: actions/checkout@v3
+      - name: Wrangle latest tag
+        id: is_latest
+        uses: ./.github/actions/latest-wrangler
+        with:
+          package: ${{ github.event.inputs.package }}
+          new_version: ${{ github.event.inputs.new_version }}
+          gh_token: ${{ secrets.GITHUB_TOKEN }}
+      - name: Print the results
+        run: |
+          echo "Is it latest? Survey says: ${{ steps.is_latest.outputs.latest }} !"

.github/actions/setup-postgres-linux/action.yml (deleted)
@@ -1,19 +0,0 @@
-name: "Set up postgres (linux)"
-description: "Set up postgres service on linux vm for dbt integration tests"
-runs:
-  using: "composite"
-  steps:
-    - shell: bash
-      run: |
-        sudo apt-get --purge remove postgresql postgresql-*
-        sudo apt update -y
-        sudo apt install gnupg2 wget vim -y
-        sudo sh -c 'echo "deb https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
-        curl -fsSL https://www.postgresql.org/media/keys/ACCC4CF8.asc|sudo gpg --dearmor -o /etc/apt/trusted.gpg.d/postgresql.gpg
-        sudo apt update -y
-        sudo apt install postgresql-16
-        sudo apt-get -y install postgresql postgresql-contrib
-        sudo systemctl start postgresql
-        sudo systemctl enable postgresql
-        pg_isready
-        sudo -u postgres bash ${{ github.action_path }}/setup_db.sh

@@ -1 +0,0 @@
-../../../test/setup_db.sh

.github/actions/setup-postgres-macos/action.yml (deleted)
@@ -1,26 +0,0 @@
-name: "Set up postgres (macos)"
-description: "Set up postgres service on macos vm for dbt integration tests"
-runs:
-  using: "composite"
-  steps:
-    - shell: bash
-      run: |
-        brew install postgresql@16
-        brew link postgresql@16 --force
-        brew services start postgresql@16
-        echo "Check PostgreSQL service is running"
-        i=10
-        COMMAND='pg_isready'
-        while [ $i -gt -1 ]; do
-          if [ $i == 0 ]; then
-            echo "PostgreSQL service not ready, all attempts exhausted"
-            exit 1
-          fi
-          echo "Check PostgreSQL service status"
-          eval $COMMAND && break
-          echo "PostgreSQL service not ready, wait 10 more sec, attempts left: $i"
-          sleep 10
-          ((i--))
-        done
-        createuser -s postgres
-        bash ${{ github.action_path }}/setup_db.sh

@@ -1 +0,0 @@
-../../../test/setup_db.sh

.github/workflows/artifact-reviews.yml (new file)
@@ -0,0 +1,186 @@
+# **what?**
+# Enforces 2 reviews when artifact or validation files are modified.
+
+# **why?**
+# Ensure artifact changes receive proper review from designated team members. GitHub doesn't support
+# multiple reviews on a single PR based on files changed, so we need to enforce this manually.
+
+# **when?**
+# This will run when reviews are submitted and dismissed.
+
+name: "Enforce Additional Reviews on Artifact and Validations Changes"
+
+permissions:
+  checks: write
+  pull-requests: write
+  contents: read
+
+on:
+  # trigger check on review events. use pull_request_target for forks.
+  pull_request_target:
+    types: [opened, reopened, ready_for_review, synchronize, review_requested]
+  pull_request_review:
+    types: [submitted, edited, dismissed]
+
+# only run this once per PR at a time
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
+  cancel-in-progress: true
+
+env:
+  required_approvals: 2
+  team: "core-group"
+
+jobs:
+  check-reviews:
+    name: "Validate Additional Reviews"
+    runs-on: ubuntu-latest
+    steps:
+      - name: "Get list of changed files"
+        id: changed_files
+        run: |
+          # Fetch files as JSON and process with jq to sanitize output
+          gh api repos/${{ github.repository }}/pulls/${{ github.event.pull_request.number }}/files \
+            | jq -r '.[].filename' \
+            | while IFS= read -r file; do
+                # Sanitize the filename by removing any special characters and command injection attempts
+                clean_file=$(echo "$file" | sed 's/[^a-zA-Z0-9\.\/\-_]//g')
+                echo "$clean_file"
+              done > changed_files.txt
+          echo "CHANGED_FILES<<EOF" >> $GITHUB_OUTPUT
+          cat changed_files.txt >> $GITHUB_OUTPUT
+          echo "EOF" >> $GITHUB_OUTPUT
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: "Check if any artifact files changed"
+        id: artifact_files_changed
+        run: |
+          artifact_changes=false
+          while IFS= read -r file; do
+            # Only process if file path looks legitimate
+            if [[ "$file" =~ ^[a-zA-Z0-9\.\/\-_]+$ ]]; then
+              if [[ "$file" == "core/dbt/artifacts/"* ]] ; then
+                artifact_changes=true
+                break
+              fi
+            fi
+          done < changed_files.txt
+          echo "artifact_changes=$artifact_changes" >> $GITHUB_OUTPUT
+
+      - name: "Get Core Team Members"
+        if: steps.artifact_files_changed.outputs.artifact_changes == 'true'
+        id: core_members
+        run: |
+          gh api -H "Accept: application/vnd.github+json" \
+            /orgs/dbt-labs/teams/${{ env.team }}/members > core_members.json
+
+          # Extract usernames and set as multiline output
+          echo "membership<<EOF" >> $GITHUB_OUTPUT
+          jq -r '.[].login' core_members.json >> $GITHUB_OUTPUT
+          echo "EOF" >> $GITHUB_OUTPUT
+        env:
+          GH_TOKEN: ${{ secrets.IT_TEAM_MEMBERSHIP }}
+
+      - name: "Verify ${{ env.required_approvals }} core team approvals"
+        if: steps.artifact_files_changed.outputs.artifact_changes == 'true'
+        id: check_approvals
+        run: |
+          # Get all reviews
+          REVIEWS=$(gh api repos/${{ github.repository }}/pulls/${{ github.event.pull_request.number }}/reviews)
+          echo "All reviews:"
+          echo "$REVIEWS"
+          # Count approved reviews from core team members (only most recent review per user)
+          CORE_APPROVALS=0
+          while IFS= read -r member; do
+            echo "Checking member: $member"
+            APPROVED=$(echo "$REVIEWS" | jq --arg user "$member" '
+              group_by(.user.login) |
+              map(select(.[0].user.login == $user) |
+                sort_by(.submitted_at) |
+                last) |
+              map(select(.state == "APPROVED" and (.state != "DISMISSED"))) |
+              length')
+            echo "Latest review state for $member: $APPROVED"
+            CORE_APPROVALS=$((CORE_APPROVALS + APPROVED))
+            echo "Running total: $CORE_APPROVALS"
+          done <<< "${{ steps.core_members.outputs.membership }}"
+
+          echo "CORE_APPROVALS=$CORE_APPROVALS" >> $GITHUB_OUTPUT
+          echo "CORE_APPROVALS=$CORE_APPROVALS"
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: "Find Comment"
+        if: steps.artifact_files_changed.outputs.artifact_changes == 'true' && steps.check_approvals.outputs.CORE_APPROVALS < env.required_approvals
+        uses: peter-evans/find-comment@v2
+        id: find-comment
+        with:
+          issue-number: ${{ github.event.pull_request.number }}
+          comment-author: 'github-actions[bot]'
+          body-includes: "### Additional Artifact Review Required"
+
+      - name: "Create Comment"
+        if: steps.artifact_files_changed.outputs.artifact_changes == 'true' && steps.find-comment.outputs.comment-id == '' && steps.check_approvals.outputs.CORE_APPROVALS < env.required_approvals
+        uses: peter-evans/create-or-update-comment@v3
+        with:
+          issue-number: ${{ github.event.pull_request.number }}
+          body: |
+            ### Additional Artifact Review Required
+
+            Changes to artifact directory files requires at least ${{ env.required_approvals }} approvals from core team members.
+
+      - name: "Notify if not enough approvals"
+        if: steps.artifact_files_changed.outputs.artifact_changes == 'true'
+        run: |
+          if [[ "${{ steps.check_approvals.outputs.CORE_APPROVALS }}" -ge "${{ env.required_approvals }}" ]]; then
+            title="Extra requirements met"
+            message="Changes to artifact directory files requires at least ${{ env.required_approvals }} approvals from core team members. Current number of core team approvals: ${{ steps.check_approvals.outputs.CORE_APPROVALS }} "
+            echo "::notice title=$title::$message"
+            echo "REVIEW_STATUS=success" >> $GITHUB_OUTPUT
+          else
+            title="PR Approval Requirements Not Met"
+            message="Changes to artifact directory files requires at least ${{ env.required_approvals }} approvals from core team members. Current number of core team approvals: ${{ steps.check_approvals.outputs.CORE_APPROVALS }} "
+            echo "::notice title=$title::$message"
+            echo "REVIEW_STATUS=neutral" >> $GITHUB_OUTPUT
+          fi
+        id: review_check
+
+      - name: "Set check status"
+        id: status_check
+        run: |
+          if [[ "${{ steps.artifact_files_changed.outputs.artifact_changes }}" == 'false' ]]; then
+            # no extra review required
+            echo "current_status=success" >> $GITHUB_OUTPUT
+          elif [[ "${{ steps.review_check.outputs.REVIEW_STATUS }}" == "success" ]]; then
+            # we have all the required reviews
+            echo "current_status=success" >> $GITHUB_OUTPUT
+          else
+            # neutral exit - neither success nor failure
+            # we can't fail here because we use multiple triggers for this workflow and they won't reset the check
+            # workaround is to use a neutral exit to skip the check run until it's actually successful
+            echo "current_status=neutral" >> $GITHUB_OUTPUT
+          fi
+
+      - name: "Post Event"
+        # This step posts the status of the check because the workflow is triggered by multiple events
+        # and we need to ensure the check is always updated. Otherwise we would end up with duplicate
+        # checks in the GitHub UI.
+        run: |
+          if [[ "${{ steps.status_check.outputs.current_status }}" == "success" ]]; then
+            state="success"
+          else
+            state="failure"
+          fi
+
+          gh api \
+            --method POST \
+            -H "Accept: application/vnd.github+json" \
+            /repos/${{ github.repository }}/statuses/${{ github.event.pull_request.base.sha }} \
+            -f state="$state" \
+            -f description="Artifact Review Check" \
+            -f context="Artifact Review Check" \
+            -f target_url="${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
+        env:
+          GH_TOKEN: ${{ secrets.FISHTOWN_BOT_PAT }}
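The subtle step in this workflow is the jq filter that counts approvals: it groups reviews by user, keeps only each member's most recent review, and counts it only if its state is APPROVED, so a later dismissal cancels an earlier approval. The same logic re-expressed in Python for clarity (the sample reviews are invented for illustration; this is a sketch of the intent, not code from the workflow):

```python
from itertools import groupby

# Invented sample data mirroring the GitHub reviews API shape.
reviews = [
    {"user": {"login": "alice"}, "state": "APPROVED", "submitted_at": "2025-01-01T00:00:00Z"},
    {"user": {"login": "alice"}, "state": "DISMISSED", "submitted_at": "2025-01-02T00:00:00Z"},
    {"user": {"login": "bob"}, "state": "APPROVED", "submitted_at": "2025-01-01T00:00:00Z"},
]
team = ["alice", "bob", "carol"]


def latest_reviews(reviews):
    # Sort by (user, timestamp) so groupby collects each user's reviews in order,
    # then keep only the most recent review per user.
    keyed = sorted(reviews, key=lambda r: (r["user"]["login"], r["submitted_at"]))
    return {login: list(rs)[-1] for login, rs in groupby(keyed, key=lambda r: r["user"]["login"])}


latest = latest_reviews(reviews)
core_approvals = sum(
    1 for member in team if member in latest and latest[member]["state"] == "APPROVED"
)
print(core_approvals)  # 1 -- alice's latest review is a dismissal, carol never reviewed
```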

.github/workflows/auto-respond-bug-reports.yml (new file)
@@ -0,0 +1,50 @@
+# **what?**
+# Check if the an issue is opened near or during an extended holiday period.
+# If so, post an automatically-generated comment about the holiday for bug reports.
+# Also provide specific information to customers of dbt Cloud.
+
+# **why?**
+# Explain why responses will be delayed during our holiday period.
+
+# **when?**
+# This will run when new issues are opened.
+
+name: Auto-Respond to Bug Reports During Holiday Period
+
+on:
+  issues:
+    types:
+      - opened
+
+permissions:
+  contents: read
+  issues: write
+
+jobs:
+  auto-response:
+    runs-on: ${{ vars.UBUNTU_LATEST }}
+    steps:
+      - name: Check if current date is within holiday period
+        id: date-check
+        run: |
+          current_date=$(date -u +"%Y-%m-%d")
+          start_date="2024-12-23"
+          end_date="2025-01-05"
+
+          if [[ "$current_date" < "$start_date" || "$current_date" > "$end_date" ]]; then
+            echo "outside_holiday=true" >> $GITHUB_ENV
+          else
+            echo "outside_holiday=false" >> $GITHUB_ENV
+          fi
+
+      - name: Post comment
+        if: ${{ env.outside_holiday == 'false' && contains(github.event.issue.labels.*.name, 'bug') }}
+        run: |
+          gh issue comment ${{ github.event.issue.number }} --repo ${{ github.repository }} --body "Thank you for your bug report! Our team is will be out of the office for [Christmas and our Global Week of Rest](https://handbook.getdbt.com/docs/time_off#2024-us-holidays), from December 25, 2024, through January 3, 2025.
+
+          We will review your issue as soon as possible after returning.
+          Thank you for your understanding, and happy holidays! 🎄🎉
+
+          If you are a customer of dbt Cloud, please contact our Customer Support team via the dbt Cloud web interface or email **support@dbtlabs.com**."
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
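The date check above works with plain bash string operators because ISO-8601 dates sort lexicographically in the same order they sort chronologically. The same check in Python, for illustration:

```python
# ISO-8601 date strings compare correctly as plain strings, which is why the
# workflow can use bash's < and > operators instead of parsing dates.
start, end = "2024-12-23", "2025-01-05"
for current in ("2024-12-24", "2025-02-01"):
    outside_holiday = current < start or current > end
    print(current, outside_holiday)  # False inside the window, True outside
```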

.github/workflows/backport.yml
@@ -28,7 +28,7 @@ permissions:
 jobs:
   backport:
     name: Backport
-    runs-on: ubuntu-latest
+    runs-on: ${{ vars.UBUNTU_LATEST }}
     # Only react to merged PRs for security reasons.
     # See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target.
     if: >

.github/workflows/bot-changelog.yml
@@ -41,7 +41,7 @@ jobs:
       include:
         - label: "dependencies"
           changie_kind: "Dependencies"
-    runs-on: ubuntu-latest
+    runs-on: ${{ vars.UBUNTU_LATEST }}

     steps:

.github/workflows/check-artifact-changes.yml
@@ -4,12 +4,16 @@ on:
   pull_request:
     types: [ opened, reopened, labeled, unlabeled, synchronize ]
     paths-ignore: [ '.changes/**', '.github/**', 'tests/**', '**.md', '**.yml' ]

+  merge_group:
+    types: [checks_requested]
   workflow_dispatch:

 permissions:
   contents: read

 jobs:
   check-artifact-changes:
-    runs-on: ubuntu-latest
+    runs-on: ${{ vars.UBUNTU_LATEST }}
     if: ${{ !contains(github.event.pull_request.labels.*.name, 'artifact_minor_upgrade') }}
     steps:
       - name: Checkout code

.github/workflows/main.yml
@@ -20,6 +20,8 @@ on:
       - "*.latest"
       - "releases/*"
   pull_request:
+  merge_group:
+    types: [checks_requested]
   workflow_dispatch:

 permissions: read-all
@@ -75,7 +77,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: [ "3.9", "3.10", "3.11", "3.12" ]
+        python-version: [ "3.9", "3.10", "3.11", "3.12", "3.13" ]

     env:
       TOXENV: "unit"
@@ -112,7 +114,7 @@

       - name: Upload Unit Test Coverage to Codecov
         if: ${{ matrix.python-version == '3.11' }}
-        uses: codecov/codecov-action@v4
+        uses: codecov/codecov-action@v5
         with:
           token: ${{ secrets.CODECOV_TOKEN }}
           flags: unit
@@ -152,7 +154,7 @@
           echo "include=${INCLUDE_GROUPS}"
           echo "include=${INCLUDE_GROUPS}" >> $GITHUB_OUTPUT

-  integration:
+  integration-postgres:
     name: (${{ matrix.split-group }}) integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}

     runs-on: ${{ matrix.os }}
@@ -162,9 +164,104 @@
     strategy:
       fail-fast: false
       matrix:
-        python-version: [ "3.9", "3.10", "3.11", "3.12" ]
-        os: [ubuntu-20.04]
+        python-version: [ "3.9", "3.10", "3.11", "3.12", "3.13" ]
+        os: ["ubuntu-latest"]
         split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
     env:
       TOXENV: integration
       DBT_INVOCATION_ENV: github-actions
       DBT_TEST_USER_1: dbt_test_user_1
       DBT_TEST_USER_2: dbt_test_user_2
       DBT_TEST_USER_3: dbt_test_user_3
+      DD_CIVISIBILITY_AGENTLESS_ENABLED: true
+      DD_API_KEY: ${{ secrets.DATADOG_API_KEY }}
+      DD_SITE: datadoghq.com
+      DD_ENV: ci
+      DD_SERVICE: ${{ github.event.repository.name }}
+
+    services:
+      # Label used to access the service container
+      postgres:
+        # Docker Hub image
+        image: postgres
+        # Provide the password for postgres
+        env:
+          POSTGRES_PASSWORD: password
+          POSTGRES_USER: postgres
+        # Set health checks to wait until postgres has started
+        options: >-
+          --health-cmd pg_isready
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+        ports:
+          - 5432:5432
+
+    steps:
+      - name: Check out the repository
+        uses: actions/checkout@v4
+
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Run postgres setup script
+        run: |
+          ./test/setup_db.sh
+        env:
+          PGHOST: localhost
+          PGPORT: 5432
+          PGPASSWORD: password
+
+      - name: Install python tools
+        run: |
+          python -m pip install --user --upgrade pip
+          python -m pip --version
+          python -m pip install tox
+          tox --version
+
+      - name: Run integration tests
+        uses: nick-fields/retry@v3
+        with:
+          timeout_minutes: 30
+          max_attempts: 3
+          command: tox -- --ddtrace
+        env:
+          PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}
+
+      - name: Get current date
+        if: always()
+        id: date
+        run: |
+          CURRENT_DATE=$(date +'%Y-%m-%dT%H_%M_%S') # no colons allowed for artifacts
+          echo "date=$CURRENT_DATE" >> $GITHUB_OUTPUT
+
+      - uses: actions/upload-artifact@v4
+        if: always()
+        with:
+          name: logs_${{ matrix.python-version }}_${{ matrix.os }}_${{ matrix.split-group }}_${{ steps.date.outputs.date }}
+          path: ./logs
+
+      - name: Upload Integration Test Coverage to Codecov
+        if: ${{ matrix.python-version == '3.11' }}
+        uses: codecov/codecov-action@v5
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          flags: integration
+
+  integration-mac-windows:
+    name: (${{ matrix.split-group }}) integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}
+
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 30
+    needs:
+      - integration-metadata
     strategy:
       fail-fast: false
       matrix:
         split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
         # this include is where we add the mac and windows os
         include: ${{ fromJson(needs.integration-metadata.outputs.include) }}
     env:
       TOXENV: integration
@@ -187,13 +284,14 @@
         with:
           python-version: ${{ matrix.python-version }}

-      - name: Set up postgres (linux)
-        if: runner.os == 'Linux'
-        uses: ./.github/actions/setup-postgres-linux
-
       - name: Set up postgres (macos)
         if: runner.os == 'macOS'
-        uses: ./.github/actions/setup-postgres-macos
+        uses: nick-fields/retry@v3
+        with:
+          timeout_minutes: 10
+          max_attempts: 3
+          command: ./test/setup_db.sh

       - name: Set up postgres (windows)
         if: runner.os == 'Windows'
@@ -230,7 +328,7 @@

       - name: Upload Integration Test Coverage to Codecov
         if: ${{ matrix.python-version == '3.11' }}
-        uses: codecov/codecov-action@v4
+        uses: codecov/codecov-action@v5
         with:
           token: ${{ secrets.CODECOV_TOKEN }}
           flags: integration
@@ -239,10 +337,10 @@
     if: ${{ always() }}
     name: Integration Test Suite
     runs-on: ubuntu-latest
-    needs: integration
+    needs: [integration-mac-windows, integration-postgres]
     steps:
       - name: "Integration Tests Failed"
-        if: ${{ contains(needs.integration.result, 'failure') || contains(needs.integration.result, 'cancelled') }}
+        if: ${{ contains(needs.integration-mac-windows.result, 'failure') || contains(needs.integration-mac-windows.result, 'cancelled') || contains(needs.integration-postgres.result, 'failure') || contains(needs.integration-postgres.result, 'cancelled') }}
         # when this is true the next step won't execute
         run: |
           echo "::notice title='Integration test suite failed'"

.github/workflows/model_performance.yml
@@ -38,7 +38,7 @@ permissions:
 jobs:
   set-variables:
     name: Setting Variables
-    runs-on: ubuntu-latest
+    runs-on: ${{ vars.UBUNTU_LATEST }}
     outputs:
       cache_key: ${{ steps.variables.outputs.cache_key }}
       release_id: ${{ steps.semver.outputs.base-version }}
@@ -72,7 +72,7 @@

   latest-runner:
     name: Build or Fetch Runner
-    runs-on: ubuntu-latest
+    runs-on: ${{ vars.UBUNTU_LATEST }}
     needs: [set-variables]
     env:
       RUSTFLAGS: "-D warnings"
@@ -137,7 +137,7 @@
     # a successfully tested and built binary from being cached.
     needs: [set-variables, latest-runner]
     name: Model a release
-    runs-on: ubuntu-latest
+    runs-on: ${{ vars.UBUNTU_LATEST }}
     steps:

       - name: '[DEBUG] print variables'
@@ -206,7 +206,7 @@
     # depends on `model` as a separate job so that the baseline can be committed to more than one branch
     # i.e. release branch and main
     needs: [set-variables, latest-runner, model]
-    runs-on: ubuntu-latest
+    runs-on: ${{ vars.UBUNTU_LATEST }}

     strategy:
       matrix:
@@ -235,7 +235,7 @@
          git push origin ${{ matrix.target-branch }}
          git branch --set-upstream-to=origin/${{ matrix.target-branch }} ${{ matrix.target-branch }}

-      - uses: actions/download-artifact@v4
+      - uses: actions/download-artifact@v5
        with:
          name: baseline
          path: performance/baselines/${{ needs.set-variables.outputs.release_id }}
@@ -253,7 +253,7 @@
          push: 'origin origin/${{ matrix.target-branch }}'

      - name: Create Pull Request
-        uses: peter-evans/create-pull-request@v6
+        uses: peter-evans/create-pull-request@v7
        with:
          author: 'Github Build Bot <buildbot@fishtownanalytics.com>'
          base: ${{ matrix.base-branch }}

.github/workflows/nightly-release.yml
@@ -31,7 +31,7 @@ env:

 jobs:
   aggregate-release-data:
-    runs-on: ubuntu-latest
+    runs-on: ${{ vars.UBUNTU_LATEST }}

     outputs:
       version_number: ${{ steps.nightly-release-version.outputs.number }}
@@ -76,7 +76,7 @@
       echo "name=${{ env.RELEASE_BRANCH }}" >> $GITHUB_OUTPUT

   log-outputs-aggregate-release-data:
-    runs-on: ubuntu-latest
+    runs-on: ${{ vars.UBUNTU_LATEST }}
     needs: [aggregate-release-data]

     steps:

.github/workflows/release.yml
@@ -75,7 +75,7 @@ defaults:
 jobs:
   job-setup:
     name: Log Inputs
-    runs-on: ubuntu-latest
+    runs-on: ${{ vars.UBUNTU_LATEST }}
     outputs:
       starting_sha: ${{ steps.set_sha.outputs.starting_sha }}
     steps:
@@ -125,7 +125,7 @@

     needs: [bump-version-generate-changelog]

-    runs-on: ubuntu-latest
+    runs-on: ${{ vars.UBUNTU_LATEST }}

     steps:
       - name: Print variables
@@ -188,7 +188,7 @@
     # determine if we need to release dbt-core or both dbt-core and dbt-postgres
     name: Determine Docker Package
     if: ${{ !failure() && !cancelled() }}
-    runs-on: ubuntu-latest
+    runs-on: ${{ vars.UBUNTU_LATEST }}
     needs: [pypi-release]
     outputs:
       matrix: ${{ steps.determine-docker-package.outputs.matrix }}

.github/workflows/schema-check.yml
@@ -9,15 +9,21 @@
 # occur so we want to proactively alert to it.
 #
 # **when?**
-# On pushes to `develop` and release branches. Manual runs are also enabled.
+# Only can be run manually
 name: Artifact Schema Check

 on:
-  pull_request:
-    types: [ opened, reopened, labeled, unlabeled, synchronize ]
-    paths-ignore: [ '.changes/**', '.github/**', 'tests/**', '**.md', '**.yml' ]
+  # pull_request:
+  #   types: [ opened, reopened, labeled, unlabeled, synchronize ]
+  #   paths-ignore: [ '.changes/**', '.github/**', 'tests/**', '**.md', '**.yml' ]

   workflow_dispatch:
+    inputs:
+      target_branch:
+        description: "The branch to check against"
+        type: string
+        default: 'main'
+        required: true

 # no special access is needed
 permissions: read-all
@@ -31,7 +37,7 @@ env:
 jobs:
   checking-schemas:
     name: "Post-merge schema changes required"
-    runs-on: ubuntu-latest
+    runs-on: ${{ vars.UBUNTU_LATEST }}

     steps:
       - name: Set up Python
@@ -43,6 +49,7 @@
       uses: actions/checkout@v4
       with:
         path: ${{ env.DBT_REPO_DIRECTORY }}
+        ref: ${{ inputs.target_branch }}

     - name: Check for changes in core/dbt/artifacts
       # https://github.com/marketplace/actions/paths-changes-filter

@@ -14,6 +14,8 @@ on:
       - "*.latest"
       - "releases/*"
   pull_request:
+  merge_group:
+    types: [checks_requested]
   workflow_dispatch:

 permissions: read-all
@@ -45,7 +47,7 @@ jobs:
   # run the performance measurements on the current or default branch
   test-schema:
     name: Test Log Schema
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-latest
     timeout-minutes: 30
     needs:
       - integration-metadata
@@ -67,6 +69,24 @@
       DBT_TEST_USER_2: dbt_test_user_2
       DBT_TEST_USER_3: dbt_test_user_3

+    services:
+      # Label used to access the service container
+      postgres:
+        # Docker Hub image
+        image: postgres
+        # Provide the password for postgres
+        env:
+          POSTGRES_PASSWORD: password
+          POSTGRES_USER: postgres
+        # Set health checks to wait until postgres has started
+        options: >-
+          --health-cmd pg_isready
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+        ports:
+          - 5432:5432
+
     steps:
       - name: checkout dev
         uses: actions/checkout@v4
@@ -85,8 +105,13 @@
         pip install tox
         tox --version

-      - name: Set up postgres
-        uses: ./.github/actions/setup-postgres-linux
+      - name: Run postgres setup script
+        run: |
+          ./test/setup_db.sh
+        env:
+          PGHOST: localhost
+          PGPORT: 5432
+          PGPASSWORD: password

      - name: ls
        run: ls

.github/workflows/test-repeater.yml
@@ -51,7 +51,7 @@ defaults:

 jobs:
   debug:
-    runs-on: ubuntu-latest
+    runs-on: ${{ vars.UBUNTU_LATEST }}
     steps:
       - name: "[DEBUG] Output Inputs"
         run: |
@@ -95,13 +95,17 @@
         run: make dev

       - name: "Set up postgres (linux)"
-        if: inputs.os == 'ubuntu-latest'
+        if: inputs.os == '${{ vars.UBUNTU_LATEST }}'
         run: make setup-db

       # mac and windows don't use make due to limitations with docker with those runners in GitHub
-      - name: "Set up postgres (macos)"
-        if: inputs.os == 'macos-14'
-        uses: ./.github/actions/setup-postgres-macos
+      - name: Set up postgres (macos)
+        if: runner.os == 'macOS'
+        uses: nick-fields/retry@v3
+        with:
+          timeout_minutes: 10
+          max_attempts: 3
+          command: ./test/setup_db.sh

       - name: "Set up postgres (windows)"
         if: inputs.os == 'windows-latest'

@@ -1,7 +1,7 @@
 # Configuration for pre-commit hooks (see https://pre-commit.com/).
 # Eventually the hooks described here will be run as tests before merging each PR.

-exclude: ^(core/dbt/docs/build/|core/dbt/common/events/types_pb2.py|core/dbt/events/core_types_pb2.py|core/dbt/adapters/events/adapter_types_pb2.py)
+exclude: ^(core/dbt/docs/build/|core/dbt/common/events/types_pb2.py|core/dbt/adapters/events/adapter_types_pb2.py)

 # Force all unspecified python hooks to run python 3.9
 default_language_version:

@@ -6,7 +6,6 @@ Most of the python code in the repository is within the `core/dbt` directory.
 - [`single python files`](core/dbt/README.md): A number of individual files, such as 'compilation.py' and 'exceptions.py'

 The main subdirectories of core/dbt:
-- [`adapters`](core/dbt/adapters/README.md): Define base classes for behavior that is likely to differ across databases
 - [`clients`](core/dbt/clients/README.md): Interface with dependencies (agate, jinja) or across operating systems
 - [`config`](core/dbt/config/README.md): Reconcile user-supplied configuration from connection profiles, project files, and Jinja macros
 - [`context`](core/dbt/context/README.md): Build and expose dbt-specific Jinja functionality
@@ -14,7 +13,7 @@ The main subdirectories of core/dbt:
 - [`deps`](core/dbt/deps/README.md): Package installation and dependency resolution
 - [`events`](core/dbt/events/README.md): Logging events
 - [`graph`](core/dbt/graph/README.md): Produce a `networkx` DAG of project resources, and selecting those resources given user-supplied criteria
-- [`include`](core/dbt/include/README.md): The dbt "global project," which defines default implementations of Jinja2 macros
+- [`include`](core/dbt/include/README.md): Set up the starter project scaffold.
 - [`parser`](core/dbt/parser/README.md): Read project files, validate, construct python objects
 - [`task`](core/dbt/task/README.md): Set forth the actions that dbt can perform when invoked

@@ -32,7 +31,7 @@ This is the docs website code. It comes from the dbt-docs repository, and is generated...
 ## Adapters

 dbt uses an adapter-plugin pattern to extend support to different databases, warehouses, query engines, etc.
-Note: dbt-postgres used to exist in dbt-core but is now in [its own repo](https://github.com/dbt-labs/dbt-postgres)
+Note: dbt-postgres used to exist in dbt-core but is now in [the dbt-adapters repo](https://github.com/dbt-labs/dbt-adapters/tree/main/dbt-postgres)

 Each adapter is a mix of python, Jinja2, and SQL. The adapter code also makes heavy use of Jinja2 to wrap modular chunks of SQL functionality, define default implementations, and allow plugins to override it.

@@ -10,6 +10,7 @@
 For information on prior major and minor releases, see their changelogs:

+* [1.10](https://github.com/dbt-labs/dbt-core/blob/1.10.latest/CHANGELOG.md)
 * [1.9](https://github.com/dbt-labs/dbt-core/blob/1.9.latest/CHANGELOG.md)
 * [1.8](https://github.com/dbt-labs/dbt-core/blob/1.8.latest/CHANGELOG.md)
 * [1.7](https://github.com/dbt-labs/dbt-core/blob/1.7.latest/CHANGELOG.md)

@@ -224,7 +224,9 @@ Code can be merged into the current development branch `main` by opening a pull

 Automated tests run via GitHub Actions. If you're a first-time contributor, all tests (including code checks and unit tests) will require a maintainer to approve. Changes in the `dbt-core` repository trigger integration tests against Postgres. dbt Labs also provides CI environments in which to test changes to other adapters, triggered by PRs in those adapters' repositories, as well as periodic maintenance checks of each adapter in concert with the latest `dbt-core` code changes.

-Once all tests are passing and your PR has been approved, a `dbt-core` maintainer will merge your changes into the active development branch. And that's it! Happy developing :tada:
+We require signed git commits. See docs [here](https://docs.github.com/en/authentication/managing-commit-signature-verification/signing-commits) for setting up code signing.
+
+Once all tests are passing, all comments are resolved, and your PR has been approved, a `dbt-core` maintainer will merge your changes into the active development branch. And that's it! Happy developing :tada:

 ## Troubleshooting Tips

@@ -50,7 +50,7 @@ RUN curl -LO https://github.com/jwilder/dockerize/releases/download/$DOCKERIZE_V
     && tar -C /usr/local/bin -xzvf dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
     && rm dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz

-RUN pip3 install -U tox wheel six setuptools
+RUN pip3 install -U tox wheel six setuptools pre-commit

 # These args are passed in via docker-compose, which reads then from the .env file.
 # On Linux, run `make .env` to create the .env file for the current user.
6 Makefile
@@ -35,7 +35,7 @@ dev_req: ## Installs dbt-* packages in develop mode along with only development
.PHONY: dev
dev: dev_req ## Installs dbt-* packages in develop mode along with development dependencies and pre-commit.
	@\
	pre-commit install
	$(DOCKER_CMD) pre-commit install

.PHONY: dev-uninstall
dev-uninstall: ## Uninstall all packages in venv except for build tools
@@ -43,10 +43,6 @@ dev-uninstall: ## Uninstall all packages in venv except for build tools
	pip freeze | grep -v "^-e" | cut -d "@" -f1 | xargs pip uninstall -y; \
	pip uninstall -y dbt-core

.PHONY: core_proto_types
core_proto_types: ## generates google protobuf python file from core_types.proto
	protoc -I=./core/dbt/events --python_out=./core/dbt/events ./core/dbt/events/core_types.proto

.PHONY: mypy
mypy: .env ## Runs mypy against staged changes for static type checking.
	@\

@@ -8,10 +8,12 @@ coverage:
    default:
      target: auto
      threshold: 0.1% # Reduce noise by ignoring rounding errors in coverage drops
      informational: true
  patch:
    default:
      target: auto
      threshold: 80%
      informational: true

comment:
  layout: "header, diff, flags, components" # show component info in the PR comment

@@ -1,3 +1,3 @@
recursive-include dbt/include *.py *.sql *.yml *.html *.md .gitkeep .gitignore
recursive-include dbt/include *.py *.sql *.yml *.html *.md .gitkeep .gitignore *.json
include dbt/py.typed
recursive-include dbt/task/docs *.html

26 core/dbt/_pydantic_shim.py Normal file
@@ -0,0 +1,26 @@
# type: ignore

"""Shim to allow support for both Pydantic 1 and Pydantic 2.

dbt-core must support both major versions of Pydantic because dbt-core users might be using an environment with
either version, and we can't restrict them to one or the other. Here, we essentially import all Pydantic objects
from version 1 that we use. Throughout the repo, we import these objects from this file instead of from Pydantic
directly, meaning that we essentially only use Pydantic 1 in dbt-core currently, but without forcing that restriction
on dbt users. The development environment for this repo should be pinned to Pydantic 1 to ensure devs get appropriate
type hints.
"""

from importlib.metadata import version

pydantic_version = version("pydantic")
# Pydantic uses semantic versioning, i.e. <major>.<minor>.<patch>, and we need to know the major
pydantic_major = pydantic_version.split(".")[0]

if pydantic_major == "1":
    from pydantic import BaseSettings  # noqa: F401
elif pydantic_major == "2":
    from pydantic.v1 import BaseSettings  # noqa: F401
else:
    raise RuntimeError(
        f"Currently only pydantic 1 and 2 are supported, found pydantic {pydantic_version}"
    )
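
A minimal sketch of how the shim is meant to be consumed elsewhere in the repo, assuming only that `BaseSettings` behaves as in Pydantic 1 (the class below is illustrative, not part of dbt-core):

```python
# Import Pydantic names from the shim, never from pydantic directly, so the
# same code works whether the environment has Pydantic 1 or Pydantic 2.
from dbt._pydantic_shim import BaseSettings


class ExampleSettings(BaseSettings):  # hypothetical settings class
    threads: int = 4


assert ExampleSettings().threads == 4
```
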
@@ -1,8 +1,10 @@
from dbt.artifacts.resources.base import BaseResource, Docs, FileHash, GraphResource
from dbt.artifacts.resources.v1.analysis import Analysis
from dbt.artifacts.resources.v1.catalog import Catalog, CatalogWriteIntegrationConfig

# alias to latest resource definitions
from dbt.artifacts.resources.v1.components import (
    ColumnConfig,
    ColumnInfo,
    CompiledResource,
    Contract,
@@ -32,7 +34,7 @@ from dbt.artifacts.resources.v1.exposure import (
    MaturityType,
)
from dbt.artifacts.resources.v1.generic_test import GenericTest, TestMetadata
from dbt.artifacts.resources.v1.group import Group
from dbt.artifacts.resources.v1.group import Group, GroupConfig
from dbt.artifacts.resources.v1.hook import HookNode
from dbt.artifacts.resources.v1.macro import Macro, MacroArgument, MacroDependsOn
from dbt.artifacts.resources.v1.metric import (
@@ -46,7 +48,12 @@ from dbt.artifacts.resources.v1.metric import (
    MetricTimeWindow,
    MetricTypeParams,
)
from dbt.artifacts.resources.v1.model import Model, ModelConfig, TimeSpine
from dbt.artifacts.resources.v1.model import (
    Model,
    ModelConfig,
    ModelFreshness,
    TimeSpine,
)
from dbt.artifacts.resources.v1.owner import Owner
from dbt.artifacts.resources.v1.saved_query import (
    Export,

@@ -75,3 +75,6 @@ class BatchSize(StrEnum):
    day = "day"
    month = "month"
    year = "year"

    def plural(self) -> str:
        return str(self) + "s"

23 core/dbt/artifacts/resources/v1/catalog.py Normal file
@@ -0,0 +1,23 @@
from dataclasses import dataclass, field
from typing import Any, Dict, List, Optional

from dbt.adapters.catalogs import CatalogIntegrationConfig
from dbt_common.dataclass_schema import dbtClassMixin


@dataclass
class CatalogWriteIntegrationConfig(CatalogIntegrationConfig):
    name: str
    catalog_type: str
    external_volume: Optional[str] = None
    table_format: Optional[str] = None
    catalog_name: Optional[str] = None
    file_format: Optional[str] = None
    adapter_properties: Dict[str, Any] = field(default_factory=dict)


@dataclass
class Catalog(dbtClassMixin):
    name: str
    active_write_integration: Optional[str] = None
    write_integrations: List[CatalogWriteIntegrationConfig] = field(default_factory=list)
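
An illustrative construction of the new resource, with made-up values:

```python
# A catalog with one write integration; every value here is an example.
integration = CatalogWriteIntegrationConfig(
    name="prod_write",
    catalog_type="iceberg",
    table_format="iceberg",
)
catalog = Catalog(
    name="my_catalog",
    active_write_integration="prod_write",
    write_integrations=[integration],
)
assert catalog.write_integrations[0].catalog_type == "iceberg"
```
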
@@ -6,6 +6,7 @@ from typing import Any, Dict, List, Optional, Union
from dbt.artifacts.resources.base import Docs, FileHash, GraphResource
from dbt.artifacts.resources.types import NodeType, TimePeriod
from dbt.artifacts.resources.v1.config import NodeConfig
from dbt_common.contracts.config.base import BaseConfig, MergeBehavior
from dbt_common.contracts.config.properties import AdditionalPropertiesMixin
from dbt_common.contracts.constraints import ColumnLevelConstraint
from dbt_common.contracts.util import Mergeable
@@ -15,6 +16,20 @@ from dbt_semantic_interfaces.type_enums import TimeGranularity
NodeVersion = Union[str, float]


def _backcompat_doc_blocks(doc_blocks: Any) -> List[str]:
    """
    Make doc_blocks backwards-compatible for scenarios where a user specifies `doc_blocks` on a model or column.
    Mashumaro will raise a serialization error if the specified `doc_blocks` isn't a list of strings.
    In such a scenario, this method returns an empty list to avoid a serialization error.
    Further along, `_get_doc_blocks` in `manifest.py` populates the correct `doc_blocks` for the happy path.
    """

    if isinstance(doc_blocks, list) and all(isinstance(x, str) for x in doc_blocks):
        return doc_blocks

    return []
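
The expected behavior, restated as a few illustrative calls:

```python
# A well-formed list of strings passes through; anything else is scrubbed
# to an empty list so serialization cannot fail.
assert _backcompat_doc_blocks(["doc_a", "doc_b"]) == ["doc_a", "doc_b"]
assert _backcompat_doc_blocks("not-a-list") == []
assert _backcompat_doc_blocks([{"not": "a string"}]) == []
```
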
@dataclass
class MacroDependsOn(dbtClassMixin):
    macros: List[str] = field(default_factory=list)
@@ -55,6 +70,12 @@ class RefArgs(dbtClassMixin):
        return {}


@dataclass
class ColumnConfig(BaseConfig):
    meta: Dict[str, Any] = field(default_factory=dict, metadata=MergeBehavior.Update.meta())
    tags: List[str] = field(default_factory=list)


@dataclass
class ColumnInfo(AdditionalPropertiesMixin, ExtensibleDbtClassMixin):
    """Used in all ManifestNodes and SourceDefinition"""
@@ -65,9 +86,16 @@ class ColumnInfo(AdditionalPropertiesMixin, ExtensibleDbtClassMixin):
    data_type: Optional[str] = None
    constraints: List[ColumnLevelConstraint] = field(default_factory=list)
    quote: Optional[bool] = None
    config: ColumnConfig = field(default_factory=ColumnConfig)
    tags: List[str] = field(default_factory=list)
    _extra: Dict[str, Any] = field(default_factory=dict)
    granularity: Optional[TimeGranularity] = None
    doc_blocks: List[str] = field(default_factory=list)

    def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None) -> dict:
        dct = super().__post_serialize__(dct, context)
        dct["doc_blocks"] = _backcompat_doc_blocks(dct["doc_blocks"])
        return dct


@dataclass
@@ -197,13 +225,18 @@ class ParsedResource(ParsedResourceMandatory):
    unrendered_config_call_dict: Dict[str, Any] = field(default_factory=dict)
    relation_name: Optional[str] = None
    raw_code: str = ""
    doc_blocks: List[str] = field(default_factory=list)

    def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None):
        dct = super().__post_serialize__(dct, context)

        if context and context.get("artifact") and "config_call_dict" in dct:
            del dct["config_call_dict"]
        if context and context.get("artifact") and "unrendered_config_call_dict" in dct:
            del dct["unrendered_config_call_dict"]

        dct["doc_blocks"] = _backcompat_doc_blocks(dct["doc_blocks"])

        return dct

@@ -181,7 +181,7 @@ class TestConfig(NodeAndTestConfig):
    warn_if: str = "!= 0"
    error_if: str = "!= 0"

    def __post_init__(self):
    def finalize_and_validate(self):
        """
        The presence of a setting for `store_failures_as` overrides any existing setting for `store_failures`,
        regardless of level of granularity. If `store_failures_as` is not set, then `store_failures` takes effect.
@@ -207,6 +207,7 @@ class TestConfig(NodeAndTestConfig):
        but still allow for backwards compatibility for `store_failures`.
        See https://github.com/dbt-labs/dbt-core/issues/6914 for more information.
        """
        super().finalize_and_validate()

        # if `store_failures_as` is not set, it gets set by `store_failures`
        # the settings below mimic existing behavior prior to `store_failures_as`
@@ -229,6 +230,8 @@ class TestConfig(NodeAndTestConfig):
        else:
            self.store_failures = get_store_failures_map.get(self.store_failures_as, True)

        return self

    @classmethod
    def same_contents(cls, unrendered: Dict[str, Any], other: Dict[str, Any]) -> bool:
        """This is like __eq__, except it explicitly checks certain fields."""
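
A hedged sketch of the precedence rule described in the docstring above. The real lookup table is `get_store_failures_map`, which is not shown in this diff; the mapping values below are assumptions for illustration only:

```python
# Assumed mapping: storing failures "ephemeral"-ly implies store_failures=False.
get_store_failures_map = {"ephemeral": False, "table": True, "view": True}

store_failures = False          # user's store_failures setting
store_failures_as = "table"     # overrides store_failures whenever it is set
if store_failures_as is not None:
    store_failures = get_store_failures_map.get(store_failures_as, True)
assert store_failures is True
```
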
@@ -27,6 +27,8 @@ class MaturityType(StrEnum):
@dataclass
class ExposureConfig(BaseConfig):
    enabled: bool = True
    tags: List[str] = field(default_factory=list)
    meta: Dict[str, Any] = field(default_factory=dict)


@dataclass

@@ -1,9 +1,15 @@
from dataclasses import dataclass
from typing import Literal
from dataclasses import dataclass, field
from typing import Any, Dict, Literal, Optional

from dbt.artifacts.resources.base import BaseResource
from dbt.artifacts.resources.types import NodeType
from dbt.artifacts.resources.v1.owner import Owner
from dbt_common.contracts.config.base import BaseConfig, MergeBehavior


@dataclass
class GroupConfig(BaseConfig):
    meta: Dict[str, Any] = field(default_factory=dict, metadata=MergeBehavior.Update.meta())


@dataclass
@@ -11,3 +17,5 @@ class Group(BaseResource):
    name: str
    owner: Owner
    resource_type: Literal[NodeType.Group]
    description: Optional[str] = None
    config: GroupConfig = field(default_factory=GroupConfig)

@@ -46,7 +46,15 @@ class MetricInputMeasure(dbtClassMixin):
@dataclass
class MetricTimeWindow(dbtClassMixin):
    count: int
    granularity: TimeGranularity
    granularity: str

    @property
    def window_string(self) -> str:  # noqa: D
        return f"{self.count} {self.granularity}"

    @property
    def is_standard_granularity(self) -> bool:  # noqa: D
        return self.granularity.casefold() in {item.value.casefold() for item in TimeGranularity}
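
With `granularity` loosened from `TimeGranularity` to `str`, custom grains become legal and standard grains are detected case-insensitively; a few illustrative checks:

```python
# Standard grain, any casing:
assert MetricTimeWindow(count=7, granularity="DAY").is_standard_granularity
# Custom grain falls outside the TimeGranularity enum:
assert not MetricTimeWindow(count=2, granularity="fiscal_quarter").is_standard_granularity
assert MetricTimeWindow(count=7, granularity="day").window_string == "7 day"
```
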
@dataclass
@@ -55,7 +63,7 @@ class MetricInput(dbtClassMixin):
    filter: Optional[WhereFilterIntersection] = None
    alias: Optional[str] = None
    offset_window: Optional[MetricTimeWindow] = None
    offset_to_grain: Optional[TimeGranularity] = None
    offset_to_grain: Optional[str] = None

    def as_reference(self) -> MetricReference:
        return MetricReference(element_name=self.name)
@@ -83,7 +91,7 @@ class ConversionTypeParams(dbtClassMixin):
@dataclass
class CumulativeTypeParams(dbtClassMixin):
    window: Optional[MetricTimeWindow] = None
    grain_to_date: Optional[TimeGranularity] = None
    grain_to_date: Optional[str] = None
    period_agg: PeriodAggregation = PeriodAggregation.FIRST


@@ -95,7 +103,9 @@ class MetricTypeParams(dbtClassMixin):
    denominator: Optional[MetricInput] = None
    expr: Optional[str] = None
    window: Optional[MetricTimeWindow] = None
    grain_to_date: Optional[TimeGranularity] = None
    grain_to_date: Optional[TimeGranularity] = (
        None  # legacy, use cumulative_type_params.grain_to_date
    )
    metrics: Optional[List[MetricInput]] = None
    conversion_type_params: Optional[ConversionTypeParams] = None
    cumulative_type_params: Optional[CumulativeTypeParams] = None
@@ -121,7 +131,7 @@ class Metric(GraphResource):
    type_params: MetricTypeParams
    filter: Optional[WhereFilterIntersection] = None
    metadata: Optional[SourceFileMetadata] = None
    time_granularity: Optional[TimeGranularity] = None
    time_granularity: Optional[str] = None
    resource_type: Literal[NodeType.Metric]
    meta: Dict[str, Any] = field(default_factory=dict, metadata=MergeBehavior.Update.meta())
    tags: List[str] = field(default_factory=list)

@@ -1,8 +1,9 @@
import enum
from dataclasses import dataclass, field
from datetime import datetime
from typing import Dict, List, Literal, Optional

from dbt.artifacts.resources.types import AccessType, NodeType
from dbt.artifacts.resources.types import AccessType, NodeType, TimePeriod
from dbt.artifacts.resources.v1.components import (
    CompiledResource,
    DeferRelation,
@@ -11,7 +12,59 @@ from dbt.artifacts.resources.v1.components import (
from dbt.artifacts.resources.v1.config import NodeConfig
from dbt_common.contracts.config.base import MergeBehavior
from dbt_common.contracts.constraints import ModelLevelConstraint
from dbt_common.dataclass_schema import dbtClassMixin
from dbt_common.contracts.util import Mergeable
from dbt_common.dataclass_schema import ExtensibleDbtClassMixin, dbtClassMixin


class ModelFreshnessUpdatesOnOptions(enum.Enum):
    all = "all"
    any = "any"


@dataclass
class ModelBuildAfter(ExtensibleDbtClassMixin):
    count: int
    period: TimePeriod
    updates_on: ModelFreshnessUpdatesOnOptions = ModelFreshnessUpdatesOnOptions.any


@dataclass
class ModelFreshness(ExtensibleDbtClassMixin, Mergeable):
    build_after: ModelBuildAfter


def merge_model_freshness(*thresholds: Optional[ModelFreshness]) -> Optional[ModelFreshness]:
    if not thresholds:
        return None

    current_merged_value: Optional[ModelFreshness] = thresholds[0]

    for i in range(1, len(thresholds)):
        base = current_merged_value
        update = thresholds[i]

        if base is not None and update is not None:
            # When both base and update freshness are defined,
            # create a new ModelFreshness instance using the build_after from the 'update'.
            # This effectively means 'update's build_after configuration takes precedence.
            merged_freshness_obj = base.merged(update)
            if (
                base.build_after.updates_on == ModelFreshnessUpdatesOnOptions.all
                or update.build_after.updates_on == ModelFreshnessUpdatesOnOptions.all
            ):
                merged_freshness_obj.build_after.updates_on = ModelFreshnessUpdatesOnOptions.all
            current_merged_value = merged_freshness_obj
        elif base is None and update is not None:
            # If the current merged value is None but the new update is defined,
            # take the update.
            current_merged_value = update
        else:
            # This covers cases where 'update' is None: the pair-merge keeps
            # 'base' as-is (which may itself be None).
            current_merged_value = base

    return current_merged_value
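
An illustrative pairwise merge, assuming `TimePeriod` has `hour` and `day` members (all values made up): the later threshold's `build_after` wins, and `updates_on == all` is sticky.

```python
project_level = ModelFreshness(build_after=ModelBuildAfter(count=1, period=TimePeriod.day))
model_level = ModelFreshness(
    build_after=ModelBuildAfter(
        count=6,
        period=TimePeriod.hour,
        updates_on=ModelFreshnessUpdatesOnOptions.all,
    )
)

merged = merge_model_freshness(project_level, model_level)
assert merged.build_after.count == 6  # the update's build_after takes precedence
assert merged.build_after.updates_on == ModelFreshnessUpdatesOnOptions.all  # "all" sticks
```
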
@dataclass
@@ -20,6 +73,21 @@ class ModelConfig(NodeConfig):
        default=AccessType.Protected,
        metadata=MergeBehavior.Clobber.meta(),
    )
    freshness: Optional[ModelFreshness] = None

    @classmethod
    def __pre_deserialize__(cls, data):
        data = super().__pre_deserialize__(data)
        # scrub out model configs where "build_after" is not defined
        if (
            "freshness" in data
            and isinstance(data["freshness"], dict)
            and "build_after" in data["freshness"]
        ):
            data["freshness"] = ModelFreshness.from_dict(data["freshness"]).to_dict()
        else:
            data.pop("freshness", None)
        return data


@dataclass
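
A hedged sketch of the scrub (the dict shapes are assumptions, and the base class's `__pre_deserialize__` is assumed to pass unknown keys through unchanged):

```python
# A freshness config without "build_after" is dropped before deserialization...
data = ModelConfig.__pre_deserialize__({"freshness": {"updates_on": "any"}})
assert "freshness" not in data

# ...while one with "build_after" is round-tripped through ModelFreshness.
data = ModelConfig.__pre_deserialize__(
    {"freshness": {"build_after": {"count": 1, "period": "day"}}}
)
assert "build_after" in data["freshness"]
```
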
@@ -1,10 +1,10 @@
from dataclasses import dataclass
from typing import Optional
from typing import List, Optional, Union

from dbt_common.contracts.config.properties import AdditionalPropertiesAllowed


@dataclass
class Owner(AdditionalPropertiesAllowed):
    email: Optional[str] = None
    email: Union[str, List[str], None] = None
    name: Optional[str] = None
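
The widened type means any of these is now valid (addresses are illustrative):

```python
Owner(email="data-team@example.com")             # a single address, as before
Owner(email=["a@example.com", "b@example.com"])  # now also a list of addresses
Owner(name="Data Team")                          # email remains optional
```
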
@@ -2,16 +2,18 @@ from __future__ import annotations

import time
from dataclasses import dataclass, field
from typing import Any, Dict, List, Literal, Optional
from typing import Any, Dict, List, Literal, Optional, Union

from dbt.artifacts.resources.base import GraphResource
from dbt.artifacts.resources.types import NodeType
from dbt.artifacts.resources.v1.components import DependsOn, RefArgs
from dbt.artifacts.resources.v1.config import list_str, metas
from dbt.artifacts.resources.v1.semantic_layer_components import (
    SourceFileMetadata,
    WhereFilterIntersection,
)
from dbt_common.contracts.config.base import BaseConfig, CompareBehavior, MergeBehavior
from dbt_common.contracts.config.metadata import ShowBehavior
from dbt_common.dataclass_schema import dbtClassMixin
from dbt_semantic_interfaces.type_enums.export_destination_type import (
    ExportDestinationType,
@@ -95,6 +97,10 @@ class SavedQuery(SavedQueryMandatory):
    depends_on: DependsOn = field(default_factory=DependsOn)
    created_at: float = field(default_factory=lambda: time.time())
    refs: List[RefArgs] = field(default_factory=list)
    tags: Union[List[str], str] = field(
        default_factory=list_str,
        metadata=metas(ShowBehavior.Hide, MergeBehavior.Append, CompareBehavior.Exclude),
    )

    @property
    def metrics(self) -> List[str]:

@@ -2,9 +2,10 @@ from dataclasses import dataclass
from typing import List, Sequence, Tuple

from dbt_common.dataclass_schema import dbtClassMixin
from dbt_semantic_interfaces.call_parameter_sets import FilterCallParameterSets
from dbt_semantic_interfaces.parsing.where_filter.where_filter_parser import (
    WhereFilterParser,
from dbt_semantic_interfaces.call_parameter_sets import JinjaCallParameterSets
from dbt_semantic_interfaces.parsing.where_filter.jinja_object_parser import (
    JinjaObjectParser,
    QueryItemLocation,
)


@@ -12,17 +13,23 @@ from dbt_semantic_interfaces.parsing.where_filter.where_filter_parser import (
class WhereFilter(dbtClassMixin):
    where_sql_template: str

    @property
    def call_parameter_sets(self) -> FilterCallParameterSets:
        return WhereFilterParser.parse_call_parameter_sets(self.where_sql_template)
    def call_parameter_sets(
        self, custom_granularity_names: Sequence[str]
    ) -> JinjaCallParameterSets:
        return JinjaObjectParser.parse_call_parameter_sets(
            self.where_sql_template,
            custom_granularity_names=custom_granularity_names,
            query_item_location=QueryItemLocation.NON_ORDER_BY,
        )


@dataclass
class WhereFilterIntersection(dbtClassMixin):
    where_filters: List[WhereFilter]

    @property
    def filter_expression_parameter_sets(self) -> Sequence[Tuple[str, FilterCallParameterSets]]:
    def filter_expression_parameter_sets(
        self, custom_granularity_names: Sequence[str]
    ) -> Sequence[Tuple[str, JinjaCallParameterSets]]:
        raise NotImplementedError

@@ -31,6 +31,14 @@ https://github.com/dbt-labs/dbt-semantic-interfaces/blob/main/dbt_semantic_inter
"""


@dataclass
class SemanticLayerElementConfig(dbtClassMixin):
    meta: Dict[str, Any] = field(
        default_factory=dict,
        metadata=MergeBehavior.Update.meta(),
    )


@dataclass
class Defaults(dbtClassMixin):
    agg_time_dimension: Optional[str] = None
@@ -72,6 +80,7 @@ class Dimension(dbtClassMixin):
    type_params: Optional[DimensionTypeParams] = None
    expr: Optional[str] = None
    metadata: Optional[SourceFileMetadata] = None
    config: Optional[SemanticLayerElementConfig] = None

    @property
    def reference(self) -> DimensionReference:
@@ -106,6 +115,7 @@ class Entity(dbtClassMixin):
    label: Optional[str] = None
    role: Optional[str] = None
    expr: Optional[str] = None
    config: Optional[SemanticLayerElementConfig] = None

    @property
    def reference(self) -> EntityReference:
@@ -147,6 +157,7 @@ class Measure(dbtClassMixin):
    agg_params: Optional[MeasureAggregationParameters] = None
    non_additive_dimension: Optional[NonAdditiveDimension] = None
    agg_time_dimension: Optional[str] = None
    config: Optional[SemanticLayerElementConfig] = None

    @property
    def reference(self) -> MeasureReference:

@@ -20,7 +20,7 @@ class SnapshotMetaColumnNames(dbtClassMixin):
class SnapshotConfig(NodeConfig):
    materialized: str = "snapshot"
    strategy: Optional[str] = None
    unique_key: Optional[Union[str, List[str]]] = None
    unique_key: Union[str, List[str], None] = None
    target_schema: Optional[str] = None
    target_database: Optional[str] = None
    updated_at: Optional[str] = None

@@ -10,7 +10,7 @@ from dbt.artifacts.resources.v1.components import (
    HasRelationMetadata,
    Quoting,
)
from dbt.artifacts.resources.v1.config import BaseConfig
from dbt.artifacts.resources.v1.config import BaseConfig, MergeBehavior
from dbt_common.contracts.config.properties import AdditionalPropertiesAllowed
from dbt_common.contracts.util import Mergeable
from dbt_common.exceptions import CompilationError
@@ -20,6 +20,11 @@ from dbt_common.exceptions import CompilationError
class SourceConfig(BaseConfig):
    enabled: bool = True
    event_time: Any = None
    freshness: Optional[FreshnessThreshold] = field(default_factory=FreshnessThreshold)
    loaded_at_field: Optional[str] = None
    loaded_at_query: Optional[str] = None
    meta: Dict[str, Any] = field(default_factory=dict, metadata=MergeBehavior.Update.meta())
    tags: List[str] = field(default_factory=list)


@dataclass
@@ -59,6 +64,7 @@ class ParsedSourceMandatory(GraphResource, HasRelationMetadata):
class SourceDefinition(ParsedSourceMandatory):
    quoting: Quoting = field(default_factory=Quoting)
    loaded_at_field: Optional[str] = None
    loaded_at_query: Optional[str] = None
    freshness: Optional[FreshnessThreshold] = None
    external: Optional[ExternalTable] = None
    description: str = ""
@@ -73,3 +79,4 @@ class SourceDefinition(ParsedSourceMandatory):
    created_at: float = field(default_factory=lambda: time.time())
    unrendered_database: Optional[str] = None
    unrendered_schema: Optional[str] = None
    doc_blocks: List[str] = field(default_factory=list)

@@ -1,6 +1,6 @@
import dataclasses
import functools
from datetime import datetime
from datetime import datetime, timezone
from typing import Any, ClassVar, Dict, Optional, Type, TypeVar

from mashumaro.jsonschema import build_json_schema
@@ -12,7 +12,7 @@ from dbt_common.clients.system import read_json, write_json
from dbt_common.dataclass_schema import dbtClassMixin
from dbt_common.events.functions import get_metadata_vars
from dbt_common.exceptions import DbtInternalError, DbtRuntimeError
from dbt_common.invocation import get_invocation_id
from dbt_common.invocation import get_invocation_id, get_invocation_started_at

BASE_SCHEMAS_URL = "https://schemas.getdbt.com/"
SCHEMA_PATH = "dbt/{name}/v{version}.json"
@@ -55,8 +55,13 @@ class Readable:
class BaseArtifactMetadata(dbtClassMixin):
    dbt_schema_version: str
    dbt_version: str = __version__
    generated_at: datetime = dataclasses.field(default_factory=datetime.utcnow)
    generated_at: datetime = dataclasses.field(
        default_factory=lambda: datetime.now(timezone.utc).replace(tzinfo=None)
    )
    invocation_id: Optional[str] = dataclasses.field(default_factory=get_invocation_id)
    invocation_started_at: Optional[datetime] = dataclasses.field(
        default_factory=get_invocation_started_at
    )
    env: Dict[str, str] = dataclasses.field(default_factory=get_metadata_vars)

    def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None):
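
The `generated_at` change swaps the deprecated `datetime.utcnow()` for an equivalent naive-UTC expression; a minimal demonstration of why the two are interchangeable:

```python
from datetime import datetime, timezone

# datetime.utcnow() is deprecated as of Python 3.12. This produces the same
# naive-UTC value without the deprecation warning:
naive_utc = datetime.now(timezone.utc).replace(tzinfo=None)
assert naive_utc.tzinfo is None  # still naive, so serialized artifacts keep their shape
```
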
@@ -21,6 +21,7 @@ from dbt.artifacts.resources import (
    SqlOperation,
    UnitTestDefinition,
)
from dbt.artifacts.resources.v1.components import Quoting
from dbt.artifacts.schemas.base import (
    ArtifactMixin,
    BaseArtifactMetadata,
@@ -28,6 +29,7 @@ from dbt.artifacts.schemas.base import (
    schema_version,
)
from dbt.artifacts.schemas.upgrades import upgrade_manifest_json
from dbt_common.exceptions import DbtInternalError

NodeEdgeMap = Dict[str, List[str]]
UniqueID = str
@@ -87,6 +89,10 @@ class ManifestMetadata(BaseArtifactMetadata):
        default=None,
        metadata=dict(description="The type name of the adapter"),
    )
    quoting: Optional[Quoting] = field(
        default_factory=Quoting,
        metadata=dict(description="The quoting configuration for the project"),
    )

    @classmethod
    def default(cls):
@@ -180,3 +186,13 @@ class WritableManifest(ArtifactMixin):
        if manifest_schema_version < cls.dbt_schema_version.version:
            data = upgrade_manifest_json(data, manifest_schema_version)
        return cls.from_dict(data)

    @classmethod
    def validate(cls, _):
        # When dbt tries to load an artifact with additional optional fields
        # that are not present in the schema, from_dict will work fine.
        # As long as validate is not called, the schema will not be enforced.
        # This is intentional, as it allows for safer schema upgrades.
        raise DbtInternalError(
            "The WritableManifest should never be validated directly to allow for schema upgrades."
        )

@@ -1,5 +1,5 @@
from dataclasses import dataclass
from datetime import datetime
from datetime import datetime, timezone
from typing import Any, Callable, Dict, List, Optional, Sequence, Union

from dbt.contracts.graph.nodes import ResultNode
@@ -21,10 +21,10 @@ class TimingInfo(dbtClassMixin):
    completed_at: Optional[datetime] = None

    def begin(self):
        self.started_at = datetime.utcnow()
        self.started_at = datetime.now(timezone.utc).replace(tzinfo=None)

    def end(self):
        self.completed_at = datetime.utcnow()
        self.completed_at = datetime.now(timezone.utc).replace(tzinfo=None)

    def to_msg_dict(self):
        msg_dict = {"name": str(self.name)}
@@ -64,6 +64,7 @@ class NodeStatus(StrEnum):
    PartialSuccess = "partial success"
    Pass = "pass"
    RuntimeErr = "runtime error"
    NoOp = "no-op"


class RunStatus(StrEnum):
@@ -71,6 +72,7 @@ class RunStatus(StrEnum):
    Error = NodeStatus.Error
    Skipped = NodeStatus.Skipped
    PartialSuccess = NodeStatus.PartialSuccess
    NoOp = NodeStatus.NoOp


class TestStatus(StrEnum):

@@ -3,7 +3,7 @@ from __future__ import annotations
import copy
import threading
from dataclasses import dataclass, field
from datetime import datetime
from datetime import datetime, timezone
from typing import Any, Dict, Iterable, Optional, Sequence, Tuple

# https://github.com/dbt-labs/dbt-core/issues/10098
@@ -101,7 +101,9 @@ class RunExecutionResult(
):
    results: Sequence[RunResult]
    args: Dict[str, Any] = field(default_factory=dict)
    generated_at: datetime = field(default_factory=datetime.utcnow)
    generated_at: datetime = field(
        default_factory=lambda: datetime.now(timezone.utc).replace(tzinfo=None)
    )

    def write(self, path: str):
        writable = RunResultsArtifact.from_execution_results(

@@ -15,14 +15,15 @@ from dbt.cli.exceptions import DbtUsageException
from dbt.cli.resolvers import default_log_path, default_project_dir
from dbt.cli.types import Command as CliCommand
from dbt.config.project import read_project_flags
from dbt.config.utils import normalize_warn_error_options
from dbt.contracts.project import ProjectFlags
from dbt.deprecations import fire_buffered_deprecations, renamed_env_var
from dbt.deprecations import fire_buffered_deprecations, renamed_env_var, warn
from dbt.events import ALL_EVENT_NAMES
from dbt_common import ui
from dbt_common.clients import jinja
from dbt_common.events import functions
from dbt_common.exceptions import DbtInternalError
from dbt_common.helper_types import WarnErrorOptions
from dbt_common.helper_types import WarnErrorOptionsV2

if os.name != "nt":
    # https://bugs.python.org/issue41567
@@ -49,6 +50,8 @@ DEPRECATED_PARAMS = {
}


DEPRECATED_FLAGS_TO_WARNINGS = {("--models", "--model", "-m"): "model-param-usage-deprecation"}

WHICH_KEY = "which"

@@ -56,9 +59,10 @@ def convert_config(config_name, config_value):
    """Convert the values from config and original set_from_args to the correct type."""
    ret = config_value
    if config_name.lower() == "warn_error_options" and type(config_value) == dict:
        ret = WarnErrorOptions(
            include=config_value.get("include", []),
            exclude=config_value.get("exclude", []),
        normalize_warn_error_options(ret)
        ret = WarnErrorOptionsV2(
            error=config_value.get("error", []),
            warn=config_value.get("warn", []),
            silence=config_value.get("silence", []),
            valid_error_names=ALL_EVENT_NAMES,
        )
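
A hedged sketch of the new dict shape `convert_config` now expects: the old include/exclude keys give way to error/warn/silence (the event names below are examples, not a recommendation):

```python
opts = {
    "error": ["Deprecations"],                   # escalate these warnings to errors
    "warn": [],                                  # leave these as warnings
    "silence": ["NoNodesForSelectionCriteria"],  # suppress these entirely
}
result = convert_config("warn_error_options", opts)  # -> a WarnErrorOptionsV2
```
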
@@ -383,7 +387,7 @@ class Flags:
                "Value for `--event-time-start` must be less than `--event-time-end`"
            )

    def fire_deprecations(self):
    def fire_deprecations(self, ctx: Optional[Context] = None):
        """Fires events for deprecated env_var usage."""
        [dep_fn() for dep_fn in self.deprecated_env_var_warnings]
        # It is necessary to remove this attr from the class so it does
@@ -392,12 +396,25 @@ class Flags:

        fire_buffered_deprecations()

        # Handle firing deprecations of CLI aliases separately using argv or dbtRunner args
        # because click doesn't make it possible to disambiguate which literal CLI option was used
        # and only preserves the 'canonical' representation.
        original_command_args = (
            ctx.obj["dbt_runner_command_args"]
            if (ctx and ctx.obj and "dbt_runner_command_args" in ctx.obj)
            else sys.argv
        )
        for deprecated_flags, warning in DEPRECATED_FLAGS_TO_WARNINGS.items():
            for deprecated_flag in deprecated_flags:
                if deprecated_flag in original_command_args:
                    warn(warning)

    @classmethod
    def from_dict(cls, command: CliCommand, args_dict: Dict[str, Any]) -> "Flags":
        command_arg_list = command_params(command, args_dict)
        ctx = args_to_context(command_arg_list)
        flags = cls(ctx=ctx)
        flags.fire_deprecations()
        flags.fire_deprecations(ctx=ctx)
        return flags

    def set_common_global_flags(self):

@@ -56,6 +56,7 @@ class dbtRunner:
        dbt_ctx.obj = {
            "manifest": self.manifest,
            "callbacks": self.callbacks,
            "dbt_runner_command_args": args,
        }

        for key, value in kwargs.items():
@@ -129,6 +130,7 @@ def global_flags(func):
    @p.record_timing_info
    @p.send_anonymous_usage_stats
    @p.single_threaded
    @p.show_all_deprecations
    @p.state
    @p.static_parser
    @p.target
@@ -140,6 +142,8 @@ def global_flags(func):
    @p.warn_error
    @p.warn_error_options
    @p.write_json
    @p.use_fast_test_edges
    @p.upload_artifacts
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)
@@ -178,6 +182,7 @@ def cli(ctx, **kwargs):
@p.project_dir
@p.resource_type
@p.exclude_resource_type
@p.sample
@p.select
@p.selector
@p.show
@@ -189,6 +194,7 @@ def cli(ctx, **kwargs):
@requires.preflight
@requires.profile
@requires.project
@requires.catalogs
@requires.runtime_config
@requires.manifest
def build(ctx, **kwargs):
@@ -554,6 +560,7 @@ def parse(ctx, **kwargs):
@p.empty
@p.event_time_start
@p.event_time_end
@p.sample
@p.select
@p.selector
@p.target_path
@@ -563,6 +570,7 @@ def parse(ctx, **kwargs):
@requires.preflight
@requires.profile
@requires.project
@requires.catalogs
@requires.runtime_config
@requires.manifest
def run(ctx, **kwargs):

@@ -1,10 +1,14 @@
from click import Choice, ParamType
from typing import Optional

import pytz
from click import Choice, Context, Parameter, ParamType

from dbt.config.utils import normalize_warn_error_options, parse_cli_yaml_string
from dbt.event_time.sample_window import SampleWindow
from dbt.events import ALL_EVENT_NAMES
from dbt.exceptions import OptionNotYamlDictError, ValidationError
from dbt_common.exceptions import DbtValidationError
from dbt_common.helper_types import WarnErrorOptions
from dbt_common.helper_types import WarnErrorOptionsV2


class YAML(ParamType):
@@ -50,13 +54,13 @@ class WarnErrorOptionsType(YAML):

    def convert(self, value, param, ctx):
        # this function is being used by param in click
        include_exclude = super().convert(value, param, ctx)
        normalize_warn_error_options(include_exclude)
        warn_error_options = super().convert(value, param, ctx)
        normalize_warn_error_options(warn_error_options)

        return WarnErrorOptions(
            include=include_exclude.get("include", []),
            exclude=include_exclude.get("exclude", []),
            silence=include_exclude.get("silence", []),
        return WarnErrorOptionsV2(
            error=warn_error_options.get("error", []),
            warn=warn_error_options.get("warn", []),
            silence=warn_error_options.get("silence", []),
            valid_error_names=ALL_EVENT_NAMES,
        )

@@ -88,3 +92,30 @@ class ChoiceTuple(Choice):
        super().convert(value, param, ctx)

        return value


class SampleType(ParamType):
    name = "SAMPLE"

    def convert(
        self, value, param: Optional[Parameter], ctx: Optional[Context]
    ) -> Optional[SampleWindow]:
        if value is None:
            return None

        if isinstance(value, str):
            try:
                # Try and identify if it's a "dict" or a "str"
                if value.lstrip()[0] == "{":
                    param_option_name: str = param.opts[0] if param.opts else param.name  # type: ignore
                    parsed_dict = parse_cli_yaml_string(value, param_option_name.strip("-"))
                    sample_window = SampleWindow.from_dict(parsed_dict)
                    sample_window.start = sample_window.start.replace(tzinfo=pytz.UTC)
                    sample_window.end = sample_window.end.replace(tzinfo=pytz.UTC)
                    return sample_window
                else:
                    return SampleWindow.from_relative_string(value)
            except Exception as e:
                self.fail(e.__str__(), param, ctx)
        else:
            self.fail(f"Cannot load SAMPLE_WINDOW from type {type(value)}", param, ctx)
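
A hedged note on the two input shapes the converter distinguishes; the exact string grammars are assumptions, not documented contracts:

```python
converter = SampleType()
# 1. Values that start with "{" are parsed as YAML, handed to
#    SampleWindow.from_dict, and both bounds are pinned to UTC, e.g.:
#        --sample "{start: 2025-01-01, end: 2025-02-01}"
# 2. Anything else is treated as a relative window, e.g.:
#        --sample "30 days"  ->  SampleWindow.from_relative_string("30 days")
```
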
@@ -1,53 +1,89 @@
from pathlib import Path
from typing import Any, Callable, List

import click

from dbt.cli.option_types import YAML, ChoiceTuple, Package, WarnErrorOptionsType
from dbt.cli.option_types import (
    YAML,
    ChoiceTuple,
    Package,
    SampleType,
    WarnErrorOptionsType,
)
from dbt.cli.options import MultiOption
from dbt.cli.resolvers import default_profiles_dir, default_project_dir
from dbt.version import get_version_information

add_package = click.option(
# --- shared option specs --- #
model_decls = ("-m", "--models", "--model")
select_decls = ("-s", "--select")
select_attrs = {
    "envvar": None,
    "help": "Specify the nodes to include.",
    "cls": MultiOption,
    "multiple": True,
    "type": tuple,
}

# Record of env vars associated with options
KNOWN_ENV_VARS: List[str] = []


def _create_option_and_track_env_var(
    *args: Any, **kwargs: Any
) -> Callable[[click.decorators.FC], click.decorators.FC]:
    global KNOWN_ENV_VARS

    envvar = kwargs.get("envvar", None)
    if isinstance(envvar, str):
        KNOWN_ENV_VARS.append(envvar)

    return click.option(*args, **kwargs)

# --- The actual option definitions --- #
add_package = _create_option_and_track_env_var(
    "--add-package",
    help="Add a package to current package spec, specify it as package-name@version. Change the source with --source flag.",
    envvar=None,
    type=Package(),
)

args = _create_option_and_track_env_var(
    "--args",
    envvar=None,
    help="Supply arguments to the macro. This dictionary will be mapped to the keyword arguments defined in the selected macro. This argument should be a YAML string, eg. '{my_variable: my_value}'",
    type=YAML(),
)

browser = click.option(
browser = _create_option_and_track_env_var(
    "--browser/--no-browser",
    envvar=None,
    help="Wether or not to open a local web browser after starting the server",
    help="Whether or not to open a local web browser after starting the server",
    default=True,
)

cache_selected_only = click.option(
cache_selected_only = _create_option_and_track_env_var(
    "--cache-selected-only/--no-cache-selected-only",
    envvar="DBT_CACHE_SELECTED_ONLY",
    help="At start of run, populate relational cache only for schemas containing selected nodes, or for all schemas of interest.",
)

introspect = click.option(
    "--introspect/--no-introspect",
    envvar="DBT_INTROSPECT",
    help="Whether to scaffold introspective queries as part of compilation",
clean_project_files_only = _create_option_and_track_env_var(
    "--clean-project-files-only / --no-clean-project-files-only",
    envvar="DBT_CLEAN_PROJECT_FILES_ONLY",
    help="If disabled, dbt clean will delete all paths specified in clean-paths, even if they're outside the dbt project.",
    default=True,
)

compile_docs = click.option(
compile_docs = _create_option_and_track_env_var(
    "--compile/--no-compile",
    envvar=None,
    help="Whether or not to run 'dbt compile' as part of docs generation",
    default=True,
)

compile_inject_ephemeral_ctes = click.option(
compile_inject_ephemeral_ctes = _create_option_and_track_env_var(
    "--inject-ephemeral-ctes/--no-inject-ephemeral-ctes",
    envvar=None,
    help="Internal flag controlling injection of referenced ephemeral models' CTEs during `compile`.",
@@ -55,28 +91,48 @@ compile_inject_ephemeral_ctes = click.option(
    default=True,
)

config_dir = click.option(
config_dir = _create_option_and_track_env_var(
    "--config-dir",
    envvar=None,
    help="Print a system-specific command to access the directory that the current dbt project is searching for a profiles.yml. Then, exit. This flag renders other debug step flags no-ops.",
    is_flag=True,
)

debug = click.option(
debug = _create_option_and_track_env_var(
    "--debug/--no-debug",
    "-d/ ",
    envvar="DBT_DEBUG",
    help="Display debug logging during dbt execution. Useful for debugging and making bug reports.",
)

debug_connection = _create_option_and_track_env_var(
    "--connection",
    envvar=None,
    help="Test the connection to the target database independent of dependency checks.",
    is_flag=True,
)

# flag was previously named DEFER_MODE
defer = click.option(
defer = _create_option_and_track_env_var(
    "--defer/--no-defer",
    envvar="DBT_DEFER",
    help="If set, resolve unselected nodes by deferring to the manifest within the --state directory.",
)

deprecated_defer = click.option(
defer_state = _create_option_and_track_env_var(
    "--defer-state",
    envvar="DBT_DEFER_STATE",
    help="Override the state directory for deferral only.",
    type=click.Path(
        dir_okay=True,
        file_okay=False,
        readable=True,
        resolve_path=False,
        path_type=Path,
    ),
)

deprecated_defer = _create_option_and_track_env_var(
    "--deprecated-defer",
    envvar="DBT_DEFER_TO_STATE",
    help="Internal flag for deprecating old env var.",
@@ -84,14 +140,59 @@ deprecated_defer = click.option(
    hidden=True,
)

empty = click.option(
deprecated_favor_state = _create_option_and_track_env_var(
    "--deprecated-favor-state",
    envvar="DBT_FAVOR_STATE_MODE",
    help="Internal flag for deprecating old env var.",
)

# Renamed to --export-saved-queries
deprecated_include_saved_query = _create_option_and_track_env_var(
    "--include-saved-query/--no-include-saved-query",
    envvar="DBT_INCLUDE_SAVED_QUERY",
    help="Include saved queries in the list of resources to be selected for build command",
    is_flag=True,
    hidden=True,
)

deprecated_print = _create_option_and_track_env_var(
    "--deprecated-print/--deprecated-no-print",
    envvar="DBT_NO_PRINT",
    help="Internal flag for deprecating old env var.",
    default=True,
    hidden=True,
    callback=lambda ctx, param, value: not value,
)

deprecated_state = _create_option_and_track_env_var(
    "--deprecated-state",
    envvar="DBT_ARTIFACT_STATE_PATH",
    help="Internal flag for deprecating old env var.",
    hidden=True,
    type=click.Path(
        dir_okay=True,
        file_okay=False,
        readable=True,
        resolve_path=True,
        path_type=Path,
    ),
)

empty = _create_option_and_track_env_var(
    "--empty/--no-empty",
    envvar="DBT_EMPTY",
    help="If specified, limit input refs and sources to zero rows.",
    is_flag=True,
)

event_time_end = click.option(
empty_catalog = _create_option_and_track_env_var(
    "--empty-catalog",
    help="If specified, generate empty catalog.json file during the `dbt docs generate` command.",
    default=False,
    is_flag=True,
)

event_time_end = _create_option_and_track_env_var(
    "--event-time-end",
    envvar="DBT_EVENT_TIME_END",
    help="If specified, the end datetime dbt uses to filter microbatch model inputs (exclusive).",
@@ -99,7 +200,7 @@ event_time_end = click.option(
    default=None,
)

event_time_start = click.option(
event_time_start = _create_option_and_track_env_var(
    "--event-time-start",
    envvar="DBT_EVENT_TIME_START",
    help="If specified, the start datetime dbt uses to filter microbatch model inputs (inclusive).",
@@ -107,7 +208,7 @@ event_time_start = click.option(
    default=None,
)

exclude = click.option(
exclude = _create_option_and_track_env_var(
    "--exclude",
    envvar=None,
    type=tuple,
@@ -116,7 +217,34 @@ exclude = click.option(
    help="Specify the nodes to exclude.",
)

export_saved_queries = click.option(
exclude_resource_type = _create_option_and_track_env_var(
    "--exclude-resource-types",
    "--exclude-resource-type",
    envvar="DBT_EXCLUDE_RESOURCE_TYPES",
    help="Specify the types of resources that dbt will exclude",
    type=ChoiceTuple(
        [
            "metric",
            "semantic_model",
            "saved_query",
            "source",
            "analysis",
            "model",
            "test",
            "unit_test",
            "exposure",
            "snapshot",
            "seed",
            "default",
        ],
        case_sensitive=False,
    ),
    cls=MultiOption,
    multiple=True,
    default=(),
)

export_saved_queries = _create_option_and_track_env_var(
    "--export-saved-queries/--no-export-saved-queries",
    envvar="DBT_EXPORT_SAVED_QUERIES",
    help="Export saved queries within the 'build' command, otherwise no-op",
@@ -124,26 +252,20 @@ export_saved_queries = click.option(
    hidden=True,
)

fail_fast = click.option(
fail_fast = _create_option_and_track_env_var(
    "--fail-fast/--no-fail-fast",
    "-x/ ",
    envvar="DBT_FAIL_FAST",
    help="Stop execution on first failure.",
)

favor_state = click.option(
favor_state = _create_option_and_track_env_var(
    "--favor-state/--no-favor-state",
    envvar="DBT_FAVOR_STATE",
    help="If set, defer to the argument provided to the state flag for resolving unselected nodes, even if the node(s) exist as a database object in the current environment.",
)

deprecated_favor_state = click.option(
    "--deprecated-favor-state",
    envvar="DBT_FAVOR_STATE_MODE",
    help="Internal flag for deprecating old env var.",
)

full_refresh = click.option(
full_refresh = _create_option_and_track_env_var(
    "--full-refresh",
    "-f",
    envvar="DBT_FULL_REFRESH",
@@ -151,7 +273,7 @@ full_refresh = click.option(
    is_flag=True,
)

host = click.option(
host = _create_option_and_track_env_var(
    "--host",
    envvar="DBT_HOST",
    help="host to serve dbt docs on",
@@ -159,7 +281,7 @@ host = click.option(
    default="127.0.0.1",
)

indirect_selection = click.option(
indirect_selection = _create_option_and_track_env_var(
    "--indirect-selection",
    envvar="DBT_INDIRECT_SELECTION",
    help="Choose which tests to select that are adjacent to selected resources. Eager is most inclusive, cautious is most exclusive, and buildable is in between. Empty includes no tests at all.",
@@ -167,20 +289,40 @@ indirect_selection = click.option(
    default="eager",
)

lock = click.option(
inline = _create_option_and_track_env_var(
    "--inline",
    envvar=None,
    help="Pass SQL inline to dbt compile and show",
)

inline_direct = _create_option_and_track_env_var(
    "--inline-direct",
    envvar=None,
    help="Internal flag to pass SQL inline to dbt show. Do not load the entire project or apply templating.",
    hidden=True,
)

introspect = _create_option_and_track_env_var(
    "--introspect/--no-introspect",
    envvar="DBT_INTROSPECT",
    help="Whether to scaffold introspective queries as part of compilation",
    default=True,
)

lock = _create_option_and_track_env_var(
    "--lock",
    envvar=None,
    help="Generate the package-lock.yml file without installing the packages.",
    is_flag=True,
)

log_cache_events = click.option(
log_cache_events = _create_option_and_track_env_var(
    "--log-cache-events/--no-log-cache-events",
    help="Enable verbose logging for relational cache events to help when debugging.",
    envvar="DBT_LOG_CACHE_EVENTS",
)

log_format = click.option(
log_format = _create_option_and_track_env_var(
    "--log-format",
    envvar="DBT_LOG_FORMAT",
    help="Specify the format of logging to the console and the log file. Use --log-format-file to configure the format for the log file differently than the console.",
@@ -188,7 +330,7 @@ log_format = click.option(
    default="default",
)

log_format_file = click.option(
log_format_file = _create_option_and_track_env_var(
    "--log-format-file",
    envvar="DBT_LOG_FORMAT_FILE",
    help="Specify the format of logging to the log file by overriding the default value and the general --log-format setting.",
@@ -196,7 +338,7 @@ log_format_file = click.option(
    default="debug",
)

log_level = click.option(
log_level = _create_option_and_track_env_var(
    "--log-level",
    envvar="DBT_LOG_LEVEL",
    help="Specify the minimum severity of events that are logged to the console and the log file. Use --log-level-file to configure the severity for the log file differently than the console.",
@@ -204,7 +346,7 @@ log_level = click.option(
    default="info",
)

log_level_file = click.option(
log_level_file = _create_option_and_track_env_var(
    "--log-level-file",
    envvar="DBT_LOG_LEVEL_FILE",
    help="Specify the minimum severity of events that are logged to the log file by overriding the default value and the general --log-level setting.",
@@ -212,21 +354,7 @@ log_level_file = click.option(
    default="debug",
)

use_colors = click.option(
    "--use-colors/--no-use-colors",
    envvar="DBT_USE_COLORS",
    help="Specify whether log output is colorized in the console and the log file. Use --use-colors-file/--no-use-colors-file to colorize the log file differently than the console.",
    default=True,
)

use_colors_file = click.option(
    "--use-colors-file/--no-use-colors-file",
    envvar="DBT_USE_COLORS_FILE",
    help="Specify whether log file output is colorized by overriding the default value and the general --use-colors/--no-use-colors setting.",
    default=True,
)

log_file_max_bytes = click.option(
log_file_max_bytes = _create_option_and_track_env_var(
    "--log-file-max-bytes",
    envvar="DBT_LOG_FILE_MAX_BYTES",
    help="Configure the max file size in bytes for a single dbt.log file, before rolling over. 0 means no limit.",
@@ -235,7 +363,7 @@ log_file_max_bytes = click.option(
    hidden=True,
)

log_path = click.option(
log_path = _create_option_and_track_env_var(
    "--log-path",
    envvar="DBT_LOG_PATH",
    help="Configure the 'log-path'. Only applies this setting for the current run. Overrides the 'DBT_LOG_PATH' if it is set.",
@@ -243,14 +371,16 @@ log_path = click.option(
    type=click.Path(resolve_path=True, path_type=Path),
)

macro_debugging = click.option(
macro_debugging = _create_option_and_track_env_var(
    "--macro-debugging/--no-macro-debugging",
    envvar="DBT_MACRO_DEBUGGING",
    hidden=True,
)

models = _create_option_and_track_env_var(*model_decls, **select_attrs)  # type: ignore[arg-type]

# This is a less standard usage of --output; output_path below is the more standard one
output = click.option(
output = _create_option_and_track_env_var(
    "--output",
    envvar=None,
    help="Specify the output format: either JSON or a newline-delimited list of selectors, paths, or names",
@@ -258,23 +388,7 @@ output = click.option(
    default="selector",
)

show_output_format = click.option(
    "--output",
    envvar=None,
    help="Output format for dbt compile and dbt show",
    type=click.Choice(["json", "text"], case_sensitive=False),
    default="text",
)

show_limit = click.option(
    "--limit",
    envvar=None,
    help="Limit the number of results returned by dbt show",
    type=click.INT,
    default=5,
)

output_keys = click.option(
output_keys = _create_option_and_track_env_var(
    "--output-keys",
    envvar=None,
    help=(
@@ -287,7 +401,7 @@ output_keys = click.option(
    default=[],
)

output_path = click.option(
output_path = _create_option_and_track_env_var(
    "--output",
    "-o",
    envvar=None,
@@ -296,14 +410,22 @@ output_path = click.option(
    default=None,
)

partial_parse = click.option(
|
||||
partial_parse = _create_option_and_track_env_var(
|
||||
"--partial-parse/--no-partial-parse",
|
||||
envvar="DBT_PARTIAL_PARSE",
|
||||
help="Allow for partial parsing by looking for and writing to a pickle file in the target directory. This overrides the user configuration file.",
|
||||
default=True,
|
||||
)
|
||||
|
||||
partial_parse_file_path = click.option(
|
||||
partial_parse_file_diff = _create_option_and_track_env_var(
|
||||
"--partial-parse-file-diff/--no-partial-parse-file-diff",
|
||||
envvar="DBT_PARTIAL_PARSE_FILE_DIFF",
|
||||
help="Internal flag for whether to compute a file diff during partial parsing.",
|
||||
hidden=True,
|
||||
default=True,
|
||||
)
|
||||
|
||||
partial_parse_file_path = _create_option_and_track_env_var(
|
||||
"--partial-parse-file-path",
|
||||
envvar="DBT_PARTIAL_PARSE_FILE_PATH",
|
||||
help="Internal flag for path to partial_parse.manifest file.",
|
||||
@@ -312,22 +434,21 @@ partial_parse_file_path = click.option(
|
||||
type=click.Path(exists=True, dir_okay=False, resolve_path=True),
|
||||
)
|
||||
|
||||
partial_parse_file_diff = click.option(
|
||||
"--partial-parse-file-diff/--no-partial-parse-file-diff",
|
||||
envvar="DBT_PARTIAL_PARSE_FILE_DIFF",
|
||||
help="Internal flag for whether to compute a file diff during partial parsing.",
|
||||
hidden=True,
|
||||
print = _create_option_and_track_env_var(
|
||||
"--print/--no-print",
|
||||
envvar="DBT_PRINT",
|
||||
help="Output all {{ print() }} macro calls.",
|
||||
default=True,
|
||||
)
|
||||
|
||||
populate_cache = click.option(
|
||||
populate_cache = _create_option_and_track_env_var(
|
||||
"--populate-cache/--no-populate-cache",
|
||||
envvar="DBT_POPULATE_CACHE",
|
||||
help="At start of run, use `show` or `information_schema` queries to populate a relational cache, which can speed up subsequent materializations.",
|
||||
default=True,
|
||||
)
|
||||
|
||||
port = click.option(
|
||||
port = _create_option_and_track_env_var(
|
||||
"--port",
|
||||
envvar=None,
|
||||
help="Specify the port number for the docs server",
|
||||
@@ -335,23 +456,7 @@ port = click.option(
|
||||
type=click.INT,
|
||||
)
|
||||
|
||||
print = click.option(
|
||||
"--print/--no-print",
|
||||
envvar="DBT_PRINT",
|
||||
help="Output all {{ print() }} macro calls.",
|
||||
default=True,
|
||||
)
|
||||
|
||||
deprecated_print = click.option(
|
||||
"--deprecated-print/--deprecated-no-print",
|
||||
envvar="DBT_NO_PRINT",
|
||||
help="Internal flag for deprecating old env var.",
|
||||
default=True,
|
||||
hidden=True,
|
||||
callback=lambda ctx, param, value: not value,
|
||||
)
|
||||
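The `deprecated_print` option above uses a `callback` to invert the parsed value, so the legacy negative env var (`DBT_NO_PRINT`) can feed a positively-named flag downstream. A self-contained toy showing the same click technique (the option and program names here are invented):

```python
import click


@click.command()
@click.option(
    "--hide-banner/--show-banner",
    envvar="APP_HIDE_BANNER",
    default=False,
    # Invert at parse time, so downstream code reads a positive value:
    # after the callback, True means "show the banner".
    callback=lambda ctx, param, value: not value,
)
def main(hide_banner):
    # The parameter keeps its negative name, but the callback flipped it.
    click.echo(f"show banner: {hide_banner}")


if __name__ == "__main__":
    main()
```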

printer_width = click.option(
printer_width = _create_option_and_track_env_var(
    "--printer-width",
    envvar="DBT_PRINTER_WIDTH",
    help="Sets the width of terminal output",
@@ -359,13 +464,13 @@ printer_width = click.option(
    default=80,
)

profile = click.option(
profile = _create_option_and_track_env_var(
    "--profile",
    envvar="DBT_PROFILE",
    help="Which existing profile to load. Overrides setting in dbt_project.yml.",
)

profiles_dir = click.option(
profiles_dir = _create_option_and_track_env_var(
    "--profiles-dir",
    envvar="DBT_PROFILES_DIR",
    help="Which directory to look in for the profiles.yml file. If not set, dbt will look in the current working directory first, then HOME/.dbt/",
@@ -376,7 +481,7 @@ profiles_dir = click.option(
# `dbt debug` uses this because it implements custom behaviour for non-existent profiles.yml directories
# `dbt deps` does not load a profile at all
# `dbt init` will write profiles.yml if it doesn't yet exist
profiles_dir_exists_false = click.option(
profiles_dir_exists_false = _create_option_and_track_env_var(
    "--profiles-dir",
    envvar="DBT_PROFILES_DIR",
    help="Which directory to look in for the profiles.yml file. If not set, dbt will look in the current working directory first, then HOME/.dbt/",
@@ -384,7 +489,7 @@ profiles_dir_exists_false = click.option(
    type=click.Path(exists=False),
)

project_dir = click.option(
project_dir = _create_option_and_track_env_var(
    "--project-dir",
    envvar="DBT_PROJECT_DIR",
    help="Which directory to look in for the dbt_project.yml file. Default is the current working directory and its parents.",
@@ -392,14 +497,16 @@ project_dir = click.option(
    type=click.Path(exists=True),
)

quiet = click.option(
quiet = _create_option_and_track_env_var(
    "--quiet/--no-quiet",
    "-q",
    envvar="DBT_QUIET",
    help="Suppress all non-error logging to stdout. Does not affect {{ print() }} macro calls.",
)

record_timing_info = click.option(
raw_select = _create_option_and_track_env_var(*select_decls, **select_attrs)  # type: ignore[arg-type]

record_timing_info = _create_option_and_track_env_var(
    "--record-timing-info",
    "-r",
    envvar=None,
@@ -407,7 +514,7 @@ record_timing_info = click.option(
    type=click.Path(exists=False),
)

resource_type = click.option(
resource_type = _create_option_and_track_env_var(
    "--resource-types",
    "--resource-type",
    envvar="DBT_RESOURCE_TYPES",
@@ -435,114 +542,86 @@ resource_type = click.option(
    default=(),
)

exclude_resource_type = click.option(
    "--exclude-resource-types",
    "--exclude-resource-type",
    envvar="DBT_EXCLUDE_RESOURCE_TYPES",
    help="Specify the types of resources that dbt will exclude",
    type=ChoiceTuple(
        [
            "metric",
            "semantic_model",
            "saved_query",
            "source",
            "analysis",
            "model",
            "test",
            "unit_test",
            "exposure",
            "snapshot",
            "seed",
            "default",
        ],
        case_sensitive=False,
    ),
    cls=MultiOption,
    multiple=True,
    default=(),
)

# Renamed to --export-saved-queries
deprecated_include_saved_query = click.option(
    "--include-saved-query/--no-include-saved-query",
    envvar="DBT_INCLUDE_SAVED_QUERY",
    help="Include saved queries in the list of resources to be selected for build command",
    is_flag=True,
    hidden=True,
)

model_decls = ("-m", "--models", "--model")
select_decls = ("-s", "--select")
select_attrs = {
    "envvar": None,
    "help": "Specify the nodes to include.",
    "cls": MultiOption,
    "multiple": True,
    "type": tuple,
}

inline = click.option(
    "--inline",
    envvar=None,
    help="Pass SQL inline to dbt compile and show",
)

inline_direct = click.option(
    "--inline-direct",
    envvar=None,
    help="Internal flag to pass SQL inline to dbt show. Do not load the entire project or apply templating.",
    hidden=True,
sample = _create_option_and_track_env_var(
    "--sample",
    envvar="DBT_SAMPLE",
    help="Run in sample mode with given SAMPLE_WINDOW spec, such that ref/source calls are sampled by the sample window.",
    default=None,
    type=SampleType(),
    hidden=True,  # TODO: Unhide
)

# `--select` and `--models` are analogous for most commands except `dbt list` for legacy reasons.
# Most CLI arguments should use the combined `select` option that aliases `--models` to `--select`.
# However, if you need to split out these separators (like `dbt ls`), use the `models` and `raw_select` options instead.
# See https://github.com/dbt-labs/dbt-core/pull/6774#issuecomment-1408476095 for more info.
models = click.option(*model_decls, **select_attrs)  # type: ignore[arg-type]
raw_select = click.option(*select_decls, **select_attrs)  # type: ignore[arg-type]
select = click.option(*select_decls, *model_decls, **select_attrs)  # type: ignore[arg-type]
select = _create_option_and_track_env_var(*select_decls, *model_decls, **select_attrs)  # type: ignore[arg-type]

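Because the declaration tuples and the shared attribute dict are splatted into the option factory, `select` ends up accepting `-s/--select` plus the `-m/--models/--model` aliases, while `models` and `raw_select` keep the two spellings separate. A runnable reduction using plain `click.option` (dbt's `MultiOption` class and the tracking wrapper are omitted here for self-containment):

```python
import click

model_decls = ("-m", "--models", "--model")
select_decls = ("-s", "--select")
select_attrs = {"envvar": None, "help": "Specify the nodes to include.", "multiple": True}

# One option object that answers to every spelling; click derives the
# parameter name "select" from the first long declaration.
select = click.option(*select_decls, *model_decls, **select_attrs)


@click.command()
@select
def ls(select):
    click.echo(select)  # e.g. `ls -s model_a -m model_b` -> ('model_a', 'model_b')


if __name__ == "__main__":
    ls()
```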
selector = click.option(
selector = _create_option_and_track_env_var(
    "--selector",
    envvar=None,
    help="The selector name to use, as defined in selectors.yml",
)

send_anonymous_usage_stats = click.option(
send_anonymous_usage_stats = _create_option_and_track_env_var(
    "--send-anonymous-usage-stats/--no-send-anonymous-usage-stats",
    envvar="DBT_SEND_ANONYMOUS_USAGE_STATS",
    help="Send anonymous usage stats to dbt Labs.",
    default=True,
)

clean_project_files_only = click.option(
    "--clean-project-files-only / --no-clean-project-files-only",
    envvar="DBT_CLEAN_PROJECT_FILES_ONLY",
    help="If disabled, dbt clean will delete all paths specified in clean-paths, even if they're outside the dbt project.",
    default=True,
)

show = click.option(
show = _create_option_and_track_env_var(
    "--show",
    envvar=None,
    help="Show a sample of the loaded data in the terminal",
    is_flag=True,
)

show_limit = _create_option_and_track_env_var(
    "--limit",
    envvar=None,
    help="Limit the number of results returned by dbt show",
    type=click.INT,
    default=5,
)

show_output_format = _create_option_and_track_env_var(
    "--output",
    envvar=None,
    help="Output format for dbt compile and dbt show",
    type=click.Choice(["json", "text"], case_sensitive=False),
    default="text",
)

show_resource_report = _create_option_and_track_env_var(
    "--show-resource-report/--no-show-resource-report",
    default=False,
    envvar="DBT_SHOW_RESOURCE_REPORT",
    hidden=True,
)

# TODO: The env var is a correction!
# The original env var was `DBT_TEST_SINGLE_THREADED`.
# This broke the existing naming convention.
# This will need to be communicated as a change to the community!
#
# N.B. This flag is only used for testing, hence it's hidden from help text.
single_threaded = click.option(
single_threaded = _create_option_and_track_env_var(
    "--single-threaded/--no-single-threaded",
    envvar="DBT_SINGLE_THREADED",
    default=False,
    hidden=True,
)

skip_profile_setup = click.option(
show_all_deprecations = _create_option_and_track_env_var(
    "--show-all-deprecations/--no-show-all-deprecations",
    envvar=None,
    help="By default, each type of deprecation warning is shown only once. Use this flag to show all deprecation warning instances.",
    is_flag=True,
    default=False,
)

skip_profile_setup = _create_option_and_track_env_var(
    "--skip-profile-setup",
    "-s",
    envvar=None,
@@ -550,14 +629,7 @@ skip_profile_setup = click.option(
    is_flag=True,
)

empty_catalog = click.option(
    "--empty-catalog",
    help="If specified, generate empty catalog.json file during the `dbt docs generate` command.",
    default=False,
    is_flag=True,
)

source = click.option(
source = _create_option_and_track_env_var(
    "--source",
    envvar=None,
    help="Source to download page from, must be one of hub, git, or local. Defaults to hub.",
@@ -565,14 +637,7 @@ source = click.option(
    default="hub",
)

static = click.option(
    "--static",
    help="Generate an additional static_index.html with manifest and catalog built-in.",
    default=False,
    is_flag=True,
)

state = click.option(
state = _create_option_and_track_env_var(
    "--state",
    envvar="DBT_STATE",
    help="Unless overridden, use this state directory for both state comparison and deferral.",
@@ -585,76 +650,42 @@ state = click.option(
    ),
)

defer_state = click.option(
    "--defer-state",
    envvar="DBT_DEFER_STATE",
    help="Override the state directory for deferral only.",
    type=click.Path(
        dir_okay=True,
        file_okay=False,
        readable=True,
        resolve_path=False,
        path_type=Path,
    ),
static = _create_option_and_track_env_var(
    "--static",
    help="Generate an additional static_index.html with manifest and catalog built-in.",
    default=False,
    is_flag=True,
)

deprecated_state = click.option(
    "--deprecated-state",
    envvar="DBT_ARTIFACT_STATE_PATH",
    help="Internal flag for deprecating old env var.",
    hidden=True,
    type=click.Path(
        dir_okay=True,
        file_okay=False,
        readable=True,
        resolve_path=True,
        path_type=Path,
    ),
)

static_parser = click.option(
static_parser = _create_option_and_track_env_var(
    "--static-parser/--no-static-parser",
    envvar="DBT_STATIC_PARSER",
    help="Use the static parser.",
    default=True,
)

store_failures = click.option(
store_failures = _create_option_and_track_env_var(
    "--store-failures",
    envvar="DBT_STORE_FAILURES",
    help="Store test results (failing rows) in the database",
    is_flag=True,
)

target = click.option(
target = _create_option_and_track_env_var(
    "--target",
    "-t",
    envvar="DBT_TARGET",
    help="Which target to load for the given profile",
)

target_path = click.option(
target_path = _create_option_and_track_env_var(
    "--target-path",
    envvar="DBT_TARGET_PATH",
    help="Configure the 'target-path'. Only applies this setting for the current run. Overrides the 'DBT_TARGET_PATH' if it is set.",
    type=click.Path(),
)

upgrade = click.option(
    "--upgrade",
    envvar=None,
    help="Upgrade packages to the latest version.",
    is_flag=True,
)

debug_connection = click.option(
    "--connection",
    envvar=None,
    help="Test the connection to the target database independent of dependency checks.",
    is_flag=True,
)

threads = click.option(
threads = _create_option_and_track_env_var(
    "--threads",
    envvar=None,
    help="Specify number of threads to use while executing models. Overrides settings in profiles.yml.",
@@ -662,13 +693,41 @@ threads = click.option(
    type=click.INT,
)

use_experimental_parser = click.option(
upgrade = _create_option_and_track_env_var(
    "--upgrade",
    envvar=None,
    help="Upgrade packages to the latest version.",
    is_flag=True,
)

use_colors = _create_option_and_track_env_var(
    "--use-colors/--no-use-colors",
    envvar="DBT_USE_COLORS",
    help="Specify whether log output is colorized in the console and the log file. Use --use-colors-file/--no-use-colors-file to colorize the log file differently than the console.",
    default=True,
)

use_colors_file = _create_option_and_track_env_var(
    "--use-colors-file/--no-use-colors-file",
    envvar="DBT_USE_COLORS_FILE",
    help="Specify whether log file output is colorized by overriding the default value and the general --use-colors/--no-use-colors setting.",
    default=True,
)

use_experimental_parser = _create_option_and_track_env_var(
    "--use-experimental-parser/--no-use-experimental-parser",
    envvar="DBT_USE_EXPERIMENTAL_PARSER",
    help="Enable experimental parsing features.",
)

vars = click.option(
use_fast_test_edges = _create_option_and_track_env_var(
    "--use-fast-test-edges/--no-use-fast-test-edges",
    envvar="DBT_USE_FAST_TEST_EDGES",
    default=False,
    hidden=True,
)

vars = _create_option_and_track_env_var(
    "--vars",
    envvar=None,
    help="Supply variables to the project. This argument overrides variables defined in your dbt_project.yml file. This argument should be a YAML string, eg. '{my_variable: my_value}'",
@@ -686,7 +745,7 @@ def _version_callback(ctx, _param, value):
    ctx.exit()


version = click.option(
version = _create_option_and_track_env_var(
    "--version",
    "-V",
    "-v",
@@ -698,14 +757,14 @@ version = click.option(
    is_flag=True,
)

version_check = click.option(
version_check = _create_option_and_track_env_var(
    "--version-check/--no-version-check",
    envvar="DBT_VERSION_CHECK",
    help="If set, ensure the installed dbt version matches the require-dbt-version specified in the dbt_project.yml file (if any). Otherwise, allow them to differ.",
    default=True,
)

warn_error = click.option(
warn_error = _create_option_and_track_env_var(
    "--warn-error",
    envvar="DBT_WARN_ERROR",
    help="If dbt would normally warn, instead raise an exception. Examples include --select that selects nothing, deprecations, configurations with no associated models, invalid test configurations, and missing sources/refs in tests.",
@@ -713,25 +772,25 @@ warn_error = click.option(
    is_flag=True,
)

warn_error_options = click.option(
warn_error_options = _create_option_and_track_env_var(
    "--warn-error-options",
    envvar="DBT_WARN_ERROR_OPTIONS",
    default="{}",
    help="""If dbt would normally warn, instead raise an exception based on include/exclude configuration. Examples include --select that selects nothing, deprecations, configurations with no associated models, invalid test configurations,
    and missing sources/refs in tests. This argument should be a YAML string, with keys 'include' or 'exclude'. eg. '{"include": "all", "exclude": ["NoNodesForSelectionCriteria"]}'""",
    help="""If dbt would normally warn, instead raise an exception based on error/warn configuration. Examples include --select that selects nothing, deprecations, configurations with no associated models, invalid test configurations,
    and missing sources/refs in tests. This argument should be a YAML string, with keys 'error' or 'warn'. eg. '{"error": "all", "warn": ["NoNodesForSelectionCriteria"]}'""",
    type=WarnErrorOptionsType(),
)
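The only change to `warn_error_options` here is the help text: the documented keys move from 'include'/'exclude' to 'error'/'warn'. A small illustration of how the two spellings express the same configuration (assuming PyYAML; this normalization is for exposition only and is not dbt's actual `WarnErrorOptionsType` implementation):

```python
import yaml


def normalize_warn_error_options(raw: str) -> dict:
    # Parse the YAML string passed on the command line.
    opts = yaml.safe_load(raw) or {}
    # Map the legacy 'include'/'exclude' keys onto the newer
    # 'error'/'warn' spelling described in the updated help text.
    return {
        "error": opts.get("error", opts.get("include", [])),
        "warn": opts.get("warn", opts.get("exclude", [])),
    }


print(normalize_warn_error_options('{"include": "all", "exclude": ["NoNodesForSelectionCriteria"]}'))
# {'error': 'all', 'warn': ['NoNodesForSelectionCriteria']}
```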

write_json = click.option(
write_json = _create_option_and_track_env_var(
    "--write-json/--no-write-json",
    envvar="DBT_WRITE_JSON",
    help="Whether or not to write the manifest.json and run_results.json files to the target directory",
    default=True,
)

show_resource_report = click.option(
    "--show-resource-report/--no-show-resource-report",
upload_artifacts = _create_option_and_track_env_var(
    "--upload-to-artifacts-ingest-api/--no-upload-to-artifacts-ingest-api",
    envvar="DBT_UPLOAD_TO_ARTIFACTS_INGEST_API",
    help="Whether or not to upload the artifacts to the dbt Cloud API",
    default=False,
    envvar="DBT_SHOW_RESOURCE_REPORT",
    hidden=True,
)
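Several options above lean on custom click types (`ChoiceTuple`, `SampleType`, `WarnErrorOptionsType`) whose definitions sit outside this diff. For readers unfamiliar with the pattern, a generic `click.ParamType` looks like the following toy YAML-parsing type (not any of dbt's real types):

```python
import click
import yaml


class YamlDictType(click.ParamType):
    """Parse an option value as a YAML mapping."""

    name = "YAML"

    def convert(self, value, param, ctx):
        try:
            parsed = yaml.safe_load(value)
        except yaml.YAMLError as exc:
            self.fail(f"invalid YAML: {exc}", param, ctx)
        if not isinstance(parsed, dict):
            self.fail("expected a YAML mapping", param, ctx)
        return parsed
```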
@@ -3,7 +3,7 @@ import os
import time
import traceback
from functools import update_wrapper
from typing import Optional
from typing import Dict, Optional

from click import Context

@@ -12,11 +12,15 @@ from dbt.adapters.factory import adapter_management, get_adapter, register_adapt
from dbt.cli.exceptions import ExceptionExit, ResultExit
from dbt.cli.flags import Flags
from dbt.config import RuntimeConfig
from dbt.config.catalogs import get_active_write_integration, load_catalogs
from dbt.config.runtime import UnsetProfile, load_profile, load_project
from dbt.context.providers import generate_runtime_macro_context
from dbt.context.query_header import generate_query_header_context
from dbt.deprecations import show_deprecations_summary
from dbt.env_vars import KNOWN_ENGINE_ENV_VARS, validate_engine_env_vars
from dbt.events.logging import setup_event_logger
from dbt.events.types import (
    ArtifactUploadError,
    CommandCompleted,
    MainEncounteredError,
    MainReportArgs,
@@ -26,17 +30,19 @@ from dbt.events.types import (
    ResourceReport,
)
from dbt.exceptions import DbtProjectError, FailFastError
from dbt.flags import get_flag_dict, set_flags
from dbt.flags import get_flag_dict, get_flags, set_flags
from dbt.mp_context import get_mp_context
from dbt.parser.manifest import parse_manifest
from dbt.plugins import set_up_plugin_manager
from dbt.profiler import profiler
from dbt.tracking import active_user, initialize_from_flags, track_run
from dbt.utils import try_get_max_rss_kb
from dbt.utils.artifact_upload import upload_artifacts
from dbt.version import installed as installed_version
from dbt_common.clients.system import get_env
from dbt_common.context import get_invocation_context, set_invocation_context
from dbt_common.events.base_types import EventLevel
from dbt_common.events.event_manager_client import get_event_manager
from dbt_common.events.functions import LOG_VERSION, fire_event
from dbt_common.events.helpers import get_json_string_utcnow
from dbt_common.exceptions import DbtBaseException as DbtException
@@ -51,6 +57,17 @@ from dbt_common.record import (
from dbt_common.utils import cast_dict_to_dict_of_strings


def _cross_propagate_engine_env_vars(env_dict: Dict[str, str]) -> None:
    for env_var in KNOWN_ENGINE_ENV_VARS:
        if env_var.old_name is not None:
            # If the old name is in the env dict, and not the new name, set the new name based on the old name
            if env_var.old_name in env_dict and env_var.name not in env_dict:
                env_dict[env_var.name] = env_dict[env_var.old_name]
            # If the new name is in the env dict, override the old name with it
            elif env_var.name in env_dict:
                env_dict[env_var.old_name] = env_dict[env_var.name]
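A self-contained rendering of the propagation rules above (the `EngineEnvVar` record and the example names are stand-ins; only the `name`/`old_name` attributes are taken from the code):

```python
from dataclasses import dataclass
from typing import Dict, Optional


@dataclass
class EngineEnvVar:  # stand-in for the entries in KNOWN_ENGINE_ENV_VARS
    name: str
    old_name: Optional[str] = None


KNOWN = [EngineEnvVar(name="DBT_ENGINE_EXAMPLE", old_name="DBT_EXAMPLE")]


def cross_propagate(env: Dict[str, str]) -> None:
    for var in KNOWN:
        if var.old_name is None:
            continue
        if var.old_name in env and var.name not in env:
            env[var.name] = env[var.old_name]  # backfill the new name from the old
        elif var.name in env:
            env[var.old_name] = env[var.name]  # the new name wins over the old


env = {"DBT_EXAMPLE": "1"}
cross_propagate(env)
print(env)  # {'DBT_EXAMPLE': '1', 'DBT_ENGINE_EXAMPLE': '1'}
```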
def preflight(func):
    def wrapper(*args, **kwargs):
        ctx = args[0]
@@ -64,12 +81,17 @@ def preflight(func):

        # Must be set after record/replay is set up so that the env can be
        # recorded or replayed if needed.
        get_invocation_context()._env = get_env()
        env_dict = get_env()
        _cross_propagate_engine_env_vars(env_dict)
        get_invocation_context()._env = env_dict

        # Flags
        flags = Flags(ctx)
        ctx.obj["flags"] = flags
        set_flags(flags)
        get_event_manager().require_warn_or_error_handling = (
            flags.require_all_warnings_handled_by_warn_error
        )

        # Reset invocation_id for each 'invocation' of a dbt command (can happen multiple times in a single process)
        reset_invocation_id()
@@ -88,7 +110,7 @@ def preflight(func):
        fire_event(MainReportArgs(args=flags_dict_str))

        # Deprecation warnings
        flags.fire_deprecations()
        flags.fire_deprecations(ctx)

        if active_user is not None:  # mypy appeasement, always true
            fire_event(MainTrackingUserState(user_state=active_user.state()))
@@ -100,6 +122,9 @@ def preflight(func):
        # Adapter management
        ctx.with_resource(adapter_management())

        # Validate engine env var restricted name space
        validate_engine_env_vars()

        return func(*args, **kwargs)

    return update_wrapper(wrapper, func)
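`validate_engine_env_vars` is imported from `dbt.env_vars` and its body is not shown in this view. Purely as a guess at the shape of such a check (the prefix, policy, and names below are assumptions for illustration, not dbt's actual behavior), a restricted-namespace validation might look like:

```python
import os

# Assumed: engine-reserved variables share a prefix and a known allowlist.
_ENGINE_PREFIX = "DBT_ENGINE_"
_KNOWN_ENGINE_NAMES = {"DBT_ENGINE_EXAMPLE"}


def validate_engine_env_vars_sketch() -> None:
    unknown = [
        key
        for key in os.environ
        if key.startswith(_ENGINE_PREFIX) and key not in _KNOWN_ENGINE_NAMES
    ]
    if unknown:
        raise ValueError(f"Unrecognized engine env vars: {', '.join(sorted(unknown))}")
```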
@@ -164,6 +189,17 @@ def postflight(func):
        finally:
            # Fire ResourceReport, but only on systems which support the resource
            # module. (Skip it on Windows).
            try:
                if get_flags().upload_to_artifacts_ingest_api:
                    upload_artifacts(
                        get_flags().project_dir, get_flags().target_path, ctx.command.name
                    )

            except Exception as e:
                fire_event(ArtifactUploadError(msg=str(e)))

            show_deprecations_summary()

            if importlib.util.find_spec("resource") is not None:
                import resource

@@ -231,6 +267,7 @@ def profile(func):
        threads = getattr(flags, "THREADS", None)
        profile = load_profile(flags.PROJECT_DIR, flags.VARS, flags.PROFILE, flags.TARGET, threads)
        ctx.obj["profile"] = profile
        get_invocation_context().uses_adapter(profile.credentials.type)

        return func(*args, **kwargs)

@@ -248,8 +285,10 @@ def project(func):
            raise DbtProjectError("profile required for project")

        flags = ctx.obj["flags"]
        # TODO deprecation warnings fired from loading the project will lack
        # the project_id in the snowplow event.
        project = load_project(
            flags.PROJECT_DIR, flags.VERSION_CHECK, ctx.obj["profile"], flags.VARS
            flags.PROJECT_DIR, flags.VERSION_CHECK, ctx.obj["profile"], flags.VARS, validate=True
        )
        ctx.obj["project"] = project

@@ -313,6 +352,29 @@ def runtime_config(func):
    return update_wrapper(wrapper, func)


def catalogs(func):
    """A decorator used by click command functions for loading catalogs"""

    def wrapper(*args, **kwargs):
        ctx = args[0]
        assert isinstance(ctx, Context)

        req_strs = ["flags", "profile", "project"]
        reqs = [ctx.obj.get(req_str) for req_str in req_strs]
        if None in reqs:
            raise DbtProjectError("profile and flags required to load catalogs")

        flags = ctx.obj["flags"]
        ctx_project = ctx.obj["project"]

        _catalogs = load_catalogs(flags.PROJECT_DIR, ctx_project.project_name, flags.VARS)
        ctx.obj["catalogs"] = _catalogs

        return func(*args, **kwargs)

    return update_wrapper(wrapper, func)
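The new `catalogs` decorator follows the same shape as `preflight`, `profile`, and `project`: pull the click `Context` out of the positional args, check that earlier decorators have stashed their prerequisites on `ctx.obj`, add its own state, and delegate. A runnable miniature of that pattern (all names invented):

```python
from functools import update_wrapper

import click


def provide_name(func):
    def wrapper(*args, **kwargs):
        ctx = args[0]
        assert isinstance(ctx, click.Context)
        if ctx.obj is None:
            ctx.obj = {}
        ctx.obj["name"] = "example"  # state for downstream decorators/commands
        return func(*args, **kwargs)

    return update_wrapper(wrapper, func)


@click.command()
@click.pass_context
@provide_name
def hello(ctx):
    click.echo(ctx.obj["name"])


if __name__ == "__main__":
    hello(obj={})
```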
def manifest(*args0, write=True, write_perf_info=False):
    """A decorator used by click command functions for generating a manifest
    given a profile, project, and runtime config. This also registers the adapter
@@ -346,11 +408,19 @@ def setup_manifest(ctx: Context, write: bool = True, write_perf_info: bool = Fal

    runtime_config = ctx.obj["runtime_config"]

    catalogs = ctx.obj["catalogs"] if "catalogs" in ctx.obj else []
    active_integrations = [get_active_write_integration(catalog) for catalog in catalogs]

    # if a manifest has already been set on the context, don't overwrite it
    if ctx.obj.get("manifest") is None:
        ctx.obj["manifest"] = parse_manifest(
            runtime_config, write_perf_info, write, ctx.obj["flags"].write_json
            runtime_config,
            write_perf_info,
            write,
            ctx.obj["flags"].write_json,
            active_integrations,
        )
        adapter = get_adapter(runtime_config)
    else:
        register_adapter(runtime_config, get_mp_context())
        adapter = get_adapter(runtime_config)

@@ -4,7 +4,7 @@

#### How are materializations defined

Model materializations are kept in `core/dbt/include/global_project/macros/materializations/models/`. Materializations are defined using syntax that isn't part of the Jinja standard library. These tags are referenced internally, and materializations can be overridden in user projects when users have specific needs.
Model materializations are defined by adapters. See the [dbt-adapters](https://github.com/dbt-labs/dbt-adapters) project for [base implementations](https://github.com/dbt-labs/dbt-adapters/tree/main/dbt-adapters/src/dbt/include/global_project/macros/materializations/models). Materializations are defined using syntax that isn't part of the Jinja standard library. These tags are referenced internally, and materializations can be overridden in user projects when users have specific needs.

```
-- Pseudocode for arguments

core/dbt/clients/checked_load.py (new file, 83 lines)
@@ -0,0 +1,83 @@
import collections
import dataclasses
from typing import Any, Dict, List, Optional, Tuple

import yaml

from dbt import deprecations
from dbt.clients.yaml_helper import load_yaml_text

# the C version is faster, but it doesn't always exist
try:
    from yaml import CSafeLoader as SafeLoader
except ImportError:
    from yaml import SafeLoader  # type: ignore # noqa: F401


@dataclasses.dataclass
class YamlCheckFailure:
    failure_type: str
    message: str


def checked_load(contents) -> Tuple[Optional[Dict[str, Any]], List[YamlCheckFailure]]:
    # A hacky (but sadly justified) method for modifying a bit of PyYAML. We create
    # a new local subclass of SafeLoader, since we need to associate state with
    # the static class, but static classes do not have non-static state. This allows
    # us to be sure we have exclusive access to the class.
    class CheckedLoader(SafeLoader):
        check_failures: List[YamlCheckFailure] = []

        def construct_mapping(self, node, deep=False):
            if not isinstance(node, yaml.MappingNode):
                raise yaml.constructor.ConstructorError(
                    None, None, "expected a mapping node, but found %s" % node.id, node.start_mark
                )
            is_override = (
                len(node.value) > 0
                and len(node.value[0]) > 0
                and getattr(node.value[0][0], "value") == "<<"
            )
            self.flatten_mapping(node)
            mapping = {}
            for key_node, value_node in node.value:
                key = self.construct_object(key_node, deep=deep)
                if not isinstance(key, collections.abc.Hashable):
                    raise yaml.constructor.ConstructorError(
                        "while constructing a mapping",
                        node.start_mark,
                        "found unhashable key",
                        key_node.start_mark,
                    )
                value = self.construct_object(value_node, deep=deep)

                if not is_override and key in mapping:
                    start_mark = str(key_node.start_mark)
                    if start_mark.startswith(" in"):  # this means it was at the top level
                        message = f"Duplicate key '{key}' {start_mark.lstrip()}"
                    else:
                        message = f"Duplicate key '{key}' at {key_node.start_mark}"

                    self.check_failures.append(YamlCheckFailure("duplicate_key", message))

                mapping[key] = value
            return mapping

    CheckedLoader.add_constructor(
        yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, CheckedLoader.construct_mapping
    )

    dct = load_yaml_text(contents, loader=CheckedLoader)
    check_failures = CheckedLoader.check_failures

    return (dct, check_failures)


def issue_deprecation_warnings_for_failures(failures: List[YamlCheckFailure], file: str):
    for failure in failures:
        if failure.failure_type == "duplicate_key":
            deprecations.warn(
                "duplicate-yaml-keys-deprecation",
                duplicate_description=failure.message,
                file=file,
            )
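A quick usage sketch for the new module (assuming a dbt-core checkout where `dbt.clients.checked_load` is importable; the YAML below is invented):

```python
from dbt.clients.checked_load import checked_load

contents = """
models:
  my_model:
    enabled: true
  my_model:
    enabled: false
"""

dct, failures = checked_load(contents)
for failure in failures:
    # e.g. duplicate_key - Duplicate key 'my_model' at ...
    print(failure.failure_type, "-", failure.message)
```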
Some files were not shown because too many files have changed in this diff.