Mirror of https://github.com/dbt-labs/dbt-core, synced 2025-12-19 01:41:28 +00:00

Compare commits: 16 commits, enable-pos...v1.6.0rc2
| SHA1 |
|---|
| 8b900f71db |
| 02e08c7539 |
| b4751c8fb4 |
| ec50a94e61 |
| 2aa3c9e095 |
| 2b00544654 |
| ae5df8171e |
| 57660c9f5d |
| 1e4167c480 |
| f5c8691da3 |
| b9f2c1d154 |
| fd05fb7ee8 |
| f9c8b7c2f5 |
| a013a98843 |
| cf1dfaa6e6 |
| 61df98d28b |
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 1.6.0b8
+current_version = 1.6.0rc2
 parse = (?P<major>[\d]+) # major version number
     \.(?P<minor>[\d]+) # minor version number
     \.(?P<patch>[\d]+) # patch version number
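bumpversion reads `current_version`, splits it with the `parse` regex into named groups, and writes the bumped value back. The prerelease segment (`b8`/`rc2`) is evidently handled by config lines outside this five-line excerpt. A quick sanity check of the named-group pattern, as a sketch:

```python
import re

# Sketch of the major/minor/patch portion of the `parse` pattern above
# (the real config also parses the prerelease suffix, not shown here).
pattern = re.compile(r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)")

match = pattern.match("1.6.0")
assert match is not None
assert match.groupdict() == {"major": "1", "minor": "6", "patch": "0"}
```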
.changes/1.6.0-rc1.md (new file, 46 lines)
@@ -0,0 +1,46 @@
## dbt-core 1.6.0-rc1 - July 17, 2023

### Features

- Add validate_sql method to BaseAdapter with implementation for SQLAdapter ([#7839](https://github.com/dbt-labs/dbt-core/issues/7839))
- Support validation of metrics and semantic models. ([#7969](https://github.com/dbt-labs/dbt-core/issues/7969))
- Begin populating `depends_on` of metric nodes ([#7854](https://github.com/dbt-labs/dbt-core/issues/7854))
- Enumerate supported materialized view features for dbt-postgres ([#6911](https://github.com/dbt-labs/dbt-core/issues/6911))

### Fixes

- add negative part_number arg for split part macro ([#7915](https://github.com/dbt-labs/dbt-core/issues/7915))
- Fix accidental propagation of log messages to root logger. ([#7872](https://github.com/dbt-labs/dbt-core/issues/7872))
- Fixed an issue which blocked debug logging to stdout with --log-level debug, unless --debug was also used. ([#7872](https://github.com/dbt-labs/dbt-core/issues/7872))
- Fix query comment tests ([#7845](https://github.com/dbt-labs/dbt-core/issues/7845))
- Inline query emit proper error message ([#7940](https://github.com/dbt-labs/dbt-core/issues/7940))
- Nicer error message if model with enforced contract is missing 'columns' specification ([#7943](https://github.com/dbt-labs/dbt-core/issues/7943))
- include 'v' in ModelNodeArgs.unique_id ([#8039](https://github.com/dbt-labs/dbt-core/issues/8039))
- Fix fail-fast behavior (including retry) ([#7785](https://github.com/dbt-labs/dbt-core/issues/7785))
- Remove `create_metric` as a `SemanticModel.Measure` property because it currently doesn't do anything ([#8064](https://github.com/dbt-labs/dbt-core/issues/8064))
- Remove `VOLUME` declaration within Dockerfile ([#4784](https://github.com/dbt-labs/dbt-core/issues/4784))
- Fix Dockerfile.test ([#7352](https://github.com/dbt-labs/dbt-core/issues/7352))
- Detect breaking contract changes to versioned models ([#8030](https://github.com/dbt-labs/dbt-core/issues/8030))
- Update DryRunMethod test classes ValidateSqlMethod naming ([#7839](https://github.com/dbt-labs/dbt-core/issues/7839))
- Fix typo in `NonAdditiveDimension` implementation ([#8088](https://github.com/dbt-labs/dbt-core/issues/8088))

### Docs

- Fix broken links in `CONTRIBUTING.md`. ([dbt-docs/#8018](https://github.com/dbt-labs/dbt-docs/issues/8018))

### Under the Hood

- Add option to specify partial parse file ([#7911](https://github.com/dbt-labs/dbt-core/issues/7911))
- Add semantic_models to resource counts ([#8077](https://github.com/dbt-labs/dbt-core/issues/8077))

### Dependencies

- Pin click>=8.1.1,<8.1.4 ([#8050](https://github.com/dbt-labs/dbt-core/pull/8050))
- Bump `dbt-semantic-interfaces` to `~=0.1.0rc1` ([#8082](https://github.com/dbt-labs/dbt-core/pull/8082))

### Contributors
- [@alexrosenfeld10](https://github.com/alexrosenfeld10) ([#4784](https://github.com/dbt-labs/dbt-core/issues/4784))
- [@damian3031](https://github.com/damian3031) ([#7845](https://github.com/dbt-labs/dbt-core/issues/7845))
- [@dave-connors-3](https://github.com/dave-connors-3) ([#7915](https://github.com/dbt-labs/dbt-core/issues/7915))
- [@gem7318](https://github.com/gem7318) ([#8018](https://github.com/dbt-labs/dbt-core/issues/8018))
- [@tlento](https://github.com/tlento) ([#7839](https://github.com/dbt-labs/dbt-core/issues/7839), [#7839](https://github.com/dbt-labs/dbt-core/issues/7839))
.changes/1.6.0-rc2.md (new file, 28 lines)
@@ -0,0 +1,28 @@
## dbt-core 1.6.0-rc2 - July 28, 2023

### Fixes

- Copy target_schema from config into snapshot node ([#6745](https://github.com/dbt-labs/dbt-core/issues/6745))
- Enable converting deprecation warnings to errors ([#8130](https://github.com/dbt-labs/dbt-core/issues/8130))
- Ensure `warn_error_options` get serialized in `invocation_args_dict` ([#7694](https://github.com/dbt-labs/dbt-core/issues/7694))
- Stop detecting materialization macros based on macro name ([#6231](https://github.com/dbt-labs/dbt-core/issues/6231))
- Improve handling of CTE injection with ephemeral models ([#8213](https://github.com/dbt-labs/dbt-core/issues/8213))
- Fix unbound local variable error in `checked_agg_time_dimension_for_measure` ([#8230](https://github.com/dbt-labs/dbt-core/issues/8230))
- Ensure runtime errors are raised for graph runnable tasks (compile, show, run, etc) ([#8166](https://github.com/dbt-labs/dbt-core/issues/8166))

### Docs

- Remove static SQL codeblock for metrics ([dbt-docs/#436](https://github.com/dbt-labs/dbt-docs/issues/436))

### Under the Hood

- A way to control maxBytes for a single dbt.log file ([#8199](https://github.com/dbt-labs/dbt-core/issues/8199))

### Dependencies

- Update pin for click<9 ([#8232](https://github.com/dbt-labs/dbt-core/pull/8232))
- Add upper bound to sqlparse pin of <0.5 ([#8236](https://github.com/dbt-labs/dbt-core/pull/8236))
- Support dbt-semantic-interfaces 0.2.0 ([#8250](https://github.com/dbt-labs/dbt-core/pull/8250))

### Contributors
- [@marcodamore](https://github.com/marcodamore) ([#436](https://github.com/dbt-labs/dbt-core/issues/436))
.changes/1.6.0/Dependencies-20230727-145703.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Dependencies
body: Update pin for click<9
time: 2023-07-27T14:57:03.180458-05:00
custom:
  Author: emmyoop
  PR: "8232"
.changes/1.6.0/Dependencies-20230727-145726.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Dependencies
body: Add upper bound to sqlparse pin of <0.5
time: 2023-07-27T14:57:26.40416-05:00
custom:
  Author: emmyoop
  PR: "8236"
.changes/1.6.0/Dependencies-20230728-135227.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Dependencies
body: Support dbt-semantic-interfaces 0.2.0
time: 2023-07-28T13:52:27.207241-07:00
custom:
  Author: QMalcolm
  PR: "8250"
.changes/1.6.0/Docs-20230718-192422.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Docs
body: Remove static SQL codeblock for metrics
time: 2023-07-18T19:24:22.155323+02:00
custom:
  Author: marcodamore
  Issue: "436"
.changes/1.6.0/Fixes-20230717-160652.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Fixes
body: Copy target_schema from config into snapshot node
time: 2023-07-17T16:06:52.957724-04:00
custom:
  Author: gshank
  Issue: "6745"
.changes/1.6.0/Fixes-20230718-125518.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Fixes
body: Enable converting deprecation warnings to errors
time: 2023-07-18T12:55:18.03914-04:00
custom:
  Author: michelleark
  Issue: "8130"
.changes/1.6.0/Fixes-20230720-161513.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Fixes
body: Ensure `warn_error_options` get serialized in `invocation_args_dict`
time: 2023-07-20T16:15:13.761813-07:00
custom:
  Author: QMalcolm
  Issue: "7694"
.changes/1.6.0/Fixes-20230720-170112.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Fixes
body: Stop detecting materialization macros based on macro name
time: 2023-07-20T17:01:12.496238-07:00
custom:
  Author: QMalcolm
  Issue: "6231"
.changes/1.6.0/Fixes-20230726-104448.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Fixes
body: Improve handling of CTE injection with ephemeral models
time: 2023-07-26T10:44:48.888451-04:00
custom:
  Author: gshank
  Issue: "8213"
.changes/1.6.0/Fixes-20230727-125830.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Fixes
body: Fix unbound local variable error in `checked_agg_time_dimension_for_measure`
time: 2023-07-27T12:58:30.673803-07:00
custom:
  Author: QMalcolm
  Issue: "8230"
.changes/1.6.0/Fixes-20230728-115620.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: Fixes
body: Ensure runtime errors are raised for graph runnable tasks (compile, show, run,
  etc)
time: 2023-07-28T11:56:20.863718-04:00
custom:
  Author: michelleark
  Issue: "8166"
.changes/1.6.0/Under the Hood-20230724-150654.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: A way to control maxBytes for a single dbt.log file
time: 2023-07-24T15:06:54.263822-07:00
custom:
  Author: ChenyuLInx
  Issue: "8199"
CHANGELOG.md (78 lines changed)
@@ -5,6 +5,83 @@
 - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
 - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)

+## dbt-core 1.6.0-rc2 - July 28, 2023
+
+### Fixes
+
+- Copy target_schema from config into snapshot node ([#6745](https://github.com/dbt-labs/dbt-core/issues/6745))
+- Enable converting deprecation warnings to errors ([#8130](https://github.com/dbt-labs/dbt-core/issues/8130))
+- Ensure `warn_error_options` get serialized in `invocation_args_dict` ([#7694](https://github.com/dbt-labs/dbt-core/issues/7694))
+- Stop detecting materialization macros based on macro name ([#6231](https://github.com/dbt-labs/dbt-core/issues/6231))
+- Improve handling of CTE injection with ephemeral models ([#8213](https://github.com/dbt-labs/dbt-core/issues/8213))
+- Fix unbound local variable error in `checked_agg_time_dimension_for_measure` ([#8230](https://github.com/dbt-labs/dbt-core/issues/8230))
+- Ensure runtime errors are raised for graph runnable tasks (compile, show, run, etc) ([#8166](https://github.com/dbt-labs/dbt-core/issues/8166))
+
+### Docs
+
+- Remove static SQL codeblock for metrics ([dbt-docs/#436](https://github.com/dbt-labs/dbt-docs/issues/436))
+
+### Under the Hood
+
+- A way to control maxBytes for a single dbt.log file ([#8199](https://github.com/dbt-labs/dbt-core/issues/8199))
+
+### Dependencies
+
+- Update pin for click<9 ([#8232](https://github.com/dbt-labs/dbt-core/pull/8232))
+- Add upper bound to sqlparse pin of <0.5 ([#8236](https://github.com/dbt-labs/dbt-core/pull/8236))
+- Support dbt-semantic-interfaces 0.2.0 ([#8250](https://github.com/dbt-labs/dbt-core/pull/8250))
+
+### Contributors
+- [@marcodamore](https://github.com/marcodamore) ([#436](https://github.com/dbt-labs/dbt-core/issues/436))
+
+
+## dbt-core 1.6.0-rc1 - July 17, 2023
+
+### Features
+
+- Add validate_sql method to BaseAdapter with implementation for SQLAdapter ([#7839](https://github.com/dbt-labs/dbt-core/issues/7839))
+- Support validation of metrics and semantic models. ([#7969](https://github.com/dbt-labs/dbt-core/issues/7969))
+- Begin populating `depends_on` of metric nodes ([#7854](https://github.com/dbt-labs/dbt-core/issues/7854))
+- Enumerate supported materialized view features for dbt-postgres ([#6911](https://github.com/dbt-labs/dbt-core/issues/6911))
+
+### Fixes
+
+- add negative part_number arg for split part macro ([#7915](https://github.com/dbt-labs/dbt-core/issues/7915))
+- Fix accidental propagation of log messages to root logger. ([#7872](https://github.com/dbt-labs/dbt-core/issues/7872))
+- Fixed an issue which blocked debug logging to stdout with --log-level debug, unless --debug was also used. ([#7872](https://github.com/dbt-labs/dbt-core/issues/7872))
+- Fix query comment tests ([#7845](https://github.com/dbt-labs/dbt-core/issues/7845))
+- Inline query emit proper error message ([#7940](https://github.com/dbt-labs/dbt-core/issues/7940))
+- Nicer error message if model with enforced contract is missing 'columns' specification ([#7943](https://github.com/dbt-labs/dbt-core/issues/7943))
+- include 'v' in ModelNodeArgs.unique_id ([#8039](https://github.com/dbt-labs/dbt-core/issues/8039))
+- Fix fail-fast behavior (including retry) ([#7785](https://github.com/dbt-labs/dbt-core/issues/7785))
+- Remove `create_metric` as a `SemanticModel.Measure` property because it currently doesn't do anything ([#8064](https://github.com/dbt-labs/dbt-core/issues/8064))
+- Remove `VOLUME` declaration within Dockerfile ([#4784](https://github.com/dbt-labs/dbt-core/issues/4784))
+- Fix Dockerfile.test ([#7352](https://github.com/dbt-labs/dbt-core/issues/7352))
+- Detect breaking contract changes to versioned models ([#8030](https://github.com/dbt-labs/dbt-core/issues/8030))
+- Update DryRunMethod test classes ValidateSqlMethod naming ([#7839](https://github.com/dbt-labs/dbt-core/issues/7839))
+- Fix typo in `NonAdditiveDimension` implementation ([#8088](https://github.com/dbt-labs/dbt-core/issues/8088))
+
+### Docs
+
+- Fix broken links in `CONTRIBUTING.md`. ([dbt-docs/#8018](https://github.com/dbt-labs/dbt-docs/issues/8018))
+
+### Under the Hood
+
+- Add option to specify partial parse file ([#7911](https://github.com/dbt-labs/dbt-core/issues/7911))
+- Add semantic_models to resource counts ([#8077](https://github.com/dbt-labs/dbt-core/issues/8077))
+
+### Dependencies
+
+- Pin click>=8.1.1,<8.1.4 ([#8050](https://github.com/dbt-labs/dbt-core/pull/8050))
+- Bump `dbt-semantic-interfaces` to `~=0.1.0rc1` ([#8082](https://github.com/dbt-labs/dbt-core/pull/8082))
+
+### Contributors
+- [@alexrosenfeld10](https://github.com/alexrosenfeld10) ([#4784](https://github.com/dbt-labs/dbt-core/issues/4784))
+- [@damian3031](https://github.com/damian3031) ([#7845](https://github.com/dbt-labs/dbt-core/issues/7845))
+- [@dave-connors-3](https://github.com/dave-connors-3) ([#7915](https://github.com/dbt-labs/dbt-core/issues/7915))
+- [@gem7318](https://github.com/gem7318) ([#8018](https://github.com/dbt-labs/dbt-core/issues/8018))
+- [@tlento](https://github.com/tlento) ([#7839](https://github.com/dbt-labs/dbt-core/issues/7839), [#7839](https://github.com/dbt-labs/dbt-core/issues/7839))
+
 ## dbt-core 1.6.0-b8 - June 30, 2023

 ### Features
@@ -36,7 +113,6 @@
 - [@trouze](https://github.com/trouze) ([#7564](https://github.com/dbt-labs/dbt-core/issues/7564))
 - [@willbryant](https://github.com/willbryant) ([#7350](https://github.com/dbt-labs/dbt-core/issues/7350))
-

 ## dbt-core 1.6.0-b7 - June 28, 2023

 ### Features
@@ -132,6 +132,7 @@ class dbtRunner:
 @p.enable_legacy_logger
 @p.fail_fast
 @p.log_cache_events
+@p.log_file_max_bytes
 @p.log_format
 @p.log_format_file
 @p.log_level
@@ -171,6 +171,15 @@ use_colors_file = click.option(
     default=True,
 )

+log_file_max_bytes = click.option(
+    "--log-file-max-bytes",
+    envvar="DBT_LOG_FILE_MAX_BYTES",
+    help="Configure the max file size in bytes for a single dbt.log file, before rolling over. 0 means no limit.",
+    default=10 * 1024 * 1024,  # 10mb
+    type=click.INT,
+    hidden=True,
+)
+
 log_path = click.option(
     "--log-path",
     envvar="DBT_LOG_PATH",
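A minimal sketch of how a hidden click option like this surfaces at runtime (the command and callback below are illustrative, not dbt's actual CLI wiring): the flag stays out of `--help` output but is still honored from the command line or the environment variable.

```python
import click

# Hypothetical toy command reusing the option pattern from the hunk above.
@click.command()
@click.option(
    "--log-file-max-bytes",
    envvar="DBT_LOG_FILE_MAX_BYTES",
    default=10 * 1024 * 1024,  # 10mb
    type=click.INT,
    hidden=True,  # omitted from --help, but still parsed
)
def cli(log_file_max_bytes: int) -> None:
    click.echo(f"max bytes: {log_file_max_bytes}")

if __name__ == "__main__":
    # DBT_LOG_FILE_MAX_BYTES=1234 python cli.py   -> max bytes: 1234
    # python cli.py --log-file-max-bytes 42       -> max bytes: 42
    cli()
```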
@@ -380,9 +389,9 @@ inline = click.option(
 # Most CLI arguments should use the combined `select` option that aliases `--models` to `--select`.
 # However, if you need to split out these separators (like `dbt ls`), use the `models` and `raw_select` options instead.
 # See https://github.com/dbt-labs/dbt-core/pull/6774#issuecomment-1408476095 for more info.
-models = click.option(*model_decls, **select_attrs)
-raw_select = click.option(*select_decls, **select_attrs)
-select = click.option(*select_decls, *model_decls, **select_attrs)
+models = click.option(*model_decls, **select_attrs)  # type: ignore[arg-type]
+raw_select = click.option(*select_decls, **select_attrs)  # type: ignore[arg-type]
+select = click.option(*select_decls, *model_decls, **select_attrs)  # type: ignore[arg-type]

 selector = click.option(
     "--selector",
@@ -4,7 +4,6 @@ import json
 import networkx as nx  # type: ignore
 import os
 import pickle
-import sqlparse

 from collections import defaultdict
 from typing import List, Dict, Any, Tuple, Optional
@@ -36,6 +35,7 @@ from dbt.node_types import NodeType, ModelLanguage
 from dbt.events.format import pluralize
 import dbt.tracking
 import dbt.task.list as list_task
+import sqlparse

 graph_file_name = "graph.gpickle"
@@ -378,16 +378,16 @@ class Compiler:

             _add_prepended_cte(prepended_ctes, InjectedCTE(id=cte.id, sql=sql))

-        injected_sql = inject_ctes_into_sql(
-            model.compiled_code,
-            prepended_ctes,
-        )
-        model._pre_injected_sql = model.compiled_code
-        model.compiled_code = injected_sql
-        model.extra_ctes = prepended_ctes
-        model.extra_ctes_injected = True
+        # Check again before updating for multi-threading
+        if not model.extra_ctes_injected:
+            injected_sql = inject_ctes_into_sql(
+                model.compiled_code,
+                prepended_ctes,
+            )
+            model.extra_ctes_injected = True
+            model._pre_injected_sql = model.compiled_code
+            model.compiled_code = injected_sql
+            model.extra_ctes = prepended_ctes

         # if model.extra_ctes is not set to prepended ctes, something went wrong
         return model, model.extra_ctes
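The reshaped block above is a check-then-update guard: under dbt's threaded execution, two threads can reach this code for the same ephemeral model, and only the first should splice the CTEs into `compiled_code`. A minimal sketch of the same idea on a toy node class (the names are illustrative, not dbt's; a lock is added here to make the demo fully race-free, whereas the dbt hunk relies on the flag check alone):

```python
import threading

class Node:
    def __init__(self, compiled_code: str):
        self.compiled_code = compiled_code
        self.extra_ctes_injected = False
        self._lock = threading.Lock()

def inject_once(node: Node, cte_sql: str) -> None:
    with node._lock:
        # Check again before updating: a thread that lost the race
        # must not prepend the CTEs a second time.
        if node.extra_ctes_injected:
            return
        node.compiled_code = f"with __cte as ({cte_sql}) {node.compiled_code}"
        node.extra_ctes_injected = True

node = Node("select * from __cte")
threads = [threading.Thread(target=inject_once, args=(node, "select 1")) for _ in range(4)]
for t in threads:
    t.start()
for t in threads:
    t.join()
assert node.compiled_code.count("with __cte") == 1  # injected exactly once
```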
@@ -523,6 +523,12 @@ class Compiler:
         the node's raw_code into compiled_code, and then calls the
         recursive method to "prepend" the ctes.
         """
+        # Make sure Lexer for sqlparse 0.4.4 is initialized
+        from sqlparse.lexer import Lexer  # type: ignore
+
+        if hasattr(Lexer, "get_default_instance"):
+            Lexer.get_default_instance()
+
         node = self._compile_code(node, manifest, extra_context)

         node, _ = self._recursively_prepend_ctes(node, manifest, extra_context)
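Per the comment in the hunk, `Lexer.get_default_instance()` exists only on newer sqlparse releases (around 0.4.4), which is why the call is feature-detected rather than unconditional; initializing the singleton up front avoids building it lazily inside worker threads. A sketch of the same idiom, assuming only that sqlparse is installed:

```python
import sqlparse
from sqlparse.lexer import Lexer

# Eagerly build the default lexer where the API exists; on older sqlparse
# versions the attribute is absent and lazy initialization is fine.
if hasattr(Lexer, "get_default_instance"):
    Lexer.get_default_instance()

# Parsing afterwards behaves the same on either version.
print(sqlparse.format("select 1", reindent=True))
```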
@@ -619,6 +619,8 @@ class SnapshotConfig(EmptySnapshotConfig):
     @classmethod
     def validate(cls, data):
         super().validate(data)
+        # Note: currently you can't just set these keys in schema.yml because this validation
+        # will fail when parsing the snapshot node.
         if not data.get("strategy") or not data.get("unique_key") or not data.get("target_schema"):
             raise ValidationError(
                 "Snapshots must be configured with a 'strategy', 'unique_key', "
@@ -649,6 +651,7 @@ class SnapshotConfig(EmptySnapshotConfig):
         if data.get("materialized") and data.get("materialized") != "snapshot":
             raise ValidationError("A snapshot must have a materialized value of 'snapshot'")

+    # Called by "calculate_node_config_dict" in ContextConfigGenerator
     def finalize_and_validate(self):
         data = self.to_dict(omit_none=True)
         self.validate(data)
@@ -50,6 +50,7 @@ from dbt.flags import get_flags
 from dbt.node_types import ModelLanguage, NodeType, AccessType
 from dbt_semantic_interfaces.call_parameter_sets import FilterCallParameterSets
 from dbt_semantic_interfaces.references import (
+    EntityReference,
     MeasureReference,
     LinkableElementReference,
     SemanticModelReference,
@@ -1498,6 +1499,7 @@ class SemanticModel(GraphNode):
     refs: List[RefArgs] = field(default_factory=list)
     created_at: float = field(default_factory=lambda: time.time())
     config: SemanticModelConfig = field(default_factory=SemanticModelConfig)
+    primary_entity: Optional[str] = None

     @property
     def entity_references(self) -> List[LinkableElementReference]:
@@ -1568,17 +1570,26 @@ class SemanticModel(GraphNode):
             measure is not None
         ), f"No measure with name ({measure_reference.element_name}) in semantic_model with name ({self.name})"

-        if self.defaults is not None:
-            default_agg_time_dimesion = self.defaults.agg_time_dimension
+        default_agg_time_dimension = (
+            self.defaults.agg_time_dimension if self.defaults is not None else None
+        )

-        agg_time_dimension_name = measure.agg_time_dimension or default_agg_time_dimesion
+        agg_time_dimension_name = measure.agg_time_dimension or default_agg_time_dimension
         assert agg_time_dimension_name is not None, (
-            f"Aggregation time dimension for measure {measure.name} is not set! This should either be set directly on "
-            f"the measure specification in the model, or else defaulted to the primary time dimension in the data "
-            f"source containing the measure."
+            f"Aggregation time dimension for measure {measure.name} on semantic model {self.name} is not set! "
+            "To fix this either specify a default `agg_time_dimension` for the semantic model or define an "
+            "`agg_time_dimension` on the measure directly."
         )
         return TimeDimensionReference(element_name=agg_time_dimension_name)

+    @property
+    def primary_entity_reference(self) -> Optional[EntityReference]:
+        return (
+            EntityReference(element_name=self.primary_entity)
+            if self.primary_entity is not None
+            else None
+        )
+

 # ====================================
 # Patches
@@ -728,6 +728,7 @@ class UnparsedSemanticModel(dbtClassMixin):
     entities: List[UnparsedEntity] = field(default_factory=list)
     measures: List[UnparsedMeasure] = field(default_factory=list)
     dimensions: List[UnparsedDimension] = field(default_factory=list)
+    primary_entity: Optional[str] = None


 def normalize_date(d: Optional[datetime.date]) -> Optional[datetime.datetime]:
@@ -13,7 +13,7 @@ from uuid import uuid4
 from dbt.events.format import timestamp_to_datetime_string

 from dbt.events.base_types import BaseEvent, EventLevel, msg_from_base_event, EventMsg
-
+import dbt.utils

 # A Filter is a function which takes a BaseEvent and returns True if the event
 # should be logged, False otherwise.
@@ -80,6 +80,7 @@ class LoggerConfig:
     use_colors: bool = False
     output_stream: Optional[TextIO] = None
     output_file_name: Optional[str] = None
+    output_file_max_bytes: Optional[int] = 10 * 1024 * 1024  # 10 mb
     logger: Optional[Any] = None
@@ -100,7 +101,7 @@ class _Logger:
         file_handler = RotatingFileHandler(
             filename=str(config.output_file_name),
             encoding="utf8",
-            maxBytes=10 * 1024 * 1024,  # 10 mb
+            maxBytes=config.output_file_max_bytes,  # type: ignore
             backupCount=5,
         )
         self._python_logger = self._get_python_log_for_handler(file_handler)
@@ -175,7 +176,7 @@ class _JsonLogger(_Logger):
         from dbt.events.functions import msg_to_dict

         msg_dict = msg_to_dict(msg)
-        raw_log_line = json.dumps(msg_dict, sort_keys=True)
+        raw_log_line = json.dumps(msg_dict, sort_keys=True, cls=dbt.utils.ForgivingJSONEncoder)
         line = self.scrubber(raw_log_line)  # type: ignore
         return line
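`RotatingFileHandler` is the standard-library handler from `logging.handlers`: once the log file reaches `maxBytes` it is renamed (`dbt.log.1`, up to `backupCount` rotated files) and a fresh file is started, and `maxBytes=0` disables rotation entirely, which is why the new flag documents 0 as "no limit". A self-contained sketch of that behavior (file names here are illustrative):

```python
import logging
from logging.handlers import RotatingFileHandler

handler = RotatingFileHandler(
    filename="demo.log",
    encoding="utf8",
    maxBytes=1024,   # roll over once the file exceeds ~1 KiB
    backupCount=5,   # keep demo.log.1 .. demo.log.5, discarding the oldest
)
logger = logging.getLogger("demo")
logger.addHandler(handler)
logger.setLevel(logging.INFO)

for i in range(200):
    logger.info("line %d: some reasonably long log message", i)
# After the loop, demo.log plus rotated demo.log.N files exist on disk.
```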
@@ -13,6 +13,7 @@ from typing import Callable, Dict, List, Optional, TextIO
 import uuid
 from google.protobuf.json_format import MessageToDict

+import dbt.utils

 LOG_VERSION = 3
 metadata_vars: Optional[Dict[str, str]] = None
@@ -67,7 +68,11 @@ def setup_event_logger(flags, callbacks: List[Callable[[EventMsg], None]] = [])
     log_level_file = EventLevel.DEBUG if flags.DEBUG else EventLevel(flags.LOG_LEVEL_FILE)
     EVENT_MANAGER.add_logger(
         _get_logfile_config(
-            log_file, flags.USE_COLORS_FILE, log_file_format, log_level_file
+            log_file,
+            flags.USE_COLORS_FILE,
+            log_file_format,
+            log_level_file,
+            flags.LOG_FILE_MAX_BYTES,
         )
     )
@@ -116,7 +121,11 @@ def _stdout_filter(


 def _get_logfile_config(
-    log_path: str, use_colors: bool, line_format: LineFormat, level: EventLevel
+    log_path: str,
+    use_colors: bool,
+    line_format: LineFormat,
+    level: EventLevel,
+    log_file_max_bytes: int,
 ) -> LoggerConfig:
     return LoggerConfig(
         name="file_log",
@@ -126,6 +135,7 @@ def _get_logfile_config(
         scrubber=env_scrubber,
         filter=partial(_logfile_filter, bool(get_flags().LOG_CACHE_EVENTS), line_format),
         output_file_name=log_path,
+        output_file_max_bytes=log_file_max_bytes,
     )
@@ -200,7 +210,7 @@ def stop_capture_stdout_logs():
 # the message may contain secrets which must be scrubbed at the usage site.
 def msg_to_json(msg: EventMsg) -> str:
     msg_dict = msg_to_dict(msg)
-    raw_log_line = json.dumps(msg_dict, sort_keys=True)
+    raw_log_line = json.dumps(msg_dict, sort_keys=True, cls=dbt.utils.ForgivingJSONEncoder)
     return raw_log_line
@@ -1153,10 +1153,11 @@ class DeprecatedModel(WarnLevel):

     def message(self) -> str:
         version = ".v" + self.model_version if self.model_version else ""
-        return (
+        msg = (
             f"Model {self.model_name}{version} has passed its deprecation date of {self.deprecation_date}. "
             "This model should be disabled or removed."
         )
+        return warning_tag(msg)


 class UpcomingReferenceDeprecation(WarnLevel):
@@ -1178,7 +1179,7 @@ class UpcomingReferenceDeprecation(WarnLevel):
         )
         msg = msg + coda

-        return msg
+        return warning_tag(msg)


 class DeprecatedReference(WarnLevel):
@@ -1200,7 +1201,7 @@ class DeprecatedReference(WarnLevel):
         )
         msg = msg + coda

-        return msg
+        return warning_tag(msg)


 class UnsupportedConstraintMaterialization(WarnLevel):
File diff suppressed because one or more lines are too long
@@ -102,8 +102,7 @@ class RelationUpdate:
         self.package_updaters = package_updaters
         self.component = component

-    def __call__(self, parsed_node: Any, config_dict: Dict[str, Any]) -> None:
-        override = config_dict.get(self.component)
+    def __call__(self, parsed_node: Any, override: Optional[str]) -> None:
         if parsed_node.package_name in self.package_updaters:
             new_value = self.package_updaters[parsed_node.package_name](override, parsed_node)
         else:
@@ -280,9 +279,19 @@ class ConfiguredParser(
     def update_parsed_node_relation_names(
         self, parsed_node: IntermediateNode, config_dict: Dict[str, Any]
     ) -> None:
-        self._update_node_database(parsed_node, config_dict)
-        self._update_node_schema(parsed_node, config_dict)
-        self._update_node_alias(parsed_node, config_dict)
+
+        # These call the RelationUpdate callable to go through generate_name macros
+        self._update_node_database(parsed_node, config_dict.get("database"))
+        self._update_node_schema(parsed_node, config_dict.get("schema"))
+        self._update_node_alias(parsed_node, config_dict.get("alias"))
+
+        # Snapshot nodes use special "target_database" and "target_schema" fields for some reason
+        if parsed_node.resource_type == NodeType.Snapshot:
+            if "target_database" in config_dict and config_dict["target_database"]:
+                parsed_node.database = config_dict["target_database"]
+            if "target_schema" in config_dict and config_dict["target_schema"]:
+                parsed_node.schema = config_dict["target_schema"]
+
         self._update_node_relation_name(parsed_node)

     def update_parsed_node_config(
@@ -349,7 +358,7 @@ class ConfiguredParser(
         # do this once before we parse the node database/schema/alias, so
         # parsed_node.config is what it would be if they did nothing
         self.update_parsed_node_config_dict(parsed_node, config_dict)
-        # This updates the node database/schema/alias
+        # This updates the node database/schema/alias/relation_name
         self.update_parsed_node_relation_names(parsed_node, config_dict)

         # tests don't have hooks
@@ -81,7 +81,7 @@ class MacroParser(BaseParser[Macro]):
             name: str = macro.name.replace(MACRO_PREFIX, "")
             node = self.parse_macro(block, base_node, name)
             # get supported_languages for materialization macro
-            if "materialization" in name:
+            if block.block_type_name == "materialization":
                 node.supported_languages = jinja.get_supported_languages(macro)
             yield node
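This one-line change is the whole fix for #6231: matching on the macro's name meant any user macro with "materialization" in its name was treated as a materialization, while matching on the Jinja block type only catches real `{% materialization ... %}` blocks. A toy illustration of the difference (the `Block` class is a stand-in, not dbt's parser type):

```python
from dataclasses import dataclass

@dataclass
class Block:
    name: str             # e.g. "materialization_macro"
    block_type_name: str  # "macro" or "materialization"

def is_materialization_old(block: Block) -> bool:
    return "materialization" in block.name        # name-based: false positive below

def is_materialization_new(block: Block) -> bool:
    return block.block_type_name == "materialization"  # structural check

user_macro = Block(name="materialization_macro", block_type_name="macro")
assert is_materialization_old(user_macro)      # wrongly classified before the fix
assert not is_materialization_new(user_macro)  # correctly a plain macro after it
```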
@@ -562,7 +562,7 @@ class ManifestLoader:
                 node.deprecation_date
                 and node.deprecation_date < datetime.datetime.now().astimezone()
             ):
-                fire_event(
+                warn_or_error(
                     DeprecatedModel(
                         model_name=node.name,
                         model_version=version_to_str(node.version),
@@ -581,7 +581,7 @@ class ManifestLoader:
                 else:
                     event_cls = UpcomingReferenceDeprecation

                fire_event(
-                warn_or_error(
                     event_cls(
                         model_name=node.name,
                         ref_model_package=resolved_ref.package_name,
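Swapping `fire_event` for `warn_or_error` is what makes #8130 work end to end: `fire_event` always emits a warning, while `warn_or_error` consults the active flags and raises instead when the event type is selected by `--warn-error` or `--warn-error-options`. A minimal sketch of that dispatch logic, using simplified stand-ins rather than dbt's real event manager:

```python
class DeprecatedModel:
    def __str__(self) -> str:
        return "Model my_model has passed its deprecation date."

def warn_or_error(event, warn_error: bool, include: set) -> None:
    # Promote the warning to a hard error when opted in; otherwise just warn.
    name = type(event).__name__
    if warn_error or name in include:
        raise RuntimeError(str(event))
    print(f"[WARNING] {event}")

warn_or_error(DeprecatedModel(), warn_error=False, include=set())  # prints a warning
try:
    warn_or_error(DeprecatedModel(), warn_error=False, include={"DeprecatedModel"})
except RuntimeError as exc:
    print(f"promoted to error: {exc}")
```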
@@ -532,6 +532,7 @@ class SemanticModelParser(YamlReader):
             measures=self._get_measures(unparsed.measures),
             dimensions=self._get_dimensions(unparsed.dimensions),
             defaults=unparsed.defaults,
+            primary_entity=unparsed.primary_entity,
         )

         ctx = generate_parse_semantic_models(
@@ -375,15 +375,17 @@ class GraphRunnableTask(ConfiguredTask):
                 )

                 print_run_result_error(failure.result)
-                raise
+                # ensure information about all nodes is propagated to run results when failing fast
+                return self.node_results
         except KeyboardInterrupt:
             self._cancel_connections(pool)
             print_run_end_messages(self.node_results, keyboard_interrupt=True)
             raise
+        finally:
+            pool.close()
+            pool.join()

-        pool.close()
-        pool.join()
-
         return self.node_results

     def _mark_dependent_errors(self, node_id, result, cause):
         if self.graph is None:
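This is the behavioral heart of the fail-fast fix (#7785): on a fail-fast failure the task now returns `self.node_results` instead of re-raising, so callers still receive a result for every node, and moving `pool.close()`/`pool.join()` into `finally` keeps the worker-pool cleanup on both the return and the raise paths. A small self-contained sketch (toy pool work, not dbt's task code) of why the cleanup belongs in `finally`:

```python
import multiprocessing.dummy as mp  # thread-backed Pool, safe for a demo

def run_nodes(fail_fast: bool) -> list:
    pool = mp.Pool(2)
    results = []
    try:
        results.append(pool.apply(sum, ([1, 2, 3],)))
        if fail_fast:
            # propagate the partial results instead of raising
            return results
        results.append(pool.apply(sum, ([4, 5],)))
        return results
    finally:
        # Runs on every exit path: normal return, early return, or exception.
        pool.close()
        pool.join()

print(run_nodes(fail_fast=True))   # [6]  -- pool still cleaned up
print(run_nodes(fail_fast=False))  # [6, 9]
```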
core/dbt/tests/fixtures/project.py (vendored, 1 line changed)
@@ -502,6 +502,7 @@ def project(
         DEBUG=False,
         LOG_CACHE_EVENTS=False,
         QUIET=False,
+        LOG_FILE_MAX_BYTES=1000000,
     )
     setup_event_logger(log_flags)
     orig_cwd = os.getcwd()
@@ -16,9 +16,8 @@ import time
 from pathlib import PosixPath, WindowsPath

 from contextlib import contextmanager
-from dbt.exceptions import ConnectionError, DuplicateAliasError
-from dbt.events.functions import fire_event
 from dbt.events.types import RetryExternalCall, RecordRetryException
+from dbt.helper_types import WarnErrorOptions
 from dbt import flags
 from enum import Enum
 from typing_extensions import Protocol
@@ -40,6 +39,7 @@ from typing import (
     Sequence,
 )

+import dbt.events.functions
 import dbt.exceptions

 DECIMALS: Tuple[Type[Any], ...]
@@ -337,15 +337,18 @@ class JSONEncoder(json.JSONEncoder):
     def default(self, obj):
         if isinstance(obj, DECIMALS):
             return float(obj)
-        if isinstance(obj, (datetime.datetime, datetime.date, datetime.time)):
+        elif isinstance(obj, (datetime.datetime, datetime.date, datetime.time)):
             return obj.isoformat()
-        if isinstance(obj, jinja2.Undefined):
+        elif isinstance(obj, jinja2.Undefined):
             return ""
-        if hasattr(obj, "to_dict"):
+        elif isinstance(obj, Exception):
+            return repr(obj)
+        elif hasattr(obj, "to_dict"):
             # if we have a to_dict we should try to serialize the result of
             # that!
             return obj.to_dict(omit_none=True)
-        return super().default(obj)
+        else:
+            return super().default(obj)


 class ForgivingJSONEncoder(JSONEncoder):
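The new `elif isinstance(obj, Exception): return repr(obj)` branch is what lets `ForgivingJSONEncoder` (passed as `cls=dbt.utils.ForgivingJSONEncoder` in the logging changes above) serialize payloads that embed exception objects instead of crashing. A self-contained sketch of the same idea against the stdlib API:

```python
import json

class ForgivingEncoder(json.JSONEncoder):
    def default(self, obj):
        # Exceptions are not JSON-serializable by default; fall back to repr.
        if isinstance(obj, Exception):
            return repr(obj)
        return super().default(obj)

payload = {"node": "my_model", "error": ValueError("bad column")}
print(json.dumps(payload, sort_keys=True, cls=ForgivingEncoder))
# {"error": "ValueError('bad column')", "node": "my_model"}
```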
@@ -369,7 +372,7 @@ class Translator:
         for key, value in kwargs.items():
             canonical_key = self.aliases.get(key, key)
             if canonical_key in result:
-                raise DuplicateAliasError(kwargs, self.aliases, canonical_key)
+                raise dbt.exceptions.DuplicateAliasError(kwargs, self.aliases, canonical_key)
             result[canonical_key] = self.translate_value(value)
         return result
@@ -389,9 +392,7 @@ class Translator:
             return self.translate_mapping(value)
         except RuntimeError as exc:
             if "maximum recursion depth exceeded" in str(exc):
-                raise dbt.exceptions.RecursionError(
-                    "Cycle detected in a value passed to translate!"
-                )
+                raise RecursionError("Cycle detected in a value passed to translate!")
             raise
@@ -603,12 +604,14 @@ def _connection_exception_retry(fn, max_attempts: int, attempt: int = 0):
         ReadError,
     ) as exc:
         if attempt <= max_attempts - 1:
-            fire_event(RecordRetryException(exc=str(exc)))
-            fire_event(RetryExternalCall(attempt=attempt, max=max_attempts))
+            dbt.events.functions.fire_event(RecordRetryException(exc=str(exc)))
+            dbt.events.functions.fire_event(RetryExternalCall(attempt=attempt, max=max_attempts))
             time.sleep(1)
             return _connection_exception_retry(fn, max_attempts, attempt + 1)
         else:
-            raise ConnectionError("External connection exception occurred: " + str(exc))
+            raise dbt.exceptions.ConnectionError(
+                "External connection exception occurred: " + str(exc)
+            )
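`_connection_exception_retry` retries a flaky network call with a fixed one-second sleep, recursing with `attempt + 1` until `max_attempts` is exhausted and then raising a connection error that wraps the last exception. A stripped-down, dependency-free sketch of the same shape:

```python
import time

def retry_call(fn, max_attempts: int, attempt: int = 0):
    try:
        return fn()
    except OSError as exc:  # stand-in for the HTTP exception tuple in the real code
        if attempt <= max_attempts - 1:
            time.sleep(1)
            return retry_call(fn, max_attempts, attempt + 1)
        raise ConnectionError(f"External connection exception occurred: {exc}")

calls = {"n": 0}
def flaky():
    calls["n"] += 1
    if calls["n"] < 3:
        raise OSError("transient failure")
    return "ok"

print(retry_call(flaky, max_attempts=5))  # retries twice, then prints "ok"
```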
 # This is used to serialize the args in the run_results and in the logs.
@@ -652,6 +655,9 @@ def args_to_dict(args):
         # this was required for a test case
         if isinstance(var_args[key], PosixPath) or isinstance(var_args[key], WindowsPath):
             var_args[key] = str(var_args[key])
+        if isinstance(var_args[key], WarnErrorOptions):
+            var_args[key] = var_args[key].to_dict()
+
         dict_args[key] = var_args[key]
     return dict_args
@@ -232,5 +232,5 @@ def _get_adapter_plugin_names() -> Iterator[str]:
         yield plugin_name


-__version__ = "1.6.0b8"
+__version__ = "1.6.0rc2"
 installed = get_installed_version()
@@ -25,7 +25,7 @@ with open(os.path.join(this_directory, "README.md")) as f:


 package_name = "dbt-core"
-package_version = "1.6.0b8"
+package_version = "1.6.0rc2"
 description = """With dbt, data analysts and engineers can build analytics \
 the way engineers build applications."""
@@ -59,8 +59,7 @@ setup(
         # ----
         # dbt-core uses these packages in standard ways. Pin to the major version, and check compatibility
         # with major versions in each new minor version of dbt-core.
-        # temporarily pinning click for mypy failures: https://github.com/pallets/click/issues/2558
-        "click>=8.1.1,<8.1.4",
+        "click<9",
         "networkx>=2.3,<4",
         # ----
         # These packages are major-version-0. Keep upper bounds on upcoming minor versions (which could have breaking changes)
@@ -69,16 +68,14 @@ setup(
         "pathspec>=0.9,<0.12",
         "isodate>=0.6,<0.7",
         # ----
         # There was a pin to below 0.4.4 for a while due to a bug in Ubuntu/sqlparse 0.4.4
-        "sqlparse>=0.2.3",
+        "sqlparse>=0.2.3,<0.5",
         # ----
         # These are major-version-0 packages also maintained by dbt-labs. Accept patches.
         "dbt-extractor~=0.4.1",
         "hologram~=0.0.16",  # includes transitive dependencies on python-dateutil and jsonschema
         "minimal-snowplow-tracker~=0.0.2",
-        # DSI is under active development, so we're pinning to specific dev versions for now.
-        # TODO: Before RC/final release, update to use ~= pinning.
-        "dbt-semantic-interfaces~=0.1.0rc1",
+        "dbt-semantic-interfaces~=0.2.0",
         # ----
         # Expect compatibility with all new versions of these packages, so lower bounds only.
         "packaging>20.9",
@@ -14,12 +14,12 @@ FROM --platform=$build_for python:3.11.2-slim-bullseye as base
 # N.B. The refs updated automagically every release via bumpversion
 # N.B. dbt-postgres is currently found in the core codebase so a value of dbt-core@<some_version> is correct

-ARG dbt_core_ref=dbt-core@v1.6.0b8
-ARG dbt_postgres_ref=dbt-core@v1.6.0b8
-ARG dbt_redshift_ref=dbt-redshift@v1.6.0b8
-ARG dbt_bigquery_ref=dbt-bigquery@v1.6.0b8
-ARG dbt_snowflake_ref=dbt-snowflake@v1.6.0b8
-ARG dbt_spark_ref=dbt-spark@v1.6.0b8
+ARG dbt_core_ref=dbt-core@v1.6.0rc2
+ARG dbt_postgres_ref=dbt-core@v1.6.0rc2
+ARG dbt_redshift_ref=dbt-redshift@v1.6.0rc2
+ARG dbt_bigquery_ref=dbt-bigquery@v1.6.0rc2
+ARG dbt_snowflake_ref=dbt-snowflake@v1.6.0rc2
+ARG dbt_spark_ref=dbt-spark@v1.6.0rc2
 # special case args
 ARG dbt_spark_version=all
 ARG dbt_third_party
@@ -1 +1 @@
-version = "1.6.0b8"
+version = "1.6.0rc2"
@@ -41,7 +41,7 @@ def _dbt_psycopg2_name():


 package_name = "dbt-postgres"
-package_version = "1.6.0b8"
+package_version = "1.6.0rc2"
 description = """The postgres adapter plugin for dbt (data build tool)"""

 this_directory = os.path.abspath(os.path.dirname(__file__))
@@ -1 +1 @@
-version = "1.6.0b8"
+version = "1.6.0rc2"
@@ -20,7 +20,7 @@ except ImportError:


 package_name = "dbt-tests-adapter"
-package_version = "1.6.0b8"
+package_version = "1.6.0rc2"
 description = """The dbt adapter tests for adapter plugins"""

 this_directory = os.path.abspath(os.path.dirname(__file__))
@@ -164,6 +164,10 @@ class TestCompile:
         with pytest.raises(DbtException, match="Error parsing inline query"):
             run_dbt(["compile", "--inline", "select * from {{ ref('third_model') }}"])

+    def test_inline_fail_database_error(self, project):
+        with pytest.raises(DbtRuntimeError, match="Database Error"):
+            run_dbt(["show", "--inline", "slect asdlkjfsld;j"])
+
     def test_multiline_jinja(self, project):
         (results, log_output) = run_dbt_and_capture(["compile", "--inline", model_multiline_jinja])
         assert len(results) == 1
@@ -13,7 +13,6 @@ from contextlib import contextmanager
 import dbt.semver
 import dbt.config
 import dbt.exceptions
-from dbt.contracts.results import RunStatus

 from dbt.tests.util import check_relations_equal, run_dbt, run_dbt_and_capture
@@ -208,9 +207,8 @@ class TestMissingDependency(object):

     def test_missing_dependency(self, project):
         # dbt should raise a runtime exception
-        res = run_dbt(["compile"], expect_pass=False)
-        assert len(res) == 1
-        assert res[0].status == RunStatus.Error
+        with pytest.raises(dbt.exceptions.DbtRuntimeError):
+            run_dbt(["compile"])


 class TestSimpleDependencyWithSchema(BaseDependencyTest):
@@ -1,6 +1,9 @@
 import pytest

+from dbt.exceptions import EventCompilationError
 from dbt.cli.main import dbtRunner
+from dbt.tests.util import run_dbt
+

 deprecated_model__yml = """
 version: 2
@@ -41,6 +44,14 @@ class TestModelDeprecationWarning:
         assert len(matches) == 1
         assert matches[0].data.model_name == "my_model"

+    def test_deprecation_warning_error(self, project):
+        with pytest.raises(EventCompilationError):
+            run_dbt(["--warn-error", "parse"])
+
+    def test_deprecation_warning_error_options(self, project):
+        with pytest.raises(EventCompilationError):
+            run_dbt(["--warn-error-options", '{"include": ["DeprecatedModel"]}', "parse"])
+

 class TestReferenceDeprecatingWarning:
     @pytest.fixture(scope="class")
@@ -59,6 +70,16 @@ class TestReferenceDeprecatingWarning:
         assert matches[0].data.model_name == "my_dependant_model"
         assert matches[0].data.ref_model_name == "my_model"

+    def test_deprecation_warning_error(self, project):
+        with pytest.raises(EventCompilationError):
+            run_dbt(["--warn-error", "parse"])
+
+    def test_deprecation_warning_error_options(self, project):
+        with pytest.raises(EventCompilationError):
+            run_dbt(
+                ["--warn-error-options", '{"include": ["UpcomingReferenceDeprecation"]}', "parse"]
+            )
+

 class TestReferenceDeprecatedWarning:
     @pytest.fixture(scope="class")
@@ -76,3 +97,11 @@ class TestReferenceDeprecatedWarning:
         assert len(matches) == 1
         assert matches[0].data.model_name == "my_dependant_model"
         assert matches[0].data.ref_model_name == "my_model"
+
+    def test_deprecation_warning_error(self, project):
+        with pytest.raises(EventCompilationError):
+            run_dbt(["--warn-error", "parse"])
+
+    def test_deprecation_warning_error_options(self, project):
+        with pytest.raises(EventCompilationError):
+            run_dbt(["--warn-error-options", '{"include": ["DeprecatedReference"]}', "parse"])
@@ -25,7 +25,7 @@ metrics:
     type_params:
       measure:
         name: "years_tenure"
-        filter: "{{dimension('loves_dbt')}} is true"
+        filter: "{{ Dimension('people_entity__loves_dbt') }} is true"
 """
@@ -4,6 +4,12 @@ models__dep_macro = """
 }}
 """

+models__materialization_macro = """
+{{
+  materialization_macro()
+}}
+"""
+
 models__with_undefined_macro = """
 {{ dispatch_to_nowhere() }}
 select 1 as id
@@ -75,6 +81,12 @@ macros__my_macros = """
 {% endmacro %}
 """

+macros__named_materialization = """
+{% macro materialization_macro() %}
+    select 1 as foo
+{% endmacro %}
+"""
+
 macros__no_default_macros = """
 {% macro do_something2(foo2, bar2) %}
@@ -20,12 +20,14 @@ from tests.functional.macros.fixtures import (
     models__override_get_columns_macros,
     models__deprecated_adapter_macro_model,
     models__incorrect_dispatch,
+    models__materialization_macro,
     macros__my_macros,
     macros__no_default_macros,
     macros__override_get_columns_macros,
     macros__package_override_get_columns_macros,
     macros__deprecated_adapter_macro,
     macros__incorrect_dispatch,
+    macros__named_materialization,
 )
@@ -78,6 +80,21 @@ class TestMacros:
         check_relations_equal(project.adapter, ["expected_local_macro", "local_macro"])


+class TestMacrosNamedMaterialization:
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {
+            "models_materialization_macro.sql": models__materialization_macro,
+        }
+
+    @pytest.fixture(scope="class")
+    def macros(self):
+        return {"macros_named_materialization.sql": macros__named_materialization}
+
+    def test_macro_with_materialization_in_name_works(self, project):
+        run_dbt(expect_pass=True)
+
+
 class TestInvalidMacros:
     @pytest.fixture(scope="class")
     def models(self):
@@ -70,7 +70,7 @@ metrics:
     type_params:
       measure:
         name: "years_tenure"
-        filter: "{{dimension('loves_dbt')}} is true"
+        filter: "{{ Dimension('id__loves_dbt') }} is true"

   - name: average_tenure
     label: "Average tenure"
@@ -115,7 +115,7 @@ metrics:
     type_params:
       measure:
         name: years_tenure
-        filter: "{{dimension('loves_dbt')}} is true"
+        filter: "{{ Dimension('id__loves_dbt') }} is true"

   - name: collective_window
     label: "Collective window"
@@ -124,7 +124,7 @@ metrics:
     type_params:
       measure:
         name: years_tenure
-        filter: "{{dimension('loves_dbt')}} is true"
+        filter: "{{ Dimension('id__loves_dbt') }} is true"
         window: 14 days

   - name: average_tenure
@@ -452,7 +452,7 @@ metrics:
     type_params:
       measure:
         name: years_tenure
-        filter: "{{dimension('loves_dbt')}} is true"
+        filter: "{{ Dimension('id__loves_dbt') }} is true"

 """
@@ -479,7 +479,7 @@ metrics:
     type_params:
       measure:
         name: years_tenure
-        filter: "{{dimension('loves_dbt')}} is true"
+        filter: "{{ Dimension('id__loves_dbt') }} is true"

 """
@@ -353,7 +353,7 @@ metrics:
     type_params:
       measure:
         name: customers
-        filter: "{{dimension('loves_dbt')}} is true"
+        filter: "{{ Dimension('id__loves_dbt') }} is true"
     +meta:
       is_okr: True
     tags:
@@ -472,7 +472,7 @@ metrics:
     type_params:
       measure:
         name: years_tenure
-        filter: "{{dimension('loves_dbt')}} is true"
+        filter: "{{ Dimension('id__loves_dbt') }} is true"

 """
@@ -619,7 +619,7 @@ metrics:
     type_params:
       measure:
         name: years_tenure
-        filter: "{{dimension('loves_dbt')}} is true"
+        filter: "{{ Dimension('id__loves_dbt') }} is true"

 """
@@ -1008,7 +1008,7 @@ metrics:
     type_params:
       measure:
         name: years_tenure
-        filter: "{{dimension('loves_dbt')}} is true"
+        filter: "{{ Dimension('id__loves_dbt') }} is true"

 """
@@ -61,6 +61,8 @@ semantic_models:
       - name: user
         type: foreign
         expr: user_id
+      - name: id
+        type: primary

 metrics:
   - name: records_with_revenue
@@ -72,10 +72,13 @@ class TestShow:
         assert "sample_bool" in log_output

     def test_inline_fail(self, project):
         run_dbt(["build"])
         with pytest.raises(DbtException, match="Error parsing inline query"):
             run_dbt(["show", "--inline", "select * from {{ ref('third_model') }}"])

+    def test_inline_fail_database_error(self, project):
+        with pytest.raises(DbtRuntimeError, match="Database Error"):
+            run_dbt(["show", "--inline", "slect asdlkjfsld;j"])
+
     def test_ephemeral_model(self, project):
         run_dbt(["build"])
         (results, log_output) = run_dbt_and_capture(["show", "--select", "ephemeral_model"])
@@ -96,6 +96,18 @@ snapshots:
       owner: 'a_owner'
 """

+models__schema_with_target_schema_yml = """
+version: 2
+snapshots:
+  - name: snapshot_actual
+    tests:
+      - mutually_exclusive_ranges
+    config:
+      meta:
+        owner: 'a_owner'
+      target_schema: schema_from_schema_yml
+"""
+
 models__ref_snapshot_sql = """
 select * from {{ ref('snapshot_actual') }}
 """
@@ -281,6 +293,26 @@ snapshots_pg__snapshot_sql = """
 {% endsnapshot %}
 """

+snapshots_pg__snapshot_no_target_schema_sql = """
+{% snapshot snapshot_actual %}
+
+    {{
+        config(
+            target_database=var('target_database', database),
+            unique_key='id || ' ~ "'-'" ~ ' || first_name',
+            strategy='timestamp',
+            updated_at='updated_at',
+        )
+    }}
+
+    {% if var('invalidate_hard_deletes', 'false') | as_bool %}
+        {{ config(invalidate_hard_deletes=True) }}
+    {% endif %}
+
+    select * from {{target.database}}.{{target.schema}}.seed
+
+{% endsnapshot %}
+"""
+
 models_slow__gen_sql = """
@@ -2,13 +2,15 @@ import os
 from datetime import datetime
 import pytz
 import pytest
-from dbt.tests.util import run_dbt, check_relations_equal, relation_from_name
+from dbt.tests.util import run_dbt, check_relations_equal, relation_from_name, write_file
 from tests.functional.simple_snapshot.fixtures import (
     models__schema_yml,
+    models__schema_with_target_schema_yml,
     models__ref_snapshot_sql,
     seeds__seed_newcol_csv,
     seeds__seed_csv,
     snapshots_pg__snapshot_sql,
+    snapshots_pg__snapshot_no_target_schema_sql,
     macros__test_no_overlaps_sql,
     macros_custom_snapshot__custom_sql,
     snapshots_pg_custom_namespaced__snapshot_sql,
@@ -123,6 +125,41 @@ class TestBasicRef(Basic):
         ref_setup(project, num_snapshot_models=1)


+class TestBasicTargetSchemaConfig(Basic):
+    @pytest.fixture(scope="class")
+    def snapshots(self):
+        return {"snapshot.sql": snapshots_pg__snapshot_no_target_schema_sql}
+
+    @pytest.fixture(scope="class")
+    def project_config_update(self, unique_schema):
+        return {
+            "snapshots": {
+                "test": {
+                    "target_schema": unique_schema + "_alt",
+                }
+            }
+        }
+
+    def test_target_schema(self, project):
+        manifest = run_dbt(["parse"])
+        assert len(manifest.nodes) == 5
+        # ensure that the schema in the snapshot node is the same as target_schema
+        snapshot_id = "snapshot.test.snapshot_actual"
+        snapshot_node = manifest.nodes[snapshot_id]
+        assert snapshot_node.schema == f"{project.test_schema}_alt"
+        assert (
+            snapshot_node.relation_name
+            == f'"{project.database}"."{project.test_schema}_alt"."snapshot_actual"'
+        )
+        assert snapshot_node.meta == {"owner": "a_owner"}
+
+        # write out schema.yml file and check again
+        write_file(models__schema_with_target_schema_yml, "models", "schema.yml")
+        manifest = run_dbt(["parse"])
+        snapshot_node = manifest.nodes[snapshot_id]
+        assert snapshot_node.schema == "schema_from_schema_yml"
+
+
 class CustomNamespace:
     @pytest.fixture(scope="class")
     def snapshots(self):
@@ -57,6 +57,11 @@ class TestFlags:
         assert hasattr(flags, "LOG_PATH")
         assert getattr(flags, "LOG_PATH") == Path("logs")

+    def test_log_file_max_size_default(self, run_context):
+        flags = Flags(run_context)
+        assert hasattr(flags, "LOG_FILE_MAX_BYTES")
+        assert getattr(flags, "LOG_FILE_MAX_BYTES") == 10 * 1024 * 1024
+
     @pytest.mark.parametrize(
         "set_stats_param,do_not_track,expected_anonymous_usage_stats",
         [
@@ -424,6 +424,9 @@ def test_invocation_args_to_dict_in_macro_runtime_context(
     # Comes from unit/utils.py config_from_parts_or_dicts method
     assert ctx["invocation_args_dict"]["profile_dir"] == "/dev/null"

+    assert isinstance(ctx["invocation_args_dict"]["warn_error_options"], Dict)
+    assert ctx["invocation_args_dict"]["warn_error_options"] == {"include": [], "exclude": []}
+

 def test_model_parse_context(config_postgres, manifest_fx, get_adapter, get_include_paths):
     ctx = providers.generate_parser_model_context(
@@ -2,7 +2,7 @@ from argparse import Namespace
 import pytest

 import dbt.flags as flags
-from dbt.events.functions import msg_to_dict, warn_or_error
+from dbt.events.functions import msg_to_dict, warn_or_error, setup_event_logger
 from dbt.events.types import InfoLevel, NoNodesForSelectionCriteria
 from dbt.exceptions import EventCompilationError
@@ -59,3 +59,13 @@ def test_msg_to_dict_handles_exceptions_gracefully():
     assert (
         False
     ), f"We expect `msg_to_dict` to gracefully handle exceptions, but it raised {exc}"
+
+
+def test_setup_event_logger_specify_max_bytes(mocker):
+    patched_file_handler = mocker.patch("dbt.events.eventmgr.RotatingFileHandler")
+    args = Namespace(log_file_max_bytes=1234567)
+    flags.set_from_args(args, {})
+    setup_event_logger(flags.get_flags())
+    patched_file_handler.assert_called_once_with(
+        filename="logs/dbt.log", encoding="utf8", maxBytes=1234567, backupCount=5
+    )
@@ -18,6 +18,7 @@ from dbt import tracking
 from dbt.contracts.files import SourceFile, FileHash, FilePath
 from dbt.contracts.graph.manifest import MacroManifest, ManifestStateCheck
 from dbt.graph import NodeSelector, parse_difference
+from dbt.events.functions import setup_event_logger

 try:
     from queue import Empty
@@ -140,6 +141,7 @@ class GraphTest(unittest.TestCase):

         config = config_from_parts_or_dicts(project=cfg, profile=self.profile)
         dbt.flags.set_from_args(Namespace(), config)
+        setup_event_logger(dbt.flags.get_flags())
         object.__setattr__(dbt.flags.get_flags(), "PARTIAL_PARSE", False)
         return config
@@ -169,7 +169,7 @@ def test_metric_node_satisfies_protocol():

 def test_where_filter_satisfies_protocol():
     where_filter = WhereFilter(
-        where_sql_template="{{ dimension('dimension_name') }} AND {{ time_dimension('time_dimension_name', 'month') }} AND {{ entity('entity_name') }}"
+        where_sql_template="{{ Dimension('enity_name__dimension_name') }} AND {{ TimeDimension('entity_name__time_dimension_name', 'month') }} AND {{ Entity('entity_name') }}"
     )
     assert isinstance(where_filter, RuntimeCheckableWhereFilter)
tests/unit/test_semantic_models.py (new file, 81 lines)
@@ -0,0 +1,81 @@
import pytest

from typing import List

from dbt.contracts.graph.nodes import SemanticModel
from dbt.contracts.graph.semantic_models import Dimension, Entity, Measure, Defaults
from dbt.node_types import NodeType
from dbt_semantic_interfaces.references import MeasureReference
from dbt_semantic_interfaces.type_enums import AggregationType, DimensionType, EntityType


@pytest.fixture(scope="function")
def dimensions() -> List[Dimension]:
    return [Dimension(name="ds", type=DimensionType.TIME)]


@pytest.fixture(scope="function")
def entities() -> List[Entity]:
    return [Entity(name="test_entity", type=EntityType.PRIMARY, expr="id")]


@pytest.fixture(scope="function")
def measures() -> List[Measure]:
    return [Measure(name="test_measure", agg=AggregationType.COUNT, expr="id")]


@pytest.fixture(scope="function")
def default_semantic_model(
    dimensions: List[Dimension], entities: List[Entity], measures: List[Measure]
) -> SemanticModel:
    return SemanticModel(
        name="test_semantic_model",
        resource_type=NodeType.SemanticModel,
        model="ref('test_model')",
        package_name="test",
        path="test_path",
        original_file_path="test_fixture",
        unique_id=f"{NodeType.SemanticModel}.test.test_semantic_model",
        fqn=[],
        defaults=Defaults(agg_time_dimension="ds"),
        dimensions=dimensions,
        entities=entities,
        measures=measures,
        node_relation=None,
    )


def test_checked_agg_time_dimension_for_measure_via_defaults(
    default_semantic_model: SemanticModel,
):
    assert default_semantic_model.defaults.agg_time_dimension is not None
    measure = default_semantic_model.measures[0]
    measure.agg_time_dimension = None
    default_semantic_model.checked_agg_time_dimension_for_measure(
        MeasureReference(element_name=measure.name)
    )


def test_checked_agg_time_dimension_for_measure_via_measure(default_semantic_model: SemanticModel):
    default_semantic_model.defaults = None
    measure = default_semantic_model.measures[0]
    measure.agg_time_dimension = default_semantic_model.dimensions[0].name
    default_semantic_model.checked_agg_time_dimension_for_measure(
        MeasureReference(element_name=measure.name)
    )


def test_checked_agg_time_dimension_for_measure_exception(default_semantic_model: SemanticModel):
    default_semantic_model.defaults = None
    measure = default_semantic_model.measures[0]
    measure.agg_time_dimension = None

    with pytest.raises(AssertionError) as execinfo:
        default_semantic_model.checked_agg_time_dimension_for_measure(
            MeasureReference(measure.name)
        )

    assert (
        f"Aggregation time dimension for measure {measure.name} on semantic model {default_semantic_model.name}"
        in str(execinfo.value)
    )
@@ -105,14 +105,14 @@ class TestYamlRendering(unittest.TestCase):
         dct = {
             "name": "test{{ metric_name_end }}",
             "description": "{{ docs('my_doc') }}",
-            "filter": "{{ dimension('my_dim') }} = false",
+            "filter": "{{ Dimension('my_entity__my_dim') }} = false",
         }
         # We expect the expression and description will not be rendered, but
         # other fields will be
         expected = {
             "name": "test_metric",
             "description": "{{ docs('my_doc') }}",
-            "filter": "{{ dimension('my_dim') }} = false",
+            "filter": "{{ Dimension('my_entity__my_dim') }} = false",
         }
         dct = renderer.render_data(dct)
         self.assertEqual(dct, expected)