mirror of https://github.com/dbt-labs/dbt-core
synced 2025-12-19 06:31:27 +00:00

Compare commits: enable-pos...mjsqu-meta
11 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 54b9aa83db | |
| | 33e89a9956 | |
| | 7c43365e72 | |
| | 537e3f0fba | |
| | b4ed7699ac | |
| | e2ac1922f0 | |
| | 46b5e90e0b | |
| | 046e08ea20 | |
| | 2308179389 | |
| | 63ae772dc5 | |
| | 3f297cb4e3 | |
6  .changes/unreleased/Fixes-20251216-120727.yaml  Normal file

@@ -0,0 +1,6 @@
+kind: Fixes
+body: ':bug: :snowman:Propagate exceptions for NodeFinished callbacks in dbtRunner'
+time: 2025-12-16T12:07:27.576087-05:00
+custom:
+  Author: michelleark
+  Issue: "11612"
6  .changes/unreleased/Fixes-20251217-002813.yaml  Normal file

@@ -0,0 +1,6 @@
+kind: Fixes
+body: Adds omitted return statement to RuntimeConfigObject.meta_require method
+time: 2025-12-17T00:28:13.015416197Z
+custom:
+  Author: mjsqu
+  Issue: "12288"
6  .changes/unreleased/Fixes-20251217-105918.yaml  Normal file

@@ -0,0 +1,6 @@
+kind: Fixes
+body: Do not raise deprecation warning when encountering dataset or project configs for bigquery
+time: 2025-12-17T10:59:18.372968-05:00
+custom:
+  Author: michelleark
+  Issue: "12285"
6  .changes/unreleased/Under the Hood-20250929-151159.yaml  Normal file

@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Update schema file order test
+time: 2025-09-29T15:11:59.611595-04:00
+custom:
+  Author: gshank
+  Issue: "11869"
6  .changes/unreleased/Under the Hood-20251215-155046.yaml  Normal file

@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Bump lower bound for dbt-common to 1.37.2
+time: 2025-12-15T15:50:46.857793-05:00
+custom:
+  Author: michelleark
+  Issue: "12284"
12  .github/workflows/main.yml  vendored

@@ -62,7 +62,7 @@ jobs:
           python -m pip --version
           python -m pip install hatch
           cd core
-          hatch run setup
+          hatch -v run setup

       - name: Verify dbt installation
         run: |
@@ -106,7 +106,7 @@ jobs:
         with:
           timeout_minutes: 10
           max_attempts: 3
-          command: cd core && hatch run ci:unit-tests
+          command: cd core && hatch -v run ci:unit-tests

       - name: Get current date
         if: always()
@@ -230,7 +230,7 @@ jobs:
           timeout_minutes: 30
           max_attempts: 3
           shell: bash
-          command: cd core && hatch run ci:integration-tests -- --ddtrace --splits ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }} --group ${{ matrix.split-group }}
+          command: cd core && hatch -v run ci:integration-tests -- --ddtrace --splits ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }} --group ${{ matrix.split-group }}

       - name: Get current date
         if: always()
@@ -311,7 +311,7 @@ jobs:
           timeout_minutes: 30
           max_attempts: 3
           shell: bash
-          command: cd core && hatch run ci:integration-tests -- --ddtrace --splits ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }} --group ${{ matrix.split-group }}
+          command: cd core && hatch -v run ci:integration-tests -- --ddtrace --splits ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }} --group ${{ matrix.split-group }}

       - name: Get current date
         if: always()
@@ -326,7 +326,7 @@ jobs:
           name: logs_${{ matrix.python-version }}_${{ matrix.os }}_${{ matrix.split-group }}_${{ steps.date.outputs.date }}
           path: ./logs

-      - name: Upload Integration Test Coverage to Codecov
+      - name: Upload Integration Test Coverage
        if: ${{ matrix.python-version == '3.11' }}
        uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # codecov/codecov-action@v5
        with:
@@ -380,4 +380,4 @@ jobs:
       - name: Check and verify distributions
         run: |
           cd core
-          hatch run build:check-all
+          hatch -v run build:check-all
@@ -123,7 +123,7 @@ jobs:
         with:
           timeout_minutes: 30
           max_attempts: 3
-          command: cd core && hatch run ci:integration-tests -- -nauto
+          command: cd core && hatch -v run ci:integration-tests -- -nauto
         env:
           PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}
@@ -608,6 +608,8 @@ class RuntimeConfigObject(Config):
         if validator is not None:
             self._validate(validator, to_return)

+        return to_return
+
     def get(self, name, default=None, validator=None):
         to_return = self._lookup(name, default)
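For context, a minimal sketch of the bug class this hunk fixes, with illustrative names rather than dbt-core's actual providers module: a method that looks up and validates a value but falls off the end without returning it, so callers always receive None.

```python
# Hypothetical stand-in for the fixed pattern; ConfigLookup is not a dbt class.
class ConfigLookup:
    def __init__(self, values: dict):
        self._values = values

    def meta_require(self, name, validator=None):
        value = self._values[name]  # raises KeyError if the key is missing
        if validator is not None:
            validator(value)
        return value  # before the fix, this return was omitted -> implicit None


lookup = ConfigLookup({"meta_key": "my_meta_value"})
assert lookup.meta_require("meta_key") == "my_meta_value"
```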
@@ -37,6 +37,10 @@ _HIERARCHICAL_CONFIG_KEYS = {
     "unit_tests",
 }

+_ADAPTER_TO_CONFIG_ALIASES = {
+    "bigquery": ["dataset", "project"],
+}
+

 def load_json_from_package(jsonschema_type: str, filename: str) -> Dict[str, Any]:
     """Loads a JSON file from within a package."""
@@ -106,6 +110,16 @@ def _validate_with_schema(
     return validator.iter_errors(json)


+def _get_allowed_config_key_aliases() -> List[str]:
+    config_aliases = []
+    invocation_context = get_invocation_context()
+    for adapter in invocation_context.adapter_types:
+        if adapter in _ADAPTER_TO_CONFIG_ALIASES:
+            config_aliases.extend(_ADAPTER_TO_CONFIG_ALIASES[adapter])
+
+    return config_aliases
+
+
 def _get_allowed_config_fields_from_error_path(
     yml_schema: Dict[str, Any], error_path: List[Union[str, int]]
 ) -> Optional[List[str]]:
@@ -135,6 +149,7 @@ def _get_allowed_config_fields_from_error_path(
     ][0]["$ref"].split("/")[-1]

     allowed_config_fields = list(set(yml_schema["definitions"][config_field_name]["properties"]))
+    allowed_config_fields.extend(_get_allowed_config_key_aliases())

     return allowed_config_fields
@@ -169,7 +184,6 @@ def jsonschema_validate(schema: Dict[str, Any], json: Dict[str, Any], file_path:
             continue

         if key == "overrides" and key_path.startswith("sources"):
-
             deprecations.warn(
                 "source-override-deprecation",
                 source_name=key_path.split(".")[-1],
@@ -205,6 +219,9 @@ def jsonschema_validate(schema: Dict[str, Any], json: Dict[str, Any], file_path:
             keys = _additional_properties_violation_keys(sub_error)
             key_path = error_path_to_string(error)
             for key in keys:
+                if key in _get_allowed_config_key_aliases():
+                    continue
+
                 deprecations.warn(
                     "custom-key-in-config-deprecation",
                     key=key,
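A self-contained sketch of the allow-listing logic introduced above, assuming a plain set of adapter names in place of dbt's invocation context: keys flagged by schema validation are skipped when the active adapter declares them as config aliases.

```python
from typing import Dict, List, Set

# Illustrative reimplementation; dbt-core reads adapter_types from the
# invocation context rather than taking them as a parameter.
_ADAPTER_TO_CONFIG_ALIASES: Dict[str, List[str]] = {
    "bigquery": ["dataset", "project"],
}


def allowed_config_key_aliases(adapter_types: Set[str]) -> List[str]:
    aliases: List[str] = []
    for adapter in adapter_types:
        if adapter in _ADAPTER_TO_CONFIG_ALIASES:
            aliases.extend(_ADAPTER_TO_CONFIG_ALIASES[adapter])
    return aliases


def keys_to_warn_on(unknown_keys: List[str], adapter_types: Set[str]) -> List[str]:
    # Only warn on unknown config keys that no active adapter allow-lists.
    allowed = allowed_config_key_aliases(adapter_types)
    return [key for key in unknown_keys if key not in allowed]


# On bigquery, dataset/project raise no deprecation; on snowflake, both do.
assert keys_to_warn_on(["dataset", "project"], {"bigquery"}) == []
assert keys_to_warn_on(["dataset", "project"], {"snowflake"}) == ["dataset", "project"]
```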
@@ -249,34 +249,17 @@ class GraphRunnableTask(ConfiguredTask):
             thread_exception = e
         finally:
             if result is not None:
-                fire_event(
-                    NodeFinished(
-                        node_info=runner.node.node_info,
-                        run_result=result.to_msg_dict(),
+                try:
+                    fire_event(
+                        NodeFinished(
+                            node_info=runner.node.node_info,
+                            run_result=result.to_msg_dict(),
+                        )
                     )
-                )
+                except Exception as e:
+                    result = self._handle_thread_exception(runner, e)
             else:
-                msg = f"Exception on worker thread. {thread_exception}"
-
-                fire_event(
-                    GenericExceptionOnRun(
-                        unique_id=runner.node.unique_id,
-                        exc=str(thread_exception),
-                        node_info=runner.node.node_info,
-                    )
-                )
-
-                result = RunResult(
-                    status=RunStatus.Error,  # type: ignore
-                    timing=[],
-                    thread_id="",
-                    execution_time=0.0,
-                    adapter_response={},
-                    message=msg,
-                    failures=None,
-                    batch_results=None,
-                    node=runner.node,
-                )
+                result = self._handle_thread_exception(runner, thread_exception)

         # `_event_status` dict is only used for logging. Make sure
         # it gets deleted when we're done with it
@@ -365,6 +348,32 @@ class GraphRunnableTask(ConfiguredTask):
             args = [runner]
             self._submit(pool, args, callback)

+    def _handle_thread_exception(
+        self,
+        runner: BaseRunner,
+        thread_exception: Optional[Union[KeyboardInterrupt, SystemExit, Exception]],
+    ) -> RunResult:
+        msg = f"Exception on worker thread. {thread_exception}"
+        fire_event(
+            GenericExceptionOnRun(
+                unique_id=runner.node.unique_id,
+                exc=str(thread_exception),
+                node_info=runner.node.node_info,
+            )
+        )
+
+        return RunResult(
+            status=RunStatus.Error,  # type: ignore
+            timing=[],
+            thread_id="",
+            execution_time=0.0,
+            adapter_response={},
+            message=msg,
+            failures=None,
+            batch_results=None,
+            node=runner.node,
+        )
+
     def _handle_result(self, result: RunResult) -> None:
         """Mark the result as completed, insert the `CompileResultNode` into
         the manifest, and mark any descendants (potentially with a 'cause' if
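A stripped-down sketch of the control flow after this refactor, using stand-in types rather than dbt-core's RunResult and event machinery: if firing the NodeFinished event (which runs user callbacks) raises, the exception is converted into an error result instead of escaping the worker-thread bookkeeping.

```python
# Simplified stand-ins; dbt-core's RunResult and events carry much more state.
class Result:
    def __init__(self, status: str, message: str):
        self.status, self.message = status, message


def handle_thread_exception(exc: Exception) -> Result:
    # Mirrors _handle_thread_exception: report, then wrap in an error result.
    return Result("error", f"Exception on worker thread. {exc}")


def finish_node(result: Result, fire_event) -> Result:
    try:
        fire_event(result)  # user callbacks run here and may raise
    except Exception as e:
        result = handle_thread_exception(e)
    return result


def exploding_callback(_event) -> None:
    raise Exception("callback failed")


final = finish_node(Result("success", "ok"), exploding_callback)
assert final.status == "error"
assert final.message == "Exception on worker thread. callback failed"
```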
@@ -54,7 +54,7 @@ dependencies = [
     "dbt-extractor>=0.5.0,<=0.6",
     "dbt-semantic-interfaces>=0.9.0,<0.10",
     # Minor versions for these are expected to be backwards-compatible
-    "dbt-common>=1.37.0,<2.0",
+    "dbt-common>=1.37.2,<2.0",
     "dbt-adapters>=1.15.5,<2.0",
     "dbt-protos>=1.0.405,<2.0",
     "pydantic<3",
@@ -33,7 +33,7 @@ select {{ config.require('meta_key') }} as col_value

 meta_model_meta_require_sql = """
 -- models/meta_model.sql
-select {{ config.require('meta_key') }} as col_value
+select {{ config.meta_require('meta_key') }} as col_value
 """
@@ -66,11 +66,11 @@ class TestConfigGetMeta:
         self,
         project,
     ):
-        # This test runs a model with a config.get(key, default)
+        # This test runs a model with a config.get(key, default) -> default value returned
         results = run_dbt(["run"], expect_pass=False)
         assert len(results) == 1
         assert str(results[0].status) == "error"
-        assert 'column "my_meta_value" does not exist' in results[0].message
+        assert 'column "meta_default_value" does not exist' in results[0].message

         write_file(meta_model_meta_get_sql, "models", "meta_model.sql")
         results = run_dbt(["run"], expect_pass=False)
@@ -95,7 +95,7 @@ class TestConfigGetMetaRequire:
         results = run_dbt(["run"], expect_pass=False)
         assert len(results) == 1
         assert str(results[0].status) == "error"
-        assert 'column "my_meta_value" does not exist' in results[0].message
+        assert "does not define a required config parameter 'meta_key'" in results[0].message

         write_file(meta_model_meta_require_sql, "models", "meta_model.sql")
         results = run_dbt(["run"], expect_pass=False)
@@ -55,6 +55,22 @@ class TestDbtRunner:
         dbt.invoke(["debug"])
         mock_callback.assert_called()

+    def test_callback_node_finished_exceptions_are_raised(self, project):
+        from dbt_common.events.base_types import EventMsg
+
+        def callback_with_exception(event: EventMsg):
+            if event.info.name == "NodeFinished":
+                raise Exception("This should let continue the execution registering the failure")
+
+        dbt = dbtRunner(callbacks=[callback_with_exception])
+        result = dbt.invoke(["run", "--select", "models"])
+
+        assert result is not None
+        assert (
+            result.result.results[0].message
+            == "Exception on worker thread. This should let continue the execution registering the failure"
+        )
+
     def test_invoke_kwargs(self, project, dbt):
         res = dbt.invoke(
             ["run"],
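For reference, the callback hook this test exercises in its minimal form. The dbtRunner constructor and the NodeFinished event-name check come straight from the diff above; the counting logic and project setup are illustrative only.

```python
from dbt.cli.main import dbtRunner
from dbt_common.events.base_types import EventMsg

finished_nodes = []


def record_node_finished(event: EventMsg) -> None:
    # Every structured event passes through registered callbacks;
    # filter on the event name to react to node completion.
    if event.info.name == "NodeFinished":
        finished_nodes.append(event)


dbt = dbtRunner(callbacks=[record_node_finished])
result = dbt.invoke(["run"])  # assumes it runs inside a dbt project
print(f"success={result.success}, nodes finished={len(finished_nodes)}")
```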
@@ -220,10 +220,16 @@ models_yml = """
 models:
   - name: abcd
     description: "abcd model"
+    versions:
+      - v: 1
   - name: efgh
     description: "efgh model"
+    versions:
+      - v: 1
   - name: ijkl
     description: "ijkl model"
+    versions:
+      - v: 1
 """

 append_sources_yml = """
@@ -233,6 +239,8 @@ append_sources_yml = """
 append_models_yml = """
   - name: mnop
     description: "mnop model"
+    versions:
+      - v: 1
 """

 mnop_sql = """
@@ -245,9 +253,9 @@ class TestSourcesAndSchemaFiles:
     def models(self):
         return {
             "sources.yml": sources_yml,
-            "abcd.sql": abcd_sql,
-            "efgh.sql": efgh_sql,
-            "ijkl.sql": ijkl_sql,
+            "abcd_v1.sql": abcd_sql,
+            "efgh_v1.sql": efgh_sql,
+            "ijkl_v1.sql": ijkl_sql,
             "_models.yml": models_yml,
         }
@@ -258,7 +266,7 @@ class TestSourcesAndSchemaFiles:
         assert len(manifest.nodes) == 3

         write_file(models_yml + append_models_yml, project.project_root, "models", "_models.yml")
-        write_file(mnop_sql, project.project_root, "models", "mnop.sql")
+        write_file(mnop_sql, project.project_root, "models", "mnop_v1.sql")
         write_file(sources_yml + append_sources_yml, project.project_root, "models", "sources.yml")

         manifest = run_dbt(["parse"])
@@ -268,3 +276,4 @@ class TestSourcesAndSchemaFiles:
         # the patch updates, including description, so description will be ""
         for node in manifest.nodes.values():
             assert node.description == f"{node.name} model"
+            assert node.unique_id.endswith(".v1")
@@ -1,9 +1,17 @@
 import pytest

 from dbt.deprecations import (
     CustomKeyInConfigDeprecation,
     CustomKeyInObjectDeprecation,
     GenericJSONSchemaValidationDeprecation,
+    active_deprecations,
+    reset_deprecations,
 )
-from dbt.jsonschemas.jsonschemas import validate_model_config
+from dbt.jsonschemas.jsonschemas import (
+    jsonschema_validate,
+    resources_schema,
+    validate_model_config,
+)
 from dbt.tests.util import safe_set_invocation_context
+from dbt_common.context import get_invocation_context
 from dbt_common.events.event_catcher import EventCatcher
@@ -48,3 +56,38 @@ class TestValidateModelConfigNoError:
         assert len(ckicd_catcher.caught_events) == 1
         assert ckicd_catcher.caught_events[0].data.key == "non_existent_config"
         assert len(gjsvd_catcher.caught_events) == 0
+
+
+class TestValidateJsonSchema:
+    @pytest.fixture(scope="class")
+    def model_bigquery_alias_config_contents(self):
+        return {
+            "models": [
+                {
+                    "name": "model_1",
+                    "config": {
+                        "dataset": "dataset_1",
+                        "project": "project_1",
+                    },
+                }
+            ],
+        }
+
+    def test_validate_json_schema_no_error_aliases(self, model_bigquery_alias_config_contents):
+        reset_deprecations()
+
+        safe_set_invocation_context()
+        get_invocation_context().uses_adapter("bigquery")
+
+        jsonschema_validate(resources_schema(), model_bigquery_alias_config_contents, "test.yml")
+        assert active_deprecations == {}
+
+    def test_validate_json_schema_has_error_aliases(self, model_bigquery_alias_config_contents):
+        reset_deprecations()
+
+        safe_set_invocation_context()
+        # Set to adapter that doesn't support aliases specified
+        get_invocation_context().uses_adapter("snowflake")
+
+        jsonschema_validate(resources_schema(), model_bigquery_alias_config_contents, "test.yml")
+        assert active_deprecations == {"custom-key-in-config-deprecation": 2}