Compare commits


10 Commits

Author SHA1 Message Date
Michelle Ark
fa96acb15f Pin sqlparse <0.5.5 (#12308) 2025-12-19 15:49:38 -08:00
  * pin sqlparse <0.5.5
  * upper bound sqlparse dependency
Quigley Malcolm
b5852cb5e7 Bump minimum click to 8.2.0 (#12306) 2025-12-19 16:03:27 -06:00
Emily Rockman
129f2e54bc use post instead of dev (#12300) 2025-12-18 11:14:15 -05:00
Michelle Ark
d2977147fa Resolve ref to prefer package node over root node when parsing package, behind require_ref_searches_node_package_before_root flag (#11366) 2025-12-17 16:10:13 -05:00
Michelle Ark
fcd6870028 Add missing return statement to RuntimeConfigObject.meta_require method (#12294) 2025-12-17 13:14:07 -05:00
Michelle Ark
33e89a9956 Support aliased configs jsonschemas (#12291) 2025-12-17 12:31:52 -05:00
Emily Rockman
046e08ea20 add -v (#12269) 2025-12-17 09:16:38 -05:00
Gerda Shank
2308179389 Update schema file order test (#12061) 2025-12-16 14:30:51 -05:00
Michelle Ark
63ae772dc5 Propagate exceptions for NodeFinished callbacks in dbtRunner (#12286) 2025-12-16 14:28:16 -05:00
Michelle Ark
3f297cb4e3 fix test_config_with_meta_key (#12284) 2025-12-15 16:13:14 -05:00
31 changed files with 479 additions and 103 deletions

View File

@@ -0,0 +1,6 @@
kind: Dependencies
body: Bump minimum click to 8.2.0
time: 2025-12-19T15:38:04.785842-06:00
custom:
  Author: QMalcolm
  Issue: "12305"

View File

@@ -0,0 +1,6 @@
kind: Fixes
body: ':bug: :snowman: Fix ref resolution within package when duplicate nodes exist, behind require_ref_searches_node_package_before_root behavior change flag'
time: 2025-12-04T09:47:53.349428-08:00
custom:
  Author: michelleark
  Issue: "11351"

View File

@@ -0,0 +1,6 @@
kind: Fixes
body: ':bug: :snowman: Propagate exceptions for NodeFinished callbacks in dbtRunner'
time: 2025-12-16T12:07:27.576087-05:00
custom:
  Author: michelleark
  Issue: "11612"

View File

@@ -0,0 +1,6 @@
kind: Fixes
body: Adds omitted return statement to RuntimeConfigObject.meta_require method
time: 2025-12-17T00:28:13.015416197Z
custom:
  Author: mjsqu
  Issue: "12288"

View File

@@ -0,0 +1,6 @@
kind: Fixes
body: Do not raise deprecation warning when encountering dataset or project configs for bigquery
time: 2025-12-17T10:59:18.372968-05:00
custom:
  Author: michelleark
  Issue: "12285"

View File

@@ -0,0 +1,6 @@
kind: Fixes
body: Pin sqlparse <0.5.5 to avoid max tokens issue
time: 2025-12-19T18:44:05.216329-05:00
custom:
  Author: michelleark
  Issue: "12303"

View File

@@ -0,0 +1,6 @@
kind: Under the Hood
body: Update schema file order test
time: 2025-09-29T15:11:59.611595-04:00
custom:
  Author: gshank
  Issue: "11869"

View File

@@ -0,0 +1,6 @@
kind: Under the Hood
body: Bump lower bound for dbt-common to 1.37.2
time: 2025-12-15T15:50:46.857793-05:00
custom:
  Author: michelleark
  Issue: "12284"

View File

@@ -108,62 +108,62 @@ jobs:
echo "dbt-postgres-ref=${{ steps.core-ref.outputs.ref }}" echo "dbt-postgres-ref=${{ steps.core-ref.outputs.ref }}"
echo "dbt-core-ref=${{ steps.common-ref.outputs.ref }}" echo "dbt-core-ref=${{ steps.common-ref.outputs.ref }}"
integration-tests-postgres: # integration-tests-postgres:
name: "dbt-postgres integration tests" # name: "dbt-postgres integration tests"
needs: [job-prep] # needs: [job-prep]
runs-on: ubuntu-latest # runs-on: ubuntu-latest
defaults: # defaults:
run: # run:
working-directory: "./dbt-postgres" # working-directory: "./dbt-postgres"
environment: # environment:
name: "dbt-postgres" # name: "dbt-postgres"
env: # env:
POSTGRES_TEST_HOST: ${{ vars.POSTGRES_TEST_HOST }} # POSTGRES_TEST_HOST: ${{ vars.POSTGRES_TEST_HOST }}
POSTGRES_TEST_PORT: ${{ vars.POSTGRES_TEST_PORT }} # POSTGRES_TEST_PORT: ${{ vars.POSTGRES_TEST_PORT }}
POSTGRES_TEST_USER: ${{ vars.POSTGRES_TEST_USER }} # POSTGRES_TEST_USER: ${{ vars.POSTGRES_TEST_USER }}
POSTGRES_TEST_PASS: ${{ secrets.POSTGRES_TEST_PASS }} # POSTGRES_TEST_PASS: ${{ secrets.POSTGRES_TEST_PASS }}
POSTGRES_TEST_DATABASE: ${{ vars.POSTGRES_TEST_DATABASE }} # POSTGRES_TEST_DATABASE: ${{ vars.POSTGRES_TEST_DATABASE }}
POSTGRES_TEST_THREADS: ${{ vars.POSTGRES_TEST_THREADS }} # POSTGRES_TEST_THREADS: ${{ vars.POSTGRES_TEST_THREADS }}
services: # services:
postgres: # postgres:
image: postgres # image: postgres
env: # env:
POSTGRES_PASSWORD: postgres # POSTGRES_PASSWORD: postgres
options: >- # options: >-
--health-cmd pg_isready # --health-cmd pg_isready
--health-interval 10s # --health-interval 10s
--health-timeout 5s # --health-timeout 5s
--health-retries 5 # --health-retries 5
ports: # ports:
- ${{ vars.POSTGRES_TEST_PORT }}:5432 # - ${{ vars.POSTGRES_TEST_PORT }}:5432
steps: # steps:
- name: "Check out dbt-adapters@${{ needs.job-prep.outputs.dbt-postgres-ref }}" # - name: "Check out dbt-adapters@${{ needs.job-prep.outputs.dbt-postgres-ref }}"
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4 # uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
with: # with:
repository: dbt-labs/dbt-adapters # repository: dbt-labs/dbt-adapters
ref: ${{ needs.job-prep.outputs.dbt-postgres-ref }} # ref: ${{ needs.job-prep.outputs.dbt-postgres-ref }}
- name: "Set up Python" # - name: "Set up Python"
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # actions/setup-python@v5 # uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # actions/setup-python@v5
with: # with:
python-version: ${{ inputs.python-version }} # python-version: ${{ inputs.python-version }}
- name: "Set environment variables" # - name: "Set environment variables"
run: | # run: |
echo "HATCH_PYTHON=${{ inputs.python-version }}" >> $GITHUB_ENV # echo "HATCH_PYTHON=${{ inputs.python-version }}" >> $GITHUB_ENV
echo "PIP_ONLY_BINARY=psycopg2-binary" >> $GITHUB_ENV # echo "PIP_ONLY_BINARY=psycopg2-binary" >> $GITHUB_ENV
- name: "Setup test database" # - name: "Setup test database"
run: psql -f ./scripts/setup_test_database.sql # run: psql -f ./scripts/setup_test_database.sql
env: # env:
PGHOST: ${{ vars.POSTGRES_TEST_HOST }} # PGHOST: ${{ vars.POSTGRES_TEST_HOST }}
PGPORT: ${{ vars.POSTGRES_TEST_PORT }} # PGPORT: ${{ vars.POSTGRES_TEST_PORT }}
PGUSER: postgres # PGUSER: postgres
PGPASSWORD: postgres # PGPASSWORD: postgres
PGDATABASE: postgres # PGDATABASE: postgres
- name: "Install hatch" # - name: "Install hatch"
uses: pypa/hatch@257e27e51a6a5616ed08a39a408a21c35c9931bc # pypa/hatch@install # uses: pypa/hatch@257e27e51a6a5616ed08a39a408a21c35c9931bc # pypa/hatch@install
- name: "Run integration tests" # - name: "Run integration tests"
run: hatch run ${{ inputs.hatch-env }}:integration-tests # run: hatch run ${{ inputs.hatch-env }}:integration-tests

View File

@@ -62,7 +62,7 @@ jobs:
           python -m pip --version
           python -m pip install hatch
           cd core
-          hatch run setup
+          hatch -v run setup

       - name: Verify dbt installation
         run: |
@@ -106,7 +106,7 @@ jobs:
         with:
           timeout_minutes: 10
           max_attempts: 3
-          command: cd core && hatch run ci:unit-tests
+          command: cd core && hatch -v run ci:unit-tests

       - name: Get current date
         if: always()
@@ -230,7 +230,7 @@ jobs:
           timeout_minutes: 30
           max_attempts: 3
           shell: bash
-          command: cd core && hatch run ci:integration-tests -- --ddtrace --splits ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }} --group ${{ matrix.split-group }}
+          command: cd core && hatch -v run ci:integration-tests -- --ddtrace --splits ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }} --group ${{ matrix.split-group }}

       - name: Get current date
         if: always()
@@ -311,7 +311,7 @@ jobs:
           timeout_minutes: 30
           max_attempts: 3
           shell: bash
-          command: cd core && hatch run ci:integration-tests -- --ddtrace --splits ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }} --group ${{ matrix.split-group }}
+          command: cd core && hatch -v run ci:integration-tests -- --ddtrace --splits ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }} --group ${{ matrix.split-group }}

       - name: Get current date
         if: always()
@@ -326,7 +326,7 @@ jobs:
           name: logs_${{ matrix.python-version }}_${{ matrix.os }}_${{ matrix.split-group }}_${{ steps.date.outputs.date }}
           path: ./logs

-      - name: Upload Integration Test Coverage to Codecov
+      - name: Upload Integration Test Coverage
         if: ${{ matrix.python-version == '3.11' }}
         uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # codecov/codecov-action@v5
         with:
@@ -380,4 +380,4 @@ jobs:
       - name: Check and verify distributions
         run: |
           cd core
-          hatch run build:check-all
+          hatch -v run build:check-all

View File

@@ -62,7 +62,7 @@ jobs:
- name: "Generate Nightly Release Version Number" - name: "Generate Nightly Release Version Number"
id: nightly-release-version id: nightly-release-version
run: | run: |
number="${{ steps.semver.outputs.version }}.dev${{ steps.current-date.outputs.date }}" number="${{ steps.semver.outputs.version }}.post${{ steps.current-date.outputs.date }}"
echo "number=$number" >> $GITHUB_OUTPUT echo "number=$number" >> $GITHUB_OUTPUT
- name: "Audit Nightly Release Version And Parse Into Parts" - name: "Audit Nightly Release Version And Parse Into Parts"

View File

@@ -123,7 +123,7 @@ jobs:
         with:
           timeout_minutes: 30
           max_attempts: 3
-          command: cd core && hatch run ci:integration-tests -- -nauto
+          command: cd core && hatch -v run ci:integration-tests -- -nauto
         env:
           PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}

View File

@@ -608,6 +608,8 @@ class RuntimeConfigObject(Config):
         if validator is not None:
             self._validate(validator, to_return)
+        return to_return

     def get(self, name, default=None, validator=None):
         to_return = self._lookup(name, default)
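
For context, the bug this one-line fix addresses: without the return statement, meta_require looked the value up and validated it but implicitly returned None to the caller, so the rendered SQL always contained "None". A minimal sketch of the failure mode, using a plain dict in place of the real RuntimeConfigObject:

def meta_require_buggy(meta, name):
    to_return = meta[name]   # lookup succeeds, validation would happen here...
    # ...but with no return, the function falls through and yields None

def meta_require_fixed(meta, name):
    to_return = meta[name]
    return to_return

assert meta_require_buggy({"meta_key": "x"}, "meta_key") is None
assert meta_require_fixed({"meta_key": "x"}, "meta_key") == "x"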

View File

@@ -558,7 +558,10 @@ def _packages_to_search(
     elif current_project == node_package:
         return [current_project, None]
     else:
-        return [current_project, node_package, None]
+        if get_flags().require_ref_searches_node_package_before_root:
+            return [node_package, current_project, None]
+        else:
+            return [current_project, node_package, None]


 def _sort_values(dct):
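
To make the behavior change concrete, here is a minimal sketch (not dbt-core's actual resolver) of how the search order returned above decides which of two same-named nodes a package-internal ref resolves to; it mirrors the unit tests later in this diff:

nodes = {
    ("project_a", "my_model"): "model.project_a.my_model",
    ("root", "my_model"): "model.root.my_model",
}

def resolve(name, search_order):
    # Walk candidate packages in order; None is the "any other package" bucket.
    for package in search_order:
        if package is not None and (package, name) in nodes:
            return nodes[(package, name)]
    return None

# Legacy order (flag off): the root project shadows the package's own node.
assert resolve("my_model", ["root", "project_a", None]) == "model.root.my_model"
# Flag on: the node's own package is searched before the root project.
assert resolve("my_model", ["project_a", "root", None]) == "model.project_a.my_model"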

View File

@@ -367,6 +367,7 @@ class ProjectFlags(ExtensibleDbtClassMixin):
     require_all_warnings_handled_by_warn_error: bool = False
     require_generic_test_arguments_property: bool = True
     require_unique_project_resource_names: bool = False
+    require_ref_searches_node_package_before_root: bool = False

     @property
     def project_only_flags(self) -> Dict[str, Any]:
@@ -384,6 +385,7 @@ class ProjectFlags(ExtensibleDbtClassMixin):
             "require_all_warnings_handled_by_warn_error": self.require_all_warnings_handled_by_warn_error,
             "require_generic_test_arguments_property": self.require_generic_test_arguments_property,
             "require_unique_project_resource_names": self.require_unique_project_resource_names,
+            "require_ref_searches_node_package_before_root": self.require_ref_searches_node_package_before_root,
         }
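
Like the other require_* behavior change flags, this one defaults to False and is opted into per project. A hedged sketch of the opt-in, expressed as the dict the functional tests below pass via project_config_update (the YAML equivalent lives under the flags: key of dbt_project.yml):

# What a project opts into; equivalent to setting the flag in dbt_project.yml.
project_config_update = {
    "flags": {
        "require_ref_searches_node_package_before_root": True,
    }
}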

View File

@@ -1270,6 +1270,19 @@ class InvalidMacroAnnotation(WarnLevel):
         return self.msg


+class PackageNodeDependsOnRootProjectNode(WarnLevel):
+    def code(self) -> str:
+        return "I077"
+
+    def message(self) -> str:
+        msg = (
+            f"The node '{self.node_name}' in package '{self.package_name}' depends on the root project node "
+            f"'{self.root_project_unique_id}'. This may lead to unexpected cycles downstream. "
+            "Please set the 'require_ref_searches_node_package_before_root' behavior change flag to True to avoid this issue. "
+            "For more information, see the documentation at "
+            "https://docs.getdbt.com/reference/global-configs/behavior-changes#require_ref_searches_node_package_before_root"
+        )
+        return warning_tag(msg)
+
+
 # =======================================================
 # M - Deps generation
 # =======================================================

View File

@@ -37,6 +37,10 @@ _HIERARCHICAL_CONFIG_KEYS = {
"unit_tests", "unit_tests",
} }
_ADAPTER_TO_CONFIG_ALIASES = {
"bigquery": ["dataset", "project"],
}
def load_json_from_package(jsonschema_type: str, filename: str) -> Dict[str, Any]: def load_json_from_package(jsonschema_type: str, filename: str) -> Dict[str, Any]:
"""Loads a JSON file from within a package.""" """Loads a JSON file from within a package."""
@@ -106,6 +110,16 @@ def _validate_with_schema(
return validator.iter_errors(json) return validator.iter_errors(json)
def _get_allowed_config_key_aliases() -> List[str]:
config_aliases = []
invocation_context = get_invocation_context()
for adapter in invocation_context.adapter_types:
if adapter in _ADAPTER_TO_CONFIG_ALIASES:
config_aliases.extend(_ADAPTER_TO_CONFIG_ALIASES[adapter])
return config_aliases
def _get_allowed_config_fields_from_error_path( def _get_allowed_config_fields_from_error_path(
yml_schema: Dict[str, Any], error_path: List[Union[str, int]] yml_schema: Dict[str, Any], error_path: List[Union[str, int]]
) -> Optional[List[str]]: ) -> Optional[List[str]]:
@@ -135,6 +149,7 @@ def _get_allowed_config_fields_from_error_path(
][0]["$ref"].split("/")[-1] ][0]["$ref"].split("/")[-1]
allowed_config_fields = list(set(yml_schema["definitions"][config_field_name]["properties"])) allowed_config_fields = list(set(yml_schema["definitions"][config_field_name]["properties"]))
allowed_config_fields.extend(_get_allowed_config_key_aliases())
return allowed_config_fields return allowed_config_fields
@@ -169,7 +184,6 @@ def jsonschema_validate(schema: Dict[str, Any], json: Dict[str, Any], file_path:
continue continue
if key == "overrides" and key_path.startswith("sources"): if key == "overrides" and key_path.startswith("sources"):
deprecations.warn( deprecations.warn(
"source-override-deprecation", "source-override-deprecation",
source_name=key_path.split(".")[-1], source_name=key_path.split(".")[-1],
@@ -205,6 +219,9 @@ def jsonschema_validate(schema: Dict[str, Any], json: Dict[str, Any], file_path:
keys = _additional_properties_violation_keys(sub_error) keys = _additional_properties_violation_keys(sub_error)
key_path = error_path_to_string(error) key_path = error_path_to_string(error)
for key in keys: for key in keys:
if key in _get_allowed_config_key_aliases():
continue
deprecations.warn( deprecations.warn(
"custom-key-in-config-deprecation", "custom-key-in-config-deprecation",
key=key, key=key,
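
The alias allow-listing above only suppresses the custom-key deprecation when a registered adapter actually supports the alias. A standalone re-implementation (hedged; the real code reads adapter_types from the invocation context) showing why the snowflake test at the bottom of this diff still records two warnings:

_ADAPTER_TO_CONFIG_ALIASES = {"bigquery": ["dataset", "project"]}

def allowed_aliases(adapter_types):
    aliases = []
    for adapter in adapter_types:
        aliases.extend(_ADAPTER_TO_CONFIG_ALIASES.get(adapter, []))
    return aliases

# With bigquery in play, "dataset" and "project" are skipped by the key check.
assert allowed_aliases(["bigquery"]) == ["dataset", "project"]
# With only snowflake, neither key is allow-listed, so both keys warn.
assert allowed_aliases(["snowflake"]) == []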

View File

@@ -77,6 +77,7 @@ from dbt.events.types import (
     InvalidDisabledTargetInTestNode,
     MicrobatchModelNoEventTimeInputs,
     NodeNotFoundOrDisabled,
+    PackageNodeDependsOnRootProjectNode,
     ParsedFileLoadFailed,
     ParsePerfInfoPath,
     PartialParsingError,
@@ -1636,6 +1637,33 @@ def invalid_target_fail_unless_test(
         )


+def warn_if_package_node_depends_on_root_project_node(
+    node: ManifestNode,
+    target_model: ManifestNode,
+    ref_package_name: Optional[str],
+    current_project: str,
+) -> None:
+    """
+    Args:
+        node: The node that specifies the ref
+        target_model: The node that is being ref'd to
+        ref_package_name: The package name specified in the ref
+        current_project: The root project
+    """
+    if (
+        node.package_name != current_project
+        and target_model.package_name == current_project
+        and ref_package_name != current_project
+    ):
+        warn_or_error(
+            PackageNodeDependsOnRootProjectNode(
+                node_name=node.name,
+                package_name=node.package_name,
+                root_project_unique_id=target_model.unique_id,
+            )
+        )
+
+
 def _build_model_names_to_versions(manifest: Manifest) -> Dict[str, Dict]:
     model_names_to_versions: Dict[str, Dict] = {}
     for node in manifest.nodes.values():
@@ -1893,6 +1921,11 @@ def _process_refs(
                 scope=target_model.package_name,
             )

+        if not get_flags().require_ref_searches_node_package_before_root:
+            warn_if_package_node_depends_on_root_project_node(
+                node, target_model, ref.package, current_project
+            )
+
         target_model_id = target_model.unique_id
         node.depends_on.add_node(target_model_id)
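
The warning condition reduces to three checks on package names. A minimal sketch with plain strings in place of ManifestNode objects, matching the functional tests below: the warning fires only when a package node silently resolves to a root-project node, not when the root package is named explicitly in the ref:

def should_warn(node_pkg, target_pkg, ref_pkg, root="root"):
    return node_pkg != root and target_pkg == root and ref_pkg != root

assert should_warn("my_package", "root", None)        # implicit ref resolved to a root node: warn
assert not should_warn("my_package", "root", "root")  # root package named in the ref: no warning
assert not should_warn("root", "root", None)          # root-project node refs are always fine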

View File

@@ -249,34 +249,17 @@ class GraphRunnableTask(ConfiguredTask):
                 thread_exception = e
             finally:
                 if result is not None:
-                    fire_event(
-                        NodeFinished(
-                            node_info=runner.node.node_info,
-                            run_result=result.to_msg_dict(),
+                    try:
+                        fire_event(
+                            NodeFinished(
+                                node_info=runner.node.node_info,
+                                run_result=result.to_msg_dict(),
+                            )
                         )
-                    )
+                    except Exception as e:
+                        result = self._handle_thread_exception(runner, e)
                 else:
-                    msg = f"Exception on worker thread. {thread_exception}"
-                    fire_event(
-                        GenericExceptionOnRun(
-                            unique_id=runner.node.unique_id,
-                            exc=str(thread_exception),
-                            node_info=runner.node.node_info,
-                        )
-                    )
-                    result = RunResult(
-                        status=RunStatus.Error,  # type: ignore
-                        timing=[],
-                        thread_id="",
-                        execution_time=0.0,
-                        adapter_response={},
-                        message=msg,
-                        failures=None,
-                        batch_results=None,
-                        node=runner.node,
-                    )
+                    result = self._handle_thread_exception(runner, thread_exception)

         # `_event_status` dict is only used for logging. Make sure
         # it gets deleted when we're done with it
@@ -365,6 +348,32 @@ class GraphRunnableTask(ConfiguredTask):
             args = [runner]
             self._submit(pool, args, callback)

+    def _handle_thread_exception(
+        self,
+        runner: BaseRunner,
+        thread_exception: Optional[Union[KeyboardInterrupt, SystemExit, Exception]],
+    ) -> RunResult:
+        msg = f"Exception on worker thread. {thread_exception}"
+        fire_event(
+            GenericExceptionOnRun(
+                unique_id=runner.node.unique_id,
+                exc=str(thread_exception),
+                node_info=runner.node.node_info,
+            )
+        )
+        return RunResult(
+            status=RunStatus.Error,  # type: ignore
+            timing=[],
+            thread_id="",
+            execution_time=0.0,
+            adapter_response={},
+            message=msg,
+            failures=None,
+            batch_results=None,
+            node=runner.node,
+        )
+
     def _handle_result(self, result: RunResult) -> None:
         """Mark the result as completed, insert the `CompileResultNode` into
         the manifest, and mark any descendants (potentially with a 'cause' if
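
The net effect of the refactor: an exception raised by a NodeFinished event callback no longer escapes the worker thread; it is routed through the same error path as any other thread exception. A hedged sketch of the new control flow with stand-in callables:

def finish(fire_event, handle_thread_exception, result, thread_exception=None):
    if result is not None:
        try:
            fire_event(result)  # may invoke user callbacks that raise
        except Exception as e:
            result = handle_thread_exception(e)
    else:
        result = handle_thread_exception(thread_exception)
    return result

def raising_callback(result):
    raise Exception("boom")

ok = finish(lambda r: None, lambda e: ("error", str(e)), "success")
assert ok == "success"
errored = finish(raising_callback, lambda e: ("error", str(e)), "success")
assert errored == ("error", "boom")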

View File

@@ -37,7 +37,7 @@ dependencies = [
     # ----
     # dbt-core uses these packages in standard ways. Pin to the major version, and check compatibility
     # with major versions in each new minor version of dbt-core.
-    "click>=8.0.2,<9.0",
+    "click>=8.2.0,<9.0",
     "jsonschema>=4.19.1,<5.0",
     "networkx>=2.3,<4.0",
     "protobuf>=6.0,<7.0",
@@ -47,14 +47,14 @@ dependencies = [
     # These packages are major-version-0. Keep upper bounds on upcoming minor versions (which could have breaking changes)
     # and check compatibility / bump in each new minor version of dbt-core.
     "pathspec>=0.9,<0.13",
-    "sqlparse>=0.5.0,<0.6.0",
+    "sqlparse>=0.5.0,<0.5.5",
     # ----
     # These are major-version-0 packages also maintained by dbt-labs.
     # Accept patches but avoid automatically updating past a set minor version range.
     "dbt-extractor>=0.5.0,<=0.6",
     "dbt-semantic-interfaces>=0.9.0,<0.10",
     # Minor versions for these are expected to be backwards-compatible
-    "dbt-common>=1.37.0,<2.0",
+    "dbt-common>=1.37.2,<2.0",
     "dbt-adapters>=1.15.5,<2.0",
     "dbt-protos>=1.0.405,<2.0",
     "pydantic<3",

View File

@@ -33,7 +33,7 @@ select {{ config.require('meta_key') }} as col_value
 meta_model_meta_require_sql = """
 -- models/meta_model.sql
-select {{ config.require('meta_key') }} as col_value
+select {{ config.meta_require('meta_key') }} as col_value
 """

@@ -66,11 +66,11 @@ class TestConfigGetMeta:
         self,
         project,
     ):
-        # This test runs a model with a config.get(key, default)
+        # This test runs a model with a config.get(key, default) -> default value returned
         results = run_dbt(["run"], expect_pass=False)
         assert len(results) == 1
         assert str(results[0].status) == "error"
-        assert 'column "my_meta_value" does not exist' in results[0].message
+        assert 'column "meta_default_value" does not exist' in results[0].message

         write_file(meta_model_meta_get_sql, "models", "meta_model.sql")
         results = run_dbt(["run"], expect_pass=False)
@@ -95,7 +95,7 @@ class TestConfigGetMetaRequire:
         results = run_dbt(["run"], expect_pass=False)
         assert len(results) == 1
         assert str(results[0].status) == "error"
-        assert 'column "my_meta_value" does not exist' in results[0].message
+        assert "does not define a required config parameter 'meta_key'" in results[0].message

         write_file(meta_model_meta_require_sql, "models", "meta_model.sql")
         results = run_dbt(["run"], expect_pass=False)

View File

@@ -55,6 +55,22 @@ class TestDbtRunner:
dbt.invoke(["debug"]) dbt.invoke(["debug"])
mock_callback.assert_called() mock_callback.assert_called()
def test_callback_node_finished_exceptions_are_raised(self, project):
from dbt_common.events.base_types import EventMsg
def callback_with_exception(event: EventMsg):
if event.info.name == "NodeFinished":
raise Exception("This should let continue the execution registering the failure")
dbt = dbtRunner(callbacks=[callback_with_exception])
result = dbt.invoke(["run", "--select", "models"])
assert result is not None
assert (
result.result.results[0].message
== "Exception on worker thread. This should let continue the execution registering the failure"
)
def test_invoke_kwargs(self, project, dbt): def test_invoke_kwargs(self, project, dbt):
res = dbt.invoke( res = dbt.invoke(
["run"], ["run"],

View File

@@ -0,0 +1,22 @@
name: 'inverted_ref_dependency'
version: '1.0'
config-version: 2

profile: 'default'

model-paths: ["models"]
analysis-paths: ["analyses"]
test-paths: ["tests"]
seed-paths: ["seeds"]
macro-paths: ["macros"]

require-dbt-version: '>=0.1.0'

target-path: "target"  # directory which will store compiled SQL files
clean-targets: # directories to be removed by `dbt clean`
  - "target"
  - "dbt_packages"

seeds:
  quote_columns: False

View File

@@ -0,0 +1,3 @@
{{ config(alias='package_a')}}
select 1 as id

View File

@@ -0,0 +1 @@
select * from {{ ref('a') }}

View File

@@ -0,0 +1 @@
select * from {{ ref('test', 'a') }}

View File

@@ -0,0 +1,78 @@
import shutil
from pathlib import Path

import pytest

from dbt.events.types import PackageNodeDependsOnRootProjectNode
from dbt.tests.util import run_dbt
from dbt_common.events.event_catcher import EventCatcher


class BaseInvertedRefDependencyTest(object):
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "a.sql": "select 1 as id",
        }

    @pytest.fixture(scope="class", autouse=True)
    def setUp(self, project):
        shutil.copytree(
            project.test_dir / Path("inverted_ref_dependency"),
            project.project_root / Path("inverted_ref_dependency"),
        )

    @pytest.fixture(scope="class")
    def packages(self):
        return {"packages": [{"local": "inverted_ref_dependency"}]}


class TestInvertedRefDependency(BaseInvertedRefDependencyTest):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "flags": {
                "require_ref_searches_node_package_before_root": True,
            }
        }

    def test_inverted_ref_dependency(self, project):
        event_catcher = EventCatcher(PackageNodeDependsOnRootProjectNode)
        run_dbt(["deps"])
        manifest = run_dbt(["parse"], callbacks=[event_catcher.catch])
        assert len(manifest.nodes) == 4
        # Correct behavior - package node depends on node from same package
        assert manifest.nodes["model.inverted_ref_dependency.b"].depends_on.nodes == [
            "model.inverted_ref_dependency.a"
        ]
        # If a package explicitly references a root project node, it still resolves to the root project
        assert manifest.nodes[
            "model.inverted_ref_dependency.b_root_package_in_ref"
        ].depends_on.nodes == ["model.test.a"]
        # No inverted ref warning raised
        assert len(event_catcher.caught_events) == 0


class TestInvertedRefDependencyLegacy(BaseInvertedRefDependencyTest):
    def test_inverted_ref_dependency(self, project):
        event_catcher = EventCatcher(PackageNodeDependsOnRootProjectNode)
        run_dbt(["deps"])
        manifest = run_dbt(["parse"], callbacks=[event_catcher.catch])
        assert len(manifest.nodes) == 4
        # Legacy behavior - package node depends on node from root project
        assert manifest.nodes["model.inverted_ref_dependency.b"].depends_on.nodes == [
            "model.test.a"
        ]
        assert manifest.nodes[
            "model.inverted_ref_dependency.b_root_package_in_ref"
        ].depends_on.nodes == ["model.test.a"]
        # Inverted ref warning raised - only for b, not b_root_package_in_ref
        assert len(event_catcher.caught_events) == 1
        assert event_catcher.caught_events[0].data.node_name == "b"
        assert event_catcher.caught_events[0].data.package_name == "inverted_ref_dependency"

View File

@@ -220,10 +220,16 @@ models_yml = """
 models:
   - name: abcd
     description: "abcd model"
+    versions:
+      - v: 1
   - name: efgh
     description: "efgh model"
+    versions:
+      - v: 1
   - name: ijkl
     description: "ijkl model"
+    versions:
+      - v: 1
 """

 append_sources_yml = """
@@ -233,6 +239,8 @@ append_sources_yml = """
 append_models_yml = """
   - name: mnop
     description: "mnop model"
+    versions:
+      - v: 1
 """

 mnop_sql = """
@@ -245,9 +253,9 @@ class TestSourcesAndSchemaFiles:
     def models(self):
         return {
             "sources.yml": sources_yml,
-            "abcd.sql": abcd_sql,
-            "efgh.sql": efgh_sql,
-            "ijkl.sql": ijkl_sql,
+            "abcd_v1.sql": abcd_sql,
+            "efgh_v1.sql": efgh_sql,
+            "ijkl_v1.sql": ijkl_sql,
             "_models.yml": models_yml,
         }
@@ -258,7 +266,7 @@ class TestSourcesAndSchemaFiles:
         assert len(manifest.nodes) == 3

         write_file(models_yml + append_models_yml, project.project_root, "models", "_models.yml")
-        write_file(mnop_sql, project.project_root, "models", "mnop.sql")
+        write_file(mnop_sql, project.project_root, "models", "mnop_v1.sql")
         write_file(sources_yml + append_sources_yml, project.project_root, "models", "sources.yml")
         manifest = run_dbt(["parse"])
@@ -268,3 +276,4 @@ class TestSourcesAndSchemaFiles:
         # the patch updates, including description, so description will be ""
         for node in manifest.nodes.values():
             assert node.description == f"{node.name} model"
+            assert node.unique_id.endswith(".v1")

View File

@@ -10,7 +10,6 @@ from unittest import mock
 import freezegun
 import pytest

-import dbt.flags
 import dbt.version
 import dbt_common.invocation
 from dbt import tracking
@@ -1788,6 +1787,19 @@ def _ambiguous_ref_parameter_sets():
     return sets


+def _duplicate_node_name_across_packages_ref_parameter_sets():
+    sets = [
+        FindNodeSpec(
+            nodes=[MockNode("project_a", "my_model"), MockNode("root", "my_model")],
+            sources=[],
+            package=None,
+            version=None,
+            expected=("project_a", "my_model"),
+        ),
+    ]
+    return sets
+
+
 def id_nodes(arg):
     if isinstance(arg, list):
         node_names = "__".join(f"{n.package_name}_{n.search_name}" for n in arg)
@@ -1849,6 +1861,60 @@ def test_resolve_ref_ambiguous_resource_name_across_packages(
     )


+@pytest.mark.parametrize(
+    "nodes,sources,package,version,expected",
+    _duplicate_node_name_across_packages_ref_parameter_sets(),
+    ids=id_nodes,
+)
+def test_resolve_ref_with_node_package_legacy(nodes, sources, package, version, expected):
+    set_from_args(
+        Namespace(
+            SEND_ANONYMOUS_USAGE_STATS=False,
+            REQUIRE_REF_SEARCHES_NODE_PACKAGE_BEFORE_ROOT=False,
+        ),
+        None,
+    )
+    manifest = make_manifest(nodes=nodes, sources=sources)
+    result = manifest.resolve_ref(
+        source_node=None,
+        target_model_name="my_model",
+        target_model_package=package,
+        target_model_version=version,
+        current_project="root",
+        node_package="project_a",
+    )
+    assert result.name == "my_model"
+    assert result.package_name == "root"
+
+
+@pytest.mark.parametrize(
+    "nodes,sources,package,version,expected",
+    _duplicate_node_name_across_packages_ref_parameter_sets(),
+    ids=id_nodes,
+)
+def test_resolve_ref_with_node_package(nodes, sources, package, version, expected):
+    set_from_args(
+        Namespace(
+            SEND_ANONYMOUS_USAGE_STATS=False,
+            REQUIRE_REF_SEARCHES_NODE_PACKAGE_BEFORE_ROOT=True,
+        ),
+        None,
+    )
+    manifest = make_manifest(nodes=nodes, sources=sources)
+    result = manifest.resolve_ref(
+        source_node=None,
+        target_model_name="my_model",
+        target_model_package=package,
+        target_model_version=version,
+        current_project="root",
+        node_package="project_a",
+    )
+    assert result.name == "my_model"
+    assert result.package_name == "project_a"
+
+
 def _source_parameter_sets():
     sets = [
         # empties
@@ -1931,6 +1997,13 @@ def _source_parameter_sets():
     ids=id_nodes,
 )
 def test_resolve_source(nodes, sources, package, version, expected):
+    set_from_args(
+        Namespace(
+            SEND_ANONYMOUS_USAGE_STATS=False,
+            REQUIRE_REF_SEARCHES_NODE_PACKAGE_BEFORE_ROOT=False,
+        ),
+        None,
+    )
     manifest = make_manifest(nodes=nodes, sources=sources)
     result = manifest.resolve_source(
         target_source_name="my_source",

View File

@@ -311,6 +311,9 @@ sample_values = [
     core_types.MicrobatchModelNoEventTimeInputs(model_name=""),
     core_types.InvalidConcurrentBatchesConfig(num_models=1, adapter_type=""),
     core_types.InvalidMacroAnnotation(msg="", macro_file_path="", macro_unique_id=""),
+    core_types.PackageNodeDependsOnRootProjectNode(
+        node_name="", package_name="", root_project_unique_id=""
+    ),
     # M - Deps generation ======================
     core_types.GitSparseCheckoutSubdirectory(subdir=""),
     core_types.GitProgressCheckoutRevision(revision=""),

View File

@@ -1,9 +1,17 @@
+import pytest
+
 from dbt.deprecations import (
     CustomKeyInConfigDeprecation,
     CustomKeyInObjectDeprecation,
     GenericJSONSchemaValidationDeprecation,
+    active_deprecations,
+    reset_deprecations,
+)
+from dbt.jsonschemas.jsonschemas import (
+    jsonschema_validate,
+    resources_schema,
+    validate_model_config,
 )
-from dbt.jsonschemas.jsonschemas import validate_model_config
 from dbt.tests.util import safe_set_invocation_context
 from dbt_common.context import get_invocation_context
 from dbt_common.events.event_catcher import EventCatcher
@@ -48,3 +56,38 @@ class TestValidateModelConfigNoError:
         assert len(ckicd_catcher.caught_events) == 1
         assert ckicd_catcher.caught_events[0].data.key == "non_existent_config"
         assert len(gjsvd_catcher.caught_events) == 0
+
+
+class TestValidateJsonSchema:
+    @pytest.fixture(scope="class")
+    def model_bigquery_alias_config_contents(self):
+        return {
+            "models": [
+                {
+                    "name": "model_1",
+                    "config": {
+                        "dataset": "dataset_1",
+                        "project": "project_1",
+                    },
+                }
+            ],
+        }
+
+    def test_validate_json_schema_no_error_aliases(self, model_bigquery_alias_config_contents):
+        reset_deprecations()
+        safe_set_invocation_context()
+        get_invocation_context().uses_adapter("bigquery")
+        jsonschema_validate(resources_schema(), model_bigquery_alias_config_contents, "test.yml")
+        assert active_deprecations == {}
+
+    def test_validate_json_schema_has_error_aliases(self, model_bigquery_alias_config_contents):
+        reset_deprecations()
+        safe_set_invocation_context()
+        # Use an adapter that doesn't support the specified aliases
+        get_invocation_context().uses_adapter("snowflake")
+        jsonschema_validate(resources_schema(), model_bigquery_alias_config_contents, "test.yml")
+        assert active_deprecations == {"custom-key-in-config-deprecation": 2}