Mirror of https://github.com/dbt-labs/dbt-core, synced 2025-12-23 11:21:27 +00:00.

Compare commits: enable-pos...feature-br (18 commits)

90305a509e
d3f01e04da
14d6e3f1ff
5de6c8f623
0e05af28f7
1d5bae5ff5
21a477973f
94439bac20
dbc7c07f19
668e114330
04f579a37b
f10db08287
b1c859da4c
caa8cf2384
0b9d371c0c
3d707bc242
72e4fdef36
f1bd3f718c
.changes/unreleased/Features-20250414-171209.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Features
+body: Basic jsonschema validation of `dbt_project.yml`
+time: 2025-04-14T17:12:09.351572-07:00
+custom:
+  Author: QMalcolm
+  Issue: "11503"
.changes/unreleased/Features-20250415-133751.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Features
+body: Begin checking YAML files for duplicate keys
+time: 2025-04-15T13:37:51.878256-04:00
+custom:
+  Author: peterallenwebb QMalcolm
+  Issue: "11296"
.changes/unreleased/Features-20250416-144427.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Features
+body: Add deprecation warnings for unexpected blocks in jinja.
+time: 2025-04-16T14:44:27.136199-04:00
+custom:
+  Author: peterallenwebb
+  Issue: "11393"
.changes/unreleased/Features-20250416-151711.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Features
+body: Begin validating the jsonschema of resource YAML files
+time: 2025-04-16T15:17:11.760509-07:00
+custom:
+  Author: QMalcolm
+  Issue: "11504"
.changes/unreleased/Features-20250417-001347.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Features
+body: Add deprecation warning for custom top level keys in YAML files.
+time: 2025-04-17T00:13:47.744191-04:00
+custom:
+  Author: peterallenwebb
+  Issue: "11338"
.changes/unreleased/Features-20250417-114915.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Features
+body: Begin emitting deprecation warnings for custom keys in config blocks
+time: 2025-04-17T11:49:15.056242-07:00
+custom:
+  Author: QMalcolm
+  Issue: "11337"
.changes/unreleased/Features-20250418-104210.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Features
+body: Begin emitting deprecation events for custom properties found in objects
+time: 2025-04-18T10:42:10.048839-07:00
+custom:
+  Author: QMalcolm
+  Issue: "11336"
.changes/unreleased/Features-20250424-153327.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Features
+body: Create a singular deprecations summary event
+time: 2025-04-24T15:33:27.252763-05:00
+custom:
+  Author: QMalcolm
+  Issue: "11536"
.changes/unreleased/Fixes-20250414-082916.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Install `pre-commit` in Docker container for running tests
+time: 2025-04-14T08:29:16.392175-06:00
+custom:
+  Author: dbeatty10
+  Issue: "11498"
@@ -50,7 +50,7 @@ RUN curl -LO https://github.com/jwilder/dockerize/releases/download/$DOCKERIZE_V
     && tar -C /usr/local/bin -xzvf dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
     && rm dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz
 
-RUN pip3 install -U tox wheel six setuptools
+RUN pip3 install -U tox wheel six setuptools pre-commit
 
 # These args are passed in via docker-compose, which reads them from the .env file.
 # On Linux, run `make .env` to create the .env file for the current user.
Makefile (2 lines changed)
@@ -35,7 +35,7 @@ dev_req: ## Installs dbt-* packages in develop mode along with only development
 .PHONY: dev
 dev: dev_req ## Installs dbt-* packages in develop mode along with development dependencies and pre-commit.
 	@\
-	pre-commit install
+	$(DOCKER_CMD) pre-commit install
 
 .PHONY: dev-uninstall
 dev-uninstall: ## Uninstall all packages in venv except for build tools
@@ -1,3 +1,3 @@
-recursive-include dbt/include *.py *.sql *.yml *.html *.md .gitkeep .gitignore
+recursive-include dbt/include *.py *.sql *.yml *.html *.md .gitkeep .gitignore *.json
 include dbt/py.typed
 recursive-include dbt/task/docs *.html
@@ -16,7 +16,7 @@ from dbt.config.catalogs import get_active_write_integration, load_catalogs
 from dbt.config.runtime import UnsetProfile, load_profile, load_project
 from dbt.context.providers import generate_runtime_macro_context
 from dbt.context.query_header import generate_query_header_context
-from dbt.deprecations import show_all_deprecation_summaries
+from dbt.deprecations import show_deprecations_summary
 from dbt.events.logging import setup_event_logger
 from dbt.events.types import (
     ArtifactUploadError,
@@ -181,7 +181,7 @@ def postflight(func):
         except Exception as e:
             fire_event(ArtifactUploadError(msg=str(e)))
 
-        show_all_deprecation_summaries()
+        show_deprecations_summary()
 
         if importlib.util.find_spec("resource") is not None:
             import resource
@@ -267,8 +267,10 @@ def project(func):
             raise DbtProjectError("profile required for project")
 
         flags = ctx.obj["flags"]
+        # TODO deprecations warnings fired from loading the project will lack
+        # the project_id in the snowplow event.
         project = load_project(
-            flags.PROJECT_DIR, flags.VERSION_CHECK, ctx.obj["profile"], flags.VARS
+            flags.PROJECT_DIR, flags.VERSION_CHECK, ctx.obj["profile"], flags.VARS, validate=True
        )
         ctx.obj["project"] = project
core/dbt/clients/checked_load.py (new file, 78 lines)
@@ -0,0 +1,78 @@
+import collections
+import dataclasses
+from typing import Any, Dict, List, Optional, Tuple
+
+import yaml
+
+from dbt import deprecations
+from dbt.clients.yaml_helper import load_yaml_text
+
+# the C version is faster, but it doesn't always exist
+try:
+    from yaml import CSafeLoader as SafeLoader
+except ImportError:
+    from yaml import SafeLoader  # type: ignore # noqa: F401
+
+
+@dataclasses.dataclass
+class YamlCheckFailure:
+    failure_type: str
+    message: str
+
+
+def checked_load(contents) -> Tuple[Optional[Dict[str, Any]], List[YamlCheckFailure]]:
+    # A hacky (but sadly justified) method for modifying a bit of PyYAML. We create
+    # a new local subclass of SafeLoader, since we need to associate state with
+    # the static class, but static classes do not have non-static state. This allows
+    # us to be sure we have exclusive access to the class.
+    class CheckedLoader(SafeLoader):
+        check_failures: List[YamlCheckFailure] = []
+
+        def construct_mapping(self, node, deep=False):
+            if not isinstance(node, yaml.MappingNode):
+                raise yaml.constructor.ConstructorError(
+                    None, None, "expected a mapping node, but found %s" % node.id, node.start_mark
+                )
+            self.flatten_mapping(node)
+            mapping = {}
+            for key_node, value_node in node.value:
+                key = self.construct_object(key_node, deep=deep)
+                if not isinstance(key, collections.abc.Hashable):
+                    raise yaml.constructor.ConstructorError(
+                        "while constructing a mapping",
+                        node.start_mark,
+                        "found unhashable key",
+                        key_node.start_mark,
+                    )
+                value = self.construct_object(value_node, deep=deep)
+
+                if key in mapping:
+                    start_mark = str(key_node.start_mark)
+                    if start_mark.startswith(" in"):  # this means it was at the top level
+                        message = f"Duplicate key '{key}' {start_mark.lstrip()}"
+                    else:
+                        message = f"Duplicate key '{key}' at {key_node.start_mark}"
+
+                    self.check_failures.append(YamlCheckFailure("duplicate_key", message))
+
+                mapping[key] = value
+            return mapping
+
+    CheckedLoader.add_constructor(
+        yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, CheckedLoader.construct_mapping
+    )
+
+    dct = load_yaml_text(contents, loader=CheckedLoader)
+    check_failures = CheckedLoader.check_failures
+
+    return (dct, check_failures)
+
+
+def issue_deprecation_warnings_for_failures(failures: List[YamlCheckFailure], file: str):
+    for failure in failures:
+        if failure.failure_type == "duplicate_key":
+            deprecations.warn(
+                "duplicate-yaml-keys-deprecation",
+                duplicate_description=failure.message,
+                file=file,
+            )
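Aside: a minimal usage sketch (not part of the diff) of the new entry point; the YAML string is illustrative. checked_load returns both the parsed mapping and any YamlCheckFailure records, and as with plain SafeLoader the last duplicate key wins:

    from dbt.clients.checked_load import checked_load

    contents = "models:\n  - name: orders\nmodels:\n  - name: customers\n"

    dct, failures = checked_load(contents)
    # dct == {"models": [{"name": "customers"}]} -- the later key overwrote the earlier one
    for failure in failures:
        # failure.failure_type == "duplicate_key"; the message carries the source mark
        print(failure.message)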
@@ -56,9 +56,9 @@ def safe_load(contents) -> Optional[Dict[str, Any]]:
     return yaml.load(contents, Loader=SafeLoader)
 
 
-def load_yaml_text(contents, path=None):
+def load_yaml_text(contents, path=None, loader=SafeLoader) -> Optional[Dict[str, Any]]:
     try:
-        return safe_load(contents)
+        return yaml.load(contents, loader)
     except (yaml.scanner.ScannerError, yaml.YAMLError) as e:
         if hasattr(e, "problem_mark"):
             error = contextualized_yaml_error(contents, e)
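The new loader parameter is what lets checked_load() inject its duplicate-key-tracking CheckedLoader while every existing caller keeps the old default; a one-line illustration (values made up):

    from dbt.clients.yaml_helper import load_yaml_text

    data = load_yaml_text("name: jaffle_shop")  # default SafeLoader, behavior unchanged
    # data == {"name": "jaffle_shop"}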
@@ -8,6 +8,10 @@ from typing_extensions import Protocol, runtime_checkable
 
 from dbt import deprecations
 from dbt.adapters.contracts.connection import QueryComment
+from dbt.clients.checked_load import (
+    checked_load,
+    issue_deprecation_warnings_for_failures,
+)
 from dbt.clients.yaml_helper import load_yaml_text
 from dbt.config.selectors import SelectorDict
 from dbt.config.utils import normalize_warn_error_options
@@ -29,6 +33,7 @@ from dbt.exceptions import (
 )
 from dbt.flags import get_flags
 from dbt.graph import SelectionSpec
+from dbt.jsonschemas import jsonschema_validate, project_schema
 from dbt.node_types import NodeType
 from dbt.utils import MultiDict, coerce_dict_str, md5
 from dbt.version import get_installed_version
@@ -86,9 +91,14 @@ class IsFQNResource(Protocol):
     package_name: str
 
 
-def _load_yaml(path):
+def _load_yaml(path, validate: bool = False):
     contents = load_file_contents(path)
-    return load_yaml_text(contents)
+    if validate:
+        result, failures = checked_load(contents)
+        issue_deprecation_warnings_for_failures(failures=failures, file=path)
+        return result
+    else:
+        return load_yaml_text(contents)
 
 
 def load_yml_dict(file_path):
@@ -182,7 +192,7 @@ def value_or(value: Optional[T], default: T) -> T:
     return value
 
 
-def load_raw_project(project_root: str) -> Dict[str, Any]:
+def load_raw_project(project_root: str, validate: bool = False) -> Dict[str, Any]:
     project_root = os.path.normpath(project_root)
     project_yaml_filepath = os.path.join(project_root, DBT_PROJECT_FILE_NAME)
 
@@ -194,7 +204,12 @@ def load_raw_project(project_root: str) -> Dict[str, Any]:
         )
     )
 
-    project_dict = _load_yaml(project_yaml_filepath)
+    project_dict = _load_yaml(project_yaml_filepath, validate=validate)
+
+    if validate:
+        jsonschema_validate(
+            schema=project_schema(), json=project_dict, file_path=project_yaml_filepath
+        )
 
     if not isinstance(project_dict, dict):
         raise DbtProjectError(f"{DBT_PROJECT_FILE_NAME} does not parse to a dictionary")
@@ -534,7 +549,7 @@ class PartialProject(RenderComponents):
         project_root: str,
         project_dict: Dict[str, Any],
         packages_dict: Dict[str, Any],
-        selectors_dict: Dict[str, Any],
+        selectors_dict: Optional[Dict[str, Any]],
         *,
         verify_version: bool = False,
         packages_specified_path: str = PACKAGES_FILE_NAME,
@@ -550,17 +565,17 @@ class PartialProject(RenderComponents):
             project_root=project_root,
             project_dict=project_dict,
             packages_dict=packages_dict,
-            selectors_dict=selectors_dict,
+            selectors_dict=selectors_dict,  # type: ignore
             verify_version=verify_version,
             packages_specified_path=packages_specified_path,
         )
 
     @classmethod
     def from_project_root(
-        cls, project_root: str, *, verify_version: bool = False
+        cls, project_root: str, *, verify_version: bool = False, validate: bool = False
     ) -> "PartialProject":
         project_root = os.path.normpath(project_root)
-        project_dict = load_raw_project(project_root)
+        project_dict = load_raw_project(project_root, validate=validate)
         (
             packages_dict,
             packages_specified_path,
@@ -747,8 +762,11 @@ class Project:
         renderer: DbtProjectYamlRenderer,
         *,
         verify_version: bool = False,
+        validate: bool = False,
     ) -> "Project":
-        partial = PartialProject.from_project_root(project_root, verify_version=verify_version)
+        partial = PartialProject.from_project_root(
+            project_root, verify_version=verify_version, validate=validate
+        )
         return partial.render(renderer)
 
     def hashed_name(self):
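Taken together these changes thread an opt-in validate flag through project loading; a hedged sketch of the resulting path (module path per dbt-core's layout, project directory made up):

    from dbt.config.project import load_raw_project

    # validate defaults to False everywhere, so existing callers are unaffected;
    # the CLI's @project decorator (see the requires.py hunks above) passes validate=True.
    project_dict = load_raw_project("/path/to/project", validate=True)
    # 1. dbt_project.yml is parsed via checked_load -> duplicate-key deprecation warnings
    # 2. the parsed dict is checked against project_schema() -> jsonschema deprecation warnings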
@@ -50,11 +50,12 @@ def load_project(
     version_check: bool,
     profile: HasCredentials,
     cli_vars: Optional[Dict[str, Any]] = None,
+    validate: bool = False,
 ) -> Project:
     # get the project with all of the provided information
     project_renderer = DbtProjectYamlRenderer(profile, cli_vars)
     project = Project.from_project_root(
-        project_root, project_renderer, verify_version=version_check
+        project_root, project_renderer, verify_version=version_check, validate=validate
     )
 
     # Save env_vars encountered in rendering for partial parsing
@@ -1,6 +1,6 @@
 from copy import deepcopy
 from pathlib import Path
-from typing import Any, Dict, Union
+from typing import Any, Dict, Optional, Union
 
 from dbt.clients.yaml_helper import Dumper, Loader, load_yaml_text, yaml  # noqa: F401
 from dbt.contracts.selection import SelectorFile
@@ -78,6 +78,8 @@ class SelectorConfig(Dict[str, Dict[str, Union[SelectionSpec, bool]]]):
     ) -> "SelectorConfig":
         try:
             data = load_yaml_text(load_file_contents(str(path)))
+            if data is None:
+                raise ValidationError("No data found in selector file at path: {path}")
         except (ValidationError, DbtRuntimeError) as exc:
             raise DbtSelectorsError(
                 f"Could not read selector file: {exc}",
@@ -92,7 +94,7 @@ class SelectorConfig(Dict[str, Dict[str, Union[SelectionSpec, bool]]]):
             raise
 
 
-def selector_data_from_root(project_root: str) -> Dict[str, Any]:
+def selector_data_from_root(project_root: str) -> Optional[Dict[str, Any]]:
     selector_filepath = resolve_path_from_base("selectors.yml", project_root)
 
     if path_exists(selector_filepath):
@@ -15,7 +15,7 @@ def parse_cli_yaml_string(var_string: str, cli_option_name: str) -> Dict[str, An
     try:
         cli_vars = yaml_helper.load_yaml_text(var_string)
         var_type = type(cli_vars)
-        if var_type is dict:
+        if cli_vars is not None and var_type is dict:
             return cli_vars
         else:
             raise OptionNotYamlDictError(var_type, cli_option_name)
@@ -1,18 +1,18 @@
 import abc
 from collections import defaultdict
-from typing import Callable, ClassVar, DefaultDict, Dict, List, Optional
+from dataclasses import dataclass
+from typing import Any, Callable, ClassVar, DefaultDict, Dict, List, Optional
 
 import dbt.tracking
 from dbt.events import types as core_types
 from dbt.flags import get_flags
-from dbt_common.events.base_types import BaseEvent
+from dbt_common.dataclass_schema import dbtClassMixin
 from dbt_common.events.functions import warn_or_error
 
 
 class DBTDeprecation:
     _name: ClassVar[Optional[str]] = None
     _event: ClassVar[Optional[str]] = None
-    _summary_event: ClassVar[Optional[str]] = None
 
     @property
     def name(self) -> str:
@@ -37,20 +37,6 @@ class DBTDeprecation:
             raise NameError(msg)
         raise NotImplementedError("event not implemented for {}".format(self._event))
 
-    @property
-    def summary_event(self) -> Optional[abc.ABCMeta]:
-        if self._summary_event is None:
-            return None
-        else:
-            module_path = core_types
-            class_name = self._summary_event
-
-            try:
-                return getattr(module_path, class_name)
-            except AttributeError:
-                msg = f"Event Class `{class_name}` is not defined in `{module_path}`"
-                raise NameError(msg)
-
     def show(self, *args, **kwargs) -> None:
         flags = get_flags()
         if self.name not in active_deprecations or flags.show_all_deprecations:
@@ -60,22 +46,10 @@ class DBTDeprecation:
 
         active_deprecations[self.name] += 1
 
-    def show_summary(self) -> None:
-        event_class = self.summary_event
-        if self.name in active_deprecations and event_class is not None:
-            show_all_hint = (
-                not get_flags().show_all_deprecations and active_deprecations[self.name] > 1
-            )
-            event: BaseEvent = event_class(
-                occurrences=active_deprecations[self.name], show_all_hint=show_all_hint
-            )
-            warn_or_error(event)
-
 
 class PackageRedirectDeprecation(DBTDeprecation):
     _name = "package-redirect"
     _event = "PackageRedirectDeprecation"
-    _summary_event = "PackageRedirectDeprecationSummary"
 
 
 class PackageInstallPathDeprecation(DBTDeprecation):
@@ -165,6 +139,36 @@ class MicrobatchMacroOutsideOfBatchesDeprecation(DBTDeprecation):
     _event = "MicrobatchMacroOutsideOfBatchesDeprecation"
 
 
+class GenericJSONSchemaValidationDeprecation(DBTDeprecation):
+    _name = "generic-json-schema-validation-deprecation"
+    _event = "GenericJSONSchemaValidationDeprecation"
+
+
+class UnexpectedJinjaBlockDeprecation(DBTDeprecation):
+    _name = "unexpected-jinja-block-deprecation"
+    _event = "UnexpectedJinjaBlockDeprecation"
+
+
+class DuplicateYAMLKeysDeprecation(DBTDeprecation):
+    _name = "duplicate-yaml-keys-deprecation"
+    _event = "DuplicateYAMLKeysDeprecation"
+
+
+class CustomTopLevelKeyDeprecation(DBTDeprecation):
+    _name = "custom-top-level-key-deprecation"
+    _event = "CustomTopLevelKeyDeprecation"
+
+
+class CustomKeyInConfigDeprecation(DBTDeprecation):
+    _name = "custom-key-in-config-deprecation"
+    _event = "CustomKeyInConfigDeprecation"
+
+
+class CustomKeyInObjectDeprecation(DBTDeprecation):
+    _name = "custom-key-in-object-deprecation"
+    _event = "CustomKeyInObjectDeprecation"
+
+
 def renamed_env_var(old_name: str, new_name: str):
     class EnvironmentVariableRenamed(DBTDeprecation):
         _name = f"environment-variable-renamed:{old_name}"
@@ -195,9 +199,23 @@ def buffer(name: str, *args, **kwargs):
     buffered_deprecations.append(show_callback)
 
 
-def show_all_deprecation_summaries() -> None:
-    for deprecation in active_deprecations:
-        deprecations[deprecation].show_summary()
+def show_deprecations_summary() -> None:
+    summaries: List[Dict[str, Any]] = []
+    for deprecation, occurrences in active_deprecations.items():
+        deprecation_event = deprecations[deprecation].event()
+        summaries.append(
+            DeprecationSummary(
+                event_name=deprecation_event.__name__,
+                event_code=deprecation_event.code(),
+                occurrences=occurrences,
+            ).to_msg_dict()
+        )
+
+    if len(summaries) > 0:
+        show_all_hint = not get_flags().show_all_deprecations
+        warn_or_error(
+            core_types.DeprecationsSummary(summaries=summaries, show_all_hint=show_all_hint)
+        )
 
 
 # these are globally available
@@ -221,6 +239,12 @@ deprecations_list: List[DBTDeprecation] = [
     MFTimespineWithoutYamlConfigurationDeprecation(),
     MFCumulativeTypeParamsDeprecation(),
     MicrobatchMacroOutsideOfBatchesDeprecation(),
+    GenericJSONSchemaValidationDeprecation(),
+    UnexpectedJinjaBlockDeprecation(),
+    DuplicateYAMLKeysDeprecation(),
+    CustomTopLevelKeyDeprecation(),
+    CustomKeyInConfigDeprecation(),
+    CustomKeyInObjectDeprecation(),
 ]
 
 deprecations: Dict[str, DBTDeprecation] = {d.name: d for d in deprecations_list}
@@ -235,3 +259,17 @@ def reset_deprecations():
 def fire_buffered_deprecations():
     [dep_fn() for dep_fn in buffered_deprecations]
     buffered_deprecations.clear()
+
+
+@dataclass
+class DeprecationSummary(dbtClassMixin):
+    event_name: str
+    event_code: str
+    occurrences: int
+
+    def to_msg_dict(self) -> Dict[str, Any]:
+        return {
+            "event_name": self.event_name,
+            "event_code": self.event_code,
+            "occurrences": self.occurrences,
+        }
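A sketch of the new summary flow (argument values are illustrative; warn() and show_deprecations_summary() are the entry points this diff touches):

    from dbt import deprecations

    # Each warn() call increments active_deprecations[name]; only the first
    # occurrence is shown unless --show-all-deprecations is set.
    deprecations.warn(
        "duplicate-yaml-keys-deprecation",
        duplicate_description="Duplicate key 'models'",
        file="models/schema.yml",
    )

    # At the end of a command (see postflight above), the per-deprecation
    # counters are folded into a single DeprecationsSummary event.
    deprecations.show_deprecations_summary()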
@@ -470,15 +470,92 @@ message MicrobatchMacroOutsideOfBatchesDeprecationMsg {
   MicrobatchMacroOutsideOfBatchesDeprecation data = 2;
 }
 
-// D021
-message PackageRedirectDeprecationSummary {
-  int32 occurrences = 1;
+// Skipping D021. It belonged to the now deleted PackageRedirectDeprecationSummaryMsg.
+
+// D022
+message GenericJSONSchemaValidationDeprecation {
+  string violation = 1;
+  string file = 2;
+  string key_path = 3;
+}
+
+message GenericJSONSchemaValidationDeprecationMsg {
+  CoreEventInfo info = 1;
+  GenericJSONSchemaValidationDeprecation data = 2;
+}
+
+// D023
+message UnexpectedJinjaBlockDeprecation {
+  string msg = 1;
+  string file = 2;
+}
+
+message UnexpectedJinjaBlockDeprecationMsg {
+  CoreEventInfo info = 1;
+  UnexpectedJinjaBlockDeprecation data = 2;
+}
+
+// D024
+message DuplicateYAMLKeysDeprecation {
+  string duplicate_description = 1;
+  string file = 2;
+}
+
+message DuplicateYAMLKeysDeprecationMsg {
+  CoreEventInfo info = 1;
+  DuplicateYAMLKeysDeprecation data = 2;
+}
+
+// D025
+message CustomTopLevelKeyDeprecation {
+  string msg = 1;
+  string file = 2;
+}
+
+message CustomTopLevelKeyDeprecationMsg {
+  CoreEventInfo info = 1;
+  CustomTopLevelKeyDeprecation data = 2;
+}
+
+// D026
+message CustomKeyInConfigDeprecation {
+  string key = 1;
+  string file = 2;
+  string key_path = 3;
+}
+
+message CustomKeyInConfigDeprecationMsg {
+  CoreEventInfo info = 1;
+  CustomKeyInConfigDeprecation data = 2;
+}
+
+// D027
+message CustomKeyInObjectDeprecation {
+  string key = 1;
+  string file = 2;
+  string key_path = 3;
+}
+
+message CustomKeyInObjectDeprecationMsg {
+  CoreEventInfo info = 1;
+  CustomKeyInObjectDeprecation data = 2;
+}
+
+// D028
+message DeprecationSummary {
+  string event_name = 1;
+  string event_code = 2;
+  int32 occurrences = 3;
+}
+
+message DeprecationsSummary {
+  repeated DeprecationSummary summaries = 1;
   bool show_all_hint = 2;
 }
 
-message PackageRedirectDeprecationSummaryMsg {
+message DeprecationsSummaryMsg {
   CoreEventInfo info = 1;
-  PackageRedirectDeprecationSummary data = 2;
+  DeprecationsSummary data = 2;
 }
 
 // I065
File diff suppressed because one or more lines are too long
@@ -14,7 +14,15 @@ from dbt_common.events.format import (
     pluralize,
     timestamp_to_datetime_string,
 )
-from dbt_common.ui import error_tag, green, line_wrap_message, red, warning_tag, yellow
+from dbt_common.ui import (
+    deprecation_tag,
+    error_tag,
+    green,
+    line_wrap_message,
+    red,
+    warning_tag,
+    yellow,
+)
 
 # Event codes have prefixes which follow this table
 #
@@ -253,23 +261,7 @@ class PackageRedirectDeprecation(WarnLevel):
             f"The `{self.old_name}` package is deprecated in favor of `{self.new_name}`. Please "
             f"update your `packages.yml` configuration to use `{self.new_name}` instead."
         )
-        return line_wrap_message(warning_tag(f"Deprecated functionality\n\n{description}"))
-
-
-class PackageRedirectDeprecationSummary(WarnLevel):
-    def code(self) -> str:
-        return "D021"
-
-    def message(self) -> str:
-        description = (
-            f"Found {pluralize(self.occurrences, 'package')} that {'has' if self.occurrences == 1 else 'have'} been deprecated in favor of new packages. Please "
-            f"update your `packages.yml` configuration to use the new packages instead."
-        )
-
-        if self.show_all_hint:
-            description += " To see all deprecated packages, run command again with the `--show-all-deprecations` flag."
-
-        return line_wrap_message(warning_tag(f"Deprecated functionality\n\n{description}"))
+        return line_wrap_message(deprecation_tag(description))
 
 
 class PackageInstallPathDeprecation(WarnLevel):
@@ -282,7 +274,7 @@ class PackageInstallPathDeprecation(WarnLevel):
         Please update `clean-targets` in `dbt_project.yml` and check `.gitignore` as well.
         Or, set `packages-install-path: dbt_modules` if you'd like to keep the current value.
         """
-        return line_wrap_message(warning_tag(f"Deprecated functionality\n\n{description}"))
+        return line_wrap_message(deprecation_tag(description))
 
 
 class ConfigSourcePathDeprecation(WarnLevel):
@@ -294,7 +286,7 @@ class ConfigSourcePathDeprecation(WarnLevel):
             f"The `{self.deprecated_path}` config has been renamed to `{self.exp_path}`. "
             "Please update your `dbt_project.yml` configuration to reflect this change."
         )
-        return line_wrap_message(warning_tag(f"Deprecated functionality\n\n{description}"))
+        return line_wrap_message(deprecation_tag(description))
 
 
 class ConfigDataPathDeprecation(WarnLevel):
@@ -306,7 +298,7 @@ class ConfigDataPathDeprecation(WarnLevel):
             f"The `{self.deprecated_path}` config has been renamed to `{self.exp_path}`. "
             "Please update your `dbt_project.yml` configuration to reflect this change."
         )
-        return line_wrap_message(warning_tag(f"Deprecated functionality\n\n{description}"))
+        return line_wrap_message(deprecation_tag(description))
 
 
 class MetricAttributesRenamed(WarnLevel):
@@ -323,7 +315,7 @@ class MetricAttributesRenamed(WarnLevel):
             "\nRelevant issue here: https://github.com/dbt-labs/dbt-core/issues/5849"
         )
 
-        return warning_tag(f"Deprecated functionality\n\n{description}")
+        return deprecation_tag(description)
 
 
 class ExposureNameDeprecation(WarnLevel):
@@ -338,7 +330,7 @@ class ExposureNameDeprecation(WarnLevel):
             "follow this pattern. Please update the 'name', and use the 'label' property for a "
             "human-friendly title. This will raise an error in a future version of dbt-core."
         )
-        return line_wrap_message(warning_tag(f"Deprecated functionality\n\n{description}"))
+        return line_wrap_message(deprecation_tag(description))
 
 
 class InternalDeprecation(WarnLevel):
@@ -367,7 +359,7 @@ class EnvironmentVariableRenamed(WarnLevel):
             f"Set `{self.new_name}` and unset `{self.old_name}` to avoid this deprecation warning and "
             "ensure it works properly in a future release."
         )
-        return line_wrap_message(warning_tag(f"Deprecated functionality\n\n{description}"))
+        return line_wrap_message(deprecation_tag(description))
 
 
 class ConfigLogPathDeprecation(WarnLevel):
@@ -384,7 +376,7 @@ class ConfigLogPathDeprecation(WarnLevel):
             f"If you wish to write dbt {output} to a custom directory, please use "
             f"the {cli_flag} CLI flag or {env_var} env var instead."
         )
-        return line_wrap_message(warning_tag(f"Deprecated functionality\n\n{description}"))
+        return line_wrap_message(deprecation_tag(description))
 
 
 class ConfigTargetPathDeprecation(WarnLevel):
@@ -401,7 +393,7 @@ class ConfigTargetPathDeprecation(WarnLevel):
             f"If you wish to write dbt {output} to a custom directory, please use "
             f"the {cli_flag} CLI flag or {env_var} env var instead."
         )
-        return line_wrap_message(warning_tag(f"Deprecated functionality\n\n{description}"))
+        return line_wrap_message(deprecation_tag(description))
 
 
 # Note: this deprecation has been removed, but we are leaving
@@ -416,7 +408,7 @@ class TestsConfigDeprecation(WarnLevel):
             f"The `{self.deprecated_path}` config has been renamed to `{self.exp_path}`. "
             "Please see https://docs.getdbt.com/docs/build/data-tests#new-data_tests-syntax for more information."
        )
-        return line_wrap_message(warning_tag(f"Deprecated functionality\n\n{description}"))
+        return line_wrap_message(deprecation_tag(description))
 
 
 class ProjectFlagsMovedDeprecation(WarnLevel):
@@ -429,7 +421,7 @@ class ProjectFlagsMovedDeprecation(WarnLevel):
             "key in dbt_project.yml."
         )
         # Can't use line_wrap_message here because flags.printer_width isn't available yet
-        return warning_tag(f"Deprecated functionality\n\n{description}")
+        return deprecation_tag(description)
 
 
 class SpacesInResourceNameDeprecation(DynamicLevel):
@@ -512,6 +504,84 @@ class MicrobatchMacroOutsideOfBatchesDeprecation(WarnLevel):
         return line_wrap_message(warning_tag(description))
 
 
+# Skipping D021. It belonged to the now deleted PackageRedirectDeprecationSummary event.
+
+
+class GenericJSONSchemaValidationDeprecation(WarnLevel):
+    def code(self) -> str:
+        return "D022"
+
+    def message(self) -> str:
+        if self.key_path == "":
+            description = f"{self.violation} at top level in file `{self.file}`"
+        else:
+            description = f"{self.violation} in file `{self.file}` at path `{self.key_path}`"
+
+        return line_wrap_message(deprecation_tag(description))
+
+
+class UnexpectedJinjaBlockDeprecation(WarnLevel):
+    def code(self) -> str:
+        return "D023"
+
+    def message(self) -> str:
+        description = f"{self.msg} in file `{self.file}`"
+        return line_wrap_message(deprecation_tag(description))
+
+
+class DuplicateYAMLKeysDeprecation(WarnLevel):
+    def code(self) -> str:
+        return "D024"
+
+    def message(self) -> str:
+        description = f"{self.duplicate_description} in file `{self.file}`"
+        return line_wrap_message(deprecation_tag(description))
+
+
+class CustomTopLevelKeyDeprecation(WarnLevel):
+    def code(self) -> str:
+        return "D025"
+
+    def message(self) -> str:
+        description = f"{self.msg} in file `{self.file}`"
+        return line_wrap_message(deprecation_tag(description))
+
+
+class CustomKeyInConfigDeprecation(WarnLevel):
+    def code(self) -> str:
+        return "D026"
+
+    def message(self) -> str:
+        description = f"Custom key `{self.key}` found in `config` at path `{self.key_path}` in file `{self.file}`. Custom config keys should move into the `config.meta`."
+        return line_wrap_message(deprecation_tag(description))
+
+
+class CustomKeyInObjectDeprecation(WarnLevel):
+    def code(self) -> str:
+        return "D027"
+
+    def message(self) -> str:
+        description = f"Custom key `{self.key}` found at `{self.key_path}` in file `{self.file}`. This may mean the key is a typo, or is simply not a key supported by the object."
+        return line_wrap_message(deprecation_tag(description))
+
+
+class DeprecationsSummary(WarnLevel):
+    def code(self) -> str:
+        return "D028"
+
+    def message(self) -> str:
+        description = "Summary of encountered deprecations:"
+        for summary in self.summaries:
+            description += (
+                f"\n\n- {summary.event_name}: {pluralize(summary.occurrences, 'occurrence')}"
+            )
+
+        if self.show_all_hint:
+            description += "\n\nTo see all deprecation instances instead of just the first occurrence of each, run command again with the `--show-all-deprecations` flag. You may also need to run with `--no-partial-parse` as some deprecations are only encountered during parsing."
+
+        return line_wrap_message(deprecation_tag(description))
+
+
 # =======================================================
 # I - Project parsing
 # =======================================================
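For orientation, a sketch of the D028 event as show_deprecations_summary() builds it; the summary dict matches DeprecationSummary.to_msg_dict(), and the exact rendering comes from deprecation_tag and line_wrap_message in dbt_common.ui:

    from dbt.events.types import DeprecationsSummary

    event = DeprecationsSummary(
        summaries=[
            {"event_name": "DuplicateYAMLKeysDeprecation", "event_code": "D024", "occurrences": 2}
        ],
        show_all_hint=True,
    )
    print(event.message())
    # Summary of encountered deprecations:
    #
    # - DuplicateYAMLKeysDeprecation: 2 occurrences
    # ...followed by the --show-all-deprecations / --no-partial-parse hint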
core/dbt/include/jsonschemas/__init__.py (new file, 3 lines)
@@ -0,0 +1,3 @@
+import os
+
+JSONSCHEMAS_PATH = os.path.dirname(__file__)
core/dbt/include/jsonschemas/project/0.0.110.json (new file, 2026 lines; diff suppressed because it is too large)
core/dbt/include/jsonschemas/project/0.0.85.json (new file, 2015 lines; diff suppressed because it is too large)
core/dbt/include/jsonschemas/resources/0.0.110.json (new file, 2635 lines; diff suppressed because it is too large)
core/dbt/include/jsonschemas/resources/0.0.85.json (new file, 2536 lines; diff suppressed because it is too large)
core/dbt/jsonschemas.py (new file, 100 lines)
@@ -0,0 +1,100 @@
+import json
+import re
+from datetime import date, datetime
+from pathlib import Path
+from typing import Any, Dict, Iterator
+
+import jsonschema
+from jsonschema import ValidationError
+from jsonschema._keywords import type as type_rule
+from jsonschema.validators import Draft7Validator, extend
+
+from dbt import deprecations
+from dbt.include.jsonschemas import JSONSCHEMAS_PATH
+
+
+def load_json_from_package(jsonschema_type: str, filename: str) -> Dict[str, Any]:
+    """Loads a JSON file from within a package."""
+
+    path = Path(JSONSCHEMAS_PATH).joinpath(jsonschema_type, filename)
+    data = path.read_bytes()
+    return json.loads(data)
+
+
+def project_schema() -> Dict[str, Any]:
+    return load_json_from_package(jsonschema_type="project", filename="0.0.110.json")
+
+
+def resources_schema() -> Dict[str, Any]:
+    return load_json_from_package(jsonschema_type="resources", filename="0.0.110.json")
+
+
+def custom_type_rule(validator, types, instance, schema):
+    """This is necessary because PyYAML loads things that look like dates or datetimes as those
+    python objects. Then jsonschema.validate() fails because it expects strings.
+    """
+    if "string" in types and (isinstance(instance, datetime) or isinstance(instance, date)):
+        return
+    else:
+        return type_rule(validator, types, instance, schema)
+
+
+CustomDraft7Validator = extend(Draft7Validator, validators={"type": custom_type_rule})
+
+
+def error_path_to_string(error: jsonschema.ValidationError) -> str:
+    if len(error.path) == 0:
+        return ""
+    else:
+        path = str(error.path.popleft())
+        for part in error.path:
+            if isinstance(part, int):
+                path += f"[{part}]"
+            else:
+                path += f".{part}"
+
+        return path
+
+
+def jsonschema_validate(schema: Dict[str, Any], json: Dict[str, Any], file_path: str) -> None:
+    validator = CustomDraft7Validator(schema)
+    errors: Iterator[ValidationError] = validator.iter_errors(json)  # get all validation errors
+
+    for error in errors:
+        # Listify the error path to make it easier to work with (it's a deque in the ValidationError object)
+        error_path = list(error.path)
+        if error.validator == "additionalProperties":
+            key = re.search(r"'\S+'", error.message)
+            if len(error.path) == 0:
+                deprecations.warn(
+                    "custom-top-level-key-deprecation",
+                    msg="Unexpected top-level key" + (" " + key.group() if key else ""),
+                    file=file_path,
+                )
+            else:
+                deprecations.warn(
+                    "custom-key-in-object-deprecation",
+                    key=key.group() if key else "",
+                    file=file_path,
+                    key_path=error_path_to_string(error),
+                )
+        elif (
+            error.validator == "anyOf"
+            and len(error_path) > 0
+            and error_path[-1] == "config"
+            and isinstance(error.instance, dict)
+            and len(error.instance.keys()) > 0
+        ):
+            deprecations.warn(
+                "custom-key-in-config-deprecation",
+                key=(list(error.instance.keys()))[0],
+                file=file_path,
+                key_path=error_path_to_string(error),
+            )
+        else:
+            deprecations.warn(
+                "generic-json-schema-validation-deprecation",
+                violation=error.message,
+                file=file_path,
+                key_path=error_path_to_string(error),
+            )
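A hedged example of the validator in action; the project dict and file path are made up, and the deprecation names are the ones registered above:

    from dbt.jsonschemas import jsonschema_validate, project_schema

    project_dict = {
        "name": "jaffle_shop",
        "profile": "jaffle_shop",
        "model-pths": ["models"],  # typo of "model-paths"
    }
    jsonschema_validate(schema=project_schema(), json=project_dict, file_path="dbt_project.yml")
    # The unknown top-level key trips additionalProperties with an empty error
    # path, firing "custom-top-level-key-deprecation" (D025) instead of raising;
    # any other violations surface as D022 generic warnings.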
@@ -3,7 +3,7 @@ import itertools
 import os
 from typing import Any, Dict, Generic, List, Optional, TypeVar
 
-from dbt import hooks, utils
+from dbt import deprecations, hooks, utils
 from dbt.adapters.factory import get_adapter  # noqa: F401
 from dbt.artifacts.resources import Contract
 from dbt.clients.jinja import MacroGenerator, get_rendered
@@ -27,6 +27,7 @@ from dbt.flags import get_flags
 from dbt.node_types import AccessType, ModelLanguage, NodeType
 from dbt.parser.common import resource_types_to_schema_file_keys
 from dbt.parser.search import FileBlock
+from dbt_common.clients._jinja_blocks import ExtractWarning
 from dbt_common.dataclass_schema import ValidationError
 from dbt_common.utils import deep_merge
@@ -63,6 +64,9 @@ class BaseParser(Generic[FinalValue]):
             filter(None, [self.resource_type, self.project.project_name, resource_name, hash])
         )
 
+    def _handle_extract_warning(self, warning: ExtractWarning, file: str) -> None:
+        deprecations.warn("unexpected-jinja-block-deprecation", msg=warning.msg, file=file)
+
 
 class Parser(BaseParser[FinalValue], Generic[FinalValue]):
     def __init__(
@@ -13,6 +13,7 @@ from dbt.node_types import NodeType
 from dbt.parser.base import BaseParser
 from dbt.parser.search import FileBlock, filesystem_search
 from dbt_common.clients import jinja
+from dbt_common.clients._jinja_blocks import ExtractWarning
 from dbt_common.utils import MACRO_PREFIX
 
 
@@ -47,6 +48,10 @@ class MacroParser(BaseParser[Macro]):
         )
 
     def parse_unparsed_macros(self, base_node: UnparsedMacro) -> Iterable[Macro]:
+        # This is a bit of a hack to get the file path to the deprecation
+        def wrap_handle_extract_warning(warning: ExtractWarning) -> None:
+            self._handle_extract_warning(warning=warning, file=base_node.original_file_path)
+
         try:
             blocks: List[jinja.BlockTag] = [
                 t
@@ -54,6 +59,7 @@ class MacroParser(BaseParser[Macro]):
                     base_node.raw_code,
                     allowed_blocks={"macro", "materialization", "test", "data_test"},
                     collect_raw_data=False,
+                    warning_callback=wrap_handle_extract_warning,
                 )
                 if isinstance(t, jinja.BlockTag)
             ]
@@ -86,7 +86,7 @@ def load_source_file(
     source_file.checksum = FileHash.from_contents(file_contents)
 
     if parse_file_type == ParseFileType.Schema and source_file.contents:
-        dfy = yaml_from_file(source_file)
+        dfy = yaml_from_file(source_file=source_file, validate=True)
         if dfy:
             validate_yaml(source_file.path.original_file_path, dfy)
             source_file.dfy = dfy
@@ -24,6 +24,10 @@ from dbt.artifacts.resources.v1.model import (
     ModelFreshness,
     TimeSpine,
 )
+from dbt.clients.checked_load import (
+    checked_load,
+    issue_deprecation_warnings_for_failures,
+)
 from dbt.clients.jinja_static import statically_parse_ref_or_source
 from dbt.clients.yaml_helper import load_yaml_text
 from dbt.config import RuntimeConfig
@@ -71,6 +75,7 @@ from dbt.exceptions import (
     YamlParseListError,
 )
 from dbt.flags import get_flags
+from dbt.jsonschemas import jsonschema_validate, resources_schema
 from dbt.node_types import AccessType, NodeType
 from dbt.parser.base import SimpleParser
 from dbt.parser.common import (
@@ -124,11 +129,29 @@ from dbt_common.utils import deep_merge
 # ===============================================================================
 
 
-def yaml_from_file(source_file: SchemaSourceFile) -> Optional[Dict[str, Any]]:
+def yaml_from_file(
+    source_file: SchemaSourceFile, validate: bool = False
+) -> Optional[Dict[str, Any]]:
     """If loading the yaml fails, raise an exception."""
     try:
         # source_file.contents can sometimes be None
-        contents = load_yaml_text(source_file.contents or "", source_file.path)
+        to_load = source_file.contents or ""
+
+        if validate:
+            contents, failures = checked_load(to_load)
+            issue_deprecation_warnings_for_failures(
+                failures=failures, file=source_file.path.original_file_path
+            )
+            if contents is not None:
+                # Validate the yaml against the jsonschema to raise deprecation warnings
+                # for invalid fields.
+                jsonschema_validate(
+                    schema=resources_schema(),
+                    json=contents,
+                    file_path=source_file.path.original_file_path,
+                )
+        else:
+            contents = load_yaml_text(to_load, source_file.path)
 
         if contents is None:
             return contents
@@ -193,7 +216,6 @@ class SchemaParser(SimpleParser[YamlBlock, ModelNode]):
         if dct:
             # contains the FileBlock and the data (dictionary)
             yaml_block = YamlBlock.from_file_block(block, dct)
-
             parser: YamlReader
 
         # There are 9 different yaml lists which are parsed by different parsers:
@@ -14,9 +14,11 @@ from typing import (

 from pathspec import PathSpec  # type: ignore

+from dbt import deprecations
 from dbt.config import Project
 from dbt.contracts.files import AnySourceFile, FilePath
 from dbt.exceptions import DbtInternalError, ParsingError
+from dbt_common.clients._jinja_blocks import ExtractWarning
 from dbt_common.clients.jinja import BlockTag, extract_toplevel_blocks
 from dbt_common.clients.system import find_matching

@@ -115,9 +117,16 @@ class BlockSearcher(Generic[BlockSearchResult], Iterable[BlockSearchResult]):
         self.source_tag_factory: BlockSearchResultFactory = source_tag_factory

     def extract_blocks(self, source_file: FileBlock) -> Iterable[BlockTag]:
+        # This is a bit of a hack to get the file path to the deprecation
+        def wrap_handle_extract_warning(warning: ExtractWarning) -> None:
+            self._handle_extract_warning(warning=warning, file=source_file.path.relative_path)
+
         try:
             blocks = extract_toplevel_blocks(
-                source_file.contents, allowed_blocks=self.allowed_blocks, collect_raw_data=False
+                source_file.contents,
+                allowed_blocks=self.allowed_blocks,
+                collect_raw_data=False,
+                warning_callback=wrap_handle_extract_warning,
             )
             # this makes mypy happy, and this is an invariant we really need
             for block in blocks:
@@ -129,6 +138,9 @@ class BlockSearcher(Generic[BlockSearchResult], Iterable[BlockSearchResult]):
                 exc.add_node(source_file)
             raise

+    def _handle_extract_warning(self, warning: ExtractWarning, file: str) -> None:
+        deprecations.warn("unexpected-jinja-block-deprecation", msg=warning.msg, file=file)
+
     def __iter__(self) -> Iterator[BlockSearchResult]:
         for entry in self.source:
             for block in self.extract_blocks(entry):
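A note on the wrapper added above: `extract_toplevel_blocks` lives in dbt_common, so its warning callback receives only the `ExtractWarning`; the file path has to be captured by closure. A generic, self-contained sketch of the same pattern (names here are illustrative, not from the diff):

from typing import Callable

def make_warning_callback(file_path: str) -> Callable[[str], None]:
    # file_path is bound into the closure so a callback that only
    # receives the warning can still report where it came from
    def callback(warning_msg: str) -> None:
        print(f"{file_path}: {warning_msg}")
    return callback

warn = make_warning_callback("models/schema.yml")
warn("unexpected jinja block")  # -> models/schema.yml: unexpected jinja block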
2 core/dbt/tests/fixtures/project.py (vendored)
@@ -17,6 +17,7 @@ from dbt.adapters.factory import (
 )
 from dbt.config.runtime import RuntimeConfig
 from dbt.context.providers import generate_runtime_macro_context
+from dbt.deprecations import reset_deprecations
 from dbt.events.logging import setup_event_logger
 from dbt.mp_context import get_mp_context
 from dbt.parser.manifest import ManifestLoader
@@ -596,6 +597,7 @@ def project_setup(
         pass
     os.chdir(orig_cwd)
     cleanup_event_logger()
+    reset_deprecations()


 # This is the main fixture that is used in all functional tests. It pulls in the other
@@ -71,7 +71,7 @@ setup(
         "dbt-extractor>=0.5.0,<=0.6",
         "dbt-semantic-interfaces>=0.8.3,<0.9",
         # Minor versions for these are expected to be backwards-compatible
-        "dbt-common>=1.15.0,<2.0",
+        "dbt-common>=1.22.0,<2.0",
         "dbt-adapters>=1.13.0,<2.0",
         # ----
         # Expect compatibility with all new versions of these packages, so lower bounds only.
@@ -32,6 +32,7 @@ tox>=3.13
 types-docutils
 types-PyYAML
 types-Jinja2
+types-jsonschema
 types-mock
 types-protobuf>=5.0,<6.0
 types-python-dateutil
@@ -138,3 +138,40 @@ local_dependency__seed_csv = """id,name
 2,Sam
 3,John
 """
+
+
+invalid_deprecation_date_yaml = """
+models:
+  - name: models_trivial
+    description: "This is a test model"
+    deprecation_date: 1
+"""
+
+duplicate_keys_yaml = """
+models:
+  - name: models_trivial
+    description: "This is a test model"
+    deprecation_date: 1999-01-01 00:00:00.00+00:00
+
+models:
+  - name: models_trivial
+    description: "This is a test model"
+    deprecation_date: 1999-01-01 00:00:00.00+00:00
+"""
+
+custom_key_in_config_yaml = """
+models:
+  - name: models_trivial
+    description: "This is a test model"
+    deprecation_date: 1999-01-01 00:00:00.00+00:00
+    config:
+      my_custom_key: "my_custom_value"
+"""
+
+custom_key_in_object_yaml = """
+models:
+  - name: models_trivial
+    description: "This is a test model"
+    deprecation_date: 1999-01-01 00:00:00.00+00:00
+    my_custom_property: "It's over, I have the high ground"
+"""
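For context on why `duplicate_keys_yaml` above warrants a dedicated deprecation: standard PyYAML accepts a repeated key and silently keeps only the last value, so the first `models:` block would vanish without any error. A runnable illustration (standalone, not part of the diff):

import yaml

doc = """
models:
  - name: first_definition
models:
  - name: second_definition
"""
print(yaml.safe_load(doc))
# {'models': [{'name': 'second_definition'}]} -- the first block is silently dropped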
@@ -7,14 +7,22 @@ import dbt_common
 from dbt import deprecations
 from dbt.clients.registry import _get_cached
 from dbt.events.types import (
+    CustomKeyInConfigDeprecation,
+    CustomKeyInObjectDeprecation,
+    DeprecationsSummary,
+    DuplicateYAMLKeysDeprecation,
+    GenericJSONSchemaValidationDeprecation,
     PackageRedirectDeprecation,
-    PackageRedirectDeprecationSummary,
 )
 from dbt.tests.util import run_dbt, run_dbt_and_capture, write_file
 from dbt_common.exceptions import EventCompilationError
 from tests.functional.deprecations.fixtures import (
     bad_name_yaml,
+    custom_key_in_config_yaml,
+    custom_key_in_object_yaml,
     deprecated_model_exposure_yaml,
+    duplicate_keys_yaml,
+    invalid_deprecation_date_yaml,
     models_trivial__model_sql,
 )
 from tests.utils import EventCatcher
@@ -258,7 +266,7 @@ class TestDeprecationSummary:

     @pytest.fixture(scope="class")
     def event_catcher(self) -> EventCatcher:
-        return EventCatcher(event_to_catch=PackageRedirectDeprecationSummary)
+        return EventCatcher(event_to_catch=DeprecationsSummary)

     def test_package_redirect(self, project, event_catcher: EventCatcher):
         deprecations.reset_deprecations()
@@ -267,4 +275,90 @@ class TestDeprecationSummary:
         assert "package-redirect" in deprecations.active_deprecations
         assert deprecations.active_deprecations["package-redirect"] == 2
         assert len(event_catcher.caught_events) == 1
-        assert event_catcher.caught_events[0].data.occurrences == 2  # type: ignore
+        for summary in event_catcher.caught_events[0].data.summaries:  # type: ignore
+            found_summary = False
+            if summary.event_name == "PackageRedirectDeprecation":
+                assert (
+                    summary.occurrences == 2
+                ), f"Expected 2 occurrences of PackageRedirectDeprecation, got {summary.occurrences}"
+                found_summary = True
+
+        assert found_summary, "Expected to find PackageRedirectDeprecation in deprecations summary"
+
+
+class TestDeprecatedInvalidDeprecationDate:
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {
+            "models_trivial.sql": models_trivial__model_sql,
+            "models.yml": invalid_deprecation_date_yaml,
+        }
+
+    def test_deprecated_invalid_deprecation_date(self, project):
+        event_catcher = EventCatcher(GenericJSONSchemaValidationDeprecation)
+        try:
+            run_dbt(["parse", "--no-partial-parse"], callbacks=[event_catcher.catch])
+        except:  # noqa
+            assert (
+                True
+            ), "Expected an exception to be raised, because a model object can't be created with a deprecation_date as an int"
+
+        assert len(event_catcher.caught_events) == 1
+        assert (
+            "1 is not of type 'string', 'null' in file `models/models.yml` at path\n`models[0].deprecation_date`"
+            in event_catcher.caught_events[0].info.msg
+        )
+
+
+class TestDuplicateYAMLKeysInSchemaFiles:
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {
+            "models_trivial.sql": models_trivial__model_sql,
+            "models.yml": duplicate_keys_yaml,
+        }
+
+    def test_duplicate_yaml_keys_in_schema_files(self, project):
+        event_catcher = EventCatcher(DuplicateYAMLKeysDeprecation)
+        run_dbt(["parse", "--no-partial-parse"], callbacks=[event_catcher.catch])
+        assert len(event_catcher.caught_events) == 1
+        assert (
+            "Duplicate key 'models' in \"<unicode string>\", line 6, column 1 in file\n`models/models.yml`"
+            in event_catcher.caught_events[0].info.msg
+        )
+
+
+class TestCustomKeyInConfigDeprecation:
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {
+            "models_trivial.sql": models_trivial__model_sql,
+            "models.yml": custom_key_in_config_yaml,
+        }
+
+    def test_custom_key_in_config_deprecation(self, project):
+        event_catcher = EventCatcher(CustomKeyInConfigDeprecation)
+        run_dbt(["parse", "--no-partial-parse"], callbacks=[event_catcher.catch])
+        assert len(event_catcher.caught_events) == 1
+        assert (
+            "Custom key `my_custom_key` found in `config` at path `models[0].config`"
+            in event_catcher.caught_events[0].info.msg
+        )
+
+
+class TestCustomKeyInObjectDeprecation:
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {
+            "models_trivial.sql": models_trivial__model_sql,
+            "models.yml": custom_key_in_object_yaml,
+        }
+
+    def test_custom_key_in_object_deprecation(self, project):
+        event_catcher = EventCatcher(CustomKeyInObjectDeprecation)
+        run_dbt(["parse", "--no-partial-parse"], callbacks=[event_catcher.catch])
+        assert len(event_catcher.caught_events) == 1
+        assert (
+            "Custom key `'my_custom_property'` found at `models[0]` in file\n`models/models.yml`."
+            in event_catcher.caught_events[0].info.msg
+        )
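`EventCatcher` (imported from tests.utils) is not shown in this diff; the tests above only rely on it recording matching events via a `catch` callback and exposing them as `caught_events`. A minimal sketch of that shape, assuming dbt's convention of matching on the event's `info.name` (the real helper may differ):

from dataclasses import dataclass, field
from typing import Any, List, Type


@dataclass
class EventCatcher:
    event_to_catch: Type[Any]
    caught_events: List[Any] = field(default_factory=list)

    def catch(self, event: Any) -> None:
        # dbt event callbacks receive an EventMsg whose info.name holds the event type name
        if event.info.name == self.event_to_catch.__name__:
            self.caught_events.append(event)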
@@ -52,7 +52,7 @@ class TestCustomTargetRetry:
         write_file(models__sample_model, "models", "sample_model.sql")


-class TestRetry:
+class BaseTestRetry:
     @pytest.fixture(scope="class")
     def models(self):
         return {
@@ -66,6 +66,8 @@ class TestRetry:
     def macros(self):
         return {"alter_timezone.sql": macros__alter_timezone_sql}

+
+class TestRetryNoPreviousRun(BaseTestRetry):
     def test_no_previous_run(self, project):
         with pytest.raises(
             DbtRuntimeError, match="Could not find previous run in 'target' target directory"
@@ -77,6 +79,8 @@ class TestRetry:
         ):
             run_dbt(["retry", "--state", "walmart"])

+
+class TestRetryPreviousRun(BaseTestRetry):
     def test_previous_run(self, project):
         # Regular build
         results = run_dbt(["build"], expect_pass=False)
@@ -126,6 +130,8 @@ class TestRetry:

         write_file(models__sample_model, "models", "sample_model.sql")

+
+class TestRetryWarnError(BaseTestRetry):
     def test_warn_error(self, project):
         # Our test command should succeed when run normally...
         results = run_dbt(["build", "--select", "second_model"])
@@ -146,6 +152,8 @@ class TestRetry:
         # Retry with --warn-error, should fail
         run_dbt(["--warn-error", "retry"], expect_pass=False)

+
+class TestRetryRunOperation(BaseTestRetry):
     def test_run_operation(self, project):
         results = run_dbt(
             ["run-operation", "alter_timezone", "--args", "{timezone: abc}"], expect_pass=False
@@ -160,6 +168,8 @@ class TestRetry:
         results = run_dbt(["retry"], expect_pass=False)
         assert {n.unique_id: n.status for n in results.results} == expected_statuses

+
+class TestRetryRemovedFile(BaseTestRetry):
     def test_removed_file(self, project):
         run_dbt(["build"], expect_pass=False)

@@ -172,6 +182,8 @@ class TestRetry:

         write_file(models__sample_model, "models", "sample_model.sql")

+
+class TestRetryRemovedFileLeafNode(BaseTestRetry):
     def test_removed_file_leaf_node(self, project):
         write_file(models__sample_model, "models", "third_model.sql")
         run_dbt(["build"], expect_pass=False)
39 tests/functional/test_project.py (new file)
@@ -0,0 +1,39 @@
+import yaml
+from pytest_mock import MockerFixture
+
+from dbt.deprecations import GenericJSONSchemaValidationDeprecation
+from dbt.tests.util import run_dbt, write_file
+from tests.utils import EventCatcher
+
+
+class TestProjectJsonschemaValidatedOnlyOnce:
+    """Ensure that the dbt_project.yml file is validated only once, even if it is 'loaded' multiple times"""
+
+    def test_project(self, project, mocker: MockerFixture) -> None:
+        mocked_jsonschema_validate = mocker.patch("dbt.config.project.jsonschema_validate")
+        run_dbt(["parse"])
+        assert mocked_jsonschema_validate.call_count == 1
+
+
+class TestGenericJsonSchemaValidationDeprecation:
+    """Ensure that the generic jsonschema validation deprecation can be fired"""
+
+    def test_project(self, project, project_root: str) -> None:
+
+        # `name` was already required prior to this deprecation, so this deprecation doesn't
+        # really add anything. However, this test shows that jsonschema validation issues raise
+        # deprecation warnings via the catchall `GenericJSONSchemaValidationDeprecation`
+        project_missing_name = {
+            "profile": "test",
+            "flags": {"send_anonymous_usage_stats": False},
+        }
+        write_file(yaml.safe_dump(project_missing_name), project_root, "dbt_project.yml")
+        event_catcher = EventCatcher(GenericJSONSchemaValidationDeprecation)
+
+        try:
+            run_dbt(["parse"], callbacks=[event_catcher.catch], expect_pass=False)
+        except:  # noqa: E722
+            pass
+
+        assert len(event_catcher.caught_events) == 1
+        assert "'name' is a required property at top level" in event_catcher.caught_events[0].info.msg  # type: ignore
56 tests/unit/clients/test_yaml_helper.py (new file)
@@ -0,0 +1,56 @@
+from dbt.clients.checked_load import checked_load
+
+no_dupe__yml = """
+a:
+  b: 1
+
+b:
+  a: 1
+"""
+
+top_level_dupe__yml = """
+a:
+  b: 1
+
+a:
+  c: 1
+  d: 2
+  e: 3
+"""
+
+nested_dupe__yml = """
+a:
+  b: 1
+
+c:
+  d: 1
+  e: 2
+  d: 3
+"""
+
+multiple_dupes__yml = """
+a:
+  b:
+    c: 1
+
+d:
+  e:
+    f: 1
+    g: 2
+    f: 3
+    h: 4
+    f: 5
+"""
+
+
+def test_checked_load():
+
+    no_dupe_issues = checked_load(no_dupe__yml)[1]
+    assert no_dupe_issues == []
+
+    top_level_dupe_issues = checked_load(top_level_dupe__yml)[1]
+    assert len(top_level_dupe_issues) == 1
+    nested_dupe_issues = checked_load(nested_dupe__yml)[1]
+    assert len(nested_dupe_issues) == 1
+    multiple_dupes_issues = checked_load(multiple_dupes__yml)[1]
+    assert len(multiple_dupes_issues) == 2
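The diff adds tests for `checked_load` but not its implementation (it lands in `dbt.clients.checked_load`). As a rough standalone approximation of how a loader can report duplicate keys instead of silently keeping the last one, using plain PyYAML (function name and message format here are illustrative, not dbt's):

from typing import Any, List, Optional, Tuple

import yaml


def checked_load_sketch(contents: str) -> Tuple[Optional[Any], List[str]]:
    """Load YAML, collecting duplicate-key messages instead of dropping them."""
    failures: List[str] = []

    class CheckedLoader(yaml.SafeLoader):
        pass

    def construct_mapping(loader: CheckedLoader, node: yaml.MappingNode) -> dict:
        seen = set()
        for key_node, _ in node.value:
            key = loader.construct_object(key_node)
            if key in seen:
                mark = key_node.start_mark
                failures.append(
                    f"Duplicate key {key!r} at line {mark.line + 1}, column {mark.column + 1}"
                )
            seen.add(key)
        return yaml.SafeLoader.construct_mapping(loader, node)

    # Run our duplicate-aware constructor for every mapping node, nested or top level
    CheckedLoader.add_constructor(
        yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, construct_mapping
    )
    return yaml.load(contents, Loader=CheckedLoader), failures

Feeding top_level_dupe__yml from the test above through this sketch yields exactly one failure, matching the assertion.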
@@ -11,12 +11,15 @@ import dbt.config
 import dbt.exceptions
 from dbt.adapters.contracts.connection import DEFAULT_QUERY_COMMENT, QueryComment
 from dbt.adapters.factory import load_plugin
-from dbt.config.project import Project, _get_required_version
+from dbt.config.project import Project, _get_required_version, jsonschema_validate
 from dbt.constants import DEPENDENCIES_FILE_NAME
 from dbt.contracts.project import GitPackage, LocalPackage, PackageConfig
+from dbt.deprecations import GenericJSONSchemaValidationDeprecation
 from dbt.flags import set_from_args
+from dbt.jsonschemas import project_schema
 from dbt.node_types import NodeType
 from dbt.tests.util import safe_set_invocation_context
+from dbt_common.events.event_manager_client import get_event_manager
 from dbt_common.exceptions import DbtRuntimeError
 from dbt_common.semver import VersionSpecifier
 from tests.unit.config import (
@@ -25,6 +28,7 @@ from tests.unit.config import (
     project_from_config_norender,
     project_from_config_rendered,
 )
+from tests.utils import EventCatcher


 class TestProjectMethods:
@@ -586,3 +590,19 @@ class TestGetRequiredVersion:
             match="The package version requirement can never be satisfied",
         ):
             _get_required_version(project_dict=project_dict, verify_version=True)
+
+
+class TestDeprecations:
+
+    def test_jsonschema_validate(self) -> None:
+        project_dict: Dict[str, Any] = {}
+
+        event_catcher = EventCatcher(GenericJSONSchemaValidationDeprecation)
+        get_event_manager().add_callback(event_catcher.catch)
+
+        jsonschema_validate(
+            schema=project_schema(), json=project_dict, file_path="dbt_project.yml"
+        )
+
+        assert len(event_catcher.caught_events) == 1
+        assert "'name' is a required property at top level" in event_catcher.caught_events[0].info.msg  # type: ignore
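The asserted text `'name' is a required property` is the stock message the `jsonschema` package produces for a missing required key; dbt's `jsonschema_validate` evidently surfaces such violations as `GenericJSONSchemaValidationDeprecation` events rather than raising. A standalone illustration with the real `jsonschema` library (the schema literal is illustrative, not dbt's project schema):

from jsonschema import Draft7Validator

schema = {
    "type": "object",
    "required": ["name"],
    "properties": {"name": {"type": "string"}},
}

for error in Draft7Validator(schema).iter_errors({}):
    print(error.message)              # 'name' is a required property
    print(list(error.absolute_path))  # [] -> reported as "at top level"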
@@ -4,6 +4,7 @@ from argparse import Namespace
 from copy import deepcopy
 from unittest import mock

+import pytest
 import yaml

 from dbt import tracking
@@ -514,7 +515,7 @@ class SchemaParserSourceTest(SchemaParserTest):
     @mock.patch("dbt.parser.sources.get_adapter")
     def test_parse_source_custom_freshness_at_source(self, _):
         block = self.file_block_for(SOURCE_CUSTOM_FRESHNESS_AT_SOURCE, "test_one.yml")
-        dct = yaml_from_file(block.file)
+        dct = yaml_from_file(block.file, validate=True)
         self.parser.parse_file(block, dct)
         unpatched_src_default = self.parser.manifest.sources["source.snowplow.my_source.my_table"]
         src_default = self.source_patcher.parse_source(unpatched_src_default)
@@ -525,7 +526,7 @@ class SchemaParserSourceTest(SchemaParserTest):
         block = self.file_block_for(
             SOURCE_CUSTOM_FRESHNESS_AT_SOURCE_FIELD_AT_TABLE, "test_one.yml"
         )
-        dct = yaml_from_file(block.file)
+        dct = yaml_from_file(block.file, validate=True)
         self.parser.parse_file(block, dct)
         unpatched_src_default = self.parser.manifest.sources["source.snowplow.my_source.my_table"]
         src_default = self.source_patcher.parse_source(unpatched_src_default)
@@ -537,7 +538,7 @@ class SchemaParserSourceTest(SchemaParserTest):
         block = self.file_block_for(
             SOURCE_FIELD_AT_SOURCE_CUSTOM_FRESHNESS_AT_TABLE, "test_one.yml"
         )
-        dct = yaml_from_file(block.file)
+        dct = yaml_from_file(block.file, validate=True)
         self.parser.parse_file(block, dct)
         unpatched_src_default = self.parser.manifest.sources["source.snowplow.my_source.my_table"]
         src_default = self.source_patcher.parse_source(unpatched_src_default)
@@ -546,7 +547,7 @@ class SchemaParserSourceTest(SchemaParserTest):
     @mock.patch("dbt.parser.sources.get_adapter")
     def test_parse_source_field_at_custom_freshness_both_at_table_fails(self, _):
         block = self.file_block_for(SOURCE_FIELD_AT_CUSTOM_FRESHNESS_BOTH_AT_TABLE, "test_one.yml")
-        dct = yaml_from_file(block.file)
+        dct = yaml_from_file(block.file, validate=True)
         self.parser.parse_file(block, dct)
         unpatched_src_default = self.parser.manifest.sources["source.snowplow.my_source.my_table"]
         with self.assertRaises(ParsingError):
@@ -557,7 +558,7 @@ class SchemaParserSourceTest(SchemaParserTest):
         block = self.file_block_for(
             SOURCE_FIELD_AT_CUSTOM_FRESHNESS_BOTH_AT_SOURCE, "test_one.yml"
         )
-        dct = yaml_from_file(block.file)
+        dct = yaml_from_file(block.file, validate=True)
         self.parser.parse_file(block, dct)
         unpatched_src_default = self.parser.manifest.sources["source.snowplow.my_source.my_table"]
         with self.assertRaises(ParsingError):
@@ -565,7 +566,7 @@ class SchemaParserSourceTest(SchemaParserTest):

     def test__parse_basic_source(self):
         block = self.file_block_for(SINGLE_TABLE_SOURCE, "test_one.yml")
-        dct = yaml_from_file(block.file)
+        dct = yaml_from_file(block.file, validate=True)
         self.parser.parse_file(block, dct)
         self.assert_has_manifest_lengths(self.parser.manifest, sources=1)
         src = list(self.parser.manifest.sources.values())[0]
@@ -579,7 +580,7 @@ class SchemaParserSourceTest(SchemaParserTest):
     @mock.patch("dbt.parser.sources.get_adapter")
     def test__parse_basic_source_meta(self, mock_get_adapter):
         block = self.file_block_for(MULTIPLE_TABLE_SOURCE_META, "test_one.yml")
-        dct = yaml_from_file(block.file)
+        dct = yaml_from_file(block.file, validate=True)
         self.parser.parse_file(block, dct)
         self.assert_has_manifest_lengths(self.parser.manifest, sources=2)

@@ -633,7 +634,7 @@ class SchemaParserSourceTest(SchemaParserTest):
     def test__parse_basic_source_tests(self):
         block = self.file_block_for(SINGLE_TABLE_SOURCE_TESTS, "test_one.yml")
         self.parser.manifest.files[block.file.file_id] = block.file
-        dct = yaml_from_file(block.file)
+        dct = yaml_from_file(block.file, validate=True)
         self.parser.parse_file(block, dct)
         self.assertEqual(len(self.parser.manifest.nodes), 0)
         self.assertEqual(len(self.parser.manifest.sources), 1)
@@ -717,15 +718,16 @@ class SchemaParserModelsTest(SchemaParserTest):

     def test__read_basic_model_tests(self):
         block = self.yaml_block_for(SINGLE_TABLE_MODEL_TESTS, "test_one.yml")
-        dct = yaml_from_file(block.file)
+        dct = yaml_from_file(block.file, validate=True)
         self.parser.parse_file(block, dct)
         self.assertEqual(len(list(self.parser.manifest.sources)), 0)
         self.assertEqual(len(list(self.parser.manifest.nodes)), 4)

+    @pytest.mark.skip(reason="skipping until model freshness is documented and does something")
     def test__parse_model_freshness(self):
         block = self.file_block_for(SINGLE_TALBE_MODEL_FRESHNESS, "test_one.yml")
         self.parser.manifest.files[block.file.file_id] = block.file
-        dct = yaml_from_file(block.file)
+        dct = yaml_from_file(block.file, validate=True)
         self.parser.parse_file(block, dct)
         self.assert_has_manifest_lengths(self.parser.manifest, nodes=1)

@@ -735,10 +737,11 @@ class SchemaParserModelsTest(SchemaParserTest):
             count=1, period="day", depends_on=ModelFreshnessDependsOnOptions.any
         )

+    @pytest.mark.skip(reason="skipping until model freshness is documented and does something")
     def test__parse_model_freshness_depend_on(self):
         block = self.file_block_for(SINGLE_TALBE_MODEL_FRESHNESS_ONLY_DEPEND_ON, "test_one.yml")
         self.parser.manifest.files[block.file.file_id] = block.file
-        dct = yaml_from_file(block.file)
+        dct = yaml_from_file(block.file, validate=True)
         self.parser.parse_file(block, dct)
         self.assert_has_manifest_lengths(self.parser.manifest, nodes=1)
         assert self.parser.manifest.nodes[
@@ -749,7 +752,7 @@ class SchemaParserModelsTest(SchemaParserTest):

     def test__read_basic_model_tests_wrong_severity(self):
         block = self.yaml_block_for(SINGLE_TABLE_MODEL_TESTS_WRONG_SEVERITY, "test_one.yml")
-        dct = yaml_from_file(block.file)
+        dct = yaml_from_file(block.file, validate=True)
         with self.assertRaisesRegex(
             SchemaConfigError, "Severity must be either 'warn' or 'error'. Got 'WARNING'"
         ):
@@ -758,7 +761,7 @@ class SchemaParserModelsTest(SchemaParserTest):
     def test__parse_basic_model_tests(self):
         block = self.file_block_for(SINGLE_TABLE_MODEL_TESTS, "test_one.yml")
         self.parser.manifest.files[block.file.file_id] = block.file
-        dct = yaml_from_file(block.file)
+        dct = yaml_from_file(block.file, validate=True)
         self.parser.parse_file(block, dct)
         self.assert_has_manifest_lengths(self.parser.manifest, nodes=4)

@@ -883,7 +886,7 @@ class SchemaParserVersionedModels(SchemaParserTest):

     def test__read_versioned_model_tests(self):
         block = self.yaml_block_for(MULTIPLE_TABLE_VERSIONED_MODEL_TESTS, "test_one.yml")
-        dct = yaml_from_file(block.file)
+        dct = yaml_from_file(block.file, validate=True)
         self.parser.parse_file(block, dct)
         self.assertEqual(len(list(self.parser.manifest.sources)), 0)
         self.assertEqual(len(list(self.parser.manifest.nodes)), 5)
@@ -891,7 +894,7 @@ class SchemaParserVersionedModels(SchemaParserTest):
     def test__parse_versioned_model_tests(self):
         block = self.file_block_for(MULTIPLE_TABLE_VERSIONED_MODEL_TESTS, "test_one.yml")
         self.parser.manifest.files[block.file.file_id] = block.file
-        dct = yaml_from_file(block.file)
+        dct = yaml_from_file(block.file, validate=True)
         self.parser.parse_file(block, dct)
         self.assert_has_manifest_lengths(self.parser.manifest, nodes=5)

@@ -973,7 +976,7 @@ class SchemaParserVersionedModels(SchemaParserTest):
     def test__parsed_versioned_models(self):
         block = self.file_block_for(MULTIPLE_TABLE_VERSIONED_MODEL, "test_one.yml")
         self.parser.manifest.files[block.file.file_id] = block.file
-        dct = yaml_from_file(block.file)
+        dct = yaml_from_file(block.file, validate=True)
         self.parser.parse_file(block, dct)
         self.assert_has_manifest_lengths(self.parser.manifest, nodes=2)

@@ -982,7 +985,7 @@ class SchemaParserVersionedModels(SchemaParserTest):
             MULTIPLE_TABLE_VERSIONED_MODEL_CONTRACT_ENFORCED, "test_one.yml"
         )
         self.parser.manifest.files[block.file.file_id] = block.file
-        dct = yaml_from_file(block.file)
+        dct = yaml_from_file(block.file, validate=True)
         self.parser.parse_file(block, dct)
         self.assert_has_manifest_lengths(self.parser.manifest, nodes=2)
         for node in self.parser.manifest.nodes.values():
@@ -992,7 +995,7 @@ class SchemaParserVersionedModels(SchemaParserTest):
     def test__parsed_versioned_models_v0(self):
         block = self.file_block_for(MULTIPLE_TABLE_VERSIONED_MODEL_V0, "test_one.yml")
         self.parser.manifest.files[block.file.file_id] = block.file
-        dct = yaml_from_file(block.file)
+        dct = yaml_from_file(block.file, validate=True)
         self.parser.parse_file(block, dct)
         self.assert_has_manifest_lengths(self.parser.manifest, nodes=2)

@@ -1001,7 +1004,7 @@ class SchemaParserVersionedModels(SchemaParserTest):
             MULTIPLE_TABLE_VERSIONED_MODEL_V0_LATEST_VERSION, "test_one.yml"
         )
         self.parser.manifest.files[block.file.file_id] = block.file
-        dct = yaml_from_file(block.file)
+        dct = yaml_from_file(block.file, validate=True)
         self.parser.parse_file(block, dct)
         self.assert_has_manifest_lengths(self.parser.manifest, nodes=2)

@@ -135,7 +135,6 @@ sample_values = [
     core_types.ProjectCreated(project_name=""),
     # D - Deprecations ======================
     core_types.PackageRedirectDeprecation(old_name="", new_name=""),
-    core_types.PackageRedirectDeprecationSummary(occurrences=2, show_debug_hint=True),
     core_types.PackageInstallPathDeprecation(),
     core_types.ConfigSourcePathDeprecation(deprecated_path="", exp_path=""),
     core_types.ConfigDataPathDeprecation(deprecated_path="", exp_path=""),
@@ -160,6 +159,13 @@ sample_values = [
     core_types.MFTimespineWithoutYamlConfigurationDeprecation(),
     core_types.MFCumulativeTypeParamsDeprecation(),
     core_types.MicrobatchMacroOutsideOfBatchesDeprecation(),
+    core_types.GenericJSONSchemaValidationDeprecation(violation="", key_path="", file=""),
+    core_types.UnexpectedJinjaBlockDeprecation(msg="", file=""),
+    core_types.DuplicateYAMLKeysDeprecation(duplicate_description="", file=""),
+    core_types.CustomTopLevelKeyDeprecation(msg="", file=""),
+    core_types.CustomKeyInConfigDeprecation(key="", key_path="", file=""),
+    core_types.CustomKeyInObjectDeprecation(key="", key_path="", file=""),
+    core_types.DeprecationsSummary(summaries=[], show_all_hint=True),
     # E - DB Adapter ======================
     adapter_types.AdapterEventDebug(),
     adapter_types.AdapterEventInfo(),