Mirror of https://github.com/dbt-labs/dbt-core, synced 2025-12-19 06:31:27 +00:00

Compare commits: poc/microb...cl/graph-r
22 Commits
| Author | SHA1 | Date |
|---|---|---|
| | db0535f64c | |
| | 63262e93cb | |
| | 374412af53 | |
| | 47848b8ea8 | |
| | 3d09872a56 | |
| | dfa7d06526 | |
| | 7f57dd5a30 | |
| | 56bfbeaedd | |
| | 1dd26e79af | |
| | 86223609dd | |
| | 21a46332f1 | |
| | ff2726c3b5 | |
| | 014444dc18 | |
| | 25c2042dc9 | |
| | 0a160fc27a | |
| | c598741262 | |
| | f9c2b9398f | |
| | cab6dabbc7 | |
| | e1621ebc54 | |
| | cd90d4493c | |
| | 560d151dcd | |
| | 229c537748 | |
.changes/unreleased/Features-20240719-161841.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Features
+body: Support ref and source in foreign key constraint expressions, bump dbt-common minimum to 1.6
+time: 2024-07-19T16:18:41.434278-04:00
+custom:
+  Author: michelleark
+  Issue: "8062"
.changes/unreleased/Features-20240722-202238.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Features
+body: Support new semantic layer time spine configs to enable sub-daily granularity.
+time: 2024-07-22T20:22:38.258249-07:00
+custom:
+  Author: courtneyholcomb
+  Issue: "10475"
.changes/unreleased/Fixes-20240610-200522.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Use model alias for the CTE identifier generated during ephemeral materialization
+time: 2024-06-10T20:05:22.510814008Z
+custom:
+  Author: jeancochrane
+  Issue: "5273"
.changes/unreleased/Fixes-20240714-100254.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix typing for artifact schemas
+time: 2024-07-14T10:02:54.452099+09:00
+custom:
+  Author: nakamichiworks
+  Issue: "10442"
.changes/unreleased/Fixes-20240731-095152.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: fix all_constraints access, disabled node parsing of non-uniquely named resources
+time: 2024-07-31T09:51:52.751135-04:00
+custom:
+  Author: michelleark gshank
+  Issue: "10509"
.changes/unreleased/Fixes-20240806-172110.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Propagate measure label when using create_metrics
+time: 2024-08-06T17:21:10.265494-07:00
+custom:
+  Author: aliceliu
+  Issue: "10536"
.changes/unreleased/Fixes-20240806-194843.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: respect --quiet and --warn-error-options for flag deprecations
+time: 2024-08-06T19:48:43.399453-04:00
+custom:
+  Author: michelleark
+  Issue: "10105"
.changes/unreleased/Under the Hood-20240806-155406.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Move from minimal-snowplow-tracker fork back to snowplow-tracker
+time: 2024-08-06T15:54:06.422444-04:00
+custom:
+  Author: peterallenwebb
+  Issue: "8409"
.changes/unreleased/Under the Hood-20240809-130234.yaml (new file)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Improve speed of tree traversal when finding children, increasing build speed for some selectors
+time: 2024-08-09T13:02:34.759905-07:00
+custom:
+  Author: ttusing
+  Issue: "10434"
.github/pull_request_template.md (vendored)
@@ -1,7 +1,7 @@
-resolves #
+Resolves #

 <!---
-  Include the number of the issue addressed by this PR above if applicable.
+  Include the number of the issue addressed by this PR above, if applicable.
   PRs for code changes without an associated issue *will not be merged*.
   See CONTRIBUTING.md for more information.

@@ -26,8 +26,8 @@ resolves #

 ### Checklist

-- [ ] I have read [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md) and understand what's expected of me
-- [ ] I have run this code in development and it appears to resolve the stated issue
-- [ ] This PR includes tests, or tests are not required/relevant for this PR
-- [ ] This PR has no interface changes (e.g. macros, cli, logs, json artifacts, config files, adapter interface, etc) or this PR has already received feedback and approval from Product or DX
-- [ ] This PR includes [type annotations](https://docs.python.org/3/library/typing.html) for new and modified functions
+- [ ] I have read [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md) and understand what's expected of me.
+- [ ] I have run this code in development, and it appears to resolve the stated issue.
+- [ ] This PR includes tests, or tests are not required or relevant for this PR.
+- [ ] This PR has no interface changes (e.g., macros, CLI, logs, JSON artifacts, config files, adapter interface, etc.) or this PR has already received feedback and approval from Product or DX.
+- [ ] This PR includes [type annotations](https://docs.python.org/3/library/typing.html) for new and modified functions.
@@ -15,6 +15,7 @@ repos:
       args: [--unsafe]
   - id: check-json
   - id: end-of-file-fixer
+    exclude: schemas/dbt/manifest/
   - id: trailing-whitespace
     exclude_types:
       - "markdown"
@@ -170,9 +170,9 @@ Finally, you can also run a specific test or group of tests using [`pytest`](htt

 ```sh
 # run all unit tests in a file
-python3 -m pytest tests/unit/test_base_column.py
+python3 -m pytest tests/unit/test_invocation_id.py
 # run a specific unit test
-python3 -m pytest tests/unit/test_base_column.py::TestNumericType::test__numeric_type
+python3 -m pytest tests/unit/test_invocation_id.py::TestInvocationId::test_invocation_id
 # run specific Postgres functional tests
 python3 -m pytest tests/functional/sources
 ```
Makefile
@@ -144,3 +144,7 @@ help: ## Show this help message.
 	@echo
 	@echo 'options:'
 	@echo 'use USE_DOCKER=true to run target in a docker container'
+
+.PHONY: json_schema
+json_schema: ## Update generated JSON schema using code changes.
+	scripts/collect-artifact-schema.py --path schemas
@@ -46,7 +46,7 @@ from dbt.artifacts.resources.v1.metric import (
     MetricTimeWindow,
     MetricTypeParams,
 )
-from dbt.artifacts.resources.v1.model import Model, ModelConfig
+from dbt.artifacts.resources.v1.model import Model, ModelConfig, TimeSpine
 from dbt.artifacts.resources.v1.owner import Owner
 from dbt.artifacts.resources.v1.saved_query import (
     Export,
@@ -10,6 +10,7 @@ from dbt_common.contracts.config.properties import AdditionalPropertiesMixin
 from dbt_common.contracts.constraints import ColumnLevelConstraint
 from dbt_common.contracts.util import Mergeable
 from dbt_common.dataclass_schema import ExtensibleDbtClassMixin, dbtClassMixin
+from dbt_semantic_interfaces.type_enums import TimeGranularity

 NodeVersion = Union[str, float]

@@ -66,6 +67,7 @@ class ColumnInfo(AdditionalPropertiesMixin, ExtensibleDbtClassMixin):
     quote: Optional[bool] = None
     tags: List[str] = field(default_factory=list)
     _extra: Dict[str, Any] = field(default_factory=dict)
+    granularity: Optional[TimeGranularity] = None


 @dataclass
@@ -11,6 +11,7 @@ from dbt.artifacts.resources.v1.components import (
 from dbt.artifacts.resources.v1.config import NodeConfig
 from dbt_common.contracts.config.base import MergeBehavior
 from dbt_common.contracts.constraints import ModelLevelConstraint
+from dbt_common.dataclass_schema import dbtClassMixin


 @dataclass
@@ -21,6 +22,11 @@ class ModelConfig(NodeConfig):
     )


+@dataclass
+class TimeSpine(dbtClassMixin):
+    standard_granularity_column: str
+
+
 @dataclass
 class Model(CompiledResource):
     resource_type: Literal[NodeType.Model]
@@ -32,6 +38,7 @@ class Model(CompiledResource):
     deprecation_date: Optional[datetime] = None
     defer_relation: Optional[DeferRelation] = None
     primary_key: List[str] = field(default_factory=list)
+    time_spine: Optional[TimeSpine] = None

     def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None):
         dct = super().__post_serialize__(dct, context)
@@ -77,8 +77,11 @@ class BaseArtifactMetadata(dbtClassMixin):
 # remote-compile-result
 # remote-execution-result
 # remote-run-result
+S = TypeVar("S", bound="VersionedSchema")
+
+
 def schema_version(name: str, version: int):
-    def inner(cls: Type[VersionedSchema]):
+    def inner(cls: Type[S]):
         cls.dbt_schema_version = SchemaVersion(
             name=name,
             version=version,
@@ -15,7 +15,7 @@ from dbt.cli.resolvers import default_log_path, default_project_dir
 from dbt.cli.types import Command as CliCommand
 from dbt.config.project import read_project_flags
 from dbt.contracts.project import ProjectFlags
-from dbt.deprecations import renamed_env_var
+from dbt.deprecations import fire_buffered_deprecations, renamed_env_var
 from dbt.events import ALL_EVENT_NAMES
 from dbt_common import ui
 from dbt_common.clients import jinja

@@ -355,6 +355,8 @@ class Flags:
         # not get pickled when written to disk as json.
         object.__delattr__(self, "deprecated_env_var_warnings")

+        fire_buffered_deprecations()
+
     @classmethod
     def from_dict(cls, command: CliCommand, args_dict: Dict[str, Any]) -> "Flags":
         command_arg_list = command_params(command, args_dict)
@@ -1,11 +1,13 @@
-from typing import Any, Dict, Optional
+from typing import Any, Dict, List, Optional, Union

 import jinja2

-from dbt.exceptions import MacroNamespaceNotStringError
+from dbt.artifacts.resources import RefArgs
+from dbt.exceptions import MacroNamespaceNotStringError, ParsingError
 from dbt_common.clients.jinja import get_environment
 from dbt_common.exceptions.macros import MacroNameNotStringError
 from dbt_common.tests import test_caching_enabled
 from dbt_extractor import ExtractionError, py_extract_from_source  # type: ignore

 _TESTING_MACRO_CACHE: Optional[Dict[str, Any]] = {}

@@ -153,3 +155,39 @@ def statically_parse_adapter_dispatch(func_call, ctx, db_wrapper):
             possible_macro_calls.append(f"{package_name}.{func_name}")

     return possible_macro_calls
+
+
+def statically_parse_ref_or_source(expression: str) -> Union[RefArgs, List[str]]:
+    """
+    Returns a RefArgs or List[str] object, corresponding to ref or source respectively, given an input jinja expression.
+
+    input: str representing how input node is referenced in tested model sql
+      * examples:
+        - "ref('my_model_a')"
+        - "ref('my_model_a', version=3)"
+        - "ref('package', 'my_model_a', version=3)"
+        - "source('my_source_schema', 'my_source_name')"
+
+    If input is not a well-formed jinja ref or source expression, a ParsingError is raised.
+    """
+    ref_or_source: Union[RefArgs, List[str]]
+
+    try:
+        statically_parsed = py_extract_from_source(f"{{{{ {expression} }}}}")
+    except ExtractionError:
+        raise ParsingError(f"Invalid jinja expression: {expression}")
+
+    if statically_parsed.get("refs"):
+        raw_ref = list(statically_parsed["refs"])[0]
+        ref_or_source = RefArgs(
+            package=raw_ref.get("package"),
+            name=raw_ref.get("name"),
+            version=raw_ref.get("version"),
+        )
+    elif statically_parsed.get("sources"):
+        source_name, source_table_name = list(statically_parsed["sources"])[0]
+        ref_or_source = [source_name, source_table_name]
+    else:
+        raise ParsingError(f"Invalid ref or source expression: {expression}")
+
+    return ref_or_source
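The new `statically_parse_ref_or_source` helper is self-contained enough to exercise directly. A minimal usage sketch (assuming an environment where dbt-core and its `RefArgs` artifact class are importable; the model and source names are made up):

```python
from dbt.artifacts.resources import RefArgs
from dbt.clients.jinja_static import statically_parse_ref_or_source

# A ref expression comes back as a RefArgs object...
parsed = statically_parse_ref_or_source("ref('my_model_a', version=3)")
assert isinstance(parsed, RefArgs)
assert parsed.name == "my_model_a" and parsed.version == 3

# ...while a source expression comes back as a [source_name, table_name] list.
parsed = statically_parse_ref_or_source("source('my_source_schema', 'my_source_name')")
assert parsed == ["my_source_schema", "my_source_name"]
```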
@@ -21,6 +21,7 @@ from dbt.contracts.graph.nodes import (
     InjectedCTE,
     ManifestNode,
     ManifestSQLNode,
+    ModelNode,
     SeedNode,
     UnitTestDefinition,
     UnitTestNode,
@@ -29,12 +30,15 @@ from dbt.events.types import FoundStats, WritingInjectedSQLForNode
 from dbt.exceptions import (
     DbtInternalError,
     DbtRuntimeError,
+    ForeignKeyConstraintToSyntaxError,
     GraphDependencyNotFoundError,
+    ParsingError,
 )
 from dbt.flags import get_flags
 from dbt.graph import Graph
 from dbt.node_types import ModelLanguage, NodeType
 from dbt_common.clients.system import make_directory
+from dbt_common.contracts.constraints import ConstraintType
 from dbt_common.events.contextvars import get_node_info
 from dbt_common.events.format import pluralize
 from dbt_common.events.functions import fire_event

@@ -371,7 +375,7 @@ class Compiler:

         _extend_prepended_ctes(prepended_ctes, new_prepended_ctes)

-        new_cte_name = self.add_ephemeral_prefix(cte_model.name)
+        new_cte_name = self.add_ephemeral_prefix(cte_model.identifier)
         rendered_sql = cte_model._pre_injected_sql or cte_model.compiled_code
         sql = f" {new_cte_name} as (\n{rendered_sql}\n)"

@@ -437,8 +441,31 @@ class Compiler:
         relation_name = str(relation_cls.create_from(self.config, node))
         node.relation_name = relation_name

+        # Compile 'ref' and 'source' expressions in foreign key constraints
+        if isinstance(node, ModelNode):
+            for constraint in node.all_constraints:
+                if constraint.type == ConstraintType.foreign_key and constraint.to:
+                    constraint.to = self._compile_relation_for_foreign_key_constraint_to(
+                        manifest, node, constraint.to
+                    )
+
         return node

+    def _compile_relation_for_foreign_key_constraint_to(
+        self, manifest: Manifest, node: ManifestSQLNode, to_expression: str
+    ) -> str:
+        try:
+            foreign_key_node = manifest.find_node_from_ref_or_source(to_expression)
+        except ParsingError:
+            raise ForeignKeyConstraintToSyntaxError(node, to_expression)
+
+        if not foreign_key_node:
+            raise GraphDependencyNotFoundError(node, to_expression)
+
+        adapter = get_adapter(self.config)
+        relation_name = str(adapter.Relation.create_from(self.config, foreign_key_node))
+        return relation_name
+
     # This method doesn't actually "compile" any of the nodes. That is done by the
     # "compile_node" method. This creates a Linker and builds the networkx graph,
     # writes out the graph.gpickle file, and prints the stats, returning a Graph object.
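The one-line `cte_model.name` → `cte_model.identifier` change is the fix from the `Fixes-20240610-200522` changelog entry above: the injected CTE is now named after the model's alias when one is configured. A standalone sketch of the behavior, with a hypothetical stand-in for the node (dbt's real ephemeral prefix is `__dbt__cte__`):

```python
class CteModel:
    """Hypothetical stand-in for an ephemeral model node."""

    name = "my_model"            # resource name, derived from the .sql file
    alias = "my_model_aliased"   # configured via the 'alias' model config

    @property
    def identifier(self) -> str:
        # like dbt, prefer the alias when one is set
        return self.alias or self.name


def add_ephemeral_prefix(name: str) -> str:
    return f"__dbt__cte__{name}"


cte_model = CteModel()
# before the fix the CTE was __dbt__cte__my_model; now the alias is respected
assert add_ephemeral_prefix(cte_model.identifier) == "__dbt__cte__my_model_aliased"
```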
@@ -821,8 +821,8 @@ def read_project_flags(project_dir: str, profiles_dir: str) -> ProjectFlags:

     if profile_project_flags:
         # This can't use WARN_ERROR or WARN_ERROR_OPTIONS because they're in
-        # the config that we're loading. Uses special "warn" method.
-        deprecations.warn("project-flags-moved")
+        # the config that we're loading. Uses special "buffer" method and fired after flags are initialized in preflight.
+        deprecations.buffer("project-flags-moved")
         project_flags = profile_project_flags

     if project_flags is not None:
@@ -1,3 +1,5 @@
+from dbt_semantic_interfaces.type_enums import TimeGranularity
+
 DEFAULT_ENV_PLACEHOLDER = "DBT_DEFAULT_PLACEHOLDER"

 SECRET_PLACEHOLDER = "$$$DBT_SECRET_START$$${}$$$DBT_SECRET_END$$$"
@@ -15,6 +17,8 @@ DEPENDENCIES_FILE_NAME = "dependencies.yml"
 PACKAGE_LOCK_FILE_NAME = "package-lock.yml"
 MANIFEST_FILE_NAME = "manifest.json"
 SEMANTIC_MANIFEST_FILE_NAME = "semantic_manifest.json"
-TIME_SPINE_MODEL_NAME = "metricflow_time_spine"
+LEGACY_TIME_SPINE_MODEL_NAME = "metricflow_time_spine"
+LEGACY_TIME_SPINE_GRANULARITY = TimeGranularity.DAY
+MINIMUM_REQUIRED_TIME_SPINE_GRANULARITY = TimeGranularity.DAY
 PARTIAL_PARSE_FILE_NAME = "partial_parse.msgpack"
 PACKAGE_LOCK_HASH_KEY = "sha1_hash"
@@ -974,6 +974,7 @@ class ProviderContext(ManifestContext):
             table = agate_helper.from_csv(path, text_columns=column_types, delimiter=delimiter)
         except ValueError as e:
             raise LoadAgateTableValueError(e, node=self.model)
+        # this is used by some adapters
         table.original_abspath = os.path.abspath(path)
         return table
@@ -32,9 +32,10 @@ from dbt.adapters.exceptions import (
 from dbt.adapters.factory import get_adapter_package_names

 # to preserve import paths
-from dbt.artifacts.resources import BaseResource, DeferRelation, NodeVersion
+from dbt.artifacts.resources import BaseResource, DeferRelation, NodeVersion, RefArgs
 from dbt.artifacts.resources.v1.config import NodeConfig
 from dbt.artifacts.schemas.manifest import ManifestMetadata, UniqueID, WritableManifest
+from dbt.clients.jinja_static import statically_parse_ref_or_source
 from dbt.contracts.files import (
     AnySourceFile,
     FileHash,
@@ -412,11 +413,11 @@ class DisabledLookup(dbtClassMixin):
         self.storage: Dict[str, Dict[PackageName, List[Any]]] = {}
         self.populate(manifest)

-    def populate(self, manifest):
+    def populate(self, manifest: "Manifest"):
         for node in list(chain.from_iterable(manifest.disabled.values())):
             self.add_node(node)

-    def add_node(self, node):
+    def add_node(self, node: GraphMemberNode) -> None:
         if node.search_name not in self.storage:
             self.storage[node.search_name] = {}
         if node.package_name not in self.storage[node.search_name]:
@@ -426,8 +427,12 @@ class DisabledLookup(dbtClassMixin):
     # This should return a list of disabled nodes. It's different from
     # the other Lookup functions in that it returns full nodes, not just unique_ids
     def find(
-        self, search_name, package: Optional[PackageName], version: Optional[NodeVersion] = None
-    ):
+        self,
+        search_name,
+        package: Optional[PackageName],
+        version: Optional[NodeVersion] = None,
+        resource_types: Optional[List[NodeType]] = None,
+    ) -> Optional[List[Any]]:
         if version:
             search_name = f"{search_name}.v{version}"
@@ -436,16 +441,29 @@ class DisabledLookup(dbtClassMixin):

         pkg_dct: Mapping[PackageName, List[Any]] = self.storage[search_name]

+        nodes = []
         if package is None:
             if not pkg_dct:
                 return None
             else:
-                return next(iter(pkg_dct.values()))
+                nodes = next(iter(pkg_dct.values()))
         elif package in pkg_dct:
-            return pkg_dct[package]
+            nodes = pkg_dct[package]
         else:
             return None

+        if resource_types is None:
+            return nodes
+        else:
+            new_nodes = []
+            for node in nodes:
+                if node.resource_type in resource_types:
+                    new_nodes.append(node)
+            if not new_nodes:
+                return None
+            else:
+                return new_nodes
+

 class AnalysisLookup(RefableLookup):
     _lookup_types: ClassVar[set] = set([NodeType.Analysis])
@@ -1294,7 +1312,12 @@ class Manifest(MacroMethods, dbtClassMixin):

         # it's possible that the node is disabled
         if disabled is None:
-            disabled = self.disabled_lookup.find(target_model_name, pkg, target_model_version)
+            disabled = self.disabled_lookup.find(
+                target_model_name,
+                pkg,
+                version=target_model_version,
+                resource_types=REFABLE_NODE_TYPES,
+            )

         if disabled:
             return Disabled(disabled[0])
@@ -1635,6 +1658,22 @@ class Manifest(MacroMethods, dbtClassMixin):

     # end of methods formerly in ParseResult

+    def find_node_from_ref_or_source(
+        self, expression: str
+    ) -> Optional[Union[ModelNode, SourceDefinition]]:
+        ref_or_source = statically_parse_ref_or_source(expression)
+
+        node = None
+        if isinstance(ref_or_source, RefArgs):
+            node = self.ref_lookup.find(
+                ref_or_source.name, ref_or_source.package, ref_or_source.version, self
+            )
+        else:
+            source_name, source_table_name = ref_or_source[0], ref_or_source[1]
+            node = self.source_lookup.find(f"{source_name}.{source_table_name}", None, self)
+
+        return node
+
     # Provide support for copy.deepcopy() - we just need to avoid the lock!
     # pickle and deepcopy use this. It returns a callable object used to
     # create the initial version of the object and a tuple of arguments
@@ -58,6 +58,7 @@ from dbt.artifacts.resources import SingularTest as SingularTestResource
 from dbt.artifacts.resources import Snapshot as SnapshotResource
 from dbt.artifacts.resources import SourceDefinition as SourceDefinitionResource
 from dbt.artifacts.resources import SqlOperation as SqlOperationResource
+from dbt.artifacts.resources import TimeSpine
 from dbt.artifacts.resources import UnitTestDefinition as UnitTestDefinitionResource
 from dbt.contracts.graph.model_config import UnitTestNodeConfig
 from dbt.contracts.graph.node_args import ModelNodeArgs
@@ -85,7 +86,11 @@ from dbt.node_types import (
     NodeType,
 )
 from dbt_common.clients.system import write_file
-from dbt_common.contracts.constraints import ConstraintType
+from dbt_common.contracts.constraints import (
+    ColumnLevelConstraint,
+    ConstraintType,
+    ModelLevelConstraint,
+)
 from dbt_common.events.contextvars import set_log_contextvars
 from dbt_common.events.functions import warn_or_error
@@ -489,6 +494,18 @@ class ModelNode(ModelResource, CompiledNode):
     def materialization_enforces_constraints(self) -> bool:
         return self.config.materialized in ["table", "incremental"]

+    @property
+    def all_constraints(self) -> List[Union[ModelLevelConstraint, ColumnLevelConstraint]]:
+        constraints: List[Union[ModelLevelConstraint, ColumnLevelConstraint]] = []
+        for model_level_constraint in self.constraints:
+            constraints.append(model_level_constraint)
+
+        for column in self.columns.values():
+            for column_level_constraint in column.constraints:
+                constraints.append(column_level_constraint)
+
+        return constraints
+
     def infer_primary_key(self, data_tests: List["GenericTestNode"]) -> List[str]:
         """
         Infers the columns that can be used as primary key of a model in the following order:
@@ -1609,6 +1626,7 @@ class ParsedNodePatch(ParsedPatch):
     latest_version: Optional[NodeVersion]
     constraints: List[Dict[str, Any]]
     deprecation_date: Optional[datetime]
+    time_spine: Optional[TimeSpine] = None


 @dataclass
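The new `all_constraints` property simply flattens model-level constraints and every column's constraints into one list, which the compiler and parser elsewhere in this diff iterate when resolving foreign keys. The shape of that flattening, on hypothetical plain-dict stand-ins:

```python
# model-level constraints first, then each column's constraints, in order
model_constraints = [{"type": "primary_key", "columns": ["id"]}]
columns = {
    "id": {"constraints": [{"type": "foreign_key", "to": "ref('dim_x')"}]},
    "name": {"constraints": []},
}

all_constraints = list(model_constraints)
for column in columns.values():
    all_constraints.extend(column["constraints"])

assert len(all_constraints) == 2  # one model-level, one column-level
```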
@@ -1,10 +1,19 @@
-from dbt.constants import TIME_SPINE_MODEL_NAME
+from typing import List, Optional
+
+from dbt.constants import (
+    LEGACY_TIME_SPINE_GRANULARITY,
+    LEGACY_TIME_SPINE_MODEL_NAME,
+    MINIMUM_REQUIRED_TIME_SPINE_GRANULARITY,
+)
+from dbt.contracts.graph.manifest import Manifest
+from dbt.contracts.graph.nodes import ModelNode
 from dbt.events.types import SemanticValidationFailure
 from dbt.exceptions import ParsingError
 from dbt_common.clients.system import write_file
 from dbt_common.events.base_types import EventLevel
 from dbt_common.events.functions import fire_event
 from dbt_semantic_interfaces.implementations.metric import PydanticMetric
+from dbt_semantic_interfaces.implementations.node_relation import PydanticNodeRelation
 from dbt_semantic_interfaces.implementations.project_configuration import (
     PydanticProjectConfiguration,
 )
@@ -13,8 +22,12 @@ from dbt_semantic_interfaces.implementations.semantic_manifest import (
     PydanticSemanticManifest,
 )
 from dbt_semantic_interfaces.implementations.semantic_model import PydanticSemanticModel
+from dbt_semantic_interfaces.implementations.time_spine import (
+    PydanticTimeSpine,
+    PydanticTimeSpinePrimaryColumn,
+)
 from dbt_semantic_interfaces.implementations.time_spine_table_configuration import (
-    PydanticTimeSpineTableConfiguration,
+    PydanticTimeSpineTableConfiguration as LegacyTimeSpine,
 )
 from dbt_semantic_interfaces.type_enums import TimeGranularity
 from dbt_semantic_interfaces.validations.semantic_manifest_validator import (
@@ -23,7 +36,7 @@ from dbt_semantic_interfaces.validations.semantic_manifest_validator import (


 class SemanticManifest:
-    def __init__(self, manifest) -> None:
+    def __init__(self, manifest: Manifest) -> None:
         self.manifest = manifest

     def validate(self) -> bool:
@@ -59,8 +72,50 @@ class SemanticManifest:
         write_file(file_path, json)

     def _get_pydantic_semantic_manifest(self) -> PydanticSemanticManifest:
+        pydantic_time_spines: List[PydanticTimeSpine] = []
+        minimum_time_spine_granularity: Optional[TimeGranularity] = None
+        for node in self.manifest.nodes.values():
+            if not (isinstance(node, ModelNode) and node.time_spine):
+                continue
+            time_spine = node.time_spine
+            standard_granularity_column = None
+            for column in node.columns.values():
+                if column.name == time_spine.standard_granularity_column:
+                    standard_granularity_column = column
+                    break
+            # Assertions needed for type checking
+            if not standard_granularity_column:
+                raise ParsingError(
+                    "Expected to find time spine standard granularity column in model columns, but did not. "
+                    "This should have been caught in YAML parsing."
+                )
+            if not standard_granularity_column.granularity:
+                raise ParsingError(
+                    "Expected to find granularity set for time spine standard granularity column, but did not. "
+                    "This should have been caught in YAML parsing."
+                )
+            pydantic_time_spine = PydanticTimeSpine(
+                node_relation=PydanticNodeRelation(
+                    alias=node.alias,
+                    schema_name=node.schema,
+                    database=node.database,
+                    relation_name=node.relation_name,
+                ),
+                primary_column=PydanticTimeSpinePrimaryColumn(
+                    name=time_spine.standard_granularity_column,
+                    time_granularity=standard_granularity_column.granularity,
+                ),
+            )
+            pydantic_time_spines.append(pydantic_time_spine)
+            if (
+                not minimum_time_spine_granularity
+                or standard_granularity_column.granularity.to_int()
+                < minimum_time_spine_granularity.to_int()
+            ):
+                minimum_time_spine_granularity = standard_granularity_column.granularity
+
         project_config = PydanticProjectConfiguration(
-            time_spine_table_configurations=[],
+            time_spine_table_configurations=[], time_spines=pydantic_time_spines
         )
         pydantic_semantic_manifest = PydanticSemanticManifest(
             metrics=[], semantic_models=[], project_configuration=project_config
@@ -79,24 +134,39 @@ class SemanticManifest:
                     PydanticSavedQuery.parse_obj(saved_query.to_dict())
                 )

-        # Look for time-spine table model and create time spine table configuration
         if self.manifest.semantic_models:
-            # Get model for time_spine_table
-            model = self.manifest.ref_lookup.find(TIME_SPINE_MODEL_NAME, None, None, self.manifest)
-            if not model:
-                raise ParsingError(
-                    "The semantic layer requires a 'metricflow_time_spine' model in the project, but none was found. "
-                    "Guidance on creating this model can be found on our docs site ("
-                    "https://docs.getdbt.com/docs/build/metricflow-time-spine) "
-                )
-            # Create time_spine_table_config, set it in project_config, and add to semantic manifest
-            time_spine_table_config = PydanticTimeSpineTableConfiguration(
-                location=model.relation_name,
-                column_name="date_day",
-                grain=TimeGranularity.DAY,
+            legacy_time_spine_model = self.manifest.ref_lookup.find(
+                LEGACY_TIME_SPINE_MODEL_NAME, None, None, self.manifest
             )
-            pydantic_semantic_manifest.project_configuration.time_spine_table_configurations = [
-                time_spine_table_config
-            ]
+            if legacy_time_spine_model:
+                if (
+                    not minimum_time_spine_granularity
+                    or LEGACY_TIME_SPINE_GRANULARITY.to_int()
+                    < minimum_time_spine_granularity.to_int()
+                ):
+                    minimum_time_spine_granularity = LEGACY_TIME_SPINE_GRANULARITY
+
+            # If no time spines have been configured at DAY or smaller AND legacy time spine model does not exist, error.
+            if (
+                not minimum_time_spine_granularity
+                or minimum_time_spine_granularity.to_int()
+                > MINIMUM_REQUIRED_TIME_SPINE_GRANULARITY.to_int()
+            ):
+                raise ParsingError(
+                    "The semantic layer requires a time spine model with granularity DAY or smaller in the project, "
+                    "but none was found. Guidance on creating this model can be found on our docs site "
+                    "(https://docs.getdbt.com/docs/build/metricflow-time-spine)."  # TODO: update docs link when available!
+                )
+
+            # For backward compatibility: if legacy time spine exists, include it in the manifest.
+            if legacy_time_spine_model:
+                legacy_time_spine = LegacyTimeSpine(
+                    location=legacy_time_spine_model.relation_name,
+                    column_name="date_day",
+                    grain=LEGACY_TIME_SPINE_GRANULARITY,
+                )
+                pydantic_semantic_manifest.project_configuration.time_spine_table_configurations = [
+                    legacy_time_spine
+                ]

         return pydantic_semantic_manifest
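The granularity bookkeeping above reduces to: track the finest granularity across all configured time spines (including the legacy `metricflow_time_spine` model), then error unless something at DAY or finer exists. A standalone sketch using hypothetical integer ranks in place of `TimeGranularity.to_int()` (finer grains are assumed to rank lower, matching the comparisons in the diff):

```python
from typing import List, Optional

HOUR, DAY, MONTH = 7, 10, 12  # hypothetical to_int() ranks, finest first
MINIMUM_REQUIRED = DAY        # mirrors MINIMUM_REQUIRED_TIME_SPINE_GRANULARITY

def check_time_spines(configured: List[int]) -> int:
    minimum: Optional[int] = None
    for granularity in configured:
        if minimum is None or granularity < minimum:
            minimum = granularity
    if minimum is None or minimum > MINIMUM_REQUIRED:
        raise ValueError("need a time spine with granularity DAY or smaller")
    return minimum

assert check_time_spines([MONTH, HOUR]) == HOUR  # a sub-daily spine satisfies the check
```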
@@ -116,6 +116,7 @@ class HasColumnAndTestProps(HasColumnProps):
 class UnparsedColumn(HasColumnAndTestProps):
     quote: Optional[bool] = None
     tags: List[str] = field(default_factory=list)
+    granularity: Optional[str] = None  # str is really a TimeGranularity Enum


 @dataclass
@@ -206,6 +207,11 @@ class UnparsedNodeUpdate(HasConfig, HasColumnTests, HasColumnAndTestProps, HasYa
     access: Optional[str] = None


+@dataclass
+class UnparsedTimeSpine(dbtClassMixin):
+    standard_granularity_column: str
+
+
 @dataclass
 class UnparsedModelUpdate(UnparsedNodeUpdate):
     quote_columns: Optional[bool] = None
@@ -213,6 +219,7 @@ class UnparsedModelUpdate(UnparsedNodeUpdate):
     latest_version: Optional[NodeVersion] = None
     versions: Sequence[UnparsedVersion] = field(default_factory=list)
     deprecation_date: Optional[datetime.datetime] = None
+    time_spine: Optional[UnparsedTimeSpine] = None

     def __post_init__(self) -> None:
         if self.latest_version:
@@ -234,6 +241,26 @@ class UnparsedModelUpdate(UnparsedNodeUpdate):

         self.deprecation_date = normalize_date(self.deprecation_date)

+        if self.time_spine:
+            columns = (
+                self.get_columns_for_version(self.latest_version)
+                if self.latest_version
+                else self.columns
+            )
+            column_names_to_columns = {column.name: column for column in columns}
+            if self.time_spine.standard_granularity_column not in column_names_to_columns:
+                raise ParsingError(
+                    f"Time spine standard granularity column must be defined on the model. Got invalid "
+                    f"column name '{self.time_spine.standard_granularity_column}' for model '{self.name}'. Valid names"
+                    f"{' for latest version' if self.latest_version else ''}: {list(column_names_to_columns.keys())}."
+                )
+            column = column_names_to_columns[self.time_spine.standard_granularity_column]
+            if not column.granularity:
+                raise ParsingError(
+                    f"Time spine standard granularity column must have a granularity defined. Please add one for "
+                    f"column '{self.time_spine.standard_granularity_column}' in model '{self.name}'."
+                )
+
     def get_columns_for_version(self, version: NodeVersion) -> List[UnparsedColumn]:
         if version not in self._version_map:
             raise DbtInternalError(
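The `__post_init__` validation above enforces two things: the configured `standard_granularity_column` must exist on the (version-resolved) columns, and that column must declare a `granularity`. The same two checks on plain-dict stand-ins (a minimal sketch, not dbt's real classes):

```python
# hypothetical parsed YAML for a time spine model
columns = {"date_day": {"granularity": "day"}, "id": {}}
time_spine = {"standard_granularity_column": "date_day"}

column = columns.get(time_spine["standard_granularity_column"])
if column is None:
    raise ValueError("time spine standard granularity column must be defined on the model")
if not column.get("granularity"):
    raise ValueError("time spine standard granularity column must have a granularity defined")
```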
@@ -1,9 +1,9 @@
 import abc
-from typing import ClassVar, Dict, List, Optional, Set
+from typing import Callable, ClassVar, Dict, List, Optional, Set

 import dbt.tracking
 from dbt.events import types as core_types
-from dbt_common.events.functions import fire_event, warn_or_error
+from dbt_common.events.functions import warn_or_error


 class DBTDeprecation:
@@ -107,15 +107,6 @@ class ProjectFlagsMovedDeprecation(DBTDeprecation):
     _name = "project-flags-moved"
     _event = "ProjectFlagsMovedDeprecation"

-    def show(self, *args, **kwargs) -> None:
-        if self.name not in active_deprecations:
-            event = self.event(**kwargs)
-            # We can't do warn_or_error because the ProjectFlags
-            # is where that is set up and we're just reading it.
-            fire_event(event)
-            self.track_deprecation_warn()
-            active_deprecations.add(self.name)
-

 class PackageMaterializationOverrideDeprecation(DBTDeprecation):
     _name = "package-materialization-override"
@@ -155,6 +146,13 @@ def warn(name, *args, **kwargs):
     deprecations[name].show(*args, **kwargs)


+def buffer(name: str, *args, **kwargs):
+    def show_callback():
+        deprecations[name].show(*args, **kwargs)
+
+    buffered_deprecations.append(show_callback)
+
+
 # these are globally available
 # since modules are only imported once, active_deprecations is a singleton
@@ -178,6 +176,13 @@ deprecations_list: List[DBTDeprecation] = [

 deprecations: Dict[str, DBTDeprecation] = {d.name: d for d in deprecations_list}

+buffered_deprecations: List[Callable] = []
+

 def reset_deprecations():
     active_deprecations.clear()
+
+
+def fire_buffered_deprecations():
+    [dep_fn() for dep_fn in buffered_deprecations]
+    buffered_deprecations.clear()
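The buffering mechanism exists because `read_project_flags` runs before `--quiet` and `--warn-error-options` are known, so the warning is captured as a closure and replayed from `Flags.__init__` via `fire_buffered_deprecations()`. The pattern in isolation (a simplified stand-in, not dbt's event machinery):

```python
from typing import Callable, List

buffered_deprecations: List[Callable[[], None]] = []

def buffer(name: str) -> None:
    # capture the warning instead of firing it immediately
    buffered_deprecations.append(lambda: print(f"deprecation warning: {name}"))

def fire_buffered_deprecations() -> None:
    for dep_fn in buffered_deprecations:
        dep_fn()
    buffered_deprecations.clear()

buffer("project-flags-moved")  # fired while flags are still being loaded
# ... flag initialization completes; --quiet / --warn-error-options now apply ...
fire_buffered_deprecations()   # replayed with full flag context
```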
@@ -136,6 +136,18 @@ class GraphDependencyNotFoundError(CompilationError):
         return msg


+class ForeignKeyConstraintToSyntaxError(CompilationError):
+    def __init__(self, node, expression: str) -> None:
+        self.expression = expression
+        self.node = node
+        super().__init__(msg=self.get_message())
+
+    def get_message(self) -> str:
+        msg = f"'{self.node.unique_id}' defines a foreign key constraint 'to' expression which is not valid 'ref' or 'source' syntax: {self.expression}."
+
+        return msg
+
+
 # client level exceptions
@@ -59,18 +59,40 @@ class Graph:
     def select_children(
         self, selected: Set[UniqueId], max_depth: Optional[int] = None
     ) -> Set[UniqueId]:
-        descendants: Set[UniqueId] = set()
-        for node in selected:
-            descendants.update(self.descendants(node, max_depth))
-        return descendants
+        """Returns all nodes which are descendants of the 'selected' set.
+        Nodes in the 'selected' set are counted as children only if
+        they are descendants of other nodes in the 'selected' set."""
+        children: Set[UniqueId] = set()
+        i = 0
+        while len(selected) > 0 and (max_depth is None or i < max_depth):
+            next_layer: Set[UniqueId] = set()
+            for node in selected:
+                next_layer.update(self.descendants(node, 1))
+            next_layer = next_layer - children  # Avoid re-searching
+            children.update(next_layer)
+            selected = next_layer
+            i += 1
+
+        return children

     def select_parents(
         self, selected: Set[UniqueId], max_depth: Optional[int] = None
     ) -> Set[UniqueId]:
-        ancestors: Set[UniqueId] = set()
-        for node in selected:
-            ancestors.update(self.ancestors(node, max_depth))
-        return ancestors
+        """Returns all nodes which are ancestors of the 'selected' set.
+        Nodes in the 'selected' set are counted as parents only if
+        they are ancestors of other nodes in the 'selected' set."""
+        parents: Set[UniqueId] = set()
+        i = 0
+        while len(selected) > 0 and (max_depth is None or i < max_depth):
+            next_layer: Set[UniqueId] = set()
+            for node in selected:
+                next_layer.update(self.ancestors(node, 1))
+            next_layer = next_layer - parents  # Avoid re-searching
+            parents.update(next_layer)
+            selected = next_layer
+            i += 1
+
+        return parents

     def select_successors(self, selected: Set[UniqueId]) -> Set[UniqueId]:
         successors: Set[UniqueId] = set()
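The rewrite replaces one full `descendants()` walk per selected node with a breadth-first expansion that never re-expands a node already seen, which is what speeds up selectors on graphs with heavily shared subtrees (the `Under the Hood-20240809-130234` entry above). The same pattern on a plain adjacency dict:

```python
from typing import Dict, List, Optional, Set

def select_children(
    edges: Dict[str, List[str]], selected: Set[str], max_depth: Optional[int] = None
) -> Set[str]:
    children: Set[str] = set()
    depth = 0
    while selected and (max_depth is None or depth < max_depth):
        next_layer: Set[str] = set()
        for node in selected:
            next_layer.update(edges.get(node, []))
        next_layer -= children  # avoid re-searching nodes found in earlier layers
        children |= next_layer
        selected = next_layer
        depth += 1
    return children

edges = {"a": ["b", "c"], "b": ["d"], "c": ["d"], "d": []}
assert select_children(edges, {"a"}) == {"b", "c", "d"}  # "d" is expanded only once
```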
@@ -4,13 +4,17 @@ from typing import Dict, Generator, List, Optional, Set

 import networkx as nx  # type: ignore

+from dbt.artifacts.schemas.run import RunResult
 from dbt.contracts.graph.manifest import Manifest
 from dbt.contracts.graph.nodes import (
     Exposure,
     GraphMemberNode,
     Metric,
+    ModelNode,
     SourceDefinition,
 )
+from dbt.contracts.state import PreviousState
+from dbt.graph.selector_spec import SelectionSpec
+from dbt.node_types import NodeType

 from .graph import UniqueId
@@ -212,3 +216,80 @@ class GraphQueue:
         with self.lock:
             self.some_task_done.wait()
             return self.inner.unfinished_tasks
+
+
+class ExecutionQueue:
+    """
+    ExecutionQueue manages which nodes to execute in what order, based on the supplied inputs.
+    It is responsible for managing the queue of nodes to execute, and for marking nodes as
+    done when they have been executed.
+    """
+
+    def __init__(
+        self,
+        manifest: Manifest,
+        previous_state: PreviousState,
+        resource_types: List[NodeType],
+        include_empty_nodes: Optional[bool] = False,
+        selection_spec: Optional[SelectionSpec] = None,
+        fail_fast: Optional[bool] = False,
+    ) -> None:
+        """Create a new ExecutionQueue.
+        Nodes to execute are selected based on the manifest, previous state, selection spec, include_empty_nodes, and resource_types.
+        See Args for more details.
+        Example usage:
+
+            pool = ThreadPool(4)
+            queue = ExecutionQueue(manifest, previous_state, [NodeType.Model, NodeType.Test])
+            def callback(result: RunResult):
+                queue.handle_node_result(result)
+            def run(node: GraphMemberNode):
+                result = node.run()
+                return result
+
+            while queue.count() > 0:
+                node = queue.get()
+                pool.apply_async(run, args=(node,), callback=callback)
+            results = queue.join()
+
+        Args:
+            manifest (Manifest): the manifest of the project
+            previous_state (PreviousState): the previous state of the project, used in state selection.
+            resource_types (List[NodeType]): the types of resources to include in the selection.
+            include_empty_nodes (Optional[bool]): whether to include nodes that do not have values in the selection. Defaults to False.
+            selection_spec (Optional[SelectionSpec]): the selection spec to use. Defaults to None.
+            fail_fast (Optional[bool]): when set to True, the queue will stop execution after the first error. Defaults to False.
+        """
+        pass
+
+    def count(self) -> int:
+        """
+        Returns:
+            int: the number of nodes in the queue (excluding in-progress nodes)
+        """
+        return 0
+
+    def handle_node_result(self, result: RunResult) -> None:
+        """Given a RunResult, mark the node as done and update the queue to make more nodes available.
+
+        Args:
+            result (RunResult): the result of executing the node that just finished.
+        """
+        pass
+
+    def get(self, block: bool = True) -> GraphMemberNode:
+        """
+        Get the next node to execute.
+
+        Args:
+            block (bool, optional): whether to block until a node is available. Defaults to True.
+        """
+        return ModelNode()  # type: ignore
+
+    def join(self) -> List[RunResult]:
+        """Wait for all nodes to finish executing, and return the results of all nodes.
+
+        Returns:
+            List[RunResult]: the results of all nodes.
+        """
+        return []
@@ -18,6 +18,7 @@ from dbt.exceptions import ParsingError
 from dbt.parser.search import FileBlock
 from dbt_common.contracts.constraints import ColumnLevelConstraint, ConstraintType
 from dbt_common.exceptions import DbtInternalError
+from dbt_semantic_interfaces.type_enums import TimeGranularity


 def trimmed(inp: str) -> str:
@@ -185,13 +186,12 @@ class ParserRef:
         self.column_info: Dict[str, ColumnInfo] = {}

     def _add(self, column: HasColumnProps) -> None:
-        tags: List[str] = []
-        tags.extend(getattr(column, "tags", ()))
-        quote: Optional[bool]
+        tags: List[str] = getattr(column, "tags", [])
+        quote: Optional[bool] = None
+        granularity: Optional[TimeGranularity] = None
         if isinstance(column, UnparsedColumn):
             quote = column.quote
-        else:
-            quote = None
+            granularity = TimeGranularity(column.granularity) if column.granularity else None

         if any(
             c
@@ -209,6 +209,7 @@ class ParserRef:
             tags=tags,
             quote=quote,
             _extra=column.extra,
+            granularity=granularity,
         )

     @classmethod
@@ -612,7 +612,7 @@ class SemanticModelParser(YamlReader):
     ) -> None:
         unparsed_metric = UnparsedMetric(
             name=measure.name,
-            label=measure.name,
+            label=measure.label or measure.name,
             type="simple",
             type_params=UnparsedMetricTypeParams(measure=measure.name, expr=measure.name),
             description=measure.description or f"Metric created from measure {measure.name}",
@@ -5,6 +5,9 @@ from dataclasses import dataclass, field
 from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, Type, TypeVar

 from dbt import deprecations
+from dbt.artifacts.resources import RefArgs
+from dbt.artifacts.resources.v1.model import TimeSpine
+from dbt.clients.jinja_static import statically_parse_ref_or_source
 from dbt.clients.yaml_helper import load_yaml_text
 from dbt.config import RuntimeConfig
 from dbt.context.configured import SchemaYamlVars, generate_schema_yml_context
@@ -66,18 +69,20 @@ from dbt_common.events.functions import warn_or_error
 from dbt_common.exceptions import DbtValidationError
 from dbt_common.utils import deep_merge

-schema_file_keys = (
-    "models",
-    "seeds",
-    "snapshots",
-    "sources",
-    "macros",
-    "analyses",
-    "exposures",
-    "metrics",
-    "semantic_models",
-    "saved_queries",
-)
+schema_file_keys_to_resource_types = {
+    "models": NodeType.Model,
+    "seeds": NodeType.Seed,
+    "snapshots": NodeType.Snapshot,
+    "sources": NodeType.Source,
+    "macros": NodeType.Macro,
+    "analyses": NodeType.Analysis,
+    "exposures": NodeType.Exposure,
+    "metrics": NodeType.Metric,
+    "semantic_models": NodeType.SemanticModel,
+    "saved_queries": NodeType.SavedQuery,
+}
+
+schema_file_keys = list(schema_file_keys_to_resource_types.keys())


 # ===============================================================================
@@ -617,9 +622,16 @@ class NodePatchParser(PatchParser[NodeTarget, ParsedNodePatch], Generic[NodeTarg
         # could possibly skip creating one. Leaving here for now for
         # code consistency.
         deprecation_date: Optional[datetime.datetime] = None
+        time_spine: Optional[TimeSpine] = None
         if isinstance(block.target, UnparsedModelUpdate):
             deprecation_date = block.target.deprecation_date
+            time_spine = (
+                TimeSpine(
+                    standard_granularity_column=block.target.time_spine.standard_granularity_column
+                )
+                if block.target.time_spine
+                else None
+            )
         patch = ParsedNodePatch(
             name=block.target.name,
             original_file_path=block.target.original_file_path,
@@ -635,6 +647,7 @@ class NodePatchParser(PatchParser[NodeTarget, ParsedNodePatch], Generic[NodeTarg
             latest_version=None,
             constraints=block.target.constraints,
             deprecation_date=deprecation_date,
+            time_spine=time_spine,
         )
         assert isinstance(self.yaml.file, SchemaSourceFile)
         source_file: SchemaSourceFile = self.yaml.file
@@ -667,7 +680,10 @@ class NodePatchParser(PatchParser[NodeTarget, ParsedNodePatch], Generic[NodeTarg
         # handle disabled nodes
         if unique_id is None:
             # Node might be disabled. Following call returns list of matching disabled nodes
-            found_nodes = self.manifest.disabled_lookup.find(patch.name, patch.package_name)
+            resource_type = schema_file_keys_to_resource_types[patch.yaml_key]
+            found_nodes = self.manifest.disabled_lookup.find(
+                patch.name, patch.package_name, resource_types=[resource_type]
+            )
             if found_nodes:
                 if len(found_nodes) > 1 and patch.config.get("enabled"):
                     # There are multiple disabled nodes for this model and the schema file wants to enable one.
@@ -799,7 +815,9 @@ class ModelPatchParser(NodePatchParser[UnparsedModelUpdate]):

         if versioned_model_unique_id is None:
             # Node might be disabled. Following call returns list of matching disabled nodes
-            found_nodes = self.manifest.disabled_lookup.find(versioned_model_name, None)
+            found_nodes = self.manifest.disabled_lookup.find(
+                versioned_model_name, None, resource_types=[NodeType.Model]
+            )
             if found_nodes:
                 if len(found_nodes) > 1 and target.config.get("enabled"):
                     # There are multiple disabled nodes for this model and the schema file wants to enable one.
@@ -900,6 +918,11 @@ class ModelPatchParser(NodePatchParser[UnparsedModelUpdate]):

     def patch_node_properties(self, node, patch: "ParsedNodePatch") -> None:
         super().patch_node_properties(node, patch)
+
+        # Remaining patch properties are only relevant to ModelNode objects
+        if not isinstance(node, ModelNode):
+            return
+
         node.version = patch.version
         node.latest_version = patch.latest_version
         node.deprecation_date = patch.deprecation_date
@@ -913,9 +936,10 @@ class ModelPatchParser(NodePatchParser[UnparsedModelUpdate]):
         )
         # These two will have to be reapplied after config is built for versioned models
         self.patch_constraints(node, patch.constraints)
+        self.patch_time_spine(node, patch.time_spine)
         node.build_contract_checksum()

-    def patch_constraints(self, node, constraints) -> None:
+    def patch_constraints(self, node: ModelNode, constraints: List[Dict[str, Any]]) -> None:
         contract_config = node.config.get("contract")
         if contract_config.enforced is True:
             self._validate_constraint_prerequisites(node)
@@ -930,6 +954,29 @@ class ModelPatchParser(NodePatchParser[UnparsedModelUpdate]):

         self._validate_pk_constraints(node, constraints)
         node.constraints = [ModelLevelConstraint.from_dict(c) for c in constraints]
+        self._process_constraints_refs_and_sources(node)
+
+    def _process_constraints_refs_and_sources(self, model_node: ModelNode) -> None:
+        """
+        Populate model_node.refs and model_node.sources based on foreign-key constraint references,
+        whether defined at the model-level or column-level.
+        """
+        for constraint in model_node.all_constraints:
+            if constraint.type == ConstraintType.foreign_key and constraint.to:
+                try:
+                    ref_or_source = statically_parse_ref_or_source(constraint.to)
+                except ParsingError:
+                    raise ParsingError(
+                        f"Invalid 'ref' or 'source' syntax on foreign key constraint 'to' on model {model_node.name}: {constraint.to}."
+                    )
+
+                if isinstance(ref_or_source, RefArgs):
+                    model_node.refs.append(ref_or_source)
+                else:
+                    model_node.sources.append(ref_or_source)
+
+    def patch_time_spine(self, node: ModelNode, time_spine: Optional[TimeSpine]) -> None:
+        node.time_spine = time_spine

     def _validate_pk_constraints(
         self, model_node: ModelNode, constraints: List[Dict[str, Any]]
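`_process_constraints_refs_and_sources` is what makes a foreign-key `to` expression participate in lineage: the parsed `RefArgs` or `[source, table]` pair lands in `model_node.refs` / `model_node.sources`, so the referenced node becomes a real graph dependency. A sketch of that dispatch (assumes dbt-core is importable; the model stand-in and expressions are hypothetical):

```python
from dbt.artifacts.resources import RefArgs
from dbt.clients.jinja_static import statically_parse_ref_or_source

class FakeModelNode:
    """Hypothetical stand-in exposing just the .refs / .sources lists."""
    def __init__(self) -> None:
        self.refs: list = []
        self.sources: list = []

model_node = FakeModelNode()
for to_expression in ["ref('dim_customers')", "source('raw', 'customers')"]:
    parsed = statically_parse_ref_or_source(to_expression)
    if isinstance(parsed, RefArgs):
        model_node.refs.append(parsed)       # ref(...) -> model dependency
    else:
        model_node.sources.append(parsed)    # source(...) -> [source_name, table]

assert len(model_node.refs) == 1 and len(model_node.sources) == 1
```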
@@ -59,6 +59,7 @@ setup(
         "networkx>=2.3,<4.0",
         "protobuf>=4.0.0,<5",
         "requests<3.0.0",  # should match dbt-common
+        "snowplow-tracker>=1.0.2,<2.0",
         # ----
         # These packages are major-version-0. Keep upper bounds on upcoming minor versions (which could have breaking changes)
         # and check compatibility / bump in each new minor version of dbt-core.
@@ -68,11 +69,10 @@ setup(
         # These are major-version-0 packages also maintained by dbt-labs.
         # Accept patches but avoid automatically updating past a set minor version range.
         "dbt-extractor>=0.5.0,<=0.6",
-        "minimal-snowplow-tracker>=0.0.2,<0.1",
-        "dbt-semantic-interfaces>=0.6.8,<0.7",
+        "dbt-semantic-interfaces>=0.6.11,<0.7",
         # Minor versions for these are expected to be backwards-compatible
-        "dbt-common>=1.3.0,<2.0",
-        "dbt-adapters>=1.1.1,<2.0",
+        "dbt-common>=1.6.0,<2.0",
+        "dbt-adapters>=1.3.0,<2.0",
         # ----
         # Expect compatibility with all new versions of these packages, so lower bounds only.
         "packaging>20.9",
@@ -5689,4 +5689,4 @@
 },
 "$schema": "http://json-schema.org/draft-07/schema#",
 "$id": "https://schemas.getdbt.com/dbt/manifest/v10.json"
-}
+}
\ No newline at end of file
@@ -7060,4 +7060,4 @@
   }
 },
 "$id": "https://schemas.getdbt.com/dbt/manifest/v11.json"
-}
+}
\ No newline at end of file
File diff suppressed because it is too large
@@ -5981,4 +5981,4 @@
 },
 "$schema": "http://json-schema.org/draft-07/schema#",
 "$id": "https://schemas.getdbt.com/dbt/manifest/v5.json"
-}
+}
\ No newline at end of file
@@ -6206,4 +6206,4 @@
 },
 "$schema": "http://json-schema.org/draft-07/schema#",
 "$id": "https://schemas.getdbt.com/dbt/manifest/v6.json"
-}
+}
\ No newline at end of file
@@ -6572,4 +6572,4 @@
 },
 "$schema": "http://json-schema.org/draft-07/schema#",
 "$id": "https://schemas.getdbt.com/dbt/manifest/v7.json"
-}
+}
\ No newline at end of file
@@ -4431,4 +4431,4 @@
 },
 "$schema": "http://json-schema.org/draft-07/schema#",
 "$id": "https://schemas.getdbt.com/dbt/manifest/v8.json"
-}
+}
\ No newline at end of file
@@ -4962,4 +4962,4 @@
 },
 "$schema": "http://json-schema.org/draft-07/schema#",
 "$id": "https://schemas.getdbt.com/dbt/manifest/v9.json"
-}
+}
\ No newline at end of file
@@ -292,6 +292,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False):
                     "quote": None,
                     "tags": [],
                    "constraints": [],
+                    "granularity": None,
                 },
                 "first_name": {
                     "name": "first_name",
@@ -301,6 +302,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False):
                     "quote": None,
                     "tags": [],
                     "constraints": [],
+                    "granularity": None,
                 },
                 "email": {
                     "name": "email",
@@ -310,6 +312,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False):
                     "quote": None,
                     "tags": [],
                     "constraints": [],
+                    "granularity": None,
                 },
                 "ip_address": {
                     "name": "ip_address",
@@ -319,6 +322,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False):
                     "quote": None,
                     "tags": [],
                     "constraints": [],
+                    "granularity": None,
                 },
                 "updated_at": {
                     "name": "updated_at",
@@ -328,6 +332,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False):
                     "quote": None,
                     "tags": [],
                     "constraints": [],
+                    "granularity": None,
                 },
             },
             "contract": {"checksum": None, "enforced": False, "alias_types": True},
@@ -343,6 +348,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False):
             "access": "protected",
             "version": None,
             "latest_version": None,
+            "time_spine": None,
         },
         "model.test.second_model": {
             "compiled_path": os.path.join(compiled_model_path, "second_model.sql"),
@@ -385,6 +391,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False):
                     "quote": None,
                     "tags": [],
                     "constraints": [],
+                    "granularity": None,
                 },
                 "first_name": {
                     "name": "first_name",
@@ -394,6 +401,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False):
                     "quote": None,
                     "tags": [],
                     "constraints": [],
+                    "granularity": None,
                 },
                 "email": {
                     "name": "email",
@@ -403,6 +411,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False):
                     "quote": None,
                     "tags": [],
                     "constraints": [],
+                    "granularity": None,
                 },
                 "ip_address": {
                     "name": "ip_address",
@@ -412,6 +421,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False):
                     "quote": None,
                     "tags": [],
                     "constraints": [],
+                    "granularity": None,
                 },
                 "updated_at": {
                     "name": "updated_at",
@@ -421,6 +431,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False):
                     "quote": None,
                     "tags": [],
                     "constraints": [],
+                    "granularity": None,
                 },
             },
             "contract": {"checksum": None, "enforced": False, "alias_types": True},
@@ -436,6 +447,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False):
             "access": "protected",
             "version": None,
             "latest_version": None,
+            "time_spine": None,
         },
         "seed.test.seed": {
            "build_path": None,
@@ -468,6 +480,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False):
                     "quote": None,
                     "tags": [],
                     "constraints": [],
+                    "granularity": None,
                 },
                 "first_name": {
                     "name": "first_name",
@@ -477,6 +490,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False):
                     "quote": None,
                     "tags": [],
                     "constraints": [],
+                    "granularity": None,
                 },
                 "email": {
                     "name": "email",
@@ -486,6 +500,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False):
                     "quote": None,
                     "tags": [],
                     "constraints": [],
+                    "granularity": None,
                 },
                 "ip_address": {
                     "name": "ip_address",
@@ -495,6 +510,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False):
                     "quote": None,
                     "tags": [],
                     "constraints": [],
+                    "granularity": None,
                 },
                 "updated_at": {
                     "name": "updated_at",
@@ -504,6 +520,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False):
                     "quote": None,
                     "tags": [],
                     "constraints": [],
+                    "granularity": None,
                 },
             },
             "docs": {"node_color": None, "show": True},
@@ -730,6 +747,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False):
                     "quote": None,
                     "tags": [],
                     "constraints": [],
+                    "granularity": None,
                 }
             },
             "config": {
@@ -957,6 +975,7 @@ def expected_references_manifest(project):
             "version": None,
             "latest_version": None,
             "constraints": [],
+            "time_spine": None,
         },
         "model.test.ephemeral_summary": {
             "alias": "ephemeral_summary",
@@ -972,6 +991,7 @@ def expected_references_manifest(project):
                     "quote": None,
                     "tags": [],
                     "constraints": [],
+                    "granularity": None,
                 },
                 "ct": {
                     "description": "The number of instances of the first name",
@@ -981,6 +1001,7 @@ def expected_references_manifest(project):
                     "quote": None,
                     "tags": [],
                     "constraints": [],
+                    "granularity": None,
                 },
             },
             "config": get_rendered_model_config(materialized="table", group="test_group"),
@@ -1026,6 +1047,7 @@ def expected_references_manifest(project):
             "version": None,
             "latest_version": None,
             "constraints": [],
+            "time_spine": None,
         },
         "model.test.view_summary": {
             "alias": "view_summary",
@@ -1041,6 +1063,7 @@ def expected_references_manifest(project):
                     "quote": None,
                     "tags": [],
                     "constraints": [],
+                    "granularity": None,
                 },
                 "ct": {
                     "description": "The number of instances of the first name",
@@ -1050,6 +1073,7 @@ def expected_references_manifest(project):
                     "quote": None,
                     "tags": [],
                     "constraints": [],
+                    "granularity": None,
                 },
             },
             "config": get_rendered_model_config(),
@@ -1091,6 +1115,7 @@ def expected_references_manifest(project):
             "version": None,
             "latest_version": None,
             "constraints": [],
+            "time_spine": None,
         },
         "seed.test.seed": {
             "alias": "seed",
@@ -1105,6 +1130,7 @@ def expected_references_manifest(project):
                     "quote": None,
                     "tags": [],
                     "constraints": [],
+                    "granularity": None,
                 },
                 "first_name": {
                     "name": "first_name",
@@ -1114,6 +1140,7 @@ def expected_references_manifest(project):
                     "quote": None,
                     "tags": [],
                     "constraints": [],
+                    "granularity": None,
                 },
                 "email": {
                     "name": "email",
@@ -1123,6 +1150,7 @@ def expected_references_manifest(project):
                     "quote": None,
                     "tags": [],
                     "constraints": [],
+                    "granularity": None,
                 },
                 "ip_address": {
                     "name": "ip_address",
@@ -1132,6 +1160,7 @@ def expected_references_manifest(project):
                     "quote": None,
                     "tags": [],
                     "constraints": [],
+                    "granularity": None,
                 },
                 "updated_at": {
                     "name": "updated_at",
@@ -1141,6 +1170,7 @@ def expected_references_manifest(project):
                     "quote": None,
                     "tags": [],
                     "constraints": [],
+                    "granularity": None,
                 },
             },
             "config": get_rendered_seed_config(),
@@ -1219,6 +1249,7 @@ def expected_references_manifest(project):
                     "quote": None,
                     "tags": [],
                     "constraints": [],
+                    "granularity": None,
                 }
             },
             "config": {
@@ -1487,6 +1518,7 @@ def expected_versions_manifest(project):
                     "quote": None,
|
||||
"tags": [],
|
||||
"constraints": [],
|
||||
"granularity": None,
|
||||
},
|
||||
"ct": {
|
||||
"description": "The number of instances of the first name",
|
||||
@@ -1496,6 +1528,7 @@ def expected_versions_manifest(project):
|
||||
"quote": None,
|
||||
"tags": [],
|
||||
"constraints": [],
|
||||
"granularity": None,
|
||||
},
|
||||
},
|
||||
"config": get_rendered_model_config(
|
||||
@@ -1544,6 +1577,7 @@ def expected_versions_manifest(project):
|
||||
"access": "protected",
|
||||
"version": 1,
|
||||
"latest_version": 2,
|
||||
"time_spine": None,
|
||||
},
|
||||
"model.test.versioned_model.v2": {
|
||||
"alias": "versioned_model_v2",
|
||||
@@ -1559,6 +1593,7 @@ def expected_versions_manifest(project):
|
||||
"quote": None,
|
||||
"tags": [],
|
||||
"constraints": [],
|
||||
"granularity": None,
|
||||
},
|
||||
"extra": {
|
||||
"description": "",
|
||||
@@ -1568,6 +1603,7 @@ def expected_versions_manifest(project):
|
||||
"quote": None,
|
||||
"tags": [],
|
||||
"constraints": [],
|
||||
"granularity": None,
|
||||
},
|
||||
},
|
||||
"config": get_rendered_model_config(
|
||||
@@ -1612,6 +1648,7 @@ def expected_versions_manifest(project):
|
||||
"access": "protected",
|
||||
"version": 2,
|
||||
"latest_version": 2,
|
||||
"time_spine": None,
|
||||
},
|
||||
"model.test.ref_versioned_model": {
|
||||
"alias": "ref_versioned_model",
|
||||
@@ -1669,6 +1706,7 @@ def expected_versions_manifest(project):
|
||||
"access": "protected",
|
||||
"version": None,
|
||||
"latest_version": None,
|
||||
"time_spine": None,
|
||||
},
|
||||
"test.test.unique_versioned_model_v1_first_name.6138195dec": {
|
||||
"alias": "unique_versioned_model_v1_first_name",
|
||||
|
||||
@@ -42,6 +42,15 @@ with recursive t(n) as (
select sum(n) from t;
"""

first_ephemeral_model_with_alias_sql = """
{{ config(materialized = 'ephemeral', alias = 'first_alias') }}
select 1 as fun
"""

second_ephemeral_model_with_alias_sql = """
select * from {{ ref('first_ephemeral_model_with_alias') }}
"""

schema_yml = """
version: 2


@@ -10,10 +10,12 @@ from dbt_common.exceptions import DbtRuntimeError
from tests.functional.assertions.test_runner import dbtTestRunner
from tests.functional.compile.fixtures import (
    first_ephemeral_model_sql,
    first_ephemeral_model_with_alias_sql,
    first_model_sql,
    model_multiline_jinja,
    schema_yml,
    second_ephemeral_model_sql,
    second_ephemeral_model_with_alias_sql,
    second_model_sql,
    third_ephemeral_model_sql,
    with_recursive_model_sql,
@@ -128,6 +130,24 @@ class TestEphemeralModels:
        ]


class TestEphemeralModelWithAlias:
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "first_ephemeral_model_with_alias.sql": first_ephemeral_model_with_alias_sql,
            "second_ephemeral_model_with_alias.sql": second_ephemeral_model_with_alias_sql,
        }

    def test_compile(self, project):
        run_dbt(["compile"])

        assert get_lines("second_ephemeral_model_with_alias") == [
            "with __dbt__cte__first_alias as (",
            "select 1 as fun",
            ") select * from __dbt__cte__first_alias",
        ]


class TestCompile:
    @pytest.fixture(scope="class")
    def models(self):

@@ -331,7 +331,7 @@ class TestModelLevelContractEnabledConfigs:

        assert contract_actual_config.enforced is True

        expected_columns = "{'id': ColumnInfo(name='id', description='hello', meta={}, data_type='integer', constraints=[ColumnLevelConstraint(type=<ConstraintType.not_null: 'not_null'>, name=None, expression=None, warn_unenforced=True, warn_unsupported=True, to=None, to_columns=[]), ColumnLevelConstraint(type=<ConstraintType.primary_key: 'primary_key'>, name=None, expression=None, warn_unenforced=True, warn_unsupported=True, to=None, to_columns=[]), ColumnLevelConstraint(type=<ConstraintType.check: 'check'>, name=None, expression='(id > 0)', warn_unenforced=True, warn_unsupported=True, to=None, to_columns=[])], quote=True, tags=[], _extra={}), 'color': ColumnInfo(name='color', description='', meta={}, data_type='string', constraints=[], quote=None, tags=[], _extra={}), 'date_day': ColumnInfo(name='date_day', description='', meta={}, data_type='date', constraints=[], quote=None, tags=[], _extra={})}"
        expected_columns = "{'id': ColumnInfo(name='id', description='hello', meta={}, data_type='integer', constraints=[ColumnLevelConstraint(type=<ConstraintType.not_null: 'not_null'>, name=None, expression=None, warn_unenforced=True, warn_unsupported=True, to=None, to_columns=[]), ColumnLevelConstraint(type=<ConstraintType.primary_key: 'primary_key'>, name=None, expression=None, warn_unenforced=True, warn_unsupported=True, to=None, to_columns=[]), ColumnLevelConstraint(type=<ConstraintType.check: 'check'>, name=None, expression='(id > 0)', warn_unenforced=True, warn_unsupported=True, to=None, to_columns=[])], quote=True, tags=[], _extra={}, granularity=None), 'color': ColumnInfo(name='color', description='', meta={}, data_type='string', constraints=[], quote=None, tags=[], _extra={}, granularity=None), 'date_day': ColumnInfo(name='date_day', description='', meta={}, data_type='date', constraints=[], quote=None, tags=[], _extra={}, granularity=None)}"

        assert expected_columns == str(my_model_columns)

@@ -88,3 +88,47 @@ class TestDisabledConfigs(BaseConfigProject):
        assert len(results) == 2
        results = run_dbt(["test"])
        assert len(results) == 5


my_analysis_sql = """
{{
  config(enabled=False)
}}
select 1 as id
"""


schema_yml = """
models:
  - name: my_analysis
    description: "A Sample model"
    config:
      meta:
        owner: Joe

analyses:
  - name: my_analysis
    description: "A sample analysis"
    config:
      enabled: false
"""


class TestDisabledConfigsSameName:
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "my_analysis.sql": my_analysis_sql,
            "schema.yml": schema_yml,
        }

    @pytest.fixture(scope="class")
    def analyses(self):
        return {
            "my_analysis.sql": my_analysis_sql,
        }

    def test_disabled_analysis(self, project):
        manifest = run_dbt(["parse"])
        assert len(manifest.disabled) == 2
        assert len(manifest.nodes) == 0

tests/functional/conftest.py (Normal file, 4 lines)
@@ -0,0 +1,4 @@
from tests.functional.fixtures.happy_path_fixture import ( # noqa:D
    happy_path_project,
    happy_path_project_files,
)
tests/functional/constraints/fixtures.py (Normal file, 115 lines)
@@ -0,0 +1,115 @@
model_foreign_key_model_schema_yml = """
models:
  - name: my_model
    constraints:
      - type: foreign_key
        columns: [id]
        to: ref('my_model_to')
        to_columns: [id]
    columns:
      - name: id
        data_type: integer
"""


model_foreign_key_source_schema_yml = """
sources:
  - name: test_source
    tables:
      - name: test_table

models:
  - name: my_model
    constraints:
      - type: foreign_key
        columns: [id]
        to: source('test_source', 'test_table')
        to_columns: [id]
    columns:
      - name: id
        data_type: integer
"""


model_foreign_key_model_node_not_found_schema_yml = """
models:
  - name: my_model
    constraints:
      - type: foreign_key
        columns: [id]
        to: ref('doesnt_exist')
        to_columns: [id]
    columns:
      - name: id
        data_type: integer
"""


model_foreign_key_model_invalid_syntax_schema_yml = """
models:
  - name: my_model
    constraints:
      - type: foreign_key
        columns: [id]
        to: invalid
        to_columns: [id]
    columns:
      - name: id
        data_type: integer
"""


model_foreign_key_model_column_schema_yml = """
models:
  - name: my_model
    columns:
      - name: id
        data_type: integer
        constraints:
          - type: foreign_key
            to: ref('my_model_to')
            to_columns: [id]
"""


model_foreign_key_column_invalid_syntax_schema_yml = """
models:
  - name: my_model
    columns:
      - name: id
        data_type: integer
        constraints:
          - type: foreign_key
            to: invalid
            to_columns: [id]
"""


model_foreign_key_column_node_not_found_schema_yml = """
models:
  - name: my_model
    columns:
      - name: id
        data_type: integer
        constraints:
          - type: foreign_key
            to: ref('doesnt_exist')
            to_columns: [id]
"""

model_column_level_foreign_key_source_schema_yml = """
sources:
  - name: test_source
    tables:
      - name: test_table

models:
  - name: my_model
    columns:
      - name: id
        data_type: integer
        constraints:
          - type: foreign_key
            to: source('test_source', 'test_table')
            to_columns: [id]
"""
tests/functional/constraints/test_foreign_key_constraints.py (Normal file, 241 lines)
@@ -0,0 +1,241 @@
import pytest

from dbt.artifacts.resources import RefArgs
from dbt.exceptions import CompilationError, ParsingError
from dbt.tests.util import get_artifact, run_dbt
from dbt_common.contracts.constraints import (
    ColumnLevelConstraint,
    ConstraintType,
    ModelLevelConstraint,
)
from tests.functional.constraints.fixtures import (
    model_column_level_foreign_key_source_schema_yml,
    model_foreign_key_column_invalid_syntax_schema_yml,
    model_foreign_key_column_node_not_found_schema_yml,
    model_foreign_key_model_column_schema_yml,
    model_foreign_key_model_invalid_syntax_schema_yml,
    model_foreign_key_model_node_not_found_schema_yml,
    model_foreign_key_model_schema_yml,
    model_foreign_key_source_schema_yml,
)


class TestModelLevelForeignKeyConstraintToRef:
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "constraints_schema.yml": model_foreign_key_model_schema_yml,
            "my_model.sql": "select 1 as id",
            "my_model_to.sql": "select 1 as id",
        }

    def test_model_level_fk_to(self, project, unique_schema):
        manifest = run_dbt(["parse"])
        node_with_fk_constraint = manifest.nodes["model.test.my_model"]
        assert len(node_with_fk_constraint.constraints) == 1

        parsed_constraint = node_with_fk_constraint.constraints[0]
        assert parsed_constraint == ModelLevelConstraint(
            type=ConstraintType.foreign_key,
            columns=["id"],
            to="ref('my_model_to')",
            to_columns=["id"],
        )
        # Assert model-level constraint ref included in node.depends_on
        assert node_with_fk_constraint.refs == [RefArgs("my_model_to")]
        assert node_with_fk_constraint.depends_on.nodes == ["model.test.my_model_to"]
        assert node_with_fk_constraint.sources == []

        # Assert compilation renders 'to' from the 'ref' to a relation identifier
        run_dbt(["compile"])
        manifest = get_artifact(project.project_root, "target", "manifest.json")
        assert len(manifest["nodes"]["model.test.my_model"]["constraints"]) == 1

        compiled_constraint = manifest["nodes"]["model.test.my_model"]["constraints"][0]
        assert compiled_constraint["to"] == f'"dbt"."{unique_schema}"."my_model_to"'
        # Other constraint fields should remain as parsed
        assert compiled_constraint["to_columns"] == parsed_constraint.to_columns
        assert compiled_constraint["columns"] == parsed_constraint.columns
        assert compiled_constraint["type"] == parsed_constraint.type


class TestModelLevelForeignKeyConstraintToSource:
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "constraints_schema.yml": model_foreign_key_source_schema_yml,
            "my_model.sql": "select 1 as id",
            "my_model_to.sql": "select 1 as id",
        }

    def test_model_level_fk_to(self, project, unique_schema):
        manifest = run_dbt(["parse"])
        node_with_fk_constraint = manifest.nodes["model.test.my_model"]
        assert len(node_with_fk_constraint.constraints) == 1

        parsed_constraint = node_with_fk_constraint.constraints[0]
        assert parsed_constraint == ModelLevelConstraint(
            type=ConstraintType.foreign_key,
            columns=["id"],
            to="source('test_source', 'test_table')",
            to_columns=["id"],
        )
        # Assert model-level constraint source included in node.depends_on
        assert node_with_fk_constraint.refs == []
        assert node_with_fk_constraint.depends_on.nodes == ["source.test.test_source.test_table"]
        assert node_with_fk_constraint.sources == [["test_source", "test_table"]]

        # Assert compilation renders 'to' from the 'source' to a relation identifier
        run_dbt(["compile"])
        manifest = get_artifact(project.project_root, "target", "manifest.json")
        assert len(manifest["nodes"]["model.test.my_model"]["constraints"]) == 1

        compiled_constraint = manifest["nodes"]["model.test.my_model"]["constraints"][0]
        assert compiled_constraint["to"] == '"dbt"."test_source"."test_table"'
        # Other constraint fields should remain as parsed
        assert compiled_constraint["to_columns"] == parsed_constraint.to_columns
        assert compiled_constraint["columns"] == parsed_constraint.columns
        assert compiled_constraint["type"] == parsed_constraint.type


class TestModelLevelForeignKeyConstraintRefNotFoundError:
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "constraints_schema.yml": model_foreign_key_model_node_not_found_schema_yml,
            "my_model.sql": "select 1 as id",
            "my_model_to.sql": "select 1 as id",
        }

    def test_model_level_fk_to_doesnt_exist(self, project):
        with pytest.raises(
            CompilationError, match="depends on a node named 'doesnt_exist' which was not found"
        ):
            run_dbt(["parse"])


class TestModelLevelForeignKeyConstraintRefSyntaxError:
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "constraints_schema.yml": model_foreign_key_model_invalid_syntax_schema_yml,
            "my_model.sql": "select 1 as id",
            "my_model_to.sql": "select 1 as id",
        }

    def test_model_level_fk_to(self, project):
        with pytest.raises(
            ParsingError,
            match="Invalid 'ref' or 'source' syntax on foreign key constraint 'to' on model my_model: invalid",
        ):
            run_dbt(["parse"])


class TestColumnLevelForeignKeyConstraintToRef:
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "constraints_schema.yml": model_foreign_key_model_column_schema_yml,
            "my_model.sql": "select 1 as id",
            "my_model_to.sql": "select 1 as id",
        }

    def test_column_level_fk_to(self, project, unique_schema):
        manifest = run_dbt(["parse"])
        node_with_fk_constraint = manifest.nodes["model.test.my_model"]
        assert len(node_with_fk_constraint.columns["id"].constraints) == 1

        parsed_constraint = node_with_fk_constraint.columns["id"].constraints[0]
        # Assert column-level constraint parsed
        assert parsed_constraint == ColumnLevelConstraint(
            type=ConstraintType.foreign_key, to="ref('my_model_to')", to_columns=["id"]
        )
        # Assert column-level constraint ref included in node.depends_on
        assert node_with_fk_constraint.refs == [RefArgs(name="my_model_to")]
        assert node_with_fk_constraint.sources == []
        assert node_with_fk_constraint.depends_on.nodes == ["model.test.my_model_to"]

        # Assert compilation renders 'to' from the 'ref' to a relation identifier
        run_dbt(["compile"])
        manifest = get_artifact(project.project_root, "target", "manifest.json")
        assert len(manifest["nodes"]["model.test.my_model"]["columns"]["id"]["constraints"]) == 1

        compiled_constraint = manifest["nodes"]["model.test.my_model"]["columns"]["id"][
            "constraints"
        ][0]
        assert compiled_constraint["to"] == f'"dbt"."{unique_schema}"."my_model_to"'
        # Other constraint fields should remain as parsed
        assert compiled_constraint["to_columns"] == parsed_constraint.to_columns
        assert compiled_constraint["type"] == parsed_constraint.type


class TestColumnLevelForeignKeyConstraintToSource:
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "constraints_schema.yml": model_column_level_foreign_key_source_schema_yml,
            "my_model.sql": "select 1 as id",
            "my_model_to.sql": "select 1 as id",
        }

    def test_model_level_fk_to(self, project, unique_schema):
        manifest = run_dbt(["parse"])
        node_with_fk_constraint = manifest.nodes["model.test.my_model"]
        assert len(node_with_fk_constraint.columns["id"].constraints) == 1

        parsed_constraint = node_with_fk_constraint.columns["id"].constraints[0]
        assert parsed_constraint == ColumnLevelConstraint(
            type=ConstraintType.foreign_key,
            to="source('test_source', 'test_table')",
            to_columns=["id"],
        )
        # Assert column-level constraint source included in node.depends_on
        assert node_with_fk_constraint.refs == []
        assert node_with_fk_constraint.depends_on.nodes == ["source.test.test_source.test_table"]
        assert node_with_fk_constraint.sources == [["test_source", "test_table"]]

        # Assert compilation renders 'to' from the 'source' to a relation identifier
        run_dbt(["compile"])
        manifest = get_artifact(project.project_root, "target", "manifest.json")
        assert len(manifest["nodes"]["model.test.my_model"]["columns"]["id"]["constraints"]) == 1

        compiled_constraint = manifest["nodes"]["model.test.my_model"]["columns"]["id"][
            "constraints"
        ][0]
        assert compiled_constraint["to"] == '"dbt"."test_source"."test_table"'
        # Other constraint fields should remain as parsed
        assert compiled_constraint["to_columns"] == parsed_constraint.to_columns
        assert compiled_constraint["type"] == parsed_constraint.type


class TestColumnLevelForeignKeyConstraintRefNotFoundError:
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "constraints_schema.yml": model_foreign_key_column_node_not_found_schema_yml,
            "my_model.sql": "select 1 as id",
            "my_model_to.sql": "select 1 as id",
        }

    def test_model_level_fk_to_doesnt_exist(self, project):
        with pytest.raises(
            CompilationError, match="depends on a node named 'doesnt_exist' which was not found"
        ):
            run_dbt(["parse"])


class TestColumnLevelForeignKeyConstraintRefSyntaxError:
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "constraints_schema.yml": model_foreign_key_column_invalid_syntax_schema_yml,
            "my_model.sql": "select 1 as id",
            "my_model_to.sql": "select 1 as id",
        }

    def test_model_level_fk_to(self, project):
        with pytest.raises(
            ParsingError,
            match="Invalid 'ref' or 'source' syntax on foreign key constraint 'to' on model my_model: invalid.",
        ):
            run_dbt(["parse"])
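A rough sketch (not dbt-core's actual implementation) of the resolution flow the tests above exercise: the constraint's `to` expression is a ref()/source() call, the target node is looked up in the manifest, and at compile time `to` is rewritten to the target's quoted relation name. `find_node_from_ref_or_source` is the manifest helper exercised in the unit tests later in this diff; `resolve_constraint_to` is a hypothetical illustration, not a real dbt function.

def resolve_constraint_to(manifest, expression: str) -> str:
    # e.g. expression = "ref('my_model_to')" or "source('test_source', 'test_table')"
    target = manifest.find_node_from_ref_or_source(expression)
    if target is None:
        # mirrors the "was not found" errors the *NotFoundError tests expect
        raise Exception(f"constraint 'to' target not found for {expression}")
    # e.g. '"dbt"."test_schema"."my_model_to"', as asserted on compiled_constraint["to"]
    return target.relation_name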
@@ -3,7 +3,8 @@ import yaml

import dbt_common
from dbt import deprecations
from dbt.tests.util import run_dbt, write_file
from dbt.tests.util import run_dbt, run_dbt_and_capture, write_file
from dbt_common.exceptions import EventCompilationError
from tests.functional.deprecations.fixtures import (
    bad_name_yaml,
    models_trivial__model_sql,
@@ -143,6 +144,45 @@ class TestProjectFlagsMovedDeprecation:
    def test_profile_config_deprecation(self, project):
        deprecations.reset_deprecations()
        assert deprecations.active_deprecations == set()
        run_dbt(["parse"])
        expected = {"project-flags-moved"}
        assert expected == deprecations.active_deprecations

        _, logs = run_dbt_and_capture(["parse"])

        assert (
            "User config should be moved from the 'config' key in profiles.yml to the 'flags' key in dbt_project.yml."
            in logs
        )
        assert deprecations.active_deprecations == {"project-flags-moved"}


class TestProjectFlagsMovedDeprecationQuiet(TestProjectFlagsMovedDeprecation):
    def test_profile_config_deprecation(self, project):
        deprecations.reset_deprecations()
        assert deprecations.active_deprecations == set()

        _, logs = run_dbt_and_capture(["--quiet", "parse"])

        assert (
            "User config should be moved from the 'config' key in profiles.yml to the 'flags' key in dbt_project.yml."
            not in logs
        )
        assert deprecations.active_deprecations == {"project-flags-moved"}


class TestProjectFlagsMovedDeprecationWarnErrorOptions(TestProjectFlagsMovedDeprecation):
    def test_profile_config_deprecation(self, project):
        deprecations.reset_deprecations()
        with pytest.raises(EventCompilationError):
            run_dbt(["--warn-error-options", "{'include': 'all'}", "parse"])

        with pytest.raises(EventCompilationError):
            run_dbt(
                ["--warn-error-options", "{'include': ['ProjectFlagsMovedDeprecation']}", "parse"]
            )

        _, logs = run_dbt_and_capture(
            ["--warn-error-options", "{'silence': ['ProjectFlagsMovedDeprecation']}", "parse"]
        )
        assert (
            "User config should be moved from the 'config' key in profiles.yml to the 'flags' key in dbt_project.yml."
            not in logs
        )

@@ -91,6 +91,7 @@ class TestGoodDocsBlocks:
"meta": {},
"quote": None,
"tags": [],
"granularity": None,
} == model_data["columns"]["id"]

assert {
@@ -101,6 +102,7 @@ class TestGoodDocsBlocks:
"meta": {},
"quote": None,
"tags": [],
"granularity": None,
} == model_data["columns"]["first_name"]

assert {
@@ -111,6 +113,7 @@ class TestGoodDocsBlocks:
"meta": {},
"quote": None,
"tags": [],
"granularity": None,
} == model_data["columns"]["last_name"]

assert len(model_data["columns"]) == 3
@@ -152,6 +155,7 @@ class TestGoodDocsBlocksAltPath:
"meta": {},
"quote": None,
"tags": [],
"granularity": None,
} == model_data["columns"]["id"]

assert {
@@ -162,6 +166,7 @@ class TestGoodDocsBlocksAltPath:
"meta": {},
"quote": None,
"tags": [],
"granularity": None,
} == model_data["columns"]["first_name"]

assert {
@@ -172,6 +177,7 @@ class TestGoodDocsBlocksAltPath:
"meta": {},
"quote": None,
"tags": [],
"granularity": None,
} == model_data["columns"]["last_name"]

assert len(model_data["columns"]) == 3

@@ -0,0 +1,2 @@
select
    {{ dbt.date_trunc('second', dbt.current_timestamp()) }} as ts_second
@@ -8,6 +8,16 @@ models:
        data_tests:
          - unique
          - not_null
  - name: metricflow_time_spine
    description: Day time spine
    columns:
      - name: date_day
        granularity: day
  - name: metricflow_time_spine_second
    description: Second time spine
    columns:
      - name: ts_second
        granularity: second

sources:
  - name: my_source

tests/functional/list/test_commands.py (Normal file, 105 lines)
@@ -0,0 +1,105 @@
import shutil

import pytest

from dbt.artifacts.resources.types import NodeType
from dbt.cli.main import dbtRunner
from dbt.cli.types import Command
from dbt.events.types import NoNodesSelected
from dbt.tests.util import run_dbt
from tests.utils import EventCatcher

"""
Testing different commands against the happy path fixture

The general flow
1. Declare the commands to be tested
2. Write a parameterized test to ensure a given command produces its associated desired state.
"""

# These are commands we're skipping as they don't make sense or don't work with the
# happy path fixture currently
commands_to_skip = {
    "clone",
    "generate",
    "server",
    "init",
    "list",
    "run-operation",
    "show",
    "snapshot",
    "freshness",
}

# Commands to happy path test
commands = [command.value for command in Command if command.value not in commands_to_skip]


class TestRunCommands:
    @pytest.fixture(scope="class", autouse=True)
    def drop_snapshots(self, happy_path_project, project_root: str) -> None:
        """The snapshots are erroring out, so let's drop them.

        Seems to be database related. Ideally snapshots should work in these tests; it's a bad sign that they don't. That
        may have more to do with our fixture setup than with the source code, though.

        Note that the `happy_path_fixture_files` are a _class_-based fixture. Thus, although this fixture _modifies_ the
        files available to the happy path project, it doesn't affect that fixture for tests in other test classes.
        """

        shutil.rmtree(f"{project_root}/snapshots")

    @pytest.mark.parametrize("dbt_command", [(command,) for command in commands])
    def test_run_commmand(
        self,
        happy_path_project,
        dbt_command,
    ):
        run_dbt([dbt_command])


"""
Testing command interactions with specific node types

The general flow
1. Declare resource (node) types to be tested
2. Write a parameterized test that ensures commands interact successfully with each resource type
"""

# TODO: Figure out which of these are just missing from the happy path fixture vs which ones aren't selectable
skipped_resource_types = {
    "analysis",
    "operation",
    "rpc",
    "sql_operation",
    "doc",
    "macro",
    "exposure",
    "group",
    "unit_test",
    "fixture",
}
resource_types = [
    node_type.value for node_type in NodeType if node_type.value not in skipped_resource_types
]


class TestSelectResourceType:
    @pytest.fixture(scope="function")
    def catcher(self) -> EventCatcher:
        return EventCatcher(event_to_catch=NoNodesSelected)

    @pytest.fixture(scope="function")
    def runner(self, catcher: EventCatcher) -> dbtRunner:
        return dbtRunner(callbacks=[catcher.catch])

    @pytest.mark.parametrize("resource_type", resource_types)
    def test_select_by_resource_type(
        self,
        resource_type: str,
        happy_path_project,
        runner: dbtRunner,
        catcher: EventCatcher,
    ) -> None:
        runner.invoke(["list", "--select", f"resource_type:{resource_type}"])
        assert len(catcher.caught_events) == 0
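For context, a minimal sketch of an event catcher compatible with how it is used above; the real tests.utils.EventCatcher may differ in detail. dbtRunner invokes each registered callback with every fired event message, and the catcher keeps only the event type it was built for.

from dataclasses import dataclass, field
from typing import List, Type

from dbt_common.events.base_types import BaseEvent, EventMsg


@dataclass
class EventCatcherSketch:  # hypothetical stand-in for tests.utils.EventCatcher
    event_to_catch: Type[BaseEvent]
    caught_events: List[EventMsg] = field(default_factory=list)

    def catch(self, event: EventMsg) -> None:
        # Keep only events whose type name matches the one we care about
        if event.info.name == self.event_to_catch.__name__:
            self.caught_events.append(event)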
@@ -133,12 +133,20 @@ class TestList:

def expect_model_output(self):
expectations = {
"name": ("ephemeral", "incremental", "inner", "metricflow_time_spine", "outer"),
"name": (
"ephemeral",
"incremental",
"inner",
"metricflow_time_spine",
"metricflow_time_spine_second",
"outer",
),
"selector": (
"test.ephemeral",
"test.incremental",
"test.sub.inner",
"test.metricflow_time_spine",
"test.metricflow_time_spine_second",
"test.outer",
),
"json": (
@@ -294,6 +302,44 @@ class TestList:
"alias": "metricflow_time_spine",
"resource_type": "model",
},
{
"name": "metricflow_time_spine_second",
"package_name": "test",
"depends_on": {
"nodes": [],
"macros": ["macro.dbt.current_timestamp", "macro.dbt.date_trunc"],
},
"tags": [],
"config": {
"enabled": True,
"group": None,
"materialized": "view",
"post-hook": [],
"tags": [],
"pre-hook": [],
"quoting": {},
"column_types": {},
"persist_docs": {},
"full_refresh": None,
"unique_key": None,
"on_schema_change": "ignore",
"on_configuration_change": "apply",
"database": None,
"schema": None,
"alias": None,
"meta": {},
"grants": {},
"packages": [],
"incremental_strategy": None,
"docs": {"node_color": None, "show": True},
"contract": {"enforced": False, "alias_types": True},
"access": "protected",
},
"original_file_path": normalize("models/metricflow_time_spine_second.sql"),
"unique_id": "model.test.metricflow_time_spine_second",
"alias": "metricflow_time_spine_second",
"resource_type": "model",
},
{
"name": "outer",
"package_name": "test",
@@ -338,6 +384,7 @@ class TestList:
self.dir("models/incremental.sql"),
self.dir("models/sub/inner.sql"),
self.dir("models/metricflow_time_spine.sql"),
self.dir("models/metricflow_time_spine_second.sql"),
self.dir("models/outer.sql"),
),
}
@@ -573,6 +620,7 @@ class TestList:
"test.not_null_outer_id",
"test.unique_outer_id",
"test.metricflow_time_spine",
"test.metricflow_time_spine_second",
"test.t",
"semantic_model:test.my_sm",
"metric:test.total_outer",
@@ -618,6 +666,7 @@ class TestList:
"test.ephemeral",
"test.outer",
"test.metricflow_time_spine",
"test.metricflow_time_spine_second",
"test.incremental",
}

@@ -638,6 +687,7 @@ class TestList:
"test.outer",
"test.sub.inner",
"test.metricflow_time_spine",
"test.metricflow_time_spine_second",
"test.t",
"test.unique_outer_id",
}
@@ -658,6 +708,7 @@ class TestList:
"test.not_null_outer_id",
"test.outer",
"test.metricflow_time_spine",
"test.metricflow_time_spine_second",
"test.sub.inner",
"test.t",
}
@@ -693,6 +744,7 @@ class TestList:
"test.outer",
"test.sub.inner",
"test.metricflow_time_spine",
"test.metricflow_time_spine_second",
"test.t",
"test.unique_outer_id",
}
@@ -707,6 +759,7 @@ class TestList:
"test.outer",
"test.sub.inner",
"test.metricflow_time_spine",
"test.metricflow_time_spine_second",
}
del os.environ["DBT_EXCLUDE_RESOURCE_TYPES"]


@@ -240,6 +240,12 @@ semantic_models:
      agg: sum
      agg_time_dimension: ds
      create_metric: true
    - name: txn_revenue_with_label
      label: "Transaction Revenue with label"
      expr: revenue
      agg: sum
      agg_time_dimension: ds
      create_metric: true
    - name: sum_of_things
      expr: 2
      agg: sum

@@ -38,11 +38,14 @@ class TestSemanticModelParsing:
            semantic_model.node_relation.relation_name
            == f'"dbt"."{project.test_schema}"."fct_revenue"'
        )
        assert len(semantic_model.measures) == 6
        # manifest should have one metric (that was created from a measure)
        assert len(manifest.metrics) == 2
        assert len(semantic_model.measures) == 7
        # manifest should have two metrics created from measures
        assert len(manifest.metrics) == 3
        metric = manifest.metrics["metric.test.txn_revenue"]
        assert metric.name == "txn_revenue"
        metric_with_label = manifest.metrics["metric.test.txn_revenue_with_label"]
        assert metric_with_label.name == "txn_revenue_with_label"
        assert metric_with_label.label == "Transaction Revenue with label"

    def test_semantic_model_error(self, project):
        # Next, modify the default schema.yml to remove the semantic model.
@@ -107,6 +110,7 @@ class TestSemanticModelPartialParsing:

    def test_semantic_model_flipping_create_metric_partial_parsing(self, project):
        generated_metric = "metric.test.txn_revenue"
        generated_metric_with_label = "metric.test.txn_revenue_with_label"
        # First, use the default schema.yml to define our semantic model, and
        # run the dbt parse command
        write_file(schema_yml, project.project_root, "models", "schema.yml")
@@ -117,6 +121,11 @@ class TestSemanticModelPartialParsing:
        # Verify the metric created by `create_metric: true` exists
        metric = result.result.metrics[generated_metric]
        assert metric.name == "txn_revenue"
        assert metric.label == "txn_revenue"

        metric_with_label = result.result.metrics[generated_metric_with_label]
        assert metric_with_label.name == "txn_revenue_with_label"
        assert metric_with_label.label == "Transaction Revenue with label"

        # --- Next, modify the default schema.yml to have no `create_metric: true` ---
        no_create_metric_schema_yml = schema_yml.replace(

tests/functional/time_spines/fixtures.py (Normal file, 86 lines)
@@ -0,0 +1,86 @@
models_people_sql = """
select 1 as id, 'Drew' as first_name, 'Banin' as last_name, 'yellow' as favorite_color, true as loves_dbt, 5 as tenure, current_timestamp as created_at
union all
select 2 as id, 'Jeremy' as first_name, 'Cohen' as last_name, 'indigo' as favorite_color, true as loves_dbt, 4 as tenure, current_timestamp as created_at
union all
select 3 as id, 'Callum' as first_name, 'McCann' as last_name, 'emerald' as favorite_color, true as loves_dbt, 0 as tenure, current_timestamp as created_at
"""

semantic_model_people_yml = """
version: 2

semantic_models:
  - name: semantic_people
    model: ref('people')
    dimensions:
      - name: favorite_color
        type: categorical
      - name: created_at
        type: TIME
        type_params:
          time_granularity: day
    measures:
      - name: years_tenure
        agg: SUM
        expr: tenure
      - name: people
        agg: count
        expr: id
    entities:
      - name: id
        type: primary
    defaults:
      agg_time_dimension: created_at
"""

metricflow_time_spine_sql = """
SELECT to_date('02/20/2023', 'mm/dd/yyyy') as date_day
"""

metricflow_time_spine_second_sql = """
SELECT to_datetime('02/20/2023', 'mm/dd/yyyy hh:mm:ss') as ts_second
"""

valid_time_spines_yml = """
version: 2

models:
  - name: metricflow_time_spine_second
    time_spine:
      standard_granularity_column: ts_second
    columns:
      - name: ts_second
        granularity: second
  - name: metricflow_time_spine
    time_spine:
      standard_granularity_column: date_day
    columns:
      - name: date_day
        granularity: day
"""

missing_time_spine_yml = """
models:
  - name: metricflow_time_spine
    columns:
      - name: ts_second
        granularity: second
"""

time_spine_missing_granularity_yml = """
models:
  - name: metricflow_time_spine_second
    time_spine:
      standard_granularity_column: ts_second
    columns:
      - name: ts_second
"""

time_spine_missing_column_yml = """
models:
  - name: metricflow_time_spine_second
    time_spine:
      standard_granularity_column: ts_second
    columns:
      - name: date_day
"""
tests/functional/time_spines/test_time_spines.py (Normal file, 198 lines)
@@ -0,0 +1,198 @@
from typing import Set

import pytest

from dbt.cli.main import dbtRunner
from dbt.contracts.graph.manifest import Manifest
from dbt.contracts.graph.semantic_manifest import SemanticManifest
from dbt.exceptions import ParsingError
from dbt.tests.util import get_manifest
from dbt_semantic_interfaces.type_enums import TimeGranularity
from tests.functional.time_spines.fixtures import (
    metricflow_time_spine_second_sql,
    metricflow_time_spine_sql,
    models_people_sql,
    semantic_model_people_yml,
    time_spine_missing_column_yml,
    time_spine_missing_granularity_yml,
    valid_time_spines_yml,
)


class TestValidTimeSpines:
    """Tests that YAML using current time spine configs parses as expected."""

    @pytest.fixture(scope="class")
    def models(self):
        return {
            "metricflow_time_spine.sql": metricflow_time_spine_sql,
            "metricflow_time_spine_second.sql": metricflow_time_spine_second_sql,
            "time_spines.yml": valid_time_spines_yml,
            "semantic_model_people.yml": semantic_model_people_yml,
            "people.sql": models_people_sql,
        }

    def test_time_spines(self, project):
        runner = dbtRunner()
        result = runner.invoke(["parse"])
        assert result.success
        assert isinstance(result.result, Manifest)

        manifest = get_manifest(project.project_root)
        assert manifest

        # Test that models and columns are set as expected
        time_spine_models = {
            id.split(".")[-1]: node for id, node in manifest.nodes.items() if node.time_spine
        }
        day_model_name = "metricflow_time_spine"
        second_model_name = "metricflow_time_spine_second"
        day_column_name = "date_day"
        second_column_name = "ts_second"
        model_names_to_col_names = {
            day_model_name: day_column_name,
            second_model_name: second_column_name,
        }
        model_names_to_granularities = {
            day_model_name: TimeGranularity.DAY,
            second_model_name: TimeGranularity.SECOND,
        }
        assert len(time_spine_models) == 2
        expected_time_spine_aliases = {second_model_name, day_model_name}
        assert set(time_spine_models.keys()) == expected_time_spine_aliases
        for model in time_spine_models.values():
            assert (
                model.time_spine.standard_granularity_column
                == model_names_to_col_names[model.name]
            )
            assert len(model.columns) == 1
            assert (
                list(model.columns.values())[0].granularity
                == model_names_to_granularities[model.name]
            )

        # Test that project configs are set as expected in semantic manifest
        semantic_manifest = SemanticManifest(manifest)
        assert semantic_manifest.validate()
        project_config = semantic_manifest._get_pydantic_semantic_manifest().project_configuration
        # Legacy config
        assert len(project_config.time_spine_table_configurations) == 1
        legacy_time_spine_config = project_config.time_spine_table_configurations[0]
        assert legacy_time_spine_config.column_name == day_column_name
        assert legacy_time_spine_config.location.replace('"', "").split(".")[-1] == day_model_name
        assert legacy_time_spine_config.grain == TimeGranularity.DAY
        # Current configs
        assert len(project_config.time_spines) == 2
        sl_time_spine_aliases: Set[str] = set()
        for sl_time_spine in project_config.time_spines:
            alias = sl_time_spine.node_relation.alias
            sl_time_spine_aliases.add(alias)
            assert sl_time_spine.primary_column.name == model_names_to_col_names[alias]
            assert (
                sl_time_spine.primary_column.time_granularity
                == model_names_to_granularities[alias]
            )
        assert sl_time_spine_aliases == expected_time_spine_aliases


class TestValidLegacyTimeSpine:
    """Tests that YAML using only legacy time spine config parses as expected."""

    @pytest.fixture(scope="class")
    def models(self):
        return {
            "metricflow_time_spine.sql": metricflow_time_spine_sql,
            "semantic_model_people.yml": semantic_model_people_yml,
            "people.sql": models_people_sql,
        }

    def test_time_spines(self, project):
        runner = dbtRunner()
        result = runner.invoke(["parse"])
        assert result.success
        assert isinstance(result.result, Manifest)

        manifest = get_manifest(project.project_root)
        assert manifest

        # Test that project configs are set as expected in semantic manifest
        semantic_manifest = SemanticManifest(manifest)
        assert semantic_manifest.validate()
        project_config = semantic_manifest._get_pydantic_semantic_manifest().project_configuration
        # Legacy config
        assert len(project_config.time_spine_table_configurations) == 1
        legacy_time_spine_config = project_config.time_spine_table_configurations[0]
        assert legacy_time_spine_config.column_name == "date_day"
        assert (
            legacy_time_spine_config.location.replace('"', "").split(".")[-1]
            == "metricflow_time_spine"
        )
        assert legacy_time_spine_config.grain == TimeGranularity.DAY
        # Current configs
        assert len(project_config.time_spines) == 0


class TestMissingTimeSpine:
    """Tests that YAML with semantic models but no time spines errors."""

    @pytest.fixture(scope="class")
    def models(self):
        return {
            "semantic_model_people.yml": semantic_model_people_yml,
            "people.sql": models_people_sql,
        }

    def test_time_spines(self, project):
        runner = dbtRunner()
        result = runner.invoke(["parse"])
        assert isinstance(result.exception, ParsingError)
        assert (
            "The semantic layer requires a time spine model with granularity DAY or smaller"
            in result.exception.msg
        )


class TestTimeSpineColumnMissing:
    """Tests that YAML with time spine column not in model errors."""

    @pytest.fixture(scope="class")
    def models(self):
        return {
            "semantic_model_people.yml": semantic_model_people_yml,
            "people.sql": models_people_sql,
            "metricflow_time_spine.sql": metricflow_time_spine_sql,
            "metricflow_time_spine_second.sql": metricflow_time_spine_second_sql,
            "time_spines.yml": time_spine_missing_column_yml,
        }

    def test_time_spines(self, project):
        runner = dbtRunner()
        result = runner.invoke(["parse"])
        assert isinstance(result.exception, ParsingError)
        assert (
            "Time spine standard granularity column must be defined on the model."
            in result.exception.msg
        )


class TestTimeSpineGranularityMissing:
    """Tests that YAML with time spine column without granularity errors."""

    @pytest.fixture(scope="class")
    def models(self):
        return {
            "semantic_model_people.yml": semantic_model_people_yml,
            "people.sql": models_people_sql,
            "metricflow_time_spine.sql": metricflow_time_spine_sql,
            "metricflow_time_spine_second.sql": metricflow_time_spine_second_sql,
            "time_spines.yml": time_spine_missing_granularity_yml,
        }

    def test_time_spines(self, project):
        runner = dbtRunner()
        result = runner.invoke(["parse"])
        assert isinstance(result.exception, ParsingError)
        assert (
            "Time spine standard granularity column must have a granularity defined."
            in result.exception.msg
        )
@@ -1,44 +1,79 @@
import unittest
import pytest

from dbt.clients.jinja_static import statically_extract_macro_calls
from dbt.artifacts.resources import RefArgs
from dbt.clients.jinja_static import (
statically_extract_macro_calls,
statically_parse_ref_or_source,
)
from dbt.context.base import generate_base_context
from dbt.exceptions import ParsingError


class MacroCalls(unittest.TestCase):
def setUp(self):
self.macro_strings = [
@pytest.mark.parametrize(
"macro_string,expected_possible_macro_calls",
[
(
"{% macro parent_macro() %} {% do return(nested_macro()) %} {% endmacro %}",
"{% macro lr_macro() %} {{ return(load_result('relations').table) }} {% endmacro %}",
"{% macro get_snapshot_unique_id() -%} {{ return(adapter.dispatch('get_snapshot_unique_id')()) }} {%- endmacro %}",
"{% macro get_columns_in_query(select_sql) -%} {{ return(adapter.dispatch('get_columns_in_query')(select_sql)) }} {% endmacro %}",
"""{% macro test_mutually_exclusive_ranges(model) %}
with base as (
select {{ get_snapshot_unique_id() }} as dbt_unique_id,
*
from {{ model }} )
{% endmacro %}""",
"{% macro test_my_test(model) %} select {{ current_timestamp_backcompat() }} {% endmacro %}",
"{% macro some_test(model) -%} {{ return(adapter.dispatch('test_some_kind4', 'foo_utils4')) }} {%- endmacro %}",
"{% macro some_test(model) -%} {{ return(adapter.dispatch('test_some_kind5', macro_namespace = 'foo_utils5')) }} {%- endmacro %}",
]

self.possible_macro_calls = [
["nested_macro"],
),
(
"{% macro lr_macro() %} {{ return(load_result('relations').table) }} {% endmacro %}",
["load_result"],
),
(
"{% macro get_snapshot_unique_id() -%} {{ return(adapter.dispatch('get_snapshot_unique_id')()) }} {%- endmacro %}",
["get_snapshot_unique_id"],
),
(
"{% macro get_columns_in_query(select_sql) -%} {{ return(adapter.dispatch('get_columns_in_query')(select_sql)) }} {% endmacro %}",
["get_columns_in_query"],
),
(
"""{% macro test_mutually_exclusive_ranges(model) %}
with base as (
select {{ get_snapshot_unique_id() }} as dbt_unique_id,
*
from {{ model }} )
{% endmacro %}""",
["get_snapshot_unique_id"],
),
(
"{% macro test_my_test(model) %} select {{ current_timestamp_backcompat() }} {% endmacro %}",
["current_timestamp_backcompat"],
),
(
"{% macro some_test(model) -%} {{ return(adapter.dispatch('test_some_kind4', 'foo_utils4')) }} {%- endmacro %}",
["test_some_kind4", "foo_utils4.test_some_kind4"],
),
(
"{% macro some_test(model) -%} {{ return(adapter.dispatch('test_some_kind5', macro_namespace = 'foo_utils5')) }} {%- endmacro %}",
["test_some_kind5", "foo_utils5.test_some_kind5"],
]
),
],
)
def test_extract_macro_calls(macro_string, expected_possible_macro_calls):
cli_vars = {"local_utils_dispatch_list": ["foo_utils4"]}
ctx = generate_base_context(cli_vars)

def test_macro_calls(self):
cli_vars = {"local_utils_dispatch_list": ["foo_utils4"]}
ctx = generate_base_context(cli_vars)
possible_macro_calls = statically_extract_macro_calls(macro_string, ctx)
assert possible_macro_calls == expected_possible_macro_calls

index = 0
for macro_string in self.macro_strings:
possible_macro_calls = statically_extract_macro_calls(macro_string, ctx)
self.assertEqual(self.possible_macro_calls[index], possible_macro_calls)
index += 1

class TestStaticallyParseRefOrSource:
def test_invalid_expression(self):
with pytest.raises(ParsingError):
statically_parse_ref_or_source("invalid")

@pytest.mark.parametrize(
"expression,expected_ref_or_source",
[
("ref('model')", RefArgs(name="model")),
("ref('package','model')", RefArgs(name="model", package="package")),
("ref('model',v=3)", RefArgs(name="model", version=3)),
("ref('package','model',v=3)", RefArgs(name="model", package="package", version=3)),
("source('schema', 'table')", ["schema", "table"]),
],
)
def test_valid_ref_expression(self, expression, expected_ref_or_source):
ref_or_source = statically_parse_ref_or_source(expression)
assert ref_or_source == expected_ref_or_source

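Illustrative only: calling code might branch on the two return shapes exercised by the parametrized cases above (a RefArgs for ref() expressions, a two-element [schema, table] list for source() expressions). describe_expression is a hypothetical helper, not part of dbt.

def describe_expression(expression: str) -> str:
    parsed = statically_parse_ref_or_source(expression)
    if isinstance(parsed, RefArgs):
        # e.g. "ref('package','model',v=3)" -> name/package/version populated
        return f"ref: name={parsed.name} package={parsed.package} version={parsed.version}"
    # source() parses to a two-element list
    source_name, table_name = parsed
    return f"source: {source_name}.{table_name}"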
@@ -26,7 +26,7 @@ from dbt.artifacts.resources import (
|
||||
WhereFilterIntersection,
|
||||
)
|
||||
from dbt.contracts.files import FileHash
|
||||
from dbt.contracts.graph.manifest import Manifest, ManifestMetadata
|
||||
from dbt.contracts.graph.manifest import DisabledLookup, Manifest, ManifestMetadata
|
||||
from dbt.contracts.graph.nodes import (
|
||||
DependsOn,
|
||||
Exposure,
|
||||
@@ -37,7 +37,7 @@ from dbt.contracts.graph.nodes import (
|
||||
SeedNode,
|
||||
SourceDefinition,
|
||||
)
|
||||
from dbt.exceptions import AmbiguousResourceNameRefError
|
||||
from dbt.exceptions import AmbiguousResourceNameRefError, ParsingError
|
||||
from dbt.flags import set_from_args
|
||||
from dbt.node_types import NodeType
|
||||
from dbt_common.events.functions import reset_metadata_vars
|
||||
@@ -94,6 +94,7 @@ REQUIRED_PARSED_NODE_KEYS = frozenset(
|
||||
"constraints",
|
||||
"deprecation_date",
|
||||
"defer_relation",
|
||||
"time_spine",
|
||||
}
|
||||
)
|
||||
|
||||
@@ -1962,3 +1963,176 @@ def test_resolve_doc(docs, package, expected):
|
||||
expected_package, expected_name = expected
|
||||
assert result.name == expected_name
|
||||
assert result.package_name == expected_package
|
||||
|
||||
|
||||
class TestManifestFindNodeFromRefOrSource:
|
||||
@pytest.fixture
|
||||
def mock_node(self):
|
||||
return MockNode("my_package", "my_model")
|
||||
|
||||
@pytest.fixture
|
||||
def mock_disabled_node(self):
|
||||
return MockNode("my_package", "disabled_node", config={"enabled": False})
|
||||
|
||||
@pytest.fixture
|
||||
def mock_source(self):
|
||||
return MockSource("root", "my_source", "source_table")
|
||||
|
||||
@pytest.fixture
|
||||
def mock_disabled_source(self):
|
||||
return MockSource("root", "my_source", "disabled_source_table", config={"enabled": False})
|
||||
|
||||
@pytest.fixture
|
||||
def mock_manifest(self, mock_node, mock_source, mock_disabled_node, mock_disabled_source):
|
||||
return make_manifest(
|
||||
nodes=[mock_node, mock_disabled_node], sources=[mock_source, mock_disabled_source]
|
||||
)
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"expression,expected_node",
|
||||
[
|
||||
("ref('my_package', 'my_model')", "mock_node"),
|
||||
("ref('my_package', 'doesnt_exist')", None),
|
||||
("ref('my_package', 'disabled_node')", "mock_disabled_node"),
|
||||
("source('my_source', 'source_table')", "mock_source"),
|
||||
("source('my_source', 'doesnt_exist')", None),
|
||||
("source('my_source', 'disabled_source_table')", "mock_disabled_source"),
|
||||
],
|
||||
)
|
||||
def test_find_node_from_ref_or_source(self, expression, expected_node, mock_manifest, request):
|
||||
node = mock_manifest.find_node_from_ref_or_source(expression)
|
||||
|
||||
if expected_node is None:
|
||||
assert node is None
|
||||
else:
|
||||
assert node == request.getfixturevalue(expected_node)
|
||||
|
||||
@pytest.mark.parametrize("invalid_expression", ["invalid", "ref(')"])
|
||||
def test_find_node_from_ref_or_source_invalid_expression(
|
||||
self, invalid_expression, mock_manifest
|
||||
):
|
||||
with pytest.raises(ParsingError):
|
||||
mock_manifest.find_node_from_ref_or_source(invalid_expression)
|
||||
|
||||
|
||||
+
+
+class TestDisabledLookup:
+    @pytest.fixture(scope="class")
+    def manifest(self):
+        return Manifest(
+            nodes={},
+            sources={},
+            macros={},
+            docs={},
+            disabled={},
+            files={},
+            exposures={},
+            selectors={},
+        )
+
+    @pytest.fixture(scope="class")
+    def mock_model(self):
+        return MockNode("package", "name", NodeType.Model)
+
+    @pytest.fixture(scope="class")
+    def mock_model_with_version(self):
+        return MockNode("package", "name", NodeType.Model, version=3)
+
+    @pytest.fixture(scope="class")
+    def mock_seed(self):
+        return MockNode("package", "name", NodeType.Seed)
+
+    def test_find(self, manifest, mock_model):
+        manifest.disabled = {"model.package.name": [mock_model]}
+        lookup = DisabledLookup(manifest)
+
+        assert lookup.find("name", "package") == [mock_model]
+
+    def test_find_wrong_name(self, manifest, mock_model):
+        manifest.disabled = {"model.package.name": [mock_model]}
+        lookup = DisabledLookup(manifest)
+
+        assert lookup.find("missing_name", "package") is None
+
+    def test_find_wrong_package(self, manifest, mock_model):
+        manifest.disabled = {"model.package.name": [mock_model]}
+        lookup = DisabledLookup(manifest)
+
+        assert lookup.find("name", "missing_package") is None
+
+    def test_find_wrong_version(self, manifest, mock_model):
+        manifest.disabled = {"model.package.name": [mock_model]}
+        lookup = DisabledLookup(manifest)
+
+        assert lookup.find("name", "package", version=3) is None
+
+    def test_find_wrong_resource_types(self, manifest, mock_model):
+        manifest.disabled = {"model.package.name": [mock_model]}
+        lookup = DisabledLookup(manifest)
+
+        assert lookup.find("name", "package", resource_types=[NodeType.Analysis]) is None
+
+    def test_find_no_package(self, manifest, mock_model):
+        manifest.disabled = {"model.package.name": [mock_model]}
+        lookup = DisabledLookup(manifest)
+
+        assert lookup.find("name", None) == [mock_model]
+
+    def test_find_versioned_node(self, manifest, mock_model_with_version):
+        manifest.disabled = {"model.package.name": [mock_model_with_version]}
+        lookup = DisabledLookup(manifest)
+
+        assert lookup.find("name", "package", version=3) == [mock_model_with_version]
+
+    def test_find_versioned_node_no_package(self, manifest, mock_model_with_version):
+        manifest.disabled = {"model.package.name": [mock_model_with_version]}
+        lookup = DisabledLookup(manifest)
+
+        assert lookup.find("name", None, version=3) == [mock_model_with_version]
+
+    def test_find_versioned_node_no_version(self, manifest, mock_model_with_version):
+        manifest.disabled = {"model.package.name": [mock_model_with_version]}
+        lookup = DisabledLookup(manifest)
+
+        assert lookup.find("name", "package") is None
+
+    def test_find_versioned_node_wrong_version(self, manifest, mock_model_with_version):
+        manifest.disabled = {"model.package.name": [mock_model_with_version]}
+        lookup = DisabledLookup(manifest)
+
+        assert lookup.find("name", "package", version=2) is None
+
+    def test_find_versioned_node_wrong_name(self, manifest, mock_model_with_version):
+        manifest.disabled = {"model.package.name": [mock_model_with_version]}
+        lookup = DisabledLookup(manifest)
+
+        assert lookup.find("wrong_name", "package", version=3) is None
+
+    def test_find_versioned_node_wrong_package(self, manifest, mock_model_with_version):
+        manifest.disabled = {"model.package.name": [mock_model_with_version]}
+        lookup = DisabledLookup(manifest)
+
+        assert lookup.find("name", "wrong_package", version=3) is None
+
+    def test_find_multiple_nodes(self, manifest, mock_model, mock_seed):
+        manifest.disabled = {"model.package.name": [mock_model, mock_seed]}
+        lookup = DisabledLookup(manifest)
+
+        assert lookup.find("name", "package") == [mock_model, mock_seed]
+
+    def test_find_multiple_nodes_with_resource_types(self, manifest, mock_model, mock_seed):
+        manifest.disabled = {"model.package.name": [mock_model, mock_seed]}
+        lookup = DisabledLookup(manifest)
+
+        assert lookup.find("name", "package", resource_types=[NodeType.Model]) == [mock_model]
+
+    def test_find_multiple_nodes_with_wrong_resource_types(self, manifest, mock_model, mock_seed):
+        manifest.disabled = {"model.package.name": [mock_model, mock_seed]}
+        lookup = DisabledLookup(manifest)
+
+        assert lookup.find("name", "package", resource_types=[NodeType.Analysis]) is None
+
+    def test_find_multiple_nodes_with_resource_types_empty(self, manifest, mock_model, mock_seed):
+        manifest.disabled = {"model.package.name": [mock_model, mock_seed]}
+        lookup = DisabledLookup(manifest)
+
+        assert lookup.find("name", "package", resource_types=[]) is None
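These cases encode the lookup semantics rather than the implementation: match on name, then narrow by package, version, and resource_types, with an empty result (including resource_types=[]) reported as None rather than []. A minimal find() with the same behavior, assuming plain attribute access on the stored nodes (a sketch, not the dbt-core source):

from typing import Any, List, Optional, Sequence


def find_disabled(
    disabled: dict,  # unique_id -> list of disabled nodes
    name: str,
    package: Optional[str],
    version: Optional[int] = None,
    resource_types: Optional[Sequence[Any]] = None,
) -> Optional[List[Any]]:
    matches = []
    for nodes in disabled.values():
        for node in nodes:
            if node.name != name:
                continue
            if package is not None and node.package_name != package:
                continue
            # A versioned node only matches when the same version is asked
            # for; an unversioned node only matches when no version is given.
            if str(getattr(node, "version", None) or "") != str(version or ""):
                continue
            if resource_types is not None and node.resource_type not in resource_types:
                continue
            matches.append(node)
    return matches or None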
@@ -68,6 +68,48 @@ class TestModelNode:
 
         assert default_model_node.is_past_deprecation_date is expected_is_past_deprecation_date
 
+    @pytest.mark.parametrize(
+        "model_constraints,columns,expected_all_constraints",
+        [
+            ([], {}, []),
+            (
+                [ModelLevelConstraint(type=ConstraintType.foreign_key)],
+                {},
+                [ModelLevelConstraint(type=ConstraintType.foreign_key)],
+            ),
+            (
+                [],
+                {
+                    "id": ColumnInfo(
+                        name="id",
+                        constraints=[ColumnLevelConstraint(type=ConstraintType.foreign_key)],
+                    )
+                },
+                [ColumnLevelConstraint(type=ConstraintType.foreign_key)],
+            ),
+            (
+                [ModelLevelConstraint(type=ConstraintType.foreign_key)],
+                {
+                    "id": ColumnInfo(
+                        name="id",
+                        constraints=[ColumnLevelConstraint(type=ConstraintType.foreign_key)],
+                    )
+                },
+                [
+                    ModelLevelConstraint(type=ConstraintType.foreign_key),
+                    ColumnLevelConstraint(type=ConstraintType.foreign_key),
+                ],
+            ),
+        ],
+    )
+    def test_all_constraints(
+        self, default_model_node, model_constraints, columns, expected_all_constraints
+    ):
+        default_model_node.constraints = model_constraints
+        default_model_node.columns = columns
+
+        assert default_model_node.all_constraints == expected_all_constraints
+
 
 class TestSemanticModel:
     @pytest.fixture(scope="function")
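The parametrization fixes an ordering contract: model-level constraints come first, followed by column-level constraints in column order. A minimal dataclass sketch of a property with that behavior (an assumed shape, not the ModelNode source):

from dataclasses import dataclass, field
from typing import Any, Dict, List


@dataclass
class Column:
    name: str
    constraints: List[Any] = field(default_factory=list)


@dataclass
class Model:
    constraints: List[Any] = field(default_factory=list)
    columns: Dict[str, Column] = field(default_factory=dict)

    @property
    def all_constraints(self) -> List[Any]:
        # Model-level constraints first, then each column's constraints,
        # in column order -- matching the expected lists above.
        return self.constraints + [
            c for column in self.columns.values() for c in column.constraints
        ]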
38
tests/unit/test_deprecations.py
Normal file
@@ -0,0 +1,38 @@
+import pytest
+
+import dbt.deprecations as deprecations
+
+
+@pytest.fixture(scope="function")
+def active_deprecations():
+    deprecations.reset_deprecations()
+    assert not deprecations.active_deprecations
+
+    yield deprecations.active_deprecations
+
+    deprecations.reset_deprecations()
+
+
+@pytest.fixture(scope="function")
+def buffered_deprecations():
+    deprecations.buffered_deprecations.clear()
+    assert not deprecations.buffered_deprecations
+
+    yield deprecations.buffered_deprecations
+
+    deprecations.buffered_deprecations.clear()
+
+
+def test_buffer_deprecation(active_deprecations, buffered_deprecations):
+    deprecations.buffer("project-flags-moved")
+
+    assert active_deprecations == set()
+    assert len(buffered_deprecations) == 1
+
+
+def test_fire_buffered_deprecations(active_deprecations, buffered_deprecations):
+    deprecations.buffer("project-flags-moved")
+    deprecations.fire_buffered_deprecations()
+
+    assert active_deprecations == set(["project-flags-moved"])
+    assert len(buffered_deprecations) == 0
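These two tests rely on a small buffering contract: buffer() defers a deprecation as a stored callback without activating it, and fire_buffered_deprecations() replays and drains the buffer, which is what finally marks the deprecation active. A minimal sketch of that contract (dbt's real module carries more state and emits warning events):

import functools
from typing import Callable, List, Set

active_deprecations: Set[str] = set()
buffered_deprecations: List[Callable[[], None]] = []


def warn(name: str, **kwargs) -> None:
    # Real dbt also fires a deprecation warning event here.
    active_deprecations.add(name)


def buffer(name: str, **kwargs) -> None:
    # Defer: record a zero-argument callback instead of warning now.
    buffered_deprecations.append(functools.partial(warn, name, **kwargs))


def fire_buffered_deprecations() -> None:
    for fire in buffered_deprecations:
        fire()
    buffered_deprecations.clear()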
@@ -1,5 +1,5 @@
 from dataclasses import dataclass, field
-from typing import List
+from typing import Callable, List
 
 from dbt_common.events.base_types import BaseEvent, EventMsg
 
@@ -8,9 +8,10 @@ from dbt_common.events.base_types import BaseEvent, EventMsg
 class EventCatcher:
     event_to_catch: BaseEvent
     caught_events: List[EventMsg] = field(default_factory=list)
+    predicate: Callable[[EventMsg], bool] = lambda event: True
 
     def catch(self, event: EventMsg):
-        if event.info.name == self.event_to_catch.__name__:
+        if event.info.name == self.event_to_catch.__name__ and self.predicate(event):
             self.caught_events.append(event)
 
     def flush(self) -> None:
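Because the predicate defaults to an always-true lambda, existing callers are unchanged; tests that only care about a subset of events of a given type can now filter at catch time. An illustrative use, where the event class and message substring are placeholders rather than real dbt names:

# Hypothetical usage of the predicate hook added above.
catcher = EventCatcher(
    event_to_catch=SomeDeprecationEvent,  # placeholder event class
    predicate=lambda event: "project-flags" in event.info.msg,
)
# catcher.catch is then registered as an event callback; only events
# matching both the type and the predicate land in catcher.caught_events.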