Compare commits

...

9 Commits

Author SHA1 Message Date
Courtney Holcomb
e9e562299d Cleanup 2024-07-23 16:56:53 -07:00
Courtney Holcomb
e9c748f384 Time spine tests 2024-07-23 13:45:37 -07:00
Courtney Holcomb
73b115692b Selectors 2024-07-23 11:16:24 -07:00
Courtney Holcomb
eef480983a Integration 2024-07-23 10:58:27 -07:00
Courtney Holcomb
1d1236f3c0 Fix types 2024-07-22 21:33:00 -07:00
Courtney Holcomb
64ddeac58b WIP 2024-07-22 21:11:57 -07:00
Courtney Holcomb
66b24af00e Update JSON schemas 2024-07-22 20:29:48 -07:00
Courtney Holcomb
d81e70e48a Changelog 2024-07-22 20:23:23 -07:00
Courtney Holcomb
46ff11f32e WIP 2024-07-22 20:09:48 -07:00
41 changed files with 1351 additions and 58 deletions

View File

@@ -0,0 +1,6 @@
kind: Features
body: Support new semantic layer time spine configs to enable sub-daily granularity.
time: 2024-07-22T20:22:38.258249-07:00
custom:
Author: courtneyholcomb
Issue: "10475"

View File

@@ -86,6 +86,7 @@ from dbt.artifacts.resources.v1.source_definition import (
SourceDefinition,
)
from dbt.artifacts.resources.v1.sql_operation import SqlOperation
from dbt.artifacts.resources.v1.time_spines import TimeSpine, TimeSpinePrimaryColumn
from dbt.artifacts.resources.v1.unit_test_definition import (
UnitTestConfig,
UnitTestDefinition,

View File

@@ -35,6 +35,7 @@ class NodeType(StrEnum):
SemanticModel = "semantic_model"
Unit = "unit_test"
Fixture = "fixture"
TimeSpine = "time_spine"
def pluralize(self) -> str:
if self is self.Analysis:

View File

@@ -0,0 +1,53 @@
import time
from dataclasses import dataclass, field
from typing import Any, Dict, List, Optional
from dbt.artifacts.resources.base import GraphResource
from dbt.artifacts.resources.v1.components import DependsOn, RefArgs
from dbt.artifacts.resources.v1.semantic_model import NodeRelation
from dbt_common.contracts.config.base import BaseConfig, CompareBehavior, MergeBehavior
from dbt_common.dataclass_schema import dbtClassMixin
from dbt_semantic_interfaces.type_enums.time_granularity import TimeGranularity
# ====================================
# TimeSpine objects
# TimeSpine protocols: https://github.com/dbt-labs/dbt-semantic-interfaces/blob/main/dbt_semantic_interfaces/protocols/time_spine.py
# ====================================
@dataclass
class TimeSpinePrimaryColumn(dbtClassMixin):
    """The column in the time spine that maps to a standard granularity."""

    # Column name in the time spine model.
    name: str
    # One of the standard TimeGranularity enum members.
    time_granularity: TimeGranularity
@dataclass
class TimeSpineConfig(BaseConfig):
    """Node config for time spine resources."""

    # Disabled time spines are excluded from compilation.
    enabled: bool = True
    # Group name; excluded from config comparison (CompareBehavior.Exclude).
    group: Optional[str] = field(
        default=None,
        metadata=CompareBehavior.Exclude.meta(),
    )
    # Arbitrary user metadata; merged across config sources (MergeBehavior.Update).
    meta: Dict[str, Any] = field(
        default_factory=dict,
        metadata=MergeBehavior.Update.meta(),
    )
@dataclass
class TimeSpine(GraphResource):
    """Describes a table that contains dates at a specific time grain.

    One column must map to a standard granularity (one of the TimeGranularity enum members). Others might represent
    custom granularity columns. Custom granularity columns are not yet implemented in parser.
    """

    # A ref() expression string pointing at the backing model, e.g. "ref('my_spine')".
    model: str
    # Resolved relation info for `model`; None until set at the end of manifest parsing.
    node_relation: Optional[NodeRelation]
    # The column that maps to a standard granularity.
    primary_column: TimeSpinePrimaryColumn
    depends_on: DependsOn = field(default_factory=DependsOn)
    refs: List[RefArgs] = field(default_factory=list)
    # Parse timestamp; compared against the loader's start time to skip
    # reprocessing unchanged nodes.
    created_at: float = field(default_factory=lambda: time.time())
    config: TimeSpineConfig = field(default_factory=TimeSpineConfig)

View File

@@ -19,6 +19,7 @@ from dbt.artifacts.resources import (
Snapshot,
SourceDefinition,
SqlOperation,
TimeSpine,
UnitTestDefinition,
)
from dbt.artifacts.schemas.base import (
@@ -158,6 +159,10 @@ class WritableManifest(ArtifactMixin):
description="The unit tests defined in the project",
)
)
time_spines: Mapping[UniqueID, TimeSpine] = field(
metadata=dict(description=("The time spine models defined in the dbt project.")),
default_factory=dict,
)
@classmethod
def compatible_previous_versions(cls) -> Iterable[Tuple[str, int]]:

View File

@@ -169,6 +169,8 @@ def upgrade_manifest_json(manifest: dict, manifest_schema_version: int) -> dict:
doc_content["resource_type"] = "doc"
if "semantic_models" not in manifest:
manifest["semantic_models"] = {}
if "time_spines" not in manifest:
manifest["time_spines"] = {}
if "saved_queries" not in manifest:
manifest["saved_queries"] = {}
return manifest

View File

@@ -401,6 +401,7 @@ resource_type = click.option(
"metric",
"semantic_model",
"saved_query",
"time_spine",
"source",
"analysis",
"model",
@@ -428,6 +429,7 @@ exclude_resource_type = click.option(
[
"metric",
"semantic_model",
"time_spine",
"saved_query",
"source",
"analysis",

View File

@@ -78,6 +78,7 @@ def _generate_stats(manifest: Manifest) -> Dict[NodeType, int]:
stats[NodeType.Macro] += len(manifest.macros)
stats[NodeType.Group] += len(manifest.groups)
stats[NodeType.SemanticModel] += len(manifest.semantic_models)
stats[NodeType.TimeSpine] += len(manifest.time_spines)
stats[NodeType.SavedQuery] += len(manifest.saved_queries)
stats[NodeType.Unit] += len(manifest.unit_tests)
@@ -176,6 +177,8 @@ class Linker:
self.link_node(node, manifest)
for semantic_model in manifest.semantic_models.values():
self.link_node(semantic_model, manifest)
for time_spine in manifest.time_spines.values():
self.link_node(time_spine, manifest)
for exposure in manifest.exposures.values():
self.link_node(exposure, manifest)
for metric in manifest.metrics.values():

View File

@@ -15,6 +15,6 @@ DEPENDENCIES_FILE_NAME = "dependencies.yml"
PACKAGE_LOCK_FILE_NAME = "package-lock.yml"
MANIFEST_FILE_NAME = "manifest.json"
SEMANTIC_MANIFEST_FILE_NAME = "semantic_manifest.json"
TIME_SPINE_MODEL_NAME = "metricflow_time_spine"
LEGACY_TIME_SPINE_MODEL_NAME = "metricflow_time_spine"
PARTIAL_PARSE_FILE_NAME = "partial_parse.msgpack"
PACKAGE_LOCK_HASH_KEY = "sha1_hash"

View File

@@ -54,6 +54,7 @@ from dbt.contracts.graph.nodes import (
SeedNode,
SemanticModel,
SourceDefinition,
TimeSpine,
UnitTestNode,
)
from dbt.exceptions import (
@@ -1802,6 +1803,49 @@ def generate_parse_semantic_models(
}
class TimeSpineRefResolver(BaseResolver):
    """Jinja ``ref()`` resolver used while parsing a time spine's ``model`` field.

    Rendering a ref through this resolver records the reference on the node's
    ``refs`` list as a side effect; the rendered output itself is discarded.
    """

    def __call__(self, *args, **kwargs) -> str:
        # Accepts ref("name") or ref("package", "name").
        package = None
        if len(args) == 1:
            name = args[0]
        elif len(args) == 2:
            package, name = args
        else:
            raise RefArgsError(node=self.model, args=args)
        version = kwargs.get("version") or kwargs.get("v")
        self.validate_args(name, package, version)
        # "model" here is any node
        self.model.refs.append(RefArgs(package=package, name=name, version=version))
        # Rendered value is unused; the append above is the point.
        return ""

    def validate_args(self, name, package, version):
        # NOTE(review): only `name` is validated here; `package` and `version`
        # are accepted unchecked — confirm that is intentional.
        if not isinstance(name, str):
            raise ParsingError(
                f"In a time spine section in {self.model.original_file_path} "
                "the name argument to ref() must be a string"
            )
# used for time spines
def generate_parse_time_spines(
    time_spine: TimeSpine,
    config: RuntimeConfig,
    manifest: Manifest,
    package_name: str,
) -> Dict[str, Any]:
    """Build the minimal Jinja context for rendering a time spine's ``model`` ref.

    The context exposes only ``ref``, backed by a TimeSpineRefResolver that
    records the reference on the time spine node.
    """
    dependency_projects = config.load_dependencies()
    resolver = TimeSpineRefResolver(
        None,
        time_spine,
        dependency_projects[package_name],
        manifest,
    )
    return {"ref": resolver}
# This class is currently used by the schema parser in order
# to limit the number of macros in the context by using
# the TestMacroNamespace

View File

@@ -203,6 +203,7 @@ class SchemaSourceFile(BaseSourceFile):
# node patches contain models, seeds, snapshots, analyses
ndp: List[str] = field(default_factory=list)
semantic_models: List[str] = field(default_factory=list)
time_spines: List[str] = field(default_factory=list)
unit_tests: List[str] = field(default_factory=list)
saved_queries: List[str] = field(default_factory=list)
# any macro patches in this file by macro unique_id.

View File

@@ -58,6 +58,7 @@ from dbt.contracts.graph.nodes import (
SeedNode,
SemanticModel,
SourceDefinition,
TimeSpine,
UnitTestDefinition,
UnitTestFileFixture,
UnpatchedSourceDefinition,
@@ -826,6 +827,7 @@ class Manifest(MacroMethods, dbtClassMixin):
unit_tests: MutableMapping[str, UnitTestDefinition] = field(default_factory=dict)
saved_queries: MutableMapping[str, SavedQuery] = field(default_factory=dict)
fixtures: MutableMapping[str, UnitTestFileFixture] = field(default_factory=dict)
time_spines: MutableMapping[str, TimeSpine] = field(default_factory=dict)
_doc_lookup: Optional[DocLookup] = field(
default=None, metadata={"serialize": lambda x: None, "deserialize": lambda x: None}
@@ -894,6 +896,7 @@ class Manifest(MacroMethods, dbtClassMixin):
"semantic_models": {
k: v.to_dict(omit_none=False) for k, v in self.semantic_models.items()
},
"time_spines": {k: v.to_dict(omit_none=False) for k, v in self.time_spines.items()},
"saved_queries": {
k: v.to_dict(omit_none=False) for k, v in self.saved_queries.items()
},
@@ -984,6 +987,7 @@ class Manifest(MacroMethods, dbtClassMixin):
self.sources.values(),
self.metrics.values(),
self.semantic_models.values(),
self.time_spines.values(),
self.saved_queries.values(),
self.unit_tests.values(),
)
@@ -1023,6 +1027,7 @@ class Manifest(MacroMethods, dbtClassMixin):
semantic_models={k: _deepcopy(v) for k, v in self.semantic_models.items()},
unit_tests={k: _deepcopy(v) for k, v in self.unit_tests.items()},
saved_queries={k: _deepcopy(v) for k, v in self.saved_queries.items()},
time_spines={k: _deepcopy(v) for k, v in self.time_spines.items()},
)
copy.build_flat_graph()
return copy
@@ -1035,6 +1040,7 @@ class Manifest(MacroMethods, dbtClassMixin):
self.exposures.values(),
self.metrics.values(),
self.semantic_models.values(),
self.time_spines.values(),
self.saved_queries.values(),
self.unit_tests.values(),
)
@@ -1062,6 +1068,7 @@ class Manifest(MacroMethods, dbtClassMixin):
self.metrics.values(),
)
)
group_map = {group.name: [] for group in self.groups.values()}
for node in groupable_nodes:
if node.group is not None:
@@ -1093,6 +1100,7 @@ class Manifest(MacroMethods, dbtClassMixin):
groups=cls._map_resources_to_map_nodes(writable_manifest.groups),
semantic_models=cls._map_resources_to_map_nodes(writable_manifest.semantic_models),
saved_queries=cls._map_resources_to_map_nodes(writable_manifest.saved_queries),
time_spines=cls._map_resources_to_map_nodes(writable_manifest.time_spines),
selectors={
selector_id: selector
for selector_id, selector in writable_manifest.selectors.items()
@@ -1154,6 +1162,7 @@ class Manifest(MacroMethods, dbtClassMixin):
parent_map=self.parent_map,
group_map=self.group_map,
semantic_models=self._map_nodes_to_map_resources(self.semantic_models),
time_spines=self._map_nodes_to_map_resources(self.time_spines),
unit_tests=self._map_nodes_to_map_resources(self.unit_tests),
saved_queries=self._map_nodes_to_map_resources(self.saved_queries),
)
@@ -1174,6 +1183,8 @@ class Manifest(MacroMethods, dbtClassMixin):
return self.metrics[unique_id]
elif unique_id in self.semantic_models:
return self.semantic_models[unique_id]
elif unique_id in self.time_spines:
return self.time_spines[unique_id]
elif unique_id in self.unit_tests:
return self.unit_tests[unique_id]
elif unique_id in self.saved_queries:
@@ -1616,6 +1627,11 @@ class Manifest(MacroMethods, dbtClassMixin):
self.semantic_models[semantic_model.unique_id] = semantic_model
source_file.semantic_models.append(semantic_model.unique_id)
def add_time_spine(self, source_file: SchemaSourceFile, time_spine: TimeSpine):
    """Register a parsed time spine on the manifest and record it on its schema file.

    Raises a duplicate-resource error if a time spine with the same unique_id
    already exists.
    """
    _check_duplicates(time_spine, self.time_spines)
    uid = time_spine.unique_id
    self.time_spines[uid] = time_spine
    source_file.time_spines.append(uid)
def add_unit_test(self, source_file: SchemaSourceFile, unit_test: UnitTestDefinition):
if unit_test.unique_id in self.unit_tests:
raise DuplicateResourceNameError(unit_test, self.unit_tests[unit_test.unique_id])
@@ -1662,6 +1678,8 @@ class Manifest(MacroMethods, dbtClassMixin):
self.semantic_models,
self.unit_tests,
self.saved_queries,
self.fixtures,
self.time_spines,
self._doc_lookup,
self._source_lookup,
self._ref_lookup,

View File

@@ -58,6 +58,7 @@ from dbt.artifacts.resources import SingularTest as SingularTestResource
from dbt.artifacts.resources import Snapshot as SnapshotResource
from dbt.artifacts.resources import SourceDefinition as SourceDefinitionResource
from dbt.artifacts.resources import SqlOperation as SqlOperationResource
from dbt.artifacts.resources import TimeSpine as TimeSpineResource
from dbt.artifacts.resources import UnitTestDefinition as UnitTestDefinitionResource
from dbt.contracts.graph.model_config import UnitTestNodeConfig
from dbt.contracts.graph.node_args import ModelNodeArgs
@@ -1419,7 +1420,7 @@ class Metric(GraphNode, MetricResource):
# existing when it didn't before is a change!
# metadata/tags changes are not "changes"
if old is None:
return True
return True # TODO: this contradicts the comment above - which is correct?
return (
self.same_filter(old)
@@ -1501,7 +1502,7 @@ class SemanticModel(GraphNode, SemanticModelResource):
# existing when it didn't before is a change!
# metadata/tags changes are not "changes"
if old is None:
return True
return True # TODO: this contradicts the comment above - which is correct?
return (
self.same_model(old)
@@ -1517,6 +1518,39 @@ class SemanticModel(GraphNode, SemanticModelResource):
)
# ====================================
# TimeSpine node
# ====================================
@dataclass
class TimeSpine(GraphNode, TimeSpineResource):
    """Manifest node wrapping the TimeSpine resource, with state:modified helpers."""

    @property
    def depends_on_nodes(self):
        # Unique ids of nodes this time spine depends on (via its model ref).
        return self.depends_on.nodes

    @property
    def depends_on_macros(self):
        return self.depends_on.macros

    @classmethod
    def resource_class(cls) -> Type[TimeSpineResource]:
        return TimeSpineResource

    def same_model(self, old: "TimeSpine") -> bool:
        return self.model == old.model

    def same_primary_column(self, old: "TimeSpine") -> bool:
        return self.primary_column == old.primary_column

    def same_contents(self, old: Optional["TimeSpine"]) -> bool:
        # A time spine that exists now but didn't before counts as changed.
        return (
            old is not None
            and self.same_model(old)
            and self.same_primary_column(old)
        )
# ====================================
# SavedQuery
# ====================================
@@ -1652,6 +1686,7 @@ GraphMemberNode = Union[
Metric,
SavedQuery,
SemanticModel,
TimeSpine,
UnitTestDefinition,
]

View File

@@ -1,10 +1,15 @@
from dbt.constants import TIME_SPINE_MODEL_NAME
from typing import List, Optional
from dbt.constants import LEGACY_TIME_SPINE_MODEL_NAME
from dbt.contracts.graph.manifest import Manifest
from dbt.contracts.graph.nodes import ManifestNode, TimeSpine
from dbt.events.types import SemanticValidationFailure
from dbt.exceptions import ParsingError
from dbt_common.clients.system import write_file
from dbt_common.events.base_types import EventLevel
from dbt_common.events.functions import fire_event
from dbt_semantic_interfaces.implementations.metric import PydanticMetric
from dbt_semantic_interfaces.implementations.node_relation import PydanticNodeRelation
from dbt_semantic_interfaces.implementations.project_configuration import (
PydanticProjectConfiguration,
)
@@ -13,6 +18,10 @@ from dbt_semantic_interfaces.implementations.semantic_manifest import (
PydanticSemanticManifest,
)
from dbt_semantic_interfaces.implementations.semantic_model import PydanticSemanticModel
from dbt_semantic_interfaces.implementations.time_spine import (
PydanticTimeSpine,
PydanticTimeSpinePrimaryColumn,
)
from dbt_semantic_interfaces.implementations.time_spine_table_configuration import (
PydanticTimeSpineTableConfiguration,
)
@@ -23,7 +32,7 @@ from dbt_semantic_interfaces.validations.semantic_manifest_validator import (
class SemanticManifest:
def __init__(self, manifest) -> None:
def __init__(self, manifest: Manifest) -> None:
self.manifest = manifest
def validate(self) -> bool:
@@ -59,8 +68,35 @@ class SemanticManifest:
write_file(file_path, json)
def _get_pydantic_semantic_manifest(self) -> PydanticSemanticManifest:
time_spines = list(self.manifest.time_spines.values())
pydantic_time_spines: List[PydanticTimeSpine] = []
daily_time_spine: Optional[TimeSpine] = None
for time_spine in time_spines:
# Assertion for type checker
assert time_spine.node_relation, (
f"Node relation should have been set for time time spine {time_spine.name} during "
"manifest parsing, but it was not."
)
pydantic_time_spine = PydanticTimeSpine(
name=time_spine.name,
node_relation=PydanticNodeRelation(
alias=time_spine.node_relation.alias,
schema_name=time_spine.node_relation.schema_name,
database=time_spine.node_relation.database,
relation_name=time_spine.node_relation.relation_name,
),
primary_column=PydanticTimeSpinePrimaryColumn(
name=time_spine.primary_column.name,
time_granularity=time_spine.primary_column.time_granularity,
),
)
pydantic_time_spines.append(pydantic_time_spine)
if time_spine.primary_column.time_granularity == TimeGranularity.DAY:
daily_time_spine = time_spine
project_config = PydanticProjectConfiguration(
time_spine_table_configurations=[],
time_spine_table_configurations=[], time_spines=pydantic_time_spines
)
pydantic_semantic_manifest = PydanticSemanticManifest(
metrics=[], semantic_models=[], project_configuration=project_config
@@ -79,19 +115,26 @@ class SemanticManifest:
PydanticSavedQuery.parse_obj(saved_query.to_dict())
)
# Look for time-spine table model and create time spine table configuration
if self.manifest.semantic_models:
# Get model for time_spine_table
model = self.manifest.ref_lookup.find(TIME_SPINE_MODEL_NAME, None, None, self.manifest)
if not model:
# Validate that there is a time spine configured for the semantic manifest.
# If no daily time spine has been configured, look for legacy time spine model. This logic is included to
# avoid breaking projects that have not migrated to the new time spine config yet.
legacy_time_spine_model: Optional[ManifestNode] = None
if not daily_time_spine:
legacy_time_spine_model = self.manifest.ref_lookup.find(
LEGACY_TIME_SPINE_MODEL_NAME, None, None, self.manifest
)
# If no time spines have been configured AND legacy time spine model does not exist, error.
if not legacy_time_spine_model:
raise ParsingError(
"The semantic layer requires a 'metricflow_time_spine' model in the project, but none was found. "
"The semantic layer requires a time spine model in the project, but none was found. "
"Guidance on creating this model can be found on our docs site ("
"https://docs.getdbt.com/docs/build/metricflow-time-spine) "
"https://docs.getdbt.com/docs/build/metricflow-time-spine) " # TODO: update docs link!
)
# Create time_spine_table_config, set it in project_config, and add to semantic manifest
time_spine_table_config = PydanticTimeSpineTableConfiguration(
location=model.relation_name,
location=legacy_time_spine_model.relation_name,
column_name="date_day",
grain=TimeGranularity.DAY,
)

View File

@@ -669,6 +669,19 @@ class UnparsedSemanticModel(dbtClassMixin):
primary_entity: Optional[str] = None
@dataclass
class UnparsedTimeSpinePrimaryColumn(dbtClassMixin):
    """Raw YAML shape of a time spine's primary column, before validation."""

    # Column name in the time spine model.
    name: str
    time_granularity: str  # str is really a TimeGranularity Enum
@dataclass
class UnparsedTimeSpine(dbtClassMixin):
    """Raw YAML shape of a `time_spines:` entry, before parsing/validation."""

    name: str
    model: str  # looks like "ref(...)"
    # Column that maps to a standard granularity.
    primary_column: UnparsedTimeSpinePrimaryColumn
@dataclass
class UnparsedQueryParams(dbtClassMixin):
metrics: List[str] = field(default_factory=list)

View File

@@ -174,6 +174,8 @@ class NodeSelector(MethodManager):
elif unique_id in self.manifest.saved_queries:
saved_query = self.manifest.saved_queries[unique_id]
return saved_query.config.enabled
elif unique_id in self.manifest.time_spines:
return True
node = self.manifest.nodes[unique_id]
@@ -204,6 +206,8 @@ class NodeSelector(MethodManager):
node = self.manifest.unit_tests[unique_id]
elif unique_id in self.manifest.saved_queries:
node = self.manifest.saved_queries[unique_id]
elif unique_id in self.manifest.time_spines:
node = self.manifest.time_spines[unique_id]
else:
raise DbtInternalError(f"Node {unique_id} not found in the manifest!")
return self.node_is_match(node)

View File

@@ -27,6 +27,7 @@ from dbt.contracts.graph.nodes import (
SemanticModel,
SingularTestNode,
SourceDefinition,
TimeSpine,
UnitTestDefinition,
)
from dbt.contracts.graph.unparsed import UnparsedVersion
@@ -62,6 +63,7 @@ class MethodName(StrEnum):
SourceStatus = "source_status"
Version = "version"
SemanticModel = "semantic_model"
TimeSpine = "time_spine"
SavedQuery = "saved_query"
UnitTest = "unit_test"
@@ -175,13 +177,21 @@ class SelectorMethod(metaclass=abc.ABCMeta):
def semantic_model_nodes(
self, included_nodes: Set[UniqueId]
) -> Iterator[Tuple[UniqueId, SemanticModel]]:
for key, semantic_model in self.manifest.semantic_models.items():
unique_id = UniqueId(key)
if unique_id not in included_nodes:
continue
yield unique_id, semantic_model
def time_spine_nodes(
    self, included_nodes: Set[UniqueId]
) -> Iterator[Tuple[UniqueId, TimeSpine]]:
    """Yield (unique_id, node) for each manifest time spine in included_nodes."""
    for raw_id, time_spine in self.manifest.time_spines.items():
        unique_id = UniqueId(raw_id)
        if unique_id in included_nodes:
            yield unique_id, time_spine
def saved_query_nodes(
self, included_nodes: Set[UniqueId]
) -> Iterator[Tuple[UniqueId, SavedQuery]]:
@@ -202,6 +212,7 @@ class SelectorMethod(metaclass=abc.ABCMeta):
self.metric_nodes(included_nodes),
self.unit_tests(included_nodes),
self.semantic_model_nodes(included_nodes),
self.time_spine_nodes(included_nodes),
self.saved_query_nodes(included_nodes),
)
@@ -220,6 +231,7 @@ class SelectorMethod(metaclass=abc.ABCMeta):
self.metric_nodes(included_nodes),
self.unit_tests(included_nodes),
self.semantic_model_nodes(included_nodes),
self.time_spine_nodes(included_nodes),
self.saved_query_nodes(included_nodes),
)
@@ -402,6 +414,31 @@ class SemanticModelSelectorMethod(SelectorMethod):
yield unique_id
class TimeSpineSelectorMethod(SelectorMethod):
    """Implements the `time_spine:` selector method.

    Accepts either ``name`` or ``package.name``; both parts support fnmatch
    glob patterns.
    """

    def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[UniqueId]:
        parts = selector.split(".")
        if len(parts) == 1:
            target_package, target_name = SELECTOR_GLOB, parts[0]
        elif len(parts) == 2:
            target_package, target_name = parts
        else:
            raise DbtRuntimeError(
                (
                    'Invalid time spine selector value "{}". Time spines must be of '
                    "the form ${{time_spine_name}} or "
                    "${{time_spine_package.time_spine_name}}"
                ).format(selector)
            )
        for unique_id, node in self.time_spine_nodes(included_nodes):
            if fnmatch(node.package_name, target_package) and fnmatch(
                node.name, target_name
            ):
                yield unique_id
class SavedQuerySelectorMethod(SelectorMethod):
def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[UniqueId]:
parts = selector.split(".")

View File

@@ -58,6 +58,7 @@ from dbt.contracts.graph.nodes import (
SeedNode,
SemanticModel,
SourceDefinition,
TimeSpine,
)
from dbt.contracts.graph.semantic_manifest import SemanticManifest
from dbt.events.types import (
@@ -1119,6 +1120,11 @@ class ManifestLoader:
continue
_process_refs(self.manifest, current_project, semantic_model, dependencies)
self.update_semantic_model(semantic_model)
for time_spine in self.manifest.time_spines.values():
if time_spine.created_at < self.started_at:
continue
_process_refs(self.manifest, current_project, time_spine, dependencies)
self.update_time_spine(time_spine)
# Takes references in 'metrics' array of nodes and exposures, finds the target
# node, and updates 'depends_on.nodes' with the unique id
@@ -1156,7 +1162,7 @@ class ManifestLoader:
primary_key = node.infer_primary_key(generic_tests)
node.primary_key = sorted(primary_key)
def update_semantic_model(self, semantic_model) -> None:
def update_semantic_model(self, semantic_model: SemanticModel) -> None:
# This has to be done at the end of parsing because the referenced model
# might have alias/schema/database fields that are updated by yaml config.
if semantic_model.depends_on_nodes[0]:
@@ -1168,6 +1174,18 @@ class ManifestLoader:
database=refd_node.database,
)
def update_time_spine(self, time_spine: TimeSpine) -> None:
    """Resolve the time spine's node_relation from its referenced model.

    This has to be done at the end of parsing because the referenced model
    might have alias/schema/database fields that are updated by yaml config.
    """
    # Guard against an empty depends_on list (possible if the model ref was
    # never rendered) instead of raising IndexError on [0].
    if time_spine.depends_on_nodes and time_spine.depends_on_nodes[0]:
        refd_node = self.manifest.nodes[time_spine.depends_on_nodes[0]]
        time_spine.node_relation = NodeRelation(
            relation_name=refd_node.relation_name,
            alias=refd_node.alias,
            schema_name=refd_node.schema,
            database=refd_node.database,
        )
# nodes: node and column descriptions, version columns descriptions
# sources: source and table descriptions, column descriptions
# macros: macro argument descriptions

View File

@@ -18,6 +18,7 @@ from dbt.artifacts.resources import (
NonAdditiveDimension,
QueryParams,
SavedQueryConfig,
TimeSpinePrimaryColumn,
WhereFilter,
WhereFilterIntersection,
)
@@ -30,9 +31,17 @@ from dbt.context.context_config import (
from dbt.context.providers import (
generate_parse_exposure,
generate_parse_semantic_models,
generate_parse_time_spines,
)
from dbt.contracts.files import SchemaSourceFile
from dbt.contracts.graph.nodes import Exposure, Group, Metric, SavedQuery, SemanticModel
from dbt.contracts.graph.nodes import (
Exposure,
Group,
Metric,
SavedQuery,
SemanticModel,
TimeSpine,
)
from dbt.contracts.graph.unparsed import (
UnparsedConversionTypeParams,
UnparsedCumulativeTypeParams,
@@ -51,6 +60,8 @@ from dbt.contracts.graph.unparsed import (
UnparsedQueryParams,
UnparsedSavedQuery,
UnparsedSemanticModel,
UnparsedTimeSpine,
UnparsedTimeSpinePrimaryColumn,
)
from dbt.exceptions import JSONValidationError, YamlParseDictError
from dbt.node_types import NodeType
@@ -865,3 +876,62 @@ class SavedQueryParser(YamlReader):
# The supertype (YamlReader) requires `parse` to return a ParseResult, so
# we return an empty one because we don't have one to actually return.
return ParseResult()
class TimeSpineParser(YamlReader):
    """Parses `time_spines:` entries from schema YAML files into TimeSpine nodes."""

    def __init__(self, schema_parser: SchemaParser, yaml: YamlBlock) -> None:
        super().__init__(schema_parser, yaml, "time_spines")
        self.schema_parser = schema_parser
        self.yaml = yaml

    def _get_primary_column(
        self, unparsed: UnparsedTimeSpinePrimaryColumn
    ) -> TimeSpinePrimaryColumn:
        # Converts the raw granularity string into the TimeGranularity enum;
        # an unrecognized value raises ValueError here.
        return TimeSpinePrimaryColumn(
            name=unparsed.name, time_granularity=TimeGranularity(unparsed.time_granularity)
        )

    def parse_time_spine(self, unparsed: UnparsedTimeSpine) -> None:
        """Convert one UnparsedTimeSpine into a TimeSpine node and add it to the manifest."""
        package_name = self.project.project_name
        unique_id = f"{NodeType.TimeSpine}.{package_name}.{unparsed.name}"
        path = self.yaml.path.relative_path
        fqn = self.schema_parser.get_fqn_prefix(path)
        fqn.append(unparsed.name)
        parsed = TimeSpine(
            name=unparsed.name,
            resource_type=NodeType.TimeSpine,
            package_name=package_name,
            path=path,
            original_file_path=self.yaml.path.original_file_path,
            unique_id=unique_id,
            fqn=fqn,
            model=unparsed.model,
            primary_column=self._get_primary_column(unparsed.primary_column),
            node_relation=None,  # Resolved from the value of "model" after parsing
        )
        # Render the ref() expression so the reference is recorded on the node
        # (via TimeSpineRefResolver); the rendered output itself is discarded.
        ctx = generate_parse_time_spines(
            parsed,
            self.root_project,
            self.schema_parser.manifest,
            package_name,
        )
        if parsed.model is not None:
            model_ref = "{{ " + parsed.model + " }}"
            get_rendered(model_ref, ctx, parsed)
        # Narrowing assertion for the type checker.
        assert isinstance(self.yaml.file, SchemaSourceFile)
        self.manifest.add_time_spine(self.yaml.file, parsed)

    def parse(self) -> None:
        """Validate and parse every `time_spines:` entry in the YAML block."""
        for data in self.get_key_dicts():  # todo? get_key_dicts
            try:
                UnparsedTimeSpine.validate(data)
                unparsed = UnparsedTimeSpine.from_dict(data)
            except (ValidationError, JSONValidationError) as exc:
                raise YamlParseDictError(self.yaml.path, self.key, data, exc)
            self.parse_time_spine(unparsed)

View File

@@ -261,6 +261,12 @@ class SchemaParser(SimpleParser[YamlBlock, ModelNode]):
saved_query_parser = SavedQueryParser(self, yaml_block)
saved_query_parser.parse()
if "time_spines" in dct:
from dbt.parser.schema_yaml_readers import TimeSpineParser
time_spine_parser = TimeSpineParser(self, yaml_block)
time_spine_parser.parse()
Parsed = TypeVar("Parsed", UnpatchedSourceDefinition, ParsedNodePatch, ParsedMacroPatch)
NodeTarget = TypeVar("NodeTarget", UnparsedNodeUpdate, UnparsedAnalysisUpdate, UnparsedModelUpdate)

File diff suppressed because one or more lines are too long

View File

@@ -9,6 +9,7 @@ from dbt.contracts.graph.nodes import (
SavedQuery,
SemanticModel,
SourceDefinition,
TimeSpine,
UnitTestDefinition,
)
from dbt.events.types import NoNodesSelected
@@ -88,9 +89,11 @@ class ListTask(GraphRunnableTask):
yield self.manifest.unit_tests[unique_id]
elif unique_id in self.manifest.saved_queries:
yield self.manifest.saved_queries[unique_id]
elif unique_id in self.manifest.time_spines:
yield self.manifest.time_spines[unique_id]
else:
raise DbtRuntimeError(
f'Got an unexpected result from node selection: "{unique_id}"'
f'Got an unexpected result from node selection: "{unique_id}". '
f"Listing this node type is not yet supported!"
)
@@ -119,6 +122,10 @@ class ListTask(GraphRunnableTask):
assert isinstance(node, SemanticModel)
semantic_model_selector = ".".join([node.package_name, node.name])
yield f"semantic_model:{semantic_model_selector}"
elif node.resource_type == NodeType.TimeSpine:
assert isinstance(node, TimeSpine)
time_spine_selector = ".".join([node.package_name, node.name])
yield f"time_spine:{time_spine_selector}"
elif node.resource_type == NodeType.Unit:
assert isinstance(node, UnitTestDefinition)
unit_test_selector = ".".join([node.package_name, node.versioned_name])

View File

@@ -69,7 +69,7 @@ setup(
# Accept patches but avoid automatically updating past a set minor version range.
"dbt-extractor>=0.5.0,<=0.6",
"minimal-snowplow-tracker>=0.0.2,<0.1",
"dbt-semantic-interfaces>=0.6.8,<0.7",
"dbt-semantic-interfaces>=0.6.9,<0.7",
# Minor versions for these are expected to be backwards-compatible
"dbt-common>=1.3.0,<2.0",
"dbt-adapters>=1.1.1,<2.0",

77
index.html Normal file

File diff suppressed because one or more lines are too long

View File

@@ -537,6 +537,23 @@
"warn_unsupported": {
"type": "boolean",
"default": true
},
"to": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"default": null
},
"to_columns": {
"type": "array",
"items": {
"type": "string"
}
}
},
"additionalProperties": false,
@@ -744,7 +761,8 @@
"saved_query",
"semantic_model",
"unit_test",
"fixture"
"fixture",
"time_spine"
]
},
"name": {
@@ -1506,6 +1524,23 @@
"warn_unsupported": {
"type": "boolean",
"default": true
},
"to": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"default": null
},
"to_columns": {
"type": "array",
"items": {
"type": "string"
}
}
},
"additionalProperties": false,
@@ -2114,6 +2149,23 @@
"warn_unsupported": {
"type": "boolean",
"default": true
},
"to": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"default": null
},
"to_columns": {
"type": "array",
"items": {
"type": "string"
}
}
},
"additionalProperties": false,
@@ -2847,6 +2899,23 @@
"warn_unsupported": {
"type": "boolean",
"default": true
},
"to": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"default": null
},
"to_columns": {
"type": "array",
"items": {
"type": "string"
}
}
},
"additionalProperties": false,
@@ -3599,6 +3668,23 @@
"warn_unsupported": {
"type": "boolean",
"default": true
},
"to": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"default": null
},
"to_columns": {
"type": "array",
"items": {
"type": "string"
}
}
},
"additionalProperties": false,
@@ -3954,6 +4040,23 @@
"type": "boolean",
"default": true
},
"to": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"default": null
},
"to_columns": {
"type": "array",
"items": {
"type": "string"
}
},
"columns": {
"type": "array",
"items": {
@@ -4057,7 +4160,8 @@
"saved_query",
"semantic_model",
"unit_test",
"fixture"
"fixture",
"time_spine"
]
},
"name": {
@@ -4825,6 +4929,23 @@
"warn_unsupported": {
"type": "boolean",
"default": true
},
"to": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"default": null
},
"to_columns": {
"type": "array",
"items": {
"type": "string"
}
}
},
"additionalProperties": false,
@@ -5433,6 +5554,23 @@
"warn_unsupported": {
"type": "boolean",
"default": true
},
"to": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"default": null
},
"to_columns": {
"type": "array",
"items": {
"type": "string"
}
}
},
"additionalProperties": false,
@@ -6282,6 +6420,23 @@
"warn_unsupported": {
"type": "boolean",
"default": true
},
"to": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"default": null
},
"to_columns": {
"type": "array",
"items": {
"type": "string"
}
}
},
"additionalProperties": false,
@@ -6634,7 +6789,8 @@
"saved_query",
"semantic_model",
"unit_test",
"fixture"
"fixture",
"time_spine"
]
},
"name": {
@@ -7403,6 +7559,23 @@
"warn_unsupported": {
"type": "boolean",
"default": true
},
"to": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"default": null
},
"to_columns": {
"type": "array",
"items": {
"type": "string"
}
}
},
"additionalProperties": false,
@@ -9754,6 +9927,23 @@
"warn_unsupported": {
"type": "boolean",
"default": true
},
"to": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"default": null
},
"to_columns": {
"type": "array",
"items": {
"type": "string"
}
}
},
"additionalProperties": false,
@@ -9961,7 +10151,8 @@
"saved_query",
"semantic_model",
"unit_test",
"fixture"
"fixture",
"time_spine"
]
},
"name": {
@@ -10723,6 +10914,23 @@
"warn_unsupported": {
"type": "boolean",
"default": true
},
"to": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"default": null
},
"to_columns": {
"type": "array",
"items": {
"type": "string"
}
}
},
"additionalProperties": false,
@@ -11331,6 +11539,23 @@
"warn_unsupported": {
"type": "boolean",
"default": true
},
"to": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"default": null
},
"to_columns": {
"type": "array",
"items": {
"type": "string"
}
}
},
"additionalProperties": false,
@@ -12064,6 +12289,23 @@
"warn_unsupported": {
"type": "boolean",
"default": true
},
"to": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"default": null
},
"to_columns": {
"type": "array",
"items": {
"type": "string"
}
}
},
"additionalProperties": false,
@@ -12816,6 +13058,23 @@
"warn_unsupported": {
"type": "boolean",
"default": true
},
"to": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"default": null
},
"to_columns": {
"type": "array",
"items": {
"type": "string"
}
}
},
"additionalProperties": false,
@@ -13171,6 +13430,23 @@
"type": "boolean",
"default": true
},
"to": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"default": null
},
"to_columns": {
"type": "array",
"items": {
"type": "string"
}
},
"columns": {
"type": "array",
"items": {
@@ -13274,7 +13550,8 @@
"saved_query",
"semantic_model",
"unit_test",
"fixture"
"fixture",
"time_spine"
]
},
"name": {
@@ -14042,6 +14319,23 @@
"warn_unsupported": {
"type": "boolean",
"default": true
},
"to": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"default": null
},
"to_columns": {
"type": "array",
"items": {
"type": "string"
}
}
},
"additionalProperties": false,
@@ -14650,6 +14944,23 @@
"warn_unsupported": {
"type": "boolean",
"default": true
},
"to": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"default": null
},
"to_columns": {
"type": "array",
"items": {
"type": "string"
}
}
},
"additionalProperties": false,
@@ -15499,6 +15810,23 @@
"warn_unsupported": {
"type": "boolean",
"default": true
},
"to": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"default": null
},
"to_columns": {
"type": "array",
"items": {
"type": "string"
}
}
},
"additionalProperties": false,
@@ -15851,7 +16179,8 @@
"saved_query",
"semantic_model",
"unit_test",
"fixture"
"fixture",
"time_spine"
]
},
"name": {
@@ -16611,6 +16940,23 @@
"warn_unsupported": {
"type": "boolean",
"default": true
},
"to": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"default": null
},
"to_columns": {
"type": "array",
"items": {
"type": "string"
}
}
},
"additionalProperties": false,
@@ -18636,7 +18982,8 @@
"saved_query",
"semantic_model",
"unit_test",
"fixture"
"fixture",
"time_spine"
]
},
"package_name": {
@@ -19456,7 +19803,8 @@
"saved_query",
"semantic_model",
"unit_test",
"fixture"
"fixture",
"time_spine"
]
},
"package_name": {
@@ -20168,7 +20516,8 @@
"saved_query",
"semantic_model",
"unit_test",
"fixture"
"fixture",
"time_spine"
]
},
"package_name": {
@@ -20862,6 +21211,217 @@
"type": "string"
}
},
"time_spines": {
"type": "object",
"description": "The time spine models defined in the dbt project.",
"additionalProperties": {
"type": "object",
"title": "TimeSpine",
"properties": {
"name": {
"type": "string"
},
"resource_type": {
"enum": [
"model",
"analysis",
"test",
"snapshot",
"operation",
"seed",
"rpc",
"sql_operation",
"doc",
"source",
"macro",
"exposure",
"metric",
"group",
"saved_query",
"semantic_model",
"unit_test",
"fixture",
"time_spine"
]
},
"package_name": {
"type": "string"
},
"path": {
"type": "string"
},
"original_file_path": {
"type": "string"
},
"unique_id": {
"type": "string"
},
"fqn": {
"type": "array",
"items": {
"type": "string"
}
},
"model": {
"type": "string"
},
"node_relation": {
"anyOf": [
{
"type": "object",
"title": "NodeRelation",
"properties": {
"alias": {
"type": "string"
},
"schema_name": {
"type": "string"
},
"database": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"default": null
},
"relation_name": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"default": ""
}
},
"additionalProperties": false,
"required": [
"alias",
"schema_name"
]
},
{
"type": "null"
}
]
},
"primary_column": {
"type": "object",
"title": "TimeSpinePrimaryColumn",
"properties": {
"name": {
"type": "string"
},
"time_granularity": {
"enum": [
"nanosecond",
"microsecond",
"millisecond",
"second",
"minute",
"hour",
"day",
"week",
"month",
"quarter",
"year"
]
}
},
"additionalProperties": false,
"required": [
"name",
"time_granularity"
]
},
"depends_on": {
"type": "object",
"title": "DependsOn",
"properties": {
"macros": {
"type": "array",
"items": {
"type": "string"
}
},
"nodes": {
"type": "array",
"items": {
"type": "string"
}
}
},
"additionalProperties": false
},
"refs": {
"type": "array",
"items": {
"type": "object",
"title": "RefArgs",
"properties": {
"name": {
"type": "string"
},
"package": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"default": null
},
"version": {
"anyOf": [
{
"type": "string"
},
{
"type": "number"
},
{
"type": "null"
}
],
"default": null
}
},
"additionalProperties": false,
"required": [
"name"
]
}
},
"created_at": {
"type": "number"
}
},
"additionalProperties": false,
"required": [
"name",
"resource_type",
"package_name",
"path",
"original_file_path",
"unique_id",
"fqn",
"model",
"node_relation",
"primary_column"
]
},
"propertyNames": {
"type": "string"
}
},
"unit_tests": {
"type": "object",
"description": "The unit tests defined in the project",
@@ -20995,7 +21555,8 @@
"saved_query",
"semantic_model",
"unit_test",
"fixture"
"fixture",
"time_spine"
]
},
"package_name": {
@@ -21237,6 +21798,7 @@
"group_map",
"saved_queries",
"semantic_models",
"time_spines",
"unit_tests"
],
"$id": "https://schemas.getdbt.com/dbt/manifest/v12.json"

File diff suppressed because one or more lines are too long

View File

@@ -888,6 +888,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False):
"semantic_models": {},
"unit_tests": {},
"saved_queries": {},
"time_spines": {},
}
@@ -1447,6 +1448,7 @@ def expected_references_manifest(project):
"semantic_models": {},
"unit_tests": {},
"saved_queries": {},
"time_spines": {},
}
@@ -1925,4 +1927,5 @@ def expected_versions_manifest(project):
"semantic_models": {},
"unit_tests": {},
"saved_queries": {},
"time_spines": {},
}

View File

@@ -477,6 +477,7 @@ def verify_manifest(project, expected_manifest, start_time, manifest_schema_path
"semantic_models",
"unit_tests",
"saved_queries",
"time_spines",
}
assert set(manifest.keys()) == manifest_keys
@@ -502,7 +503,7 @@ def verify_manifest(project, expected_manifest, start_time, manifest_schema_path
for unique_id, node in expected_manifest[key].items():
assert unique_id in manifest[key]
assert manifest[key][unique_id] == node, f"{unique_id} did not match"
else: # ['docs', 'parent_map', 'child_map', 'group_map', 'selectors', 'semantic_models', 'saved_queries']
else: # ['docs', 'parent_map', 'child_map', 'group_map', 'selectors', 'semantic_models', 'saved_queries', 'time_spines']
assert manifest[key] == expected_manifest[key]

View File

@@ -0,0 +1,11 @@
time_spines:
- name: time_spine_day
model: ref('metricflow_time_spine')
primary_column:
name: date_day
time_granularity: day
- name: time_spine_second
model: ref('metricflow_time_spine_second')
primary_column:
name: ts_second
time_granularity: second

View File

@@ -0,0 +1,2 @@
select
{{ dbt.date_trunc('second', dbt.current_timestamp()) }} as ts_second

View File

@@ -112,6 +112,7 @@ select 4 as id
"""
semantic_models__sm_yml = """
semantic_models:
- name: my_sm

View File

@@ -133,12 +133,20 @@ class TestList:
def expect_model_output(self):
expectations = {
"name": ("ephemeral", "incremental", "inner", "metricflow_time_spine", "outer"),
"name": (
"ephemeral",
"incremental",
"inner",
"metricflow_time_spine",
"metricflow_time_spine_second",
"outer",
),
"selector": (
"test.ephemeral",
"test.incremental",
"test.sub.inner",
"test.metricflow_time_spine",
"test.time_spines.metricflow_time_spine",
"test.time_spines.metricflow_time_spine_second",
"test.outer",
),
"json": (
@@ -289,11 +297,53 @@ class TestList:
"contract": {"enforced": False, "alias_types": True},
"access": "protected",
},
"original_file_path": normalize("models/metricflow_time_spine.sql"),
"original_file_path": normalize(
"models/time_spines/metricflow_time_spine.sql"
),
"unique_id": "model.test.metricflow_time_spine",
"alias": "metricflow_time_spine",
"resource_type": "model",
},
{
"name": "metricflow_time_spine_second",
"package_name": "test",
"depends_on": {
"nodes": [],
"macros": ["macro.dbt.current_timestamp", "macro.dbt.date_trunc"],
},
"tags": [],
"config": {
"enabled": True,
"group": None,
"materialized": "view",
"post-hook": [],
"tags": [],
"pre-hook": [],
"quoting": {},
"column_types": {},
"persist_docs": {},
"full_refresh": None,
"unique_key": None,
"on_schema_change": "ignore",
"on_configuration_change": "apply",
"database": None,
"schema": None,
"alias": None,
"meta": {},
"grants": {},
"packages": [],
"incremental_strategy": None,
"docs": {"node_color": None, "show": True},
"contract": {"enforced": False, "alias_types": True},
"access": "protected",
},
"original_file_path": normalize(
"models/time_spines/metricflow_time_spine_second.sql"
),
"unique_id": "model.test.metricflow_time_spine_second",
"alias": "metricflow_time_spine_second",
"resource_type": "model",
},
{
"name": "outer",
"package_name": "test",
@@ -337,7 +387,8 @@ class TestList:
self.dir("models/ephemeral.sql"),
self.dir("models/incremental.sql"),
self.dir("models/sub/inner.sql"),
self.dir("models/metricflow_time_spine.sql"),
self.dir("models/time_spines/metricflow_time_spine.sql"),
self.dir("models/time_spines/metricflow_time_spine_second.sql"),
self.dir("models/outer.sql"),
),
}
@@ -572,7 +623,8 @@ class TestList:
"source:test.my_source.my_table",
"test.not_null_outer_id",
"test.unique_outer_id",
"test.metricflow_time_spine",
"test.time_spines.metricflow_time_spine",
"test.time_spines.metricflow_time_spine_second",
"test.t",
"semantic_model:test.my_sm",
"metric:test.total_outer",
@@ -604,6 +656,12 @@ class TestList:
results = self.run_dbt_ls(["--resource-type", "semantic_model"])
assert set(results) == {"semantic_model:test.my_sm"}
results = self.run_dbt_ls(["--resource-type", "time_spine"])
assert set(results) == {
"time_spine:test.time_spine_day",
"time_spine:test.time_spine_second",
}
results = self.run_dbt_ls(["--resource-type", "metric"])
assert set(results) == {"metric:test.total_outer"}
@@ -617,7 +675,8 @@ class TestList:
assert set(results) == {
"test.ephemeral",
"test.outer",
"test.metricflow_time_spine",
"test.time_spines.metricflow_time_spine",
"test.time_spines.metricflow_time_spine_second",
"test.incremental",
}
@@ -637,7 +696,8 @@ class TestList:
"test.not_null_outer_id",
"test.outer",
"test.sub.inner",
"test.metricflow_time_spine",
"test.time_spines.metricflow_time_spine",
"test.time_spines.metricflow_time_spine_second",
"test.t",
"test.unique_outer_id",
}
@@ -657,7 +717,8 @@ class TestList:
"test.incremental",
"test.not_null_outer_id",
"test.outer",
"test.metricflow_time_spine",
"test.time_spines.metricflow_time_spine",
"test.time_spines.metricflow_time_spine_second",
"test.sub.inner",
"test.t",
}
@@ -692,7 +753,8 @@ class TestList:
"test.not_null_outer_id",
"test.outer",
"test.sub.inner",
"test.metricflow_time_spine",
"test.time_spines.metricflow_time_spine",
"test.time_spines.metricflow_time_spine_second",
"test.t",
"test.unique_outer_id",
}
@@ -706,7 +768,8 @@ class TestList:
"test.incremental",
"test.outer",
"test.sub.inner",
"test.metricflow_time_spine",
"test.time_spines.metricflow_time_spine",
"test.time_spines.metricflow_time_spine_second",
}
del os.environ["DBT_EXCLUDE_RESOURCE_TYPES"]

View File

@@ -102,6 +102,33 @@ metricflow_time_spine_sql = """
SELECT to_date('02/20/2023, 'mm/dd/yyyy') as date_day
"""
# SQL fixture for a second-granularity time spine model.
# Fix: the original literal was `to_datetime('02/20/2023, 'mm/dd/yyyy hh:mm:ss')`,
# where a misplaced quote fused the date value with the opening of the format
# string and left the format itself unquoted — invalid SQL. The date value and
# the format mask are now two separate, properly quoted arguments.
# NOTE(review): the format mask includes hh:mm:ss but the value carries no time
# component — confirm the warehouse's to_datetime tolerates this.
metricflow_time_spine_second_sql = """
SELECT to_datetime('02/20/2023', 'mm/dd/yyyy hh:mm:ss') as ts_second
"""
time_spines_yml = """
time_spines:
- name: time_spine_second
model: ref('mf_time_spine_second')
primary_column:
name: ts_second
time_granularity: second
- name: time_spine_day
model: ref('mf_time_spine_day')
primary_column:
name: date_day
time_granularity: day
"""
bad_time_spine_yml = """
time_spines:
- name: bad_model_ref
model: ref('doesnt_exist')
primary_column:
name: stuff
time_granularity: day
"""
models_people_metrics_yml = """
version: 2

View File

@@ -215,6 +215,9 @@ class TestInvalidDerivedMetrics:
run_dbt(["run"])
# new tests with similar structure
class TestMetricDependsOn:
@pytest.fixture(scope="class")
def models(self):

View File

@@ -0,0 +1,118 @@
import pytest
from dbt.cli.main import dbtRunner
from dbt.contracts.graph.manifest import Manifest
from dbt.exceptions import TargetNotFoundError
from dbt.tests.util import get_manifest
from dbt_semantic_interfaces.type_enums import TimeGranularity
from tests.functional.metrics.fixtures import (
bad_time_spine_yml,
basic_metrics_yml,
metricflow_time_spine_second_sql,
metricflow_time_spine_sql,
models_people_sql,
semantic_model_people_yml,
time_spines_yml,
)
class TestSuccessfulTimeSpines:
    """Happy path: two valid time spine configs parse into manifest.time_spines."""

    @pytest.fixture(scope="class")
    def models(self):
        # Project layout: two spine models, the time_spines.yml config that
        # refs them, plus a semantic model + metrics so the semantic layer
        # parsing path is exercised end to end.
        return {
            "basic_metrics.yml": basic_metrics_yml,
            "mf_time_spine_day.sql": metricflow_time_spine_sql,
            "mf_time_spine_second.sql": metricflow_time_spine_second_sql,
            "time_spines.yml": time_spines_yml,
            "semantic_model_people.yml": semantic_model_people_yml,
            "people.sql": models_people_sql,
        }

    def test_time_spines(self, project):
        """Parse succeeds and both spines land in the manifest with the
        expected unique_ids, paths, node relations, and primary columns."""
        runner = dbtRunner()
        result = runner.invoke(["parse"])
        assert result.success
        assert isinstance(result.result, Manifest)
        manifest = get_manifest(project.project_root)
        # unique_id format is "time_spine.<package>.<name>"
        assert set(manifest.time_spines.keys()) == {
            "time_spine.test.time_spine_second",
            "time_spine.test.time_spine_day",
        }
        # Both spines are defined in the same YAML file.
        for time_spine in manifest.time_spines.values():
            assert time_spine.package_name == "test"
            assert time_spine.path == "time_spines.yml"
            assert time_spine.original_file_path == "models/time_spines.yml"
        time_spine_day = manifest.time_spines.get("time_spine.test.time_spine_day")
        time_spine_second = manifest.time_spines.get("time_spine.test.time_spine_second")
        assert time_spine_day.name == "time_spine_day"
        assert time_spine_second.name == "time_spine_second"
        # node_relation.alias comes from the ref()'d model's alias.
        assert time_spine_day.node_relation.alias == "mf_time_spine_day"
        assert time_spine_second.node_relation.alias == "mf_time_spine_second"
        assert time_spine_day.primary_column.name == "date_day"
        assert time_spine_second.primary_column.name == "ts_second"
        assert time_spine_day.primary_column.time_granularity == TimeGranularity.DAY
        assert time_spine_second.primary_column.time_granularity == TimeGranularity.SECOND
class TestTimeSpineModelDoesNotExist:
    """Failure path: a time spine whose model ref() does not resolve."""

    @pytest.fixture(scope="class")
    def models(self):
        # Same project as the happy path, but time_spines.yml refs a model
        # ('doesnt_exist') that is not in the project.
        return {
            "basic_metrics.yml": basic_metrics_yml,
            "mf_time_spine_day.sql": metricflow_time_spine_sql,
            "mf_time_spine_second.sql": metricflow_time_spine_second_sql,
            "time_spines.yml": bad_time_spine_yml,
            "semantic_model_people.yml": semantic_model_people_yml,
            "people.sql": models_people_sql,
        }

    def test_time_spines(self, project):
        """Parse fails with TargetNotFoundError naming the bad ref."""
        runner = dbtRunner()
        result = runner.invoke(["parse"])
        assert not result.success
        # The bad model ref in the time spine definition surfaces as a
        # ref-resolution (TargetNotFound) error, not a generic parse error.
        assert isinstance(result.exception, TargetNotFoundError)
        assert (
            "Time_Spine 'time_spine.test.bad_model_ref' (models/time_spines.yml) depends on a node named 'doesnt_exist' which was not found"
            in result.exception.msg
        )
# TODO: test legacy time spine
class TestLegacyTimeSpine:
    """Legacy path: a project with only the old-style metricflow_time_spine
    model and no time_spines.yml config still parses successfully."""

    @pytest.fixture(scope="class")
    def models(self):
        # No time_spines.yml here — only the legacy model name convention.
        return {
            "basic_metrics.yml": basic_metrics_yml,
            "metricflow_time_spine.sql": metricflow_time_spine_sql,
            "semantic_model_people.yml": semantic_model_people_yml,
            "people.sql": models_people_sql,
        }

    def test_time_spines(self, project):
        """Parse succeeds; assertions on the resulting time spines are still TODO."""
        runner = dbtRunner()
        result = runner.invoke(["parse"])
        assert result.success
        assert isinstance(result.result, Manifest)
        manifest = get_manifest(project.project_root)
        # TODO(review): re-enable once legacy time spine handling is decided.
        # The draft below was garbled in the original; intended form:
        # assert set(manifest.time_spines.keys()) == {
        #     "time_spine.test.time_spine_second",
        #     "time_spine.test.time_spine_day",
        # }
        # for time_spine in manifest.time_spines.values():
        #     assert time_spine.package_name == "test"
        #     assert time_spine.path == "time_spines.yml"
        #     assert time_spine.original_file_path == "models/time_spines.yml"
        # TODO(review): also add a failure case where neither a legacy spine
        # model nor a time_spines.yml config exists.

View File

@@ -375,6 +375,7 @@ class ManifestTest(unittest.TestCase):
metadata=ManifestMetadata(generated_at=datetime.utcnow()),
semantic_models={},
saved_queries={},
time_spines={},
)
invocation_id = dbt_common.invocation._INVOCATION_ID
@@ -407,6 +408,7 @@ class ManifestTest(unittest.TestCase):
"semantic_models": {},
"unit_tests": {},
"saved_queries": {},
"time_spines": {},
},
)
@@ -508,6 +510,7 @@ class ManifestTest(unittest.TestCase):
"metrics",
"semantic_models",
"saved_queries",
"time_spines",
]
),
)
@@ -577,6 +580,7 @@ class ManifestTest(unittest.TestCase):
"semantic_models": {},
"unit_tests": {},
"saved_queries": {},
"time_spines": {},
},
)
@@ -922,6 +926,7 @@ class MixedManifestTest(unittest.TestCase):
"semantic_models": {},
"unit_tests": {},
"saved_queries": {},
"time_spines": {},
},
)
@@ -1006,6 +1011,7 @@ class MixedManifestTest(unittest.TestCase):
"sources",
"semantic_models",
"saved_queries",
"time_spines",
]
),
)

View File

@@ -3,10 +3,10 @@ import pytest
from dbt.contracts.graph.semantic_manifest import SemanticManifest
# Overwrite the default nods to construct the manifest
# Overwrite the default nodes to construct the manifest
@pytest.fixture
def nodes(metricflow_time_spine_model):
return [metricflow_time_spine_model]
def nodes(metricflow_time_spine_model, time_spines):
return [metricflow_time_spine_model] + [time_spine.model for time_spine in time_spines]
@pytest.fixture

View File

@@ -27,6 +27,7 @@ from dbt.graph.selector_methods import (
TagSelectorMethod,
TestNameSelectorMethod,
TestTypeSelectorMethod,
TimeSpineSelectorMethod,
UnitTestSelectorMethod,
VersionSelectorMethod,
)
@@ -50,9 +51,8 @@ def search_manifest_using_method(manifest, method, selection):
| set(manifest.sources)
| set(manifest.exposures)
| set(manifest.metrics)
| set(manifest.semantic_models)
| set(manifest.saved_queries)
| set(manifest.unit_tests),
# here & this file!
| set(manifest.semantic_models) | set(manifest.saved_queries) | set(manifest.unit_tests),
selection,
)
results = {manifest.expect(uid).search_name for uid in selected}
@@ -538,6 +538,16 @@ def test_select_semantic_model(manifest, table_model):
assert search_manifest_using_method(manifest, method, "*omer") == {"customer"}
def test_select_time_spine(manifest, time_spine):
manifest.time_spines[time_spine.unique_id] = time_spine
methods = MethodManager(manifest, None)
method = methods.get_method("time_spine", [])
assert isinstance(method, TimeSpineSelectorMethod)
assert search_manifest_using_method(manifest, method, "customer") == {"customer"}
assert not search_manifest_using_method(manifest, method, "not_customer")
assert search_manifest_using_method(manifest, method, "*omer") == {"customer"}
def test_select_semantic_model_by_tag(manifest, table_model):
semantic_model = make_semantic_model(
"pkg",

View File

@@ -21,6 +21,7 @@ node_type_pluralizations = {
NodeType.Unit: "unit_tests",
NodeType.SavedQuery: "saved_queries",
NodeType.Fixture: "fixtures",
NodeType.TimeSpine: "time_spines",
}

View File

@@ -13,6 +13,7 @@ from dbt.artifacts.resources import (
RefArgs,
TestConfig,
TestMetadata,
TimeSpinePrimaryColumn,
WhereFilter,
WhereFilterIntersection,
)
@@ -38,11 +39,12 @@ from dbt.contracts.graph.nodes import (
SemanticModel,
SingularTestNode,
SourceDefinition,
TimeSpine,
UnitTestDefinition,
)
from dbt.contracts.graph.unparsed import UnitTestInputFixture, UnitTestOutputFixture
from dbt.node_types import NodeType
from dbt_semantic_interfaces.type_enums import MetricType
from dbt_semantic_interfaces.type_enums import MetricType, TimeGranularity
def make_model(
@@ -874,6 +876,30 @@ def saved_query() -> SavedQuery:
)
# TODO: populate depends_on
@pytest.fixture
def time_spine(table_model) -> TimeSpine:
    """A minimal TimeSpine resource wired to ``table_model``, with a
    day-granularity primary column."""
    package = "test"
    spine_name = "test_time_spine"
    file_path = "test_path"
    relation = NodeRelation(
        alias=table_model.alias,
        schema_name="dbt",
        relation_name=table_model.name,
    )
    column = TimeSpinePrimaryColumn(
        name="ds_day",
        time_granularity=TimeGranularity.DAY,
    )
    return TimeSpine(
        name=spine_name,
        resource_type=NodeType.TimeSpine,
        package_name=package,
        path=file_path,
        unique_id=f"time_spine.{package}.{spine_name}",
        original_file_path=file_path,
        fqn=[package, "time_spines", spine_name],
        model=table_model,
        node_relation=relation,
        primary_column=column,
    )
@pytest.fixture
def semantic_model(table_model) -> SemanticModel:
return make_semantic_model("test", "test_semantic_model", model=table_model)
@@ -1003,6 +1029,11 @@ def saved_queries(saved_query: SavedQuery) -> List[SavedQuery]:
return [saved_query]
@pytest.fixture
def time_spines(time_spine: TimeSpine) -> List[TimeSpine]:
    """Wrap the single ``time_spine`` fixture in the list shape that
    ``make_manifest`` expects."""
    return [time_spine]
@pytest.fixture
def files() -> Dict[str, AnySourceFile]:
return {}
@@ -1022,6 +1053,7 @@ def make_manifest(
semantic_models: List[SemanticModel] = [],
sources: List[SourceDefinition] = [],
unit_tests: List[UnitTestDefinition] = [],
time_spines: List[TimeSpine] = [],
) -> Manifest:
manifest = Manifest(
nodes={n.unique_id: n for n in nodes},
@@ -1038,6 +1070,7 @@ def make_manifest(
groups={g.unique_id: g for g in groups},
metadata=ManifestMetadata(adapter_type="postgres", project_name="pkg"),
saved_queries={s.unique_id: s for s in saved_queries},
time_spines={t.unique_id: t for t in time_spines},
)
manifest.build_parent_and_child_maps()
return manifest
@@ -1055,6 +1088,7 @@ def manifest(
semantic_models,
files,
saved_queries,
time_spines,
) -> Manifest:
return make_manifest(
nodes=nodes,
@@ -1065,4 +1099,5 @@ def manifest(
files=files,
metrics=metrics,
saved_queries=saved_queries,
time_spines=time_spines,
)