mirror of https://github.com/dbt-labs/dbt-core
synced 2025-12-23 07:31:28 +00:00

Compare commits: adding-sem ... jerco/get- (8 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 1d8c5af513 | |
| | 6954c4df1b | |
| | f841a7ca76 | |
| | 07a004b301 | |
| | b05582de39 | |
| | fa7c4d19f0 | |
| | 066346faa2 | |
| | 0a03355ceb | |
@@ -1,6 +0,0 @@
kind: Features
body: Adding the entity node
time: 2023-01-18T13:48:04.487817-06:00
custom:
  Author: callum-mcdata
  Issue: "6627"
.changes/unreleased/Fixes-20230116-123645.yaml | 6 (new file)

@@ -0,0 +1,6 @@
kind: Fixes
body: Respect quoting config for dbt.ref() + dbt.source() in dbt-py models
time: 2023-01-16T12:36:45.63092+01:00
custom:
  Author: jtcohen6
  Issue: "6103"
.changes/unreleased/Fixes-20230116-123709.yaml | 6 (new file)

@@ -0,0 +1,6 @@
kind: Fixes
body: Respect quoting config for dbt.this() in dbt-py models
time: 2023-01-16T12:37:09.000659+01:00
custom:
  Author: jtcohen6
  Issue: "6619"
.changes/unreleased/Fixes-20230117-101342.yaml | 6 (new file)

@@ -0,0 +1,6 @@
kind: Fixes
body: Provide backward compatibility for `get_merge_sql` arguments
time: 2023-01-17T10:13:42.118336-06:00
custom:
  Author: dave-connors-3
  Issue: "6625"
.changes/unreleased/Under the Hood-20230113-150700.yaml | 6 (new file)

@@ -0,0 +1,6 @@
kind: Under the Hood
body: Port docs tests to pytest
time: 2023-01-13T15:07:00.477038-05:00
custom:
  Author: peterallenwebb
  Issue: "6573"
.github/workflows/release-branch-tests.yml | 2 (vendored)

@@ -39,7 +39,7 @@ jobs:
      max-parallel: 1
      fail-fast: false
      matrix:
        branch: [1.0.latest, 1.1.latest, 1.2.latest, 1.3.latest, main]
        branch: [1.0.latest, 1.1.latest, 1.2.latest, 1.3.latest, 1.4.latest, main]

    steps:
      - name: Call CI workflow for ${{ matrix.branch }} branch
@@ -269,6 +269,22 @@ class BaseAdapter(metaclass=AdapterMeta):
        """
        return self.connections.execute(sql=sql, auto_begin=auto_begin, fetch=fetch)

    @available.parse(lambda *a, **k: [])
    def get_column_schema_from_query(
        self, sql: str, auto_begin: bool = False, fetch: bool = False
    ) -> Tuple[AdapterResponse, agate.Table]:
        """Execute the given SQL. This is a thin wrapper around
        ConnectionManager.execute.

        :param str sql: The sql to execute.
        :param bool auto_begin: If set, and dbt is not currently inside a
            transaction, automatically begin one.
        :param bool fetch: If set, fetch results.
        :return: A tuple of the query status and results (empty if fetch=False).
        :rtype: List[(column_name: str, data_type: str]
        """
        return self.connections.get_column_schema_from_query(sql=sql)

    @available.parse(lambda *a, **k: ("", empty_table()))
    def get_partitions_metadata(self, table: str) -> Tuple[agate.Table]:
        """Obtain partitions metadata for a BigQuery partitioned table.
@@ -128,6 +128,31 @@ class SQLConnectionManager(BaseConnectionManager):

        return dbt.clients.agate_helper.table_from_data_flat(data, column_names)

    @classmethod
    def data_type_code_to_name(cls, int) -> str:
        """Get the string representation of the data type from the type_code."""
        # https://peps.python.org/pep-0249/#type-objects
        raise dbt.exceptions.NotImplementedError(
            "`data_type_code_to_name` is not implemented for this adapter!"
        )

    @classmethod
    def get_column_schema_from_cursor(cls, cursor: Any) -> List[Tuple[str, str]]:
        # (column_name, data_type)
        columns: List[Tuple[str, str]] = []

        if cursor.description is not None:
            # https://peps.python.org/pep-0249/#description
            columns = [
                # TODO: ignoring size, precision, scale for now
                # (though it is part of DB-API standard, and our Column class does have these attributes)
                # IMO user-defined contracts shouldn't have to match an exact size/precision/scale
                (col[0], cls.data_type_code_to_name(col[1]))
                for col in cursor.description
            ]

        return columns

    def execute(
        self, sql: str, auto_begin: bool = False, fetch: bool = False
    ) -> Tuple[AdapterResponse, agate.Table]:

@@ -140,6 +165,20 @@ class SQLConnectionManager(BaseConnectionManager):
            table = dbt.clients.agate_helper.empty_table()
        return response, table

    # TODO: do we need to care about auto_begin here?
    def get_column_schema_from_query(self, sql: str) -> List[Tuple[str, str]]:
        sql = self._add_query_comment(sql)
        _, cursor = self.add_query(sql)
        return self.get_column_schema_from_cursor(cursor)

    # For dbt-bigquery
    # def get_column_schema_from_query(cls, sql: str) -> List[Tuple[str, str]]:
    #     sql = self._add_query_comment(sql)
    #     # auto_begin is ignored on bigquery, and only included for consistency
    #     query_job, iterator = self.raw_execute(sql)
    #     columns = [(field.name, field.field_type) for field in resp.iterator]
    #     return columns

    def add_begin_query(self):
        return self.add_query("BEGIN", auto_begin=False)
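The new `get_column_schema_from_cursor` classmethod leans on the DB-API (PEP 249) contract: after a query runs, `cursor.description` exposes one 7-item sequence per result column, with the name at index 0 and an adapter-specific type code at index 1. A minimal standalone sketch of that pattern, using `sqlite3` purely for illustration (not dbt code):

```python
import sqlite3

# PEP 249: cursor.description is None until a query runs, then yields one
# 7-item sequence per result column; [0] is the name, [1] the type code.
conn = sqlite3.connect(":memory:")
cursor = conn.execute("select 1 as id, 'a' as name")
columns = []
if cursor.description is not None:
    # sqlite3 leaves the type_code slot as None, which is why dbt requires
    # each adapter to implement its own data_type_code_to_name()
    columns = [(col[0], col[1]) for col in cursor.description]
print(columns)  # [('id', None), ('name', None)]
```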
@@ -48,7 +48,6 @@ def print_compile_stats(stats):
        NodeType.Source: "source",
        NodeType.Exposure: "exposure",
        NodeType.Metric: "metric",
        NodeType.Entity: "entity",
    }

    results = {k: 0 for k in names.keys()}

@@ -84,8 +83,6 @@ def _generate_stats(manifest: Manifest):
        stats[exposure.resource_type] += 1
    for metric in manifest.metrics.values():
        stats[metric.resource_type] += 1
    for entity in manifest.entities.values():
        stats[entity.resource_type] += 1
    for macro in manifest.macros.values():
        stats[macro.resource_type] += 1
    return stats
@@ -354,13 +351,6 @@ class Compiler:
            )

        if node.language == ModelLanguage.python:
            # TODO could we also 'minify' this code at all? just aesthetic, not functional

            # quoating seems like something very specific to sql so far
            # for all python implementations we are seeing there's no quating.
            # TODO try to find better way to do this, given that
            original_quoting = self.config.quoting
            self.config.quoting = {key: False for key in original_quoting.keys()}
            context = self._create_node_context(node, manifest, extra_context)

            postfix = jinja.get_rendered(

@@ -370,8 +360,6 @@ class Compiler:
            )
            # we should NOT jinja render the python model's 'raw code'
            node.compiled_code = f"{node.raw_code}\n\n{postfix}"
            # restore quoting settings in the end since context is lazy evaluated
            self.config.quoting = original_quoting

        else:
            context = self._create_node_context(node, manifest, extra_context)
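The block removed here worked by mutating `self.config.quoting` globally for python models and restoring it afterwards; as the surviving comment notes, that is fragile because the node context is lazily evaluated. A hypothetical standalone sketch of the hazard (names invented for illustration):

```python
# A config dict shared by reference, as with self.config.quoting above.
quoting = {"identifier": True}

def lazy_context():
    return lambda: quoting["identifier"]  # evaluated only at render time

saved = dict(quoting)
quoting["identifier"] = False  # temporarily disable quoting
ctx = lazy_context()
quoting.update(saved)          # restore settings "in the end"
print(ctx())                   # True -- the disable never took effect at render time
```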
@@ -401,8 +389,6 @@ class Compiler:
            linker.dependency(node.unique_id, (manifest.sources[dependency].unique_id))
        elif dependency in manifest.metrics:
            linker.dependency(node.unique_id, (manifest.metrics[dependency].unique_id))
        elif dependency in manifest.entities:
            linker.dependency(node.unique_id, (manifest.entities[dependency].unique_id))
        else:
            raise GraphDependencyNotFoundError(node, dependency)

@@ -415,8 +401,6 @@ class Compiler:
            self.link_node(linker, exposure, manifest)
        for metric in manifest.metrics.values():
            self.link_node(linker, metric, manifest)
        for entity in manifest.entities.values():
            self.link_node(linker, entity, manifest)

        cycle = linker.find_cycles()
@@ -381,7 +381,6 @@ class PartialProject(RenderComponents):
    sources: Dict[str, Any]
    tests: Dict[str, Any]
    metrics: Dict[str, Any]
    entities: Dict[str, Any]
    exposures: Dict[str, Any]
    vars_value: VarProvider

@@ -392,7 +391,6 @@ class PartialProject(RenderComponents):
        sources = cfg.sources
        tests = cfg.tests
        metrics = cfg.metrics
        entities = cfg.entities
        exposures = cfg.exposures
        if cfg.vars is None:
            vars_dict: Dict[str, Any] = {}

@@ -448,7 +446,6 @@ class PartialProject(RenderComponents):
            sources=sources,
            tests=tests,
            metrics=metrics,
            entities=entities,
            exposures=exposures,
            vars=vars_value,
            config_version=cfg.config_version,

@@ -553,7 +550,6 @@ class Project:
    sources: Dict[str, Any]
    tests: Dict[str, Any]
    metrics: Dict[str, Any]
    entities: Dict[str, Any]
    exposures: Dict[str, Any]
    vars: VarProvider
    dbt_version: List[VersionSpecifier]

@@ -628,7 +624,6 @@ class Project:
            "sources": self.sources,
            "tests": self.tests,
            "metrics": self.metrics,
            "entities": self.entities,
            "exposures": self.exposures,
            "vars": self.vars.to_dict(),
            "require-dbt-version": [v.to_version_string() for v in self.dbt_version],
@@ -117,7 +117,6 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
            sources=project.sources,
            tests=project.tests,
            metrics=project.metrics,
            entities=project.entities,
            exposures=project.exposures,
            vars=project.vars,
            config_version=project.config_version,

@@ -313,7 +312,6 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
            "sources": self._get_config_paths(self.sources),
            "tests": self._get_config_paths(self.tests),
            "metrics": self._get_config_paths(self.metrics),
            "entities": self._get_config_paths(self.entities),
            "exposures": self._get_config_paths(self.exposures),
        }

@@ -502,7 +500,6 @@ class UnsetProfileConfig(RuntimeConfig):
            "sources": self.sources,
            "tests": self.tests,
            "metrics": self.metrics,
            "entities": self.entities,
            "exposures": self.exposures,
            "vars": self.vars.to_dict(),
            "require-dbt-version": [v.to_version_string() for v in self.dbt_version],

@@ -565,7 +562,6 @@ class UnsetProfileConfig(RuntimeConfig):
            sources=project.sources,
            tests=project.tests,
            metrics=project.metrics,
            entities=project.entities,
            exposures=project.exposures,
            vars=project.vars,
            config_version=project.config_version,
@@ -45,8 +45,6 @@ class UnrenderedConfig(ConfigSource):
            model_configs = unrendered.get("tests")
        elif resource_type == NodeType.Metric:
            model_configs = unrendered.get("metrics")
        elif resource_type == NodeType.Entity:
            model_configs = unrendered.get("entities")
        elif resource_type == NodeType.Exposure:
            model_configs = unrendered.get("exposures")
        else:

@@ -72,8 +70,6 @@ class RenderedConfig(ConfigSource):
            model_configs = self.project.tests
        elif resource_type == NodeType.Metric:
            model_configs = self.project.metrics
        elif resource_type == NodeType.Entity:
            model_configs = self.project.entities
        elif resource_type == NodeType.Exposure:
            model_configs = self.project.exposures
        else:
@@ -33,7 +33,6 @@ from dbt.contracts.graph.nodes import (
    Macro,
    Exposure,
    Metric,
    Entity,
    SeedNode,
    SourceDefinition,
    Resource,

@@ -1505,44 +1504,6 @@ def generate_parse_metrics(
    }


class EntityRefResolver(BaseResolver):
    def __call__(self, *args) -> str:
        package = None
        if len(args) == 1:
            name = args[0]
        elif len(args) == 2:
            package, name = args
        else:
            raise RefArgsError(node=self.model, args=args)
        self.validate_args(name, package)
        self.model.refs.append(list(args))
        return ""

    def validate_args(self, name, package):
        if not isinstance(name, str):
            raise ParsingError(
                f"In the entity associated with {self.model.original_file_path} "
                "the name argument to ref() must be a string"
            )


def generate_parse_entities(
    entity: Entity,
    config: RuntimeConfig,
    manifest: Manifest,
    package_name: str,
) -> Dict[str, Any]:
    project = config.load_dependencies()[package_name]
    return {
        "ref": EntityRefResolver(
            None,
            entity,
            project,
            manifest,
        ),
    }


# This class is currently used by the schema parser in order
# to limit the number of macros in the context by using
# the TestMacroNamespace
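The removed `EntityRefResolver` follows dbt's general ref-resolver convention: `ref()` accepts either one positional argument (a name) or two (package, name), and the name must be a string. A standalone sketch of that argument-handling convention (hypothetical helper, not dbt's API):

```python
from typing import Optional, Tuple

def parse_ref_args(*args) -> Tuple[Optional[str], str]:
    # ref("name") or ref("package", "name"); anything else is an error
    package: Optional[str] = None
    if len(args) == 1:
        (name,) = args
    elif len(args) == 2:
        package, name = args
    else:
        raise ValueError(f"ref() takes 1 or 2 arguments, got {len(args)}")
    if not isinstance(name, str):
        raise TypeError("the name argument to ref() must be a string")
    return package, name

print(parse_ref_args("dim_customers"))            # (None, 'dim_customers')
print(parse_ref_args("my_pkg", "dim_customers"))  # ('my_pkg', 'dim_customers')
```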
@@ -227,7 +227,6 @@ class SchemaSourceFile(BaseSourceFile):
    sources: List[str] = field(default_factory=list)
    exposures: List[str] = field(default_factory=list)
    metrics: List[str] = field(default_factory=list)
    entities: List[str] = field(default_factory=list)
    # node patches contain models, seeds, snapshots, analyses
    ndp: List[str] = field(default_factory=list)
    # any macro patches in this file by macro unique_id.
@@ -29,7 +29,6 @@ from dbt.contracts.graph.nodes import (
    GenericTestNode,
    Exposure,
    Metric,
    Entity,
    UnpatchedSourceDefinition,
    ManifestNode,
    GraphMemberNode,

@@ -213,39 +212,6 @@ class MetricLookup(dbtClassMixin):
        return manifest.metrics[unique_id]


class EntityLookup(dbtClassMixin):
    def __init__(self, manifest: "Manifest"):
        self.storage: Dict[str, Dict[PackageName, UniqueID]] = {}
        self.populate(manifest)

    def get_unique_id(self, search_name, package: Optional[PackageName]):
        return find_unique_id_for_package(self.storage, search_name, package)

    def find(self, search_name, package: Optional[PackageName], manifest: "Manifest"):
        unique_id = self.get_unique_id(search_name, package)
        if unique_id is not None:
            return self.perform_lookup(unique_id, manifest)
        return None

    def add_entity(self, entity: Entity):
        if entity.search_name not in self.storage:
            self.storage[entity.search_name] = {}

        self.storage[entity.search_name][entity.package_name] = entity.unique_id

    def populate(self, manifest):
        for entity in manifest.entities.values():
            if hasattr(entity, "name"):
                self.add_entity(entity)

    def perform_lookup(self, unique_id: UniqueID, manifest: "Manifest") -> Entity:
        if unique_id not in manifest.entities:
            raise dbt.exceptions.DbtInternalError(
                f"Entity {unique_id} found in cache but not found in manifest"
            )
        return manifest.entities[unique_id]


# This handles both models/seeds/snapshots and sources/metrics/exposures
class DisabledLookup(dbtClassMixin):
    def __init__(self, manifest: "Manifest"):

@@ -490,9 +456,6 @@ class Disabled(Generic[D]):
MaybeMetricNode = Optional[Union[Metric, Disabled[Metric]]]


MaybeEntityNode = Optional[Union[Entity, Disabled[Entity]]]


MaybeDocumentation = Optional[Documentation]
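Like `MetricLookup` above it, the removed `EntityLookup` is a two-level index from search name to package name to unique id. A minimal standalone sketch of that structure (hypothetical names; dbt's real `find_unique_id_for_package` applies package-priority rules):

```python
from typing import Dict, Optional

# search_name -> package_name -> unique_id
storage: Dict[str, Dict[str, str]] = {}

def add(search_name: str, package: str, unique_id: str) -> None:
    storage.setdefault(search_name, {})[package] = unique_id

def find_unique_id(search_name: str, package: Optional[str]) -> Optional[str]:
    by_package = storage.get(search_name, {})
    if package is not None:
        return by_package.get(package)
    # no package specified: this sketch just takes any match
    return next(iter(by_package.values()), None)

add("customer", "my_project", "entity.my_project.customer")
print(find_unique_id("customer", None))          # entity.my_project.customer
print(find_unique_id("customer", "other_pkg"))   # None
```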
@@ -636,7 +599,6 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
    docs: MutableMapping[str, Documentation] = field(default_factory=dict)
    exposures: MutableMapping[str, Exposure] = field(default_factory=dict)
    metrics: MutableMapping[str, Metric] = field(default_factory=dict)
    entities: MutableMapping[str, Entity] = field(default_factory=dict)
    selectors: MutableMapping[str, Any] = field(default_factory=dict)
    files: MutableMapping[str, AnySourceFile] = field(default_factory=dict)
    metadata: ManifestMetadata = field(default_factory=ManifestMetadata)

@@ -658,9 +620,6 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
    _metric_lookup: Optional[MetricLookup] = field(
        default=None, metadata={"serialize": lambda x: None, "deserialize": lambda x: None}
    )
    _entity_lookup: Optional[EntityLookup] = field(
        default=None, metadata={"serialize": lambda x: None, "deserialize": lambda x: None}
    )
    _disabled_lookup: Optional[DisabledLookup] = field(
        default=None, metadata={"serialize": lambda x: None, "deserialize": lambda x: None}
    )
@@ -711,9 +670,6 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
    def update_metric(self, new_metric: Metric):
        _update_into(self.metrics, new_metric)

    def update_entity(self, new_entity: Entity):
        _update_into(self.entities, new_entity)

    def update_node(self, new_node: ManifestNode):
        _update_into(self.nodes, new_node)

@@ -729,7 +685,6 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
        self.flat_graph = {
            "exposures": {k: v.to_dict(omit_none=False) for k, v in self.exposures.items()},
            "metrics": {k: v.to_dict(omit_none=False) for k, v in self.metrics.items()},
            "entities": {k: v.to_dict(omit_none=False) for k, v in self.entities.items()},
            "nodes": {k: v.to_dict(omit_none=False) for k, v in self.nodes.items()},
            "sources": {k: v.to_dict(omit_none=False) for k, v in self.sources.items()},
        }

@@ -792,7 +747,6 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
            self.nodes.values(),
            self.sources.values(),
            self.metrics.values(),
            self.entities.values(),
        )
        for resource in all_resources:
            resource_type_plural = resource.resource_type.pluralize()

@@ -821,7 +775,6 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
            docs={k: _deepcopy(v) for k, v in self.docs.items()},
            exposures={k: _deepcopy(v) for k, v in self.exposures.items()},
            metrics={k: _deepcopy(v) for k, v in self.metrics.items()},
            entities={k: _deepcopy(v) for k, v in self.entities.items()},
            selectors={k: _deepcopy(v) for k, v in self.selectors.items()},
            metadata=self.metadata,
            disabled={k: _deepcopy(v) for k, v in self.disabled.items()},

@@ -838,7 +791,6 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
                self.sources.values(),
                self.exposures.values(),
                self.metrics.values(),
                self.entities.values(),
            )
        )
        forward_edges, backward_edges = build_node_edges(edge_members)

@@ -864,7 +816,6 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
            docs=self.docs,
            exposures=self.exposures,
            metrics=self.metrics,
            entities=self.entities,
            selectors=self.selectors,
            metadata=self.metadata,
            disabled=self.disabled,

@@ -886,8 +837,6 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
            return self.exposures[unique_id]
        elif unique_id in self.metrics:
            return self.metrics[unique_id]
        elif unique_id in self.entities:
            return self.entities[unique_id]
        else:
            # something terrible has happened
            raise dbt.exceptions.DbtInternalError(

@@ -924,12 +873,6 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
            self._metric_lookup = MetricLookup(self)
        return self._metric_lookup

    @property
    def entity_lookup(self) -> EntityLookup:
        if self._entity_lookup is None:
            self._entity_lookup = EntityLookup(self)
        return self._entity_lookup

    def rebuild_ref_lookup(self):
        self._ref_lookup = RefableLookup(self)

@@ -1030,31 +973,6 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
                return Disabled(disabled[0])
        return None

    def resolve_entity(
        self,
        target_entity_name: str,
        target_entity_package: Optional[str],
        current_project: str,
        node_package: str,
    ) -> MaybeEntityNode:

        entity: Optional[Entity] = None
        disabled: Optional[List[Entity]] = None

        candidates = _search_packages(current_project, node_package, target_entity_package)
        for pkg in candidates:
            entity = self.entity_lookup.find(target_entity_name, pkg, self)

            if entity is not None and entity.config.enabled:
                return entity

            # it's possible that the node is disabled
            if disabled is None:
                disabled = self.disabled_lookup.find(f"{target_entity_name}", pkg)
        if disabled:
            return Disabled(disabled[0])
        return None

    # Called by DocsRuntimeContext.doc
    def resolve_doc(
        self,

@@ -1165,11 +1083,6 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
        self.metrics[metric.unique_id] = metric
        source_file.metrics.append(metric.unique_id)

    def add_entity(self, source_file: SchemaSourceFile, entity: Entity):
        _check_duplicates(entity, self.entities)
        self.entities[entity.unique_id] = entity
        source_file.entities.append(entity.unique_id)

    def add_disabled_nofile(self, node: GraphMemberNode):
        # There can be multiple disabled nodes for the same unique_id
        if node.unique_id in self.disabled:

@@ -1185,8 +1098,6 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
                source_file.add_test(node.unique_id, test_from)
            if isinstance(node, Metric):
                source_file.metrics.append(node.unique_id)
            if isinstance(node, Entity):
                source_file.entities.append(node.unique_id)
            if isinstance(node, Exposure):
                source_file.exposures.append(node.unique_id)
        else:

@@ -1214,7 +1125,6 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
            self.docs,
            self.exposures,
            self.metrics,
            self.entities,
            self.selectors,
            self.files,
            self.metadata,

@@ -1227,7 +1137,6 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
            self._source_lookup,
            self._ref_lookup,
            self._metric_lookup,
            self._entity_lookup,
            self._disabled_lookup,
            self._analysis_lookup,
        )

@@ -1269,9 +1178,6 @@ class WritableManifest(ArtifactMixin):
    metrics: Mapping[UniqueID, Metric] = field(
        metadata=dict(description=("The metrics defined in the dbt project and its dependencies"))
    )
    entities: Mapping[UniqueID, Entity] = field(
        metadata=dict(description=("The entities defined in the dbt project and its dependencies"))
    )
    selectors: Mapping[UniqueID, Any] = field(
        metadata=dict(description=("The selectors defined in selectors.yml"))
    )

@@ -1296,8 +1202,7 @@ class WritableManifest(ArtifactMixin):

    @classmethod
    def compatible_previous_versions(self):
        # return [("manifest", 4), ("manifest", 5), ("manifest", 6), ("manifest", 7)]
        return []
        return [("manifest", 4), ("manifest", 5), ("manifest", 6), ("manifest", 7)]

    def __post_serialize__(self, dct):
        for unique_id, node in dct["nodes"].items():
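`resolve_entity`, like dbt's other `resolve_*` methods, walks an ordered list of candidate packages and returns the first enabled match, falling back to the disabled lookup. A simplified sketch of the candidate ordering `_search_packages` is assumed to produce (illustrative only; the real function also handles dependency scoping):

```python
from typing import Iterable, Optional

def search_packages(
    current_project: str, node_package: str, target_package: Optional[str] = None
) -> Iterable[Optional[str]]:
    if target_package is not None:
        return [target_package]            # explicit package wins
    if current_project == node_package:
        return [current_project, None]     # project first, then "anywhere"
    return [current_project, node_package, None]

print(list(search_packages("root", "root")))          # ['root', None]
print(list(search_packages("root", "some_pkg")))      # ['root', 'some_pkg', None]
print(list(search_packages("root", "pkg", "other")))  # ['other']
```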
@@ -368,11 +368,6 @@ class MetricConfig(BaseConfig):
    enabled: bool = True


@dataclass
class EntityConfig(BaseConfig):
    enabled: bool = True


@dataclass
class ExposureConfig(BaseConfig):
    enabled: bool = True

@@ -609,7 +604,6 @@ class SnapshotConfig(EmptySnapshotConfig):

RESOURCE_TYPES: Dict[NodeType, Type[BaseConfig]] = {
    NodeType.Metric: MetricConfig,
    NodeType.Entity: EntityConfig,
    NodeType.Exposure: ExposureConfig,
    NodeType.Source: SourceConfig,
    NodeType.Seed: SeedConfig,
@@ -55,7 +55,6 @@ from .model_config import (
    TestConfig,
    SourceConfig,
    MetricConfig,
    EntityConfig,
    ExposureConfig,
    EmptySnapshotConfig,
    SnapshotConfig,

@@ -273,7 +272,7 @@ class ParsedNode(NodeInfoMixin, ParsedNodeMandatory, SerializableType):
    @classmethod
    def _deserialize(cls, dct: Dict[str, int]):
        # The serialized ParsedNodes do not differ from each other
        # in fields that would allow 'from_dict' to distinguish
        # in fields that would allow 'from_dict' to distinguis
        # between them.
        resource_type = dct["resource_type"]
        if resource_type == "model":

@@ -393,7 +392,6 @@ class CompiledNode(ParsedNode):
    refs: List[List[str]] = field(default_factory=list)
    sources: List[List[str]] = field(default_factory=list)
    metrics: List[List[str]] = field(default_factory=list)
    entities: List[List[str]] = field(default_factory=list)
    depends_on: DependsOn = field(default_factory=DependsOn)
    compiled_path: Optional[str] = None
    compiled: bool = False
@@ -908,7 +906,6 @@ class Exposure(GraphNode):
    refs: List[List[str]] = field(default_factory=list)
    sources: List[List[str]] = field(default_factory=list)
    metrics: List[List[str]] = field(default_factory=list)
    entities: List[List[str]] = field(default_factory=list)
    created_at: float = field(default_factory=lambda: time.time())

    @property

@@ -1000,7 +997,6 @@ class Metric(GraphNode):
    depends_on: DependsOn = field(default_factory=DependsOn)
    refs: List[List[str]] = field(default_factory=list)
    metrics: List[List[str]] = field(default_factory=list)
    entities: List[List[str]] = field(default_factory=list)
    created_at: float = field(default_factory=lambda: time.time())

    @property
@@ -1069,63 +1065,6 @@ class Metric(GraphNode):
        )


@dataclass
class Entity(GraphNode):
    name: str
    model: str
    description: str
    dimensions: List[str]
    resource_type: NodeType = field(metadata={"restrict": [NodeType.Entity]})
    model_unique_id: Optional[str] = None
    meta: Dict[str, Any] = field(default_factory=dict)
    tags: List[str] = field(default_factory=list)
    config: EntityConfig = field(default_factory=EntityConfig)
    unrendered_config: Dict[str, Any] = field(default_factory=dict)
    sources: List[List[str]] = field(default_factory=list)
    depends_on: DependsOn = field(default_factory=DependsOn)
    refs: List[List[str]] = field(default_factory=list)
    entities: List[List[str]] = field(default_factory=list)
    metrics: List[List[str]] = field(default_factory=list)
    created_at: float = field(default_factory=lambda: time.time())

    @property
    def depends_on_nodes(self):
        return self.depends_on.nodes

    @property
    def search_name(self):
        return self.name

    def same_model(self, old: "Entity") -> bool:
        return self.model == old.model

    def same_dimensions(self, old: "Entity") -> bool:
        return self.dimensions == old.dimensions

    def same_description(self, old: "Entity") -> bool:
        return self.description == old.description

    def same_config(self, old: "Entity") -> bool:
        return self.config.same_contents(
            self.unrendered_config,
            old.unrendered_config,
        )

    def same_contents(self, old: Optional["Entity"]) -> bool:
        # existing when it didn't before is a change!
        # metadata/tags changes are not "changes"
        if old is None:
            return True

        return (
            self.same_model(old)
            and self.same_dimensions(old)
            and self.same_description(old)
            and self.same_config(old)
            and True
        )


# ====================================
# Patches
# ====================================
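The removed `Entity.same_contents` follows the same change-detection pattern as dbt's other graph nodes: compare only the fields that should trigger reprocessing, and leave metadata/tags-style fields out. A standalone sketch of the pattern with a hypothetical class:

```python
from dataclasses import dataclass, field
from typing import List, Optional

@dataclass
class Spec:  # hypothetical stand-in for a graph node
    model: str
    description: str = ""
    dimensions: List[str] = field(default_factory=list)

    def same_contents(self, old: Optional["Spec"]) -> bool:
        # existing when it didn't before is treated as the diff above treats it;
        # metadata/tags-style fields are deliberately left out of the comparison
        if old is None:
            return True
        return (
            self.model == old.model
            and self.dimensions == old.dimensions
            and self.description == old.description
        )

a = Spec(model="ref('dim_customers')", dimensions=["region"])
b = Spec(model="ref('dim_customers')", dimensions=["region", "tier"])
print(a.same_contents(b))     # False: the dimension list differs
print(a.same_contents(None))  # True, following the convention in the diff
```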
@@ -1187,7 +1126,6 @@ GraphMemberNode = Union[
    ResultNode,
    Exposure,
    Metric,
    Entity,
]

# All "nodes" (or node-like objects) in this file
@@ -533,21 +533,3 @@ class UnparsedMetric(dbtClassMixin, Replaceable):

        if data.get("model") is not None and data.get("calculation_method") == "derived":
            raise ValidationError("Derived metrics cannot have a 'model' property")


@dataclass
class UnparsedEntity(dbtClassMixin, Replaceable):
    """This class is used for entity information"""

    name: str
    model: str
    description: str = ""
    dimensions: List[str] = field(default_factory=list)
    meta: Dict[str, Any] = field(default_factory=dict)
    tags: List[str] = field(default_factory=list)
    config: Dict[str, Any] = field(default_factory=dict)

    @classmethod
    def validate(cls, data):
        super(UnparsedEntity, cls).validate(data)
        # TODO: Add validation here around include/exclude and others
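For reference, the removed `UnparsedEntity` maps a schema-file entry onto a plain dataclass. A hypothetical standalone re-creation showing how a parsed yaml dict would populate it (field names taken from the diff; the yaml loading itself is assumed):

```python
from dataclasses import dataclass, field
from typing import Any, Dict, List

@dataclass
class UnparsedEntity:
    name: str
    model: str
    description: str = ""
    dimensions: List[str] = field(default_factory=list)
    meta: Dict[str, Any] = field(default_factory=dict)
    tags: List[str] = field(default_factory=list)
    config: Dict[str, Any] = field(default_factory=dict)

raw = {
    "name": "customer",
    "model": "ref('dim_customers')",
    "dimensions": ["region", "tier"],
}
entity = UnparsedEntity(**raw)
print(entity.dimensions)  # ['region', 'tier']
```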
@@ -214,7 +214,6 @@ class Project(HyphenatedDbtClassMixin, Replaceable):
    sources: Dict[str, Any] = field(default_factory=dict)
    tests: Dict[str, Any] = field(default_factory=dict)
    metrics: Dict[str, Any] = field(default_factory=dict)
    entities: Dict[str, Any] = field(default_factory=dict)
    exposures: Dict[str, Any] = field(default_factory=dict)
    vars: Optional[Dict[str, Any]] = field(
        default=None,
BIN core/dbt/docs/build/doctrees/environment.pickle (vendored): binary file not shown
BIN core/dbt/docs/build/doctrees/index.doctree (vendored): binary file not shown
@@ -419,7 +419,9 @@ table.footnote td {
}

dl {
    margin: 0;
    margin-left: 0;
    margin-right: 0;
    margin-top: 0;
    padding: 0;
}
core/dbt/docs/build/html/_static/basic.css | 2 (vendored)

@@ -4,7 +4,7 @@
 *
 * Sphinx stylesheet -- basic theme.
 *
 * :copyright: Copyright 2007-2022 by the Sphinx team, see AUTHORS.
 * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS.
 * :license: BSD, see LICENSE for details.
 *
 */
core/dbt/docs/build/html/_static/doctools.js | 2 (vendored)

@@ -4,7 +4,7 @@
 *
 * Base JavaScript utilities for all Sphinx HTML documentation.
 *
 * :copyright: Copyright 2007-2022 by the Sphinx team, see AUTHORS.
 * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS.
 * :license: BSD, see LICENSE for details.
 *
 */
@@ -5,7 +5,7 @@
 * This script contains the language-specific data used by searchtools.js,
 * namely the list of stopwords, stemmer, scorer and splitter.
 *
 * :copyright: Copyright 2007-2022 by the Sphinx team, see AUTHORS.
 * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS.
 * :license: BSD, see LICENSE for details.
 *
 */
@@ -4,7 +4,7 @@
 *
 * Sphinx JavaScript utilities for the full-text search.
 *
 * :copyright: Copyright 2007-2022 by the Sphinx team, see AUTHORS.
 * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS.
 * :license: BSD, see LICENSE for details.
 *
 */
core/dbt/docs/build/html/genindex.html | 4 (vendored)

@@ -87,8 +87,8 @@
      ©2022, dbt Labs.

      |
      Powered by <a href="http://sphinx-doc.org/">Sphinx 6.0.0</a>
      & <a href="https://github.com/bitprophet/alabaster">Alabaster 0.7.12</a>
      Powered by <a href="http://sphinx-doc.org/">Sphinx 6.1.3</a>
      & <a href="https://github.com/bitprophet/alabaster">Alabaster 0.7.13</a>

    </div>
core/dbt/docs/build/html/index.html | 4 (vendored)

@@ -837,8 +837,8 @@
      ©2022, dbt Labs.

      |
      Powered by <a href="http://sphinx-doc.org/">Sphinx 6.0.0</a>
      & <a href="https://github.com/bitprophet/alabaster">Alabaster 0.7.12</a>
      Powered by <a href="http://sphinx-doc.org/">Sphinx 6.1.3</a>
      & <a href="https://github.com/bitprophet/alabaster">Alabaster 0.7.13</a>

      |
      <a href="_sources/index.rst.txt"
core/dbt/docs/build/html/search.html | 4 (vendored)

@@ -106,8 +106,8 @@
      ©2022, dbt Labs.

      |
      Powered by <a href="http://sphinx-doc.org/">Sphinx 6.0.0</a>
      & <a href="https://github.com/bitprophet/alabaster">Alabaster 0.7.12</a>
      Powered by <a href="http://sphinx-doc.org/">Sphinx 6.1.3</a>
      & <a href="https://github.com/bitprophet/alabaster">Alabaster 0.7.13</a>

    </div>
core/dbt/docs/build/html/searchindex.js | 2 (vendored)

File diff suppressed because one or more lines are too long
@@ -20,7 +20,7 @@ from .selector_spec import (

INTERSECTION_DELIMITER = ","

DEFAULT_INCLUDES: List[str] = ["fqn:*", "source:*", "exposure:*", "metric:*", "entity:*"]
DEFAULT_INCLUDES: List[str] = ["fqn:*", "source:*", "exposure:*", "metric:*"]
DEFAULT_EXCLUDES: List[str] = []
@@ -9,7 +9,6 @@ from dbt.contracts.graph.nodes import (
    SourceDefinition,
    Exposure,
    Metric,
    Entity,
    GraphMemberNode,
)
from dbt.contracts.graph.manifest import Manifest

@@ -52,8 +51,8 @@ class GraphQueue:
        node = self.manifest.expect(node_id)
        if node.resource_type != NodeType.Model:
            return False
        # must be a Model - tell mypy this won't be a Source or Exposure or Metric or Entity
        assert not isinstance(node, (SourceDefinition, Exposure, Metric, Entity))
        # must be a Model - tell mypy this won't be a Source or Exposure or Metric
        assert not isinstance(node, (SourceDefinition, Exposure, Metric))
        if node.is_ephemeral:
            return False
        return True
@@ -163,9 +163,6 @@ class NodeSelector(MethodManager):
        elif unique_id in self.manifest.metrics:
            metric = self.manifest.metrics[unique_id]
            return metric.config.enabled
        elif unique_id in self.manifest.entities:
            entity = self.manifest.entities[unique_id]
            return entity.config.enabled
        node = self.manifest.nodes[unique_id]
        return not node.empty and node.config.enabled

@@ -185,8 +182,6 @@ class NodeSelector(MethodManager):
            node = self.manifest.exposures[unique_id]
        elif unique_id in self.manifest.metrics:
            node = self.manifest.metrics[unique_id]
        elif unique_id in self.manifest.entities:
            node = self.manifest.entities[unique_id]
        else:
            raise DbtInternalError(f"Node {unique_id} not found in the manifest!")
        return self.node_is_match(node)
@@ -12,7 +12,6 @@ from dbt.contracts.graph.nodes import (
    SingularTestNode,
    Exposure,
    Metric,
    Entity,
    GenericTestNode,
    SourceDefinition,
    ResultNode,

@@ -44,7 +43,6 @@ class MethodName(StrEnum):
    State = "state"
    Exposure = "exposure"
    Metric = "metric"
    Entity = "entity"
    Result = "result"
    SourceStatus = "source_status"

@@ -73,7 +71,7 @@ def is_selected_node(fqn: List[str], node_selector: str):
    return True


SelectorTarget = Union[SourceDefinition, ManifestNode, Exposure, Metric, Entity]
SelectorTarget = Union[SourceDefinition, ManifestNode, Exposure, Metric]


class SelectorMethod(metaclass=abc.ABCMeta):
@@ -120,14 +118,6 @@ class SelectorMethod(metaclass=abc.ABCMeta):
                continue
            yield unique_id, metric

    def entity_nodes(self, included_nodes: Set[UniqueId]) -> Iterator[Tuple[UniqueId, Entity]]:

        for key, metric in self.manifest.entities.items():
            unique_id = UniqueId(key)
            if unique_id not in included_nodes:
                continue
            yield unique_id, metric

    def all_nodes(
        self, included_nodes: Set[UniqueId]
    ) -> Iterator[Tuple[UniqueId, SelectorTarget]]:

@@ -136,7 +126,6 @@ class SelectorMethod(metaclass=abc.ABCMeta):
            self.source_nodes(included_nodes),
            self.exposure_nodes(included_nodes),
            self.metric_nodes(included_nodes),
            self.entity_nodes(included_nodes),
        )

    def configurable_nodes(

@@ -147,12 +136,11 @@ class SelectorMethod(metaclass=abc.ABCMeta):
    def non_source_nodes(
        self,
        included_nodes: Set[UniqueId],
    ) -> Iterator[Tuple[UniqueId, Union[Exposure, ManifestNode, Metric, Entity]]]:
    ) -> Iterator[Tuple[UniqueId, Union[Exposure, ManifestNode, Metric]]]:
        yield from chain(
            self.parsed_nodes(included_nodes),
            self.exposure_nodes(included_nodes),
            self.metric_nodes(included_nodes),
            self.entity_nodes(included_nodes),
        )

    @abc.abstractmethod
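The `*_nodes` helpers all share one shape: iterate a manifest collection, skip ids outside the selected set, and let `all_nodes`/`non_source_nodes` chain the iterators. A standalone sketch of that shape (plain dicts stand in for manifest collections):

```python
from itertools import chain
from typing import Dict, Iterator, Set, Tuple

def iter_included(
    collection: Dict[str, str], included: Set[str]
) -> Iterator[Tuple[str, str]]:
    for unique_id, node in collection.items():
        if unique_id not in included:
            continue
        yield unique_id, node

metrics = {"metric.proj.m1": "m1", "metric.proj.m2": "m2"}
exposures = {"exposure.proj.e1": "e1"}
included = {"metric.proj.m2", "exposure.proj.e1"}
print(list(chain(iter_included(metrics, included), iter_included(exposures, included))))
# [('metric.proj.m2', 'm2'), ('exposure.proj.e1', 'e1')]
```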
@@ -282,33 +270,6 @@ class MetricSelectorMethod(SelectorMethod):
            yield node


class EntitySelectorMethod(SelectorMethod):
    """TODO: Add a description of what this selector method is doing"""

    def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[UniqueId]:
        parts = selector.split(".")
        target_package = SELECTOR_GLOB
        if len(parts) == 1:
            target_name = parts[0]
        elif len(parts) == 2:
            target_package, target_name = parts
        else:
            msg = (
                'Invalid entity selector value "{}". Entities must be of '
                "the form ${{entity_name}} or "
                "${{entity_package.entity_name}}"
            ).format(selector)
            raise DbtRuntimeError(msg)

        for node, real_node in self.entity_nodes(included_nodes):
            if target_package not in (real_node.package_name, SELECTOR_GLOB):
                continue
            if target_name not in (real_node.name, SELECTOR_GLOB):
                continue

            yield node


class PathSelectorMethod(SelectorMethod):
    def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[UniqueId]:
        """Yields nodes from included that match the given path."""
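The removed `EntitySelectorMethod.search` mirrors `MetricSelectorMethod`: split the selector on `.` into an optional package and a name, then filter with a `*` glob. A standalone sketch of that matching logic (hypothetical data):

```python
from typing import Iterator, List, Tuple

SELECTOR_GLOB = "*"

def match(selector: str, nodes: List[Tuple[str, str]]) -> Iterator[Tuple[str, str]]:
    parts = selector.split(".")
    target_package = SELECTOR_GLOB
    if len(parts) == 1:
        (target_name,) = parts
    elif len(parts) == 2:
        target_package, target_name = parts
    else:
        raise ValueError(f"invalid selector {selector!r}")
    for package, name in nodes:
        if target_package not in (package, SELECTOR_GLOB):
            continue
        if target_name not in (name, SELECTOR_GLOB):
            continue
        yield package, name

nodes = [("proj", "customer"), ("pkg", "customer"), ("pkg", "order")]
print(list(match("customer", nodes)))      # both 'customer' entries
print(list(match("pkg.customer", nodes)))  # [('pkg', 'customer')]
```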
@@ -569,8 +530,6 @@ class StateSelectorMethod(SelectorMethod):
                previous_node = manifest.exposures[node]
            elif node in manifest.metrics:
                previous_node = manifest.metrics[node]
            elif node in manifest.entities:
                previous_node = manifest.entities[node]

            if checker(previous_node, real_node):
                yield node
@@ -657,7 +616,6 @@ class MethodManager:
        MethodName.State: StateSelectorMethod,
        MethodName.Exposure: ExposureSelectorMethod,
        MethodName.Metric: MetricSelectorMethod,
        MethodName.Entity: EntitySelectorMethod,
        MethodName.Result: ResultSelectorMethod,
        MethodName.SourceStatus: SourceStatusSelectorMethod,
    }
@@ -17,23 +17,47 @@
{% endmacro %}


{% macro get_empty_subquery_sql(select_sql) -%}
  {{ return(adapter.dispatch('get_empty_subquery_sql', 'dbt')(select_sql)) }}
{% endmacro %}

{% macro default__get_empty_subquery_sql(select_sql) %}
    select * from (
        {{ select_sql }}
    ) as __dbt_sbq
    where false
    limit 0
{% endmacro %}

{% macro get_column_schema_from_query(select_sql) -%}
    {{ return(adapter.dispatch('get_column_schema_from_query', 'dbt')(select_sql)) }}
{% endmacro %}

{% macro default__get_column_schema_from_query(select_sql) %}
    {% set columns = [] %}
    {% set sql = get_empty_subquery_sql(select_sql) %}
    {% set column_schema = adapter.get_column_schema_from_query(sql) %}
    {% for col in column_schema %}
        -- api.Column.create includes a step for translating data type
        -- TODO: could include size, precision, scale here
        {% set column = api.Column.create(col[0], col[1]) %}
        {% do columns.append(column) %}
    {% endfor %}
    {{ return(columns) }}
{% endmacro %}

-- here for back compat
{% macro get_columns_in_query(select_sql) -%}
    {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}
{% endmacro %}

{% macro default__get_columns_in_query(select_sql) %}
    {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}
        select * from (
            {{ select_sql }}
        ) as __dbt_sbq
        where false
        limit 0
        {{ get_empty_subquery_sql(select_sql) }}
    {% endcall %}

    {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}
{% endmacro %}


{% macro alter_column_type(relation, column_name, new_column_type) -%}
    {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}
{% endmacro %}
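`default__get_empty_subquery_sql` relies on a standard trick: wrapping arbitrary `select_sql` in a `where false limit 0` subquery makes the database return column metadata without scanning rows. A standalone sketch using `sqlite3` for illustration:

```python
import sqlite3

select_sql = "select 1 as id, 'a' as name"
# The same empty-subquery shape the macro renders: schema comes back, rows don't.
probe = f"select * from (\n{select_sql}\n) as __dbt_sbq\nwhere false\nlimit 0"

conn = sqlite3.connect(":memory:")
cursor = conn.execute(probe)
print([col[0] for col in cursor.description])  # ['id', 'name']
print(cursor.fetchall())                       # [] -- no rows fetched
```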
@@ -1,8 +1,10 @@
{% macro get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}
{% macro get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}
  -- back compat for old kwarg name
  {% set incremental_predicates = kwargs.get('predicates', incremental_predicates) %}
  {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}
{%- endmacro %}

{% macro default__get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}
{% macro default__get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}
    {%- set predicates = [] if incremental_predicates is none else [] + incremental_predicates -%}
    {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute="name")) -%}
    {%- set merge_update_columns = config.get('merge_update_columns') -%}
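The `kwargs.get('predicates', ...)` line is the whole back-compat fix from issue 6625: calls that still use the old keyword `predicates` keep working, while the new `incremental_predicates` name is preferred. The same pattern in plain Python (a sketch, not dbt's macro system):

```python
def get_merge_sql(target, source, unique_key, dest_columns,
                  incremental_predicates=None, **kwargs):
    # old callers passed predicates=...; fall back to it when present
    incremental_predicates = kwargs.get("predicates", incremental_predicates)
    return incremental_predicates or []

print(get_merge_sql("t", "s", "id", [], predicates=["x > 1"]))              # ['x > 1']
print(get_merge_sql("t", "s", "id", [], incremental_predicates=["y = 2"]))  # ['y = 2']
```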
@@ -3,7 +3,7 @@
{%- set ref_dict = {} -%}
{%- for _ref in model.refs -%}
    {%- set resolved = ref(*_ref) -%}
    {%- do ref_dict.update({_ref | join("."): resolved.quote(database=False, schema=False, identifier=False) | string}) -%}
    {%- do ref_dict.update({_ref | join("."): resolved | string | replace('"', '\"')}) -%}
{%- endfor -%}

def ref(*args,dbt_load_df_function):

@@ -18,7 +18,7 @@ def ref(*args,dbt_load_df_function):
{%- set source_dict = {} -%}
{%- for _source in model.sources -%}
    {%- set resolved = source(*_source) -%}
    {%- do source_dict.update({_source | join("."): resolved.quote(database=False, schema=False, identifier=False) | string}) -%}
    {%- do source_dict.update({_source | join("."): resolved | string | replace('"', '\"')}) -%}
{%- endfor -%}

def source(*args, dbt_load_df_function):

@@ -33,8 +33,8 @@ def source(*args, dbt_load_df_function):
{% set config_dbt_used = zip(model.config.config_keys_used, model.config.config_keys_defaults) | list %}
{%- for key, default in config_dbt_used -%}
    {# weird type testing with enum, would be much easier to write this logic in Python! #}
    {%- if key == 'language' -%}
        {%- set value = 'python' -%}
    {%- if key == "language" -%}
        {%- set value = "python" -%}
    {%- endif -%}
    {%- set value = model.config.get(key, default) -%}
    {%- do config_dict.update({key: value}) -%}

@@ -62,11 +62,12 @@ class config:

class this:
    """dbt.this() or dbt.this.identifier"""
    database = '{{ this.database }}'
    schema = '{{ this.schema }}'
    identifier = '{{ this.identifier }}'
    database = "{{ this.database }}"
    schema = "{{ this.schema }}"
    identifier = "{{ this.identifier }}"
    {% set this_relation_name = this | string | replace('"', '\\"') %}
    def __repr__(self):
        return '{{ this }}'
        return "{{ this_relation_name }}"


class dbtObj:
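Switching from `resolved.quote(...)` to `resolved | string | replace(...)` means the rendered relation name now respects the user's quoting config, so it may itself contain double quotes; embedding it in generated Python source therefore requires escaping. A standalone sketch of why (hypothetical relation name):

```python
relation_name = '"analytics"."jaffle_shop"."orders"'  # quoting config respected
escaped = relation_name.replace('"', '\\"')           # same replace as the template
generated = f'def __repr__(self):\n    return "{escaped}"'
print(generated)
# def __repr__(self):
#     return "\"analytics\".\"jaffle_shop\".\"orders\""
```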
@@ -486,7 +486,7 @@ def _build_snapshot_subparser(subparsers, base_subparser):
    return sub


def _add_defer_argument(*subparsers):
def _add_defer_arguments(*subparsers):
    for sub in subparsers:
        sub.add_optional_argument_inverse(
            "--defer",

@@ -499,10 +499,6 @@ def _add_defer_argument(*subparsers):
            """,
            default=flags.DEFER_MODE,
        )


def _add_favor_state_argument(*subparsers):
    for sub in subparsers:
        sub.add_optional_argument_inverse(
            "--favor-state",
            enable_help="""

@@ -580,7 +576,7 @@ def _build_docs_generate_subparser(subparsers, base_subparser):
            Do not run "dbt compile" as part of docs generation
            """,
    )
    _add_defer_argument(generate_sub)
    _add_defer_arguments(generate_sub)
    return generate_sub


@@ -1192,9 +1188,7 @@ def parse_args(args, cls=DBTArgumentParser):
    # list_sub sets up its own arguments.
    _add_selection_arguments(run_sub, compile_sub, generate_sub, test_sub, snapshot_sub, seed_sub)
    # --defer
    _add_defer_argument(run_sub, test_sub, build_sub, snapshot_sub, compile_sub)
    # --favor-state
    _add_favor_state_argument(run_sub, test_sub, build_sub, snapshot_sub)
    _add_defer_arguments(run_sub, test_sub, build_sub, snapshot_sub, compile_sub)
    # --full-refresh
    _add_table_mutability_arguments(run_sub, compile_sub, build_sub)
@@ -18,7 +18,6 @@ class NodeType(StrEnum):
    Macro = "macro"
    Exposure = "exposure"
    Metric = "metric"
    Entity = "entity"

    @classmethod
    def executable(cls) -> List["NodeType"]:

@@ -53,14 +52,11 @@ class NodeType(StrEnum):
            cls.Analysis,
            cls.Exposure,
            cls.Metric,
            cls.Entity,
        ]

    def pluralize(self) -> str:
        if self is self.Analysis:
            return "analyses"
        if self is self.Entity:
            return "entities"
        return f"{self}s"
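After this change, `pluralize()` keeps a single irregular case. A standalone sketch of the resulting behavior (trimmed enum for illustration):

```python
from enum import Enum

class NodeType(str, Enum):
    Model = "model"
    Analysis = "analysis"
    Metric = "metric"

    def pluralize(self) -> str:
        # "analysis" is the only irregular plural once "entity" is gone
        if self is NodeType.Analysis:
            return "analyses"
        return f"{self.value}s"

print(NodeType.Analysis.pluralize())  # analyses
print(NodeType.Metric.pluralize())    # metrics
```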
@@ -56,7 +56,6 @@ from dbt.contracts.graph.nodes import (
    ColumnInfo,
    Exposure,
    Metric,
    Entity,
    SeedNode,
    ManifestNode,
    ResultNode,
@@ -341,7 +340,7 @@ class ManifestLoader:
                project, project_parser_files[project.project_name], parser_types
            )

        # Now that we've loaded most of the nodes (except for schema tests, sources, metrics, entities)
        # Now that we've loaded most of the nodes (except for schema tests, sources, metrics)
        # load up the Lookup objects to resolve them by name, so the SourceFiles store
        # the unique_id instead of the name. Sources are loaded from yaml files, so
        # aren't in place yet

@@ -377,7 +376,7 @@ class ManifestLoader:
        # copy the selectors from the root_project to the manifest
        self.manifest.selectors = self.root_project.manifest_selectors

        # update the refs, sources, docs, entities and metrics depends_on.nodes
        # update the refs, sources, docs and metrics depends_on.nodes
        # These check the created_at time on the nodes to
        # determine whether they need processing.
        start_process = time.perf_counter()

@@ -385,7 +384,6 @@ class ManifestLoader:
        self.process_refs(self.root_project.project_name)
        self.process_docs(self.root_project)
        self.process_metrics(self.root_project)
        self.process_entities(self.root_project)

        # update tracking data
        self._perf_info.process_manifest_elapsed = time.perf_counter() - start_process

@@ -840,10 +838,6 @@ class ManifestLoader:
            if metric.created_at < self.started_at:
                continue
            _process_refs_for_metric(self.manifest, current_project, metric)
        for entity in self.manifest.entities.values():
            if entity.created_at < self.started_at:
                continue
            _process_refs_for_entity(self.manifest, current_project, entity)

    # Takes references in 'metrics' array of nodes and exposures, finds the target
    # node, and updates 'depends_on.nodes' with the unique id

@@ -864,23 +858,6 @@ class ManifestLoader:
                continue
            _process_metrics_for_node(self.manifest, current_project, exposure)

    # Takes references in 'entities' array of nodes and exposures, finds the target
    # node, and updates 'depends_on.nodes' with the unique id
    def process_entities(self, config: RuntimeConfig):
        current_project = config.project_name
        for node in self.manifest.nodes.values():
            if node.created_at < self.started_at:
                continue
            _process_entities_for_node(self.manifest, current_project, node)
        for entity in self.manifest.entities.values():
            if entity.created_at < self.started_at:
                continue
            _process_entities_for_node(self.manifest, current_project, entity)
        for exposure in self.manifest.exposures.values():
            if exposure.created_at < self.started_at:
                continue
            _process_entities_for_node(self.manifest, current_project, exposure)

    # nodes: node and column descriptions
    # sources: source and table descriptions, column descriptions
    # macros: macro argument descriptions
@@ -936,16 +913,6 @@ class ManifestLoader:
                config.project_name,
            )
            _process_docs_for_metrics(ctx, metric)
        for entity in self.manifest.entities.values():
            if entity.created_at < self.started_at:
                continue
            ctx = generate_runtime_docs_context(
                config,
                entity,
                self.manifest,
                config.project_name,
            )
            _process_docs_for_entities(ctx, entity)

    # Loops through all nodes and exposures, for each element in
    # 'sources' array finds the source node and updates the

@@ -1136,10 +1103,6 @@ def _process_docs_for_metrics(context: Dict[str, Any], metric: Metric) -> None:
    metric.description = get_rendered(metric.description, context)


def _process_docs_for_entities(context: Dict[str, Any], entity: Entity) -> None:
    entity.description = get_rendered(entity.description, context)


def _process_refs_for_exposure(manifest: Manifest, current_project: str, exposure: Exposure):
    """Given a manifest and exposure in that manifest, process its refs"""
    for ref in exposure.refs:
@@ -1227,48 +1190,6 @@ def _process_refs_for_metric(manifest: Manifest, current_project: str, metric: M
    manifest.update_metric(metric)


def _process_refs_for_entity(manifest: Manifest, current_project: str, entity: Entity):
    """Given a manifest and an entity in that manifest, process its refs"""
    for ref in entity.refs:
        target_model: Optional[Union[Disabled, ManifestNode]] = None
        target_model_name: str
        target_model_package: Optional[str] = None

        if len(ref) == 1:
            target_model_name = ref[0]
        elif len(ref) == 2:
            target_model_package, target_model_name = ref
        else:
            raise dbt.exceptions.DbtInternalError(
                f"Refs should always be 1 or 2 arguments - got {len(ref)}"
            )

        target_model = manifest.resolve_ref(
            target_model_name,
            target_model_package,
            current_project,
            entity.package_name,
        )

        if target_model is None or isinstance(target_model, Disabled):
            # This may raise. Even if it doesn't, we don't want to add
            # this entity to the graph b/c there is no destination entity
            entity.config.enabled = False
            invalid_target_fail_unless_test(
                node=entity,
                target_name=target_model_name,
                target_kind="node",
                target_package=target_model_package,
                disabled=(isinstance(target_model, Disabled)),
            )
            continue

        target_model_id = target_model.unique_id

        entity.depends_on.nodes.append(target_model_id)
        manifest.update_entity(entity)


def _process_metrics_for_node(
    manifest: Manifest,
    current_project: str,
@@ -1318,55 +1239,6 @@ def _process_metrics_for_node(
            node.depends_on.nodes.append(target_metric_id)


def _process_entities_for_node(
    manifest: Manifest,
    current_project: str,
    node: Union[ManifestNode, Entity, Exposure],
):
    """Given a manifest and a node in that manifest, process its entities"""

    if isinstance(node, SeedNode):
        return

    for entity in node.entities:
        target_entity: Optional[Union[Disabled, Entity]] = None
        target_entity_name: str
        target_entity_package: Optional[str] = None

        if len(entity) == 1:
            target_entity_name = entity[0]
        elif len(entity) == 2:
            target_entity_package, target_entity_name = entity
        else:
            raise dbt.exceptions.DbtInternalError(
                f"Entity references should always be 1 or 2 arguments - got {len(entity)}"
            )

        target_entity = manifest.resolve_entity(
            target_entity_name,
            target_entity_package,
            current_project,
            node.package_name,
        )

        if target_entity is None or isinstance(target_entity, Disabled):
            # This may raise. Even if it doesn't, we don't want to add
            # this node to the graph b/c there is no destination node
            node.config.enabled = False
            invalid_target_fail_unless_test(
                node=node,
                target_name=target_entity_name,
                target_kind="source",
                target_package=target_entity_package,
                disabled=(isinstance(target_entity, Disabled)),
            )
            continue

        target_entity_id = target_entity.unique_id

        node.depends_on.nodes.append(target_entity_id)


def _process_refs_for_node(manifest: Manifest, current_project: str, node: ManifestNode):
    """Given a manifest and a node in that manifest, process its refs"""

@@ -1441,7 +1313,6 @@ def _process_sources_for_exposure(manifest: Manifest, current_project: str, expo
    manifest.update_exposure(exposure)


# TODO: Remove this code because metrics can't be based on sources
def _process_sources_for_metric(manifest: Manifest, current_project: str, metric: Metric):
    target_source: Optional[Union[Disabled, SourceDefinition]] = None
    for source_name, table_name in metric.sources:
@@ -242,7 +242,7 @@ class PartialParsing:
            self.remove_source_override_target(source)

    def delete_disabled(self, unique_id, file_id):
        # This node/metric/entity/exposure is disabled. Find it and remove it from disabled dictionary.
        # This node/metric/exposure is disabled. Find it and remove it from disabled dictionary.
        for dis_index, dis_node in enumerate(self.saved_manifest.disabled[unique_id]):
            if dis_node.file_id == file_id:
                node = dis_node
@@ -441,18 +441,6 @@ class PartialParsing:
            if metric_element:
                self.delete_schema_metric(schema_file, metric_element)
            self.merge_patch(schema_file, "metrics", metric_element)
        elif unique_id in self.saved_manifest.entities:
            entity = self.saved_manifest.entities[unique_id]
            file_id = entity.file_id
            if file_id in self.saved_files and file_id not in self.file_diff["deleted"]:
                schema_file = self.saved_files[file_id]
                entities = []
                if "entities" in schema_file.dict_from_yaml:
                    entities = schema_file.dict_from_yaml["entities"]
                entity_element = self.get_schema_element(entities, entity.name)
                if entity_element:
                    self.delete_schema_entity(schema_file, entity_element)
                self.merge_patch(schema_file, "entities", entity_element)
        elif unique_id in self.saved_manifest.macros:
            macro = self.saved_manifest.macros[unique_id]
            file_id = macro.file_id
@@ -758,29 +746,6 @@ class PartialParsing:
|
||||
self.delete_schema_metric(schema_file, elem)
|
||||
self.merge_patch(schema_file, dict_key, elem)
|
||||
|
||||
# entities
|
||||
dict_key = "entities"
|
||||
entity_diff = self.get_diff_for("entities", saved_yaml_dict, new_yaml_dict)
|
||||
if entity_diff["changed"]:
|
||||
for entity in entity_diff["changed"]:
|
||||
self.delete_schema_entity(schema_file, entity)
|
||||
self.merge_patch(schema_file, dict_key, entity)
|
||||
if entity_diff["deleted"]:
|
||||
for entity in entity_diff["deleted"]:
|
||||
self.delete_schema_entity(schema_file, entity)
|
||||
if entity_diff["added"]:
|
||||
for entity in entity_diff["added"]:
|
||||
self.merge_patch(schema_file, dict_key, entity)
|
||||
# Handle schema file updates due to env_var changes
|
||||
if dict_key in env_var_changes and dict_key in new_yaml_dict:
|
||||
for name in env_var_changes[dict_key]:
|
||||
if name in entity_diff["changed_or_deleted_names"]:
|
||||
continue
|
||||
elem = self.get_schema_element(new_yaml_dict[dict_key], name)
|
||||
if elem:
|
||||
self.delete_schema_entity(schema_file, elem)
|
||||
self.merge_patch(schema_file, dict_key, elem)
|
||||
|
||||
# Take a "section" of the schema file yaml dictionary from saved and new schema files
|
||||
# and determine which parts have changed
|
||||
def get_diff_for(self, key, saved_yaml_dict, new_yaml_dict):
|
||||
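The entity handling above consumes the dictionary returned by get_diff_for(). Judging only from the keys the caller uses ("changed", "deleted", "added", "changed_or_deleted_names"), its shape is roughly as follows; this is an inferred sketch, not the actual dbt-core implementation.

# Inferred sketch of the diff dict consumed above; diff_for is an invented
# free function standing in for PartialParsing.get_diff_for.
def diff_for(key, saved_yaml_dict, new_yaml_dict):
    saved_elems = {e["name"]: e for e in saved_yaml_dict.get(key, [])}
    new_elems = {e["name"]: e for e in new_yaml_dict.get(key, [])}
    changed = [new_elems[n] for n in saved_elems if n in new_elems and saved_elems[n] != new_elems[n]]
    deleted = [saved_elems[n] for n in saved_elems if n not in new_elems]
    added = [new_elems[n] for n in new_elems if n not in saved_elems]
    return {
        "changed": changed,
        "deleted": deleted,
        "added": added,
        "changed_or_deleted_names": [e["name"] for e in changed + deleted],
    }


saved = {"entities": [{"name": "a", "model": "m1"}, {"name": "b", "model": "m2"}]}
new = {"entities": [{"name": "a", "model": "m1_v2"}, {"name": "c", "model": "m3"}]}
result = diff_for("entities", saved, new)
assert [e["name"] for e in result["added"]] == ["c"]
assert sorted(result["changed_or_deleted_names"]) == ["a", "b"]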
@@ -956,24 +921,6 @@ class PartialParsing:
            elif unique_id in self.saved_manifest.disabled:
                self.delete_disabled(unique_id, schema_file.file_id)

    # entities are created only from schema files, but also can be referred to by other nodes
    def delete_schema_entity(self, schema_file, entity_dict):
        entity_name = entity_dict["name"]
        entities = schema_file.entities.copy()
        for unique_id in entities:
            if unique_id in self.saved_manifest.entities:
                entity = self.saved_manifest.entities[unique_id]
                if entity.name == entity_name:
                    # Need to find everything that referenced this entity and schedule for parsing
                    if unique_id in self.saved_manifest.child_map:
                        self.schedule_nodes_for_parsing(self.saved_manifest.child_map[unique_id])
                    self.deleted_manifest.entities[unique_id] = self.saved_manifest.entities.pop(
                        unique_id
                    )
                    schema_file.entities.remove(unique_id)
            elif unique_id in self.saved_manifest.disabled:
                self.delete_disabled(unique_id, schema_file.file_id)

    def get_schema_element(self, elem_list, elem_name):
        for element in elem_list:
            if "name" in element and element["name"] == elem_name:

@@ -22,12 +22,11 @@ from dbt.context.configured import generate_schema_yml_context, SchemaYamlVars
from dbt.context.providers import (
    generate_parse_exposure,
    generate_parse_metrics,
    generate_parse_entities,
    generate_test_context,
)
from dbt.context.macro_resolver import MacroResolver
from dbt.contracts.files import FileHash, SchemaSourceFile
from dbt.contracts.graph.model_config import MetricConfig, ExposureConfig, EntityConfig
from dbt.contracts.graph.model_config import MetricConfig, ExposureConfig
from dbt.contracts.graph.nodes import (
    ParsedNodePatch,
    ColumnInfo,
@@ -36,7 +35,6 @@ from dbt.contracts.graph.nodes import (
    UnpatchedSourceDefinition,
    Exposure,
    Metric,
    Entity,
)
from dbt.contracts.graph.unparsed import (
    HasColumnDocs,
@@ -49,7 +47,6 @@ from dbt.contracts.graph.unparsed import (
    UnparsedNodeUpdate,
    UnparsedExposure,
    UnparsedMetric,
    UnparsedEntity,
    UnparsedSourceDefinition,
)
from dbt.exceptions import (
@@ -97,7 +94,6 @@ schema_file_keys = (
    "analyses",
    "exposures",
    "metrics",
    "entities",
)

@@ -118,7 +114,6 @@ class ParserRef:
    def __init__(self):
        self.column_info: Dict[str, ColumnInfo] = {}

    # TODO: Mimic this for dimension information at the entity level
    def add(
        self,
        column: Union[HasDocs, UnparsedColumn],
@@ -541,11 +536,6 @@ class SchemaParser(SimpleParser[GenericTestBlock, GenericTestNode]):
            metric_parser = MetricParser(self, yaml_block)
            metric_parser.parse()

        # parse entities
        if "entities" in dct:
            entity_parser = EntityParser(self, yaml_block)
            entity_parser.parse()


def check_format_version(file_path, yaml_dct) -> None:
    if "version" not in yaml_dct:
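For illustration, here is a hypothetical schema-file dict (the `dct` checked above) that would take the EntityParser branch. The field names mirror how UnparsedEntity is used later in this diff (name, model, description, dimensions), but the exact YAML contract is an assumption.

# Hypothetical parsed schema-file contents that would trigger EntityParser;
# values are invented for illustration only.
dct = {
    "version": 2,
    "entities": [
        {
            "name": "customer",
            "model": "ref('dim_customers')",
            "description": "One record per customer",
            "dimensions": ["region", "plan_tier"],
        }
    ],
}

if "entities" in dct:  # mirrors the dispatch shown in the hunk above
    print(f"would hand {len(dct['entities'])} entity block(s) to EntityParser")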
@@ -1193,107 +1183,3 @@ class MetricParser(YamlReader):
            except (ValidationError, JSONValidationError) as exc:
                raise YamlParseDictError(self.yaml.path, self.key, data, exc)
            self.parse_metric(unparsed)


class EntityParser(YamlReader):
    def __init__(self, schema_parser: SchemaParser, yaml: YamlBlock):
        super().__init__(schema_parser, yaml, NodeType.Entity.pluralize())
        self.schema_parser = schema_parser
        self.yaml = yaml

    def parse_entity(self, unparsed: UnparsedEntity):
        package_name = self.project.project_name
        unique_id = f"{NodeType.Entity}.{package_name}.{unparsed.name}"
        path = self.yaml.path.relative_path

        fqn = self.schema_parser.get_fqn_prefix(path)
        fqn.append(unparsed.name)

        config = self._generate_entity_config(
            target=unparsed,
            fqn=fqn,
            package_name=package_name,
            rendered=True,
        )

        config = config.finalize_and_validate()

        unrendered_config = self._generate_entity_config(
            target=unparsed,
            fqn=fqn,
            package_name=package_name,
            rendered=False,
        )

        if not isinstance(config, EntityConfig):
            raise DbtInternalError(
                f"Calculated a {type(config)} for an entity, but expected an EntityConfig"
            )

        parsed = Entity(
            resource_type=NodeType.Entity,
            package_name=package_name,
            path=path,
            original_file_path=self.yaml.path.original_file_path,
            unique_id=unique_id,
            fqn=fqn,
            model=unparsed.model,
            name=unparsed.name,
            description=unparsed.description,
            dimensions=unparsed.dimensions,
            meta=unparsed.meta,
            tags=unparsed.tags,
            config=config,
            unrendered_config=unrendered_config,
        )

        ctx = generate_parse_entities(
            parsed,
            self.root_project,
            self.schema_parser.manifest,
            package_name,
        )

        if parsed.model is not None:
            model_ref = "{{ " + parsed.model + " }}"
            get_rendered(model_ref, ctx, parsed)

        # if the entity is disabled we do not want it included in the manifest, only in the disabled dict
        if parsed.config.enabled:
            self.manifest.add_entity(self.yaml.file, parsed)
        else:
            self.manifest.add_disabled(self.yaml.file, parsed)

    def _generate_entity_config(
        self, target: UnparsedEntity, fqn: List[str], package_name: str, rendered: bool
    ):
        generator: BaseContextConfigGenerator
        if rendered:
            generator = ContextConfigGenerator(self.root_project)
        else:
            generator = UnrenderedConfigGenerator(self.root_project)

        # configs with precedence set
        precedence_configs = dict()
        # first apply entity configs
        precedence_configs.update(target.config)

        return generator.calculate_node_config(
            config_call_dict={},
            fqn=fqn,
            resource_type=NodeType.Entity,
            project_name=package_name,
            base=False,
            patch_config_dict=precedence_configs,
        )

    def parse(self):
        for data in self.get_key_dicts():
            try:
                UnparsedEntity.validate(data)
                unparsed = UnparsedEntity.from_dict(data)

            except (ValidationError, JSONValidationError) as exc:
                raise YamlParseDictError(self.yaml.path, self.key, data, exc)
            self.parse_entity(unparsed)

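A small sketch of the identifiers parse_entity() derives. NodeType is a string enum in dbt, so `NodeType.Entity` is assumed to render as "entity"; the fqn prefix shape is likewise an assumption for illustration.

# Sketch only: how the unique_id and fqn for an entity are assembled.
package_name = "my_project"  # hypothetical project name
name = "customer"            # hypothetical entity name

unique_id = f"entity.{package_name}.{name}"  # f"{NodeType.Entity}.{package_name}.{unparsed.name}"
fqn = [package_name, "entities", name]       # get_fqn_prefix(path) + [name]; prefix shape assumed

assert unique_id == "entity.my_project.customer"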
@@ -83,6 +83,7 @@ class CompileTask(GraphRunnableTask):
                adapter=adapter,
                other=deferred_manifest,
                selected=selected_uids,
                favor_state=bool(self.args.favor_state),
            )
        # TODO: is it wrong to write the manifest here? I think it's right...
        self.write_manifest()

@@ -1,6 +1,6 @@
import json

from dbt.contracts.graph.nodes import Exposure, SourceDefinition, Metric, Entity
from dbt.contracts.graph.nodes import Exposure, SourceDefinition, Metric
from dbt.graph import ResourceTypeSelector
from dbt.task.runnable import GraphRunnableTask, ManifestTask
from dbt.task.test import TestSelector
@@ -22,7 +22,6 @@ class ListTask(GraphRunnableTask):
            NodeType.Source,
            NodeType.Exposure,
            NodeType.Metric,
            NodeType.Entity,
        )
    )
    ALL_RESOURCE_VALUES = DEFAULT_RESOURCE_VALUES | frozenset((NodeType.Analysis,))
@@ -83,8 +82,6 @@ class ListTask(GraphRunnableTask):
                yield self.manifest.exposures[node]
            elif node in self.manifest.metrics:
                yield self.manifest.metrics[node]
            elif node in self.manifest.entities:
                yield self.manifest.entities[node]
            else:
                raise DbtRuntimeError(
                    f'Got an unexpected result from node selection: "{node}"'
@@ -108,11 +105,6 @@ class ListTask(GraphRunnableTask):
                # metrics are searched for by pkg.metric_name
                metric_selector = ".".join([node.package_name, node.name])
                yield f"metric:{metric_selector}"
            elif node.resource_type == NodeType.Entity:
                assert isinstance(node, Entity)
                # entities are searched for by pkg.entity_name
                entity_selector = ".".join([node.package_name, node.name])
                yield f"entity:{entity_selector}"
            else:
                # everything else is from `fqn`
                yield ".".join(node.fqn)

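The selector strings yielded above are package-qualified dotted names with a resource-type prefix; a standalone restatement:

# Restates the yield logic above for metrics and entities.
package_name, name = "my_project", "customer"  # hypothetical values

metric_selector = ".".join([package_name, name])
entity_selector = ".".join([package_name, name])

assert f"metric:{metric_selector}" == "metric:my_project.customer"
assert f"entity:{entity_selector}" == "entity:my_project.customer"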
@@ -1,6 +1,7 @@
from contextlib import contextmanager

import psycopg2
from psycopg2.extensions import string_types

import dbt.exceptions
from dbt.adapters.base import Credentials
@@ -190,3 +191,11 @@ class PostgresConnectionManager(SQLConnectionManager):
        status_message_strings = [part for part in status_message_parts if not part.isdigit()]
        code = " ".join(status_message_strings)
        return AdapterResponse(_message=message, code=code, rows_affected=rows)

    @classmethod
    def data_type_code_to_name(cls, type_code: int) -> str:
        return string_types[type_code].name

    # For dbt-snowflake
    # from snowflake.connector.constants import FIELD_ID_TO_NAME
    # return FIELD_ID_TO_NAME[type_code]

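data_type_code_to_name() leans on psycopg2's typecaster registry: psycopg2.extensions.string_types maps PostgreSQL type OIDs to typecaster objects whose .name attribute is a readable label. A small sketch; the exact labels and registry contents can vary by psycopg2 version.

# Look up readable names for two well-known PostgreSQL type OIDs.
from psycopg2.extensions import string_types

for oid in (23, 1043):  # 23 = int4, 1043 = varchar
    if oid in string_types:  # guard: registry contents can vary by version
        print(oid, string_types[oid].name)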
@@ -36,7 +36,7 @@
        }
    },
    "additionalProperties": false,
    "description": "CatalogArtifact(metadata: dbt.contracts.results.CatalogMetadata, nodes: Dict[str, dbt.contracts.results.CatalogTable], sources: Dict[str, dbt.contracts.results.CatalogTable], errors: Optional[List[str]] = None, _compile_results: Optional[Any] = None)",
    "description": "CatalogArtifact(metadata: dbt.contracts.results.CatalogMetadata, nodes: Dict[str, dbt.contracts.results.CatalogTable], sources: Dict[str, dbt.contracts.results.CatalogTable], errors: Union[List[str], NoneType] = None, _compile_results: Union[Any, NoneType] = None)",
    "definitions": {
        "CatalogMetadata": {
            "type": "object",
@@ -48,12 +48,12 @@
                },
                "dbt_version": {
                    "type": "string",
                    "default": "1.5.0a1"
                    "default": "1.2.0a1"
                },
                "generated_at": {
                    "type": "string",
                    "format": "date-time",
                    "default": "2023-01-23T21:56:17.789289Z"
                    "default": "2022-04-15T20:38:22.701177Z"
                },
                "invocation_id": {
                    "oneOf": [
@@ -64,7 +64,7 @@
                        {
                            "type": "null"
                        }
                    ],
                    "default": "10c9c26b-6682-4d46-84d2-12f641a070e5"
                    "default": "34abf75e-59d3-442f-920c-fa3843d98014"
                },
                "env": {
                    "type": "object",
@@ -75,7 +75,7 @@
                }
            },
            "additionalProperties": false,
            "description": "CatalogMetadata(dbt_schema_version: str = <factory>, dbt_version: str = '1.5.0a1', generated_at: datetime.datetime = <factory>, invocation_id: Optional[str] = <factory>, env: Dict[str, str] = <factory>)"
            "description": "CatalogMetadata(dbt_schema_version: str = <factory>, dbt_version: str = '1.2.0a1', generated_at: datetime.datetime = <factory>, invocation_id: Union[str, NoneType] = <factory>, env: Dict[str, str] = <factory>)"
        },
        "CatalogTable": {
            "type": "object",
@@ -112,7 +112,7 @@
                }
            },
            "additionalProperties": false,
            "description": "CatalogTable(metadata: dbt.contracts.results.TableMetadata, columns: Dict[str, dbt.contracts.results.ColumnMetadata], stats: Dict[str, dbt.contracts.results.StatsItem], unique_id: Optional[str] = None)"
            "description": "CatalogTable(metadata: dbt.contracts.results.TableMetadata, columns: Dict[str, dbt.contracts.results.ColumnMetadata], stats: Dict[str, dbt.contracts.results.StatsItem], unique_id: Union[str, NoneType] = None)"
        },
        "TableMetadata": {
            "type": "object",
@@ -163,7 +163,7 @@
                }
            },
            "additionalProperties": false,
            "description": "TableMetadata(type: str, schema: str, name: str, database: Optional[str] = None, comment: Optional[str] = None, owner: Optional[str] = None)"
            "description": "TableMetadata(type: str, schema: str, name: str, database: Union[str, NoneType] = None, comment: Union[str, NoneType] = None, owner: Union[str, NoneType] = None)"
        },
        "ColumnMetadata": {
            "type": "object",
@@ -194,7 +194,7 @@
                }
            },
            "additionalProperties": false,
            "description": "ColumnMetadata(type: str, index: int, name: str, comment: Optional[str] = None)"
            "description": "ColumnMetadata(type: str, index: int, name: str, comment: Union[str, NoneType] = None)"
        },
        "StatsItem": {
            "type": "object",
@@ -241,7 +241,7 @@
                }
            },
            "additionalProperties": false,
            "description": "StatsItem(id: str, label: str, value: Union[bool, str, float, NoneType], include: bool, description: Optional[str] = None)"
            "description": "StatsItem(id: str, label: str, value: Union[bool, str, float, NoneType], include: bool, description: Union[str, NoneType] = None)"
        }
    },
    "$schema": "http://json-schema.org/draft-07/schema#",

5984  schemas/dbt/manifest/v5.json  (Normal file; diff suppressed because it is too large)
6209  schemas/dbt/manifest/v6.json  (Normal file; diff suppressed because it is too large)
6575  schemas/dbt/manifest/v7.json  (Normal file; diff suppressed because it is too large)
@@ -8,7 +8,6 @@
        "docs",
        "exposures",
        "metrics",
        "entities",
        "selectors"
    ],
    "properties": {
@@ -86,13 +85,6 @@
            },
            "description": "The metrics defined in the dbt project and its dependencies"
        },
        "entities": {
            "type": "object",
            "additionalProperties": {
                "$ref": "#/definitions/Entity"
            },
            "description": "The entities defined in the dbt project and its dependencies"
        },
        "selectors": {
            "type": "object",
            "description": "The selectors defined in selectors.yml"
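For reference, a hypothetical entry under the removed top-level "entities" mapping, with keys drawn from the Entity constructor earlier in this diff; all values are invented for illustration and not generated by dbt.

# Invented example of one manifest "entities" entry (shape inferred from the
# Entity(...) constructor in this diff).
manifest_entities = {
    "entity.my_project.customer": {
        "resource_type": "entity",
        "package_name": "my_project",
        "path": "entities.yml",
        "original_file_path": "models/entities.yml",
        "unique_id": "entity.my_project.customer",
        "fqn": ["my_project", "customer"],
        "name": "customer",
        "model": "ref('dim_customers')",
        "description": "One record per customer",
        "dimensions": ["region", "plan_tier"],
        "meta": {},
        "tags": [],
    }
}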
@@ -181,7 +173,7 @@
        }
    },
    "additionalProperties": false,
    "description": "WritableManifest(metadata: dbt.contracts.graph.manifest.ManifestMetadata, nodes: Mapping[str, Union[dbt.contracts.graph.nodes.AnalysisNode, dbt.contracts.graph.nodes.SingularTestNode, dbt.contracts.graph.nodes.HookNode, dbt.contracts.graph.nodes.ModelNode, dbt.contracts.graph.nodes.RPCNode, dbt.contracts.graph.nodes.SqlNode, dbt.contracts.graph.nodes.GenericTestNode, dbt.contracts.graph.nodes.SnapshotNode, dbt.contracts.graph.nodes.SeedNode]], sources: Mapping[str, dbt.contracts.graph.nodes.SourceDefinition], macros: Mapping[str, dbt.contracts.graph.nodes.Macro], docs: Mapping[str, dbt.contracts.graph.nodes.Documentation], exposures: Mapping[str, dbt.contracts.graph.nodes.Exposure], metrics: Mapping[str, dbt.contracts.graph.nodes.Metric], entities: Mapping[str, dbt.contracts.graph.nodes.Entity], selectors: Mapping[str, Any], disabled: Optional[Mapping[str, List[Union[dbt.contracts.graph.nodes.AnalysisNode, dbt.contracts.graph.nodes.SingularTestNode, dbt.contracts.graph.nodes.HookNode, dbt.contracts.graph.nodes.ModelNode, dbt.contracts.graph.nodes.RPCNode, dbt.contracts.graph.nodes.SqlNode, dbt.contracts.graph.nodes.GenericTestNode, dbt.contracts.graph.nodes.SnapshotNode, dbt.contracts.graph.nodes.SeedNode, dbt.contracts.graph.nodes.SourceDefinition]]]], parent_map: Optional[Dict[str, List[str]]], child_map: Optional[Dict[str, List[str]]])",
    "description": "WritableManifest(metadata: dbt.contracts.graph.manifest.ManifestMetadata, nodes: Mapping[str, Union[dbt.contracts.graph.nodes.AnalysisNode, dbt.contracts.graph.nodes.SingularTestNode, dbt.contracts.graph.nodes.HookNode, dbt.contracts.graph.nodes.ModelNode, dbt.contracts.graph.nodes.RPCNode, dbt.contracts.graph.nodes.SqlNode, dbt.contracts.graph.nodes.GenericTestNode, dbt.contracts.graph.nodes.SnapshotNode, dbt.contracts.graph.nodes.SeedNode]], sources: Mapping[str, dbt.contracts.graph.nodes.SourceDefinition], macros: Mapping[str, dbt.contracts.graph.nodes.Macro], docs: Mapping[str, dbt.contracts.graph.nodes.Documentation], exposures: Mapping[str, dbt.contracts.graph.nodes.Exposure], metrics: Mapping[str, dbt.contracts.graph.nodes.Metric], selectors: Mapping[str, Any], disabled: Optional[Mapping[str, List[Union[dbt.contracts.graph.nodes.AnalysisNode, dbt.contracts.graph.nodes.SingularTestNode, dbt.contracts.graph.nodes.HookNode, dbt.contracts.graph.nodes.ModelNode, dbt.contracts.graph.nodes.RPCNode, dbt.contracts.graph.nodes.SqlNode, dbt.contracts.graph.nodes.GenericTestNode, dbt.contracts.graph.nodes.SnapshotNode, dbt.contracts.graph.nodes.SeedNode, dbt.contracts.graph.nodes.SourceDefinition]]]], parent_map: Optional[Dict[str, List[str]]], child_map: Optional[Dict[str, List[str]]])",
    "definitions": {
        "ManifestMetadata": {
            "type": "object",
@@ -193,12 +185,12 @@
                },
                "dbt_version": {
                    "type": "string",
                    "default": "1.5.0a1"
                    "default": "1.4.0a1"
                },
                "generated_at": {
                    "type": "string",
                    "format": "date-time",
                    "default": "2023-01-23T21:56:17.790304Z"
                    "default": "2022-12-13T03:30:15.966964Z"
                },
                "invocation_id": {
                    "oneOf": [
@@ -209,7 +201,7 @@
                        {
                            "type": "null"
                        }
                    ],
                    "default": "10c9c26b-6682-4d46-84d2-12f641a070e5"
                    "default": "4f2b967b-7e02-46de-a7ea-268a05e3fab1"
                },
                "env": {
                    "type": "object",
@@ -270,6 +262,7 @@
        "AnalysisNode": {
            "type": "object",
            "required": [
                "database",
                "schema",
                "name",
                "resource_type",
@@ -283,14 +276,7 @@
            ],
            "properties": {
                "database": {
                    "oneOf": [
                        {
                            "type": "string"
                        },
                        {
                            "type": "null"
                        }
                    ]
                    "type": "string"
                },
                "schema": {
                    "type": "string"
@@ -414,7 +400,7 @@
                },
                "created_at": {
                    "type": "number",
                    "default": 1674510977.792257
                    "default": 1670902215.970579
                },
                "config_call_dict": {
                    "type": "object",
@@ -468,16 +454,6 @@
                    },
                    "default": []
                },
                "entities": {
                    "type": "array",
                    "items": {
                        "type": "array",
                        "items": {
                            "type": "string"
                        }
                    },
                    "default": []
                },
                "depends_on": {
                    "$ref": "#/definitions/DependsOn",
                    "default": {
@@ -522,7 +498,7 @@
                }
            },
            "additionalProperties": false,
            "description": "AnalysisNode(database: Optional[str], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.NodeConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', language: str = 'sql', refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, entities: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Optional[str] = None, compiled: bool = False, compiled_code: Optional[str] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Optional[str] = None)"
            "description": "AnalysisNode(database: str, schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.NodeConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', language: str = 'sql', refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Optional[str] = None, compiled: bool = False, compiled_code: Optional[str] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Optional[str] = None)"
        },
        "FileHash": {
            "type": "object",
@@ -835,6 +811,7 @@
        "SingularTestNode": {
            "type": "object",
            "required": [
                "database",
                "schema",
                "name",
                "resource_type",
@@ -848,14 +825,7 @@
            ],
            "properties": {
                "database": {
                    "oneOf": [
                        {
                            "type": "string"
                        },
                        {
                            "type": "null"
                        }
                    ]
                    "type": "string"
                },
                "schema": {
                    "type": "string"
@@ -971,7 +941,7 @@
                },
                "created_at": {
                    "type": "number",
                    "default": 1674510977.79368
                    "default": 1670902215.973521
                },
                "config_call_dict": {
                    "type": "object",
@@ -1025,16 +995,6 @@
                    },
                    "default": []
                },
                "entities": {
                    "type": "array",
                    "items": {
                        "type": "array",
                        "items": {
                            "type": "string"
                        }
                    },
                    "default": []
                },
                "depends_on": {
                    "$ref": "#/definitions/DependsOn",
                    "default": {
@@ -1079,7 +1039,7 @@
                }
            },
            "additionalProperties": false,
            "description": "SingularTestNode(database: Optional[str], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.TestConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', language: str = 'sql', refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, entities: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Optional[str] = None, compiled: bool = False, compiled_code: Optional[str] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Optional[str] = None)"
            "description": "SingularTestNode(database: str, schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.TestConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', language: str = 'sql', refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Optional[str] = None, compiled: bool = False, compiled_code: Optional[str] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Optional[str] = None)"
        },
        "TestConfig": {
            "type": "object",
@@ -1196,6 +1156,7 @@
        "HookNode": {
            "type": "object",
            "required": [
                "database",
                "schema",
                "name",
                "resource_type",
@@ -1209,14 +1170,7 @@
            ],
            "properties": {
                "database": {
                    "oneOf": [
                        {
                            "type": "string"
                        },
                        {
                            "type": "null"
                        }
                    ]
                    "type": "string"
                },
                "schema": {
                    "type": "string"
@@ -1340,7 +1294,7 @@
                },
                "created_at": {
                    "type": "number",
                    "default": 1674510977.795094
                    "default": 1670902215.975156
                },
                "config_call_dict": {
                    "type": "object",
@@ -1394,16 +1348,6 @@
                    },
                    "default": []
                },
                "entities": {
                    "type": "array",
                    "items": {
                        "type": "array",
                        "items": {
                            "type": "string"
                        }
                    },
                    "default": []
                },
                "depends_on": {
                    "$ref": "#/definitions/DependsOn",
                    "default": {
@@ -1458,11 +1402,12 @@
                }
            },
            "additionalProperties": false,
            "description": "HookNode(database: Optional[str], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.NodeConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', language: str = 'sql', refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, entities: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Optional[str] = None, compiled: bool = False, compiled_code: Optional[str] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Optional[str] = None, index: Optional[int] = None)"
            "description": "HookNode(database: str, schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.NodeConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', language: str = 'sql', refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Optional[str] = None, compiled: bool = False, compiled_code: Optional[str] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Optional[str] = None, index: Optional[int] = None)"
        },
"ModelNode": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
"database",
|
||||
"schema",
|
||||
"name",
|
||||
"resource_type",
|
||||
@@ -1476,14 +1421,7 @@
|
||||
],
|
||||
"properties": {
|
||||
"database": {
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
"type": "string"
|
||||
},
|
||||
"schema": {
|
||||
"type": "string"
|
||||
@@ -1607,7 +1545,7 @@
|
||||
},
|
||||
"created_at": {
|
||||
"type": "number",
|
||||
"default": 1674510977.7959611
|
||||
"default": 1670902215.976732
|
||||
},
|
||||
"config_call_dict": {
|
||||
"type": "object",
|
||||
@@ -1661,16 +1599,6 @@
|
||||
},
|
||||
"default": []
|
||||
},
|
||||
"entities": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"default": []
|
||||
},
|
||||
"depends_on": {
|
||||
"$ref": "#/definitions/DependsOn",
|
||||
"default": {
|
||||
@@ -1715,11 +1643,12 @@
|
||||
}
|
||||
},
|
||||
"additionalProperties": false,
|
||||
"description": "ModelNode(database: Optional[str], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.NodeConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', language: str = 'sql', refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, entities: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Optional[str] = None, compiled: bool = False, compiled_code: Optional[str] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Optional[str] = None)"
|
||||
"description": "ModelNode(database: str, schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.NodeConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', language: str = 'sql', refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Optional[str] = None, compiled: bool = False, compiled_code: Optional[str] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Optional[str] = None)"
|
||||
},
|
||||
"RPCNode": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
"database",
|
||||
"schema",
|
||||
"name",
|
||||
"resource_type",
|
||||
@@ -1733,14 +1662,7 @@
|
||||
],
|
||||
"properties": {
|
||||
"database": {
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
"type": "string"
|
||||
},
|
||||
"schema": {
|
||||
"type": "string"
|
||||
@@ -1864,7 +1786,7 @@
|
||||
},
|
||||
"created_at": {
|
||||
"type": "number",
|
||||
"default": 1674510977.796774
|
||||
"default": 1670902215.978195
|
||||
},
|
||||
"config_call_dict": {
|
||||
"type": "object",
|
||||
@@ -1918,16 +1840,6 @@
|
||||
},
|
||||
"default": []
|
||||
},
|
||||
"entities": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"default": []
|
||||
},
|
||||
"depends_on": {
|
||||
"$ref": "#/definitions/DependsOn",
|
||||
"default": {
|
||||
@@ -1972,11 +1884,12 @@
|
||||
}
|
||||
},
|
||||
"additionalProperties": false,
|
||||
"description": "RPCNode(database: Optional[str], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.NodeConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', language: str = 'sql', refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, entities: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Optional[str] = None, compiled: bool = False, compiled_code: Optional[str] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Optional[str] = None)"
|
||||
"description": "RPCNode(database: str, schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.NodeConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', language: str = 'sql', refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Optional[str] = None, compiled: bool = False, compiled_code: Optional[str] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Optional[str] = None)"
|
||||
},
|
||||
"SqlNode": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
"database",
|
||||
"schema",
|
||||
"name",
|
||||
"resource_type",
|
||||
@@ -1990,14 +1903,7 @@
|
||||
],
|
||||
"properties": {
|
||||
"database": {
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
"type": "string"
|
||||
},
|
||||
"schema": {
|
||||
"type": "string"
|
||||
@@ -2121,7 +2027,7 @@
|
||||
},
|
||||
"created_at": {
|
||||
"type": "number",
|
||||
"default": 1674510977.797567
|
||||
"default": 1670902215.979718
|
||||
},
|
||||
"config_call_dict": {
|
||||
"type": "object",
|
||||
@@ -2175,16 +2081,6 @@
|
||||
},
|
||||
"default": []
|
||||
},
|
||||
"entities": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"default": []
|
||||
},
|
||||
"depends_on": {
|
||||
"$ref": "#/definitions/DependsOn",
|
||||
"default": {
|
||||
@@ -2229,12 +2125,13 @@
|
||||
}
|
||||
},
|
||||
"additionalProperties": false,
|
||||
"description": "SqlNode(database: Optional[str], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.NodeConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', language: str = 'sql', refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, entities: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Optional[str] = None, compiled: bool = False, compiled_code: Optional[str] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Optional[str] = None)"
|
||||
"description": "SqlNode(database: str, schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.NodeConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', language: str = 'sql', refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Optional[str] = None, compiled: bool = False, compiled_code: Optional[str] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Optional[str] = None)"
|
||||
},
|
||||
"GenericTestNode": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
"test_metadata",
|
||||
"database",
|
||||
"schema",
|
||||
"name",
|
||||
"resource_type",
|
||||
@@ -2251,14 +2148,7 @@
|
||||
"$ref": "#/definitions/TestMetadata"
|
||||
},
|
||||
"database": {
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
"type": "string"
|
||||
},
|
||||
"schema": {
|
||||
"type": "string"
|
||||
@@ -2374,7 +2264,7 @@
|
||||
},
|
||||
"created_at": {
|
||||
"type": "number",
|
||||
"default": 1674510977.79852
|
||||
"default": 1670902215.981434
|
||||
},
|
||||
"config_call_dict": {
|
||||
"type": "object",
|
||||
@@ -2428,16 +2318,6 @@
|
||||
},
|
||||
"default": []
|
||||
},
|
||||
"entities": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"default": []
|
||||
},
|
||||
"depends_on": {
|
||||
"$ref": "#/definitions/DependsOn",
|
||||
"default": {
|
||||
@@ -2502,7 +2382,7 @@
|
||||
}
|
||||
},
|
||||
"additionalProperties": false,
|
||||
"description": "GenericTestNode(test_metadata: dbt.contracts.graph.nodes.TestMetadata, database: Optional[str], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.TestConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', language: str = 'sql', refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, entities: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Optional[str] = None, compiled: bool = False, compiled_code: Optional[str] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Optional[str] = None, column_name: Optional[str] = None, file_key_name: Optional[str] = None)"
|
||||
"description": "GenericTestNode(test_metadata: dbt.contracts.graph.nodes.TestMetadata, database: str, schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.TestConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', language: str = 'sql', refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Optional[str] = None, compiled: bool = False, compiled_code: Optional[str] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Optional[str] = None, column_name: Optional[str] = None, file_key_name: Optional[str] = None)"
|
||||
},
|
||||
"TestMetadata": {
|
||||
"type": "object",
|
||||
@@ -2534,6 +2414,7 @@
        "SnapshotNode": {
            "type": "object",
            "required": [
                "database",
                "schema",
                "name",
                "resource_type",
@@ -2548,14 +2429,7 @@
            ],
            "properties": {
                "database": {
                    "oneOf": [
                        {
                            "type": "string"
                        },
                        {
                            "type": "null"
                        }
                    ]
                    "type": "string"
                },
                "schema": {
                    "type": "string"
@@ -2655,7 +2529,7 @@
                },
                "created_at": {
                    "type": "number",
                    "default": 1674510977.79998
                    "default": 1670902215.984685
                },
                "config_call_dict": {
                    "type": "object",
@@ -2709,16 +2583,6 @@
                    },
                    "default": []
                },
                "entities": {
                    "type": "array",
                    "items": {
                        "type": "array",
                        "items": {
                            "type": "string"
                        }
                    },
                    "default": []
                },
                "depends_on": {
                    "$ref": "#/definitions/DependsOn",
                    "default": {
@@ -2763,7 +2627,7 @@
                }
            },
            "additionalProperties": false,
            "description": "SnapshotNode(database: Optional[str], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.SnapshotConfig, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', language: str = 'sql', refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, entities: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Optional[str] = None, compiled: bool = False, compiled_code: Optional[str] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Optional[str] = None)"
            "description": "SnapshotNode(database: str, schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.SnapshotConfig, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', language: str = 'sql', refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Optional[str] = None, compiled: bool = False, compiled_code: Optional[str] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Optional[str] = None)"
        },
        "SnapshotConfig": {
            "type": "object",
@@ -2973,6 +2837,7 @@
        "SeedNode": {
            "type": "object",
            "required": [
                "database",
                "schema",
                "name",
                "resource_type",
@@ -2986,14 +2851,7 @@
            ],
            "properties": {
                "database": {
                    "oneOf": [
                        {
                            "type": "string"
                        },
                        {
                            "type": "null"
                        }
                    ]
                    "type": "string"
                },
                "schema": {
                    "type": "string"
@@ -3118,7 +2976,7 @@
                },
                "created_at": {
                    "type": "number",
                    "default": 1674510977.801306
                    "default": 1670902215.987447
                },
                "config_call_dict": {
                    "type": "object",
@@ -3150,7 +3008,7 @@
                }
            },
            "additionalProperties": false,
            "description": "SeedNode(database: Optional[str], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.SeedConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', root_path: Optional[str] = None)"
            "description": "SeedNode(database: str, schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.SeedConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', root_path: Optional[str] = None)"
        },
        "SeedConfig": {
            "type": "object",
@@ -3320,6 +3178,7 @@
        "SourceDefinition": {
            "type": "object",
            "required": [
                "database",
                "schema",
                "name",
                "resource_type",
@@ -3335,14 +3194,7 @@
            ],
            "properties": {
                "database": {
                    "oneOf": [
                        {
                            "type": "string"
                        },
                        {
                            "type": "null"
                        }
                    ]
                    "type": "string"
                },
                "schema": {
                    "type": "string"
@@ -3483,11 +3335,11 @@
                },
                "created_at": {
                    "type": "number",
                    "default": 1674510977.802621
                    "default": 1670902215.989922
                }
            },
            "additionalProperties": false,
            "description": "SourceDefinition(database: Optional[str], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], source_name: str, source_description: str, loader: str, identifier: str, _event_status: Dict[str, Any] = <factory>, quoting: dbt.contracts.graph.unparsed.Quoting = <factory>, loaded_at_field: Optional[str] = None, freshness: Optional[dbt.contracts.graph.unparsed.FreshnessThreshold] = None, external: Optional[dbt.contracts.graph.unparsed.ExternalTable] = None, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, source_meta: Dict[str, Any] = <factory>, tags: List[str] = <factory>, config: dbt.contracts.graph.model_config.SourceConfig = <factory>, patch_path: Optional[str] = None, unrendered_config: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, created_at: float = <factory>)"
            "description": "SourceDefinition(database: str, schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], source_name: str, source_description: str, loader: str, identifier: str, _event_status: Dict[str, Any] = <factory>, quoting: dbt.contracts.graph.unparsed.Quoting = <factory>, loaded_at_field: Optional[str] = None, freshness: Optional[dbt.contracts.graph.unparsed.FreshnessThreshold] = None, external: Optional[dbt.contracts.graph.unparsed.ExternalTable] = None, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, source_meta: Dict[str, Any] = <factory>, tags: List[str] = <factory>, config: dbt.contracts.graph.model_config.SourceConfig = <factory>, patch_path: Optional[str] = None, unrendered_config: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, created_at: float = <factory>)"
        },
        "Quoting": {
            "type": "object",
@@ -3593,12 +3445,12 @@
                },
                "dbt_version": {
                    "type": "string",
                    "default": "1.5.0a1"
                    "default": "1.4.0a1"
                },
                "generated_at": {
                    "type": "string",
                    "format": "date-time",
                    "default": "2023-01-23T21:56:17.787436Z"
                    "default": "2022-12-13T03:30:15.961825Z"
                },
                "invocation_id": {
                    "oneOf": [
@@ -3609,7 +3461,7 @@
                        {
                            "type": "null"
                        }
                    ],
                    "default": "10c9c26b-6682-4d46-84d2-12f641a070e5"
                    "default": "4f2b967b-7e02-46de-a7ea-268a05e3fab1"
                },
                "env": {
                    "type": "object",
@@ -3620,7 +3472,7 @@
                }
            },
            "additionalProperties": false,
            "description": "FreshnessMetadata(dbt_schema_version: str = <factory>, dbt_version: str = '1.5.0a1', generated_at: datetime.datetime = <factory>, invocation_id: Optional[str] = <factory>, env: Dict[str, str] = <factory>)"
            "description": "FreshnessMetadata(dbt_schema_version: str = <factory>, dbt_version: str = '1.4.0a1', generated_at: datetime.datetime = <factory>, invocation_id: Optional[str] = <factory>, env: Dict[str, str] = <factory>)"
        },
        "SourceFreshnessRuntimeError": {
            "type": "object",
@@ -3962,7 +3814,7 @@
                },
                "created_at": {
                    "type": "number",
                    "default": 1674510977.8031092
                    "default": 1670902215.990816
                },
                "supported_languages": {
                    "oneOf": [
@@ -4218,23 +4070,13 @@
                    },
                    "default": []
                },
                "entities": {
                    "type": "array",
                    "items": {
                        "type": "array",
                        "items": {
                            "type": "string"
                        }
                    },
                    "default": []
                },
                "created_at": {
                    "type": "number",
                    "default": 1674510977.8040562
                    "default": 1670902215.993354
                }
            },
            "additionalProperties": false,
            "description": "Exposure(name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], type: dbt.contracts.graph.unparsed.ExposureType, owner: dbt.contracts.graph.unparsed.ExposureOwner, description: str = '', label: Optional[str] = None, maturity: Optional[dbt.contracts.graph.unparsed.MaturityType] = None, meta: Dict[str, Any] = <factory>, tags: List[str] = <factory>, config: dbt.contracts.graph.model_config.ExposureConfig = <factory>, unrendered_config: Dict[str, Any] = <factory>, url: Optional[str] = None, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, entities: List[List[str]] = <factory>, created_at: float = <factory>)"
            "description": "Exposure(name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], type: dbt.contracts.graph.unparsed.ExposureType, owner: dbt.contracts.graph.unparsed.ExposureOwner, description: str = '', label: Optional[str] = None, maturity: Optional[dbt.contracts.graph.unparsed.MaturityType] = None, meta: Dict[str, Any] = <factory>, tags: List[str] = <factory>, config: dbt.contracts.graph.model_config.ExposureConfig = <factory>, unrendered_config: Dict[str, Any] = <factory>, url: Optional[str] = None, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, created_at: float = <factory>)"
        },
        "ExposureOwner": {
            "type": "object",
@@ -4284,6 +4126,7 @@
                "description",
                "label",
                "calculation_method",
                "timestamp",
                "expression",
                "filters",
                "time_grains",
@@ -4326,6 +4169,9 @@
                "calculation_method": {
                    "type": "string"
                },
                "timestamp": {
                    "type": "string"
                },
                "expression": {
                    "type": "string"
                },
@@ -4347,16 +4193,6 @@
                        "type": "string"
                    }
                },
                "timestamp": {
                    "oneOf": [
                        {
                            "type": "string"
                        },
                        {
                            "type": "null"
                        }
                    ]
                },
                "window": {
                    "oneOf": [
                        {
@@ -4445,23 +4281,13 @@
                    },
                    "default": []
                },
                "entities": {
                    "type": "array",
                    "items": {
                        "type": "array",
                        "items": {
                            "type": "string"
                        }
                    },
                    "default": []
                },
                "created_at": {
                    "type": "number",
                    "default": 1674510977.804972
                    "default": 1670902215.995033
                }
            },
            "additionalProperties": false,
            "description": "Metric(name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], description: str, label: str, calculation_method: str, expression: str, filters: List[dbt.contracts.graph.unparsed.MetricFilter], time_grains: List[str], dimensions: List[str], timestamp: Optional[str] = None, window: Optional[dbt.contracts.graph.unparsed.MetricTime] = None, model: Optional[str] = None, model_unique_id: Optional[str] = None, meta: Dict[str, Any] = <factory>, tags: List[str] = <factory>, config: dbt.contracts.graph.model_config.MetricConfig = <factory>, unrendered_config: Dict[str, Any] = <factory>, sources: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, refs: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, entities: List[List[str]] = <factory>, created_at: float = <factory>)"
|
||||
"description": "Metric(name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], description: str, label: str, calculation_method: str, timestamp: str, expression: str, filters: List[dbt.contracts.graph.unparsed.MetricFilter], time_grains: List[str], dimensions: List[str], window: Optional[dbt.contracts.graph.unparsed.MetricTime] = None, model: Optional[str] = None, model_unique_id: Optional[str] = None, meta: Dict[str, Any] = <factory>, tags: List[str] = <factory>, config: dbt.contracts.graph.model_config.MetricConfig = <factory>, unrendered_config: Dict[str, Any] = <factory>, sources: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, refs: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, created_at: float = <factory>)"
|
||||
},
|
||||
"MetricFilter": {
|
||||
"type": "object",
|
||||
@@ -4529,148 +4355,6 @@
|
||||
},
|
||||
"additionalProperties": true,
|
||||
"description": "MetricConfig(_extra: Dict[str, Any] = <factory>, enabled: bool = True)"
|
||||
},
|
||||
"Entity": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
"name",
|
||||
"resource_type",
|
||||
"package_name",
|
||||
"path",
|
||||
"original_file_path",
|
||||
"unique_id",
|
||||
"fqn",
|
||||
"model",
|
||||
"description",
|
||||
"dimensions"
|
||||
],
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"resource_type": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"entity"
|
||||
]
|
||||
},
|
||||
"package_name": {
|
||||
"type": "string"
|
||||
},
|
||||
"path": {
|
||||
"type": "string"
|
||||
},
|
||||
"original_file_path": {
|
||||
"type": "string"
|
||||
},
|
||||
"unique_id": {
|
||||
"type": "string"
|
||||
},
|
||||
"fqn": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"model": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"dimensions": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"model_unique_id": {
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
},
|
||||
"meta": {
|
||||
"type": "object",
|
||||
"default": {}
|
||||
},
|
||||
"tags": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"default": []
|
||||
},
|
||||
"config": {
|
||||
"$ref": "#/definitions/EntityConfig",
|
||||
"default": {
|
||||
"enabled": true
|
||||
}
|
||||
},
|
||||
"unrendered_config": {
|
||||
"type": "object",
|
||||
"default": {}
|
||||
},
|
||||
"sources": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"default": []
|
||||
},
|
||||
"depends_on": {
|
||||
"$ref": "#/definitions/DependsOn",
|
||||
"default": {
|
||||
"macros": [],
|
||||
"nodes": []
|
||||
}
|
||||
},
|
||||
"refs": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"default": []
|
||||
},
|
||||
"entities": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"default": []
|
||||
},
|
||||
"created_at": {
|
||||
"type": "number",
|
||||
"default": 1674510977.805523
|
||||
}
|
||||
},
|
||||
"additionalProperties": false,
|
||||
"description": "Entity(name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], model: str, description: str, dimensions: List[str], model_unique_id: Optional[str] = None, meta: Dict[str, Any] = <factory>, tags: List[str] = <factory>, config: dbt.contracts.graph.model_config.EntityConfig = <factory>, unrendered_config: Dict[str, Any] = <factory>, sources: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, refs: List[List[str]] = <factory>, entities: List[List[str]] = <factory>, created_at: float = <factory>)"
|
||||
},
|
||||
"EntityConfig": {
|
||||
"type": "object",
|
||||
"required": [],
|
||||
"properties": {
|
||||
"enabled": {
|
||||
"type": "boolean",
|
||||
"default": true
|
||||
}
|
||||
},
|
||||
"additionalProperties": true,
|
||||
"description": "EntityConfig(_extra: Dict[str, Any] = <factory>, enabled: bool = True)"
|
||||
}
|
||||
},
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
|
||||
@@ -37,12 +37,12 @@
},
"dbt_version": {
"type": "string",
"default": "1.5.0a1"
"default": "1.2.0a1"
},
"generated_at": {
"type": "string",
"format": "date-time",
"default": "2023-01-23T21:56:17.788708Z"
"default": "2022-04-15T20:38:22.700175Z"
},
"invocation_id": {
"oneOf": [
@@ -53,7 +53,7 @@
"type": "null"
}
],
"default": "10c9c26b-6682-4d46-84d2-12f641a070e5"
"default": "34abf75e-59d3-442f-920c-fa3843d98014"
},
"env": {
"type": "object",
@@ -64,7 +64,7 @@
}
},
"additionalProperties": false,
"description": "BaseArtifactMetadata(dbt_schema_version: str, dbt_version: str = '1.5.0a1', generated_at: datetime.datetime = <factory>, invocation_id: Optional[str] = <factory>, env: Dict[str, str] = <factory>)"
"description": "BaseArtifactMetadata(dbt_schema_version: str, dbt_version: str = '1.2.0a1', generated_at: datetime.datetime = <factory>, invocation_id: Union[str, NoneType] = <factory>, env: Dict[str, str] = <factory>)"
},
"RunResultOutput": {
"type": "object",
@@ -148,7 +148,7 @@
}
},
"additionalProperties": false,
"description": "RunResultOutput(status: Union[dbt.contracts.results.RunStatus, dbt.contracts.results.TestStatus, dbt.contracts.results.FreshnessStatus], timing: List[dbt.contracts.results.TimingInfo], thread_id: str, execution_time: float, adapter_response: Dict[str, Any], message: Optional[str], failures: Optional[int], unique_id: str)"
"description": "RunResultOutput(status: Union[dbt.contracts.results.RunStatus, dbt.contracts.results.TestStatus, dbt.contracts.results.FreshnessStatus], timing: List[dbt.contracts.results.TimingInfo], thread_id: str, execution_time: float, adapter_response: Dict[str, Any], message: Union[str, NoneType], failures: Union[int, NoneType], unique_id: str)"
},
"TimingInfo": {
"type": "object",
@@ -183,7 +183,7 @@
}
},
"additionalProperties": false,
"description": "TimingInfo(name: str, started_at: Optional[datetime.datetime] = None, completed_at: Optional[datetime.datetime] = None)"
"description": "TimingInfo(name: str, started_at: Union[datetime.datetime, NoneType] = None, completed_at: Union[datetime.datetime, NoneType] = None)"
},
"FreshnessMetadata": {
"type": "object",
@@ -195,12 +195,12 @@
},
"dbt_version": {
"type": "string",
"default": "1.5.0a1"
"default": "1.2.0a1"
},
"generated_at": {
"type": "string",
"format": "date-time",
"default": "2023-01-23T21:56:17.787436Z"
"default": "2022-04-15T20:38:22.697740Z"
},
"invocation_id": {
"oneOf": [
@@ -211,7 +211,7 @@
"type": "null"
}
],
"default": "10c9c26b-6682-4d46-84d2-12f641a070e5"
"default": "34abf75e-59d3-442f-920c-fa3843d98014"
},
"env": {
"type": "object",
@@ -222,7 +222,7 @@
}
},
"additionalProperties": false,
"description": "FreshnessMetadata(dbt_schema_version: str = <factory>, dbt_version: str = '1.5.0a1', generated_at: datetime.datetime = <factory>, invocation_id: Optional[str] = <factory>, env: Dict[str, str] = <factory>)"
"description": "FreshnessMetadata(dbt_schema_version: str = <factory>, dbt_version: str = '1.2.0a1', generated_at: datetime.datetime = <factory>, invocation_id: Union[str, NoneType] = <factory>, env: Dict[str, str] = <factory>)"
},
"SourceFreshnessRuntimeError": {
"type": "object",
@@ -361,7 +361,7 @@
}
},
"additionalProperties": false,
"description": "FreshnessThreshold(warn_after: Optional[dbt.contracts.graph.unparsed.Time] = <factory>, error_after: Optional[dbt.contracts.graph.unparsed.Time] = <factory>, filter: Optional[str] = None)"
"description": "FreshnessThreshold(warn_after: Union[dbt.contracts.graph.unparsed.Time, NoneType] = <factory>, error_after: Union[dbt.contracts.graph.unparsed.Time, NoneType] = <factory>, filter: Union[str, NoneType] = None)"
},
"Time": {
"type": "object",
@@ -394,7 +394,7 @@
}
},
"additionalProperties": false,
"description": "Time(count: Optional[int] = None, period: Optional[dbt.contracts.graph.unparsed.TimePeriod] = None)"
"description": "Time(count: Union[int, NoneType] = None, period: Union[dbt.contracts.graph.unparsed.TimePeriod, NoneType] = None)"
}
},
"$schema": "http://json-schema.org/draft-07/schema#",

@@ -39,12 +39,12 @@
},
"dbt_version": {
"type": "string",
"default": "1.5.0a1"
"default": "1.2.0a1"
},
"generated_at": {
"type": "string",
"format": "date-time",
"default": "2023-01-23T21:56:17.787436Z"
"default": "2022-04-15T20:38:22.697740Z"
},
"invocation_id": {
"oneOf": [
@@ -55,7 +55,7 @@
"type": "null"
}
],
"default": "10c9c26b-6682-4d46-84d2-12f641a070e5"
"default": "34abf75e-59d3-442f-920c-fa3843d98014"
},
"env": {
"type": "object",
@@ -66,7 +66,7 @@
}
},
"additionalProperties": false,
"description": "FreshnessMetadata(dbt_schema_version: str = <factory>, dbt_version: str = '1.5.0a1', generated_at: datetime.datetime = <factory>, invocation_id: Optional[str] = <factory>, env: Dict[str, str] = <factory>)"
"description": "FreshnessMetadata(dbt_schema_version: str = <factory>, dbt_version: str = '1.2.0a1', generated_at: datetime.datetime = <factory>, invocation_id: Union[str, NoneType] = <factory>, env: Dict[str, str] = <factory>)"
},
"SourceFreshnessRuntimeError": {
"type": "object",
@@ -205,7 +205,7 @@
}
},
"additionalProperties": false,
"description": "FreshnessThreshold(warn_after: Optional[dbt.contracts.graph.unparsed.Time] = <factory>, error_after: Optional[dbt.contracts.graph.unparsed.Time] = <factory>, filter: Optional[str] = None)"
"description": "FreshnessThreshold(warn_after: Union[dbt.contracts.graph.unparsed.Time, NoneType] = <factory>, error_after: Union[dbt.contracts.graph.unparsed.Time, NoneType] = <factory>, filter: Union[str, NoneType] = None)"
},
"Time": {
"type": "object",
@@ -238,7 +238,7 @@
}
},
"additionalProperties": false,
"description": "Time(count: Optional[int] = None, period: Optional[dbt.contracts.graph.unparsed.TimePeriod] = None)"
"description": "Time(count: Union[int, NoneType] = None, period: Union[dbt.contracts.graph.unparsed.TimePeriod, NoneType] = None)"
},
"TimingInfo": {
"type": "object",
@@ -273,7 +273,7 @@
}
},
"additionalProperties": false,
"description": "TimingInfo(name: str, started_at: Optional[datetime.datetime] = None, completed_at: Optional[datetime.datetime] = None)"
"description": "TimingInfo(name: str, started_at: Union[datetime.datetime, NoneType] = None, completed_at: Union[datetime.datetime, NoneType] = None)"
}
},
"$schema": "http://json-schema.org/draft-07/schema#",
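
Editor's note: the metadata block whose defaults churn throughout these schema diffs (dbt_version, generated_at, invocation_id, env) is shared by every dbt artifact. A minimal sketch of reading it back from a generated artifact; the path is illustrative and assumes a completed dbt invocation:

import json

# run_results.json, manifest.json, and sources.json all carry this block
with open("target/run_results.json") as fp:
    metadata = json.load(fp)["metadata"]
# these keys are what the schema defaults above describe
print(metadata["dbt_version"], metadata["generated_at"], metadata["invocation_id"])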
@@ -1,184 +0,0 @@
import json
import os

from test.integration.base import DBTIntegrationTest, use_profile

import dbt.exceptions


class TestGoodDocsBlocks(DBTIntegrationTest):
    @property
    def schema(self):
        return 'docs_blocks_035'

    @staticmethod
    def dir(path):
        return os.path.normpath(path)

    @property
    def models(self):
        return self.dir("models")

    @use_profile('postgres')
    def test_postgres_valid_doc_ref(self):
        self.assertEqual(len(self.run_dbt()), 1)

        self.assertTrue(os.path.exists('./target/manifest.json'))

        with open('./target/manifest.json') as fp:
            manifest = json.load(fp)

        model_data = manifest['nodes']['model.test.model']
        self.assertEqual(
            model_data['description'],
            'My model is just a copy of the seed'
        )
        self.assertEqual(
            {
                'name': 'id',
                'description': 'The user ID number',
                'data_type': None,
                'meta': {},
                'quote': None,
                'tags': [],
            },
            model_data['columns']['id']
        )
        self.assertEqual(
            {
                'name': 'first_name',
                'description': "The user's first name",
                'data_type': None,
                'meta': {},
                'quote': None,
                'tags': [],
            },
            model_data['columns']['first_name']
        )

        self.assertEqual(
            {
                'name': 'last_name',
                'description': "The user's last name",
                'data_type': None,
                'meta': {},
                'quote': None,
                'tags': [],
            },
            model_data['columns']['last_name']
        )
        self.assertEqual(len(model_data['columns']), 3)

    @use_profile('postgres')
    def test_postgres_alternative_docs_path(self):
        self.use_default_project({"docs-paths": [self.dir("docs")]})
        self.assertEqual(len(self.run_dbt()), 1)

        self.assertTrue(os.path.exists('./target/manifest.json'))

        with open('./target/manifest.json') as fp:
            manifest = json.load(fp)

        model_data = manifest['nodes']['model.test.model']
        self.assertEqual(
            model_data['description'],
            'Alt text about the model'
        )
        self.assertEqual(
            {
                'name': 'id',
                'description': 'The user ID number with alternative text',
                'data_type': None,
                'meta': {},
                'quote': None,
                'tags': [],
            },
            model_data['columns']['id']
        )
        self.assertEqual(
            {
                'name': 'first_name',
                'description': "The user's first name",
                'data_type': None,
                'meta': {},
                'quote': None,
                'tags': [],
            },
            model_data['columns']['first_name']
        )

        self.assertEqual(
            {
                'name': 'last_name',
                'description': "The user's last name in this other file",
                'data_type': None,
                'meta': {},
                'quote': None,
                'tags': [],
            },
            model_data['columns']['last_name']
        )
        self.assertEqual(len(model_data['columns']), 3)

    @use_profile('postgres')
    def test_postgres_alternative_docs_path_missing(self):
        self.use_default_project({"docs-paths": [self.dir("not-docs")]})
        with self.assertRaises(dbt.exceptions.CompilationError):
            self.run_dbt()


class TestMissingDocsBlocks(DBTIntegrationTest):
    @property
    def schema(self):
        return 'docs_blocks_035'

    @staticmethod
    def dir(path):
        return os.path.normpath(path)

    @property
    def models(self):
        return self.dir("missing_docs_models")

    @use_profile('postgres')
    def test_postgres_missing_doc_ref(self):
        # The run should fail since we could not find the docs reference.
        with self.assertRaises(dbt.exceptions.CompilationError):
            self.run_dbt()


class TestBadDocsBlocks(DBTIntegrationTest):
    @property
    def schema(self):
        return 'docs_blocks_035'

    @staticmethod
    def dir(path):
        return os.path.normpath(path)

    @property
    def models(self):
        return self.dir("invalid_name_models")

    @use_profile('postgres')
    def test_postgres_invalid_doc_ref(self):
        # The run should fail since we could not find the docs reference.
        with self.assertRaises(dbt.exceptions.CompilationError):
            self.run_dbt(expect_pass=False)

class TestDuplicateDocsBlock(DBTIntegrationTest):
    @property
    def schema(self):
        return 'docs_blocks_035'

    @staticmethod
    def dir(path):
        return os.path.normpath(path)

    @property
    def models(self):
        return self.dir("duplicate_docs")

    @use_profile('postgres')
    def test_postgres_duplicate_doc_ref(self):
        with self.assertRaises(dbt.exceptions.CompilationError):
            self.run_dbt(expect_pass=False)
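
Editor's note: the unittest-style docs-blocks tests removed above would be expressed with dbt's pytest-based functional framework. A minimal sketch under that assumption, using dbt.tests.util.run_dbt and the class-scoped models fixture; the fixture file contents here are hypothetical stand-ins, not the actual ported fixtures:

import json
import os

import pytest
from dbt.tests.util import run_dbt

model_sql = "select 1 as id"
docs_md = "{% docs table_doc %}My model is just a copy of the seed{% enddocs %}"
schema_yml = """
version: 2
models:
  - name: model
    description: '{{ doc("table_doc") }}'
"""


class TestGoodDocsBlocksSketch:
    @pytest.fixture(scope="class")
    def models(self):
        # the framework writes these files into a temporary project
        return {"model.sql": model_sql, "docs.md": docs_md, "schema.yml": schema_yml}

    def test_valid_doc_ref(self, project):
        results = run_dbt(["run"])
        assert len(results) == 1
        # the rendered doc block should land in the model's manifest description
        manifest_path = os.path.join(project.project_root, "target", "manifest.json")
        with open(manifest_path) as fp:
            manifest = json.load(fp)
        description = manifest["nodes"]["model.test.model"]["description"]
        assert description == "My model is just a copy of the seed"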
@@ -1,67 +0,0 @@
from test.integration.base import DBTIntegrationTest, use_profile
from dbt.adapters.factory import FACTORY


class TestBaseCaching(DBTIntegrationTest):
    @property
    def schema(self):
        return "caching_038"

    @property
    def project_config(self):
        return {
            'config-version': 2,
            'quoting': {
                'identifier': False,
                'schema': False,
            }
        }

    def run_and_get_adapter(self):
        # we want to inspect the adapter that dbt used for the run, which is
        # not self.adapter. You can't do this until after you've run dbt once.
        self.run_dbt(['run'])
        return FACTORY.adapters[self.adapter_type]

    def cache_run(self):
        adapter = self.run_and_get_adapter()
        self.assertEqual(len(adapter.cache.relations), 1)
        relation = next(iter(adapter.cache.relations.values()))
        self.assertEqual(relation.inner.schema, self.unique_schema())
        self.assertEqual(relation.schema, self.unique_schema().lower())

        self.run_dbt(['run'])
        self.assertEqual(len(adapter.cache.relations), 1)
        second_relation = next(iter(adapter.cache.relations.values()))
        self.assertEqual(relation, second_relation)


class TestCachingLowercaseModel(TestBaseCaching):
    @property
    def models(self):
        return "models"

    @use_profile('postgres')
    def test_postgres_cache(self):
        self.cache_run()


class TestCachingUppercaseModel(TestBaseCaching):
    @property
    def models(self):
        return "shouting_models"

    @use_profile('postgres')
    def test_postgres_cache(self):
        self.cache_run()


class TestCachingSelectedSchemaOnly(TestBaseCaching):
    @property
    def models(self):
        return "models_multi_schemas"

    def run_and_get_adapter(self):
        # select only the 'model' in the default schema
        self.run_dbt(['--cache-selected-only', 'run', '--select', 'model'])
        return FACTORY.adapters[self.adapter_type]

    @use_profile('postgres')
    def test_postgres_cache(self):
        self.cache_run()
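
Editor's note: the core assertion of the removed caching tests (run dbt once, then inspect the relation cache on the adapter dbt actually used) ports fairly directly. A minimal pytest-style sketch, assuming the functional project fixture; the class and test names are illustrative:

from dbt.adapters.factory import FACTORY
from dbt.tests.util import run_dbt


class TestRelationCacheSketch:
    def test_cache_populated_once(self, project):
        run_dbt(["run"])
        # look up the adapter instance dbt used for this invocation,
        # keyed by adapter type (e.g. "postgres"), as the old test did
        adapter = FACTORY.adapters[project.adapter.type()]
        # a single-model project should leave exactly one cached relation
        assert len(adapter.cache.relations) == 1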
@@ -1,2 +0,0 @@
{{ config(materialized='ephemeral') }}
select * from {{ ref('view_model') }}
@@ -1,9 +0,0 @@
version: 2
models:
  - name: view_model
    columns:
      - name: id
        tests:
          - unique
          - not_null
      - name: name
@@ -1,5 +0,0 @@
{{ config(materialized='table') }}
select * from {{ ref('ephemeral_model') }}

-- establish a macro dependency to trigger state:modified.macros
-- depends on: {{ my_macro() }}
@@ -1 +0,0 @@
select * from no.such.table
@@ -1,2 +0,0 @@
{{ config(materialized='ephemeral') }}
select * from no.such.table
@@ -1,9 +0,0 @@
version: 2
models:
  - name: view_model
    columns:
      - name: id
        tests:
          - unique
          - not_null
      - name: name
@@ -1,5 +0,0 @@
{{ config(materialized='table') }}
select * from {{ ref('ephemeral_model') }}

-- establish a macro dependency to trigger state:modified.macros
-- depends on: {{ my_macro() }}
@@ -1 +0,0 @@
select * from no.such.table
@@ -1,9 +0,0 @@
version: 2
models:
  - name: view_model
    columns:
      - name: id
        tests:
          - unique
          - not_null
      - name: name
@@ -1,2 +0,0 @@
{{ config(materialized='table') }}
select 1 as fun
@@ -1 +0,0 @@
select * from {{ ref('seed') }}
@@ -1,13 +0,0 @@
{# trigger infinite recursion if not handled #}

{% macro my_infinitely_recursive_macro() %}
  {{ return(adapter.dispatch('my_infinitely_recursive_macro')()) }}
{% endmacro %}

{% macro default__my_infinitely_recursive_macro() %}
  {% if unmet_condition %}
    {{ my_infinitely_recursive_macro() }}
  {% else %}
    {{ return('') }}
  {% endif %}
{% endmacro %}
@@ -1,3 +0,0 @@
{% macro my_macro() %}
  {% do log('in a macro') %}
{% endmacro %}
@@ -1,2 +0,0 @@
{{ config(materialized='ephemeral') }}
select * from {{ ref('view_model') }}
@@ -1,8 +0,0 @@
version: 2
exposures:
  - name: my_exposure
    type: application
    depends_on:
      - ref('view_model')
    owner:
      email: test@example.com
@@ -1,10 +0,0 @@
version: 2
models:
  - name: view_model
    columns:
      - name: id
        tests:
          - unique:
              severity: error
          - not_null
      - name: name
@@ -1,5 +0,0 @@
{{ config(materialized='table') }}
select * from {{ ref('ephemeral_model') }}

-- establish a macro dependency to trigger state:modified.macros
-- depends on: {{ my_macro() }}
@@ -1,4 +0,0 @@
select * from {{ ref('seed') }}

-- establish a macro dependency that trips infinite recursion if not handled
-- depends on: {{ my_infinitely_recursive_macro() }}
@@ -1,6 +0,0 @@
{
  "metadata": {
    "dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v3.json",
    "dbt_version": "0.21.1"
  }
}
@@ -1,3 +0,0 @@
id,name
1,Alice
2,Bob
@@ -1,14 +0,0 @@
{% snapshot my_cool_snapshot %}

    {{
        config(
            target_database=database,
            target_schema=schema,
            unique_key='id',
            strategy='check',
            check_cols=['id'],
        )
    }}
    select * from {{ ref('view_model') }}

{% endsnapshot %}
@@ -1,354 +0,0 @@
from test.integration.base import DBTIntegrationTest, use_profile
import copy
import json
import os
import shutil

import pytest
import dbt.exceptions


class TestDeferState(DBTIntegrationTest):
    @property
    def schema(self):
        return "defer_state_062"

    @property
    def models(self):
        return "models"

    def setUp(self):
        self.other_schema = None
        super().setUp()
        self._created_schemas.add(self.other_schema)

    @property
    def project_config(self):
        return {
            'config-version': 2,
            'seeds': {
                'test': {
                    'quote_columns': False,
                }
            }
        }

    def get_profile(self, adapter_type):
        if self.other_schema is None:
            self.other_schema = self.unique_schema() + '_other'
        profile = super().get_profile(adapter_type)
        default_name = profile['test']['target']
        profile['test']['outputs']['otherschema'] = copy.deepcopy(profile['test']['outputs'][default_name])
        profile['test']['outputs']['otherschema']['schema'] = self.other_schema
        return profile

    def copy_state(self):
        assert not os.path.exists('state')
        os.makedirs('state')
        shutil.copyfile('target/manifest.json', 'state/manifest.json')

    def run_and_compile_defer(self):
        results = self.run_dbt(['seed'])
        assert len(results) == 1
        assert not any(r.node.deferred for r in results)
        results = self.run_dbt(['run'])
        assert len(results) == 2
        assert not any(r.node.deferred for r in results)
        results = self.run_dbt(['test'])
        assert len(results) == 2

        # copy files
        self.copy_state()

        # defer test, it succeeds
        results, success = self.run_dbt_and_check(['compile', '--state', 'state', '--defer'])
        self.assertEqual(len(results.results), 6)
        self.assertEqual(results.results[0].node.name, "seed")
        self.assertTrue(success)

    def run_and_snapshot_defer(self):
        results = self.run_dbt(['seed'])
        assert len(results) == 1
        assert not any(r.node.deferred for r in results)
        results = self.run_dbt(['run'])
        assert len(results) == 2
        assert not any(r.node.deferred for r in results)
        results = self.run_dbt(['test'])
        assert len(results) == 2

        # snapshot succeeds without --defer
        results = self.run_dbt(['snapshot'])

        # no state, snapshot fails
        with pytest.raises(dbt.exceptions.DbtRuntimeError):
            results = self.run_dbt(['snapshot', '--state', 'state', '--defer'])

        # copy files
        self.copy_state()

        # defer test, it succeeds
        results = self.run_dbt(['snapshot', '--state', 'state', '--defer'])

        # favor_state test, it succeeds
        results = self.run_dbt(['snapshot', '--state', 'state', '--defer', '--favor-state'])

    def run_and_defer(self):
        results = self.run_dbt(['seed'])
        assert len(results) == 1
        assert not any(r.node.deferred for r in results)
        results = self.run_dbt(['run'])
        assert len(results) == 2
        assert not any(r.node.deferred for r in results)
        results = self.run_dbt(['test'])
        assert len(results) == 2

        # copy files over from the happy times when we had a good target
        self.copy_state()

        # test tests first, because run will change things
        # no state, wrong schema, failure.
        self.run_dbt(['test', '--target', 'otherschema'], expect_pass=False)

        # test generate docs
        # no state, wrong schema, empty nodes
        catalog = self.run_dbt(['docs', 'generate', '--target', 'otherschema'])
        assert not catalog.nodes

        # no state, run also fails
        self.run_dbt(['run', '--target', 'otherschema'], expect_pass=False)

        # defer test, it succeeds
        results = self.run_dbt(['test', '-m', 'view_model+', '--state', 'state', '--defer', '--target', 'otherschema'])

        # defer docs generate with state; the catalog refers to the schema from the happy times
        catalog = self.run_dbt(['docs', 'generate', '-m', 'view_model+', '--state', 'state', '--defer', '--target', 'otherschema'])
        assert self.other_schema not in catalog.nodes["seed.test.seed"].metadata.schema
        assert self.unique_schema() in catalog.nodes["seed.test.seed"].metadata.schema

        # with state it should work though
        results = self.run_dbt(['run', '-m', 'view_model', '--state', 'state', '--defer', '--target', 'otherschema'])
        assert self.other_schema not in results[0].node.compiled_code
        assert self.unique_schema() in results[0].node.compiled_code

        with open('target/manifest.json') as fp:
            data = json.load(fp)
        assert data['nodes']['seed.test.seed']['deferred']

        assert len(results) == 1

    def run_and_defer_favor_state(self):
        results = self.run_dbt(['seed'])
        assert len(results) == 1
        assert not any(r.node.deferred for r in results)
        results = self.run_dbt(['run'])
        assert len(results) == 2
        assert not any(r.node.deferred for r in results)
        results = self.run_dbt(['test'])
        assert len(results) == 2

        # copy files over from the happy times when we had a good target
        self.copy_state()

        # test tests first, because run will change things
        # no state, wrong schema, failure.
        self.run_dbt(['test', '--target', 'otherschema'], expect_pass=False)

        # no state, run also fails
        self.run_dbt(['run', '--target', 'otherschema'], expect_pass=False)

        # defer test, it succeeds
        results = self.run_dbt(['test', '-m', 'view_model+', '--state', 'state', '--defer', '--favor-state', '--target', 'otherschema'])

        # with state it should work though
        results = self.run_dbt(['run', '-m', 'view_model', '--state', 'state', '--defer', '--favor-state', '--target', 'otherschema'])
        assert self.other_schema not in results[0].node.compiled_code
        assert self.unique_schema() in results[0].node.compiled_code

        with open('target/manifest.json') as fp:
            data = json.load(fp)
        assert data['nodes']['seed.test.seed']['deferred']

        assert len(results) == 1

    def run_switchdirs_defer(self):
        results = self.run_dbt(['seed'])
        assert len(results) == 1
        results = self.run_dbt(['run'])
        assert len(results) == 2

        # copy files over from the happy times when we had a good target
        self.copy_state()

        self.use_default_project({'model-paths': ['changed_models']})
        # the sql here is just wrong, so it should fail
        self.run_dbt(
            ['run', '-m', 'view_model', '--state', 'state', '--defer', '--target', 'otherschema'],
            expect_pass=False,
        )
        # but this should work since we just use the old happy model
        self.run_dbt(
            ['run', '-m', 'table_model', '--state', 'state', '--defer', '--target', 'otherschema'],
            expect_pass=True,
        )

        self.use_default_project({'model-paths': ['changed_models_bad']})
        # this should fail because the table model refs a broken ephemeral
        # model, which it should see
        self.run_dbt(
            ['run', '-m', 'table_model', '--state', 'state', '--defer', '--target', 'otherschema'],
            expect_pass=False,
        )

    def run_switchdirs_defer_favor_state(self):
        results = self.run_dbt(['seed'])
        assert len(results) == 1
        results = self.run_dbt(['run'])
        assert len(results) == 2

        # copy files over from the happy times when we had a good target
        self.copy_state()

        self.use_default_project({'model-paths': ['changed_models']})
        # the sql here is just wrong, so it should fail
        self.run_dbt(
            ['run', '-m', 'view_model', '--state', 'state', '--defer', '--favor-state', '--target', 'otherschema'],
            expect_pass=False,
        )
        # but this should work since we just use the old happy model
        self.run_dbt(
            ['run', '-m', 'table_model', '--state', 'state', '--defer', '--favor-state', '--target', 'otherschema'],
            expect_pass=True,
        )

        self.use_default_project({'model-paths': ['changed_models_bad']})
        # this should fail because the table model refs a broken ephemeral
        # model, which it should see
        self.run_dbt(
            ['run', '-m', 'table_model', '--state', 'state', '--defer', '--favor-state', '--target', 'otherschema'],
            expect_pass=False,
        )

    def run_defer_iff_not_exists(self):
        results = self.run_dbt(['seed', '--target', 'otherschema'])
        assert len(results) == 1
        results = self.run_dbt(['run', '--target', 'otherschema'])
        assert len(results) == 2

        # copy files over from the happy times when we had a good target
        self.copy_state()
        results = self.run_dbt(['seed'])
        assert len(results) == 1
        results = self.run_dbt(['run', '--state', 'state', '--defer'])
        assert len(results) == 2

        # because the seed now exists in our schema, we shouldn't defer it
        assert self.other_schema not in results[0].node.compiled_code
        assert self.unique_schema() in results[0].node.compiled_code

    def run_defer_iff_not_exists_favor_state(self):
        results = self.run_dbt(['seed'])
        assert len(results) == 1
        results = self.run_dbt(['run'])
        assert len(results) == 2

        # copy files over from the happy times when we had a good target
        self.copy_state()
        results = self.run_dbt(['seed'])
        assert len(results) == 1
        results = self.run_dbt(['run', '--state', 'state', '--defer', '--favor-state', '--target', 'otherschema'])
        assert len(results) == 2

        # because the seed exists in other schema, we should defer it
        assert self.other_schema not in results[0].node.compiled_code
        assert self.unique_schema() in results[0].node.compiled_code

    def run_defer_deleted_upstream(self):
        results = self.run_dbt(['seed'])
        assert len(results) == 1
        results = self.run_dbt(['run'])
        assert len(results) == 2

        # copy files over from the happy times when we had a good target
        self.copy_state()

        self.use_default_project({'model-paths': ['changed_models_missing']})
        # ephemeral_model is now gone. Previously this caused a
        # KeyError (dbt#2875); now it should pass.
        self.run_dbt(
            ['run', '-m', 'view_model', '--state', 'state', '--defer', '--target', 'otherschema'],
            expect_pass=True,
        )

        # despite deferral, test should use models just created in our schema
        results = self.run_dbt(['test', '--state', 'state', '--defer'])
        assert self.other_schema not in results[0].node.compiled_code
        assert self.unique_schema() in results[0].node.compiled_code

    def run_defer_deleted_upstream_favor_state(self):
        results = self.run_dbt(['seed'])
        assert len(results) == 1
        results = self.run_dbt(['run'])
        assert len(results) == 2

        # copy files over from the happy times when we had a good target
        self.copy_state()

        self.use_default_project({'model-paths': ['changed_models_missing']})

        self.run_dbt(
            ['run', '-m', 'view_model', '--state', 'state', '--defer', '--favor-state', '--target', 'otherschema'],
            expect_pass=True,
        )

        # despite deferral, test should use models just created in our schema
        results = self.run_dbt(['test', '--state', 'state', '--defer', '--favor-state'])
        assert self.other_schema not in results[0].node.compiled_code
        assert self.unique_schema() in results[0].node.compiled_code

    @use_profile('postgres')
    def test_postgres_state_changetarget(self):
        self.run_and_defer()

        # make sure these commands don't work with --defer
        with pytest.raises(SystemExit):
            self.run_dbt(['seed', '--defer'])

    @use_profile('postgres')
    def test_postgres_state_changetarget_favor_state(self):
        self.run_and_defer_favor_state()

        # make sure these commands don't work with --defer
        with pytest.raises(SystemExit):
            self.run_dbt(['seed', '--defer'])

    @use_profile('postgres')
    def test_postgres_state_changedir(self):
        self.run_switchdirs_defer()

    @use_profile('postgres')
    def test_postgres_state_changedir_favor_state(self):
        self.run_switchdirs_defer_favor_state()

    @use_profile('postgres')
    def test_postgres_state_defer_iffnotexists(self):
        self.run_defer_iff_not_exists()

    @use_profile('postgres')
    def test_postgres_state_defer_iffnotexists_favor_state(self):
        self.run_defer_iff_not_exists_favor_state()

    @use_profile('postgres')
    def test_postgres_state_defer_deleted_upstream(self):
        self.run_defer_deleted_upstream()

    @use_profile('postgres')
    def test_postgres_state_defer_deleted_upstream_favor_state(self):
        self.run_defer_deleted_upstream_favor_state()

    @use_profile('postgres')
    def test_postgres_state_snapshot_defer(self):
        self.run_and_snapshot_defer()

    @use_profile('postgres')
    def test_postgres_state_compile_defer(self):
        self.run_and_compile_defer()
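
Editor's note: the copy_state() helper above is the backbone of every deferral scenario: it snapshots target/manifest.json into a state/ directory that later invocations consult via --state and --defer. A minimal pytest-style sketch of that same flow, assuming dbt.tests.util.run_dbt and the functional project fixture; the model names are illustrative:

import os
import shutil

from dbt.tests.util import run_dbt


def copy_state(project_root):
    # preserve the manifest of a known-good run for later state comparison
    state_dir = os.path.join(project_root, "state")
    os.makedirs(state_dir, exist_ok=True)
    shutil.copyfile(
        os.path.join(project_root, "target", "manifest.json"),
        os.path.join(state_dir, "manifest.json"),
    )


class TestDeferSketch:
    def test_run_then_defer(self, project):
        run_dbt(["run"])
        copy_state(project.project_root)
        # a deferred run resolves refs to unbuilt upstream nodes
        # from the state manifest instead of the current target
        run_dbt(["run", "--select", "view_model", "--state", "state", "--defer"])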
@@ -1,211 +0,0 @@
from test.integration.base import DBTIntegrationTest, use_profile
import os
import random
import shutil
import string

import pytest

from dbt.exceptions import CompilationError, IncompatibleSchemaError


class TestModifiedState(DBTIntegrationTest):
    @property
    def schema(self):
        return "modified_state_062"

    @property
    def models(self):
        return "models"

    @property
    def project_config(self):
        return {
            'config-version': 2,
            'macro-paths': ['macros'],
            'seeds': {
                'test': {
                    'quote_columns': True,
                }
            }
        }

    def _symlink_test_folders(self):
        # dbt's normal symlink behavior breaks this test. Copy the files
        # so we can freely modify them.
        for entry in os.listdir(self.test_original_source_path):
            src = os.path.join(self.test_original_source_path, entry)
            tst = os.path.join(self.test_root_dir, entry)
            if entry in {'models', 'seeds', 'macros', 'previous_state'}:
                shutil.copytree(src, tst)
            elif os.path.isdir(entry) or entry.endswith('.sql'):
                os.symlink(src, tst)

    def copy_state(self):
        assert not os.path.exists('state')
        os.makedirs('state')
        shutil.copyfile('target/manifest.json', 'state/manifest.json')

    def setUp(self):
        super().setUp()
        self.run_dbt(['seed'])
        self.run_dbt(['run'])
        self.copy_state()

    @use_profile('postgres')
    def test_postgres_changed_seed_contents_state(self):
        results = self.run_dbt(['ls', '--resource-type', 'seed', '--select', 'state:modified', '--state', './state'], expect_pass=True)
        assert len(results) == 0
        with open('seeds/seed.csv') as fp:
            fp.readline()
            newline = fp.newlines
        with open('seeds/seed.csv', 'a') as fp:
            fp.write(f'3,carl{newline}')

        results = self.run_dbt(['ls', '--resource-type', 'seed', '--select', 'state:modified', '--state', './state'])
        assert len(results) == 1
        assert results[0] == 'test.seed'

        results = self.run_dbt(['ls', '--select', 'state:modified', '--state', './state'])
        assert len(results) == 1
        assert results[0] == 'test.seed'

        results = self.run_dbt(['ls', '--select', 'state:modified+', '--state', './state'])
        assert len(results) == 7
        assert set(results) == {'test.seed', 'test.table_model', 'test.view_model', 'test.ephemeral_model', 'test.not_null_view_model_id', 'test.unique_view_model_id', 'exposure:test.my_exposure'}

        shutil.rmtree('./state')
        self.copy_state()

        with open('seeds/seed.csv', 'a') as fp:
            # assume each line is ~2 bytes + len(name)
            target_size = 1*1024*1024
            line_size = 64

            num_lines = target_size // line_size

            maxlines = num_lines + 4

            for idx in range(4, maxlines):
                value = ''.join(random.choices(string.ascii_letters, k=62))
                fp.write(f'{idx},{value}{newline}')

        # now if we run again, we should get a warning
        results = self.run_dbt(['ls', '--resource-type', 'seed', '--select', 'state:modified', '--state', './state'])
        assert len(results) == 1
        assert results[0] == 'test.seed'

        with pytest.raises(CompilationError) as exc:
            self.run_dbt(['--warn-error', 'ls', '--resource-type', 'seed', '--select', 'state:modified', '--state', './state'])
        assert '>1MB' in str(exc.value)

        shutil.rmtree('./state')
        self.copy_state()

        # once it's in path mode, we don't mark it as modified if it changes
        with open('seeds/seed.csv', 'a') as fp:
            fp.write(f'{random},test{newline}')

        results = self.run_dbt(['ls', '--resource-type', 'seed', '--select', 'state:modified', '--state', './state'], expect_pass=True)
        assert len(results) == 0

    @use_profile('postgres')
    def test_postgres_changed_seed_config(self):
        results = self.run_dbt(['ls', '--resource-type', 'seed', '--select', 'state:modified', '--state', './state'], expect_pass=True)
        assert len(results) == 0

        self.use_default_project({'seeds': {'test': {'quote_columns': False}}})

        # quoting change -> seed changed
        results = self.run_dbt(['ls', '--resource-type', 'seed', '--select', 'state:modified', '--state', './state'])
        assert len(results) == 1
        assert results[0] == 'test.seed'

    @use_profile('postgres')
    def test_postgres_unrendered_config_same(self):
        results = self.run_dbt(['ls', '--resource-type', 'model', '--select', 'state:modified', '--state', './state'], expect_pass=True)
        assert len(results) == 0

        # although this is the default value, dbt will recognize it as a change
        # for previously-unconfigured models, because it's been explicitly set
        self.use_default_project({'models': {'test': {'materialized': 'view'}}})
        results = self.run_dbt(['ls', '--resource-type', 'model', '--select', 'state:modified', '--state', './state'])
        assert len(results) == 1
        assert results[0] == 'test.view_model'

    @use_profile('postgres')
    def test_postgres_changed_model_contents(self):
        results = self.run_dbt(['run', '--models', 'state:modified', '--state', './state'])
        assert len(results) == 0

        with open('models/table_model.sql') as fp:
            fp.readline()
            newline = fp.newlines

        with open('models/table_model.sql', 'w') as fp:
            fp.write("{{ config(materialized='table') }}")
            fp.write(newline)
            fp.write("select * from {{ ref('seed') }}")
            fp.write(newline)

        results = self.run_dbt(['run', '--models', 'state:modified', '--state', './state'])
        assert len(results) == 1
        assert results[0].node.name == 'table_model'

    @use_profile('postgres')
    def test_postgres_new_macro(self):
        with open('macros/macros.sql') as fp:
            fp.readline()
            newline = fp.newlines

        new_macro = '{% macro my_other_macro() %}{% endmacro %}' + newline

        # add a new macro to a new file
        with open('macros/second_macro.sql', 'w') as fp:
            fp.write(new_macro)

        results, stdout = self.run_dbt_and_capture(['run', '--models', 'state:modified', '--state', './state'])
        assert len(results) == 0

        os.remove('macros/second_macro.sql')
        # add a new macro to the existing file
        with open('macros/macros.sql', 'a') as fp:
            fp.write(new_macro)

        results, stdout = self.run_dbt_and_capture(['run', '--models', 'state:modified', '--state', './state'])
        assert len(results) == 0

    @use_profile('postgres')
    def test_postgres_changed_macro_contents(self):
        with open('macros/macros.sql') as fp:
            fp.readline()
            newline = fp.newlines

        # modify an existing macro
        with open('macros/macros.sql', 'w') as fp:
            fp.write("{% macro my_macro() %}")
            fp.write(newline)
            fp.write(" {% do log('in a macro', info=True) %}")
            fp.write(newline)
            fp.write('{% endmacro %}')
            fp.write(newline)

        # table_model calls this macro
        results, stdout = self.run_dbt_and_capture(['run', '--models', 'state:modified', '--state', './state'])
        assert len(results) == 1

    @use_profile('postgres')
    def test_postgres_changed_exposure(self):
        with open('models/exposures.yml', 'a') as fp:
            fp.write('      name: John Doe\n')

        results, stdout = self.run_dbt_and_capture(['run', '--models', '+state:modified', '--state', './state'])
        assert len(results) == 1
        assert results[0].node.name == 'view_model'

    @use_profile('postgres')
    def test_postgres_previous_version_manifest(self):
        # This tests that a different schema version in the file throws an error
        with self.assertRaises(IncompatibleSchemaError) as exc:
            results = self.run_dbt(['ls', '-s', 'state:modified', '--state', './previous_state'])
            self.assertEqual(exc.CODE, 10014)
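
Editor's note: a compact sketch of the state:modified selection flow exercised above, in pytest style. It assumes dbt.tests.util.run_dbt and write_file plus the copy_state helper sketched earlier; the model name and contents are illustrative:

from dbt.tests.util import run_dbt, write_file


class TestModifiedSelectorSketch:
    def test_detects_changed_model(self, project):
        run_dbt(["run"])
        copy_state(project.project_root)  # as sketched after the defer tests

        # nothing has changed yet, so state:modified selects nothing
        results = run_dbt(["ls", "--select", "state:modified", "--state", "state"])
        assert len(results) == 0

        # editing a model's contents marks it modified relative to the state manifest
        write_file("select 2 as id", project.project_root, "models", "table_model.sql")
        results = run_dbt(["ls", "--select", "state:modified", "--state", "state"])
        assert results == ["test.table_model"]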
@@ -1,434 +0,0 @@
from test.integration.base import DBTIntegrationTest, use_profile
import os
import random
import shutil
import string

import pytest


class TestRunResultsState(DBTIntegrationTest):
    @property
    def schema(self):
        return "run_results_state_062"

    @property
    def models(self):
        return "models"

    @property
    def project_config(self):
        return {
            'config-version': 2,
            'macro-paths': ['macros'],
            'seeds': {
                'test': {
                    'quote_columns': True,
                }
            }
        }

    def _symlink_test_folders(self):
        # dbt's normal symlink behavior breaks this test. Copy the files
        # so we can freely modify them.
        for entry in os.listdir(self.test_original_source_path):
            src = os.path.join(self.test_original_source_path, entry)
            tst = os.path.join(self.test_root_dir, entry)
            if entry in {'models', 'seeds', 'macros'}:
                shutil.copytree(src, tst)
            elif os.path.isdir(entry) or entry.endswith('.sql'):
                os.symlink(src, tst)

    def copy_state(self):
        assert not os.path.exists('state')
        os.makedirs('state')
        shutil.copyfile('target/manifest.json', 'state/manifest.json')
        shutil.copyfile('target/run_results.json', 'state/run_results.json')

    def setUp(self):
        super().setUp()
        self.run_dbt(['build'])
        self.copy_state()

    def rebuild_run_dbt(self, expect_pass=True):
        shutil.rmtree('./state')
        self.run_dbt(['build'], expect_pass=expect_pass)
        self.copy_state()

    @use_profile('postgres')
    def test_postgres_seed_run_results_state(self):
        shutil.rmtree('./state')
        self.run_dbt(['seed'])
        self.copy_state()
        results = self.run_dbt(['ls', '--resource-type', 'seed', '--select', 'result:success', '--state', './state'], expect_pass=True)
        assert len(results) == 1
        assert results[0] == 'test.seed'

        results = self.run_dbt(['ls', '--select', 'result:success', '--state', './state'])
        assert len(results) == 1
        assert results[0] == 'test.seed'

        results = self.run_dbt(['ls', '--select', 'result:success+', '--state', './state'])
        assert len(results) == 7
        assert set(results) == {'test.seed', 'test.table_model', 'test.view_model', 'test.ephemeral_model', 'test.not_null_view_model_id', 'test.unique_view_model_id', 'exposure:test.my_exposure'}

        with open('seeds/seed.csv') as fp:
            fp.readline()
            newline = fp.newlines
        with open('seeds/seed.csv', 'a') as fp:
            fp.write(f'\"\'\'3,carl{newline}')
        shutil.rmtree('./state')
        self.run_dbt(['seed'], expect_pass=False)
        self.copy_state()

        results = self.run_dbt(['ls', '--resource-type', 'seed', '--select', 'result:error', '--state', './state'], expect_pass=True)
        assert len(results) == 1
        assert results[0] == 'test.seed'

        results = self.run_dbt(['ls', '--select', 'result:error', '--state', './state'])
        assert len(results) == 1
        assert results[0] == 'test.seed'

        results = self.run_dbt(['ls', '--select', 'result:error+', '--state', './state'])
        assert len(results) == 7
        assert set(results) == {'test.seed', 'test.table_model', 'test.view_model', 'test.ephemeral_model', 'test.not_null_view_model_id', 'test.unique_view_model_id', 'exposure:test.my_exposure'}

        with open('seeds/seed.csv') as fp:
            fp.readline()
            newline = fp.newlines
        with open('seeds/seed.csv', 'a') as fp:
            # assume each line is ~2 bytes + len(name)
            target_size = 1*1024*1024
            line_size = 64

            num_lines = target_size // line_size

            maxlines = num_lines + 4

            for idx in range(4, maxlines):
                value = ''.join(random.choices(string.ascii_letters, k=62))
                fp.write(f'{idx},{value}{newline}')
        shutil.rmtree('./state')
        self.run_dbt(['seed'], expect_pass=False)
        self.copy_state()

        results = self.run_dbt(['ls', '--resource-type', 'seed', '--select', 'result:error', '--state', './state'], expect_pass=True)
        assert len(results) == 1
        assert results[0] == 'test.seed'

        results = self.run_dbt(['ls', '--select', 'result:error', '--state', './state'])
        assert len(results) == 1
        assert results[0] == 'test.seed'

        results = self.run_dbt(['ls', '--select', 'result:error+', '--state', './state'])
        assert len(results) == 7
        assert set(results) == {'test.seed', 'test.table_model', 'test.view_model', 'test.ephemeral_model', 'test.not_null_view_model_id', 'test.unique_view_model_id', 'exposure:test.my_exposure'}

    @use_profile('postgres')
    def test_postgres_build_run_results_state(self):
        results = self.run_dbt(['build', '--select', 'result:error', '--state', './state'])
        assert len(results) == 0

        with open('models/view_model.sql') as fp:
            fp.readline()
            newline = fp.newlines

        with open('models/view_model.sql', 'w') as fp:
            fp.write(newline)
            fp.write("select * from forced_error")
            fp.write(newline)

        self.rebuild_run_dbt(expect_pass=False)

        results = self.run_dbt(['build', '--select', 'result:error', '--state', './state'], expect_pass=False)
        assert len(results) == 3
        nodes = set([elem.node.name for elem in results])
        assert nodes == {'view_model', 'not_null_view_model_id', 'unique_view_model_id'}

        results = self.run_dbt(['ls', '--select', 'result:error', '--state', './state'])
        assert len(results) == 3
        assert set(results) == {'test.view_model', 'test.not_null_view_model_id', 'test.unique_view_model_id'}

        results = self.run_dbt(['build', '--select', 'result:error+', '--state', './state'], expect_pass=False)
        assert len(results) == 4
        nodes = set([elem.node.name for elem in results])
        assert nodes == {'table_model', 'view_model', 'not_null_view_model_id', 'unique_view_model_id'}

        results = self.run_dbt(['ls', '--select', 'result:error+', '--state', './state'])
        assert len(results) == 6  # includes exposure
        assert set(results) == {'test.table_model', 'test.view_model', 'test.ephemeral_model', 'test.not_null_view_model_id', 'test.unique_view_model_id', 'exposure:test.my_exposure'}

        # test failure on build tests
        # fail the unique test
        with open('models/view_model.sql', 'w') as fp:
            fp.write(newline)
            fp.write("select 1 as id union all select 1 as id")
            fp.write(newline)

        self.rebuild_run_dbt(expect_pass=False)

        results = self.run_dbt(['build', '--select', 'result:fail', '--state', './state'], expect_pass=False)
        assert len(results) == 1
        assert results[0].node.name == 'unique_view_model_id'

        results = self.run_dbt(['ls', '--select', 'result:fail', '--state', './state'])
        assert len(results) == 1
        assert results[0] == 'test.unique_view_model_id'

        results = self.run_dbt(['build', '--select', 'result:fail+', '--state', './state'], expect_pass=False)
        assert len(results) == 2
        nodes = set([elem.node.name for elem in results])
        assert nodes == {'table_model', 'unique_view_model_id'}

        results = self.run_dbt(['ls', '--select', 'result:fail+', '--state', './state'])
        assert len(results) == 1
        assert set(results) == {'test.unique_view_model_id'}

        # change the unique test severity from error to warn and reuse the same view_model.sql changes above
        with open('models/schema.yml', 'r') as f:
            filedata = f.read()
        newdata = filedata.replace('error', 'warn')
        with open('models/schema.yml', 'w') as f:
            f.write(newdata)

        self.rebuild_run_dbt(expect_pass=True)

        results = self.run_dbt(['build', '--select', 'result:warn', '--state', './state'], expect_pass=True)
        assert len(results) == 1
        assert results[0].node.name == 'unique_view_model_id'

        results = self.run_dbt(['ls', '--select', 'result:warn', '--state', './state'])
        assert len(results) == 1
        assert results[0] == 'test.unique_view_model_id'

        results = self.run_dbt(['build', '--select', 'result:warn+', '--state', './state'], expect_pass=True)
        assert len(results) == 2  # includes table_model to be run
        nodes = set([elem.node.name for elem in results])
        assert nodes == {'table_model', 'unique_view_model_id'}

        results = self.run_dbt(['ls', '--select', 'result:warn+', '--state', './state'])
        assert len(results) == 1
        assert set(results) == {'test.unique_view_model_id'}

    @use_profile('postgres')
    def test_postgres_run_run_results_state(self):
        results = self.run_dbt(['run', '--select', 'result:success', '--state', './state'], expect_pass=True)
        assert len(results) == 2
        assert results[0].node.name == 'view_model'
        assert results[1].node.name == 'table_model'

        # clear state and rerun upstream view model to test + operator
        shutil.rmtree('./state')
        self.run_dbt(['run', '--select', 'view_model'], expect_pass=True)
        self.copy_state()
        results = self.run_dbt(['run', '--select', 'result:success+', '--state', './state'], expect_pass=True)
        assert len(results) == 2
        assert results[0].node.name == 'view_model'
        assert results[1].node.name == 'table_model'

        # check we are starting from a place with 0 errors
        results = self.run_dbt(['run', '--select', 'result:error', '--state', './state'])
        assert len(results) == 0

        # force an error in the view model to test error and skipped states
        with open('models/view_model.sql') as fp:
            fp.readline()
            newline = fp.newlines

        with open('models/view_model.sql', 'w') as fp:
            fp.write(newline)
            fp.write("select * from forced_error")
            fp.write(newline)

        shutil.rmtree('./state')
        self.run_dbt(['run'], expect_pass=False)
        self.copy_state()

        # test single result selector on error
        results = self.run_dbt(['run', '--select', 'result:error', '--state', './state'], expect_pass=False)
        assert len(results) == 1
        assert results[0].node.name == 'view_model'

        # test + operator selection on error
        results = self.run_dbt(['run', '--select', 'result:error+', '--state', './state'], expect_pass=False)
        assert len(results) == 2
        assert results[0].node.name == 'view_model'
        assert results[1].node.name == 'table_model'

        # single result selector on skipped. Expect this to pass because the underlying view was already defined above
        results = self.run_dbt(['run', '--select', 'result:skipped', '--state', './state'], expect_pass=True)
        assert len(results) == 1
        assert results[0].node.name == 'table_model'

        # add a downstream model that depends on table_model for the skipped+ selector
        with open('models/table_model_downstream.sql', 'w') as fp:
            fp.write("select * from {{ref('table_model')}}")
|
||||
|
||||
shutil.rmtree('./state')
|
||||
self.run_dbt(['run'], expect_pass=False)
|
||||
self.copy_state()
|
||||
|
||||
results = self.run_dbt(['run', '--select', 'result:skipped+', '--state', './state'], expect_pass=True)
|
||||
assert len(results) == 2
|
||||
assert results[0].node.name == 'table_model'
|
||||
assert results[1].node.name == 'table_model_downstream'
|
||||
|
||||
|
||||
@use_profile('postgres')
|
||||
def test_postgres_test_run_results_state(self):
|
||||
# run passed nodes
|
||||
results = self.run_dbt(['test', '--select', 'result:pass', '--state', './state'], expect_pass=True)
|
||||
assert len(results) == 2
|
||||
nodes = set([elem.node.name for elem in results])
|
||||
assert nodes == {'unique_view_model_id', 'not_null_view_model_id'}
|
||||
|
||||
# run passed nodes with + operator
|
||||
results = self.run_dbt(['test', '--select', 'result:pass+', '--state', './state'], expect_pass=True)
|
||||
assert len(results) == 2
|
||||
nodes = set([elem.node.name for elem in results])
|
||||
assert nodes == {'unique_view_model_id', 'not_null_view_model_id'}
|
||||
|
||||
# update view model to generate a failure case
|
||||
os.remove('./models/view_model.sql')
|
||||
with open('models/view_model.sql', 'w') as fp:
|
||||
fp.write("select 1 as id union all select 1 as id")
|
||||
|
||||
self.rebuild_run_dbt(expect_pass=False)
|
||||
|
||||
# test with failure selector
|
||||
results = self.run_dbt(['test', '--select', 'result:fail', '--state', './state'], expect_pass=False)
|
||||
assert len(results) == 1
|
||||
assert results[0].node.name == 'unique_view_model_id'
|
||||
|
||||
# test with failure selector and + operator
|
||||
results = self.run_dbt(['test', '--select', 'result:fail+', '--state', './state'], expect_pass=False)
|
||||
assert len(results) == 1
|
||||
assert results[0].node.name == 'unique_view_model_id'
|
||||
|
||||
# change the unique test severity from error to warn and reuse the same view_model.sql changes above
|
||||
with open('models/schema.yml', 'r+') as f:
|
||||
filedata = f.read()
|
||||
newdata = filedata.replace('error','warn')
|
||||
f.seek(0)
|
||||
f.write(newdata)
|
||||
f.truncate()
|
||||
|
||||
# rebuild - expect_pass = True because we changed the error to a warning this time around
|
||||
self.rebuild_run_dbt(expect_pass=True)
|
||||
|
||||
# test with warn selector
|
||||
results = self.run_dbt(['test', '--select', 'result:warn', '--state', './state'], expect_pass=True)
|
||||
assert len(results) == 1
|
||||
assert results[0].node.name == 'unique_view_model_id'
|
||||
|
||||
# test with warn selector and + operator
|
||||
results = self.run_dbt(['test', '--select', 'result:warn+', '--state', './state'], expect_pass=True)
|
||||
assert len(results) == 1
|
||||
assert results[0].node.name == 'unique_view_model_id'
|
||||
|
||||
|
||||
@use_profile('postgres')
|
||||
def test_postgres_concurrent_selectors_run_run_results_state(self):
|
||||
results = self.run_dbt(['run', '--select', 'state:modified+', 'result:error+', '--state', './state'])
|
||||
assert len(results) == 0
|
||||
|
||||
# force an error on a dbt model
|
||||
with open('models/view_model.sql') as fp:
|
||||
fp.readline()
|
||||
newline = fp.newlines
|
||||
|
||||
with open('models/view_model.sql', 'w') as fp:
|
||||
fp.write(newline)
|
||||
fp.write("select * from forced_error")
|
||||
fp.write(newline)
|
||||
|
||||
shutil.rmtree('./state')
|
||||
self.run_dbt(['run'], expect_pass=False)
|
||||
self.copy_state()
|
||||
|
||||
# modify another dbt model
|
||||
with open('models/table_model_modified_example.sql', 'w') as fp:
|
||||
fp.write(newline)
|
||||
fp.write("select * from forced_error")
|
||||
fp.write(newline)
|
||||
|
||||
results = self.run_dbt(['run', '--select', 'state:modified+', 'result:error+', '--state', './state'], expect_pass=False)
|
||||
assert len(results) == 3
|
||||
nodes = set([elem.node.name for elem in results])
|
||||
assert nodes == {'view_model', 'table_model_modified_example', 'table_model'}
|
||||
|
||||
|
||||
@use_profile('postgres')
|
||||
def test_postgres_concurrent_selectors_test_run_results_state(self):
|
||||
# create failure test case for result:fail selector
|
||||
os.remove('./models/view_model.sql')
|
||||
with open('./models/view_model.sql', 'w') as f:
|
||||
f.write('select 1 as id union all select 1 as id union all select null as id')
|
||||
|
||||
# run dbt build again to trigger test errors
|
||||
self.rebuild_run_dbt(expect_pass=False)
|
||||
|
||||
# get the failures from
|
||||
results = self.run_dbt(['test', '--select', 'result:fail', '--exclude', 'not_null_view_model_id', '--state', './state'], expect_pass=False)
|
||||
assert len(results) == 1
|
||||
nodes = set([elem.node.name for elem in results])
|
||||
assert nodes == {'unique_view_model_id'}
|
||||
|
||||
|
||||
@use_profile('postgres')
|
||||
def test_postgres_concurrent_selectors_build_run_results_state(self):
|
||||
results = self.run_dbt(['build', '--select', 'state:modified+', 'result:error+', '--state', './state'])
|
||||
assert len(results) == 0
|
||||
|
||||
# force an error on a dbt model
|
||||
with open('models/view_model.sql') as fp:
|
||||
fp.readline()
|
||||
newline = fp.newlines
|
||||
|
||||
with open('models/view_model.sql', 'w') as fp:
|
||||
fp.write(newline)
|
||||
fp.write("select * from forced_error")
|
||||
fp.write(newline)
|
||||
|
||||
self.rebuild_run_dbt(expect_pass=False)
|
||||
|
||||
# modify another dbt model
|
||||
with open('models/table_model_modified_example.sql', 'w') as fp:
|
||||
fp.write(newline)
|
||||
fp.write("select * from forced_error")
|
||||
fp.write(newline)
|
||||
|
||||
results = self.run_dbt(['build', '--select', 'state:modified+', 'result:error+', '--state', './state'], expect_pass=False)
|
||||
assert len(results) == 5
|
||||
nodes = set([elem.node.name for elem in results])
|
||||
assert nodes == {'table_model_modified_example', 'view_model', 'table_model', 'not_null_view_model_id', 'unique_view_model_id'}
|
||||
|
||||
# create failure test case for result:fail selector
|
||||
os.remove('./models/view_model.sql')
|
||||
with open('./models/view_model.sql', 'w') as f:
|
||||
f.write('select 1 as id union all select 1 as id')
|
||||
|
||||
# create error model case for result:error selector
|
||||
with open('./models/error_model.sql', 'w') as f:
|
||||
f.write('select 1 as id from not_exists')
|
||||
|
||||
# create something downstream from the error model to rerun
|
||||
with open('./models/downstream_of_error_model.sql', 'w') as f:
|
||||
f.write('select * from {{ ref("error_model") }} )')
|
||||
|
||||
# regenerate build state
|
||||
self.rebuild_run_dbt(expect_pass=False)
|
||||
|
||||
# modify model again to trigger the state:modified selector
|
||||
with open('models/table_model_modified_example.sql', 'w') as fp:
|
||||
fp.write(newline)
|
||||
fp.write("select * from forced_another_error")
|
||||
fp.write(newline)
|
||||
|
||||
results = self.run_dbt(['build', '--select', 'state:modified+', 'result:error+', 'result:fail+', '--state', './state'], expect_pass=False)
|
||||
assert len(results) == 5
|
||||
nodes = set([elem.node.name for elem in results])
|
||||
assert nodes == {'error_model', 'downstream_of_error_model', 'table_model_modified_example', 'table_model', 'unique_view_model_id'}
|
||||
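
The workflow these assertions automate can be reproduced by hand: run a command, snapshot the artifacts in target/ as the comparison state, then select against the recorded statuses. A minimal sketch in Python (the paths and invocation here are assumptions, not part of the test suite):

import shutil
import subprocess

# Produce target/run_results.json, then snapshot it as the comparison state.
subprocess.run(["dbt", "run"], check=False)
shutil.copytree("target", "state", dirs_exist_ok=True)

# Re-run only the nodes that errored in the snapshotted run, plus their children.
subprocess.run(
    ["dbt", "run", "--select", "result:error+", "--state", "./state"],
    check=False,
)
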
@@ -35,7 +35,6 @@ def basic_uncompiled_model():
        refs=[],
        sources=[],
        metrics=[],
        entities=[],
        depends_on=DependsOn(),
        deferred=False,
        description='',
@@ -68,7 +67,6 @@ def basic_compiled_model():
        refs=[],
        sources=[],
        metrics=[],
        entities=[],
        depends_on=DependsOn(),
        deferred=True,
        description='',
@@ -125,7 +123,6 @@ def basic_uncompiled_dict():
        'refs': [],
        'sources': [],
        'metrics': [],
        'entities': [],
        'depends_on': {'macros': [], 'nodes': []},
        'database': 'test_db',
        'deferred': False,
@@ -175,7 +172,6 @@ def basic_compiled_dict():
        'refs': [],
        'sources': [],
        'metrics': [],
        'entities': [],
        'depends_on': {'macros': [], 'nodes': []},
        'database': 'test_db',
        'deferred': True,
@@ -363,7 +359,6 @@ def basic_uncompiled_schema_test_node():
        refs=[],
        sources=[],
        metrics=[],
        entities=[],
        deferred=False,
        depends_on=DependsOn(),
        description='',
@@ -397,7 +392,6 @@ def basic_compiled_schema_test_node():
        refs=[],
        sources=[],
        metrics=[],
        entities=[],
        depends_on=DependsOn(),
        deferred=False,
        description='',
@@ -436,7 +430,6 @@ def basic_uncompiled_schema_test_dict():
        'refs': [],
        'sources': [],
        'metrics': [],
        'entities': [],
        'depends_on': {'macros': [], 'nodes': []},
        'database': 'test_db',
        'description': '',
@@ -487,7 +480,6 @@ def basic_compiled_schema_test_dict():
        'refs': [],
        'sources': [],
        'metrics': [],
        'entities': [],
        'depends_on': {'macros': [], 'nodes': []},
        'deferred': False,
        'database': 'test_db',

@@ -10,8 +10,6 @@ from dbt.contracts.graph.model_config import (
    SnapshotConfig,
    SourceConfig,
    ExposureConfig,
    MetricConfig,
    EntityConfig,
    EmptySnapshotConfig,
    Hook,
)
@@ -26,7 +24,6 @@ from dbt.contracts.graph.nodes import (
    Macro,
    Exposure,
    Metric,
    Entity,
    SeedNode,
    Docs,
    MacroDependsOn,
@@ -142,7 +139,6 @@ def base_parsed_model_dict():
        'refs': [],
        'sources': [],
        'metrics': [],
        'entities': [],
        'depends_on': {'macros': [], 'nodes': []},
        'database': 'test_db',
        'description': '',
@@ -189,7 +185,6 @@ def basic_parsed_model_object():
        refs=[],
        sources=[],
        metrics=[],
        entities=[],
        depends_on=DependsOn(),
        description='',
        database='test_db',
@@ -240,7 +235,6 @@ def complex_parsed_model_dict():
        'refs': [],
        'sources': [],
        'metrics': [],
        'entities': [],
        'depends_on': {'macros': [], 'nodes': ['model.test.bar']},
        'database': 'test_db',
        'deferred': True,
@@ -298,7 +292,6 @@ def complex_parsed_model_object():
        refs=[],
        sources=[],
        metrics=[],
        entities=[],
        depends_on=DependsOn(nodes=['model.test.bar']),
        deferred=True,
        description='My parsed node',
@@ -721,7 +714,6 @@ def patched_model_object():
        refs=[],
        sources=[],
        metrics=[],
        entities=[],
        depends_on=DependsOn(),
        description='The foo model',
        database='test_db',
@@ -781,7 +773,6 @@ def base_parsed_hook_dict():
        'refs': [],
        'sources': [],
        'metrics': [],
        'entities': [],
        'depends_on': {'macros': [], 'nodes': []},
        'database': 'test_db',
        'deferred': False,
@@ -828,7 +819,6 @@ def base_parsed_hook_object():
        refs=[],
        sources=[],
        metrics=[],
        entities=[],
        depends_on=DependsOn(),
        description='',
        deferred=False,
@@ -859,7 +849,6 @@ def complex_parsed_hook_dict():
        'refs': [],
        'sources': [],
        'metrics': [],
        'entities': [],
        'depends_on': {'macros': [], 'nodes': ['model.test.bar']},
        'deferred': False,
        'database': 'test_db',
@@ -917,7 +906,6 @@ def complex_parsed_hook_object():
        refs=[],
        sources=[],
        metrics=[],
        entities=[],
        depends_on=DependsOn(nodes=['model.test.bar']),
        description='My parsed node',
        deferred=False,
@@ -1012,7 +1000,6 @@ def basic_parsed_schema_test_dict():
        'refs': [],
        'sources': [],
        'metrics': [],
        'entities': [],
        'depends_on': {'macros': [], 'nodes': []},
        'deferred': False,
        'database': 'test_db',
@@ -1059,7 +1046,6 @@ def basic_parsed_schema_test_object():
        refs=[],
        sources=[],
        metrics=[],
        entities=[],
        depends_on=DependsOn(),
        description='',
        database='test_db',
@@ -1089,7 +1075,6 @@ def complex_parsed_schema_test_dict():
        'refs': [],
        'sources': [],
        'metrics': [],
        'entities': [],
        'depends_on': {'macros': [], 'nodes': ['model.test.bar']},
        'database': 'test_db',
        'deferred': False,
@@ -1153,7 +1138,6 @@ def complex_parsed_schema_test_object():
        refs=[],
        sources=[],
        metrics=[],
        entities=[],
        depends_on=DependsOn(nodes=['model.test.bar']),
        description='My parsed node',
        database='test_db',
@@ -1448,7 +1432,6 @@ def basic_timestamp_snapshot_dict():
        'refs': [],
        'sources': [],
        'metrics': [],
        'entities': [],
        'depends_on': {'macros': [], 'nodes': []},
        'deferred': False,
        'database': 'test_db',
@@ -1506,7 +1489,6 @@ def basic_timestamp_snapshot_object():
        refs=[],
        sources=[],
        metrics=[],
        entities=[],
        depends_on=DependsOn(),
        description='',
        database='test_db',
@@ -1555,7 +1537,6 @@ def basic_intermediate_timestamp_snapshot_object():
        refs=[],
        sources=[],
        metrics=[],
        entities=[],
        depends_on=DependsOn(),
        description='',
        database='test_db',
@@ -1591,7 +1572,6 @@ def basic_check_snapshot_dict():
        'refs': [],
        'sources': [],
        'metrics': [],
        'entities': [],
        'depends_on': {'macros': [], 'nodes': []},
        'database': 'test_db',
        'deferred': False,
@@ -1649,7 +1629,6 @@ def basic_check_snapshot_object():
        refs=[],
        sources=[],
        metrics=[],
        entities=[],
        depends_on=DependsOn(),
        description='',
        database='test_db',
@@ -1698,7 +1677,6 @@ def basic_intermediate_check_snapshot_object():
        refs=[],
        sources=[],
        metrics=[],
        entities=[],
        depends_on=DependsOn(),
        description='',
        database='test_db',
@@ -2137,7 +2115,6 @@ def basic_parsed_exposure_dict():
        'refs': [],
        'sources': [],
        'metrics': [],
        'entities': [],
        'fqn': ['test', 'exposures', 'my_exposure'],
        'unique_id': 'exposure.test.my_exposure',
        'package_name': 'test',
@@ -2200,7 +2177,6 @@ def complex_parsed_exposure_dict():
        'refs': [],
        'sources': [],
        'metrics': [],
        'entities': [],
        'fqn': ['test', 'exposures', 'my_exposure'],
        'unique_id': 'exposure.test.my_exposure',
        'package_name': 'test',
@@ -2275,24 +2251,47 @@ def test_compare_changed_exposure(func, basic_parsed_exposure_object):


# METRICS
@pytest.fixture
def minimal_parsed_metric_dict():
    return {
        'name': 'my_metric',
        'type': 'count',
        'timestamp': 'created_at',
        'time_grains': ['day'],
        'fqn': ['test', 'metrics', 'my_metric'],
        'unique_id': 'metric.test.my_metric',
        'package_name': 'test',
        'meta': {},
        'tags': [],
        'path': 'models/something.yml',
        'original_file_path': 'models/something.yml',
        'description': '',
        'created_at': 1.0,
    }


@pytest.fixture
def basic_parsed_metric_dict():
    return {
        'name': 'new_customers',
        'label': 'New Customers',
        'model': "ref('dim_customers')",
        'model': 'ref("dim_customers")',
        'calculation_method': 'count',
        'expression': 'user_id',
        'timestamp': 'signup_date',
        'time_grains': ['day', 'week', 'month'],
        'dimensions': ['plan', 'country'],
        'filters': [],
        'filters': [
            {
                "field": "is_paying",
                "value": "true",
                "operator": "=",
            }
        ],
        'resource_type': 'metric',
        'refs': [['dim_customers']],
        'sources': [],
        'metrics': [],
        'entities': [],
        'fqn': ['test', 'metrics', 'my_metric'],
        'unique_id': 'metric.test.my_metric',
        'package_name': 'test',
@@ -2301,98 +2300,26 @@ def basic_parsed_metric_dict():
        'description': '',
        'meta': {},
        'tags': [],
        'created_at': 1,
        'depends_on': {
            'nodes': [],
            'macros': [],
        },
        'config': {
            'enabled': True,
        },
        'unrendered_config': {},
    }


@pytest.fixture
def basic_parsed_metric_object():
    return Metric(
        name='new_customers',
        resource_type=NodeType.Metric,
        model="ref('dim_customers')",
        label='New Customers',
        calculation_method='count',
        expression="user_id",
        timestamp='signup_date',
        time_grains=['day', 'week', 'month'],
        dimensions=['plan', 'country'],
        filters=[],
        refs=[['dim_customers']],
        fqn=['test', 'metrics', 'my_metric'],
        unique_id='metric.test.my_metric',
        package_name='test',
        path='models/something.yml',
        original_file_path='models/something.yml',
        description='',
        meta={},
        tags=[],
        config=MetricConfig(),
        unrendered_config={},
    )

def test_simple_parsed_metric(basic_parsed_metric_dict, basic_parsed_metric_object):
    assert_symmetric(basic_parsed_metric_object, basic_parsed_metric_dict, Metric)

# ENTITIES

@pytest.fixture
def basic_parsed_entity_dict():
    return {
        'name': 'my_entity',
        'model': "ref('my_model')",
        'dimensions': [],
        'resource_type': 'entity',
        'refs': [['my_model']],
        'sources': [],
        'metrics': [],
        'entities': [],
        'fqn': ['test', 'entities', 'my_entity'],
        'unique_id': 'entity.test.my_entity',
        'package_name': 'test',
        'path': 'models/something.yml',
        'original_file_path': 'models/something.yml',
        'description': '',
        'meta': {},
        'tags': [],
        'created_at': 1.0,
        'depends_on': {
            'nodes': [],
            'macros': [],
        },
        'config': {
            'enabled': True,
        },
        'unrendered_config': {},
    }


@pytest.fixture
def basic_parsed_entity_object():
    return Entity(
        name='my_entity',
        model="ref('my_model')",
        dimensions=[],
        resource_type=NodeType.Entity,
        fqn=['test', 'entities', 'my_entity'],
        refs=[['my_model']],
        unique_id='entity.test.my_entity',
def basic_parsed_metric_object():
    return Metric(
        name='my_metric',
        resource_type=NodeType.Metric,
        calculation_method='count',
        fqn=['test', 'metrics', 'my_metric'],
        unique_id='metric.test.my_metric',
        package_name='test',
        path='models/something.yml',
        original_file_path='models/something.yml',
        description='',
        meta={},
        tags=[],
        config=EntityConfig(),
        unrendered_config={},
        tags=[]
    )

def test_simple_parsed_entity(basic_parsed_entity_dict, basic_parsed_entity_object):
    assert_symmetric(basic_parsed_entity_object, basic_parsed_entity_dict, Entity)
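
The assert_symmetric checks above verify that a node serializes to its dict form and parses back unchanged. A minimal sketch of the idea with a stand-in dataclass rather than dbt's contract classes (FakeNode and assert_symmetric_sketch are hypothetical names):

from dataclasses import asdict, dataclass


@dataclass
class FakeNode:
    name: str
    calculation_method: str


def assert_symmetric_sketch(obj, dct):
    # Round trip: object -> dict -> object must be lossless.
    assert asdict(obj) == dct
    assert FakeNode(**dct) == obj


assert_symmetric_sketch(
    FakeNode(name='my_metric', calculation_method='count'),
    {'name': 'my_metric', 'calculation_method': 'count'},
)
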
@@ -7,8 +7,7 @@ from dbt.contracts.graph.unparsed import (
    FreshnessThreshold, Quoting, UnparsedSourceDefinition,
    UnparsedSourceTableDefinition, UnparsedDocumentationFile, UnparsedColumn,
    UnparsedNodeUpdate, Docs, UnparsedExposure, MaturityType, ExposureOwner,
    ExposureType, UnparsedMetric, UnparsedEntity, MetricFilter, MetricTime,
    MetricTimePeriod
    ExposureType, UnparsedMetric, MetricFilter, MetricTime, MetricTimePeriod
)
from dbt.contracts.results import FreshnessStatus
from dbt.node_types import NodeType
@@ -784,49 +783,3 @@ class TestUnparsedMetric(ContractTestCase):
        tst = self.get_ok_dict()
        tst['tags'] = [123]
        self.assert_fails_validation(tst)




class TestUnparsedEntity(ContractTestCase):
    ContractType = UnparsedEntity

    def get_ok_dict(self):
        return {
            'name': 'my_entity',
            'model': "ref('my_model')",
            'description': 'my model',
            'dimensions': ['plan', 'country'],
            'config': {},
            'tags': [],
            'meta': {},
        }

    def test_ok(self):
        metric = self.ContractType(
            name='my_entity',
            model="ref('my_model')",
            description="my model",
            dimensions=['plan', 'country'],
            config={},
            tags=[],
            meta={},
        )
        dct = self.get_ok_dict()
        self.assert_symmetric(metric, dct)
        pickle.loads(pickle.dumps(metric))

    def test_bad_entity_no_name(self):
        tst = self.get_ok_dict()
        del tst['name']
        self.assert_fails_validation(tst)

    def test_bad_entity_no_model(self):
        tst = self.get_ok_dict()
        del tst['model']
        self.assert_fails_validation(tst)

    def test_bad_tags(self):
        tst = self.get_ok_dict()
        tst['tags'] = [123]
        self.assert_fails_validation(tst)

@@ -22,8 +22,7 @@ from dbt.contracts.graph.nodes import (
    SeedNode,
    SourceDefinition,
    Exposure,
    Metric,
    Entity
    Metric
)

from dbt.contracts.graph.unparsed import (
@@ -43,7 +42,7 @@ from .utils import MockMacro, MockDocumentation, MockSource, MockNode, MockMater


REQUIRED_PARSED_NODE_KEYS = frozenset({
    'alias', 'tags', 'config', 'unique_id', 'refs', 'sources', 'metrics', 'entities', 'meta',
    'alias', 'tags', 'config', 'unique_id', 'refs', 'sources', 'metrics', 'meta',
    'depends_on', 'database', 'schema', 'name', 'resource_type',
    'package_name', 'path', 'original_file_path', 'raw_code', 'language',
    'description', 'columns', 'fqn', 'build_path', 'compiled_path', 'patch_path', 'docs',
@@ -124,7 +123,6 @@ class ManifestTest(unittest.TestCase):
                refs=[['multi']],
                sources=[],
                metrics=[],
                entities=[],
                fqn=['root', 'my_metric'],
                unique_id='metric.root.my_metric',
                package_name='root',
@@ -133,28 +131,6 @@ class ManifestTest(unittest.TestCase):
            )
        }

        self.entities = {
            'entity.root.my_entity': Entity(
                name='my_entity',
                model='ref("multi")',
                description="my entity",
                dimensions=['plan', 'country'],
                resource_type=NodeType.Entity,
                meta={'is_okr': True},
                tags=['okrs'],
                depends_on=DependsOn(nodes=['model.root.multi']),
                refs=[['multi']],
                sources=[],
                metrics=[],
                entities=[],
                fqn=['root', 'my_entity'],
                unique_id='entity.root.my_entity',
                package_name='root',
                path='my_entity.yml',
                original_file_path='my_entity.yml'
            )
        }

        self.nested_nodes = {
            'model.snowplow.events': ModelNode(
                name='events',
@@ -168,7 +144,6 @@ class ManifestTest(unittest.TestCase):
                refs=[],
                sources=[],
                metrics=[],
                entities=[],
                depends_on=DependsOn(),
                config=self.model_config,
                tags=[],
@@ -191,7 +166,6 @@ class ManifestTest(unittest.TestCase):
                refs=[],
                sources=[],
                metrics=[],
                entities=[],
                depends_on=DependsOn(),
                config=self.model_config,
                tags=[],
@@ -214,7 +188,6 @@ class ManifestTest(unittest.TestCase):
                refs=[['events']],
                sources=[],
                metrics=[],
                entities=[],
                depends_on=DependsOn(nodes=['model.root.events']),
                config=self.model_config,
                tags=[],
@@ -237,7 +210,6 @@ class ManifestTest(unittest.TestCase):
                refs=[['events']],
                sources=[],
                metrics=[],
                entities=[],
                depends_on=DependsOn(nodes=['model.root.dep']),
                config=self.model_config,
                tags=[],
@@ -260,7 +232,6 @@ class ManifestTest(unittest.TestCase):
                refs=[['events']],
                sources=[],
                metrics=[],
                entities=[],
                depends_on=DependsOn(nodes=['model.root.events']),
                config=self.model_config,
                tags=[],
@@ -283,7 +254,6 @@ class ManifestTest(unittest.TestCase):
                refs=[['events']],
                sources=[],
                metrics=[],
                entities=[],
                depends_on=DependsOn(nodes=['model.root.nested', 'model.root.sibling']),
                config=self.model_config,
                tags=[],
@@ -318,8 +288,6 @@ class ManifestTest(unittest.TestCase):
            exposure.validate(exposure.to_dict(omit_none=True))
        for metric in self.metrics.values():
            metric.validate(metric.to_dict(omit_none=True))
        for entity in self.entities.values():
            entity.validate(entity.to_dict(omit_none=True))
        for node in self.nested_nodes.values():
            node.validate(node.to_dict(omit_none=True))
        for source in self.sources.values():
@@ -335,7 +303,7 @@ class ManifestTest(unittest.TestCase):
    def test__no_nodes(self):
        manifest = Manifest(
            nodes={}, sources={}, macros={}, docs={}, disabled={}, files={},
            exposures={}, metrics={}, selectors={}, entities={},
            exposures={}, metrics={}, selectors={},
            metadata=ManifestMetadata(generated_at=datetime.utcnow()),
        )

@@ -348,7 +316,6 @@ class ManifestTest(unittest.TestCase):
            'macros': {},
            'exposures': {},
            'metrics': {},
            'entities': {},
            'selectors': {},
            'parent_map': {},
            'child_map': {},
@@ -369,7 +336,7 @@ class ManifestTest(unittest.TestCase):
        nodes = copy.copy(self.nested_nodes)
        manifest = Manifest(
            nodes=nodes, sources={}, macros={}, docs={}, disabled={}, files={},
            exposures={}, metrics={}, entities={}, selectors={},
            exposures={}, metrics={}, selectors={},
            metadata=ManifestMetadata(generated_at=datetime.utcnow()),
        )
        serialized = manifest.writable_manifest().to_dict(omit_none=True)
@@ -435,23 +402,20 @@ class ManifestTest(unittest.TestCase):
    def test__build_flat_graph(self):
        exposures = copy.copy(self.exposures)
        metrics = copy.copy(self.metrics)
        entities = copy.copy(self.entities)
        nodes = copy.copy(self.nested_nodes)
        sources = copy.copy(self.sources)
        manifest = Manifest(nodes=nodes, sources=sources, macros={}, docs={},
                            disabled={}, files={}, exposures=exposures,
                            metrics=metrics, entities=entities, selectors={})
                            metrics=metrics, selectors={})
        manifest.build_flat_graph()
        flat_graph = manifest.flat_graph
        flat_exposures = flat_graph['exposures']
        flat_metrics = flat_graph['metrics']
        flat_entities = flat_graph['entities']
        flat_nodes = flat_graph['nodes']
        flat_sources = flat_graph['sources']
        self.assertEqual(set(flat_graph), set(['exposures', 'nodes', 'sources', 'metrics', 'entities']))
        self.assertEqual(set(flat_graph), set(['exposures', 'nodes', 'sources', 'metrics']))
        self.assertEqual(set(flat_exposures), set(self.exposures))
        self.assertEqual(set(flat_metrics), set(self.metrics))
        self.assertEqual(set(flat_entities), set(self.entities))
        self.assertEqual(set(flat_nodes), set(self.nested_nodes))
        self.assertEqual(set(flat_sources), set(self.sources))
        for node in flat_nodes.values():
@@ -502,7 +466,6 @@ class ManifestTest(unittest.TestCase):
            'macros': {},
            'exposures': {},
            'metrics': {},
            'entities': {},
            'selectors': {},
            'parent_map': {},
            'child_map': {},
@@ -546,14 +509,11 @@ class ManifestTest(unittest.TestCase):
        )
        manifest = Manifest(nodes=nodes, sources=self.sources, macros={}, docs={},
                            disabled={}, files={}, exposures=self.exposures,
                            metrics=self.metrics, entities=self.entities, selectors={})
                            metrics=self.metrics, selectors={})
        expect = {
            'metrics': frozenset([
                ('root', 'my_metric')
            ]),
            'entities': frozenset([
                ('root', 'my_entity')
            ]),
            'exposures': frozenset([
                ('root', 'my_exposure')
            ]),
@@ -588,7 +548,6 @@ class ManifestTest(unittest.TestCase):
                refs=[],
                sources=[],
                metrics=[],
                entities=[],
                depends_on=DependsOn(),
                config=self.model_config,
                tags=[],
@@ -776,7 +735,6 @@ class MixedManifestTest(unittest.TestCase):
            'sources': {},
            'exposures': {},
            'metrics': {},
            'entities': {},
            'selectors': {},
            'parent_map': {},
            'child_map': {},
@@ -866,7 +824,7 @@ class MixedManifestTest(unittest.TestCase):
        manifest.build_flat_graph()
        flat_graph = manifest.flat_graph
        flat_nodes = flat_graph['nodes']
        self.assertEqual(set(flat_graph), set(['exposures', 'metrics', 'entities', 'nodes', 'sources']))
        self.assertEqual(set(flat_graph), set(['exposures', 'metrics', 'nodes', 'sources']))
        self.assertEqual(set(flat_nodes), set(self.nested_nodes))
        compiled_count = 0
        for node in flat_nodes.values():
@@ -912,7 +870,6 @@ class TestManifestSearch(unittest.TestCase):
        files={},
        exposures={},
        metrics={},
        entities={},
        selectors={},
    )

@@ -935,7 +892,6 @@ def make_manifest(nodes=[], sources=[], macros=[], docs=[]):
        files={},
        exposures={},
        metrics={},
        entities={},
        selectors={},
    )


@@ -15,7 +15,6 @@ node_type_pluralizations = {
    NodeType.Macro: "macros",
    NodeType.Exposure: "exposures",
    NodeType.Metric: "metrics",
    NodeType.Entity: "entities",
}
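
node_type_pluralizations pins down the manifest key each resource type is stored under; NodeType exposes the same mapping through its pluralize method. A small usage sketch (assuming a dbt-core checkout is importable):

from dbt.node_types import NodeType

# Each NodeType pluralizes to the manifest key it is stored under.
assert NodeType.Metric.pluralize() == "metrics"
assert NodeType.Exposure.pluralize() == "exposures"
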
4 testing-project/postgres/.gitignore vendored
@@ -1,4 +0,0 @@

target/
dbt_packages/
logs/
@@ -1,34 +0,0 @@
# Name your project! Project names should contain only lowercase characters
# and underscores. A good package name should reflect your organization's
# name or the intended use of these models
name: 'postgres'
version: '1.0.0'
config-version: 2

# This setting configures which "profile" dbt uses for this project.
profile: 'postgres'

# These configurations specify where dbt should look for different types of files.
# The `model-paths` config, for example, states that models in this project can be
# found in the "models/" directory. You probably won't need to change these!
model-paths: ["models"]
analysis-paths: ["analyses"]
test-paths: ["tests"]
seed-paths: ["seeds"]
macro-paths: ["macros"]
snapshot-paths: ["snapshots"]

target-path: "target"  # directory which will store compiled SQL files
clean-targets:  # directories to be removed by `dbt clean`
  - "target"
  - "dbt_packages"


# Configuring models
# Full documentation: https://docs.getdbt.com/docs/configuring-models

# In this example config, we tell dbt to build all models in the example/
# directory as views. These settings can be overridden in the individual model
# files using the `{{ config(...) }}` macro.
models:
  postgres:
@@ -1,21 +0,0 @@
with orders as (

    select * from {{ ref('fact_orders') }}

)
,
customers as (

    select * from {{ ref('dim_customers') }}

)
,
final as (

    select *
    from orders
    left join customers using (customer_id)

)

select * from final
@@ -1 +0,0 @@
select * from {{ref('dim_customers_source')}}
@@ -1,21 +0,0 @@
version: 2
models:
  - name: dim_customers
    columns:
      - name: customer_id
        description: TBD

      - name: first_name
        description: TBD

      - name: last_name
        description: TBD

      - name: email
        description: TBD

      - name: gender
        description: TBD

      - name: is_new_customer
        description: TBD
@@ -1,17 +0,0 @@
# version: 2
# entities:

#   - name: customers
#     model: ref('dim_customers')
#     description: "Our customers entity"

#     dimensions:
#       - dimension_1

#     - name: first_name_v3_testing
#       description: string
#       column_name: first_name
#       date_type: string
#       default_timestamp: true
#       primary_key: true
#       time_grains: [day, week, month]
@@ -1,8 +0,0 @@
version: 2
entities:

  - name: orders
    model: ref('fact_orders')
    description: "Our orders entity"
    dimensions:
      - dimension_1
@@ -1,4 +0,0 @@
select
    *
    ,round(order_total - (order_total/2)) as discount_total
from {{ref('fact_orders_source')}}
@@ -1,21 +0,0 @@
version: 2
models:
  - name: fact_orders
    columns:
      - name: order_id
        description: TBD

      - name: order_country
        description: TBD

      - name: order_total
        description: TBD

      - name: had_discount
        description: TBD

      - name: customer_id
        description: TBD

      - name: order_date
        description: TBD
@@ -1,8 +0,0 @@
version: 2
metrics:
  - name: revenue
    label: Revenue
    model: ref('fact_orders')

    calculation_method: sum
    expression: order_total
@@ -1,6 +0,0 @@
customer_id,first_name,last_name,email,gender,is_new_customer,date_added
1,Geodude,Pokemon,rocks@pokemon.org,Male,FALSE,2022-01-01
2,Mew,Pokemon,mew.is.the.best@pokemon.com,Genderfluid,TRUE,2022-01-06
3,Mewtwo,Pokemon,no.mewtwo.is.better@pokemon.com,Genderqueer,FALSE,2022-01-13
4,Charizard,Pokemon,firebreathbaby@pokemon.com,Female,TRUE,2022-02-01
5,Snorlax,Pokemon,sleep@pokemon.com,Male,TRUE,2022-02-03

@@ -1,11 +0,0 @@
order_id,order_country,order_total,had_discount,customer_id,order_date
1,Unovo,2,false,1,01/28/2022
2,Kalos,1,false,2,01/20/2022
3,Kalos,1,false,1,01/13/2022
4,Alola,1,true,3,01/06/2022
5,Alola,1,false,4,01/08/2022
6,Kanto,1,false,5,01/21/2022
7,Alola,1,true,2,01/22/2022
8,Kanto,0,true,1,02/15/2022
9,Unovo,1,false,2,02/03/2022
10,Kanto,1,false,3,02/13/2022

103 tests/adapter/dbt/tests/adapter/caching/test_caching.py Normal file
@@ -0,0 +1,103 @@
import pytest

from dbt.tests.util import run_dbt

model_sql = """
{{
    config(
        materialized='table'
    )
}}
select 1 as id
"""

another_schema_model_sql = """
{{
    config(
        materialized='table',
        schema='another_schema'
    )
}}
select 1 as id
"""


class BaseCachingTest:
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "config-version": 2,
            "quoting": {
                "identifier": False,
                "schema": False,
            },
        }

    def run_and_inspect_cache(self, project, run_args=None):
        run_dbt(run_args)

        # the cache was empty at the start of the run.
        # the model materialization returned an unquoted relation and added to the cache.
        adapter = project.adapter
        assert len(adapter.cache.relations) == 1
        relation = list(adapter.cache.relations).pop()
        assert relation.schema == project.test_schema
        assert relation.schema == project.test_schema.lower()

        # on the second run, dbt will find a relation in the database during cache population.
        # this relation will be quoted, because list_relations_without_caching (by default) uses
        # quote_policy = {"database": True, "schema": True, "identifier": True}
        # when adding relations to the cache.
        run_dbt(run_args)
        adapter = project.adapter
        assert len(adapter.cache.relations) == 1
        second_relation = list(adapter.cache.relations).pop()

        # perform a case-insensitive + quote-insensitive comparison
        for key in ["database", "schema", "identifier"]:
            assert getattr(relation, key).lower() == getattr(second_relation, key).lower()

    def test_cache(self, project):
        self.run_and_inspect_cache(project, run_args=["run"])


class BaseCachingLowercaseModel(BaseCachingTest):
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "model.sql": model_sql,
        }


class BaseCachingUppercaseModel(BaseCachingTest):
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "MODEL.sql": model_sql,
        }


class BaseCachingSelectedSchemaOnly(BaseCachingTest):
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "model.sql": model_sql,
            "another_schema_model.sql": another_schema_model_sql,
        }

    def test_cache(self, project):
        # this should only cache the schema containing the selected model
        run_args = ["--cache-selected-only", "run", "--select", "model"]
        self.run_and_inspect_cache(project, run_args)


class TestCachingLowerCaseModel(BaseCachingLowercaseModel):
    pass


class TestCachingUppercaseModel(BaseCachingUppercaseModel):
    pass


class TestCachingSelectedSchemaOnly(BaseCachingSelectedSchemaOnly):
    pass
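
Because this file ships under the tests/adapter package, adapter plugins can inherit these base classes instead of copying the cases. A minimal sketch of how an adapter repo might reuse one (the subclass name is hypothetical):

from dbt.tests.adapter.caching.test_caching import BaseCachingLowercaseModel


class TestCachingMyAdapter(BaseCachingLowercaseModel):
    # Inherits the project fixtures and test_cache from the base class above.
    pass
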
@@ -1,60 +1,3 @@
#
# Properties
#
_PROPERTIES__SCHEMA = """
version: 2

models:
  - name: model_a
    columns:
      - name: id
        tags: [column_level_tag]
        tests:
          - unique

  - name: incremental_ignore
    columns:
      - name: id
        tags: [column_level_tag]
        tests:
          - unique

  - name: incremental_ignore_target
    columns:
      - name: id
        tags: [column_level_tag]
        tests:
          - unique

  - name: incremental_append_new_columns
    columns:
      - name: id
        tags: [column_level_tag]
        tests:
          - unique

  - name: incremental_append_new_columns_target
    columns:
      - name: id
        tags: [column_level_tag]
        tests:
          - unique

  - name: incremental_sync_all_columns
    columns:
      - name: id
        tags: [column_level_tag]
        tests:
          - unique

  - name: incremental_sync_all_columns_target
    columns:
      - name: id
        tags: [column_leveL_tag]
        tests:
          - unique
"""

#
# Models
#
@@ -70,7 +13,7 @@ _MODELS__INCREMENTAL_SYNC_REMOVE_ONLY = """

WITH source_data AS (SELECT * FROM {{ ref('model_a') }} )

{% set string_type = 'varchar(10)' %}
{% set string_type = dbt.type_string() %}

{% if is_incremental() %}

@@ -123,7 +66,7 @@ with source_data as (

)

{% set string_type = 'varchar(10)' %}
{% set string_type = dbt.type_string() %}

select id
    ,cast(field1 as {{string_type}}) as field1
@@ -184,7 +127,7 @@ _MODELS__INCREMENTAL_SYNC_ALL_COLUMNS = """

WITH source_data AS (SELECT * FROM {{ ref('model_a') }} )

{% set string_type = 'varchar(10)' %}
{% set string_type = dbt.type_string() %}

{% if is_incremental() %}

@@ -215,7 +158,7 @@ _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE = """
    )
}}

{% set string_type = 'varchar(10)' %}
{% set string_type = dbt.type_string() %}

WITH source_data AS (SELECT * FROM {{ ref('model_a') }} )

@@ -267,7 +210,7 @@ _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_TARGET = """
    config(materialized='table')
}}

{% set string_type = 'varchar(10)' %}
{% set string_type = dbt.type_string() %}

with source_data as (

@@ -293,7 +236,7 @@ _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS = """
    )
}}

{% set string_type = 'varchar(10)' %}
{% set string_type = dbt.type_string() %}

WITH source_data AS (SELECT * FROM {{ ref('model_a') }} )

@@ -327,7 +270,7 @@ with source_data as (

)

{% set string_type = 'varchar(10)' %}
{% set string_type = dbt.type_string() %}

select id
    ,cast(field1 as {{string_type}}) as field1
@@ -344,7 +287,7 @@ _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE_TARGET = """
    config(materialized='table')
}}

{% set string_type = 'varchar(10)' %}
{% set string_type = dbt.type_string() %}

with source_data as (

@@ -360,35 +303,3 @@ select id,

from source_data
"""

#
# Tests
#

_TESTS__SELECT_FROM_INCREMENTAL_IGNORE = """
select * from {{ ref('incremental_ignore') }} where false
"""

_TESTS__SELECT_FROM_A = """
select * from {{ ref('model_a') }} where false
"""

_TESTS__SELECT_FROM_INCREMENTAL_APPEND_NEW_COLUMNS_TARGET = """
select * from {{ ref('incremental_append_new_columns_target') }} where false
"""

_TESTS__SELECT_FROM_INCREMENTAL_SYNC_ALL_COLUMNS = """
select * from {{ ref('incremental_sync_all_columns') }} where false
"""

_TESTS__SELECT_FROM_INCREMENTAL_SYNC_ALL_COLUMNS_TARGET = """
select * from {{ ref('incremental_sync_all_columns_target') }} where false
"""

_TESTS__SELECT_FROM_INCREMENTAL_IGNORE_TARGET = """
select * from {{ ref('incremental_ignore_target') }} where false
"""

_TESTS__SELECT_FROM_INCREMENTAL_APPEND_NEW_COLUMNS = """
select * from {{ ref('incremental_append_new_columns') }} where false
"""
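
The edits above swap the hard-coded varchar(10) for the cross-database dbt.type_string() macro, so each adapter resolves its own string type at compile time. A minimal model fixture in the same style as the constants above (the constant name is hypothetical):

_MODELS__EXAMPLE_STRING_TYPE = """
{% set string_type = dbt.type_string() %}

select id
    ,cast(field1 as {{string_type}}) as field1
from {{ ref('model_a') }}
"""
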
@@ -5,8 +5,7 @@ from dbt.tests.util import (
    run_dbt,
)

from tests.functional.incremental_schema_tests.fixtures import (
    _PROPERTIES__SCHEMA,
from dbt.tests.adapter.incremental.fixtures import (
    _MODELS__INCREMENTAL_SYNC_REMOVE_ONLY,
    _MODELS__INCREMENTAL_IGNORE,
    _MODELS__INCREMENTAL_SYNC_REMOVE_ONLY_TARGET,
@@ -19,23 +18,10 @@ from tests.functional.incremental_schema_tests.fixtures import (
    _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS,
    _MODELS__INCREMENTAL_SYNC_ALL_COLUMNS_TARGET,
    _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE_TARGET,
    _TESTS__SELECT_FROM_INCREMENTAL_IGNORE,
    _TESTS__SELECT_FROM_A,
    _TESTS__SELECT_FROM_INCREMENTAL_APPEND_NEW_COLUMNS_TARGET,
    _TESTS__SELECT_FROM_INCREMENTAL_SYNC_ALL_COLUMNS,
    _TESTS__SELECT_FROM_INCREMENTAL_SYNC_ALL_COLUMNS_TARGET,
    _TESTS__SELECT_FROM_INCREMENTAL_IGNORE_TARGET,
    _TESTS__SELECT_FROM_INCREMENTAL_APPEND_NEW_COLUMNS,
)


class TestIncrementalSchemaChange:
    @pytest.fixture(scope="class")
    def properties(self):
        return {
            "schema.yml": _PROPERTIES__SCHEMA,
        }

class BaseIncrementalOnSchemaChangeSetup:
    @pytest.fixture(scope="class")
    def models(self):
        return {
@@ -53,18 +39,6 @@ class TestIncrementalSchemaChange:
            "incremental_append_new_columns_remove_one_target.sql": _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE_TARGET,
        }

    @pytest.fixture(scope="class")
    def tests(self):
        return {
            "select_from_incremental.sql": _TESTS__SELECT_FROM_INCREMENTAL_IGNORE,
            "select_from_a.sql": _TESTS__SELECT_FROM_A,
            "select_from_incremental_append_new_columns_target.sql": _TESTS__SELECT_FROM_INCREMENTAL_APPEND_NEW_COLUMNS_TARGET,
            "select_from_incremental_sync_all_columns.sql": _TESTS__SELECT_FROM_INCREMENTAL_SYNC_ALL_COLUMNS,
            "select_from_incremental_sync_all_columns_target.sql": _TESTS__SELECT_FROM_INCREMENTAL_SYNC_ALL_COLUMNS_TARGET,
            "select_from_incremental_ignore_target.sql": _TESTS__SELECT_FROM_INCREMENTAL_IGNORE_TARGET,
            "select_from_incremental_append_new_columns.sql": _TESTS__SELECT_FROM_INCREMENTAL_APPEND_NEW_COLUMNS,
        }

    def run_twice_and_assert(self, include, compare_source, compare_target, project):

        # dbt run (twice)
@@ -103,6 +77,8 @@ class TestIncrementalSchemaChange:
        compare_target = "incremental_sync_remove_only_target"
        self.run_twice_and_assert(select, compare_source, compare_target, project)


class BaseIncrementalOnSchemaChange(BaseIncrementalOnSchemaChangeSetup):
    def test_run_incremental_ignore(self, project):
        select = "model_a incremental_ignore incremental_ignore_target"
        compare_source = "incremental_ignore"
@@ -122,3 +98,7 @@ class TestIncrementalSchemaChange:
        run_dbt(["run", "--models", select, "--full-refresh"])
        results_two = run_dbt(["run", "--models", select], expect_pass=False)
        assert "Compilation Error" in results_two[1].message


class TestIncrementalOnSchemaChange(BaseIncrementalOnSchemaChange):
    pass
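
The cases promoted into BaseIncrementalOnSchemaChange all hinge on the on_schema_change config of incremental models. A minimal sketch of such a model in the same fixture style as above (the constant name and filter column are assumptions):

_MODELS__EXAMPLE_ON_SCHEMA_CHANGE = """
{{
    config(
        materialized='incremental',
        unique_key='id',
        on_schema_change='sync_all_columns'
    )
}}

select * from {{ ref('model_a') }}

{% if is_incremental() %}
where id > (select max(id) from {{ this }})
{% endif %}
"""
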
File diff suppressed because one or more lines are too long
@@ -258,7 +258,6 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False):
            "unique_id": "model.test.model",
            "fqn": ["test", "model"],
            "metrics": [],
            "entities": [],
            "tags": [],
            "meta": {},
            "config": model_config,
@@ -340,7 +339,6 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False):
            "unique_id": "model.test.second_model",
            "fqn": ["test", "second_model"],
            "metrics": [],
            "entities": [],
            "tags": [],
            "meta": {},
            "config": second_config,
@@ -490,7 +488,6 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False):
            "file_key_name": "models.model",
            "fqn": ["test", "not_null_model_id"],
            "metrics": [],
            "entities": [],
            "name": "not_null_model_id",
            "original_file_path": model_schema_yml_path,
            "package_name": "test",
@@ -544,7 +541,6 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False):
            "extra_ctes_injected": True,
            "fqn": ["test", "snapshot_seed", "snapshot_seed"],
            "metrics": [],
            "entities": [],
            "meta": {},
            "name": "snapshot_seed",
            "original_file_path": snapshot_path,
@@ -588,7 +584,6 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False):
            "file_key_name": "models.model",
            "fqn": ["test", "test_nothing_model_"],
            "metrics": [],
            "entities": [],
            "name": "test_nothing_model_",
            "original_file_path": model_schema_yml_path,
            "package_name": "test",
@@ -639,7 +634,6 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False):
            "file_key_name": "models.model",
            "fqn": ["test", "unique_model_id"],
            "metrics": [],
            "entities": [],
            "name": "unique_model_id",
            "original_file_path": model_schema_yml_path,
            "package_name": "test",
@@ -741,7 +735,6 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False):
            "maturity": "medium",
            "meta": {"tool": "my_tool", "languages": ["python"]},
            "metrics": [],
            "entities": [],
            "tags": ["my_department"],
            "name": "notebook_exposure",
            "original_file_path": os.path.join("models", "schema.yml"),
@@ -769,7 +762,6 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False):
            },
            "fqn": ["test", "simple_exposure"],
            "metrics": [],
            "entities": [],
            "name": "simple_exposure",
            "original_file_path": os.path.join("models", "schema.yml"),
            "owner": {
@@ -791,7 +783,6 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False):
            },
        },
        "metrics": {},
        "entities": {},
        "selectors": {},
        "parent_map": {
            "model.test.model": ["seed.test.seed"],
@@ -874,7 +865,6 @@ def expected_references_manifest(project):
            "docs": {"node_color": None, "show": True},
            "fqn": ["test", "ephemeral_copy"],
            "metrics": [],
            "entities": [],
            "name": "ephemeral_copy",
            "original_file_path": ephemeral_copy_path,
            "package_name": "test",
@@ -928,7 +918,6 @@ def expected_references_manifest(project):
            "docs": {"node_color": None, "show": True},
            "fqn": ["test", "ephemeral_summary"],
            "metrics": [],
            "entities": [],
            "name": "ephemeral_summary",
            "original_file_path": ephemeral_summary_path,
            "package_name": "test",
@@ -984,7 +973,6 @@ def expected_references_manifest(project):
            "docs": {"node_color": None, "show": True},
            "fqn": ["test", "view_summary"],
            "metrics": [],
            "entities": [],
            "name": "view_summary",
            "original_file_path": view_summary_path,
            "package_name": "test",
@@ -1094,7 +1082,6 @@ def expected_references_manifest(project):
            "extra_ctes_injected": True,
            "fqn": ["test", "snapshot_seed", "snapshot_seed"],
            "metrics": [],
            "entities": [],
            "meta": {},
            "name": "snapshot_seed",
            "original_file_path": snapshot_path,
@@ -1181,7 +1168,6 @@ def expected_references_manifest(project):
            "maturity": "medium",
            "meta": {"tool": "my_tool", "languages": ["python"]},
            "metrics": [],
            "entities": [],
            "tags": ["my_department"],
            "name": "notebook_exposure",
            "original_file_path": os.path.join("models", "schema.yml"),
@@ -1198,7 +1184,6 @@ def expected_references_manifest(project):
            },
        },
        "metrics": {},
        "entities": {},
        "selectors": {},
        "docs": {
            "doc.dbt.__overview__": ANY,

Reference in New Issue
Block a user