forked from repo-mirrors/dbt-core
Compare commits
1 Commits
adding-sem...refactor/r

| Author | SHA1 | Date |
|---|---|---|
|  | 270baabf81 |  |
@@ -1,4 +1,4 @@
 # all these are just exports, they need "noqa" so flake8 will not complain.
 from .profile import Profile, PROFILES_DIR, read_user_config  # noqa
-from .project import Project  # noqa
+from .project import Project, IsFQNResource  # noqa
 from .runtime import RuntimeConfig, UnsetProfileConfig  # noqa

@@ -3,9 +3,8 @@ from dataclasses import dataclass, field
 from itertools import chain
 from typing import (
     List, Dict, Any, Optional, TypeVar, Union, Tuple, Callable, Mapping,
     Iterable, Set
 )
-from typing_extensions import Protocol
+from typing_extensions import Protocol, runtime_checkable

 import hashlib
 import os
@@ -26,12 +25,10 @@ from dbt.semver import VersionSpecifier
 from dbt.semver import versions_compatible
 from dbt.version import get_installed_version
 from dbt.utils import deep_map, MultiDict
-from dbt.legacy_config_updater import ConfigUpdater, IsFQNResource
 from dbt.node_types import NodeType

 from dbt.contracts.project import (
-    ProjectV1 as ProjectV1Contract,
-    ProjectV2 as ProjectV2Contract,
-    parse_project_config,
+    Project as ProjectContract,
     SemverString,
 )
 from dbt.contracts.project import PackageConfig
@@ -75,6 +72,13 @@ Validator Error:
 """


+@runtime_checkable
+class IsFQNResource(Protocol):
+    fqn: List[str]
+    resource_type: NodeType
+    package_name: str
+
+
 def _list_if_none(value):
     if value is None:
         value = []
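Since `IsFQNResource` is now a `runtime_checkable` Protocol, any object that exposes the three attributes satisfies `isinstance` checks structurally; no inheritance is required. A minimal sketch (the `FakeNode` dataclass is hypothetical, not part of this diff):

```python
from dataclasses import dataclass
from typing import List

from dbt.config import IsFQNResource
from dbt.node_types import NodeType


@dataclass
class FakeNode:  # hypothetical stand-in for any parsed resource
    fqn: List[str]
    resource_type: NodeType
    package_name: str


node = FakeNode(['my_project', 'my_model'], NodeType.Model, 'my_project')
# runtime_checkable protocols support structural isinstance checks:
assert isinstance(node, IsFQNResource)
```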

@@ -233,47 +237,8 @@ class PartialProject:
         return renderer.render_value(self.profile_name)


-class VarProvider(Protocol):
+class VarProvider:
     """Var providers are tied to a particular Project."""
-    def vars_for(
-        self, node: IsFQNResource, adapter_type: str
-    ) -> Mapping[str, Any]:
-        raise NotImplementedError(
-            f'vars_for not implemented for {type(self)}!'
-        )
-
-    def to_dict(self):
-        raise NotImplementedError(
-            f'to_dict not implemented for {type(self)}!'
-        )
-
-
-class V1VarProvider(VarProvider):
-    def __init__(
-        self,
-        models: Dict[str, Any],
-        seeds: Dict[str, Any],
-        snapshots: Dict[str, Any],
-    ) -> None:
-        self.models = models
-        self.seeds = seeds
-        self.snapshots = snapshots
-        self.sources: Dict[str, Any] = {}
-
-    def vars_for(
-        self, node: IsFQNResource, adapter_type: str
-    ) -> Mapping[str, Any]:
-        updater = ConfigUpdater(adapter_type)
-        return updater.get_project_config(node, self).get('vars', {})
-
-    def to_dict(self):
-        raise ValidationError(
-            'to_dict was called on a v1 vars, but it should only be called '
-            'on v2 vars'
-        )
-
-
-class V2VarProvider(VarProvider):
     def __init__(
         self,
         vars: Dict[str, Dict[str, Any]]

@@ -382,7 +347,7 @@ class Project:
             project=project_dict
         )
         try:
-            cfg = parse_project_config(project_dict)
+            cfg = ProjectContract.from_dict(project_dict)
         except ValidationError as e:
             raise DbtProjectError(validator_error_message(e)) from e

@@ -431,32 +396,21 @@ class Project:
         sources: Dict[str, Any]
         vars_value: VarProvider

-        if cfg.config_version == 1:
-            assert isinstance(cfg, ProjectV1Contract)
-            # extract everything named 'vars'
-            models = cfg.models
-            seeds = cfg.seeds
-            snapshots = cfg.snapshots
-            sources = {}
-            vars_value = V1VarProvider(
-                models=models, seeds=seeds, snapshots=snapshots
-            )
-        elif cfg.config_version == 2:
-            assert isinstance(cfg, ProjectV2Contract)
-            models = cfg.models
-            seeds = cfg.seeds
-            snapshots = cfg.snapshots
-            sources = cfg.sources
-            if cfg.vars is None:
-                vars_dict: Dict[str, Any] = {}
-            else:
-                vars_dict = cfg.vars
-            vars_value = V2VarProvider(vars_dict)
-        else:
+        if cfg.config_version != 2:
             raise ValidationError(
                 f'Got unsupported config_version={cfg.config_version}'
             )

+        models = cfg.models
+        seeds = cfg.seeds
+        snapshots = cfg.snapshots
+        sources = cfg.sources
+        if cfg.vars is None:
+            vars_dict: Dict[str, Any] = {}
+        else:
+            vars_dict = cfg.vars
+        vars_value = VarProvider(vars_dict)

         on_run_start: List[str] = value_or(cfg.on_run_start, [])
         on_run_end: List[str] = value_or(cfg.on_run_end, [])

@@ -558,6 +512,8 @@ class Project:
             'on-run-end': self.on_run_end,
             'seeds': self.seeds,
             'snapshots': self.snapshots,
+            'sources': self.sources,
+            'vars': self.vars.to_dict(),
             'require-dbt-version': [
                 v.to_version_string() for v in self.dbt_version
             ],
@@ -569,17 +525,11 @@ class Project:
         if with_packages:
             result.update(self.packages.to_dict())

-        if self.config_version == 2:
-            result.update({
-                'sources': self.sources,
-                'vars': self.vars.to_dict()
-            })
-
         return result

     def validate(self):
         try:
-            ProjectV2Contract.from_dict(self.to_project_config())
+            ProjectContract.from_dict(self.to_project_config())
         except ValidationError as e:
             raise DbtProjectError(validator_error_message(e)) from e

@@ -619,6 +569,11 @@ class Project:
         project_name = project_dict.get('name')
         profile_name = project_dict.get('profile')
         config_version = project_dict.get('config-version', 1)
+        if config_version != 2:
+            raise DbtProjectError(
+                f'Invalid config version: {config_version}, expected 2',
+                path=os.path.join(project_root, 'dbt_project.yml')
+            )

         return PartialProject(
             config_version=config_version,
@@ -661,32 +616,6 @@ class Project:
         )
         raise DbtProjectError(msg)

-    def as_v1(self, all_projects: Iterable[str]):
-        if self.config_version == 1:
-            return self
-
-        dct = self.to_project_config()
-
-        mutated = deepcopy(dct)
-        # remove sources, it doesn't exist
-        mutated.pop('sources', None)
-
-        common_config_keys = ['models', 'seeds', 'snapshots']
-
-        if 'vars' in dct and isinstance(dct['vars'], dict):
-            v2_vars_to_v1(mutated, dct['vars'], set(all_projects))
-        # ok, now we want to look through all the existing cfgkeys and mirror
-        # it, except expand the '+' prefix.
-        for cfgkey in common_config_keys:
-            if cfgkey not in dct:
-                continue
-
-            mutated[cfgkey] = _flatten_config(dct[cfgkey])
-        mutated['config-version'] = 1
-        project = Project.from_project_config(mutated)
-        project.packages = self.packages
-        return project
-
     def get_selector(self, name: str) -> SelectionSpec:
         if name not in self.selectors:
             raise RuntimeException(
@@ -694,45 +623,3 @@ class Project:
                 f'{list(self.selectors)}'
             )
         return self.selectors[name]


-def v2_vars_to_v1(
-    dst: Dict[str, Any], src_vars: Dict[str, Any], project_names: Set[str]
-) -> None:
-    # stuff any 'vars' entries into the old-style
-    # models/seeds/snapshots dicts
-    common_config_keys = ['models', 'seeds', 'snapshots']
-    for project_name in project_names:
-        for cfgkey in common_config_keys:
-            if cfgkey not in dst:
-                dst[cfgkey] = {}
-            if project_name not in dst[cfgkey]:
-                dst[cfgkey][project_name] = {}
-            project_type_cfg = dst[cfgkey][project_name]
-
-            if 'vars' not in project_type_cfg:
-                project_type_cfg['vars'] = {}
-            project_type_vars = project_type_cfg['vars']
-
-            project_type_vars.update({
-                k: v for k, v in src_vars.items()
-                if not isinstance(v, dict)
-            })
-
-            items = src_vars.get(project_name, None)
-            if isinstance(items, dict):
-                project_type_vars.update(items)
-    # remove this from the v1 form
-    dst.pop('vars')
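A worked example of the conversion being deleted here (the dict literals are illustrative, not taken from the diff):

```python
src_vars = {
    'foo': 123,                   # global: copied into every project
    'my_project': {'foo': 456},   # scoped: overrides within my_project only
}
# as_v1 passes the dict produced by to_project_config(), which still
# carries the v2-style top-level 'vars' entry:
dst = {'models': {}, 'vars': dict(src_vars)}
v2_vars_to_v1(dst, src_vars, {'my_project', 'other_project'})

assert dst['models']['my_project']['vars'] == {'foo': 456}
assert dst['models']['other_project']['vars'] == {'foo': 123}
assert 'vars' not in dst  # the v2-style top-level key is removed
# seeds and snapshots receive the same vars:
assert dst['seeds']['my_project']['vars'] == {'foo': 456}
```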

-def _flatten_config(dct: Dict[str, Any]):
-    result = {}
-    for key, value in dct.items():
-        if isinstance(value, dict) and not key.startswith('+'):
-            result[key] = _flatten_config(value)
-        else:
-            if key.startswith('+'):
-                key = key[1:]
-            result[key] = value
-    return result
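`_flatten_config` strips the v2-only `'+'` prefix while recursing through the resource tree, so a v2 config block collapses into its v1 shape; for example:

```python
cfg = {
    'my_project': {
        '+materialized': 'table',       # '+' marks an explicit config in v2
        'staging': {'+enabled': True},
    },
}
assert _flatten_config(cfg) == {
    'my_project': {
        'materialized': 'table',
        'staging': {'enabled': True},
    },
}
```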

@@ -32,7 +32,6 @@ from dbt.exceptions import (
     warn_or_error,
     raise_compiler_error
 )
-from dbt.legacy_config_updater import ConfigUpdater

 from hologram import ValidationError

@@ -249,27 +248,6 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
             paths.add(path)
         return frozenset(paths)

-    def _get_v1_config_paths(
-        self,
-        config: Dict[str, Any],
-        path: FQNPath,
-        paths: MutableSet[FQNPath],
-    ) -> PathSet:
-        keys = ConfigUpdater(self.credentials.type).ConfigKeys
-
-        for key, value in config.items():
-            if isinstance(value, dict):
-                if key in keys:
-                    if path not in paths:
-                        paths.add(path)
-                else:
-                    self._get_v1_config_paths(value, path + (key,), paths)
-            else:
-                if path not in paths:
-                    paths.add(path)
-
-        return frozenset(paths)
-
     def _get_config_paths(
         self,
         config: Dict[str, Any],
@@ -279,10 +257,12 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
         if paths is None:
             paths = set()

-        if self.config_version == 2:
-            return self._get_v2_config_paths(config, path, paths)
-        else:
-            return self._get_v1_config_paths(config, path, paths)
+        for key, value in config.items():
+            if isinstance(value, dict) and not key.startswith('+'):
+                self._get_v2_config_paths(value, path + (key,), paths)
+            else:
+                paths.add(path)
+        return frozenset(paths)
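The rewritten `_get_config_paths` walks the v2-style resource tree directly: it recurses through plain dict keys and records the current FQN prefix whenever it hits a leaf value or a `'+'`-prefixed entry. A sketch of what it collects, assuming a hypothetical models block:

```python
models = {
    'my_project': {
        '+materialized': 'view',    # config attached at ('my_project',)
        'staging': {
            'enabled': True,        # config at ('my_project', 'staging')
        },
    },
}
# self._get_config_paths(models) would return:
# frozenset({('my_project',), ('my_project', 'staging')})
```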

     def get_resource_config_paths(self) -> Dict[str, PathSet]:
         """Return a dictionary with 'seeds' and 'models' keys whose values are
@@ -381,17 +361,6 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
         if path.is_dir() and not path.name.startswith('__'):
             yield path

-    def as_v1(self, all_projects: Iterable[str]):
-        if self.config_version == 1:
-            return self
-
-        return self.from_parts(
-            project=Project.as_v1(self, all_projects),
-            profile=self,
-            args=self.args,
-            dependencies=self.dependencies,
-        )
-
 class UnsetCredentials(Credentials):
     def __init__(self):

@@ -46,17 +46,16 @@ class ConfiguredVar(Var):
         if var_name in self._config.cli_vars:
             return self._config.cli_vars[var_name]

-        if self._config.config_version == 2 and my_config.config_version == 2:
-            adapter_type = self._config.credentials.type
-            lookup = FQNLookup(self._project_name)
-            active_vars = self._config.vars.vars_for(lookup, adapter_type)
-            all_vars = MultiDict([active_vars])
+        adapter_type = self._config.credentials.type
+        lookup = FQNLookup(self._project_name)
+        active_vars = self._config.vars.vars_for(lookup, adapter_type)
+        all_vars = MultiDict([active_vars])

-            if self._config.project_name != my_config.project_name:
-                all_vars.add(my_config.vars.vars_for(lookup, adapter_type))
+        if self._config.project_name != my_config.project_name:
+            all_vars.add(my_config.vars.vars_for(lookup, adapter_type))

-            if var_name in all_vars:
-                return all_vars[var_name]
+        if var_name in all_vars:
+            return all_vars[var_name]

         if default is not Var._VAR_NOTSET:
             return default
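With the version check gone, `ConfiguredVar` always resolves through the FQN-scoped providers. The effective precedence, summarized as a sketch of the code above:

```python
# Resolution order for var('some_var', default):
#   1. cli_vars  -- values passed via --vars always win
#   2. all_vars  -- the active project's vars for this node's FQN, plus the
#                   owning package's vars when that package differs
#   3. default   -- used only when the name is found nowhere else
```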

@@ -1,11 +1,10 @@
 from copy import deepcopy
 from dataclasses import dataclass
-from typing import List, Iterator, Dict, Any, TypeVar, Union
+from typing import List, Iterator, Dict, Any, TypeVar

-from dbt.config import RuntimeConfig, Project
+from dbt.config import RuntimeConfig, Project, IsFQNResource
 from dbt.contracts.graph.model_config import BaseConfig, get_config_for
 from dbt.exceptions import InternalException
-from dbt.legacy_config_updater import ConfigUpdater, IsFQNResource
 from dbt.node_types import NodeType
 from dbt.utils import fqn_search

@@ -17,76 +16,6 @@ class ModelParts(IsFQNResource):
     package_name: str


-class LegacyContextConfig:
-    def __init__(
-        self,
-        active_project: RuntimeConfig,
-        own_project: Project,
-        fqn: List[str],
-        node_type: NodeType,
-    ):
-        self._config = None
-        self._active_project: RuntimeConfig = active_project
-        self._own_project: Project = own_project
-
-        self._model = ModelParts(
-            fqn=fqn,
-            resource_type=node_type,
-            package_name=self._own_project.project_name,
-        )
-
-        self._updater = ConfigUpdater(active_project.credentials.type)
-
-        # the config options defined within the model
-        self.in_model_config: Dict[str, Any] = {}
-
-    def get_default(self) -> Dict[str, Any]:
-        defaults = {"enabled": True, "materialized": "view"}
-
-        if self._model.resource_type == NodeType.Seed:
-            defaults['materialized'] = 'seed'
-        elif self._model.resource_type == NodeType.Snapshot:
-            defaults['materialized'] = 'snapshot'
-
-        if self._model.resource_type == NodeType.Test:
-            defaults['severity'] = 'ERROR'
-
-        return defaults
-
-    def build_config_dict(self, base: bool = False) -> Dict[str, Any]:
-        defaults = self.get_default()
-        active_config = self.load_config_from_active_project()
-
-        if self._active_project.project_name == self._own_project.project_name:
-            cfg = self._updater.merge(
-                defaults, active_config, self.in_model_config
-            )
-        else:
-            own_config = self.load_config_from_own_project()
-
-            cfg = self._updater.merge(
-                defaults, own_config, self.in_model_config, active_config
-            )
-
-        return cfg
-
-    def _translate_adapter_aliases(self, config: Dict[str, Any]):
-        return self._active_project.credentials.translate_aliases(config)
-
-    def update_in_model_config(self, config: Dict[str, Any]) -> None:
-        config = self._translate_adapter_aliases(config)
-        self._updater.update_into(self.in_model_config, config)
-
-    def load_config_from_own_project(self) -> Dict[str, Any]:
-        return self._updater.get_project_config(self._model, self._own_project)
-
-    def load_config_from_active_project(self) -> Dict[str, Any]:
-        return self._updater.get_project_config(
-            self._model,
-            self._active_project,
-        )
-
-
 T = TypeVar('T', bound=BaseConfig)


@@ -199,6 +128,3 @@ class ContextConfig:
             project_name=self._project_name,
             base=base,
         ).to_dict()
-
-
-ContextConfigType = Union[LegacyContextConfig, ContextConfig]

@@ -14,7 +14,7 @@ from dbt.clients.jinja import get_rendered, MacroGenerator
 from dbt.config import RuntimeConfig, Project
 from .base import contextmember, contextproperty, Var
 from .configured import FQNLookup
-from .context_config import ContextConfigType
+from .context_config import ContextConfig
 from .macros import MacroNamespaceBuilder, MacroNamespace
 from .manifest import ManifestContext
 from dbt.contracts.graph.manifest import Manifest, Disabled
@@ -41,7 +41,7 @@ from dbt.exceptions import (
     source_target_not_found,
     wrapped_exports,
 )
-from dbt.legacy_config_updater import IsFQNResource
+from dbt.config import IsFQNResource
 from dbt.logger import GLOBAL_LOGGER as logger  # noqa
 from dbt.node_types import NodeType

@@ -252,13 +252,13 @@ class BaseSourceResolver(BaseResolver):


 class Config(Protocol):
-    def __init__(self, model, context_config: Optional[ContextConfigType]):
+    def __init__(self, model, context_config: Optional[ContextConfig]):
         ...


 # `config` implementations
 class ParseConfigObject(Config):
-    def __init__(self, model, context_config: Optional[ContextConfigType]):
+    def __init__(self, model, context_config: Optional[ContextConfig]):
         self.model = model
         self.context_config = context_config

@@ -314,7 +314,7 @@ class ParseConfigObject(Config):

 class RuntimeConfigObject(Config):
     def __init__(
-        self, model, context_config: Optional[ContextConfigType] = None
+        self, model, context_config: Optional[ContextConfig] = None
     ):
         self.model = model
         # we never use or get a config, only the parser cares
@@ -619,7 +619,7 @@ class ProviderContext(ManifestContext):
         config: RuntimeConfig,
         manifest: Manifest,
         provider: Provider,
-        context_config: Optional[ContextConfigType],
+        context_config: Optional[ContextConfig],
     ) -> None:
         if provider is None:
             raise InternalException(
@@ -630,7 +630,7 @@ class ProviderContext(ManifestContext):
         self.model: Union[ParsedMacro, NonSourceNode] = model
         super().__init__(config, manifest, model.package_name)
         self.sql_results: Dict[str, AttrDict] = {}
-        self.context_config: Optional[ContextConfigType] = context_config
+        self.context_config: Optional[ContextConfig] = context_config
         self.provider: Provider = provider
         self.adapter = get_adapter(self.config)
         self.db_wrapper = self.provider.DatabaseWrapper(
@@ -1270,7 +1270,7 @@ def generate_parser_model(
     model: NonSourceNode,
     config: RuntimeConfig,
     manifest: Manifest,
-    context_config: ContextConfigType,
+    context_config: ContextConfig,
 ) -> Dict[str, Any]:
     ctx = ModelContext(
         model, config, manifest, ParseProvider(), context_config

@@ -153,47 +153,7 @@ BANNED_PROJECT_NAMES = {


 @dataclass
-class ProjectV1(HyphenatedJsonSchemaMixin, Replaceable):
-    name: Name
-    version: Union[SemverString, float]
-    project_root: Optional[str] = None
-    source_paths: Optional[List[str]] = None
-    macro_paths: Optional[List[str]] = None
-    data_paths: Optional[List[str]] = None
-    test_paths: Optional[List[str]] = None
-    analysis_paths: Optional[List[str]] = None
-    docs_paths: Optional[List[str]] = None
-    asset_paths: Optional[List[str]] = None
-    target_path: Optional[str] = None
-    snapshot_paths: Optional[List[str]] = None
-    clean_targets: Optional[List[str]] = None
-    profile: Optional[str] = None
-    log_path: Optional[str] = None
-    modules_path: Optional[str] = None
-    quoting: Optional[Quoting] = None
-    on_run_start: Optional[List[str]] = field(default_factory=list_str)
-    on_run_end: Optional[List[str]] = field(default_factory=list_str)
-    require_dbt_version: Optional[Union[List[str], str]] = None
-    models: Dict[str, Any] = field(default_factory=dict)
-    seeds: Dict[str, Any] = field(default_factory=dict)
-    snapshots: Dict[str, Any] = field(default_factory=dict)
-    packages: List[PackageSpec] = field(default_factory=list)
-    query_comment: Optional[Union[QueryComment, NoValue, str]] = NoValue()
-    config_version: int = 1
-
-    @classmethod
-    def from_dict(cls, data, validate=True) -> 'ProjectV1':
-        result = super().from_dict(data, validate=validate)
-        if result.name in BANNED_PROJECT_NAMES:
-            raise ValidationError(
-                'Invalid project name: {} is a reserved word'
-                .format(result.name)
-            )
-        return result
-
-
-@dataclass
-class ProjectV2(HyphenatedJsonSchemaMixin, Replaceable):
+class Project(HyphenatedJsonSchemaMixin, Replaceable):
     name: Name
     version: Union[SemverString, float]
     config_version: int
@@ -230,7 +190,7 @@ class ProjectV2(HyphenatedJsonSchemaMixin, Replaceable):
     query_comment: Optional[Union[QueryComment, NoValue, str]] = NoValue()

     @classmethod
-    def from_dict(cls, data, validate=True) -> 'ProjectV2':
+    def from_dict(cls, data, validate=True) -> 'Project':
         result = super().from_dict(data, validate=validate)
         if result.name in BANNED_PROJECT_NAMES:
             raise ValidationError(
@@ -240,21 +200,6 @@ class ProjectV2(HyphenatedJsonSchemaMixin, Replaceable):
         return result


-def parse_project_config(
-    data: Dict[str, Any], validate=True
-) -> Union[ProjectV1, ProjectV2]:
-    config_version = data.get('config-version', 1)
-    if config_version == 1:
-        return ProjectV1.from_dict(data, validate=validate)
-    elif config_version == 2:
-        return ProjectV2.from_dict(data, validate=validate)
-    else:
-        raise ValidationError(
-            f'Got an unexpected config-version={config_version}, expected '
-            f'1 or 2'
-        )
-
-
 @dataclass
 class UserConfig(ExtensibleJsonSchemaMixin, Replaceable, UserConfigContract):
     send_anonymous_usage_stats: bool = DEFAULT_SEND_ANONYMOUS_USAGE_STATS
@@ -294,7 +239,7 @@ class ConfiguredQuoting(Quoting, Replaceable):


 @dataclass
-class Configuration(ProjectV2, ProfileConfig):
+class Configuration(Project, ProfileConfig):
     cli_vars: Dict[str, Any] = field(
         default_factory=dict,
         metadata={'preserve_underscore': True},
@@ -304,4 +249,4 @@ class Configuration(ProjectV2, ProfileConfig):

 @dataclass
 class ProjectList(JsonSchemaMixin):
-    projects: Dict[str, Union[ProjectV2, ProjectV1]]
+    projects: Dict[str, Project]

@@ -99,23 +99,6 @@ class ModelsKeyNonModelDeprecation(DBTDeprecation):
     '''


-class DbtProjectYamlDeprecation(DBTDeprecation):
-    _name = 'dbt-project-yaml-v1'
-    _description = '''\
-    dbt v0.17.0 introduces a new config format for the dbt_project.yml file.
-    Support for the existing version 1 format will be removed in a future
-    release of dbt. The following packages are currently configured with
-    config version 1:{project_names}
-
-
-    For upgrading instructions, consult the documentation:
-
-    https://docs.getdbt.com/docs/guides/migration-guide/upgrading-to-0-17-0
-
-    '''
-
-
 class ExecuteMacrosReleaseDeprecation(DBTDeprecation):
     _name = 'execute-macro-release'
     _description = '''\
@@ -176,7 +159,6 @@ deprecations_list: List[DBTDeprecation] = [
     NotADictionaryDeprecation(),
     ColumnQuotingDeprecation(),
     ModelsKeyNonModelDeprecation(),
-    DbtProjectYamlDeprecation(),
     ExecuteMacrosReleaseDeprecation(),
     AdapterMacroDeprecation(),
 ]

@@ -1,214 +0,0 @@
-# TODO: rename this module.
-from typing import Dict, Any, Mapping, List
-from typing_extensions import Protocol, runtime_checkable
-
-import dbt.exceptions
-
-from dbt.utils import deep_merge, fqn_search
-from dbt.node_types import NodeType
-from dbt.adapters.factory import get_config_class_by_name
-
-
-class HasConfigFields(Protocol):
-    seeds: Dict[str, Any]
-    snapshots: Dict[str, Any]
-    models: Dict[str, Any]
-    sources: Dict[str, Any]
-
-
-@runtime_checkable
-class IsFQNResource(Protocol):
-    fqn: List[str]
-    resource_type: NodeType
-    package_name: str
-
-
-def _listify(value) -> List:
-    if isinstance(value, tuple):
-        value = list(value)
-    elif not isinstance(value, list):
-        value = [value]
-
-    return value
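For reference, the deleted `_listify` helper simply normalized hook and tag values into lists:

```python
assert _listify(('a', 'b')) == ['a', 'b']            # tuples become lists
assert _listify('run this sql') == ['run this sql']  # scalars are wrapped
assert _listify(['x']) == ['x']                      # lists pass through
```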
-
-
-class ConfigUpdater:
-    AppendListFields = {'pre-hook', 'post-hook', 'tags'}
-    ExtendDictFields = {'vars', 'column_types', 'quoting', 'persist_docs'}
-    DefaultClobberFields = {
-        'enabled',
-        'materialized',
-
-        # these 2 are additional - not defined in the NodeConfig object
-        'sql_header',
-        'incremental_strategy',
-
-        # these 3 are "special" - not defined in NodeConfig, instead set by
-        # update_parsed_node_name in parsing
-        'alias',
-        'schema',
-        'database',
-
-        # tests
-        'severity',
-
-        # snapshots
-        'unique_key',
-        'target_database',
-        'target_schema',
-        'strategy',
-        'updated_at',
-        # this is often a list, but it should replace and not append (sometimes
-        # it's 'all')
-        'check_cols',
-        # seeds
-        'quote_columns',
-    }
-
-    @property
-    def ClobberFields(self):
-        return self.DefaultClobberFields | self.AdapterSpecificConfigs
-
-    @property
-    def ConfigKeys(self):
-        return (
-            self.AppendListFields | self.ExtendDictFields | self.ClobberFields
-        )
-
-    def __init__(self, adapter_type: str):
-        config_class = get_config_class_by_name(adapter_type)
-        self.AdapterSpecificConfigs = {
-            target_name for _, target_name in
-            config_class._get_fields()
-        }
-
-    def update_config_keys_into(
-        self, mutable_config: Dict[str, Any], new_configs: Mapping[str, Any]
-    ) -> Dict[str, Any]:
-        """Update mutable_config with the contents of new_configs, but only
-        include "expected" config values.
-
-        Returns dict where the keys are what was updated and the update values
-        are what the updates were.
-        """
-        relevant_configs: Dict[str, Any] = {
-            key: new_configs[key] for key
-            in new_configs if key in self.ConfigKeys
-        }
-
-        for key in self.AppendListFields:
-            append_fields = _listify(relevant_configs.get(key, []))
-            mutable_config[key].extend([
-                f for f in append_fields if f not in mutable_config[key]
-            ])
-
-        for key in self.ExtendDictFields:
-            dict_val = relevant_configs.get(key, {})
-            try:
-                mutable_config[key].update(dict_val)
-            except (ValueError, TypeError, AttributeError):
-                dbt.exceptions.raise_compiler_error(
-                    'Invalid config field: "{}" must be a dict'.format(key)
-                )
-
-        for key in self.ClobberFields:
-            if key in relevant_configs:
-                mutable_config[key] = relevant_configs[key]
-
-        return relevant_configs
-
-    def update_into(
-        self, mutable_config: Dict[str, Any], new_config: Mapping[str, Any]
-    ) -> None:
-        """Update mutable_config with the contents of new_config."""
-        for key, value in new_config.items():
-            if key in self.AppendListFields:
-                current_list: List = _listify(mutable_config.get(key, []))
-                current_list.extend(_listify(value))
-                mutable_config[key] = current_list
-            elif key in self.ExtendDictFields:
-                current_dict: Dict = mutable_config.get(key, {})
-                try:
-                    current_dict.update(value)
-                except (ValueError, TypeError, AttributeError):
-                    dbt.exceptions.raise_compiler_error(
-                        'Invalid config field: "{}" must be a dict'.format(key)
-                    )
-                mutable_config[key] = current_dict
-            else:  # key in self.ClobberFields
-                mutable_config[key] = value
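`update_into` treated the three field families differently: append-list fields extend, dict fields merge, and everything else clobbers. A sketch of the legacy behavior (assuming an adapter plugin such as `postgres` is installed so `ConfigUpdater` can resolve its config class):

```python
updater = ConfigUpdater('postgres')
target = {'tags': ['nightly'], 'vars': {'a': 1}, 'materialized': 'view'}
updater.update_into(target, {
    'tags': ['hourly'],         # AppendListFields: extended, not replaced
    'vars': {'b': 2},           # ExtendDictFields: merged key by key
    'materialized': 'table',    # ClobberFields: overwritten outright
})
assert target == {
    'tags': ['nightly', 'hourly'],
    'vars': {'a': 1, 'b': 2},
    'materialized': 'table',
}
```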
-
-    def get_project_config(
-        self, model: IsFQNResource, project: HasConfigFields
-    ) -> Dict[str, Any]:
-        # most configs are overwritten by a more specific config, but pre/post
-        # hooks are appended!
-        config: Dict[str, Any] = {}
-        for k in self.AppendListFields:
-            config[k] = []
-        for k in self.ExtendDictFields:
-            config[k] = {}
-
-        if model.resource_type == NodeType.Seed:
-            model_configs = project.seeds
-        elif model.resource_type == NodeType.Snapshot:
-            model_configs = project.snapshots
-        elif model.resource_type == NodeType.Source:
-            model_configs = project.sources
-        else:
-            model_configs = project.models
-
-        if model_configs is None:
-            return config
-
-        # mutates config
-        self.update_config_keys_into(config, model_configs)
-
-        for level_config in fqn_search(model_configs, model.fqn):
-            # mutates config
-            relevant_configs = self.update_config_keys_into(
-                config, level_config
-            )
-
-            # TODO: does this do anything? Doesn't update_config_keys_into
-            # handle the clobber case?
-            clobber_configs = {
-                k: v for (k, v) in relevant_configs.items()
-                if k not in self.AppendListFields and
-                k not in self.ExtendDictFields
-            }
-
-            config.update(clobber_configs)
-
-        return config
-
-    def get_project_vars(
-        self, project_vars: Dict[str, Any],
-    ):
-        config: Dict[str, Any] = {}
-        # this is pretty trivial, since the new project vars don't care about
-        # FQNs or resource types
-        self.update_config_keys_into(config, project_vars)
-        return config
-
-    def merge(self, *configs: Dict[str, Any]) -> Dict[str, Any]:
-        merged_config: Dict[str, Any] = {}
-        for config in configs:
-            # Do not attempt to deep merge clobber fields
-            config = config.copy()
-            clobber = {
-                key: config.pop(key) for key in list(config.keys())
-                if key in self.ClobberFields
-            }
-            intermediary_merged = deep_merge(
-                merged_config, config
-            )
-            intermediary_merged.update(clobber)
-
-            merged_config.update(intermediary_merged)
-        return merged_config
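`merge`, by contrast, deep-merged the non-clobber fields across successive configs while letting a later clobber field simply replace an earlier one. Reusing the `updater` from the sketch above:

```python
merged = updater.merge(
    {'vars': {'a': 1}, 'materialized': 'view'},
    {'vars': {'b': 2}, 'materialized': 'table'},
)
# 'vars' is deep-merged; 'materialized' (a clobber field) is replaced:
assert merged == {'vars': {'a': 1, 'b': 2}, 'materialized': 'table'}
```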

@@ -18,7 +18,7 @@ from dbt.adapters.factory import get_adapter
 from dbt.clients.jinja import get_rendered
 from dbt.config import Project, RuntimeConfig
 from dbt.context.context_config import (
-    LegacyContextConfig, ContextConfig, ContextConfigType
+    ContextConfig
 )
 from dbt.contracts.files import (
     SourceFile, FilePath, FileHash
@@ -223,7 +223,7 @@ class ConfiguredParser(
         self,
         block: ConfiguredBlockType,
         path: str,
-        config: ContextConfigType,
+        config: ContextConfig,
         fqn: List[str],
         name=None,
         **kwargs,
@@ -266,16 +266,16 @@ class ConfiguredParser(
         raise CompilationException(msg, node=node)

     def _context_for(
-        self, parsed_node: IntermediateNode, config: ContextConfigType
+        self, parsed_node: IntermediateNode, config: ContextConfig
     ) -> Dict[str, Any]:
         return generate_parser_model(
             parsed_node, self.root_project, self.macro_manifest, config
         )

     def render_with_context(
-        self, parsed_node: IntermediateNode, config: ContextConfigType
+        self, parsed_node: IntermediateNode, config: ContextConfig
     ) -> None:
-        """Given the parsed node and a ContextConfigType to use during parsing,
+        """Given the parsed node and a ContextConfig to use during parsing,
         render the node's sql wtih macro capture enabled.

         Note: this mutates the config object when config() calls are rendered.
@@ -307,9 +307,9 @@ class ConfiguredParser(
         self._update_node_alias(parsed_node, config_dict)

     def update_parsed_node(
-        self, parsed_node: IntermediateNode, config: ContextConfigType
+        self, parsed_node: IntermediateNode, config: ContextConfig
     ) -> None:
-        """Given the ContextConfigType used for parsing and the parsed node,
+        """Given the ContextConfig used for parsing and the parsed node,
         generate and set the true values to use, overriding the temporary parse
         values set in _build_intermediate_parsed_node.
         """
@@ -337,20 +337,11 @@ class ConfiguredParser(
         for hook in hooks:
             get_rendered(hook.sql, context, parsed_node, capture_macros=True)

-    def initial_config(self, fqn: List[str]) -> ContextConfigType:
+    def initial_config(self, fqn: List[str]) -> ContextConfig:
-        config_version = min(
-            [self.project.config_version, self.root_project.config_version]
-        )
-        # grab a list of the existing project names. This is for var conversion
-        all_projects = self.root_project.load_dependencies()
-        if config_version == 1:
-            return LegacyContextConfig(
-                self.root_project.as_v1(all_projects),
-                self.project.as_v1(all_projects),
-                fqn,
-                self.resource_type,
-            )
-        elif config_version == 2:
+        if config_version == 2:
             return ContextConfig(
                 self.root_project,
                 fqn,
@@ -360,18 +351,18 @@ class ConfiguredParser(
         else:
             raise InternalException(
                 f'Got an unexpected project version={config_version}, '
-                f'expected 1 or 2'
+                f'expected 2'
             )

     def config_dict(
-        self, config: ContextConfigType,
+        self, config: ContextConfig,
     ) -> Dict[str, Any]:
         config_dict = config.build_config_dict(base=True)
         self._mangle_hooks(config_dict)
         return config_dict

     def render_update(
-        self, node: IntermediateNode, config: ContextConfigType
+        self, node: IntermediateNode, config: ContextConfig
     ) -> None:
         try:
             self.render_with_context(node, config)
@@ -391,7 +382,7 @@ class ConfiguredParser(
         compiled_path: str = self.get_compiled_path(block)
         fqn = self.get_fqn(compiled_path, block.name)

-        config: ContextConfigType = self.initial_config(fqn)
+        config: ContextConfig = self.initial_config(fqn)

         node = self._create_parsetime_node(
             block=block,

@@ -1,7 +1,7 @@
 from dataclasses import dataclass
 from typing import Iterable, Iterator, Union, List, Tuple

-from dbt.context.context_config import ContextConfigType
+from dbt.context.context_config import ContextConfig
 from dbt.contracts.files import FilePath
 from dbt.contracts.graph.parsed import ParsedHookNode
 from dbt.exceptions import InternalException
@@ -89,7 +89,7 @@ class HookParser(SimpleParser[HookBlock, ParsedHookNode]):
         self,
         block: HookBlock,
         path: str,
-        config: ContextConfigType,
+        config: ContextConfig,
         fqn: List[str],
         name=None,
         **kwargs,

@@ -8,7 +8,6 @@ from typing import (
 import dbt.exceptions
 import dbt.flags as flags

-from dbt import deprecations
 from dbt.adapters.factory import (
     get_relation_class_by_name,
 )
@@ -335,15 +334,6 @@ class ManifestLoader:
     ) -> Manifest:
         with PARSING_STATE:
             projects = root_config.load_dependencies()
-            v1_configs = []
-            for project in projects.values():
-                if project.config_version == 1:
-                    v1_configs.append(f'\n\n  - {project.project_name}')
-            if v1_configs:
-                deprecations.warn(
-                    'dbt-project-yaml-v1',
-                    project_names=''.join(v1_configs)
-                )
             loader = cls(root_config, projects, macro_hook)
             loader.load(macro_manifest=macro_manifest)
             loader.write_parse_results()

@@ -13,7 +13,7 @@ from dbt.clients.jinja import get_rendered, add_rendered_test_kwargs
 from dbt.clients.yaml_helper import load_yaml_text
 from dbt.config.renderer import SchemaYamlRenderer
 from dbt.context.context_config import (
-    ContextConfigType,
+    ContextConfig,
     ContextConfigGenerator,
 )
 from dbt.context.configured import generate_schema_yml
@@ -296,7 +296,7 @@ class SchemaParser(SimpleParser[SchemaTestBlock, ParsedSchemaTestNode]):
         self,
         target: Union[UnpatchedSourceDefinition, UnparsedNodeUpdate],
         path: str,
-        config: ContextConfigType,
+        config: ContextConfig,
         tags: List[str],
         fqn: List[str],
         name: str,
@@ -452,9 +452,9 @@ class SchemaParser(SimpleParser[SchemaTestBlock, ParsedSchemaTestNode]):
         return node

     def render_with_context(
-        self, node: ParsedSchemaTestNode, config: ContextConfigType,
+        self, node: ParsedSchemaTestNode, config: ContextConfig,
     ) -> None:
-        """Given the parsed node and a ContextConfigType to use during
+        """Given the parsed node and a ContextConfig to use during
         parsing, collect all the refs that might be squirreled away in the test
         arguments. This includes the implicit "model" argument.
         """

@@ -1,4 +1,4 @@
-from dbt.context.context_config import ContextConfigType
+from dbt.context.context_config import ContextConfig
 from dbt.contracts.files import SourceFile, FilePath
 from dbt.contracts.graph.parsed import ParsedSeedNode
 from dbt.node_types import NodeType
@@ -24,7 +24,7 @@ class SeedParser(SimpleSQLParser[ParsedSeedNode]):
         return block.path.relative_path

     def render_with_context(
-        self, parsed_node: ParsedSeedNode, config: ContextConfigType
+        self, parsed_node: ParsedSeedNode, config: ContextConfig
     ) -> None:
         """Seeds don't need to do any rendering."""

@@ -110,10 +110,6 @@ class TestSimpleDependencyUnpinned(DBTIntegrationTest):

     @use_profile('postgres')
     def test_postgres_simple_dependency(self):
-        # hack: insert the config version warning into the active deprecations,
-        # to avoid triggering on that, since the unpinned branch also should
-        # warn about the version.
-        deprecations.active_deprecations.add('dbt-project-yaml-v1')
         with self.assertRaises(CompilationException) as exc:
             self.run_dbt(["deps"])
         assert 'is not pinned' in str(exc.exception)

@@ -161,17 +157,18 @@ class TestRekeyedDependencyWithSubduplicates(DBTIntegrationTest):

     @property
     def packages_config(self):
-        # dbt-event-logging@0.1.5 requires dbt-utils.git@0.1.12, which the
+        # this revision of dbt-integration-project requires dbt-utils.git@0.5.1, which the
         # package config handling should detect
         return {
             'packages': [
                 {
-                    'git': 'https://github.com/fishtown-analytics/dbt-utils',
-                    'revision': '0.1.12',
+                    'git': 'https://github.com/fishtown-analytics/dbt-integration-project',
+                    'revision': 'config-version-2-deps'
                 },
                 {
-                    'git': 'https://github.com/fishtown-analytics/dbt-event-logging.git',
-                    'revision': '0.1.5',
+                    'git': 'https://github.com/fishtown-analytics/dbt-utils.git',
+                    'revision': '0.5.1',
                 }
             ]
         }

@@ -88,114 +88,3 @@ class TestSimpleDependencyWithOverriddenConfigs(BaseTestSimpleDependencyWithConfigs):
         self.assertTablesEqual("seed", "table_model")
         self.assertTablesEqual("seed", "view_model")
         self.assertTablesEqual("seed", "incremental")


-class TestSimpleDependencyWithModelSpecificOverriddenConfigs(BaseTestSimpleDependencyWithConfigs):
-
-    @property
-    def packages_config(self):
-        return {
-            "packages": [
-                {
-                    'git': 'https://github.com/fishtown-analytics/dbt-integration-project',
-                    'revision': 'with-configs-0.17.0',
-                },
-            ]
-        }
-
-    @property
-    def project_config(self):
-        # This feature doesn't exist in v2!
-        return {
-            'config-version': 1,
-            "models": {
-                "dbt_integration_project": {
-                    "config": {
-                        # model-level configs
-                        "vars": {
-                            "config_1": "ghi",
-                            "config_2": "jkl",
-                            "bool_config": True,
-                        }
-                    }
-                }
-            },
-        }
-
-    @use_profile('postgres')
-    def test_postgres_simple_dependency(self):
-        self.use_default_project()
-
-        self.run_dbt(["deps"])
-        results = self.run_dbt(["run"], strict=False)  # config is v1, can't use strict here
-        self.assertEqual(len(results), 5)
-
-        self.assertTablesEqual('seed_config_expected_3', "config")
-        self.assertTablesEqual("seed", "table_model")
-        self.assertTablesEqual("seed", "view_model")
-        self.assertTablesEqual("seed", "incremental")
-
-
-class TestSimpleDependencyWithModelSpecificOverriddenConfigsAndMaterializations(BaseTestSimpleDependencyWithConfigs):
-
-    @property
-    def packages_config(self):
-        return {
-            "packages": [
-                {
-                    'git': 'https://github.com/fishtown-analytics/dbt-integration-project',
-                    'revision': 'with-configs-0.17.0',
-                },
-            ]
-        }
-
-    @property
-    def project_config(self):
-        return {
-            'config-version': 1,
-            "models": {
-                "dbt_integration_project": {
-                    # disable config model, but supply vars
-                    "config": {
-                        "enabled": False,
-                        "vars": {
-                            "config_1": "ghi",
-                            "config_2": "jkl",
-                            "bool_config": True
-                        }
-                    },
-                    # disable the table model
-                    "table_model": {
-                        "enabled": False,
-                    },
-                    # override materialization settings
-                    "view_model": {
-                        "materialized": "table"
-                    }
-                }
-            },
-        }
-
-    @use_profile('postgres')
-    def test_postgres_simple_dependency(self):
-        self.run_dbt(["deps"])
-        results = self.run_dbt(["run"], strict=False)  # config is v1, can't use strict here
-        self.assertEqual(len(results), 3)
-
-        self.assertTablesEqual("seed", "view_model")
-        self.assertTablesEqual("seed", "incremental")
-
-        created_models = self.get_models_in_schema()
-
-        # config, table are disabled
-        self.assertFalse('config' in created_models)
-        self.assertFalse('table_model' in created_models)
-
-        self.assertTrue('view_model' in created_models)
-        self.assertEqual(created_models['view_model'], 'table')
-
-        self.assertTrue('incremental' in created_models)
-        self.assertEqual(created_models['incremental'], 'table')

@@ -0,0 +1,7 @@
+name: 'local_dep'
+version: '1.0'
+config-version: 2
+
+profile: 'default'
+
+macro-paths: ["macros"]

@@ -0,0 +1,82 @@
+{# this is copy-pasted from dbt-utils, this test is no longer at its mercy #}
+{% macro except() %}
+    {{ adapter.dispatch('except', packages=['local_dep'])() }}
+{% endmacro %}
+
+{% macro default__except() %}
+
+    except
+
+{% endmacro %}
+
+{% macro bigquery__except() %}
+
+    except distinct
+
+{% endmacro %}
+
+{% macro test_equality(model) %}
+
+    {% set compare_model = kwargs.get('compare_model', kwargs.get('arg')) %}
+
+    {#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. #}
+    {%- if not execute -%}
+        {{ return('') }}
+    {% endif %}
+
+    -- setup
+
+    {% set dest_columns = adapter.get_columns_in_relation(model) %}
+    {% set dest_cols_csv = dest_columns | map(attribute='quoted') | join(', ') %}
+
+    with a as (
+
+        select * from {{ model }}
+
+    ),
+
+    b as (
+
+        select * from {{ compare_model }}
+
+    ),
+
+    a_minus_b as (
+
+        select {{dest_cols_csv}} from a
+        {{ local_dep.except() }}
+        select {{dest_cols_csv}} from b
+
+    ),
+
+    b_minus_a as (
+
+        select {{dest_cols_csv}} from b
+        {{ local_dep.except() }}
+        select {{dest_cols_csv}} from a
+
+    ),
+
+    unioned as (
+
+        select * from a_minus_b
+        union all
+        select * from b_minus_a
+
+    ),
+
+    final as (
+
+        select (select count(*) from unioned) +
+            (select abs(
+                (select count(*) from a_minus_b) -
+                (select count(*) from b_minus_a)
+            ))
+        as count
+
+    )
+
+    select count from final
+
+{% endmacro %}

@@ -17,4 +17,4 @@ models:
       - rejected_values: { values: ['orange', 'purple'] }
 # passes
 tests:
-  - dbt_utils.equality: { compare_model: ref('table_copy') }
+  - local_dep.equality: { compare_model: ref('table_copy') }

@@ -218,8 +218,7 @@ class TestCustomSchemaTests(DBTIntegrationTest):
         return {
             'packages': [
                 {
-                    'git': 'https://github.com/fishtown-analytics/dbt-utils',
-                    'revision': '0.13-support',
+                    "local": "./local_dependency",
                 },
                 {
                     'git': 'https://github.com/fishtown-analytics/dbt-integration-project',
@@ -251,7 +250,7 @@ class TestCustomSchemaTests(DBTIntegrationTest):
     @use_profile('postgres')
     def test_postgres_schema_tests(self):
         self.run_dbt(["deps"])
-        results = self.run_dbt(strict=False)  # dbt-utils 0.13-support is config version 1
+        results = self.run_dbt()
         self.assertEqual(len(results), 4)

         test_results = self.run_schema_validations()

@@ -81,31 +81,6 @@ class TestModelsKeyMismatchDeprecation(BaseTestDeprecations):
         self.assertEqual(expected, deprecations.active_deprecations)


-class TestDbtProjectYamlV1Deprecation(BaseTestDeprecations):
-    @property
-    def models(self):
-        return 'boring-models'
-
-    @property
-    def project_config(self):
-        return {'config-version': 1}
-
-    @use_profile('postgres')
-    def test_postgres_project_deprecations_fail(self):
-        with self.assertRaises(dbt.exceptions.CompilationException) as exc:
-            self.run_dbt(strict=True)
-
-        exc_str = ' '.join(str(exc.exception).split())  # flatten all whitespace
-        self.assertIn('Support for the existing version 1 format will be removed', exc_str)
-
-    @use_profile('postgres')
-    def test_postgres_project_deprecations(self):
-        self.assertEqual(deprecations.active_deprecations, set())
-        self.run_dbt(strict=False)
-        expected = {'dbt-project-yaml-v1'}
-        self.assertEqual(expected, deprecations.active_deprecations)
-
-
 class TestAdapterMacroDeprecation(BaseTestDeprecations):
     @property
     def models(self):

@@ -1,2 +0,0 @@
-from_root,from_second
-root_second_value,second_to_second_override_value

@@ -1,43 +0,0 @@
-
-name: 'second_dep'
-version: '1.0'
-
-profile: 'default'
-
-source-paths: ["models"]
-analysis-paths: ["analysis"]
-test-paths: ["tests"]
-data-paths: ["data"]
-macro-paths: ["macros"]
-
-require-dbt-version: '>=0.1.0'
-
-target-path: "target"  # directory which will store compiled SQL files
-clean-targets:         # directories to be removed by `dbt clean`
-    - "target"
-    - "dbt_modules"
-
-
-seeds:
-    quote_columns: False
-
-
-models:
-    second_dep:
-        vars:
-            from_second_to_second: 'never_see_me'
-        inner:
-            vars:
-                from_second_to_second: 'second_to_second_override_value'
-    first_dep:
-        vars:
-            from_second_to_first: 'never_see_me_either'
-        nested:
-            vars:
-                from_second_to_first: 'second_to_first_override_value'
-    test:
-        vars:
-            from_second_to_root: 'also_never_see_me'
-        inside:
-            vars:
-                from_second_to_root: 'second_to_root_override_value'

@@ -1,3 +0,0 @@
-select
-    '{{ var("from_root_to_second") }}' as from_root,
-    '{{ var("from_second_to_second") }}' as from_second

@@ -169,7 +169,6 @@ class TestVarDependencyInheritance(DBTIntegrationTest):
         return {
             "packages": [
                 {'local': 'first_dependency'},
-                {'local': 'second_dependency_v1'},
             ]
         }

@@ -186,20 +185,16 @@ class TestVarDependencyInheritance(DBTIntegrationTest):
                 'first_dep': {
                     'from_root_to_first': 'root_first_value',
                 },
-                'second_dep': {
-                    'from_root_to_second': 'root_second_value',
-                },
             },
         }

     @use_profile('postgres')
     def test_postgres_var_mutual_overrides_v1_conversion(self):
         self.run_dbt(['deps'], strict=False)
-        assert len(self.run_dbt(['seed'], strict=False)) == 3
-        assert len(self.run_dbt(['run'], strict=False)) == 3
+        assert len(self.run_dbt(['seed'], strict=False)) == 2
+        assert len(self.run_dbt(['run'], strict=False)) == 2
         self.assertTablesEqual('root_model_expected', 'model')
         self.assertTablesEqual('first_dep_expected', 'first_dep_model')
-        self.assertTablesEqual('second_dep_expected', 'second_dep_model')


 class TestMissingVarGenerateNameMacro(DBTIntegrationTest):

@@ -1101,8 +1101,9 @@ class TestRPCServerDeps(HasRPCServer):
     @property
     def packages_config(selF):
         return {
+            # this is config-version 2, but with no upper bound
             'packages': [
-                {'package': 'fishtown-analytics/dbt_utils', 'version': '0.2.1'},
+                {'package': 'fishtown-analytics/dbt_utils', 'version': '0.5.0'},
             ]
         }

@@ -76,11 +76,11 @@ def deps_with_packages(packages, bad_packages, project_dir, profiles_dir, schema
 def test_rpc_deps_packages(project_root, profiles_root, dbt_profile, unique_schema):
     packages = [{
         'package': 'fishtown-analytics/dbt_utils',
-        'version': '0.2.1',
+        'version': '0.5.1',
     }]
     bad_packages = [{
         'package': 'fishtown-analytics/dbt_util',
-        'version': '0.2.1',
+        'version': '0.5.1',
     }]
     deps_with_packages(packages, bad_packages, project_root, profiles_root, unique_schema)
|
||||
|
||||
@@ -89,7 +89,7 @@ def test_rpc_deps_packages(project_root, profiles_root, dbt_profile, unique_sche
|
||||
def test_rpc_deps_git(project_root, profiles_root, dbt_profile, unique_schema):
|
||||
packages = [{
|
||||
'git': 'https://github.com/fishtown-analytics/dbt-utils.git',
|
||||
'revision': '0.2.1'
|
||||
'revision': '0.5.1'
|
||||
}]
|
||||
# if you use a bad URL, git thinks it's a private repo and prompts for auth
|
||||
bad_packages = [{
|
||||
|
||||
@@ -14,7 +14,6 @@ from dbt.adapters.bigquery import BigQueryCredentials
|
||||
from dbt.adapters.bigquery import BigQueryAdapter
|
||||
from dbt.adapters.bigquery import BigQueryRelation
|
||||
from dbt.adapters.bigquery import Plugin as BigQueryPlugin
|
||||
from dbt.adapters.bigquery.relation import BigQueryInformationSchema
|
||||
from dbt.adapters.bigquery.connections import BigQueryConnectionManager
|
||||
from dbt.adapters.base.query_headers import MacroQueryStringSetter
|
||||
from dbt.clients import agate_helper
|
||||
@@ -25,6 +24,7 @@ import google.cloud.bigquery
|
||||
|
||||
from .utils import config_from_parts_or_dicts, inject_adapter, TestAdapterConversions
|
||||
|
||||
|
||||
def _bq_conn():
|
||||
conn = MagicMock()
|
||||
conn.get.side_effect = lambda x: 'bigquery' if x == 'type' else None
|
||||
@@ -80,6 +80,7 @@ class BaseTestBigQueryAdapter(unittest.TestCase):
|
||||
'version': '0.1',
|
||||
'project-root': '/tmp/dbt/does-not-exist',
|
||||
'profile': 'default',
|
||||
'config-version': 2,
|
||||
}
|
||||
self.qh_patch = None
|
||||
|
||||
@@ -517,7 +518,7 @@ class TestBigQueryConnectionManager(unittest.TestCase):
|
||||
self.mock_client.query.assert_called_once_with(
|
||||
'sql', job_config=mock_bq.QueryJobConfig())
|
||||
|
||||
|
||||
|
||||
def test_copy_bq_table_appends(self):
|
||||
self._copy_table(
|
||||
write_disposition=dbt.adapters.bigquery.impl.WRITE_APPEND)
|
||||
|
||||
@@ -43,6 +43,7 @@ class CompilerTest(unittest.TestCase):
|
||||
'version': '0.1',
|
||||
'profile': 'test',
|
||||
'project-root': '/tmp/dbt/does-not-exist',
|
||||
'config-version': 2,
|
||||
}
|
||||
profile_cfg = {
|
||||
'outputs': {
|
||||
|
||||
@@ -105,6 +105,7 @@ class BaseConfigTest(unittest.TestCase):
|
||||
'version': '0.0.1',
|
||||
'name': 'my_test_project',
|
||||
'profile': 'default',
|
||||
'config-version': 2,
|
||||
}
|
||||
self.default_profile_data = {
|
||||
'default': {
|
||||
@@ -1256,7 +1257,7 @@ class TestVariableRuntimeConfigFiles(BaseFileTest):
|
||||
self.assertEqual(config.seeds['bar']['materialized'], 'default') # rendered!
|
||||
|
||||
|
||||
class TestV2V1Conversion(unittest.TestCase):
|
||||
class TestVarLookups(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.initial_src_vars = {
|
||||
# globals
|
||||
@@ -1280,43 +1281,8 @@ class TestV2V1Conversion(unittest.TestCase):
|
||||
self.other_var_search = mock.MagicMock(fqn=['other_project', 'model'], resource_type=NodeType.Model, package_name='other_project')
|
||||
self.third_var_search = mock.MagicMock(fqn=['third_project', 'third_model'], resource_type=NodeType.Model, package_name='third_project')
|
||||
|
||||
def test_v2_v1_dict(self):
|
||||
dbt.config.project.v2_vars_to_v1(self.dst, self.src_vars, self.projects)
|
||||
# make sure the input didn't get mutated. That would be bad!
|
||||
assert self.src_vars == self.initial_src_vars
|
||||
# conversion sould remove top-level 'vars'
|
||||
assert 'vars' not in self.dst
|
||||
|
||||
# when we convert, all of models/seeds/snapshots will have the same vars
|
||||
for key in ['models', 'seeds', 'snapshots']:
|
||||
assert key in self.dst
|
||||
for project in self.projects:
|
||||
assert project in self.dst[key]
|
||||
assert 'vars' in self.dst[key][project]
|
||||
if project == 'my_project':
|
||||
assert self.dst[key][project]['vars'] == {
|
||||
'foo': 123, # override
|
||||
'bar': 'goodbye',
|
||||
'baz': True, # only in my-project
|
||||
}
|
||||
elif project == 'other_project':
|
||||
assert self.dst[key][project]['vars'] == {
|
||||
'foo': 456, # override
|
||||
'bar': 'hello',
|
||||
}
|
||||
elif project == 'third_project':
|
||||
assert self.dst[key][project]['vars'] == {
|
||||
'foo': 123,
|
||||
'bar': 'hello',
|
||||
}
|
||||
else:
|
||||
assert False, f'extra project: {project}'
|
||||
|
||||
def test_v2_v1_lookups(self):
|
||||
dbt.config.project.v2_vars_to_v1(self.dst, self.src_vars, self.projects)
|
||||
|
||||
v1_vars = dbt.config.project.V1VarProvider(**self.dst)
|
||||
v2_vars = dbt.config.project.V2VarProvider(self.initial_src_vars)
|
||||
def test_lookups(self):
|
||||
vars_provider = dbt.config.project.VarProvider(self.initial_src_vars)
|
||||
|
||||
expected = [
|
||||
(self.local_var_search, 'foo', 123),
|
||||
@@ -1330,5 +1296,5 @@ class TestV2V1Conversion(unittest.TestCase):
|
||||
(self.third_var_search, 'baz', None),
|
||||
]
|
||||
for node, key, expected_value in expected:
|
||||
assert v1_vars.vars_for(node, 'postgres').get(key) == expected_value
|
||||
assert v2_vars.vars_for(node, 'postgres').get(key) == expected_value
|
||||
value = vars_provider.vars_for(node, 'postgres').get(key)
|
||||
assert value == expected_value
|
||||
|
@@ -14,7 +14,7 @@ from dbt.clients.jinja import MacroStack
from dbt.contracts.graph.parsed import (
    ParsedModelNode, NodeConfig, DependsOn, ParsedMacro
)
from dbt.config.project import V1VarProvider
from dbt.config.project import VarProvider
from dbt.context import base, target, configured, providers, docs, manifest, macros
from dbt.contracts.files import FileHash
from dbt.node_types import NodeType
@@ -58,49 +58,43 @@ class TestVar(unittest.TestCase):
            checksum=FileHash.from_contents(''),
        )
        self.context = mock.MagicMock()
        self.provider = V1VarProvider({}, {}, {})
        self.provider = VarProvider({})
        self.config = mock.MagicMock(
            config_version=1, vars=self.provider, cli_vars={}, project_name='root'
            config_version=2, vars=self.provider, cli_vars={}, project_name='root'
        )

    @mock.patch('dbt.legacy_config_updater.get_config_class_by_name', return_value=AdapterConfig)
    def test_var_default_something(self, mock_get_cls):
    def test_var_default_something(self):
        self.config.cli_vars = {'foo': 'baz'}
        var = providers.RuntimeVar(self.context, self.config, self.model)
        self.assertEqual(var('foo'), 'baz')
        self.assertEqual(var('foo', 'bar'), 'baz')

    @mock.patch('dbt.legacy_config_updater.get_config_class_by_name', return_value=AdapterConfig)
    def test_var_default_none(self, mock_get_cls):
    def test_var_default_none(self):
        self.config.cli_vars = {'foo': None}
        var = providers.RuntimeVar(self.context, self.config, self.model)
        self.assertEqual(var('foo'), None)
        self.assertEqual(var('foo', 'bar'), None)

    @mock.patch('dbt.legacy_config_updater.get_config_class_by_name', return_value=AdapterConfig)
    def test_var_not_defined(self, mock_get_cls):
    def test_var_not_defined(self):
        var = providers.RuntimeVar(self.context, self.config, self.model)

        self.assertEqual(var('foo', 'bar'), 'bar')
        with self.assertRaises(dbt.exceptions.CompilationException):
            var('foo')

    @mock.patch('dbt.legacy_config_updater.get_config_class_by_name', return_value=AdapterConfig)
    def test_parser_var_default_something(self, mock_get_cls):
    def test_parser_var_default_something(self):
        self.config.cli_vars = {'foo': 'baz'}
        var = providers.ParseVar(self.context, self.config, self.model)
        self.assertEqual(var('foo'), 'baz')
        self.assertEqual(var('foo', 'bar'), 'baz')

    @mock.patch('dbt.legacy_config_updater.get_config_class_by_name', return_value=AdapterConfig)
    def test_parser_var_default_none(self, mock_get_cls):
    def test_parser_var_default_none(self):
        self.config.cli_vars = {'foo': None}
        var = providers.ParseVar(self.context, self.config, self.model)
        self.assertEqual(var('foo'), None)
        self.assertEqual(var('foo', 'bar'), None)

    @mock.patch('dbt.legacy_config_updater.get_config_class_by_name', return_value=AdapterConfig)
    def test_parser_var_not_defined(self, mock_get_cls):
    def test_parser_var_not_defined(self):
        # at parse-time, we should not raise if we encounter a missing var
        # that way disabled models don't get parse errors
        var = providers.ParseVar(self.context, self.config, self.model)

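The parse-time comment above is the crux of these tests: at runtime an undefined var with no default is a compilation error, while at parse time it falls back to None so disabled models still parse. A condensed, hypothetical illustration of that contract follows; these are not the real provider classes, which take a context, config, and model rather than raw cli_vars.

_MISSING = object()  # sentinel distinguishing "no default given" from None


class RuntimeVarSketch:
    """At execution time, a missing var with no default is an error."""

    def __init__(self, cli_vars):
        self.cli_vars = cli_vars

    def __call__(self, name, default=_MISSING):
        if name in self.cli_vars:
            return self.cli_vars[name]
        if default is _MISSING:
            # dbt raises dbt.exceptions.CompilationException here
            raise RuntimeError(f"Required var '{name}' not found")
        return default


class ParseVarSketch(RuntimeVarSketch):
    """At parse time, a missing var yields None instead of raising."""

    def __call__(self, name, default=_MISSING):
        if default is _MISSING:
            default = None
        return super().__call__(name, default)
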
@@ -247,6 +241,7 @@ PROJECT_DATA = {
    'version': '0.1',
    'profile': 'test',
    'project-root': os.getcwd(),
    'config-version': 2,
}

@@ -2,10 +2,11 @@ from .utils import ContractTestCase

from hologram import ValidationError

from dbt.contracts.project import ProjectV1
from dbt.contracts.project import Project

class TestProjectV1(ContractTestCase):
    ContractType = ProjectV1

class TestProject(ContractTestCase):
    ContractType = Project

    def test_minimal(self):
        dct = {
@@ -13,12 +14,14 @@ class TestProjectV1(ContractTestCase):
            'version': '1.0',
            'profile': 'test',
            'project-root': '/usr/src/app',
            'config-version': 2,
        }
        project = ProjectV1(
        project = self.ContractType(
            name='test',
            version='1.0',
            profile='test',
            project_root='/usr/src/app',
            config_version=2,
        )
        self.assert_from_dict(project, dct)

@@ -28,6 +31,17 @@ class TestProjectV1(ContractTestCase):
            'version': '1.0',
            'profile': 'test',
            'project-root': '/usr/src/app',
            'config-version': 2,
        }
        with self.assertRaises(ValidationError):
            ProjectV1.from_dict(dct)
            self.ContractType.from_dict(dct)

    def test_unsupported_version(self):
        dct = {
            'name': 'test',
            'version': '1.0',
            'profile': 'test',
            'project-root': '/usr/src/app',
        }
        with self.assertRaises(ValidationError):
            self.ContractType.from_dict(dct)

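The new test_unsupported_version pins down that a project dict without 'config-version' no longer validates. As a usage sketch of that behavior (field values are placeholders taken from the fixtures above):

from hologram import ValidationError

from dbt.contracts.project import Project

dct = {
    'name': 'test',
    'version': '1.0',
    'profile': 'test',
    'project-root': '/usr/src/app',
    # no 'config-version' key: pre-v2 project files are rejected
}
try:
    Project.from_dict(dct)
except ValidationError:
    print("'config-version' is required")
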
@@ -114,6 +114,7 @@ class DocumentationParserTest(unittest.TestCase):
            'version': '0.1',
            'profile': 'test',
            'project-root': self.root_path,
            'config-version': 2,
        }

        subdir_project = {
@@ -122,6 +123,7 @@ class DocumentationParserTest(unittest.TestCase):
            'profile': 'test',
            'project-root': self.subdir_path,
            'quoting': {},
            'config-version': 2,
        }
        self.root_project_config = config_from_parts_or_dicts(
            project=root_project, profile=profile_data

@@ -141,6 +141,7 @@ class GraphTest(unittest.TestCase):
            'version': '0.1',
            'profile': 'test',
            'project-root': os.path.abspath('.'),
            'config-version': 2,
        }
        cfg.update(extra_cfg)

@@ -48,6 +48,7 @@ class TestLoader(unittest.TestCase):
            'version': '0.1',
            'profile': 'test',
            'project-root': normalize('/usr/src/app'),
            'config-version': 2,
        }

        self.root_project_config = config_from_parts_or_dicts(

@@ -92,6 +92,7 @@ class BaseParserTest(unittest.TestCase):
            'version': '0.1',
            'profile': 'test',
            'project-root': normalize('/usr/src/app'),
            'config-version': 2,
        }

        self.root_project_config = config_from_parts_or_dicts(
@@ -105,6 +106,7 @@ class BaseParserTest(unittest.TestCase):
            'version': '0.1',
            'profile': 'test',
            'project-root': get_abs_os_path('./dbt_modules/snowplow'),
            'config-version': 2,
        }

        self.snowplow_project_config = config_from_parts_or_dicts(

@@ -28,6 +28,7 @@ class TestPostgresAdapter(unittest.TestCase):
            'version': '0.1',
            'profile': 'test',
            'project-root': '/tmp/dbt/does-not-exist',
            'config-version': 2,
        }
        profile_cfg = {
            'outputs': {
@@ -266,6 +267,7 @@ class TestConnectingPostgresAdapter(unittest.TestCase):
                'identifier': False,
                'schema': True,
            },
            'config-version': 2,
        }

        self.config = config_from_parts_or_dicts(project_cfg, profile_cfg)
@@ -397,6 +399,7 @@ class TestConnectingPostgresAdapter(unittest.TestCase):
                'identifier': False,
                'schema': True,
            },
            'config-version': 2,
        }
        self.config = config_from_parts_or_dicts(project_cfg, profile_cfg)
        self.adapter.cleanup_connections()

@@ -27,6 +27,7 @@ class TestQueryHeaders(TestCase):
            'name': 'query_headers',
            'version': '0.1',
            'profile': 'test',
            'config-version': 2,
        }
        self.query = "SELECT 1;"

@@ -53,6 +53,7 @@ class TestRedshiftAdapter(unittest.TestCase):
                'identifier': False,
                'schema': True,
            },
            'config-version': 2,
        }

        self.config = config_from_parts_or_dicts(project_cfg, profile_cfg)
@@ -263,6 +264,7 @@ class TestRedshiftAdapter(unittest.TestCase):
                'identifier': False,
                'schema': True,
            },
            'config-version': 2,
        }
        self.config = config_from_parts_or_dicts(project_cfg, profile_cfg)
        self.adapter.cleanup_connections()

@@ -45,6 +45,7 @@ class TestSnowflakeAdapter(unittest.TestCase):
                'schema': True,
            },
            'query-comment': 'dbt',
            'config-version': 2,
        }
        self.config = config_from_parts_or_dicts(project_cfg, profile_cfg)
        self.assertEqual(self.config.query_comment.comment, 'dbt')

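Every fixture in the hunks above now carries the same key. As a hedged example, this is the minimal common shape they converge on; the dict below is a placeholder (names and paths vary per test), not a fixture from the repo.

import os

# Minimal project dict accepted by config_from_parts_or_dicts once
# config-version 2 is mandatory; all values here are placeholders.
BASE_PROJECT = {
    'name': 'root',
    'version': '0.1',
    'profile': 'test',
    'project-root': os.getcwd(),
    'config-version': 2,
}
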
@@ -1,177 +0,0 @@
import os
from unittest import TestCase, mock

from dbt.adapters import postgres  # we want this available!
import dbt.flags
from dbt.context.context_config import LegacyContextConfig
from dbt.legacy_config_updater import ConfigUpdater
from dbt.node_types import NodeType

from .utils import config_from_parts_or_dicts


class LegacyContextConfigTest(TestCase):
    def setUp(self):
        dbt.flags.STRICT_MODE = True
        dbt.flags.WARN_ERROR = True

        self.maxDiff = None

        profile_data = {
            'target': 'test',
            'quoting': {},
            'outputs': {
                'test': {
                    'type': 'redshift',
                    'host': 'localhost',
                    'schema': 'analytics',
                    'user': 'test',
                    'pass': 'test',
                    'dbname': 'test',
                    'port': 1,
                }
            }
        }

        root_project = {
            'name': 'root',
            'version': '0.1',
            'profile': 'test',
            'project-root': os.path.abspath('.'),
        }

        self.root_project_config = config_from_parts_or_dicts(
            project=root_project,
            profile=profile_data,
            cli_vars='{"test_schema_name": "foo"}'
        )

        snowplow_project = {
            'name': 'snowplow',
            'version': '0.1',
            'profile': 'test',
            'project-root': os.path.abspath('./dbt_modules/snowplow'),
        }

        self.snowplow_project_config = config_from_parts_or_dicts(
            project=snowplow_project, profile=profile_data
        )

        self.all_projects = {
            'root': self.root_project_config,
            'snowplow': self.snowplow_project_config
        }
        self.patcher = mock.patch('dbt.context.providers.get_adapter')
        self.factory = self.patcher.start()

    def tearDown(self):
        self.patcher.stop()

    def test__context_config_single_call(self):
        cfg = LegacyContextConfig(
            self.root_project_config, self.root_project_config,
            ['root', 'x'], NodeType.Model
        )
        cfg.update_in_model_config({
            'materialized': 'something',
            'sort': 'my sort key',
            'pre-hook': 'my pre run hook',
            'vars': {'a': 1, 'b': 2},
        })
        expect = {
            'column_types': {},
            'enabled': True,
            'materialized': 'something',
            'post-hook': [],
            'pre-hook': ['my pre run hook'],
            'persist_docs': {},
            'quoting': {},
            'sort': 'my sort key',
            'tags': [],
            'vars': {'a': 1, 'b': 2},
        }
        self.assertEqual(cfg.build_config_dict(), expect)

    def test__context_config_multiple_calls(self):
        cfg = LegacyContextConfig(
            self.root_project_config, self.root_project_config,
            ['root', 'x'], NodeType.Model
        )
        cfg.update_in_model_config({
            'materialized': 'something',
            'sort': 'my sort key',
            'pre-hook': 'my pre run hook',
            'vars': {'a': 1, 'b': 2},
        })
        cfg.update_in_model_config({
            'materialized': 'something else',
            'pre-hook': ['my other pre run hook', 'another pre run hook'],
            'vars': {'a': 4, 'c': 3},
        })
        expect = {
            'column_types': {},
            'enabled': True,
            'materialized': 'something else',
            'persist_docs': {},
            'post-hook': [],
            'pre-hook': [
                'my pre run hook',
                'my other pre run hook',
                'another pre run hook',
            ],
            'quoting': {},
            'sort': 'my sort key',
            'tags': [],
            'vars': {'a': 4, 'b': 2, 'c': 3},
        }
        self.assertEqual(cfg.build_config_dict(), expect)

    def test__context_config_merge(self):
        self.root_project_config.models = {'sort': ['a', 'b']}
        cfg = LegacyContextConfig(
            self.root_project_config, self.root_project_config,
            ['root', 'x'], NodeType.Model
        )
        cfg.update_in_model_config({
            'materialized': 'something',
            'sort': ['d', 'e']
        })
        expect = {
            'column_types': {},
            'enabled': True,
            'materialized': 'something',
            'post-hook': [],
            'pre-hook': [],
            'persist_docs': {},
            'quoting': {},
            'sort': ['d', 'e'],
            'tags': [],
            'vars': {},
        }
        self.assertEqual(cfg.build_config_dict(), expect)

    def test_context_config_all_keys_accounted_for(self):
        updater = ConfigUpdater('postgres')
        used_keys = (
            frozenset(updater.AppendListFields) |
            frozenset(updater.ExtendDictFields) |
            frozenset(updater.ClobberFields) |
            frozenset({'unlogged'})
        )

        self.assertEqual(used_keys, frozenset(updater.ConfigKeys))

    def test__context_config_wrong_type(self):
        # a non-dict value for an ExtendDict field should raise a clear error
        self.root_project_config.models = {'persist_docs': False}
        cfg = LegacyContextConfig(
            self.root_project_config, self.root_project_config,
            ['root', 'x'], NodeType.Model
        )

        model = mock.MagicMock(resource_type=NodeType.Model, fqn=['root', 'x'], project_name='root')

        with self.assertRaises(dbt.exceptions.CompilationException) as exc:
            cfg._updater.get_project_config(model, self.root_project_config)

        self.assertIn('must be a dict', str(exc.exception))
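The deleted file above encodes the legacy merge rules this refactor retires: AppendListFields concatenate (scalars normalized to one-element lists), ExtendDictFields dict-merge and must be dicts, and everything else clobbers. The following is a rough, illustrative reimplementation of just that merge step; the field sets are assumptions read off the test expectations above, not values taken from ConfigUpdater.

def merge_legacy_config(old, new,
                        append_list_fields=('pre-hook', 'post-hook'),
                        extend_dict_fields=('vars', 'quoting', 'column_types',
                                            'persist_docs')):
    """Mirror the deleted tests' expectations; not the real updater."""
    merged = dict(old)
    for key, value in new.items():
        if key in append_list_fields:
            if not isinstance(value, list):
                value = [value]  # 'my pre run hook' -> ['my pre run hook']
            merged[key] = list(merged.get(key, [])) + value
        elif key in extend_dict_fields:
            if not isinstance(value, dict):
                # the real updater raises CompilationException('... must be a dict')
                raise TypeError(f'Invalid {key} config given, must be a dict')
            merged[key] = {**merged.get(key, {}), **value}
        else:
            merged[key] = value  # clobber: last write wins
    return merged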