Mirror of https://github.com/dbt-labs/dbt-core, synced 2025-12-17 19:31:34 +00:00
Initial pass at switching integration tests to pytest (#4691)

Author: Emily Rockman <emily.rockman@dbtlabs.com>

* Initial pass at switching integration tests to pytest
* Reorganize dbt.tests.tables. Clean up adapter handling
* Move run_sql to TestProjInfo and TableComparison. Add comments, clean up adapter schema setup
* Tweak unique_schema name generation
* Update CHANGELOG.md

Related follow-up: route logs to dbt-core/logs instead of each test folder (#4711)
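To make the new framework concrete before the diff: below is a hedged sketch of what a converted functional test looks like. The module and model names are hypothetical, and it assumes the fixtures defined later in core/dbt/tests/fixtures/project.py are registered with pytest (for example, by importing them in a conftest.py).

    # tests/functional/test_basic_run.py (hypothetical)
    import pytest

    from dbt.tests.util import run_dbt

    my_model_sql = "select 1 as id"


    class TestBasicRun:
        @pytest.fixture
        def models(self):
            # Overrides the empty 'models' fixture; the project_files fixture
            # writes this dict out into the project's models/ directory.
            return {"my_model.sql": my_model_sql}

        def test_run(self, project):
            results = run_dbt(["run"])
            assert len(results) == 1

The `project` fixture does the heavy lifting: it writes profiles.yml and dbt_project.yml, writes out the model files, creates a unique schema, and drops it again at teardown.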
.flake8 (2 changes)
@@ -8,5 +8,5 @@ ignore =
     W504
     E203 # makes Flake8 work like black
     E741
-max-line-length = 99
+max-line-length = 140
 exclude = test
CHANGELOG.md
@@ -19,6 +19,7 @@
 - Drop support for Python 3.7.0 + 3.7.1 ([#4584](https://github.com/dbt-labs/dbt-core/issues/4584), [#4585](https://github.com/dbt-labs/dbt-core/pull/4585), [#4643](https://github.com/dbt-labs/dbt-core/pull/4643))
 - Re-format codebase (except tests) using pre-commit hooks ([#3195](https://github.com/dbt-labs/dbt-core/issues/3195), [#4697](https://github.com/dbt-labs/dbt-core/pull/4697))
 - Add deps module README ([#4686](https://github.com/dbt-labs/dbt-core/pull/4686/))
+- Initial conversion of tests to pytest ([#4690](https://github.com/dbt-labs/dbt-core/issues/4690), [#4691](https://github.com/dbt-labs/dbt-core/pull/4691))

 Contributors:
 - [@NiallRees](https://github.com/NiallRees) ([#4447](https://github.com/dbt-labs/dbt-core/pull/4447))
core/dbt/adapters/factory.py
@@ -177,6 +177,10 @@ def get_adapter(config: AdapterRequiredConfig):
     return FACTORY.lookup_adapter(config.credentials.type)


 def get_adapter_by_type(adapter_type):
     return FACTORY.lookup_adapter(adapter_type)

+
+def reset_adapters():
+    """Clear the adapters. This is useful for tests, which change configs."""
+    FACTORY.reset_adapters()
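The new test fixtures rely on this helper to isolate adapter state between tests. A minimal sketch of the register/lookup/reset cycle, with the RuntimeConfig construction elided (the function name here is illustrative, not part of the change):

    from dbt.adapters.factory import get_adapter, register_adapter, reset_adapters

    def fresh_adapter(runtime_config):
        # Drop any adapter cached from a previous test's config, then
        # re-register and look up an adapter for the current config.
        reset_adapters()
        register_adapter(runtime_config)
        return get_adapter(runtime_config)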
@@ -389,7 +389,9 @@ class NodeConfig(NodeAndTestConfig):
         metadata=MergeBehavior.Update.meta(),
     )
     full_refresh: Optional[bool] = None
-    unique_key: Optional[Union[str, List[str]]] = None
+    # 'unique_key' doesn't use 'Optional' because typing.get_type_hints was
+    # sometimes getting the Union order wrong, causing serialization failures.
+    unique_key: Union[str, List[str], None] = None
     on_schema_change: Optional[str] = "ignore"

     @classmethod
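For context on the comment above: Optional[X] is just shorthand for Union[X, None], and nested Unions flatten, so both spellings denote the same type; the re-spelling only sidesteps the hint-ordering instability the comment describes. A quick self-contained check:

    from typing import List, Optional, Union

    # Optional[X] is shorthand for Union[X, None], and nested Unions flatten,
    # so the two annotations compare equal despite the different spelling.
    assert Optional[Union[str, List[str]]] == Union[str, List[str], None]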
@@ -85,6 +85,7 @@ class RunStatus(StrEnum):


 class TestStatus(StrEnum):
+    __test__ = False
     Pass = NodeStatus.Pass
     Error = NodeStatus.Error
     Fail = NodeStatus.Fail
@@ -230,6 +230,8 @@ class TestTask(RunTask):
     constraints are satisfied.
     """

+    __test__ = False
+
     def raise_on_first_error(self):
         return False
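Both hunks address the same pytest pitfall: names beginning with "Test" match pytest's default collection pattern, so without an opt-out pytest would try to collect TestStatus and TestTask as test classes. Setting the __test__ attribute to False is pytest's documented opt-out; a minimal illustration (class and method names hypothetical):

    class TestLikeHelper:
        __test__ = False  # pytest skips this class during collection

        def test_looks_like_a_test(self):
            raise AssertionError("never collected, so never run")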
core/dbt/tests/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
+# dbt.tests directory
core/dbt/tests/fixtures/__init__.py (new file, vendored, 1 line)
@@ -0,0 +1 @@
+# dbt.tests.fixtures directory
core/dbt/tests/fixtures/project.py (new file, vendored, 402 lines)
@@ -0,0 +1,402 @@
import os
import pytest  # type: ignore
import random
from argparse import Namespace
from datetime import datetime
import yaml
from unittest.mock import patch
from contextlib import contextmanager

import dbt.flags as flags
from dbt.config.runtime import RuntimeConfig
from dbt.adapters.factory import get_adapter, register_adapter, reset_adapters
from dbt.events.functions import setup_event_logger
from dbt.context import providers
from dbt.events.functions import fire_event
from dbt.events.test_types import IntegrationTestDebug


# These are the fixtures that are used in dbt core functional tests


# Used in constructing the unique_schema and logs_dir
@pytest.fixture
def prefix():
    # create a directory name that will be unique per test session
    _randint = random.randint(0, 9999)
    _runtime_timedelta = datetime.utcnow() - datetime(1970, 1, 1, 0, 0, 0)
    _runtime = (int(_runtime_timedelta.total_seconds() * 1e6)) + _runtime_timedelta.microseconds
    prefix = f"test{_runtime}{_randint:04}"
    return prefix


# Every test has a unique schema
@pytest.fixture
def unique_schema(request, prefix) -> str:
    test_file = request.module.__name__
    # We only want the last part of the name
    test_file = test_file.split(".")[-1]
    unique_schema = f"{prefix}_{test_file}"
    return unique_schema


# Create a directory for the profile using tmpdir fixture
@pytest.fixture
def profiles_root(tmpdir):
    # tmpdir docs - https://docs.pytest.org/en/6.2.x/tmpdir.html
    return tmpdir.mkdir("profile")


# Create a directory for the project using tmpdir fixture
@pytest.fixture
def project_root(tmpdir):
    # tmpdir docs - https://docs.pytest.org/en/6.2.x/tmpdir.html
    project_root = tmpdir.mkdir("project")
    print(f"\n=== Test project_root: {project_root}")
    return project_root


# This is for data used by multiple tests, in the 'tests/data' directory
@pytest.fixture(scope="session")
def shared_data_dir(request):
    return os.path.join(request.config.rootdir, "tests", "data")


# This is for data for a specific test directory, i.e. tests/basic/data
@pytest.fixture(scope="module")
def test_data_dir(request):
    return os.path.join(request.fspath.dirname, "data")


# Maybe this doesn't need to be a separate fixture?
@pytest.fixture(scope="session")
def database_host():
    return os.environ.get("DOCKER_TEST_DATABASE_HOST", "localhost")


# The profile dictionary, used to write out profiles.yml
@pytest.fixture
def dbt_profile_data(unique_schema, database_host):
    dbname = os.getenv("POSTGRES_TEST_DATABASE", "dbt")
    return {
        "config": {"send_anonymous_usage_stats": False},
        "test": {
            "outputs": {
                "default": {
                    "type": "postgres",
                    "threads": 4,
                    "host": database_host,
                    "port": int(os.getenv("POSTGRES_TEST_PORT", 5432)),
                    "user": os.getenv("POSTGRES_TEST_USER", "root"),
                    "pass": os.getenv("POSTGRES_TEST_PASS", "password"),
                    "dbname": dbname,
                    "schema": unique_schema,
                },
                "other_schema": {
                    "type": "postgres",
                    "threads": 4,
                    "host": database_host,
                    "port": int(os.getenv("POSTGRES_TEST_PORT", 5432)),
                    "user": "noaccess",
                    "pass": "password",
                    "dbname": dbname,
                    "schema": unique_schema + "_alt",  # Should this be the same unique_schema?
                },
            },
            "target": "default",
        },
    }


# Write out the profile data as a yaml file
@pytest.fixture
def profiles_yml(profiles_root, dbt_profile_data):
    os.environ["DBT_PROFILES_DIR"] = str(profiles_root)
    path = os.path.join(profiles_root, "profiles.yml")
    with open(path, "w") as fp:
        fp.write(yaml.safe_dump(dbt_profile_data))
    yield dbt_profile_data
    del os.environ["DBT_PROFILES_DIR"]


# This fixture can be overridden in a project
@pytest.fixture
def project_config_update():
    return {}


# Combines the project_config_update dictionary with defaults to
# produce a project_yml config and write it out as dbt_project.yml
@pytest.fixture
def dbt_project_yml(project_root, project_config_update, logs_dir):
    project_config = {
        "config-version": 2,
        "name": "test",
        "version": "0.1.0",
        "profile": "test",
        "log-path": logs_dir,
    }
    if project_config_update:
        project_config.update(project_config_update)
    runtime_config_file = project_root.join("dbt_project.yml")
    runtime_config_file.write(yaml.safe_dump(project_config))


# Fixture to provide packages as either yaml or dictionary
@pytest.fixture
def packages():
    return {}


# Write out the packages.yml file
@pytest.fixture
def packages_yml(project_root, packages):
    if packages:
        if isinstance(packages, str):
            data = packages
        else:
            data = yaml.safe_dump(packages)
        project_root.join("packages.yml").write(data)


# Fixture to provide selectors as either yaml or dictionary
@pytest.fixture
def selectors():
    return {}


# Write out the selectors.yml file
@pytest.fixture
def selectors_yml(project_root, selectors):
    if selectors:
        if isinstance(selectors, str):
            data = selectors
        else:
            data = yaml.safe_dump(selectors)
        project_root.join("selectors.yml").write(data)


# This creates an adapter that is used for running test setup and teardown,
# and 'run_sql' commands. The 'run_dbt' commands will create their own adapter
# so this one needs some special patching to run after dbt commands have been
# executed
@pytest.fixture
def adapter(unique_schema, project_root, profiles_root, profiles_yml, dbt_project_yml):
    # The profiles.yml and dbt_project.yml should already be written out
    args = Namespace(
        profiles_dir=str(profiles_root), project_dir=str(project_root), target=None, profile=None
    )
    flags.set_from_args(args, {})
    runtime_config = RuntimeConfig.from_args(args)
    register_adapter(runtime_config)
    adapter = get_adapter(runtime_config)
    yield adapter
    adapter.cleanup_connections()
    reset_adapters()


# Start at directory level.
def write_project_files(project_root, dir_name, file_dict):
    path = project_root.mkdir(dir_name)
    if file_dict:
        write_project_files_recursively(path, file_dict)


# Write files out from file_dict. Can be nested directories...
def write_project_files_recursively(path, file_dict):
    for name, value in file_dict.items():
        if name.endswith(".sql") or name.endswith(".csv") or name.endswith(".md"):
            path.join(name).write(value)
        elif name.endswith(".yml") or name.endswith(".yaml"):
            if isinstance(value, str):
                data = value
            else:
                data = yaml.safe_dump(value)
            path.join(name).write(data)
        else:
            write_project_files_recursively(path.mkdir(name), value)


# models, macros, seeds, snapshots, tests, analysis
# Provide a dictionary of file names to contents. Nested directories
# are handled by nested dictionaries.
@pytest.fixture
def models():
    return {}


@pytest.fixture
def macros():
    return {}


@pytest.fixture
def seeds():
    return {}


@pytest.fixture
def snapshots():
    return {}


@pytest.fixture
def tests():
    return {}


@pytest.fixture
def analysis():
    return {}


# Write out the files provided by models, macros, snapshots, seeds, tests, analysis
@pytest.fixture
def project_files(project_root, models, macros, snapshots, seeds, tests, analysis):
    write_project_files(project_root, "models", models)
    write_project_files(project_root, "macros", macros)
    write_project_files(project_root, "snapshots", snapshots)
    write_project_files(project_root, "seeds", seeds)
    write_project_files(project_root, "tests", tests)
    write_project_files(project_root, "analysis", analysis)


# We have a separate logs dir for every test
@pytest.fixture()
def logs_dir(request, prefix):
    return os.path.join(request.config.rootdir, "logs", prefix)


# This class is returned from the 'project' fixture, and contains information
# from the pytest fixtures that may be needed in the test functions, including
# a 'run_sql' method.
class TestProjInfo:
    def __init__(
        self,
        project_root,
        profiles_dir,
        adapter,
        test_dir,
        shared_data_dir,
        test_data_dir,
        test_schema,
        database,
    ):
        self.project_root = project_root
        self.profiles_dir = profiles_dir
        self.adapter = adapter
        self.test_dir = test_dir
        self.shared_data_dir = shared_data_dir
        self.test_data_dir = test_data_dir
        self.test_schema = test_schema
        self.database = database

    @contextmanager
    def get_connection(self, name="__test"):
        """Since the 'adapter' in dbt.adapters.factory may have been replaced by execution
        of dbt commands since the test 'adapter' was created, we patch the 'get_adapter' call in
        dbt.context.providers, so that macros that are called refer to this test adapter.
        This allows tests to run normal adapter macros as if reset_adapters() were not
        called by handle_and_check (for asserts, etc).
        """
        with patch.object(providers, "get_adapter", return_value=self.adapter):
            with self.adapter.connection_named(name):
                conn = self.adapter.connections.get_thread_connection()
                yield conn

    # Run sql from a path
    def run_sql_file(self, sql_path):
        with open(sql_path, "r") as f:
            statements = f.read().split(";")
            for statement in statements:
                self.run_sql(statement)

    # run sql from a string, using adapter saved at test startup
    def run_sql(self, sql, fetch=None):
        if sql.strip() == "":
            return
        # substitute schema and database in sql
        adapter = self.adapter
        kwargs = {
            "schema": self.test_schema,
            "database": adapter.quote(self.database),
        }
        sql = sql.format(**kwargs)

        with self.get_connection("__test") as conn:
            msg = f'test connection "{conn.name}" executing: {sql}'
            fire_event(IntegrationTestDebug(msg=msg))
            with conn.handle.cursor() as cursor:
                try:
                    cursor.execute(sql)
                    conn.handle.commit()
                    if fetch == "one":
                        return cursor.fetchone()
                    elif fetch == "all":
                        return cursor.fetchall()
                    else:
                        return
                except BaseException as e:
                    if conn.handle and not getattr(conn.handle, "closed", True):
                        conn.handle.rollback()
                    print(sql)
                    print(e)
                    raise
                finally:
                    conn.transaction_open = False

    def get_tables_in_schema(self):
        sql = """
                select table_name,
                        case when table_type = 'BASE TABLE' then 'table'
                             when table_type = 'VIEW' then 'view'
                             else table_type
                        end as materialization
                from information_schema.tables
                where {}
                order by table_name
                """
        sql = sql.format("{} ilike '{}'".format("table_schema", self.test_schema))
        result = self.run_sql(sql, fetch="all")
        return {model_name: materialization for (model_name, materialization) in result}


@pytest.fixture
def project(
    project_root,
    profiles_root,
    request,
    unique_schema,
    profiles_yml,
    dbt_project_yml,
    packages_yml,
    selectors_yml,
    adapter,
    project_files,
    shared_data_dir,
    test_data_dir,
    logs_dir,
):
    setup_event_logger(logs_dir)
    orig_cwd = os.getcwd()
    os.chdir(project_root)
    # Return whatever is needed later in tests but can only come from fixtures, so we can
    # keep the arguments in the test signature to a minimum.
    project = TestProjInfo(
        project_root=project_root,
        profiles_dir=profiles_root,
        adapter=adapter,
        test_dir=request.fspath.dirname,
        shared_data_dir=shared_data_dir,
        test_data_dir=test_data_dir,
        test_schema=unique_schema,
        # the following feels kind of fragile. TODO: better way of getting database
        database=profiles_yml["test"]["outputs"]["default"]["dbname"],
    )
    project.run_sql("drop schema if exists {schema} cascade")
    project.run_sql("create schema {schema}")

    yield project

    project.run_sql("drop schema if exists {schema} cascade")
    os.chdir(orig_cwd)
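A hypothetical test module showing two of the customization points above: overriding project_config_update to merge extra keys into dbt_project.yml, and calling TestProjInfo.run_sql, which substitutes {schema} and {database} before executing:

    import pytest


    class TestWithCustomConfig:
        @pytest.fixture
        def project_config_update(self):
            # Merged into the generated dbt_project.yml by the
            # dbt_project_yml fixture above.
            return {"models": {"+materialized": "view"}}

        def test_fresh_schema(self, project):
            # {schema} is replaced with the test's unique schema name.
            result = project.run_sql(
                "select count(*) from information_schema.tables "
                "where table_schema ilike '{schema}'",
                fetch="one",
            )
            assert result[0] == 0  # the fixture just created an empty schema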
core/dbt/tests/tables.py (new file, 365 lines)
@@ -0,0 +1,365 @@
from dbt.context import providers
from unittest.mock import patch
from contextlib import contextmanager
from dbt.events.functions import fire_event
from dbt.events.test_types import IntegrationTestDebug

# This code was copied from the earlier test framework in test/integration/base.py
# The goal is to vastly simplify this and replace it with calls to macros.
# For now, we use this to get the tests converted in a more straightforward way.
# Assertions:
#   assert_tables_equal (old: assertTablesEqual)
#   assert_many_relations_equal (old: assertManyRelationsEqual)
#   assert_many_tables_equal (old: assertManyTablesEqual)
#   assert_table_does_not_exist (old: assertTableDoesNotExist)
#   assert_table_does_exist (old: assertTableDoesExist)


class TableComparison:
    def __init__(self, adapter, unique_schema, database):
        self.adapter = adapter
        self.unique_schema = unique_schema
        self.default_database = database
        # TODO: We need to get this from somewhere reasonable
        if database == "dbtMixedCase":
            self.quoting = {"database": True, "schema": True, "identifier": True}
        else:
            self.quoting = {"database": False, "schema": False, "identifier": False}

    # assertion used in tests
    def assert_tables_equal(
        self,
        table_a,
        table_b,
        table_a_schema=None,
        table_b_schema=None,
        table_a_db=None,
        table_b_db=None,
    ):
        if table_a_schema is None:
            table_a_schema = self.unique_schema

        if table_b_schema is None:
            table_b_schema = self.unique_schema

        if table_a_db is None:
            table_a_db = self.default_database

        if table_b_db is None:
            table_b_db = self.default_database

        relation_a = self._make_relation(table_a, table_a_schema, table_a_db)
        relation_b = self._make_relation(table_b, table_b_schema, table_b_db)

        self._assert_table_columns_equal(relation_a, relation_b)

        sql = self._assert_tables_equal_sql(relation_a, relation_b)
        result = self.run_sql(sql, fetch="one")

        assert result[0] == 0, "row_count_difference nonzero: " + sql
        assert result[1] == 0, "num_mismatched nonzero: " + sql

    # assertion used in tests
    def assert_many_relations_equal(self, relations, default_schema=None, default_database=None):
        if default_schema is None:
            default_schema = self.unique_schema
        if default_database is None:
            default_database = self.default_database

        specs = []
        for relation in relations:
            if not isinstance(relation, (tuple, list)):
                relation = [relation]

            assert len(relation) <= 3

            if len(relation) == 3:
                relation = self._make_relation(*relation)
            elif len(relation) == 2:
                relation = self._make_relation(relation[0], relation[1], default_database)
            elif len(relation) == 1:
                relation = self._make_relation(relation[0], default_schema, default_database)
            else:
                raise ValueError("relation must be a sequence of 1, 2, or 3 values")

            specs.append(relation)

        with self.get_connection():
            column_specs = self.get_many_relation_columns(specs)

        # make sure everyone has equal column definitions
        first_columns = None
        for relation in specs:
            key = (relation.database, relation.schema, relation.identifier)
            # get a good error here instead of a hard-to-diagnose KeyError
            assert key in column_specs, f"No columns found for {key}"
            columns = column_specs[key]
            if first_columns is None:
                first_columns = columns
            else:
                assert first_columns == columns, f"{str(specs[0])} did not match {str(relation)}"

        # make sure everyone has the same data. if we got here, everyone had
        # the same column specs!
        first_relation = None
        for relation in specs:
            if first_relation is None:
                first_relation = relation
            else:
                sql = self._assert_tables_equal_sql(
                    first_relation, relation, columns=first_columns
                )
                result = self.run_sql(sql, fetch="one")

                assert result[0] == 0, "row_count_difference nonzero: " + sql
                assert result[1] == 0, "num_mismatched nonzero: " + sql

    # assertion used in tests
    def assert_many_tables_equal(self, *args):
        schema = self.unique_schema

        all_tables = []
        for table_equivalencies in args:
            all_tables += list(table_equivalencies)

        all_cols = self.get_table_columns_as_dict(all_tables, schema)

        for table_equivalencies in args:
            first_table = table_equivalencies[0]
            first_relation = self._make_relation(first_table)

            # assert that all tables have the same columns
            base_result = all_cols[first_table]
            assert len(base_result) > 0

            for other_table in table_equivalencies[1:]:
                other_result = all_cols[other_table]
                assert len(other_result) > 0
                assert base_result == other_result

                other_relation = self._make_relation(other_table)
                sql = self._assert_tables_equal_sql(
                    first_relation, other_relation, columns=base_result
                )
                result = self.run_sql(sql, fetch="one")

                assert result[0] == 0, "row_count_difference nonzero: " + sql
                assert result[1] == 0, "num_mismatched nonzero: " + sql

    # assertion used in tests
    def assert_table_does_not_exist(self, table, schema=None, database=None):
        columns = self.get_table_columns(table, schema, database)
        assert len(columns) == 0

    # assertion used in tests
    def assert_table_does_exist(self, table, schema=None, database=None):
        columns = self.get_table_columns(table, schema, database)

        assert len(columns) > 0

    # called by assert_tables_equal
    def _assert_table_columns_equal(self, relation_a, relation_b):
        table_a_result = self.get_relation_columns(relation_a)
        table_b_result = self.get_relation_columns(relation_b)

        assert len(table_a_result) == len(table_b_result)

        for a_column, b_column in zip(table_a_result, table_b_result):
            a_name, a_type, a_size = a_column
            b_name, b_type, b_size = b_column
            assert a_name == b_name, "{} vs {}: column '{}' != '{}'".format(
                relation_a, relation_b, a_name, b_name
            )

            assert a_type == b_type, "{} vs {}: column '{}' has type '{}' != '{}'".format(
                relation_a, relation_b, a_name, a_type, b_type
            )

            assert a_size == b_size, "{} vs {}: column '{}' has size '{}' != '{}'".format(
                relation_a, relation_b, a_name, a_size, b_size
            )

    def get_relation_columns(self, relation):
        with self.get_connection():
            columns = self.adapter.get_columns_in_relation(relation)
        return sorted(((c.name, c.dtype, c.char_size) for c in columns), key=lambda x: x[0])

    def get_table_columns(self, table, schema=None, database=None):
        schema = self.unique_schema if schema is None else schema
        database = self.default_database if database is None else database
        relation = self.adapter.Relation.create(
            database=database,
            schema=schema,
            identifier=table,
            type="table",
            quote_policy=self.quoting,
        )
        return self.get_relation_columns(relation)

    # called by assert_many_tables_equal
    def get_table_columns_as_dict(self, tables, schema=None):
        col_matrix = self.get_many_table_columns(tables, schema)
        res = {}
        for row in col_matrix:
            table_name = row[0]
            col_def = row[1:]
            if table_name not in res:
                res[table_name] = []
            res[table_name].append(col_def)
        return res

    # override for presto
    @property
    def column_schema(self):
        return "table_name, column_name, data_type, character_maximum_length"

    # This should be overridden for Snowflake. Called by get_many_table_columns.
    def get_many_table_columns_information_schema(self, tables, schema, database=None):
        columns = self.column_schema

        sql = """
                select {columns}
                from {db_string}information_schema.columns
                where {schema_filter}
                  and ({table_filter})
                order by column_name asc"""

        db_string = ""
        if database:
            db_string = self.quote_as_configured(database, "database") + "."

        table_filters_s = " OR ".join(
            _ilike("table_name", table.replace('"', "")) for table in tables
        )
        schema_filter = _ilike("table_schema", schema)

        sql = sql.format(
            columns=columns,
            schema_filter=schema_filter,
            table_filter=table_filters_s,
            db_string=db_string,
        )

        columns = self.run_sql(sql, fetch="all")
        return list(map(self.filter_many_columns, columns))

    # Snowflake needs a static char_size
    def filter_many_columns(self, column):
        if len(column) == 3:
            table_name, column_name, data_type = column
            char_size = None
        else:
            table_name, column_name, data_type, char_size = column
        return (table_name, column_name, data_type, char_size)

    @contextmanager
    def get_connection(self, name="_test"):
        """Create a test connection context where all executed macros, etc will
        use the adapter created in the schema fixture.
        This allows tests to run normal adapter macros as if reset_adapters()
        were not called by handle_and_check (for asserts, etc)
        """
        with patch.object(providers, "get_adapter", return_value=self.adapter):
            with self.adapter.connection_named(name):
                conn = self.adapter.connections.get_thread_connection()
                yield conn

    def _make_relation(self, identifier, schema=None, database=None):
        if schema is None:
            schema = self.unique_schema
        if database is None:
            database = self.default_database
        return self.adapter.Relation.create(
            database=database, schema=schema, identifier=identifier, quote_policy=self.quoting
        )

    # called by get_many_relation_columns
    def get_many_table_columns(self, tables, schema, database=None):
        result = self.get_many_table_columns_information_schema(tables, schema, database)
        result.sort(key=lambda x: "{}.{}".format(x[0], x[1]))
        return result

    # called by assert_many_relations_equal
    def get_many_relation_columns(self, relations):
        """Returns a dict of (database, schema, table_name) -> list of columns."""
        schema_fqns = {}
        for rel in relations:
            this_schema = schema_fqns.setdefault((rel.database, rel.schema), [])
            this_schema.append(rel.identifier)

        column_specs = {}
        for key, tables in schema_fqns.items():
            database, schema = key
            columns = self.get_many_table_columns(tables, schema, database=database)
            table_columns = {}
            for col in columns:
                table_columns.setdefault(col[0], []).append(col[1:])
            for rel_name, columns in table_columns.items():
                key = (database, schema, rel_name)
                column_specs[key] = columns

        return column_specs

    def _assert_tables_equal_sql(self, relation_a, relation_b, columns=None):
        if columns is None:
            columns = self.get_relation_columns(relation_a)
        column_names = [c[0] for c in columns]
        sql = self.adapter.get_rows_different_sql(relation_a, relation_b, column_names)
        return sql

    # This duplicates code in the TestProjInfo class.
    def run_sql(self, sql, fetch=None):
        if sql.strip() == "":
            return
        # substitute schema and database in sql
        adapter = self.adapter
        kwargs = {
            "schema": self.unique_schema,
            "database": adapter.quote(self.default_database),
        }
        sql = sql.format(**kwargs)

        with self.get_connection("__test") as conn:
            msg = f'test connection "{conn.name}" executing: {sql}'
            fire_event(IntegrationTestDebug(msg=msg))
            with conn.handle.cursor() as cursor:
                try:
                    cursor.execute(sql)
                    conn.handle.commit()
                    if fetch == "one":
                        return cursor.fetchone()
                    elif fetch == "all":
                        return cursor.fetchall()
                    else:
                        return
                except BaseException as e:
                    if conn.handle and not getattr(conn.handle, "closed", True):
                        conn.handle.rollback()
                    print(sql)
                    print(e)
                    raise
                finally:
                    conn.transaction_open = False

    def get_tables_in_schema(self):
        sql = """
                select table_name,
                        case when table_type = 'BASE TABLE' then 'table'
                             when table_type = 'VIEW' then 'view'
                             else table_type
                        end as materialization
                from information_schema.tables
                where {}
                order by table_name
                """

        sql = sql.format(_ilike("table_schema", self.unique_schema))
        result = self.run_sql(sql, fetch="all")

        return {model_name: materialization for (model_name, materialization) in result}


# needs overriding for presto
def _ilike(target, value):
    return "{} ilike '{}'".format(target, value)
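A hedged sketch of how a converted test might pair the project fixture with TableComparison; the seed and model names are hypothetical and assume matching files were provided via the seeds and models fixtures:

    from dbt.tests.tables import TableComparison
    from dbt.tests.util import run_dbt


    def test_copies_match(project):
        run_dbt(["seed"])
        run_dbt(["run"])
        table_comp = TableComparison(
            adapter=project.adapter,
            unique_schema=project.test_schema,
            database=project.database,
        )
        # Compares column definitions, then row counts and row contents.
        table_comp.assert_tables_equal("seed", "view_model")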
core/dbt/tests/util.py (new file, 73 lines)
@@ -0,0 +1,73 @@
import os
import shutil
from typing import List

from dbt.main import handle_and_check
from dbt.logger import log_manager
from dbt.contracts.graph.manifest import Manifest
from dbt.events.functions import capture_stdout_logs, stop_capture_stdout_logs


# This is used in pytest tests to run dbt
def run_dbt(args: List[str] = None, expect_pass=True):
    # The logger will complain about already being initialized if
    # we don't do this.
    log_manager.reset_handlers()
    if args is None:
        args = ["run"]

    print("\n\nInvoking dbt with {}".format(args))
    res, success = handle_and_check(args)
    # assert success == expect_pass, "dbt exit state did not match expected"
    return res


def run_dbt_and_capture(args: List[str] = None, expect_pass=True):
    try:
        stringbuf = capture_stdout_logs()
        res = run_dbt(args, expect_pass=expect_pass)
        stdout = stringbuf.getvalue()

    finally:
        stop_capture_stdout_logs()

    return res, stdout


# Used in test cases to get the manifest from the partial parsing file
def get_manifest(project_root):
    path = project_root.join("target", "partial_parse.msgpack")
    if os.path.exists(path):
        with open(path, "rb") as fp:
            manifest_mp = fp.read()
        manifest: Manifest = Manifest.from_msgpack(manifest_mp)
        return manifest
    else:
        return None


def normalize(path):
    """On windows, neither is enough on its own:

    >>> normcase('C:\\documents/ALL CAPS/subdir\\..')
    'c:\\documents\\all caps\\subdir\\..'
    >>> normpath('C:\\documents/ALL CAPS/subdir\\..')
    'C:\\documents\\ALL CAPS'
    >>> normpath(normcase('C:\\documents/ALL CAPS/subdir\\..'))
    'c:\\documents\\all caps'
    """
    return os.path.normcase(os.path.normpath(path))


def copy_file(src_path, src, dest_path, dest) -> None:
    # dest is a list, so that we can provide nested directories, like 'models' etc.
    # copy files from the data_dir to appropriate project directory
    shutil.copyfile(
        os.path.join(src_path, src),
        os.path.join(dest_path, *dest),
    )


def rm_file(src_path, src) -> None:
    # remove files from proj_path
    os.remove(os.path.join(src_path, src))
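A hedged usage sketch for the helpers above; the model name is hypothetical and assumes a my_model.sql was supplied via the models fixture (the generated project is named "test", so node ids take the form model.test.<name>):

    from dbt.tests.util import get_manifest, run_dbt_and_capture


    def test_run_and_inspect(project):
        results, stdout = run_dbt_and_capture(["run"])
        assert results is not None
        manifest = get_manifest(project.project_root)
        if manifest:  # None when target/partial_parse.msgpack was not written
            assert "model.test.my_model" in manifest.nodes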
@@ -1,17 +0,0 @@
{{
  config(
    materialized = "incremental",
    unique_key = "id",
    persist_docs = {"relation": true}
  )
}}


select *
from {{ ref('seed') }}

{% if is_incremental() %}

    where id > (select max(id) from {{this}})

{% endif %}

@@ -1,9 +0,0 @@
{{
  config(
    materialized = "table",
    sort = 'first_name',
    sort_type = 'compound'
  )
}}

select * from {{ ref('seed') }}

@@ -1,8 +0,0 @@
{{
  config(
    materialized = "view",
    enabled = False
  )
}}

select * from {{ ref('seed') }}

@@ -1,5 +0,0 @@





@@ -1,3 +0,0 @@
{%- do adapter.get_relation(database=target.database, schema=target.schema, identifier='MATERIALIZED') -%}

select * from {{ ref('MATERIALIZED') }}

@@ -1,11 +0,0 @@
{{
  config(
    materialized = "incremental"
  )
}}

select * from {{ ref('seed') }}

{% if is_incremental() %}
    where id > (select max(id) from {{this}})
{% endif %}

@@ -1,9 +0,0 @@
{{
  config(
    materialized = "table",
    sort = ['first_name', 'last_name'],
    sort_type = 'interleaved'
  )
}}

select * from {{ ref('seed') }}

@@ -1,8 +0,0 @@
{{
  config(
    materialized = "table"
  )
}}

-- this is a unicode character: å
select * from {{ ref('seed') }}

@@ -1,7 +0,0 @@
version: 2
models:
- name: DISABLED
  columns:
  - name: id
    tests:
    - unique

@@ -1,7 +0,0 @@
{{
  config(
    materialized = "view"
  )
}}

select * from {{ ref('seed') }}

@@ -1 +0,0 @@
select 1 as id
@@ -1,17 +0,0 @@
{{
  config(
    materialized = "incremental",
    unique_key = "id",
    persist_docs = {"relation": true}
  )
}}


select *
from {{ ref('seed') }}

{% if is_incremental() %}

    where id > (select max(id) from {{this}})

{% endif %}

@@ -1,9 +0,0 @@
{{
  config(
    materialized = "table",
    sort = 'first_name',
    sort_type = 'compound'
  )
}}

select * from {{ ref('seed') }}

@@ -1,8 +0,0 @@
{{
  config(
    materialized = "view",
    enabled = False
  )
}}

select * from {{ ref('seed') }}

@@ -1,5 +0,0 @@





@@ -1,3 +0,0 @@
{%- do adapter.get_relation(database=target.database, schema=target.schema, identifier='materialized') -%}

select * from {{ ref('materialized') }}

@@ -1,11 +0,0 @@
{{
  config(
    materialized = "incremental"
  )
}}

select * from {{ ref('seed') }}

{% if is_incremental() %}
    where id > (select max(id) from {{this}})
{% endif %}

@@ -1,9 +0,0 @@
{{
  config(
    materialized = "table",
    sort = ['first_name', 'last_name'],
    sort_type = 'interleaved'
  )
}}

select * from {{ ref('seed') }}

@@ -1,12 +0,0 @@
{{
  config(
    materialized = "table"
  )
}}
-- ensure that dbt_utils' relation check will work
{% set relation = ref('seed') %}
{%- if not (relation is mapping and relation.get('metadata', {}).get('type', '').endswith('Relation')) -%}
    {%- do exceptions.raise_compiler_error("Macro " ~ macro ~ " expected a Relation but received the value: " ~ relation) -%}
{%- endif -%}
-- this is a unicode character: å
select * from {{ relation }}

@@ -1,7 +0,0 @@
version: 2
models:
- name: disabled
  columns:
  - name: id
    tests:
    - unique

@@ -1,7 +0,0 @@
{{
  config(
    materialized = "view"
  )
}}

select * from {{ ref('seed') }}
@@ -1,202 +0,0 @@
import json
import os
from pytest import mark

from test.integration.base import DBTIntegrationTest, use_profile


class BaseTestSimpleCopy(DBTIntegrationTest):
    @property
    def schema(self):
        return "simple_copy_001"

    @staticmethod
    def dir(path):
        return path.lstrip('/')

    @property
    def models(self):
        return self.dir("models")

    @property
    def project_config(self):
        return self.seed_quote_cfg_with({
            'profile': '{{ "tes" ~ "t" }}'
        })

    def seed_quote_cfg_with(self, extra):
        cfg = {
            'config-version': 2,
            'seeds': {
                'quote_columns': False,
            }
        }
        cfg.update(extra)
        return cfg


class TestSimpleCopy(BaseTestSimpleCopy):

    @property
    def project_config(self):
        return self.seed_quote_cfg_with({"seed-paths": [self.dir("seed-initial")]})

    @use_profile("postgres")
    def test__postgres__simple_copy(self):
        results = self.run_dbt(["seed"])
        self.assertEqual(len(results), 1)
        results = self.run_dbt()
        self.assertEqual(len(results), 7)

        self.assertManyTablesEqual(["seed", "view_model", "incremental", "materialized", "get_and_ref"])

        self.use_default_project({"seed-paths": [self.dir("seed-update")]})
        results = self.run_dbt(["seed"])
        self.assertEqual(len(results), 1)
        results = self.run_dbt()
        self.assertEqual(len(results), 7)

        self.assertManyTablesEqual(["seed", "view_model", "incremental", "materialized", "get_and_ref"])

    @use_profile('postgres')
    def test__postgres__simple_copy_with_materialized_views(self):
        self.run_sql('''
            create table {schema}.unrelated_table (id int)
        '''.format(schema=self.unique_schema())
        )
        self.run_sql('''
            create materialized view {schema}.unrelated_materialized_view as (
                select * from {schema}.unrelated_table
            )
        '''.format(schema=self.unique_schema()))
        self.run_sql('''
            create view {schema}.unrelated_view as (
                select * from {schema}.unrelated_materialized_view
            )
        '''.format(schema=self.unique_schema()))
        results = self.run_dbt(["seed"])
        self.assertEqual(len(results), 1)
        results = self.run_dbt()
        self.assertEqual(len(results), 7)

    @use_profile("postgres")
    def test__postgres__dbt_doesnt_run_empty_models(self):
        results = self.run_dbt(["seed"])
        self.assertEqual(len(results), 1)
        results = self.run_dbt()
        self.assertEqual(len(results), 7)

        models = self.get_models_in_schema()

        self.assertFalse("empty" in models.keys())
        self.assertFalse("disabled" in models.keys())


class TestShouting(BaseTestSimpleCopy):
    @property
    def models(self):
        return self.dir('models-shouting')

    @property
    def project_config(self):
        return self.seed_quote_cfg_with({"seed-paths": [self.dir("seed-initial")]})

    @use_profile("postgres")
    def test__postgres__simple_copy_loud(self):
        results = self.run_dbt(["seed"])
        self.assertEqual(len(results), 1)
        results = self.run_dbt()
        self.assertEqual(len(results), 7)

        self.assertManyTablesEqual(["seed", "VIEW_MODEL", "INCREMENTAL", "MATERIALIZED", "GET_AND_REF"])

        self.use_default_project({"seed-paths": [self.dir("seed-update")]})
        results = self.run_dbt(["seed"])
        self.assertEqual(len(results), 1)
        results = self.run_dbt()
        self.assertEqual(len(results), 7)

        self.assertManyTablesEqual(["seed", "VIEW_MODEL", "INCREMENTAL", "MATERIALIZED", "GET_AND_REF"])


# I give up on getting this working for Windows.
@mark.skipif(os.name == 'nt', reason='mixed-case postgres database tests are not supported on Windows')
class TestMixedCaseDatabase(BaseTestSimpleCopy):
    @property
    def models(self):
        return self.dir('models-trivial')

    def postgres_profile(self):
        return {
            'config': {
                'send_anonymous_usage_stats': False
            },
            'test': {
                'outputs': {
                    'default2': {
                        'type': 'postgres',
                        'threads': 4,
                        'host': self.database_host,
                        'port': 5432,
                        'user': 'root',
                        'pass': 'password',
                        'dbname': 'dbtMixedCase',
                        'schema': self.unique_schema()
                    },
                },
                'target': 'default2'
            }
        }

    @property
    def project_config(self):
        return {'config-version': 2}

    @use_profile('postgres')
    def test_postgres_run_mixed_case(self):
        self.run_dbt()
        self.run_dbt()


class TestQuotedDatabase(BaseTestSimpleCopy):

    @property
    def project_config(self):
        return self.seed_quote_cfg_with({
            'quoting': {
                'database': True,
            },
            "seed-paths": [self.dir("seed-initial")],
        })

    def seed_get_json(self, expect_pass=True):
        results, output = self.run_dbt_and_capture(
            ['--debug', '--log-format=json', '--single-threaded', 'seed'],
            expect_pass=expect_pass
        )

        logs = []
        for line in output.split('\n'):
            try:
                log = json.loads(line)
            except ValueError:
                continue

            # TODO structured logging does not put out run_state yet
            # if log['extra'].get('run_state') != 'internal':
            #     continue
            logs.append(log)

        # empty lists evaluate as False
        self.assertTrue(logs)
        return logs

    @use_profile('postgres')
    def test_postgres_no_create_schemas(self):
        logs = self.seed_get_json()
        for log in logs:
            msg = log['msg']
            self.assertFalse(
                'create schema if not exists' in msg,
                f'did not expect schema creation: {msg}'
            )
@@ -1,9 +0,0 @@
{%- set tgt = ref('seed') -%}
{%- set got = adapter.get_relation(database=tgt.database, schema=tgt.schema, identifier=tgt.identifier) | string -%}
{% set replaced = got.replace('"', '-') %}
{% set expected = "-" + tgt.database.upper() + '-.-' + tgt.schema.upper() + '-.-' + tgt.identifier.upper() + '-' %}

with cte as (
    select '{{ replaced }}' as name
)
select * from cte where name not like '{{ expected }}'

@@ -1,2 +0,0 @@
-- should be ref('model')
select * from {{ ref(model) }}

@@ -1 +0,0 @@
select 1 as id

@@ -1,7 +0,0 @@
{{
  config(
    materialized = "ephemeral"
  )
}}

select * from {{ this.schema }}.seed

@@ -1,9 +0,0 @@
{{
  config(
    materialized = "table"
  )
}}

select gender, count(*) as ct from {{ref('ephemeral_copy')}}
group by gender
order by gender asc

@@ -1,13 +0,0 @@
{{
  config(
    materialized = "incremental"
  )
}}

select * from {{ this.schema }}.seed

{% if is_incremental() %}

    where id > (select max(id) from {{this}})

{% endif %}

@@ -1,9 +0,0 @@
{{
  config(
    materialized = "table",
  )
}}

select gender, count(*) as ct from {{ref('incremental_copy')}}
group by gender
order by gender asc

@@ -1,7 +0,0 @@
{{
  config(
    materialized = "table"
  )
}}

select * from {{ this.schema }}.seed

@@ -1,9 +0,0 @@
{{
  config(
    materialized = "table"
  )
}}

select gender, count(*) as ct from {{ref('materialized_copy')}}
group by gender
order by gender asc

@@ -1,7 +0,0 @@
{{
  config(
    materialized = "view"
  )
}}

select * from {{ this.schema }}.seed

@@ -1,9 +0,0 @@
{{
  config(
    materialized = "view"
  )
}}

select gender, count(*) as ct from {{ref('view_copy')}}
group by gender
order by gender asc

@@ -1,9 +0,0 @@
{{
  config(
    materialized = "view"
  )
}}

select gender, count(*) as ct from {{ var('var_ref') }}
group by gender
order by gender asc
@@ -1,119 +0,0 @@
create table {schema}.summary_expected (
    gender VARCHAR(10),
    ct BIGINT
);

insert into {schema}.summary_expected (gender, ct) values
('Female', 40),
('Male', 60);

create table {schema}.seed (
    id BIGSERIAL PRIMARY KEY,
    first_name VARCHAR(50),
    last_name VARCHAR(50),
    email VARCHAR(50),
    gender VARCHAR(10),
    ip_address VARCHAR(20)
);

insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values
('Jack', 'Hunter', 'jhunter0@pbs.org', 'Male', '59.80.20.168'),
('Kathryn', 'Walker', 'kwalker1@ezinearticles.com', 'Female', '194.121.179.35'),
('Gerald', 'Ryan', 'gryan2@com.com', 'Male', '11.3.212.243'),
('Bonnie', 'Spencer', 'bspencer3@ameblo.jp', 'Female', '216.32.196.175'),
('Harold', 'Taylor', 'htaylor4@people.com.cn', 'Male', '253.10.246.136'),
('Jacqueline', 'Griffin', 'jgriffin5@t.co', 'Female', '16.13.192.220'),
('Wanda', 'Arnold', 'warnold6@google.nl', 'Female', '232.116.150.64'),
('Craig', 'Ortiz', 'cortiz7@sciencedaily.com', 'Male', '199.126.106.13'),
('Gary', 'Day', 'gday8@nih.gov', 'Male', '35.81.68.186'),
('Rose', 'Wright', 'rwright9@yahoo.co.jp', 'Female', '236.82.178.100'),
('Raymond', 'Kelley', 'rkelleya@fc2.com', 'Male', '213.65.166.67'),
('Gerald', 'Robinson', 'grobinsonb@disqus.com', 'Male', '72.232.194.193'),
('Mildred', 'Martinez', 'mmartinezc@samsung.com', 'Female', '198.29.112.5'),
('Dennis', 'Arnold', 'darnoldd@google.com', 'Male', '86.96.3.250'),
('Judy', 'Gray', 'jgraye@opensource.org', 'Female', '79.218.162.245'),
('Theresa', 'Garza', 'tgarzaf@epa.gov', 'Female', '21.59.100.54'),
('Gerald', 'Robertson', 'grobertsong@csmonitor.com', 'Male', '131.134.82.96'),
('Philip', 'Hernandez', 'phernandezh@adobe.com', 'Male', '254.196.137.72'),
('Julia', 'Gonzalez', 'jgonzalezi@cam.ac.uk', 'Female', '84.240.227.174'),
('Andrew', 'Davis', 'adavisj@patch.com', 'Male', '9.255.67.25'),
('Kimberly', 'Harper', 'kharperk@foxnews.com', 'Female', '198.208.120.253'),
('Mark', 'Martin', 'mmartinl@marketwatch.com', 'Male', '233.138.182.153'),
('Cynthia', 'Ruiz', 'cruizm@google.fr', 'Female', '18.178.187.201'),
('Samuel', 'Carroll', 'scarrolln@youtu.be', 'Male', '128.113.96.122'),
('Jennifer', 'Larson', 'jlarsono@vinaora.com', 'Female', '98.234.85.95'),
('Ashley', 'Perry', 'aperryp@rakuten.co.jp', 'Female', '247.173.114.52'),
('Howard', 'Rodriguez', 'hrodriguezq@shutterfly.com', 'Male', '231.188.95.26'),
('Amy', 'Brooks', 'abrooksr@theatlantic.com', 'Female', '141.199.174.118'),
('Louise', 'Warren', 'lwarrens@adobe.com', 'Female', '96.105.158.28'),
('Tina', 'Watson', 'twatsont@myspace.com', 'Female', '251.142.118.177'),
('Janice', 'Kelley', 'jkelleyu@creativecommons.org', 'Female', '239.167.34.233'),
('Terry', 'Mccoy', 'tmccoyv@bravesites.com', 'Male', '117.201.183.203'),
('Jeffrey', 'Morgan', 'jmorganw@surveymonkey.com', 'Male', '78.101.78.149'),
('Louis', 'Harvey', 'lharveyx@sina.com.cn', 'Male', '51.50.0.167'),
('Philip', 'Miller', 'pmillery@samsung.com', 'Male', '103.255.222.110'),
('Willie', 'Marshall', 'wmarshallz@ow.ly', 'Male', '149.219.91.68'),
('Patrick', 'Lopez', 'plopez10@redcross.org', 'Male', '250.136.229.89'),
('Adam', 'Jenkins', 'ajenkins11@harvard.edu', 'Male', '7.36.112.81'),
('Benjamin', 'Cruz', 'bcruz12@linkedin.com', 'Male', '32.38.98.15'),
('Ruby', 'Hawkins', 'rhawkins13@gmpg.org', 'Female', '135.171.129.255'),
('Carlos', 'Barnes', 'cbarnes14@a8.net', 'Male', '240.197.85.140'),
('Ruby', 'Griffin', 'rgriffin15@bravesites.com', 'Female', '19.29.135.24'),
('Sean', 'Mason', 'smason16@icq.com', 'Male', '159.219.155.249'),
('Anthony', 'Payne', 'apayne17@utexas.edu', 'Male', '235.168.199.218'),
('Steve', 'Cruz', 'scruz18@pcworld.com', 'Male', '238.201.81.198'),
('Anthony', 'Garcia', 'agarcia19@flavors.me', 'Male', '25.85.10.18'),
('Doris', 'Lopez', 'dlopez1a@sphinn.com', 'Female', '245.218.51.238'),
('Susan', 'Nichols', 'snichols1b@freewebs.com', 'Female', '199.99.9.61'),
('Wanda', 'Ferguson', 'wferguson1c@yahoo.co.jp', 'Female', '236.241.135.21'),
('Andrea', 'Pierce', 'apierce1d@google.co.uk', 'Female', '132.40.10.209'),
('Lawrence', 'Phillips', 'lphillips1e@jugem.jp', 'Male', '72.226.82.87'),
('Judy', 'Gilbert', 'jgilbert1f@multiply.com', 'Female', '196.250.15.142'),
('Eric', 'Williams', 'ewilliams1g@joomla.org', 'Male', '222.202.73.126'),
('Ralph', 'Romero', 'rromero1h@sogou.com', 'Male', '123.184.125.212'),
('Jean', 'Wilson', 'jwilson1i@ocn.ne.jp', 'Female', '176.106.32.194'),
('Lori', 'Reynolds', 'lreynolds1j@illinois.edu', 'Female', '114.181.203.22'),
('Donald', 'Moreno', 'dmoreno1k@bbc.co.uk', 'Male', '233.249.97.60'),
('Steven', 'Berry', 'sberry1l@eepurl.com', 'Male', '186.193.50.50'),
('Theresa', 'Shaw', 'tshaw1m@people.com.cn', 'Female', '120.37.71.222'),
('John', 'Stephens', 'jstephens1n@nationalgeographic.com', 'Male', '191.87.127.115'),
('Richard', 'Jacobs', 'rjacobs1o@state.tx.us', 'Male', '66.210.83.155'),
('Andrew', 'Lawson', 'alawson1p@over-blog.com', 'Male', '54.98.36.94'),
('Peter', 'Morgan', 'pmorgan1q@rambler.ru', 'Male', '14.77.29.106'),
('Nicole', 'Garrett', 'ngarrett1r@zimbio.com', 'Female', '21.127.74.68'),
('Joshua', 'Kim', 'jkim1s@edublogs.org', 'Male', '57.255.207.41'),
('Ralph', 'Roberts', 'rroberts1t@people.com.cn', 'Male', '222.143.131.109'),
('George', 'Montgomery', 'gmontgomery1u@smugmug.com', 'Male', '76.75.111.77'),
('Gerald', 'Alvarez', 'galvarez1v@flavors.me', 'Male', '58.157.186.194'),
('Donald', 'Olson', 'dolson1w@whitehouse.gov', 'Male', '69.65.74.135'),
('Carlos', 'Morgan', 'cmorgan1x@pbs.org', 'Male', '96.20.140.87'),
('Aaron', 'Stanley', 'astanley1y@webnode.com', 'Male', '163.119.217.44'),
('Virginia', 'Long', 'vlong1z@spiegel.de', 'Female', '204.150.194.182'),
('Robert', 'Berry', 'rberry20@tripadvisor.com', 'Male', '104.19.48.241'),
('Antonio', 'Brooks', 'abrooks21@unesco.org', 'Male', '210.31.7.24'),
('Ruby', 'Garcia', 'rgarcia22@ovh.net', 'Female', '233.218.162.214'),
('Jack', 'Hanson', 'jhanson23@blogtalkradio.com', 'Male', '31.55.46.199'),
('Kathryn', 'Nelson', 'knelson24@walmart.com', 'Female', '14.189.146.41'),
('Jason', 'Reed', 'jreed25@printfriendly.com', 'Male', '141.189.89.255'),
('George', 'Coleman', 'gcoleman26@people.com.cn', 'Male', '81.189.221.144'),
('Rose', 'King', 'rking27@ucoz.com', 'Female', '212.123.168.231'),
('Johnny', 'Holmes', 'jholmes28@boston.com', 'Male', '177.3.93.188'),
('Katherine', 'Gilbert', 'kgilbert29@altervista.org', 'Female', '199.215.169.61'),
('Joshua', 'Thomas', 'jthomas2a@ustream.tv', 'Male', '0.8.205.30'),
('Julie', 'Perry', 'jperry2b@opensource.org', 'Female', '60.116.114.192'),
('Richard', 'Perry', 'rperry2c@oracle.com', 'Male', '181.125.70.232'),
('Kenneth', 'Ruiz', 'kruiz2d@wikimedia.org', 'Male', '189.105.137.109'),
('Jose', 'Morgan', 'jmorgan2e@webnode.com', 'Male', '101.134.215.156'),
('Donald', 'Campbell', 'dcampbell2f@goo.ne.jp', 'Male', '102.120.215.84'),
('Debra', 'Collins', 'dcollins2g@uol.com.br', 'Female', '90.13.153.235'),
('Jesse', 'Johnson', 'jjohnson2h@stumbleupon.com', 'Male', '225.178.125.53'),
('Elizabeth', 'Stone', 'estone2i@histats.com', 'Female', '123.184.126.221'),
('Angela', 'Rogers', 'arogers2j@goodreads.com', 'Female', '98.104.132.187'),
('Emily', 'Dixon', 'edixon2k@mlb.com', 'Female', '39.190.75.57'),
('Albert', 'Scott', 'ascott2l@tinypic.com', 'Male', '40.209.13.189'),
('Barbara', 'Peterson', 'bpeterson2m@ow.ly', 'Female', '75.249.136.180'),
('Adam', 'Greene', 'agreene2n@fastcompany.com', 'Male', '184.173.109.144'),
('Earl', 'Sanders', 'esanders2o@hc360.com', 'Male', '247.34.90.117'),
('Angela', 'Brooks', 'abrooks2p@mtv.com', 'Female', '10.63.249.126'),
('Harold', 'Foster', 'hfoster2q@privacy.gov.au', 'Male', '139.214.40.244'),
('Carl', 'Meyer', 'cmeyer2r@disqus.com', 'Male', '204.117.7.88');
@@ -1,136 +0,0 @@
import os

from dbt.exceptions import CompilationException

from test.integration.base import DBTIntegrationTest, use_profile


class TestSimpleReference(DBTIntegrationTest):
    @property
    def schema(self):
        return "simple_reference_003"

    @property
    def models(self):
        return "models"

    @property
    def project_config(self):
        return {
            'config-version': 2,
            'vars': {
                'test': {
                    'var_ref': '{{ ref("view_copy") }}',
                },
            },
        }

    def setUp(self):
        super().setUp()
        # self.use_default_config()
        self.run_sql_file("seed.sql")

    @use_profile('postgres')
    def test__postgres__simple_reference(self):
        results = self.run_dbt()
        # ephemeral_copy doesn't show up in results
        self.assertEqual(len(results), 8)

        # Copies should match
        self.assertTablesEqual("seed", "incremental_copy")
        self.assertTablesEqual("seed", "materialized_copy")
        self.assertTablesEqual("seed", "view_copy")

        # Summaries should match
        self.assertTablesEqual("summary_expected", "incremental_summary")
        self.assertTablesEqual("summary_expected", "materialized_summary")
        self.assertTablesEqual("summary_expected", "view_summary")
        self.assertTablesEqual("summary_expected", "ephemeral_summary")
        self.assertTablesEqual("summary_expected", "view_using_ref")

        self.run_sql_file("update.sql")

        results = self.run_dbt()
        self.assertEqual(len(results), 8)

        # Copies should match
        self.assertTablesEqual("seed", "incremental_copy")
        self.assertTablesEqual("seed", "materialized_copy")
        self.assertTablesEqual("seed", "view_copy")

        # Summaries should match
        self.assertTablesEqual("summary_expected", "incremental_summary")
        self.assertTablesEqual("summary_expected", "materialized_summary")
        self.assertTablesEqual("summary_expected", "view_summary")
        self.assertTablesEqual("summary_expected", "ephemeral_summary")
        self.assertTablesEqual("summary_expected", "view_using_ref")

    @use_profile('postgres')
    def test__postgres__simple_reference_with_models(self):
        # Run materialized_copy, ephemeral_copy, and their dependents
        # ephemeral_copy should not actually be materialized b/c it is ephemeral
        results = self.run_dbt(
            ['run', '--models', 'materialized_copy', 'ephemeral_copy']
        )
        self.assertEqual(len(results), 1)

        # Copies should match
        self.assertTablesEqual("seed", "materialized_copy")

        created_models = self.get_models_in_schema()
        self.assertTrue('materialized_copy' in created_models)

    @use_profile('postgres')
    def test__postgres__simple_reference_with_models_and_children(self):
        # Run materialized_copy, ephemeral_copy, and their dependents
        # ephemeral_copy should not actually be materialized b/c it is ephemeral
        # the dependent ephemeral_summary, however, should be materialized as a table
        results = self.run_dbt(
            ['run', '--models', 'materialized_copy+', 'ephemeral_copy+']
        )
        self.assertEqual(len(results), 3)

        # Copies should match
        self.assertTablesEqual("seed", "materialized_copy")

        # Summaries should match
        self.assertTablesEqual("summary_expected", "materialized_summary")
        self.assertTablesEqual("summary_expected", "ephemeral_summary")

        created_models = self.get_models_in_schema()

        self.assertFalse('incremental_copy' in created_models)
        self.assertFalse('incremental_summary' in created_models)
        self.assertFalse('view_copy' in created_models)
        self.assertFalse('view_summary' in created_models)

        # make sure this wasn't errantly materialized
        self.assertFalse('ephemeral_copy' in created_models)

        self.assertTrue('materialized_copy' in created_models)
        self.assertTrue('materialized_summary' in created_models)
        self.assertEqual(created_models['materialized_copy'], 'table')
        self.assertEqual(created_models['materialized_summary'], 'table')

        self.assertTrue('ephemeral_summary' in created_models)
        self.assertEqual(created_models['ephemeral_summary'], 'table')


class TestErrorReference(DBTIntegrationTest):
    @property
    def schema(self):
        return "simple_reference_003"

    @property
    def models(self):
        return "invalid-models"

    @use_profile('postgres')
    def test_postgres_undefined_value(self):
        with self.assertRaises(CompilationException) as exc:
            self.run_dbt(['compile'])
        path = os.path.join('invalid-models', 'descendant.sql')
        self.assertIn(path, str(exc.exception))
@@ -1,107 +0,0 @@

truncate table {schema}.summary_expected;
insert into {schema}.summary_expected (gender, ct) values
('Female', 94),
('Male', 106);

insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values
('Michael', 'Perez', 'mperez0@chronoengine.com', 'Male', '106.239.70.175'),
('Shawn', 'Mccoy', 'smccoy1@reddit.com', 'Male', '24.165.76.182'),
('Kathleen', 'Payne', 'kpayne2@cargocollective.com', 'Female', '113.207.168.106'),
('Jimmy', 'Cooper', 'jcooper3@cargocollective.com', 'Male', '198.24.63.114'),
('Katherine', 'Rice', 'krice4@typepad.com', 'Female', '36.97.186.238'),
('Sarah', 'Ryan', 'sryan5@gnu.org', 'Female', '119.117.152.40'),
('Martin', 'Mcdonald', 'mmcdonald6@opera.com', 'Male', '8.76.38.115'),
('Frank', 'Robinson', 'frobinson7@wunderground.com', 'Male', '186.14.64.194'),
('Jennifer', 'Franklin', 'jfranklin8@mail.ru', 'Female', '91.216.3.131'),
('Henry', 'Welch', 'hwelch9@list-manage.com', 'Male', '176.35.182.168'),
('Fred', 'Snyder', 'fsnydera@reddit.com', 'Male', '217.106.196.54'),
('Amy', 'Dunn', 'adunnb@nba.com', 'Female', '95.39.163.195'),
('Kathleen', 'Meyer', 'kmeyerc@cdc.gov', 'Female', '164.142.188.214'),
('Steve', 'Ferguson', 'sfergusond@reverbnation.com', 'Male', '138.22.204.251'),
('Teresa', 'Hill', 'thille@dion.ne.jp', 'Female', '82.84.228.235'),
('Amanda', 'Harper', 'aharperf@mail.ru', 'Female', '16.123.56.176'),
('Kimberly', 'Ray', 'krayg@xing.com', 'Female', '48.66.48.12'),
('Johnny', 'Knight', 'jknighth@jalbum.net', 'Male', '99.30.138.123'),
('Virginia', 'Freeman', 'vfreemani@tiny.cc', 'Female', '225.172.182.63'),
('Anna', 'Austin', 'aaustinj@diigo.com', 'Female', '62.111.227.148'),
('Willie', 'Hill', 'whillk@mail.ru', 'Male', '0.86.232.249'),
('Sean', 'Harris', 'sharrisl@zdnet.com', 'Male', '117.165.133.249'),
('Mildred', 'Adams', 'madamsm@usatoday.com', 'Female', '163.44.97.46'),
('David', 'Graham', 'dgrahamn@zimbio.com', 'Male', '78.13.246.202'),
('Victor', 'Hunter', 'vhuntero@ehow.com', 'Male', '64.156.179.139'),
('Aaron', 'Ruiz', 'aruizp@weebly.com', 'Male', '34.194.68.78'),
('Benjamin', 'Brooks', 'bbrooksq@jalbum.net', 'Male', '20.192.189.107'),
('Lisa', 'Wilson', 'lwilsonr@japanpost.jp', 'Female', '199.152.130.217'),
('Benjamin', 'King', 'bkings@comsenz.com', 'Male', '29.189.189.213'),
('Christina', 'Williamson', 'cwilliamsont@boston.com', 'Female', '194.101.52.60'),
('Jane', 'Gonzalez', 'jgonzalezu@networksolutions.com', 'Female', '109.119.12.87'),
('Thomas', 'Owens', 'towensv@psu.edu', 'Male', '84.168.213.153'),
('Katherine', 'Moore', 'kmoorew@naver.com', 'Female', '183.150.65.24'),
('Jennifer', 'Stewart', 'jstewartx@yahoo.com', 'Female', '38.41.244.58'),
('Sara', 'Tucker', 'stuckery@topsy.com', 'Female', '181.130.59.184'),
('Harold', 'Ortiz', 'hortizz@vkontakte.ru', 'Male', '198.231.63.137'),
('Shirley', 'James', 'sjames10@yelp.com', 'Female', '83.27.160.104'),
('Dennis', 'Johnson', 'djohnson11@slate.com', 'Male', '183.178.246.101'),
('Louise', 'Weaver', 'lweaver12@china.com.cn', 'Female', '1.14.110.18'),
('Maria', 'Armstrong', 'marmstrong13@prweb.com', 'Female', '181.142.1.249'),
('Gloria', 'Cruz', 'gcruz14@odnoklassniki.ru', 'Female', '178.232.140.243'),
('Diana', 'Spencer', 'dspencer15@ifeng.com', 'Female', '125.153.138.244'),
('Kelly', 'Nguyen', 'knguyen16@altervista.org', 'Female', '170.13.201.119'),
('Jane', 'Rodriguez', 'jrodriguez17@biblegateway.com', 'Female', '12.102.249.81'),
('Scott', 'Brown', 'sbrown18@geocities.jp', 'Male', '108.174.99.192'),
('Norma', 'Cruz', 'ncruz19@si.edu', 'Female', '201.112.156.197'),
('Marie', 'Peters', 'mpeters1a@mlb.com', 'Female', '231.121.197.144'),
('Lillian', 'Carr', 'lcarr1b@typepad.com', 'Female', '206.179.164.163'),
('Judy', 'Nichols', 'jnichols1c@t-online.de', 'Female', '158.190.209.194'),
('Billy', 'Long', 'blong1d@yahoo.com', 'Male', '175.20.23.160'),
('Howard', 'Reid', 'hreid1e@exblog.jp', 'Male', '118.99.196.20'),
('Laura', 'Ferguson', 'lferguson1f@tuttocitta.it', 'Female', '22.77.87.110'),
('Anne', 'Bailey', 'abailey1g@geocities.com', 'Female', '58.144.159.245'),
('Rose', 'Morgan', 'rmorgan1h@ehow.com', 'Female', '118.127.97.4'),
('Nicholas', 'Reyes', 'nreyes1i@google.ru', 'Male', '50.135.10.252'),
('Joshua', 'Kennedy', 'jkennedy1j@house.gov', 'Male', '154.6.163.209'),
('Paul', 'Watkins', 'pwatkins1k@upenn.edu', 'Male', '177.236.120.87'),
('Kathryn', 'Kelly', 'kkelly1l@businessweek.com', 'Female', '70.28.61.86'),
('Adam', 'Armstrong', 'aarmstrong1m@techcrunch.com', 'Male', '133.235.24.202'),
('Norma', 'Wallace', 'nwallace1n@phoca.cz', 'Female', '241.119.227.128'),
('Timothy', 'Reyes', 'treyes1o@google.cn', 'Male', '86.28.23.26'),
('Elizabeth', 'Patterson', 'epatterson1p@sun.com', 'Female', '139.97.159.149'),
('Edward', 'Gomez', 'egomez1q@google.fr', 'Male', '158.103.108.255'),
('David', 'Cox', 'dcox1r@friendfeed.com', 'Male', '206.80.80.58'),
('Brenda', 'Wood', 'bwood1s@over-blog.com', 'Female', '217.207.44.179'),
('Adam', 'Walker', 'awalker1t@blogs.com', 'Male', '253.211.54.93'),
('Michael', 'Hart', 'mhart1u@wix.com', 'Male', '230.206.200.22'),
('Jesse', 'Ellis', 'jellis1v@google.co.uk', 'Male', '213.254.162.52'),
('Janet', 'Powell', 'jpowell1w@un.org', 'Female', '27.192.194.86'),
('Helen', 'Ford', 'hford1x@creativecommons.org', 'Female', '52.160.102.168'),
('Gerald', 'Carpenter', 'gcarpenter1y@about.me', 'Male', '36.30.194.218'),
('Kathryn', 'Oliver', 'koliver1z@army.mil', 'Female', '202.63.103.69'),
('Alan', 'Berry', 'aberry20@gov.uk', 'Male', '246.157.112.211'),
('Harry', 'Andrews', 'handrews21@ameblo.jp', 'Male', '195.108.0.12'),
('Andrea', 'Hall', 'ahall22@hp.com', 'Female', '149.162.163.28'),
('Barbara', 'Wells', 'bwells23@behance.net', 'Female', '224.70.72.1'),
('Anne', 'Wells', 'awells24@apache.org', 'Female', '180.168.81.153'),
('Harry', 'Harper', 'hharper25@rediff.com', 'Male', '151.87.130.21'),
('Jack', 'Ray', 'jray26@wufoo.com', 'Male', '220.109.38.178'),
('Phillip', 'Hamilton', 'phamilton27@joomla.org', 'Male', '166.40.47.30'),
('Shirley', 'Hunter', 'shunter28@newsvine.com', 'Female', '97.209.140.194'),
('Arthur', 'Daniels', 'adaniels29@reuters.com', 'Male', '5.40.240.86'),
('Virginia', 'Rodriguez', 'vrodriguez2a@walmart.com', 'Female', '96.80.164.184'),
('Christina', 'Ryan', 'cryan2b@hibu.com', 'Female', '56.35.5.52'),
('Theresa', 'Mendoza', 'tmendoza2c@vinaora.com', 'Female', '243.42.0.210'),
('Jason', 'Cole', 'jcole2d@ycombinator.com', 'Male', '198.248.39.129'),
('Phillip', 'Bryant', 'pbryant2e@rediff.com', 'Male', '140.39.116.251'),
('Adam', 'Torres', 'atorres2f@sun.com', 'Male', '101.75.187.135'),
('Margaret', 'Johnston', 'mjohnston2g@ucsd.edu', 'Female', '159.30.69.149'),
('Paul', 'Payne', 'ppayne2h@hhs.gov', 'Male', '199.234.140.220'),
('Todd', 'Willis', 'twillis2i@businessweek.com', 'Male', '191.59.136.214'),
('Willie', 'Oliver', 'woliver2j@noaa.gov', 'Male', '44.212.35.197'),
('Frances', 'Robertson', 'frobertson2k@go.com', 'Female', '31.117.65.136'),
('Gregory', 'Hawkins', 'ghawkins2l@joomla.org', 'Male', '91.3.22.49'),
('Lisa', 'Perkins', 'lperkins2m@si.edu', 'Female', '145.95.31.186'),
('Jacqueline', 'Anderson', 'janderson2n@cargocollective.com', 'Female', '14.176.0.187'),
('Shirley', 'Diaz', 'sdiaz2o@ucla.edu', 'Female', '207.12.95.46'),
('Nicole', 'Meyer', 'nmeyer2p@flickr.com', 'Female', '231.79.115.13'),
('Mary', 'Gray', 'mgray2q@constantcontact.com', 'Female', '210.116.64.253'),
('Jean', 'Mcdonald', 'jmcdonald2r@baidu.com', 'Female', '122.239.235.117');
37
tests/CONVERTING.md
Normal file
@@ -0,0 +1,37 @@
# Notes on converting tests from unittest to pytest

* Base fixtures are provided in the core/dbt/tests/fixtures/project.py
* You can override any fixture by specifying it in your test script
* You can't use any test utility from the legacy tests that starts with 'self'.
* Most special case assert functions will work with a simple 'assert'
* Every integration test needs to use the 'project' fixture
* Files are not copied (unless done explicitly in the test), so if you need
  to access a local file (like seed.sql) you need to get the path from the project fixture
  (project.test\_data\_dir) or from the 'test\_data\_dir' fixture (which specifies the location of data files)
* Table comparison methods have been moved to TableComparison in test/tables.py
* Fixtures are for test setup, and are specified in the test signature. You can't call
  fixtures in the middle of a test function.
* Information from the fixture setup that might be needed later in the test is provided
  by the project fixture return class (TestProjInfo)
* Every fixture has a scope, which means that you can call it multiple times in the
  same scope and it will return the same thing. Default scope is 'function'; you can
  also scope fixtures to 'module' and 'session'.
* All fixtures are run before and after the test function they're attached to.
  If you have teardown pieces in the fixture, do a 'yield' after the setup, and
  the part after the 'yield' will be run at teardown (see the sketch after these notes).
* 'run\_dbt', 'run\_sql', and 'get\_manifest' are provided by the core/dbt/tests/util.py file
* You will probably want to make separate test files out of tests that use
  substantially different projects. If they're only different by a file or two,
  you could write out individual files instead and keep them in the same file.
* You can also import file strings from other test cases
* old: self.get\_models\_in\_schema, new: get\_tables\_in\_schema
* somewhat easier way to get the legacy files into a more usable format:
  ```tail -n +1 models/* > copy_models.out```
* some of the legacy tests used a 'default_project' method to change (for example)
  the seeds directory to load a different seed. Don't do that. Copying a file is
  probably a better option.


# Integration test directories that have been converted
* 001\_simple\_copy\_tests => moved to 'basic'
* 003\_simple\_reference\_tests => moved to 'basic'
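To make the fixture notes above concrete, here is a minimal sketch of overriding a base fixture and doing yield-style teardown. It assumes only what the notes describe: 'project' comes from the base fixtures in core/dbt/tests/fixtures/project.py, and the 'extra_table' fixture with its SQL is a hypothetical example, not part of this commit.

```python
import pytest


# Overrides the base 'models' fixture from dbt.tests.fixtures.project
# for every test in this module.
@pytest.fixture
def models():
    return {"my_model.sql": "select 1 as id"}


# Setup runs before the test; the code after the 'yield' runs at teardown.
@pytest.fixture
def extra_table(project):
    project.run_sql(f"create table {project.test_schema}.extra (id int)")
    yield "extra"
    project.run_sql(f"drop table if exists {project.test_schema}.extra")


def test_with_extra_table(project, extra_table):
    # The table created by the fixture exists for the duration of the test
    project.run_sql(f"select * from {project.test_schema}.{extra_table}")
```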
1
tests/__init__.py
Normal file
@@ -0,0 +1 @@
# tests directory
4
tests/conftest.py
Normal file
@@ -0,0 +1,4 @@
# Import the functional fixtures as a plugin
# Note: fixtures with session scope need to be local

pytest_plugins = ["dbt.tests.fixtures.project"]
1
tests/data/__init__.py
Normal file
@@ -0,0 +1 @@
# tests/data directory
1
tests/fixtures/__init__.py
vendored
Normal file
@@ -0,0 +1 @@
# fixtures directory
415
tests/fixtures/jaffle_shop.py
vendored
Normal file
@@ -0,0 +1,415 @@
import pytest
import os

# models/customers.sql
customers_sql = """
with customers as (

    select * from {{ ref('stg_customers') }}

),

orders as (

    select * from {{ ref('stg_orders') }}

),

payments as (

    select * from {{ ref('stg_payments') }}

),

customer_orders as (

    select
        customer_id,

        min(order_date) as first_order,
        max(order_date) as most_recent_order,
        count(order_id) as number_of_orders
    from orders

    group by customer_id

),

customer_payments as (

    select
        orders.customer_id,
        sum(amount) as total_amount

    from payments

    left join orders on
        payments.order_id = orders.order_id

    group by orders.customer_id

),

final as (

    select
        customers.customer_id,
        customers.first_name,
        customers.last_name,
        customer_orders.first_order,
        customer_orders.most_recent_order,
        customer_orders.number_of_orders,
        customer_payments.total_amount as customer_lifetime_value

    from customers

    left join customer_orders
        on customers.customer_id = customer_orders.customer_id

    left join customer_payments
        on customers.customer_id = customer_payments.customer_id

)

select * from final
"""

# models/docs.md
docs_md = """
{% docs orders_status %}

Orders can be one of the following statuses:

| status         | description                                                                                                             |
|----------------|-------------------------------------------------------------------------------------------------------------------------|
| placed         | The order has been placed but has not yet left the warehouse                                                            |
| shipped        | The order has been shipped to the customer and is currently in transit                                                  |
| completed      | The order has been received by the customer                                                                             |
| return_pending | The customer has indicated that they would like to return the order, but it has not yet been received at the warehouse |
| returned       | The order has been returned by the customer and received at the warehouse                                               |

{% enddocs %}
"""

# models/orders.sql
orders_sql = """
{% set payment_methods = ['credit_card', 'coupon', 'bank_transfer', 'gift_card'] %}

with orders as (

    select * from {{ ref('stg_orders') }}

),

payments as (

    select * from {{ ref('stg_payments') }}

),

order_payments as (

    select
        order_id,

        {% for payment_method in payment_methods -%}
        sum(case when payment_method = '{{ payment_method }}' then amount else 0 end) as {{ payment_method }}_amount,
        {% endfor -%}

        sum(amount) as total_amount

    from payments

    group by order_id

),

final as (

    select
        orders.order_id,
        orders.customer_id,
        orders.order_date,
        orders.status,

        {% for payment_method in payment_methods -%}

        order_payments.{{ payment_method }}_amount,

        {% endfor -%}

        order_payments.total_amount as amount

    from orders


    left join order_payments
        on orders.order_id = order_payments.order_id

)

select * from final
"""

# models/overview.md
overview_md = """
{% docs __overview__ %}

## Data Documentation for Jaffle Shop

`jaffle_shop` is a fictional ecommerce store.

This [dbt](https://www.getdbt.com/) project is for testing out code.

The source code can be found [here](https://github.com/clrcrl/jaffle_shop).

{% enddocs %}
"""

# models/schema.yml
schema_yml = """
version: 2

models:
  - name: customers
    description: This table has basic information about a customer, as well as some derived facts based on a customer's orders

    columns:
      - name: customer_id
        description: This is a unique identifier for a customer
        tests:
          - unique
          - not_null

      - name: first_name
        description: Customer's first name. PII.

      - name: last_name
        description: Customer's last name. PII.

      - name: first_order
        description: Date (UTC) of a customer's first order

      - name: most_recent_order
        description: Date (UTC) of a customer's most recent order

      - name: number_of_orders
        description: Count of the number of orders a customer has placed

      - name: total_order_amount
        description: Total value (AUD) of a customer's orders

  - name: orders
    description: This table has basic information about orders, as well as some derived facts based on payments

    columns:
      - name: order_id
        tests:
          - unique
          - not_null
        description: This is a unique identifier for an order

      - name: customer_id
        description: Foreign key to the customers table
        tests:
          - not_null
          - relationships:
              to: ref('customers')
              field: customer_id

      - name: order_date
        description: Date (UTC) that the order was placed

      - name: status
        description: '{{ doc("orders_status") }}'
        tests:
          - accepted_values:
              values: ['placed', 'shipped', 'completed', 'return_pending', 'returned']

      - name: amount
        description: Total amount (AUD) of the order
        tests:
          - not_null

      - name: credit_card_amount
        description: Amount of the order (AUD) paid for by credit card
        tests:
          - not_null

      - name: coupon_amount
        description: Amount of the order (AUD) paid for by coupon
        tests:
          - not_null

      - name: bank_transfer_amount
        description: Amount of the order (AUD) paid for by bank transfer
        tests:
          - not_null

      - name: gift_card_amount
        description: Amount of the order (AUD) paid for by gift card
        tests:
          - not_null
"""

# models/staging/schema.yml
staging_schema_yml = """
version: 2

models:
  - name: stg_customers
    columns:
      - name: customer_id
        tests:
          - unique
          - not_null

  - name: stg_orders
    columns:
      - name: order_id
        tests:
          - unique
          - not_null
      - name: status
        tests:
          - accepted_values:
              values: ['placed', 'shipped', 'completed', 'return_pending', 'returned']

  - name: stg_payments
    columns:
      - name: payment_id
        tests:
          - unique
          - not_null
      - name: payment_method
        tests:
          - accepted_values:
              values: ['credit_card', 'coupon', 'bank_transfer', 'gift_card']
"""

# models/staging/stg_customers.sql
staging_stg_customers_sql = """
with source as (

    {#-
    Normally we would select from the table here, but we are using seeds to load
    our data in this project
    #}
    select * from {{ ref('raw_customers') }}

),

renamed as (

    select
        id as customer_id,
        first_name,
        last_name

    from source

)

select * from renamed
"""

# models/staging/stg_orders.sql
staging_stg_orders_sql = """
with source as (

    {#-
    Normally we would select from the table here, but we are using seeds to load
    our data in this project
    #}
    select * from {{ ref('raw_orders') }}

),

renamed as (

    select
        id as order_id,
        user_id as customer_id,
        order_date,
        status

    from source

)

select * from renamed
"""

# models/staging/stg_payments.sql
staging_stg_payments_sql = """
with source as (

    {#-
    Normally we would select from the table here, but we are using seeds to load
    our data in this project
    #}
    select * from {{ ref('raw_payments') }}

),

renamed as (

    select
        id as payment_id,
        order_id,
        payment_method,

        -- `amount` is currently stored in cents, so we convert it to dollars
        amount / 100 as amount

    from source

)

select * from renamed
"""


@pytest.fixture
def models():
    return {
        "customers.sql": customers_sql,
        "docs.md": docs_md,
        "orders.sql": orders_sql,
        "overview.md": overview_md,
        "schema.yml": schema_yml,
        "staging": {
            "schema.yml": staging_schema_yml,
            "stg_customers.sql": staging_stg_customers_sql,
            "stg_orders.sql": staging_stg_orders_sql,
            "stg_payments.sql": staging_stg_payments_sql,
        },
    }


@pytest.fixture
def seeds():
    # Read seed file and return
    seeds = {}
    dir_path = os.path.dirname(os.path.realpath(__file__))
    for file_name in ("raw_customers.csv", "raw_orders.csv", "raw_payments.csv"):
        path = os.path.join(dir_path, "jaffle_shop_data", file_name)
        with open(path, "rb") as fp:
            seeds[file_name] = fp.read()
    return seeds


@pytest.fixture
def project_config_update():
    return {
        "name": "jaffle_shop",
        "models": {
            "jaffle_shop": {
                "materialized": "table",
                "staging": {
                    "materialized": "view",
                },
            }
        },
    }
0
tests/fixtures/jaffle_shop_data/.gitkeep
vendored
Normal file
101
tests/fixtures/jaffle_shop_data/raw_customers.csv
vendored
Normal file
@@ -0,0 +1,101 @@
id,first_name,last_name
1,Michael,P.
2,Shawn,M.
3,Kathleen,P.
4,Jimmy,C.
5,Katherine,R.
6,Sarah,R.
7,Martin,M.
8,Frank,R.
9,Jennifer,F.
10,Henry,W.
11,Fred,S.
12,Amy,D.
13,Kathleen,M.
14,Steve,F.
15,Teresa,H.
16,Amanda,H.
17,Kimberly,R.
18,Johnny,K.
19,Virginia,F.
20,Anna,A.
21,Willie,H.
22,Sean,H.
23,Mildred,A.
24,David,G.
25,Victor,H.
26,Aaron,R.
27,Benjamin,B.
28,Lisa,W.
29,Benjamin,K.
30,Christina,W.
31,Jane,G.
32,Thomas,O.
33,Katherine,M.
34,Jennifer,S.
35,Sara,T.
36,Harold,O.
37,Shirley,J.
38,Dennis,J.
39,Louise,W.
40,Maria,A.
41,Gloria,C.
42,Diana,S.
43,Kelly,N.
44,Jane,R.
45,Scott,B.
46,Norma,C.
47,Marie,P.
48,Lillian,C.
49,Judy,N.
50,Billy,L.
51,Howard,R.
52,Laura,F.
53,Anne,B.
54,Rose,M.
55,Nicholas,R.
56,Joshua,K.
57,Paul,W.
58,Kathryn,K.
59,Adam,A.
60,Norma,W.
61,Timothy,R.
62,Elizabeth,P.
63,Edward,G.
64,David,C.
65,Brenda,W.
66,Adam,W.
67,Michael,H.
68,Jesse,E.
69,Janet,P.
70,Helen,F.
71,Gerald,C.
72,Kathryn,O.
73,Alan,B.
74,Harry,A.
75,Andrea,H.
76,Barbara,W.
77,Anne,W.
78,Harry,H.
79,Jack,R.
80,Phillip,H.
81,Shirley,H.
82,Arthur,D.
83,Virginia,R.
84,Christina,R.
85,Theresa,M.
86,Jason,C.
87,Phillip,B.
88,Adam,T.
89,Margaret,J.
90,Paul,P.
91,Todd,W.
92,Willie,O.
93,Frances,R.
94,Gregory,H.
95,Lisa,P.
96,Jacqueline,A.
97,Shirley,D.
98,Nicole,M.
99,Mary,G.
100,Jean,M.
100
tests/fixtures/jaffle_shop_data/raw_orders.csv
vendored
Normal file
@@ -0,0 +1,100 @@
id,user_id,order_date,status
1,1,2018-01-01,returned
2,3,2018-01-02,completed
3,94,2018-01-04,completed
4,50,2018-01-05,completed
5,64,2018-01-05,completed
6,54,2018-01-07,completed
7,88,2018-01-09,completed
8,2,2018-01-11,returned
9,53,2018-01-12,completed
10,7,2018-01-14,completed
11,99,2018-01-14,completed
12,59,2018-01-15,completed
13,84,2018-01-17,completed
14,40,2018-01-17,returned
15,25,2018-01-17,completed
16,39,2018-01-18,completed
17,71,2018-01-18,completed
18,64,2018-01-20,returned
19,54,2018-01-22,completed
20,20,2018-01-23,completed
21,71,2018-01-23,completed
22,86,2018-01-24,completed
23,22,2018-01-26,return_pending
24,3,2018-01-27,completed
25,51,2018-01-28,completed
26,32,2018-01-28,completed
27,94,2018-01-29,completed
28,8,2018-01-29,completed
29,57,2018-01-31,completed
30,69,2018-02-02,completed
31,16,2018-02-02,completed
32,28,2018-02-04,completed
33,42,2018-02-04,completed
34,38,2018-02-06,completed
35,80,2018-02-08,completed
36,85,2018-02-10,completed
37,1,2018-02-10,completed
38,51,2018-02-10,completed
39,26,2018-02-11,completed
40,33,2018-02-13,completed
41,99,2018-02-14,completed
42,92,2018-02-16,completed
43,31,2018-02-17,completed
44,66,2018-02-17,completed
45,22,2018-02-17,completed
46,6,2018-02-19,completed
47,50,2018-02-20,completed
48,27,2018-02-21,completed
49,35,2018-02-21,completed
50,51,2018-02-23,completed
51,71,2018-02-24,completed
52,54,2018-02-25,return_pending
53,34,2018-02-26,completed
54,54,2018-02-26,completed
55,18,2018-02-27,completed
56,79,2018-02-28,completed
57,93,2018-03-01,completed
58,22,2018-03-01,completed
59,30,2018-03-02,completed
60,12,2018-03-03,completed
61,63,2018-03-03,completed
62,57,2018-03-05,completed
63,70,2018-03-06,completed
64,13,2018-03-07,completed
65,26,2018-03-08,completed
66,36,2018-03-10,completed
67,79,2018-03-11,completed
68,53,2018-03-11,completed
69,3,2018-03-11,completed
70,8,2018-03-12,completed
71,42,2018-03-12,shipped
72,30,2018-03-14,shipped
73,19,2018-03-16,completed
74,9,2018-03-17,shipped
75,69,2018-03-18,completed
76,25,2018-03-20,completed
77,35,2018-03-21,shipped
78,90,2018-03-23,shipped
79,52,2018-03-23,shipped
80,11,2018-03-23,shipped
81,76,2018-03-23,shipped
82,46,2018-03-24,shipped
83,54,2018-03-24,shipped
84,70,2018-03-26,placed
85,47,2018-03-26,shipped
86,68,2018-03-26,placed
87,46,2018-03-27,placed
88,91,2018-03-27,shipped
89,21,2018-03-28,placed
90,66,2018-03-30,shipped
91,47,2018-03-31,placed
92,84,2018-04-02,placed
93,66,2018-04-03,placed
94,63,2018-04-03,placed
95,27,2018-04-04,placed
96,90,2018-04-06,placed
97,89,2018-04-07,placed
98,41,2018-04-07,placed
99,85,2018-04-09,placed
114
tests/fixtures/jaffle_shop_data/raw_payments.csv
vendored
Normal file
@@ -0,0 +1,114 @@
id,order_id,payment_method,amount
1,1,credit_card,1000
2,2,credit_card,2000
3,3,coupon,100
4,4,coupon,2500
5,5,bank_transfer,1700
6,6,credit_card,600
7,7,credit_card,1600
8,8,credit_card,2300
9,9,gift_card,2300
10,9,bank_transfer,0
11,10,bank_transfer,2600
12,11,credit_card,2700
13,12,credit_card,100
14,13,credit_card,500
15,13,bank_transfer,1400
16,14,bank_transfer,300
17,15,coupon,2200
18,16,credit_card,1000
19,17,bank_transfer,200
20,18,credit_card,500
21,18,credit_card,800
22,19,gift_card,600
23,20,bank_transfer,1500
24,21,credit_card,1200
25,22,bank_transfer,800
26,23,gift_card,2300
27,24,coupon,2600
28,25,bank_transfer,2000
29,25,credit_card,2200
30,25,coupon,1600
31,26,credit_card,3000
32,27,credit_card,2300
33,28,bank_transfer,1900
34,29,bank_transfer,1200
35,30,credit_card,1300
36,31,credit_card,1200
37,32,credit_card,300
38,33,credit_card,2200
39,34,bank_transfer,1500
40,35,credit_card,2900
41,36,bank_transfer,900
42,37,credit_card,2300
43,38,credit_card,1500
44,39,bank_transfer,800
45,40,credit_card,1400
46,41,credit_card,1700
47,42,coupon,1700
48,43,gift_card,1800
49,44,gift_card,1100
50,45,bank_transfer,500
51,46,bank_transfer,800
52,47,credit_card,2200
53,48,bank_transfer,300
54,49,credit_card,600
55,49,credit_card,900
56,50,credit_card,2600
57,51,credit_card,2900
58,51,credit_card,100
59,52,bank_transfer,1500
60,53,credit_card,300
61,54,credit_card,1800
62,54,bank_transfer,1100
63,55,credit_card,2900
64,56,credit_card,400
65,57,bank_transfer,200
66,58,coupon,1800
67,58,gift_card,600
68,59,gift_card,2800
69,60,credit_card,400
70,61,bank_transfer,1600
71,62,gift_card,1400
72,63,credit_card,2900
73,64,bank_transfer,2600
74,65,credit_card,0
75,66,credit_card,2800
76,67,bank_transfer,400
77,67,credit_card,1900
78,68,credit_card,1600
79,69,credit_card,1900
80,70,credit_card,2600
81,71,credit_card,500
82,72,credit_card,2900
83,73,bank_transfer,300
84,74,credit_card,3000
85,75,credit_card,1900
86,76,coupon,200
87,77,credit_card,0
88,77,bank_transfer,1900
89,78,bank_transfer,2600
90,79,credit_card,1800
91,79,credit_card,900
92,80,gift_card,300
93,81,coupon,200
94,82,credit_card,800
95,83,credit_card,100
96,84,bank_transfer,2500
97,85,bank_transfer,1700
98,86,coupon,2300
99,87,gift_card,3000
100,87,credit_card,2600
101,88,credit_card,2900
102,89,bank_transfer,2200
103,90,bank_transfer,200
104,91,credit_card,1900
105,92,bank_transfer,1500
106,92,coupon,200
107,93,gift_card,2600
108,94,coupon,700
109,95,coupon,2400
110,96,gift_card,1700
111,97,bank_transfer,1400
112,98,bank_transfer,1000
113,99,credit_card,2400
14
tests/functional/README.md
Normal file
@@ -0,0 +1,14 @@
# This is where we are putting the pytest conversions of test/integration

# Goals of moving tests to pytest
* Readability
* Modularity
* Easier to create and debug
* Ability to create a project for external debugging

# TODO
* Create the ability to export a project
* Explore using:
  * https://github.com/pytest-docker-compose/pytest-docker-compose or
  * https://github.com/avast/pytest-docker for automatically managing a postgres instance running in a docker container
* Track test coverage (https://pytest-cov.readthedocs.io/en/latest)
3
tests/functional/__init__.py
Normal file
@@ -0,0 +1,3 @@
# Functional tests focus on the business requirements of an application. They
# only verify the output of an action and do not check the intermediate states
# of the system when performing that action.
3
tests/functional/basic/data/summary_expected.csv
Normal file
@@ -0,0 +1,3 @@
gender,ct
Female,40
Male,60
3
tests/functional/basic/data/summary_expected_update.csv
Normal file
@@ -0,0 +1,3 @@
gender,ct
Female,94
Male,106
20
tests/functional/basic/test_basic.py
Normal file
@@ -0,0 +1,20 @@
import pytest
from dbt.tests.util import run_dbt, get_manifest


my_model_sql = """
select 1 as fun
"""


@pytest.fixture
def models():
    return {"my_model.sql": my_model_sql}


def test_basic(project):
    # Tests that a project with a single model works
    results = run_dbt(["run"])
    assert len(results) == 1
    manifest = get_manifest(project.project_root)
    assert "model.test.my_model" in manifest.nodes
88
tests/functional/basic/test_copy_uppercase.py
Normal file
@@ -0,0 +1,88 @@
import pytest
import os
from dbt.tests.tables import TableComparison
from dbt.tests.util import run_dbt

from tests.functional.basic.test_simple_copy import (
    advanced_incremental_sql,
    compound_sort_sql,
    disabled_sql,
    empty_sql,
    incremental_sql,
    interleaved_sort_sql,
    materialized_sql,
    schema_yml,
    view_model_sql,
)

get_and_ref_sql = """
{%- do adapter.get_relation(database=target.database, schema=target.schema, identifier='MATERIALIZED') -%}

select * from {{ ref('MATERIALIZED') }}
"""


@pytest.fixture
def dbt_profile_data(unique_schema, database_host):
    return {
        "config": {"send_anonymous_usage_stats": False},
        "test": {
            "outputs": {
                "default": {
                    "type": "postgres",
                    "threads": 4,
                    "host": database_host,
                    "port": 5432,
                    "user": "root",
                    "pass": "password",
                    "dbname": "dbtMixedCase",
                    "schema": unique_schema,
                },
            },
            "target": "default",
        },
    }


@pytest.fixture
def models():
    return {
        "ADVANCED_INCREMENTAL.sql": advanced_incremental_sql,
        "COMPOUND_SORT.sql": compound_sort_sql,
        "DISABLED.sql": disabled_sql,
        "EMPTY.sql": empty_sql,
        "GET_AND_REF.sql": get_and_ref_sql,
        "INCREMENTAL.sql": incremental_sql,
        "INTERLEAVED_SORT.sql": interleaved_sort_sql,
        "MATERIALIZED.sql": materialized_sql,
        "SCHEMA.yml": schema_yml,
        "VIEW_MODEL.sql": view_model_sql,
    }


@pytest.fixture
def seeds(test_data_dir):
    # Read seed file and return
    path = os.path.join(test_data_dir, "seed-initial.csv")
    with open(path, "rb") as fp:
        seed_csv = fp.read()
    return {"seed.csv": seed_csv}


def test_simple_copy_uppercase(project):
    # Load the seed file and check that it worked
    results = run_dbt(["seed"])
    assert len(results) == 1

    # Run the project and ensure that all the models loaded
    results = run_dbt()
    assert len(results) == 7

    table_comp = TableComparison(
        adapter=project.adapter, unique_schema=project.test_schema, database=project.database
    )
    table_comp.assert_many_tables_equal(
        ["seed", "VIEW_MODEL", "INCREMENTAL", "MATERIALIZED", "GET_AND_REF"]
    )
28
tests/functional/basic/test_invalid_reference.py
Normal file
@@ -0,0 +1,28 @@
import pytest
from dbt.tests.util import run_dbt
from dbt.exceptions import CompilationException


descendant_sql = """
-- should be ref('model')
select * from {{ ref(model) }}
"""


model_sql = """
select 1 as id
"""


@pytest.fixture
def models():
    return {
        "descendant.sql": descendant_sql,
        "model.sql": model_sql,
    }


def test_undefined_value(project):
    # Tests that a project with an invalid reference fails
    with pytest.raises(CompilationException):
        run_dbt(["compile"])
15
tests/functional/basic/test_jaffle_shop.py
Normal file
@@ -0,0 +1,15 @@
from dbt.tests.util import run_dbt, get_manifest

from tests.fixtures.jaffle_shop import models, seeds, project_config_update  # noqa: F401


def test_basic(project):
    # Create the data from seeds
    results = run_dbt(["seed"])

    # Tests that the jaffle_shop project runs
    results = run_dbt(["run"])
    assert len(results) == 5
    manifest = get_manifest(project.project_root)
    assert "model.jaffle_shop.orders" in manifest.nodes
48
tests/functional/basic/test_mixed_case_db.py
Normal file
@@ -0,0 +1,48 @@
import pytest
from dbt.tests.util import run_dbt, get_manifest


model_sql = """
select 1 as id
"""


@pytest.fixture
def models():
    return {"model.sql": model_sql}


@pytest.fixture
def dbt_profile_data(unique_schema, database_host):
    return {
        "config": {"send_anonymous_usage_stats": False},
        "test": {
            "outputs": {
                "default": {
                    "type": "postgres",
                    "threads": 4,
                    "host": database_host,
                    "port": 5432,
                    "user": "root",
                    "pass": "password",
                    "dbname": "dbtMixedCase",
                    "schema": unique_schema,
                },
            },
            "target": "default",
        },
    }


def test_basic(project_root, project):
    assert project.database == "dbtMixedCase"

    # Tests that a project with a single model works
    results = run_dbt(["run"])
    assert len(results) == 1
    manifest = get_manifest(project_root)
    assert "model.test.model" in manifest.nodes
    # Running a second time works
    results = run_dbt(["run"])
217
tests/functional/basic/test_simple_copy.py
Normal file
@@ -0,0 +1,217 @@
import pytest
import os
from dbt.tests.util import run_dbt, copy_file
from dbt.tests.tables import TableComparison

# advanced_incremental.sql
advanced_incremental_sql = """
{{
    config(
        materialized = "incremental",
        unique_key = "id",
        persist_docs = {"relation": true}
    )
}}

select *
from {{ ref('seed') }}

{% if is_incremental() %}

    where id > (select max(id) from {{this}})

{% endif %}
"""

# compound_sort.sql
compound_sort_sql = """
{{
    config(
        materialized = "table",
        sort = 'first_name',
        sort_type = 'compound'
    )
}}

select * from {{ ref('seed') }}
"""

# disabled.sql
disabled_sql = """
{{
    config(
        materialized = "view",
        enabled = False
    )
}}

select * from {{ ref('seed') }}
"""

# empty.sql
empty_sql = """
"""


# get_and_ref.sql
get_and_ref_sql = """
{%- do adapter.get_relation(database=target.database, schema=target.schema, identifier='materialized') -%}

select * from {{ ref('materialized') }}
"""

# incremental.sql
incremental_sql = """
{{
    config(
        materialized = "incremental"
    )
}}

select * from {{ ref('seed') }}

{% if is_incremental() %}
    where id > (select max(id) from {{this}})
{% endif %}
"""

# interleaved_sort.sql
interleaved_sort_sql = """
{{
    config(
        materialized = "table",
        sort = ['first_name', 'last_name'],
        sort_type = 'interleaved'
    )
}}

select * from {{ ref('seed') }}
"""

# materialized.sql
materialized_sql = """
{{
    config(
        materialized = "table"
    )
}}
-- ensure that dbt_utils' relation check will work
{% set relation = ref('seed') %}
{%- if not (relation is mapping and relation.get('metadata', {}).get('type', '').endswith('Relation')) -%}
    {%- do exceptions.raise_compiler_error("Macro " ~ macro ~ " expected a Relation but received the value: " ~ relation) -%}
{%- endif -%}
-- this is a unicode character: å
select * from {{ relation }}
"""

# schema.yml
schema_yml = """
version: 2
models:
  - name: disabled
    columns:
      - name: id
        tests:
          - unique
"""

# view_model.sql
view_model_sql = """
{{
    config(
        materialized = "view"
    )
}}

select * from {{ ref('seed') }}
"""


@pytest.fixture
def models():
    return {
        "advanced_incremental.sql": advanced_incremental_sql,
        "compound_sort.sql": compound_sort_sql,
        "disabled.sql": disabled_sql,
        "empty.sql": empty_sql,
        "get_and_ref.sql": get_and_ref_sql,
        "incremental.sql": incremental_sql,
        "interleaved_sort.sql": interleaved_sort_sql,
        "materialized.sql": materialized_sql,
        "schema.yml": schema_yml,
        "view_model.sql": view_model_sql,
    }


@pytest.fixture
def seeds(test_data_dir):
    # Read seed file and return
    path = os.path.join(test_data_dir, "seed-initial.csv")
    with open(path, "rb") as fp:
        seed_csv = fp.read()
    return {"seed.csv": seed_csv}


@pytest.fixture
def project_config_update():
    return {"seeds": {"quote_columns": False}}


def test_simple_copy(project, test_data_dir):
    # Load the seed file and check that it worked
    results = run_dbt(["seed"])
    assert len(results) == 1

    # Run the project and ensure that all the models loaded
    results = run_dbt()
    assert len(results) == 7
    table_comp = TableComparison(
        adapter=project.adapter, unique_schema=project.test_schema, database=project.database
    )
    table_comp.assert_many_tables_equal(
        ["seed", "view_model", "incremental", "materialized", "get_and_ref"]
    )

    # Change the seed.csv file and see if everything is the same, i.e. everything has been updated
    copy_file(test_data_dir, "seed-update.csv", project.project_root, ["seeds", "seed.csv"])
    results = run_dbt(["seed"])
    assert len(results) == 1
    results = run_dbt()
    assert len(results) == 7
    table_comp.assert_many_tables_equal(
        ["seed", "view_model", "incremental", "materialized", "get_and_ref"]
    )


def test_simple_copy_with_materialized_views(project):
    project.run_sql(f"create table {project.test_schema}.unrelated_table (id int)")
    sql = f"""
    create materialized view {project.test_schema}.unrelated_materialized_view as (
        select * from {project.test_schema}.unrelated_table
    )
    """
    project.run_sql(sql)
    sql = f"""
    create view {project.test_schema}.unrelated_view as (
        select * from {project.test_schema}.unrelated_materialized_view
    )
    """
    project.run_sql(sql)
    results = run_dbt(["seed"])
    assert len(results) == 1
    results = run_dbt()
    assert len(results) == 7


def test_dbt_doesnt_run_empty_models(project):
    results = run_dbt(["seed"])
    assert len(results) == 1
    results = run_dbt()
    assert len(results) == 7

    tables = project.get_tables_in_schema()

    assert "empty" not in tables.keys()
    assert "disabled" not in tables.keys()
274
tests/functional/basic/test_simple_reference.py
Normal file
@@ -0,0 +1,274 @@
import pytest
import os
from dbt.tests.util import run_dbt, copy_file
from dbt.tests.tables import TableComparison


ephemeral_copy_sql = """
{{
    config(
        materialized = "ephemeral"
    )
}}

select * from {{ this.schema }}.users
"""

ephemeral_summary_sql = """
{{
    config(
        materialized = "table"
    )
}}

select gender, count(*) as ct from {{ref('ephemeral_copy')}}
group by gender
order by gender asc
"""

incremental_copy_sql = """
{{
    config(
        materialized = "incremental"
    )
}}

select * from {{ this.schema }}.users

{% if is_incremental() %}

    where id > (select max(id) from {{this}})

{% endif %}
"""

incremental_summary_sql = """
{{
    config(
        materialized = "table",
    )
}}

select gender, count(*) as ct from {{ref('incremental_copy')}}
group by gender
order by gender asc
"""

materialized_copy_sql = """
{{
    config(
        materialized = "table"
    )
}}

select * from {{ this.schema }}.users
"""

materialized_summary_sql = """
{{
    config(
        materialized = "table"
    )
}}

select gender, count(*) as ct from {{ref('materialized_copy')}}
group by gender
order by gender asc
"""

view_copy_sql = """
{{
    config(
        materialized = "view"
    )
}}

select * from {{ this.schema }}.users
"""

view_summary_sql = """
{{
    config(
        materialized = "view"
    )
}}

select gender, count(*) as ct from {{ref('view_copy')}}
group by gender
order by gender asc
"""

view_using_ref_sql = """
{{
    config(
        materialized = "view"
    )
}}

select gender, count(*) as ct from {{ var('var_ref') }}
group by gender
order by gender asc
"""

properties_yml = """
version: 2
seeds:
  - name: summary_expected
    config:
      column_types:
        ct: BIGINT
        gender: text
"""


@pytest.fixture
def models():
    return {
        "ephemeral_copy.sql": ephemeral_copy_sql,
        "ephemeral_summary.sql": ephemeral_summary_sql,
        "incremental_copy.sql": incremental_copy_sql,
        "incremental_summary.sql": incremental_summary_sql,
        "materialized_copy.sql": materialized_copy_sql,
        "materialized_summary.sql": materialized_summary_sql,
        "view_copy.sql": view_copy_sql,
        "view_summary.sql": view_summary_sql,
        "view_using_ref.sql": view_using_ref_sql,
    }


@pytest.fixture
def seeds(test_data_dir):
    # Read seed file and return
    seeds = {"properties.yml": properties_yml}
    path = os.path.join(test_data_dir, "seed-initial.csv")
    with open(path, "rb") as fp:
        seed_csv = fp.read()
    seeds["users.csv"] = seed_csv
    path = os.path.join(test_data_dir, "summary_expected.csv")
    with open(path, "rb") as fp:
        summary_csv = fp.read()
    seeds["summary_expected.csv"] = summary_csv
    return seeds


@pytest.fixture
def project_config_update():
    return {
        "vars": {
            "test": {
                "var_ref": '{{ ref("view_copy") }}',
            },
        },
        "seeds": {"quote_columns": False},
    }


# This test checks that with different materializations we get the right
# tables copied or built.
def test_simple_reference(project):
    results = run_dbt(["seed"])
    assert len(results) == 2

    # Now run dbt
    results = run_dbt()
    assert len(results) == 8

    table_comp = TableComparison(
        adapter=project.adapter, unique_schema=project.test_schema, database=project.database
    )

    # Copies should match
    table_comp.assert_tables_equal("users", "incremental_copy")
    table_comp.assert_tables_equal("users", "materialized_copy")
    table_comp.assert_tables_equal("users", "view_copy")

    # Summaries should match
    table_comp.assert_tables_equal("summary_expected", "incremental_summary")
    table_comp.assert_tables_equal("summary_expected", "materialized_summary")
    table_comp.assert_tables_equal("summary_expected", "view_summary")
    table_comp.assert_tables_equal("summary_expected", "ephemeral_summary")
    table_comp.assert_tables_equal("summary_expected", "view_using_ref")

    # update the seed files and run seed
    copy_file(
        project.test_data_dir, "seed-update.csv", project.project_root, ["seeds", "users.csv"]
    )
    copy_file(
        project.test_data_dir,
        "summary_expected_update.csv",
        project.project_root,
        ["seeds", "summary_expected.csv"],
    )
    results = run_dbt(["seed"])
    assert len(results) == 2

    results = run_dbt()
    assert len(results) == 8

    # Copies should match
    table_comp.assert_tables_equal("users", "incremental_copy")
    table_comp.assert_tables_equal("users", "materialized_copy")
    table_comp.assert_tables_equal("users", "view_copy")

    # Summaries should match
    table_comp.assert_tables_equal("summary_expected", "incremental_summary")
    table_comp.assert_tables_equal("summary_expected", "materialized_summary")
    table_comp.assert_tables_equal("summary_expected", "view_summary")
    table_comp.assert_tables_equal("summary_expected", "ephemeral_summary")


def test_simple_reference_with_models_and_children(project):
    results = run_dbt(["seed"])
    assert len(results) == 2

    # Run materialized_copy, ephemeral_copy, and their dependents
    results = run_dbt(["run", "--models", "materialized_copy+", "ephemeral_copy+"])
    assert len(results) == 3

    table_comp = TableComparison(
        adapter=project.adapter, unique_schema=project.test_schema, database=project.database
    )

    # Copies should match
    table_comp.assert_tables_equal("users", "materialized_copy")

    # Summaries should match
    table_comp.assert_tables_equal("summary_expected", "materialized_summary")
    table_comp.assert_tables_equal("summary_expected", "ephemeral_summary")

    created_tables = project.get_tables_in_schema()

    assert "incremental_copy" not in created_tables
    assert "incremental_summary" not in created_tables
    assert "view_copy" not in created_tables
    assert "view_summary" not in created_tables

    # make sure this wasn't errantly materialized
    assert "ephemeral_copy" not in created_tables

    assert "materialized_copy" in created_tables
    assert "materialized_summary" in created_tables
    assert created_tables["materialized_copy"] == "table"
    assert created_tables["materialized_summary"] == "table"

    assert "ephemeral_summary" in created_tables
    assert created_tables["ephemeral_summary"] == "table"


def test_simple_ref_with_models(project):
    results = run_dbt(["seed"])
    assert len(results) == 2

    # Run materialized_copy, ephemeral_copy, and their dependents
    # ephemeral_copy should not actually be materialized b/c it is ephemeral
    results = run_dbt(["run", "--models", "materialized_copy", "ephemeral_copy"])
    assert len(results) == 1

    # Copies should match
    table_comp = TableComparison(
        adapter=project.adapter, unique_schema=project.test_schema, database=project.database
    )
    table_comp.assert_tables_equal("users", "materialized_copy")

    created_tables = project.get_tables_in_schema()
    assert "materialized_copy" in created_tables
19
tests/functional/simple_seed/test_seed.py
Normal file
@@ -0,0 +1,19 @@
import pytest
from dbt.tests.util import run_dbt

# from `test/integration/test_simple_seed`, test_postgres_simple_seed


@pytest.fixture
def project_config_update():
    return {"seeds": {"quote_columns": False}}


@pytest.fixture
def seeds():
    return {"data.csv": "a,b\n1,hello\n2,goodbye"}


def test_simple_seed(project):
    results = run_dbt(["seed"])
    assert len(results) == 1
1
tests/unit/__init__.py
Normal file
@@ -0,0 +1 @@
# Unit testing directory
5
tox.ini
@@ -16,7 +16,9 @@ description = adapter plugin integration testing
skip_install = true
passenv = DBT_* POSTGRES_TEST_* PYTEST_ADDOPTS
commands =
    postgres: {envpython} -m pytest -m profile_postgres {posargs:test/integration}
    postgres: {envpython} -m pytest -m profile_postgres {posargs} test/integration
    postgres: {envpython} -m pytest {posargs} tests/functional

deps =
    -rdev-requirements.txt
    -e./core
@@ -28,3 +30,4 @@ env_files =
testpaths =
    test/unit
    test/integration
    tests/functional