forked from repo-mirrors/dbt-core
Compare commits
5 Commits
jerco/pyth
...
CT-866/mig
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
233bd915ab | ||
|
|
eddab74cdb | ||
|
|
0dde2032c1 | ||
|
|
6caf04d471 | ||
|
|
bbde52c824 |
@@ -1,6 +1,7 @@
|
||||
from test.integration.base import DBTIntegrationTest, use_profile
|
||||
import os
|
||||
|
||||
# wwwhwhhhwywyw
|
||||
|
||||
class TestPrePostRunHooks(DBTIntegrationTest):
|
||||
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
name: dependency
|
||||
version: '1.0.0'
|
||||
config-version: 2
|
||||
@@ -1,2 +0,0 @@
|
||||
{% macro some_macro() %}
|
||||
{% endmacro %}
|
||||
@@ -1,10 +0,0 @@
|
||||
{{
|
||||
config(
|
||||
materialized='table'
|
||||
)
|
||||
}}
|
||||
|
||||
{# we don't care what, just do anything that will fail without "dbt deps" #}
|
||||
{% do dependency.some_macro() %}
|
||||
|
||||
select 1 as id
|
||||
@@ -1,9 +0,0 @@
|
||||
version: 2
|
||||
models:
|
||||
- name: model
|
||||
columns:
|
||||
- name: id
|
||||
tests:
|
||||
- accepted_values:
|
||||
values:
|
||||
- 1
|
||||
@@ -1,4 +0,0 @@
|
||||
|
||||
|
||||
create table {schema}.seed ( id int );
|
||||
insert into {schema}.seed (id) values (1);
|
||||
@@ -1,3 +0,0 @@
|
||||
|
||||
create table {schema}.seed ( id int );
|
||||
insert into {schema}.seed (id) values (1);
|
||||
@@ -1,304 +0,0 @@
|
||||
from test.integration.base import DBTIntegrationTest, use_profile
|
||||
import contextlib
|
||||
import os
|
||||
import shutil
|
||||
import pytest
|
||||
import tempfile
|
||||
import yaml
|
||||
from typing import Dict
|
||||
|
||||
|
||||
@contextlib.contextmanager
def change_working_directory(directory: str) -> str:
    """
    Temporarily switch the process working directory to *directory*.

    The previous working directory is restored on exit, whether the
    block completes normally or raises.

    Parameters
    ----------
    directory: str
        Target working directory for the duration of the ``with`` block.

    Yields
    ------
    out: str
        The directory that was switched to.
    """
    previous = os.getcwd()
    os.chdir(directory)
    try:
        yield directory
    finally:
        # Restore even if the body raised, so callers never leak a cwd change.
        os.chdir(previous)
|
||||
|
||||
|
||||
@contextlib.contextmanager
def temporary_working_directory() -> str:
    """
    Create a temporary directory and make it the working directory.

    Both the directory and the working-directory change are undone on exit.

    Yields
    ------
    out: str
        Path of the temporary working directory.
    """
    # N.B: suppressing the OSError is necessary for older (pre 3.10) versions
    # of python which do not support the `ignore_cleanup_errors` in
    # tempfile::TemporaryDirectory.
    # See: https://github.com/python/cpython/pull/24793
    #
    # In our case the cleanup is redundant since windows handles clearing
    # Appdata/Local/Temp at the os level anyway.
    with contextlib.suppress(OSError):
        with tempfile.TemporaryDirectory() as tmpdir, change_working_directory(tmpdir):
            yield tmpdir
|
||||
|
||||
|
||||
def get_custom_profiles_config(database_host, custom_schema):
    """
    Build a dbt profiles.yml payload whose `default` target writes into
    *custom_schema* on the postgres instance at *database_host*.

    Parameters
    ----------
    database_host: str
        Hostname of the postgres instance.
    custom_schema: str
        Schema name for the `default` output.
    """
    default_output = {
        "type": "postgres",
        "threads": 1,
        "host": database_host,
        "port": 5432,
        "user": "root",
        "pass": "password",
        "dbname": "dbt",
        "schema": custom_schema,
    }
    return {
        "config": {"send_anonymous_usage_stats": False},
        "test": {
            "outputs": {"default": default_output},
            "target": "default",
        },
    }
|
||||
|
||||
|
||||
def create_directory_with_custom_profiles(
    directory: str,
    profiles: Dict
) -> None:
    """
    Create a directory (if needed) containing a profiles.yml.

    Parameters
    ----------
    directory: str
        The directory in which a profiles file is created.
    profiles: Dict
        The profiles to put into the profiles.yml
    """
    # exist_ok avoids the race between a separate existence check and
    # makedirs (the original check-then-create could raise FileExistsError
    # if the directory appeared in between).
    os.makedirs(directory, exist_ok=True)

    # os.path.join keeps the path well-formed regardless of trailing
    # separators in `directory`.
    with open(os.path.join(directory, "profiles.yml"), "w") as f:
        yaml.safe_dump(profiles, f, default_flow_style=True)
|
||||
|
||||
|
||||
class ModelCopyingIntegrationTest(DBTIntegrationTest):
    """Base test that copies the models directory instead of symlinking it,
    and installs the sibling `local_dependency` folder as a local package."""

    def _symlink_test_folders(self):
        # dbt's normal symlink behavior breaks this test, so special-case it
        for entry in os.listdir(self.test_original_source_path):
            src = os.path.join(self.test_original_source_path, entry)
            tst = os.path.join(self.test_root_dir, entry)
            if entry == 'models':
                # Real copy: the test mutates cwd, so a symlinked models dir
                # would resolve back into the shared source tree.
                shutil.copytree(src, tst)
            elif entry == 'local_dependency':
                # Installed via packages_config below; not linked in directly.
                continue
            elif os.path.isdir(entry) or entry.endswith('.sql'):
                # NOTE(review): isdir(entry) is resolved relative to the
                # current working directory, not the source path — `src`
                # looks like the intended argument; confirm.
                os.symlink(src, tst)

    @property
    def packages_config(self):
        # Declare `local_dependency` (next to the test source) as a local
        # package so `dbt deps` installs it.
        path = os.path.join(self.test_original_source_path, 'local_dependency')
        return {
            'packages': [{
                'local': path,
            }],
        }
|
||||
|
||||
|
||||
class TestCLIInvocation(ModelCopyingIntegrationTest):
    """dbt runs correctly both from the project root and from a model
    subdirectory."""

    def setUp(self):
        super().setUp()
        # Load the expected data the built model is compared against.
        self.run_sql_file("seed.sql")

    @property
    def schema(self):
        return "test_cli_invocation_015"

    @property
    def models(self):
        return "models"

    @use_profile('postgres')
    def test_postgres_toplevel_dbt_run(self):
        self.run_dbt(['deps'])
        results = self.run_dbt(['run'])
        # Exactly one model is expected to build.
        self.assertEqual(len(results), 1)
        self.assertTablesEqual("seed", "model")

    @use_profile('postgres')
    def test_postgres_subdir_dbt_run(self):
        # Invoke dbt from inside models/subdir1; it should still locate the
        # project root and behave identically.
        os.chdir(os.path.join(self.models, "subdir1"))
        self.run_dbt(['deps'])

        results = self.run_dbt(['run'])
        self.assertEqual(len(results), 1)
        self.assertTablesEqual("seed", "model")
|
||||
|
||||
|
||||
class TestCLIInvocationWithProfilesDir(ModelCopyingIntegrationTest):
    """dbt honors --profiles-dir: runs and tests execute against the schema
    configured in the custom profiles.yml, not the default one."""

    def setUp(self):
        super().setUp()

        self.run_sql(f"DROP SCHEMA IF EXISTS {self.custom_schema} CASCADE;")
        self.run_sql(f"CREATE SCHEMA {self.custom_schema};")

        # Write a profiles.yml targeting custom_schema into ./dbt-profile.
        profiles = get_custom_profiles_config(
            self.database_host, self.custom_schema)
        create_directory_with_custom_profiles(
            "./dbt-profile", profiles)

        self.run_sql_file("seed_custom.sql")

    def tearDown(self):
        self.run_sql(f"DROP SCHEMA IF EXISTS {self.custom_schema} CASCADE;")
        super().tearDown()

    @property
    def schema(self):
        return "test_cli_invocation_015"

    @property
    def custom_schema(self):
        return "{}_custom".format(self.unique_schema())

    @property
    def models(self):
        return "models"

    @use_profile('postgres')
    def test_postgres_toplevel_dbt_run_with_profile_dir_arg(self):
        self.run_dbt(['deps'])
        results = self.run_dbt(['run', '--profiles-dir', 'dbt-profile'], profiles_dir=False)
        self.assertEqual(len(results), 1)

        actual = self.run_sql("select id from {}.model".format(self.custom_schema), fetch='one')

        expected = (1, )
        self.assertEqual(actual, expected)

        res = self.run_dbt(['test', '--profiles-dir', 'dbt-profile'], profiles_dir=False)

        # make sure the test runs against `custom_schema`
        for test_result in res:
            # BUG FIX: the original `assertTrue(self.custom_schema, ...)`
            # treated the compiled code as the failure *message*, so the
            # assertion passed vacuously (a non-empty schema name is always
            # truthy). assertIn actually checks containment.
            self.assertIn(self.custom_schema, test_result.node.compiled_code)
|
||||
|
||||
|
||||
class TestCLIInvocationWithProjectDir(ModelCopyingIntegrationTest):
    """dbt honors --project-dir whether it is the cwd, an absolute path from
    an unrelated directory, or a relative path."""

    @property
    def schema(self):
        return "test_cli_invocation_015"

    @property
    def models(self):
        return "models"

    @use_profile('postgres')
    def test_postgres_dbt_commands_with_cwd_as_project_dir(self):
        self._run_simple_dbt_commands(os.getcwd())

    @use_profile('postgres')
    def test_postgres_dbt_commands_with_randomdir_as_project_dir(self):
        workdir = self.test_root_dir
        with tempfile.TemporaryDirectory() as tmpdir:
            os.chdir(tmpdir)
            # ROBUSTNESS FIX: restore the cwd in a finally block. Previously a
            # failing dbt command left the process cwd inside tmpdir, which
            # TemporaryDirectory then deletes, breaking cleanup and any
            # subsequent test that touches the cwd.
            try:
                self._run_simple_dbt_commands(workdir)
            finally:
                os.chdir(workdir)

    @use_profile('postgres')
    def test_postgres_dbt_commands_with_relative_dir_as_project_dir(self):
        workdir = self.test_root_dir
        with tempfile.TemporaryDirectory() as tmpdir:
            os.chdir(tmpdir)
            try:
                # Address the project relative to the temporary cwd.
                self._run_simple_dbt_commands(os.path.relpath(workdir, tmpdir))
            finally:
                # Same cwd-restoration guarantee as above.
                os.chdir(workdir)

    def _run_simple_dbt_commands(self, project_dir):
        # Exercise each major sub-command with an explicit --project-dir.
        self.run_dbt(['deps', '--project-dir', project_dir])
        self.run_dbt(['seed', '--project-dir', project_dir])
        self.run_dbt(['run', '--project-dir', project_dir])
        self.run_dbt(['test', '--project-dir', project_dir])
        self.run_dbt(['parse', '--project-dir', project_dir])
        self.run_dbt(['clean', '--project-dir', project_dir])
        # In case of 'dbt clean' also test that the clean-targets directories were deleted.
        for target in self.config.clean_targets:
            assert not os.path.isdir(target)
|
||||
|
||||
|
||||
class TestCLIInvocationWithProfilesAndProjectDir(ModelCopyingIntegrationTest):
    """dbt sub-commands work when --profiles-dir and --project-dir are both
    given and neither is the current working directory."""

    @property
    def schema(self):
        return "test_cli_invocation_015"

    @property
    def models(self):
        return "models"

    @property
    def custom_schema(self):
        return "{}_custom".format(self.unique_schema())

    def _test_postgres_sub_command_with_profiles_separate_from_project_dir(
        self,
        dbt_sub_command: str
    ):
        """
        Test if a sub command runs well when a profiles dir is separate from a
        project dir.

        """
        profiles_dir = "./tmp-profile"
        workdir = os.getcwd()
        # Run from a fresh temporary cwd so neither dir coincides with it.
        with temporary_working_directory() as tmpdir:

            # Profiles are written under the temporary cwd, not the project.
            profiles = get_custom_profiles_config(
                self.database_host, self.custom_schema)
            create_directory_with_custom_profiles(profiles_dir, profiles)

            # Address the project by a path relative to the new cwd.
            project_dir = os.path.relpath(workdir, os.getcwd())
            # Remove any profiles.yml inside the project — presumably so dbt
            # cannot pick it up instead of --profiles-dir; confirm.
            if os.path.exists(f"{project_dir}/profiles.yml"):
                os.remove(f"{project_dir}/profiles.yml")

            other_args = [
                dbt_sub_command, "--profiles-dir", profiles_dir, "--project-dir", project_dir
            ]
            self.run_dbt(other_args, profiles_dir=False)

    @use_profile("postgres")
    def test_postgres_deps_with_profiles_separate_from_project_dir(self):
        self._test_postgres_sub_command_with_profiles_separate_from_project_dir("deps")

    @use_profile("postgres")
    def test_postgres_run_with_profiles_separate_from_project_dir(self):
        # 'deps' first so packages are installed before 'run'.
        self._test_postgres_sub_command_with_profiles_separate_from_project_dir("deps")
        self._test_postgres_sub_command_with_profiles_separate_from_project_dir("run")

    @use_profile("postgres")
    def test_postgres_test_with_profiles_separate_from_project_dir(self):
        # Full chain: deps -> run -> test.
        self._test_postgres_sub_command_with_profiles_separate_from_project_dir("deps")
        self._test_postgres_sub_command_with_profiles_separate_from_project_dir("run")
        self._test_postgres_sub_command_with_profiles_separate_from_project_dir("test")

    @use_profile("postgres")
    def test_postgres_debug_with_profiles_separate_from_project_dir(self):
        self._test_postgres_sub_command_with_profiles_separate_from_project_dir("debug")
|
||||
@@ -1,4 +0,0 @@
|
||||
{% if some_macro('foo', 'bar') != 'foobar' %}
|
||||
{% do exceptions.raise_compiler_error('invalid foobar') %}
|
||||
{% endif %}
|
||||
select 1 as id
|
||||
@@ -1,3 +0,0 @@
|
||||
{% macro some_macro(arg1, arg2) -%}
|
||||
{{ adapter_macro('some_macro', arg1, arg2) }}
|
||||
{%- endmacro %}
|
||||
@@ -1,2 +0,0 @@
|
||||
{{ dispatch_to_nowhere() }}
|
||||
select 1 as id
|
||||
@@ -1,30 +0,0 @@
|
||||
|
||||
{% macro do_something2(foo2, bar2) %}
|
||||
|
||||
select
|
||||
'{{ foo2 }}' as foo2,
|
||||
'{{ bar2 }}' as bar2
|
||||
|
||||
{% endmacro %}
|
||||
|
||||
|
||||
{% macro with_ref() %}
|
||||
|
||||
{{ ref('table_model') }}
|
||||
|
||||
{% endmacro %}
|
||||
|
||||
|
||||
{% macro dispatch_to_parent() %}
|
||||
{% set macro = adapter.dispatch('dispatch_to_parent') %}
|
||||
{{ macro() }}
|
||||
{% endmacro %}
|
||||
|
||||
{% macro default__dispatch_to_parent() %}
|
||||
{% set msg = 'No default implementation of dispatch_to_parent' %}
|
||||
{{ exceptions.raise_compiler_error(msg) }}
|
||||
{% endmacro %}
|
||||
|
||||
{% macro postgres__dispatch_to_parent() %}
|
||||
{{ return('') }}
|
||||
{% endmacro %}
|
||||
@@ -1,4 +0,0 @@
|
||||
|
||||
{{
|
||||
dbt_integration_project.do_something("arg1", "arg2")
|
||||
}}
|
||||
@@ -1,12 +0,0 @@
|
||||
|
||||
{{
|
||||
do_something2("arg1", "arg2")
|
||||
}}
|
||||
|
||||
union all
|
||||
|
||||
{{
|
||||
test.do_something2("arg3", "arg4")
|
||||
}}
|
||||
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
select * from {{ with_ref() }}
|
||||
@@ -1,35 +0,0 @@
|
||||
|
||||
{% macro do_something2(foo2, bar2) %}
|
||||
|
||||
select
|
||||
'{{ foo2 }}' as foo2,
|
||||
'{{ bar2 }}' as bar2
|
||||
|
||||
{% endmacro %}
|
||||
|
||||
|
||||
{% macro with_ref() %}
|
||||
|
||||
{{ ref('table_model') }}
|
||||
|
||||
{% endmacro %}
|
||||
|
||||
{# there is no default__dispatch_to_nowhere! #}
|
||||
{% macro dispatch_to_nowhere() %}
|
||||
{% set macro = adapter.dispatch('dispatch_to_nowhere') %}
|
||||
{{ macro() }}
|
||||
{% endmacro %}
|
||||
|
||||
{% macro dispatch_to_parent() %}
|
||||
{% set macro = adapter.dispatch('dispatch_to_parent') %}
|
||||
{{ macro() }}
|
||||
{% endmacro %}
|
||||
|
||||
{% macro default__dispatch_to_parent() %}
|
||||
{% set msg = 'No default implementation of dispatch_to_parent' %}
|
||||
{{ exceptions.raise_compiler_error(msg) }}
|
||||
{% endmacro %}
|
||||
|
||||
{% macro postgres__dispatch_to_parent() %}
|
||||
{{ return('') }}
|
||||
{% endmacro %}
|
||||
@@ -1,3 +0,0 @@
|
||||
{% macro get_columns_in_relation(relation) %}
|
||||
{{ return('a string') }}
|
||||
{% endmacro %}
|
||||
@@ -1,5 +0,0 @@
|
||||
{% set result = adapter.get_columns_in_relation(this) %}
|
||||
{% if execute and result != 'a string' %}
|
||||
{% do exceptions.raise_compiler_error('overriding get_columns_in_relation failed') %}
|
||||
{% endif %}
|
||||
select 1 as id
|
||||
@@ -1,3 +0,0 @@
|
||||
{% macro postgres__get_columns_in_relation(relation) %}
|
||||
{{ return('a string') }}
|
||||
{% endmacro %}
|
||||
@@ -1,7 +0,0 @@
|
||||
name: 'package_macro_overrides'
|
||||
version: '1.0'
|
||||
config-version: 2
|
||||
|
||||
profile: 'default'
|
||||
|
||||
macro-paths: ["macros"]
|
||||
@@ -1,3 +0,0 @@
|
||||
{% macro get_columns_in_relation(relation) %}
|
||||
{{ return('a string') }}
|
||||
{% endmacro %}
|
||||
@@ -1,24 +0,0 @@
|
||||
create table {schema}.expected_dep_macro (
|
||||
foo TEXT,
|
||||
bar TEXT
|
||||
);
|
||||
|
||||
create table {schema}.expected_local_macro (
|
||||
foo2 TEXT,
|
||||
bar2 TEXT
|
||||
);
|
||||
|
||||
create table {schema}.seed (
|
||||
id integer,
|
||||
updated_at timestamp
|
||||
);
|
||||
|
||||
insert into {schema}.expected_dep_macro (foo, bar)
|
||||
values ('arg1', 'arg2');
|
||||
|
||||
insert into {schema}.expected_local_macro (foo2, bar2)
|
||||
values ('arg1', 'arg2'), ('arg3', 'arg4');
|
||||
|
||||
insert into {schema}.seed (id, updated_at)
|
||||
values (1, '2017-01-01'), (2, '2017-01-02');
|
||||
|
||||
@@ -1,234 +0,0 @@
|
||||
from test.integration.base import DBTIntegrationTest, use_profile
|
||||
|
||||
import dbt.exceptions
|
||||
import pytest
|
||||
|
||||
|
||||
class TestMacros(DBTIntegrationTest):
    """Macros defined in the project and in an installed git package both
    work when models invoke them."""

    def setUp(self):
        DBTIntegrationTest.setUp(self)
        # Seed the expected_* comparison tables.
        self.run_sql_file("seed.sql")

    @property
    def schema(self):
        return "test_macros_016"

    @property
    def models(self):
        return "models"

    @property
    def packages_config(self):
        # Pull macros in from the public dbt-integration-project package.
        return {
            'packages': [
                {
                    'git': 'https://github.com/dbt-labs/dbt-integration-project',
                    'revision': 'dbt/1.0.0',
                },
            ]
        }

    @property
    def project_config(self):
        return {
            'config-version': 2,
            'vars': {
                'test': {
                    'test': 'DUMMY',
                },
            },
            "macro-paths": ["macros"],
        }

    @use_profile('postgres')
    def test_postgres_working_macros(self):
        self.run_dbt(["deps"])
        results = self.run_dbt(["run"])
        self.assertEqual(len(results), 6)

        # Models built via the package macro and the local macro must match
        # the seeded expectations.
        self.assertTablesEqual("expected_dep_macro", "dep_macro")
        self.assertTablesEqual("expected_local_macro", "local_macro")
|
||||
|
||||
|
||||
class TestInvalidMacros(DBTIntegrationTest):
    """Running the project without valid macro configuration raises."""

    def setUp(self):
        DBTIntegrationTest.setUp(self)

    @property
    def schema(self):
        return "test_macros_016"

    @property
    def models(self):
        return "models"

    @use_profile('postgres')
    def test_postgres_invalid_macro(self):
        # NOTE(review): unlike TestMacros, no macro-paths/packages are
        # configured here, which presumably is what makes the run fail —
        # confirm against the models' macro usage.
        with pytest.raises(RuntimeError):
            self.run_dbt(["run"])
|
||||
|
||||
|
||||
class TestAdapterMacroNoDestination(DBTIntegrationTest):
    """Dispatching to a macro with no default__ implementation produces a
    clear compilation error naming the missing macro."""

    @property
    def schema(self):
        return "test_macros_016"

    @property
    def models(self):
        return "fail-missing-macro-models"

    @property
    def project_config(self):
        return {
            'config-version': 2,
            "macro-paths": ["no-default-macros"]
        }

    @use_profile('postgres')
    def test_postgres_invalid_macro(self):
        with pytest.raises(dbt.exceptions.CompilationException) as exc:
            self.run_dbt(['run'])

        # The error must identify the macro that dispatch failed to resolve.
        assert "In dispatch: No macro named 'dispatch_to_nowhere' found" in str(exc.value)
|
||||
|
||||
|
||||
class TestMacroOverrideBuiltin(DBTIntegrationTest):
    """A project macro can override dbt's built-in
    get_columns_in_relation."""

    @property
    def schema(self):
        return "test_macros_016"

    @property
    def models(self):
        return 'override-get-columns-models'

    @property
    def project_config(self):
        return {
            'config-version': 2,
            'macro-paths': ['override-get-columns-macros'],
        }

    @use_profile('postgres')
    def test_postgres_overrides(self):
        # the first time, the model doesn't exist
        self.run_dbt()
        # the second run exercises the override against an existing relation
        self.run_dbt()
|
||||
|
||||
|
||||
class TestMacroOverridePackage(DBTIntegrationTest):
    """
    The macro in `override-postgres-get-columns-macros` should override the
    `get_columns_in_relation` macro by default.
    """

    @property
    def schema(self):
        return "test_macros_016"

    @property
    def models(self):
        return 'override-get-columns-models'

    @property
    def project_config(self):
        # Same models as TestMacroOverrideBuiltin, but the override lives in
        # the postgres-prefixed macro path.
        return {
            'config-version': 2,
            'macro-paths': ['override-postgres-get-columns-macros'],
        }

    @use_profile('postgres')
    def test_postgres_overrides(self):
        # the first time, the model doesn't exist
        self.run_dbt()
        # the second run exercises the override against an existing relation
        self.run_dbt()
|
||||
|
||||
|
||||
class TestMacroNotOverridePackage(DBTIntegrationTest):
    """
    The macro in `override-postgres-get-columns-macros` does NOT override the
    `get_columns_in_relation` macro because we tell dispatch to not look at the
    postgres macros.
    """

    @property
    def schema(self):
        return "test_macros_016"

    @property
    def models(self):
        return 'override-get-columns-models'

    @property
    def project_config(self):
        return {
            'config-version': 2,
            'macro-paths': ['override-postgres-get-columns-macros'],
            # search_order of ['dbt'] only — the project's postgres__ macro is
            # never consulted, so the built-in behavior applies.
            'dispatch': [{'macro_namespace': 'dbt', 'search_order': ['dbt']}],
        }

    @use_profile('postgres')
    def test_postgres_overrides(self):
        # the first time, the model doesn't exist
        self.run_dbt(expect_pass=False)
        # without the override the model fails on both runs
        self.run_dbt(expect_pass=False)
|
||||
|
||||
|
||||
class TestDispatchMacroOverrideBuiltin(TestMacroOverrideBuiltin):
    # test the same functionality as above, but this time,
    # dbt.get_columns_in_relation will dispatch to a default__ macro
    # from an installed package, per dispatch config search_order

    @property
    def project_config(self):
        return {
            "config-version": 2,
            # No macro-paths here: the override comes from the installed
            # package via the dispatch search_order below.
            "dispatch": [
                {
                    "macro_namespace": "dbt",
                    "search_order": ["test", "package_macro_overrides", "dbt"],
                }
            ],
        }

    @property
    def packages_config(self):
        return {
            'packages': [
                {
                    "local": "./package_macro_overrides",
                },
            ]
        }

    @use_profile('postgres')
    def test_postgres_overrides(self):
        # deps must run first so the local package's macros are installed
        self.run_dbt(["deps"])
        super().test_postgres_overrides()
|
||||
|
||||
|
||||
class TestAdapterMacroDeprecated(DBTIntegrationTest):
    """Invoking the removed `adapter_macro` helper is a compilation error
    with a deprecation message."""

    @property
    def schema(self):
        return "test_macros_016"

    @property
    def models(self):
        return "deprecated-adapter-macro-models"

    @property
    def project_config(self):
        return {
            'config-version': 2,
            "macro-paths": ["deprecated-adapter-macro"]
        }

    @use_profile('postgres')
    def test_postgres_invalid_macro(self):
        with pytest.raises(dbt.exceptions.CompilationException) as exc:
            self.run_dbt(['run'])

        # The error message must point users at the deprecation.
        assert 'The "adapter_macro" macro has been deprecated' in str(exc.value)
|
||||
Reference in New Issue
Block a user