delete duplicate adapters tests (#12275)

Author: Michelle Ark
Date: 2025-12-12 16:53:04 -05:00
Committed by: GitHub
Parent: 8097a34726
Commit: c4dc80dcd2

169 changed files with 169 additions and 16824 deletions

.github/dbt-postgres-testing.yml (vendored, new file)

@@ -0,0 +1,169 @@
# **what?**
# Runs all tests in dbt-postgres with this branch of dbt-core to ensure nothing is broken
# **why?**
# Ensure dbt-core changes do not break dbt-postgres, as a basic proxy for other adapters
# **when?**
# This will run when trying to merge a PR into main.
# It can also be manually triggered.
# This workflow can be skipped by adding the "Skip Postgres Testing" label to the PR. This is
# useful when making a change in both `dbt-postgres` and `dbt-core` where the changes are
# interdependent and would otherwise cause tests in the other repository to break.
name: "dbt-postgres Tests"
run-name: >-
${{ (github.event_name == 'workflow_dispatch' || github.event_name == 'workflow_call')
&& format('dbt-postgres@{0} with dbt-core@{1}', inputs.dbt-postgres-ref, inputs.dbt-core-ref)
|| 'dbt-postgres@main with dbt-core branch' }}
on:
push:
branches:
- "main"
- "*.latest"
- "releases/*"
pull_request:
merge_group:
types: [checks_requested]
workflow_dispatch:
inputs:
dbt-postgres-ref:
description: "The branch of dbt-postgres to test against"
default: "main"
dbt-core-ref:
description: "The branch of dbt-core to test against"
default: "main"
workflow_call:
inputs:
dbt-postgres-ref:
description: "The branch of dbt-postgres to test against"
type: string
required: true
default: "main"
dbt-core-ref:
description: "The branch of dbt-core to test against"
type: string
required: true
default: "main"
permissions: read-all
# cancels in-progress runs triggered by the same event:
# the same ref for PRs/merge groups (or the same SHA otherwise),
# and the same inputs for workflow_dispatch/workflow_call
concurrency:
group: ${{ github.workflow }}-${{ github.event_name }}-${{ contains(fromJson('["pull_request", "merge_group"]'), github.event_name) && github.event.pull_request.head.ref || github.sha }}-${{ contains(fromJson('["workflow_call", "workflow_dispatch"]'), github.event_name) && github.event.inputs.dbt-postgres-ref && github.event.inputs.dbt-core-ref || github.sha }}
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
job-prep:
# This allows us to run the workflow on pull requests as well, so we can always run unit tests
# and only run integration tests on merge, to save time
name: Setup Repo Refs
runs-on: ubuntu-latest
outputs:
dbt-postgres-ref: ${{ steps.core-ref.outputs.ref }}
dbt-core-ref: ${{ steps.common-ref.outputs.ref }}
steps:
- name: "Input Refs"
id: job-inputs
run: |
echo "inputs.dbt-postgres-ref=${{ inputs.dbt-postgres-ref }}"
echo "inputs.dbt-core-ref=${{ inputs.dbt-core-ref }}"
- name: "Determine dbt-postgres ref"
id: core-ref
run: |
if [[ -z "${{ inputs.dbt-postgres-ref }}" ]]; then
REF="main"
else
REF=${{ inputs.dbt-postgres-ref }}
fi
echo "ref=$REF" >> $GITHUB_OUTPUT
- name: "Determine dbt-core ref"
id: common-ref
run: |
if [[ -z "${{ inputs.dbt-core-ref }}" ]]; then
# these will be commits instead of branches
if [[ "${{ github.event_name }}" == "merge_group" ]]; then
REF=${{ github.event.merge_group.head_sha }}
else
REF=${{ github.event.pull_request.base.sha }}
fi
else
REF=${{ inputs.dbt-core-ref }}
fi
echo "ref=$REF" >> $GITHUB_OUTPUT
- name: "Final Refs"
run: |
echo "dbt-postgres-ref=${{ steps.core-ref.outputs.ref }}"
echo "dbt-core-ref=${{ steps.common-ref.outputs.ref }}"
# integration-tests-postgres:
# name: "dbt-postgres integration tests"
# needs: [job-prep]
# runs-on: ubuntu-latest
# defaults:
# run:
# working-directory: "./dbt-postgres"
# environment:
# name: "dbt-postgres"
# env:
# POSTGRES_TEST_HOST: ${{ vars.POSTGRES_TEST_HOST }}
# POSTGRES_TEST_PORT: ${{ vars.POSTGRES_TEST_PORT }}
# POSTGRES_TEST_USER: ${{ vars.POSTGRES_TEST_USER }}
# POSTGRES_TEST_PASS: ${{ secrets.POSTGRES_TEST_PASS }}
# POSTGRES_TEST_DATABASE: ${{ vars.POSTGRES_TEST_DATABASE }}
# POSTGRES_TEST_THREADS: ${{ vars.POSTGRES_TEST_THREADS }}
# services:
# postgres:
# image: postgres
# env:
# POSTGRES_PASSWORD: postgres
# options: >-
# --health-cmd pg_isready
# --health-interval 10s
# --health-timeout 5s
# --health-retries 5
# ports:
# - ${{ vars.POSTGRES_TEST_PORT }}:5432
# steps:
# - name: "Check out dbt-adapters@${{ needs.job-prep.outputs.dbt-postgres-ref }}"
# uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
# with:
# repository: dbt-labs/dbt-adapters
# ref: ${{ needs.job-prep.outputs.dbt-postgres-ref }}
# - name: "Set up Python"
# uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # actions/setup-python@v5
# with:
# python-version: ${{ inputs.python-version }}
# - name: "Set environment variables"
# run: |
# echo "HATCH_PYTHON=${{ inputs.python-version }}" >> $GITHUB_ENV
# echo "PIP_ONLY_BINARY=psycopg2-binary" >> $GITHUB_ENV
# - name: "Setup test database"
# run: psql -f ./scripts/setup_test_database.sql
# env:
# PGHOST: ${{ vars.POSTGRES_TEST_HOST }}
# PGPORT: ${{ vars.POSTGRES_TEST_PORT }}
# PGUSER: postgres
# PGPASSWORD: postgres
# PGDATABASE: postgres
# - name: "Install hatch"
# uses: pypa/hatch@257e27e51a6a5616ed08a39a408a21c35c9931bc # pypa/hatch@install
# - name: "Run integration tests"
# run: hatch run ${{ inputs.hatch-env }}:integration-tests
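For reference, the workflow_dispatch trigger above can also be fired programmatically through the GitHub REST API. The sketch below is illustrative and not part of this commit; it assumes a GITHUB_TOKEN with permission to dispatch workflows and that the API resolves the workflow by its file name.

# Illustrative sketch: manually dispatch the dbt-postgres-testing workflow.
import os

import requests

def dispatch_postgres_tests(dbt_postgres_ref: str = "main", dbt_core_ref: str = "main") -> None:
    url = (
        "https://api.github.com/repos/dbt-labs/dbt-core"
        "/actions/workflows/dbt-postgres-testing.yml/dispatches"
    )
    response = requests.post(
        url,
        headers={
            "Authorization": f"Bearer {os.environ['GITHUB_TOKEN']}",
            "Accept": "application/vnd.github+json",
        },
        json={
            "ref": "main",  # the branch whose copy of the workflow should run
            "inputs": {
                "dbt-postgres-ref": dbt_postgres_ref,
                "dbt-core-ref": dbt_core_ref,
            },
        },
        timeout=30,
    )
    response.raise_for_status()  # GitHub returns 204 No Content on success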


@@ -1,210 +0,0 @@
# macros #
MACROS__CAST_SQL = """
{% macro string_literal(s) -%}
{{ adapter.dispatch('string_literal', macro_namespace='test')(s) }}
{%- endmacro %}
{% macro default__string_literal(s) %}
'{{ s }}'::text
{% endmacro %}
"""
MACROS__EXPECT_VALUE_SQL = """
-- cross-db compatible test, similar to accepted_values
{% test expect_value(model, field, value) %}
select *
from {{ model }}
where {{ field }} != '{{ value }}'
{% endtest %}
"""
# base aliases #
MODELS__SCHEMA_YML = """
version: 2
models:
- name: foo_alias
data_tests:
- expect_value:
field: tablename
value: foo
- name: ref_foo_alias
data_tests:
- expect_value:
field: tablename
value: ref_foo_alias
- name: alias_in_project
data_tests:
- expect_value:
field: tablename
value: project_alias
- name: alias_in_project_with_override
data_tests:
- expect_value:
field: tablename
value: override_alias
"""
MODELS__FOO_ALIAS_SQL = """
{{
config(
alias='foo',
materialized='table'
)
}}
select {{ string_literal(this.name) }} as tablename
"""
MODELS__ALIAS_IN_PROJECT_SQL = """
select {{ string_literal(this.name) }} as tablename
"""
MODELS__ALIAS_IN_PROJECT_WITH_OVERRIDE_SQL = """
{{ config(alias='override_alias') }}
select {{ string_literal(this.name) }} as tablename
"""
MODELS__REF_FOO_ALIAS_SQL = """
{{
config(
materialized='table'
)
}}
with trigger_ref as (
-- we should still be able to ref a model by its filepath
select * from {{ ref('foo_alias') }}
)
-- this name should still be the filename
select {{ string_literal(this.name) }} as tablename
"""
# error #
MODELS_DUPE__MODEL_A_SQL = """
{{ config(alias='duped_alias') }}
select 1 as id
"""
MODELS_DUPE__MODEL_B_SQL = """
{{ config(alias='duped_alias') }}
select 1 as id
"""
MODELS_DUPE__README_MD = """
these should fail because both models have the same alias
and are configured to build in the same schema
"""
# dupe custom database #
MODELS_DUPE_CUSTOM_DATABASE__SCHEMA_YML = """
version: 2
models:
- name: model_a
data_tests:
- expect_value:
field: tablename
value: duped_alias
- name: model_b
data_tests:
- expect_value:
field: tablename
value: duped_alias
"""
MODELS_DUPE_CUSTOM_DATABASE__MODEL_A_SQL = """
select {{ string_literal(this.name) }} as tablename
"""
MODELS_DUPE_CUSTOM_DATABASE__MODEL_B_SQL = """
select {{ string_literal(this.name) }} as tablename
"""
MODELS_DUPE_CUSTOM_DATABASE__README_MD = """
these should succeed, as both models have the same alias,
but they are configured to be built in _different_ schemas
"""
# dupe custom schema #
MODELS_DUPE_CUSTOM_SCHEMA__SCHEMA_YML = """
version: 2
models:
- name: model_a
data_tests:
- expect_value:
field: tablename
value: duped_alias
- name: model_b
data_tests:
- expect_value:
field: tablename
value: duped_alias
- name: model_c
data_tests:
- expect_value:
field: tablename
value: duped_alias
"""
MODELS_DUPE_CUSTOM_SCHEMA__MODEL_A_SQL = """
{{ config(alias='duped_alias', schema='schema_a') }}
select {{ string_literal(this.name) }} as tablename
"""
MODELS_DUPE_CUSTOM_SCHEMA__MODEL_B_SQL = """
{{ config(alias='duped_alias', schema='schema_b') }}
select {{ string_literal(this.name) }} as tablename
"""
MODELS_DUPE_CUSTOM_SCHEMA__MODEL_C_SQL = """
-- no custom schema for this model
{{ config(alias='duped_alias') }}
select {{ string_literal(this.name) }} as tablename
"""
MODELS_DUPE_CUSTOM_SCHEMA__README_MD = """
these should succeed, as both models have the same alias,
but they are configured to be built in _different_ schemas
"""


@@ -1,162 +0,0 @@
import pytest
from dbt.tests.util import run_dbt
from tests.functional.adapter.aliases.fixtures import (
MACROS__CAST_SQL,
MACROS__EXPECT_VALUE_SQL,
MODELS__ALIAS_IN_PROJECT_SQL,
MODELS__ALIAS_IN_PROJECT_WITH_OVERRIDE_SQL,
MODELS__FOO_ALIAS_SQL,
MODELS__REF_FOO_ALIAS_SQL,
MODELS__SCHEMA_YML,
MODELS_DUPE__MODEL_A_SQL,
MODELS_DUPE__MODEL_B_SQL,
MODELS_DUPE_CUSTOM_DATABASE__MODEL_A_SQL,
MODELS_DUPE_CUSTOM_DATABASE__MODEL_B_SQL,
MODELS_DUPE_CUSTOM_DATABASE__SCHEMA_YML,
MODELS_DUPE_CUSTOM_SCHEMA__MODEL_A_SQL,
MODELS_DUPE_CUSTOM_SCHEMA__MODEL_B_SQL,
MODELS_DUPE_CUSTOM_SCHEMA__MODEL_C_SQL,
MODELS_DUPE_CUSTOM_SCHEMA__SCHEMA_YML,
)
class BaseAliases:
@pytest.fixture(scope="class")
def project_config_update(self):
return {
"config-version": 2,
"macro-paths": ["macros"],
"models": {
"test": {
"alias_in_project": {
"alias": "project_alias",
},
"alias_in_project_with_override": {
"alias": "project_alias",
},
}
},
}
@pytest.fixture(scope="class")
def models(self):
return {
"schema.yml": MODELS__SCHEMA_YML,
"foo_alias.sql": MODELS__FOO_ALIAS_SQL,
"alias_in_project.sql": MODELS__ALIAS_IN_PROJECT_SQL,
"alias_in_project_with_override.sql": MODELS__ALIAS_IN_PROJECT_WITH_OVERRIDE_SQL,
"ref_foo_alias.sql": MODELS__REF_FOO_ALIAS_SQL,
}
@pytest.fixture(scope="class")
def macros(self):
return {"cast.sql": MACROS__CAST_SQL, "expect_value.sql": MACROS__EXPECT_VALUE_SQL}
def test_alias_model_name(self, project):
results = run_dbt(["run"])
assert len(results) == 4
run_dbt(["test"])
class BaseAliasErrors:
@pytest.fixture(scope="class")
def project_config_update(self):
return {
"config-version": 2,
"macro-paths": ["macros"],
}
@pytest.fixture(scope="class")
def macros(self):
return {"cast.sql": MACROS__CAST_SQL, "expect_value.sql": MACROS__EXPECT_VALUE_SQL}
@pytest.fixture(scope="class")
def models(self):
return {
"model_a.sql": MODELS_DUPE__MODEL_A_SQL,
"model_b.sql": MODELS_DUPE__MODEL_B_SQL,
}
    def test_alias_dupe_throws_exception(self, project):
        message = ".*identical database representation.*"
        with pytest.raises(Exception) as exc:
            run_dbt(["run"])
        assert exc.match(message)
class BaseSameAliasDifferentSchemas:
@pytest.fixture(scope="class")
def project_config_update(self):
return {
"config-version": 2,
"macro-paths": ["macros"],
}
@pytest.fixture(scope="class")
def macros(self):
return {"cast.sql": MACROS__CAST_SQL, "expect_value.sql": MACROS__EXPECT_VALUE_SQL}
@pytest.fixture(scope="class")
def models(self):
return {
"schema.yml": MODELS_DUPE_CUSTOM_SCHEMA__SCHEMA_YML,
"model_a.sql": MODELS_DUPE_CUSTOM_SCHEMA__MODEL_A_SQL,
"model_b.sql": MODELS_DUPE_CUSTOM_SCHEMA__MODEL_B_SQL,
"model_c.sql": MODELS_DUPE_CUSTOM_SCHEMA__MODEL_C_SQL,
}
def test_same_alias_succeeds_in_different_schemas(self, project):
results = run_dbt(["run"])
assert len(results) == 3
res = run_dbt(["test"])
assert len(res) > 0
class BaseSameAliasDifferentDatabases:
@pytest.fixture(scope="class")
def project_config_update(self, unique_schema):
return {
"config-version": 2,
"macro-paths": ["macros"],
"models": {
"test": {
"alias": "duped_alias",
"model_b": {"schema": unique_schema + "_alt"},
},
},
}
@pytest.fixture(scope="class")
def macros(self):
return {"cast.sql": MACROS__CAST_SQL, "expect_value.sql": MACROS__EXPECT_VALUE_SQL}
@pytest.fixture(scope="class")
def models(self):
return {
"schema.yml": MODELS_DUPE_CUSTOM_DATABASE__SCHEMA_YML,
"model_a.sql": MODELS_DUPE_CUSTOM_DATABASE__MODEL_A_SQL,
"model_b.sql": MODELS_DUPE_CUSTOM_DATABASE__MODEL_B_SQL,
}
def test_alias_model_name_diff_database(self, project):
results = run_dbt(["run"])
assert len(results) == 2
res = run_dbt(["test"])
assert len(res) > 0
class TestAliases(BaseAliases):
pass
class TestAliasErrors(BaseAliasErrors):
pass
class TestSameAliasDifferentSchemas(BaseSameAliasDifferentSchemas):
pass
class TestSameAliasDifferentDatabases(BaseSameAliasDifferentDatabases):
pass
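These Base* classes are designed for reuse: an adapter repository inherits them and pytest picks up the tests. A minimal sketch of that pattern, assuming the classes ship in the dbt-tests-adapter package under dbt.tests.adapter.aliases.test_aliases (the adapter name is hypothetical):

# Sketch of adapter-side reuse, assuming the dbt-tests-adapter module path below.
from dbt.tests.adapter.aliases.test_aliases import (
    BaseAliasErrors,
    BaseAliases,
    BaseSameAliasDifferentDatabases,
    BaseSameAliasDifferentSchemas,
)

# Each subclass inherits the fixtures and test methods unchanged;
# an adapter only overrides a fixture when its behavior differs.
class TestAliasesMyAdapter(BaseAliases):
    pass

class TestAliasErrorsMyAdapter(BaseAliasErrors):
    pass

class TestSameAliasDifferentSchemasMyAdapter(BaseSameAliasDifferentSchemas):
    pass

class TestSameAliasDifferentDatabasesMyAdapter(BaseSameAliasDifferentDatabases):
    pass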


@@ -1 +0,0 @@
# Adapter tests


@@ -1,271 +0,0 @@
from dbt.tests.util import AnyInteger
def no_stats():
return {
"has_stats": {
"id": "has_stats",
"label": "Has Stats?",
"value": False,
"description": "Indicates whether there are statistics for this table",
"include": False,
},
}
def base_expected_catalog(
project,
role,
id_type,
text_type,
time_type,
view_type,
table_type,
model_stats,
seed_stats=None,
case=None,
case_columns=False,
):
if case is None:
def case(x):
return x
col_case = case if case_columns else lambda x: x
if seed_stats is None:
seed_stats = model_stats
model_database = project.database
my_schema_name = case(project.test_schema)
alternate_schema = case(project.test_schema + "_test")
expected_cols = {
col_case("id"): {
"name": col_case("id"),
"index": AnyInteger(),
"type": id_type,
"comment": None,
},
col_case("first_name"): {
"name": col_case("first_name"),
"index": AnyInteger(),
"type": text_type,
"comment": None,
},
col_case("email"): {
"name": col_case("email"),
"index": AnyInteger(),
"type": text_type,
"comment": None,
},
col_case("ip_address"): {
"name": col_case("ip_address"),
"index": AnyInteger(),
"type": text_type,
"comment": None,
},
col_case("updated_at"): {
"name": col_case("updated_at"),
"index": AnyInteger(),
"type": time_type,
"comment": None,
},
}
return {
"nodes": {
"model.test.model": {
"unique_id": "model.test.model",
"metadata": {
"schema": my_schema_name,
"database": model_database,
"name": case("model"),
"type": view_type,
"comment": None,
"owner": role,
},
"stats": model_stats,
"columns": expected_cols,
},
"model.test.second_model": {
"unique_id": "model.test.second_model",
"metadata": {
"schema": alternate_schema,
"database": project.database,
"name": case("second_model"),
"type": view_type,
"comment": None,
"owner": role,
},
"stats": model_stats,
"columns": expected_cols,
},
"seed.test.seed": {
"unique_id": "seed.test.seed",
"metadata": {
"schema": my_schema_name,
"database": project.database,
"name": case("seed"),
"type": table_type,
"comment": None,
"owner": role,
},
"stats": seed_stats,
"columns": expected_cols,
},
},
"sources": {
"source.test.my_source.my_table": {
"unique_id": "source.test.my_source.my_table",
"metadata": {
"schema": my_schema_name,
"database": project.database,
"name": case("seed"),
"type": table_type,
"comment": None,
"owner": role,
},
"stats": seed_stats,
"columns": expected_cols,
},
},
}
def expected_references_catalog(
project,
role,
id_type,
text_type,
time_type,
view_type,
table_type,
model_stats,
bigint_type=None,
seed_stats=None,
case=None,
case_columns=False,
view_summary_stats=None,
):
if case is None:
def case(x):
return x
col_case = case if case_columns else lambda x: x
if seed_stats is None:
seed_stats = model_stats
if view_summary_stats is None:
view_summary_stats = model_stats
model_database = project.database
my_schema_name = case(project.test_schema)
summary_columns = {
"first_name": {
"name": "first_name",
"index": 1,
"type": text_type,
"comment": None,
},
"ct": {
"name": "ct",
"index": 2,
"type": bigint_type,
"comment": None,
},
}
seed_columns = {
"id": {
"name": col_case("id"),
"index": 1,
"type": id_type,
"comment": None,
},
"first_name": {
"name": col_case("first_name"),
"index": 2,
"type": text_type,
"comment": None,
},
"email": {
"name": col_case("email"),
"index": 3,
"type": text_type,
"comment": None,
},
"ip_address": {
"name": col_case("ip_address"),
"index": 4,
"type": text_type,
"comment": None,
},
"updated_at": {
"name": col_case("updated_at"),
"index": 5,
"type": time_type,
"comment": None,
},
}
return {
"nodes": {
"seed.test.seed": {
"unique_id": "seed.test.seed",
"metadata": {
"schema": my_schema_name,
"database": project.database,
"name": case("seed"),
"type": table_type,
"comment": None,
"owner": role,
},
"stats": seed_stats,
"columns": seed_columns,
},
"model.test.ephemeral_summary": {
"unique_id": "model.test.ephemeral_summary",
"metadata": {
"schema": my_schema_name,
"database": model_database,
"name": case("ephemeral_summary"),
"type": table_type,
"comment": None,
"owner": role,
},
"stats": model_stats,
"columns": summary_columns,
},
"model.test.view_summary": {
"unique_id": "model.test.view_summary",
"metadata": {
"schema": my_schema_name,
"database": model_database,
"name": case("view_summary"),
"type": view_type,
"comment": None,
"owner": role,
},
"stats": view_summary_stats,
"columns": summary_columns,
},
},
"sources": {
"source.test.my_source.my_table": {
"unique_id": "source.test.my_source.my_table",
"metadata": {
"schema": my_schema_name,
"database": project.database,
"name": case("seed"),
"type": table_type,
"comment": None,
"owner": role,
},
"stats": seed_stats,
"columns": seed_columns,
},
},
}


@@ -1,226 +0,0 @@
seeds_base_csv = """
id,name,some_date
1,Easton,1981-05-20T06:46:51
2,Lillian,1978-09-03T18:10:33
3,Jeremiah,1982-03-11T03:59:51
4,Nolan,1976-05-06T20:21:35
5,Hannah,1982-06-23T05:41:26
6,Eleanor,1991-08-10T23:12:21
7,Lily,1971-03-29T14:58:02
8,Jonathan,1988-02-26T02:55:24
9,Adrian,1994-02-09T13:14:23
10,Nora,1976-03-01T16:51:39
""".lstrip()
seeds_added_csv = (
seeds_base_csv
+ """
11,Mateo,2014-09-07T17:04:27
12,Julian,2000-02-04T11:48:30
13,Gabriel,2001-07-10T07:32:52
14,Isaac,2002-11-24T03:22:28
15,Levi,2009-11-15T11:57:15
16,Elizabeth,2005-04-09T03:50:11
17,Grayson,2019-08-06T19:28:17
18,Dylan,2014-03-01T11:50:41
19,Jayden,2009-06-06T07:12:49
20,Luke,2003-12-05T21:42:18
""".lstrip()
)
seeds_newcolumns_csv = """
id,name,some_date,last_initial
1,Easton,1981-05-20T06:46:51,A
2,Lillian,1978-09-03T18:10:33,B
3,Jeremiah,1982-03-11T03:59:51,C
4,Nolan,1976-05-06T20:21:35,D
5,Hannah,1982-06-23T05:41:26,E
6,Eleanor,1991-08-10T23:12:21,F
7,Lily,1971-03-29T14:58:02,G
8,Jonathan,1988-02-26T02:55:24,H
9,Adrian,1994-02-09T13:14:23,I
10,Nora,1976-03-01T16:51:39,J
""".lstrip()
schema_base_yml = """
version: 2
sources:
- name: raw
schema: "{{ target.schema }}"
tables:
- name: seed
identifier: "{{ var('seed_name', 'base') }}"
"""
generic_test_seed_yml = """
version: 2
models:
- name: base
columns:
- name: id
data_tests:
- not_null
"""
generic_test_view_yml = """
version: 2
groups:
- name: my_group
owner:
name: group_owner
models:
- name: view_model
group: my_group
columns:
- name: id
data_tests:
- not_null
"""
generic_test_table_yml = """
version: 2
models:
- name: table_model
columns:
- name: id
data_tests:
- not_null
"""
test_passing_sql = """
select * from (
select 1 as id
) as my_subquery
where id = 2
"""
test_failing_sql = """
select * from (
select 1 as id
) as my_subquery
where id = 1
"""
test_ephemeral_passing_sql = """
with my_other_cool_cte as (
select id, name from {{ ref('ephemeral') }}
where id > 1000
)
select name, id from my_other_cool_cte
"""
test_ephemeral_failing_sql = """
with my_other_cool_cte as (
select id, name from {{ ref('ephemeral') }}
where id < 1000
)
select name, id from my_other_cool_cte
"""
model_incremental = """
select * from {{ source('raw', 'seed') }}
{% if is_incremental() %}
where id > (select max(id) from {{ this }})
{% endif %}
""".strip()
cc_all_snapshot_sql = """
{% snapshot cc_all_snapshot %}
{{ config(
check_cols='all', unique_key='id', strategy='check',
target_database=database, target_schema=schema
) }}
select * from {{ ref(var('seed_name', 'base')) }}
{% endsnapshot %}
""".strip()
cc_name_snapshot_sql = """
{% snapshot cc_name_snapshot %}
{{ config(
check_cols=['name'], unique_key='id', strategy='check',
target_database=database, target_schema=schema
) }}
select * from {{ ref(var('seed_name', 'base')) }}
{% endsnapshot %}
""".strip()
cc_date_snapshot_sql = """
{% snapshot cc_date_snapshot %}
{{ config(
check_cols=['some_date'], unique_key='id', strategy='check',
target_database=database, target_schema=schema
) }}
select * from {{ ref(var('seed_name', 'base')) }}
{% endsnapshot %}
""".strip()
ts_snapshot_sql = """
{% snapshot ts_snapshot %}
{{ config(
strategy='timestamp',
unique_key='id',
updated_at='some_date',
target_database=database,
target_schema=schema,
)}}
select * from {{ ref(var('seed_name', 'base')) }}
{% endsnapshot %}
""".strip()
model_ephemeral_with_cte = """
with my_cool_cte as (
select name, id from {{ ref('base') }}
)
select id, name from my_cool_cte where id is not null
"""
config_materialized_table = """
{{ config(materialized="table") }}
"""
config_materialized_view = """
{{ config(materialized="view") }}
"""
config_materialized_ephemeral = """
{{ config(materialized="ephemeral") }}
"""
config_materialized_incremental = """
{{ config(materialized="incremental") }}
"""
config_materialized_var = """
{{ config(materialized=var("materialized_var", "table"))}}
"""
model_base = """
select * from {{ source('raw', 'seed') }}
"""
model_ephemeral = """
select * from {{ ref('ephemeral') }}
"""
incremental_not_schema_change_sql = """
{{ config(materialized="incremental", unique_key="user_id_current_time",on_schema_change="sync_all_columns") }}
select
1 || '-' || current_timestamp as user_id_current_time,
{% if is_incremental() %}
'thisis18characters' as platform
{% else %}
'okthisis20characters' as platform
{% endif %}
"""
base_materialized_var_sql = config_materialized_var + model_base
base_table_sql = config_materialized_table + model_base
base_view_sql = config_materialized_view + model_base
base_ephemeral_sql = config_materialized_ephemeral + model_base
ephemeral_with_cte_sql = config_materialized_ephemeral + model_ephemeral_with_cte
ephemeral_view_sql = config_materialized_view + model_ephemeral
ephemeral_table_sql = config_materialized_table + model_ephemeral
incremental_sql = config_materialized_incremental + model_incremental


@@ -1,105 +0,0 @@
import pytest
from dbt.tests.util import check_relations_equal, run_dbt
tests__get_columns_in_relation_sql = """
{% set columns = adapter.get_columns_in_relation(ref('model')) %}
{% set limit_query = 0 %}
{% if (columns | length) == 0 %}
{% set limit_query = 1 %}
{% endif %}
select 1 as id limit {{ limit_query }}
"""
models__upstream_sql = """
select 1 as id
"""
models__expected_sql = """
-- make sure this runs after 'model'
-- {{ ref('model') }}
select 2 as id
"""
models__model_sql = """
{% set upstream = ref('upstream') %}
{% if execute %}
{# don't ever do any of this #}
{%- do adapter.drop_schema(upstream) -%}
{% set existing = adapter.get_relation(upstream.database, upstream.schema, upstream.identifier) %}
{% if existing is not none %}
{% do exceptions.raise_compiler_error('expected ' ~ upstream ~ ' to not exist, but it did') %}
{% endif %}
{%- do adapter.create_schema(upstream) -%}
{% set sql = create_view_as(upstream, 'select 2 as id') %}
{% do run_query(sql) %}
{% endif %}
select * from {{ upstream }}
"""
class BaseAdapterMethod:
"""
This test will leverage the following adapter methods:
get_relation
get_columns_in_relation
drop_schema
create_schema
    It aims to make sure drop_schema actually works; for more context,
    check out #1983
"""
@pytest.fixture(scope="class")
def tests(self):
return {"get_columns_in_relation.sql": tests__get_columns_in_relation_sql}
@pytest.fixture(scope="class")
def models(self):
return {
"upstream.sql": models__upstream_sql,
"expected.sql": models__expected_sql,
"model.sql": models__model_sql,
}
@pytest.fixture(scope="class")
def project_config_update(self):
return {
"name": "adapter_methods",
}
@pytest.fixture(autouse=True)
def clean_up(self, project):
yield
with project.adapter.connection_named("__test"):
relation = project.adapter.Relation.create(
database=project.database, schema=project.test_schema
)
project.adapter.drop_schema(relation)
pass
    # snowflake needs all table names upper-cased
@pytest.fixture(scope="class")
def equal_tables(self):
return ["model", "expected"]
def test_adapter_methods(self, project, equal_tables):
run_dbt(["compile"]) # trigger any compile-time issues
result = run_dbt()
assert len(result) == 3
check_relations_equal(project.adapter, equal_tables)
class TestBaseCaching(BaseAdapterMethod):
pass
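Adapters with different identifier casing rules reuse this class by overriding the equal_tables fixture called out in the comment above. A sketch, assuming the class is published in dbt-tests-adapter as dbt.tests.adapter.basic.test_adapter_methods.BaseAdapterMethod:

import pytest

# Assumed dbt-tests-adapter module path; the adapter name is hypothetical.
from dbt.tests.adapter.basic.test_adapter_methods import BaseAdapterMethod

class TestAdapterMethodsMyAdapter(BaseAdapterMethod):
    # e.g. a warehouse that folds identifiers to upper case compares
    # against upper-cased relation names
    @pytest.fixture(scope="class")
    def equal_tables(self):
        return ["MODEL", "EXPECTED"]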


@@ -1,122 +0,0 @@
import pytest
from dbt.tests.util import (
check_relation_types,
check_relations_equal,
check_result_nodes_by_name,
relation_from_name,
run_dbt,
)
from tests.functional.adapter.basic.files import (
base_materialized_var_sql,
base_table_sql,
base_view_sql,
schema_base_yml,
seeds_base_csv,
)
class BaseSimpleMaterializations:
@pytest.fixture(scope="class")
def models(self):
return {
"view_model.sql": base_view_sql,
"table_model.sql": base_table_sql,
"swappable.sql": base_materialized_var_sql,
"schema.yml": schema_base_yml,
}
@pytest.fixture(scope="class")
def seeds(self):
return {
"base.csv": seeds_base_csv,
}
@pytest.fixture(scope="class")
def project_config_update(self):
return {
"name": "base",
}
@pytest.fixture(autouse=True)
def clean_up(self, project):
yield
with project.adapter.connection_named("__test"):
relation = project.adapter.Relation.create(
database=project.database, schema=project.test_schema
)
project.adapter.drop_schema(relation)
pass
def test_base(self, project):
# seed command
results = run_dbt(["seed"])
# seed result length
assert len(results) == 1
# run command
results = run_dbt()
# run result length
assert len(results) == 3
# names exist in result nodes
check_result_nodes_by_name(results, ["view_model", "table_model", "swappable"])
# check relation types
expected = {
"base": "table",
"view_model": "view",
"table_model": "table",
"swappable": "table",
}
check_relation_types(project.adapter, expected)
# base table rowcount
relation = relation_from_name(project.adapter, "base")
result = project.run_sql(f"select count(*) as num_rows from {relation}", fetch="one")
assert result[0] == 10
# relations_equal
check_relations_equal(project.adapter, ["base", "view_model", "table_model", "swappable"])
# check relations in catalog
catalog = run_dbt(["docs", "generate"])
assert len(catalog.nodes) == 4
assert len(catalog.sources) == 1
# run_dbt changing materialized_var to view
if project.test_config.get("require_full_refresh", False): # required for BigQuery
results = run_dbt(
["run", "--full-refresh", "-m", "swappable", "--vars", "materialized_var: view"]
)
else:
results = run_dbt(["run", "-m", "swappable", "--vars", "materialized_var: view"])
assert len(results) == 1
# check relation types, swappable is view
expected = {
"base": "table",
"view_model": "view",
"table_model": "table",
"swappable": "view",
}
check_relation_types(project.adapter, expected)
# run_dbt changing materialized_var to incremental
results = run_dbt(["run", "-m", "swappable", "--vars", "materialized_var: incremental"])
assert len(results) == 1
# check relation types, swappable is table
expected = {
"base": "table",
"view_model": "view",
"table_model": "table",
"swappable": "table",
}
check_relation_types(project.adapter, expected)
class TestSimpleMaterializations(BaseSimpleMaterializations):
pass


@@ -1,518 +0,0 @@
import os
from datetime import datetime, timezone
import pytest
import dbt
from dbt.tests.fixtures.project import write_project_files
from dbt.tests.util import check_datetime_between, get_artifact, rm_file, run_dbt
from tests.functional.adapter.basic.expected_catalog import (
base_expected_catalog,
expected_references_catalog,
no_stats,
)
models__schema_yml = """
version: 2
models:
- name: model
description: "The test model"
docs:
show: false
columns:
- name: id
description: The user ID number
data_tests:
- unique
- not_null
- name: first_name
description: The user's first name
- name: email
description: The user's email
- name: ip_address
description: The user's IP address
- name: updated_at
description: The last time this user's email was updated
data_tests:
- test.nothing
- name: second_model
description: "The second test model"
docs:
show: false
columns:
- name: id
description: The user ID number
- name: first_name
description: The user's first name
- name: email
description: The user's email
- name: ip_address
description: The user's IP address
- name: updated_at
description: The last time this user's email was updated
sources:
- name: my_source
description: "My source"
loader: a_loader
schema: "{{ var('test_schema') }}"
tables:
- name: my_table
description: "My table"
identifier: seed
columns:
- name: id
description: "An ID field"
exposures:
- name: simple_exposure
type: dashboard
depends_on:
- ref('model')
- source('my_source', 'my_table')
owner:
email: something@example.com
- name: notebook_exposure
type: notebook
depends_on:
- ref('model')
- ref('second_model')
owner:
email: something@example.com
name: Some name
description: >
A description of the complex exposure
maturity: medium
meta:
tool: 'my_tool'
languages:
- python
tags: ['my_department']
url: http://example.com/notebook/1
"""
models__second_model_sql = """
{{
config(
materialized='view',
schema='test',
)
}}
select * from {{ ref('seed') }}
"""
models__readme_md = """
This is a readme.md file with {{ invalid-ish jinja }} in it
"""
models__model_sql = """
{{
config(
materialized='view',
)
}}
select * from {{ ref('seed') }}
"""
seed__schema_yml = """
version: 2
seeds:
- name: seed
description: "The test seed"
columns:
- name: id
description: The user ID number
- name: first_name
description: The user's first name
- name: email
description: The user's email
- name: ip_address
description: The user's IP address
- name: updated_at
description: The last time this user's email was updated
"""
seed__seed_csv = """id,first_name,email,ip_address,updated_at
1,Larry,lking0@miitbeian.gov.cn,69.135.206.194,2008-09-12 19:08:31
"""
macros__schema_yml = """
version: 2
macros:
- name: test_nothing
description: "{{ doc('macro_info') }}"
meta:
some_key: 100
arguments:
- name: model
type: Relation
description: "{{ doc('macro_arg_info') }}"
"""
macros__macro_md = """
{% docs macro_info %}
My custom test that I wrote that does nothing
{% enddocs %}
{% docs macro_arg_info %}
The model for my custom test
{% enddocs %}
"""
macros__dummy_test_sql = """
{% test nothing(model) %}
-- a silly test to make sure that table-level tests show up in the manifest
-- without a column_name field
select 0
{% endtest %}
"""
snapshot__snapshot_seed_sql = """
{% snapshot snapshot_seed %}
{{
config(
unique_key='id',
strategy='check',
check_cols='all',
target_schema=var('alternate_schema')
)
}}
select * from {{ ref('seed') }}
{% endsnapshot %}
"""
ref_models__schema_yml = """
version: 2
models:
- name: ephemeral_summary
description: "{{ doc('ephemeral_summary') }}"
columns: &summary_columns
- name: first_name
description: "{{ doc('summary_first_name') }}"
- name: ct
description: "{{ doc('summary_count') }}"
- name: view_summary
description: "{{ doc('view_summary') }}"
columns: *summary_columns
exposures:
- name: notebook_exposure
type: notebook
depends_on:
- ref('view_summary')
owner:
email: something@example.com
name: Some name
description: "{{ doc('notebook_info') }}"
maturity: medium
url: http://example.com/notebook/1
meta:
tool: 'my_tool'
languages:
- python
tags: ['my_department']
"""
ref_sources__schema_yml = """
version: 2
sources:
- name: my_source
description: "{{ doc('source_info') }}"
loader: a_loader
schema: "{{ var('test_schema') }}"
tables:
- name: my_table
description: "{{ doc('table_info') }}"
identifier: seed
columns:
- name: id
description: "{{ doc('column_info') }}"
"""
ref_models__view_summary_sql = """
{{
config(
materialized = "view"
)
}}
select first_name, ct from {{ref('ephemeral_summary')}}
order by ct asc
"""
ref_models__ephemeral_summary_sql = """
{{
config(
materialized = "table"
)
}}
select first_name, count(*) as ct from {{ref('ephemeral_copy')}}
group by first_name
order by first_name asc
"""
ref_models__ephemeral_copy_sql = """
{{
config(
materialized = "ephemeral"
)
}}
select * from {{ source("my_source", "my_table") }}
"""
ref_models__docs_md = """
{% docs ephemeral_summary %}
A summary table of the ephemeral copy of the seed data
{% enddocs %}
{% docs summary_first_name %}
The first name being summarized
{% enddocs %}
{% docs summary_count %}
The number of instances of the first name
{% enddocs %}
{% docs view_summary %}
A view of the summary of the ephemeral copy of the seed data
{% enddocs %}
{% docs source_info %}
My source
{% enddocs %}
{% docs table_info %}
My table
{% enddocs %}
{% docs column_info %}
An ID field
{% enddocs %}
{% docs notebook_info %}
A description of the complex exposure
{% enddocs %}
"""
def verify_catalog(project, expected_catalog, start_time):
# get the catalog.json
catalog_path = os.path.join(project.project_root, "target", "catalog.json")
assert os.path.exists(catalog_path)
catalog = get_artifact(catalog_path)
# verify the catalog
assert set(catalog) == {"errors", "metadata", "nodes", "sources"}
verify_metadata(
catalog["metadata"],
"https://schemas.getdbt.com/dbt/catalog/v1.json",
start_time,
)
assert not catalog["errors"]
for key in "nodes", "sources":
for unique_id, expected_node in expected_catalog[key].items():
found_node = catalog[key][unique_id]
for node_key in expected_node:
assert node_key in found_node
assert (
found_node[node_key] == expected_node[node_key]
), f"Key '{node_key}' in '{unique_id}' did not match"
def verify_metadata(metadata, dbt_schema_version, start_time):
assert "generated_at" in metadata
check_datetime_between(metadata["generated_at"], start=start_time)
assert "dbt_version" in metadata
assert metadata["dbt_version"] == dbt.version.__version__
assert "dbt_schema_version" in metadata
assert metadata["dbt_schema_version"] == dbt_schema_version
key = "env_key"
if os.name == "nt":
key = key.upper()
assert metadata["env"] == {key: "env_value"}
def run_and_generate(project, args=None):
results = run_dbt(["run"])
assert len(results) == 2
rm_file(project.project_root, "target", "manifest.json")
rm_file(project.project_root, "target", "run_results.json")
start_time = datetime.now(timezone.utc).replace(tzinfo=None)
run_args = ["docs", "generate"]
if args:
run_args.extend(args)
catalog = run_dbt(run_args)
assert catalog
return start_time
class BaseGenerateProject:
@pytest.fixture(scope="class", autouse=True)
def setup(self, project):
alternate_schema_name = project.test_schema + "_test"
project.create_test_schema(schema_name=alternate_schema_name)
os.environ["DBT_ENV_CUSTOM_ENV_env_key"] = "env_value"
assets = {"lorem-ipsum.txt": "Lorem ipsum dolor sit amet"}
write_project_files(project.project_root, "assets", assets)
run_dbt(["seed"])
yield
del os.environ["DBT_ENV_CUSTOM_ENV_env_key"]
@pytest.fixture(scope="class")
def seeds(self):
return {"schema.yml": seed__schema_yml, "seed.csv": seed__seed_csv}
@pytest.fixture(scope="class")
def macros(self):
return {
"schema.yml": macros__schema_yml,
"macro.md": macros__macro_md,
"dummy_test.sql": macros__dummy_test_sql,
}
@pytest.fixture(scope="class")
def snapshots(self):
return {"snapshot_seed.sql": snapshot__snapshot_seed_sql}
@pytest.fixture(scope="class")
def project_config_update(self, unique_schema):
alternate_schema = unique_schema + "_test"
return {
"asset-paths": ["assets", "invalid-asset-paths"],
"vars": {
"test_schema": unique_schema,
"alternate_schema": alternate_schema,
},
"seeds": {
"quote_columns": True,
},
}
class BaseDocsGenerate(BaseGenerateProject):
@pytest.fixture(scope="class")
def models(self):
return {
"schema.yml": models__schema_yml,
"second_model.sql": models__second_model_sql,
"readme.md": models__readme_md,
"model.sql": models__model_sql,
}
@pytest.fixture(scope="class")
def expected_catalog(self, project, profile_user):
return base_expected_catalog(
project,
role=profile_user,
id_type="integer",
text_type="text",
time_type="timestamp without time zone",
view_type="VIEW",
table_type="BASE TABLE",
model_stats=no_stats(),
)
@pytest.fixture(autouse=True)
def clean_up(self, project):
yield
with project.adapter.connection_named("__test"):
alternate_schema = f"{project.test_schema}_test"
relation = project.adapter.Relation.create(
database=project.database, schema=alternate_schema
)
project.adapter.drop_schema(relation)
pass
# Test "--no-compile" flag works and produces no manifest.json
def test_run_and_generate_no_compile(self, project, expected_catalog):
start_time = run_and_generate(project, ["--no-compile"])
assert not os.path.exists(os.path.join(project.project_root, "target", "manifest.json"))
verify_catalog(project, expected_catalog, start_time)
# Test generic "docs generate" command
def test_run_and_generate(self, project, expected_catalog):
start_time = run_and_generate(project)
verify_catalog(project, expected_catalog, start_time)
# Check that assets have been copied to the target directory for use in the docs html page
assert os.path.exists(os.path.join(project.project_root, "target", "assets"))
assert os.path.exists(
os.path.join(project.project_root, "target", "assets", "lorem-ipsum.txt")
)
assert not os.path.exists(
os.path.join(project.project_root, "target", "non-existent-assets")
)
# Test generic "docs generate" command
def test_locally_run_and_generate(self, project, expected_catalog):
        # Change to a different directory to test that asset copying during
        # docs generation still works when the project dir is resolved explicitly
        os.chdir(project.profiles_dir)
start_time = run_and_generate(project)
verify_catalog(project, expected_catalog, start_time)
# Check that assets have been copied to the target directory for use in the docs html page
assert os.path.exists(os.path.join(project.project_root, "target", "assets"))
assert os.path.exists(
os.path.join(project.project_root, "target", "assets", "lorem-ipsum.txt")
)
assert not os.path.exists(
os.path.join(project.project_root, "target", "non-existent-assets")
)
class TestDocsGenerate(BaseDocsGenerate):
pass
class BaseDocsGenReferences(BaseGenerateProject):
@pytest.fixture(scope="class")
def models(self):
return {
"schema.yml": ref_models__schema_yml,
"sources.yml": ref_sources__schema_yml,
"view_summary.sql": ref_models__view_summary_sql,
"ephemeral_summary.sql": ref_models__ephemeral_summary_sql,
"ephemeral_copy.sql": ref_models__ephemeral_copy_sql,
"docs.md": ref_models__docs_md,
}
@pytest.fixture(scope="class")
def expected_catalog(self, project, profile_user):
return expected_references_catalog(
project,
role=profile_user,
id_type="integer",
text_type="text",
time_type="timestamp without time zone",
bigint_type="bigint",
view_type="VIEW",
table_type="BASE TABLE",
model_stats=no_stats(),
)
def test_references(self, project, expected_catalog):
start_time = run_and_generate(project)
verify_catalog(project, expected_catalog, start_time)
class TestDocsGenReferences(BaseDocsGenReferences):
pass
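Because base_expected_catalog parameterizes the column and relation type names, an adapter points its expected_catalog fixture at its own type strings. A sketch, assuming both helpers ship in dbt-tests-adapter under the module paths below; the type values are hypothetical:

import pytest

# Assumed dbt-tests-adapter module paths.
from dbt.tests.adapter.basic.expected_catalog import base_expected_catalog, no_stats
from dbt.tests.adapter.basic.test_docs_generate import BaseDocsGenerate

class TestDocsGenerateMyAdapter(BaseDocsGenerate):
    @pytest.fixture(scope="class")
    def expected_catalog(self, project, profile_user):
        # hypothetical type names for a warehouse that reports upper-cased types
        return base_expected_catalog(
            project,
            role=profile_user,
            id_type="NUMBER",
            text_type="VARCHAR",
            time_type="TIMESTAMP",
            view_type="VIEW",
            table_type="TABLE",
            model_stats=no_stats(),
        )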


@@ -1,30 +0,0 @@
import os
from dbt.tests.util import run_dbt
class BaseEmpty:
def test_empty(self, project):
# check seed
results = run_dbt(["seed"])
assert len(results) == 0
run_results_path = os.path.join(project.project_root, "target", "run_results.json")
assert os.path.exists(run_results_path)
# check run
results = run_dbt(["run"])
assert len(results) == 0
catalog_path = os.path.join(project.project_root, "target", "catalog.json")
assert not os.path.exists(catalog_path)
# check catalog
catalog = run_dbt(["docs", "generate"])
assert os.path.exists(run_results_path)
assert os.path.exists(catalog_path)
assert len(catalog.nodes) == 0
assert len(catalog.sources) == 0
class TestEmpty(BaseEmpty):
pass


@@ -1,72 +0,0 @@
import os
import pytest
from dbt.tests.util import (
check_relations_equal,
check_result_nodes_by_name,
get_manifest,
relation_from_name,
run_dbt,
)
from tests.functional.adapter.basic.files import (
base_ephemeral_sql,
ephemeral_table_sql,
ephemeral_view_sql,
schema_base_yml,
seeds_base_csv,
)
class BaseEphemeral:
@pytest.fixture(scope="class")
def project_config_update(self):
return {"name": "ephemeral"}
@pytest.fixture(scope="class")
def seeds(self):
return {"base.csv": seeds_base_csv}
@pytest.fixture(scope="class")
def models(self):
return {
"ephemeral.sql": base_ephemeral_sql,
"view_model.sql": ephemeral_view_sql,
"table_model.sql": ephemeral_table_sql,
"schema.yml": schema_base_yml,
}
def test_ephemeral(self, project):
# seed command
results = run_dbt(["seed"])
assert len(results) == 1
check_result_nodes_by_name(results, ["base"])
# run command
results = run_dbt(["run"])
assert len(results) == 2
check_result_nodes_by_name(results, ["view_model", "table_model"])
# base table rowcount
relation = relation_from_name(project.adapter, "base")
result = project.run_sql(f"select count(*) as num_rows from {relation}", fetch="one")
assert result[0] == 10
# relations equal
check_relations_equal(project.adapter, ["base", "view_model", "table_model"])
# catalog node count
catalog = run_dbt(["docs", "generate"])
catalog_path = os.path.join(project.project_root, "target", "catalog.json")
assert os.path.exists(catalog_path)
assert len(catalog.nodes) == 3
assert len(catalog.sources) == 1
# manifest (not in original)
manifest = get_manifest(project.project_root)
assert len(manifest.nodes) == 4
assert len(manifest.sources) == 1
class TestEphemeral(BaseEphemeral):
pass


@@ -1,74 +0,0 @@
import pytest
from dbt.tests.util import run_dbt, run_dbt_and_capture
from tests.functional.adapter.basic.files import (
base_table_sql,
base_view_sql,
generic_test_seed_yml,
generic_test_table_yml,
generic_test_view_yml,
schema_base_yml,
seeds_base_csv,
)
class BaseGenericTests:
@pytest.fixture(scope="class")
def project_config_update(self):
return {"name": "generic_tests"}
@pytest.fixture(scope="class")
def seeds(self):
return {
"base.csv": seeds_base_csv,
"schema.yml": generic_test_seed_yml,
}
@pytest.fixture(scope="class")
def models(self):
return {
"view_model.sql": base_view_sql,
"table_model.sql": base_table_sql,
"schema.yml": schema_base_yml,
"schema_view.yml": generic_test_view_yml,
"schema_table.yml": generic_test_table_yml,
}
@pytest.fixture(autouse=True)
def clean_up(self, project):
yield
with project.adapter.connection_named("__test"):
relation = project.adapter.Relation.create(
database=project.database, schema=project.test_schema
)
project.adapter.drop_schema(relation)
pass
def test_generic_tests(self, project):
# seed command
results = run_dbt(["seed"])
# test command selecting base model
results = run_dbt(["test", "-m", "base"])
assert len(results) == 1
# run command
results = run_dbt(["run"])
assert len(results) == 2
# test command, all tests
results, log_output = run_dbt_and_capture(["test", "--log-format", "json"])
assert len(results) == 3
result_log_lines = [
line for line in log_output.split("\n") if "LogTestResult" in line and "group" in line
]
assert len(result_log_lines) == 1
assert "my_group" in result_log_lines[0]
assert "group_owner" in result_log_lines[0]
assert "model.generic_tests.view_model" in result_log_lines[0]
class TestGenericTests(BaseGenericTests):
pass


@@ -1,99 +0,0 @@
import pytest
from dbt.artifacts.schemas.results import RunStatus
from dbt.tests.util import check_relations_equal, relation_from_name, run_dbt
from tests.functional.adapter.basic.files import (
incremental_not_schema_change_sql,
incremental_sql,
schema_base_yml,
seeds_added_csv,
seeds_base_csv,
)
class BaseIncremental:
@pytest.fixture(scope="class")
def project_config_update(self):
return {"name": "incremental"}
@pytest.fixture(scope="class")
def models(self):
return {"incremental.sql": incremental_sql, "schema.yml": schema_base_yml}
@pytest.fixture(scope="class")
def seeds(self):
return {"base.csv": seeds_base_csv, "added.csv": seeds_added_csv}
@pytest.fixture(autouse=True)
def clean_up(self, project):
yield
with project.adapter.connection_named("__test"):
relation = project.adapter.Relation.create(
database=project.database, schema=project.test_schema
)
project.adapter.drop_schema(relation)
pass
def test_incremental(self, project):
# seed command
results = run_dbt(["seed"])
assert len(results) == 2
# base table rowcount
relation = relation_from_name(project.adapter, "base")
result = project.run_sql(f"select count(*) as num_rows from {relation}", fetch="one")
assert result[0] == 10
# added table rowcount
relation = relation_from_name(project.adapter, "added")
result = project.run_sql(f"select count(*) as num_rows from {relation}", fetch="one")
assert result[0] == 20
# run command
# the "seed_name" var changes the seed identifier in the schema file
results = run_dbt(["run", "--vars", "seed_name: base"])
assert len(results) == 1
# check relations equal
check_relations_equal(project.adapter, ["base", "incremental"])
# change seed_name var
# the "seed_name" var changes the seed identifier in the schema file
results = run_dbt(["run", "--vars", "seed_name: added"])
assert len(results) == 1
# check relations equal
check_relations_equal(project.adapter, ["added", "incremental"])
# get catalog from docs generate
catalog = run_dbt(["docs", "generate"])
assert len(catalog.nodes) == 3
assert len(catalog.sources) == 1
class BaseIncrementalNotSchemaChange:
@pytest.fixture(scope="class")
def project_config_update(self):
return {"name": "incremental"}
@pytest.fixture(scope="class")
def models(self):
return {"incremental_not_schema_change.sql": incremental_not_schema_change_sql}
def test_incremental_not_schema_change(self, project):
# Schema change is not evaluated on first run, so two are needed
run_dbt(["run", "--select", "incremental_not_schema_change"])
run_result = (
run_dbt(["run", "--select", "incremental_not_schema_change"]).results[0].status
)
assert run_result == RunStatus.Success
class TestIncremental(BaseIncremental):
pass
class TestBaseIncrementalNotSchemaChange(BaseIncrementalNotSchemaChange):
pass


@@ -1,47 +0,0 @@
import pytest
from dbt.tests.util import check_result_nodes_by_name, run_dbt
from tests.functional.adapter.basic.files import test_failing_sql, test_passing_sql
class BaseSingularTests:
@pytest.fixture(scope="class")
def tests(self):
return {
"passing.sql": test_passing_sql,
"failing.sql": test_failing_sql,
}
@pytest.fixture(scope="class")
def project_config_update(self):
return {"name": "singular_tests"}
@pytest.fixture(autouse=True)
def clean_up(self, project):
yield
with project.adapter.connection_named("__test"):
relation = project.adapter.Relation.create(
database=project.database, schema=project.test_schema
)
project.adapter.drop_schema(relation)
pass
def test_singular_tests(self, project):
# test command
results = run_dbt(["test"], expect_pass=False)
assert len(results) == 2
# We have the right result nodes
check_result_nodes_by_name(results, ["passing", "failing"])
# Check result status
for result in results:
if result.node.name == "passing":
assert result.status == "pass"
elif result.node.name == "failing":
assert result.status == "fail"
class TestSingularTests(BaseSingularTests):
pass


@@ -1,78 +0,0 @@
import pytest
from dbt.tests.util import check_result_nodes_by_name, run_dbt
from tests.functional.adapter.basic.files import (
ephemeral_with_cte_sql,
schema_base_yml,
seeds_base_csv,
test_ephemeral_failing_sql,
test_ephemeral_passing_sql,
)
class BaseSingularTestsEphemeral:
@pytest.fixture(scope="class")
def seeds(self):
return {
"base.csv": seeds_base_csv,
}
@pytest.fixture(scope="class")
def models(self):
return {
"ephemeral.sql": ephemeral_with_cte_sql,
"passing_model.sql": test_ephemeral_passing_sql,
"failing_model.sql": test_ephemeral_failing_sql,
"schema.yml": schema_base_yml,
}
@pytest.fixture(scope="class")
def tests(self):
return {
"passing.sql": test_ephemeral_passing_sql,
"failing.sql": test_ephemeral_failing_sql,
}
@pytest.fixture(scope="class")
def project_config_update(self):
return {
"name": "singular_tests_ephemeral",
}
@pytest.fixture(autouse=True)
def clean_up(self, project):
yield
with project.adapter.connection_named("__test"):
relation = project.adapter.Relation.create(
database=project.database, schema=project.test_schema
)
project.adapter.drop_schema(relation)
pass
def test_singular_tests_ephemeral(self, project):
# check results from seed command
results = run_dbt(["seed"])
assert len(results) == 1
check_result_nodes_by_name(results, ["base"])
# Check results from test command
results = run_dbt(["test"], expect_pass=False)
assert len(results) == 2
check_result_nodes_by_name(results, ["passing", "failing"])
# Check result status
for result in results:
if result.node.name == "passing":
assert result.status == "pass"
elif result.node.name == "failing":
assert result.status == "fail"
# check results from run command
results = run_dbt()
assert len(results) == 2
check_result_nodes_by_name(results, ["failing_model", "passing_model"])
class TestSingularTestsEphemeral(BaseSingularTestsEphemeral):
pass


@@ -1,124 +0,0 @@
import pytest
from dbt.tests.util import relation_from_name, run_dbt, update_rows
from tests.functional.adapter.basic.files import (
cc_all_snapshot_sql,
cc_date_snapshot_sql,
cc_name_snapshot_sql,
seeds_added_csv,
seeds_base_csv,
)
def check_relation_rows(project, snapshot_name, count):
relation = relation_from_name(project.adapter, snapshot_name)
result = project.run_sql(f"select count(*) as num_rows from {relation}", fetch="one")
assert result[0] == count
class BaseSnapshotCheckCols:
@pytest.fixture(scope="class")
def project_config_update(self):
return {"name": "snapshot_strategy_check_cols"}
@pytest.fixture(scope="class")
def seeds(self):
return {
"base.csv": seeds_base_csv,
"added.csv": seeds_added_csv,
}
@pytest.fixture(scope="class")
def snapshots(self):
return {
"cc_all_snapshot.sql": cc_all_snapshot_sql,
"cc_date_snapshot.sql": cc_date_snapshot_sql,
"cc_name_snapshot.sql": cc_name_snapshot_sql,
}
@pytest.fixture(autouse=True)
def clean_up(self, project):
yield
with project.adapter.connection_named("__test"):
relation = project.adapter.Relation.create(
database=project.database, schema=project.test_schema
)
project.adapter.drop_schema(relation)
pass
def test_snapshot_check_cols(self, project):
# seed command
results = run_dbt(["seed"])
assert len(results) == 2
# snapshot command
results = run_dbt(["snapshot"])
for result in results:
assert result.status == "success"
# check rowcounts for all snapshots
check_relation_rows(project, "cc_all_snapshot", 10)
check_relation_rows(project, "cc_name_snapshot", 10)
check_relation_rows(project, "cc_date_snapshot", 10)
relation = relation_from_name(project.adapter, "cc_all_snapshot")
result = project.run_sql(f"select * from {relation}", fetch="all")
# point at the "added" seed so the snapshot sees 10 new rows
results = run_dbt(["--no-partial-parse", "snapshot", "--vars", "seed_name: added"])
for result in results:
assert result.status == "success"
# check rowcounts for all snapshots
check_relation_rows(project, "cc_all_snapshot", 20)
check_relation_rows(project, "cc_name_snapshot", 20)
check_relation_rows(project, "cc_date_snapshot", 20)
# update some timestamps in the "added" seed so the snapshot sees 10 more new rows
update_rows_config = {
"name": "added",
"dst_col": "some_date",
"clause": {"src_col": "some_date", "type": "add_timestamp"},
"where": "id > 10 and id < 21",
}
update_rows(project.adapter, update_rows_config)
        # re-run snapshots, using "added"
results = run_dbt(["snapshot", "--vars", "seed_name: added"])
for result in results:
assert result.status == "success"
# check rowcounts for all snapshots
check_relation_rows(project, "cc_all_snapshot", 30)
check_relation_rows(project, "cc_date_snapshot", 30)
# unchanged: only the timestamp changed
check_relation_rows(project, "cc_name_snapshot", 20)
# Update the name column
update_rows_config = {
"name": "added",
"dst_col": "name",
"clause": {
"src_col": "name",
"type": "add_string",
"value": "_updated",
},
"where": "id < 11",
}
update_rows(project.adapter, update_rows_config)
        # re-run snapshots, using "added"
results = run_dbt(["snapshot", "--vars", "seed_name: added"])
for result in results:
assert result.status == "success"
# check rowcounts for all snapshots
check_relation_rows(project, "cc_all_snapshot", 40)
check_relation_rows(project, "cc_name_snapshot", 30)
# does not see name updates
check_relation_rows(project, "cc_date_snapshot", 30)
class TestSnapshotCheckCols(BaseSnapshotCheckCols):
pass


@@ -1,102 +0,0 @@
import pytest
from dbt.tests.util import relation_from_name, run_dbt, update_rows
from tests.functional.adapter.basic.files import (
seeds_added_csv,
seeds_base_csv,
seeds_newcolumns_csv,
ts_snapshot_sql,
)
def check_relation_rows(project, snapshot_name, count):
relation = relation_from_name(project.adapter, snapshot_name)
result = project.run_sql(f"select count(*) as num_rows from {relation}", fetch="one")
assert result[0] == count
class BaseSnapshotTimestamp:
@pytest.fixture(scope="class")
def seeds(self):
return {
"base.csv": seeds_base_csv,
"newcolumns.csv": seeds_newcolumns_csv,
"added.csv": seeds_added_csv,
}
@pytest.fixture(scope="class")
def snapshots(self):
return {
"ts_snapshot.sql": ts_snapshot_sql,
}
@pytest.fixture(scope="class")
def project_config_update(self):
return {"name": "snapshot_strategy_timestamp"}
@pytest.fixture(autouse=True)
def clean_up(self, project):
yield
with project.adapter.connection_named("__test"):
relation = project.adapter.Relation.create(
database=project.database, schema=project.test_schema
)
project.adapter.drop_schema(relation)
pass
def test_snapshot_timestamp(self, project):
# seed command
results = run_dbt(["seed"])
assert len(results) == 3
# snapshot command
results = run_dbt(["snapshot"])
assert len(results) == 1
# snapshot has 10 rows
check_relation_rows(project, "ts_snapshot", 10)
# point at the "added" seed so the snapshot sees 10 new rows
results = run_dbt(["snapshot", "--vars", "seed_name: added"])
# snapshot now has 20 rows
check_relation_rows(project, "ts_snapshot", 20)
# update some timestamps in the "added" seed so the snapshot sees 10 more new rows
update_rows_config = {
"name": "added",
"dst_col": "some_date",
"clause": {
"src_col": "some_date",
"type": "add_timestamp",
},
"where": "id > 10 and id < 21",
}
update_rows(project.adapter, update_rows_config)
results = run_dbt(["snapshot", "--vars", "seed_name: added"])
# snapshot now has 30 rows
check_relation_rows(project, "ts_snapshot", 30)
update_rows_config = {
"name": "added",
"dst_col": "name",
"clause": {
"src_col": "name",
"type": "add_string",
"value": "_updated",
},
"where": "id < 11",
}
update_rows(project.adapter, update_rows_config)
results = run_dbt(["snapshot", "--vars", "seed_name: added"])
# snapshot still has 30 rows because timestamp not updated
check_relation_rows(project, "ts_snapshot", 30)
class TestSnapshotTimestamp(BaseSnapshotTimestamp):
pass


@@ -1,95 +0,0 @@
import pytest
from dbt.tests.util import check_relations_equal, run_dbt
seeds__seed_csv = """id,first_name,last_name,email,gender,ip_address
1,Jack,Hunter,jhunter0@pbs.org,Male,59.80.20.168
2,Kathryn,Walker,kwalker1@ezinearticles.com,Female,194.121.179.35
3,Gerald,Ryan,gryan2@com.com,Male,11.3.212.243
4,Bonnie,Spencer,bspencer3@ameblo.jp,Female,216.32.196.175
5,Harold,Taylor,htaylor4@people.com.cn,Male,253.10.246.136
6,Jacqueline,Griffin,jgriffin5@t.co,Female,16.13.192.220
7,Wanda,Arnold,warnold6@google.nl,Female,232.116.150.64
8,Craig,Ortiz,cortiz7@sciencedaily.com,Male,199.126.106.13
9,Gary,Day,gday8@nih.gov,Male,35.81.68.186
10,Rose,Wright,rwright9@yahoo.co.jp,Female,236.82.178.100
11,Raymond,Kelley,rkelleya@fc2.com,Male,213.65.166.67
12,Gerald,Robinson,grobinsonb@disqus.com,Male,72.232.194.193
13,Mildred,Martinez,mmartinezc@samsung.com,Female,198.29.112.5
14,Dennis,Arnold,darnoldd@google.com,Male,86.96.3.250
15,Judy,Gray,jgraye@opensource.org,Female,79.218.162.245
16,Theresa,Garza,tgarzaf@epa.gov,Female,21.59.100.54
17,Gerald,Robertson,grobertsong@csmonitor.com,Male,131.134.82.96
18,Philip,Hernandez,phernandezh@adobe.com,Male,254.196.137.72
19,Julia,Gonzalez,jgonzalezi@cam.ac.uk,Female,84.240.227.174
20,Andrew,Davis,adavisj@patch.com,Male,9.255.67.25
21,Kimberly,Harper,kharperk@foxnews.com,Female,198.208.120.253
22,Mark,Martin,mmartinl@marketwatch.com,Male,233.138.182.153
23,Cynthia,Ruiz,cruizm@google.fr,Female,18.178.187.201
24,Samuel,Carroll,scarrolln@youtu.be,Male,128.113.96.122
25,Jennifer,Larson,jlarsono@vinaora.com,Female,98.234.85.95
26,Ashley,Perry,aperryp@rakuten.co.jp,Female,247.173.114.52
27,Howard,Rodriguez,hrodriguezq@shutterfly.com,Male,231.188.95.26
28,Amy,Brooks,abrooksr@theatlantic.com,Female,141.199.174.118
29,Louise,Warren,lwarrens@adobe.com,Female,96.105.158.28
30,Tina,Watson,twatsont@myspace.com,Female,251.142.118.177
31,Janice,Kelley,jkelleyu@creativecommons.org,Female,239.167.34.233
32,Terry,Mccoy,tmccoyv@bravesites.com,Male,117.201.183.203
33,Jeffrey,Morgan,jmorganw@surveymonkey.com,Male,78.101.78.149
34,Louis,Harvey,lharveyx@sina.com.cn,Male,51.50.0.167
35,Philip,Miller,pmillery@samsung.com,Male,103.255.222.110
36,Willie,Marshall,wmarshallz@ow.ly,Male,149.219.91.68
37,Patrick,Lopez,plopez10@redcross.org,Male,250.136.229.89
38,Adam,Jenkins,ajenkins11@harvard.edu,Male,7.36.112.81
39,Benjamin,Cruz,bcruz12@linkedin.com,Male,32.38.98.15
40,Ruby,Hawkins,rhawkins13@gmpg.org,Female,135.171.129.255
41,Carlos,Barnes,cbarnes14@a8.net,Male,240.197.85.140
42,Ruby,Griffin,rgriffin15@bravesites.com,Female,19.29.135.24
43,Sean,Mason,smason16@icq.com,Male,159.219.155.249
44,Anthony,Payne,apayne17@utexas.edu,Male,235.168.199.218
45,Steve,Cruz,scruz18@pcworld.com,Male,238.201.81.198
46,Anthony,Garcia,agarcia19@flavors.me,Male,25.85.10.18
47,Doris,Lopez,dlopez1a@sphinn.com,Female,245.218.51.238
48,Susan,Nichols,snichols1b@freewebs.com,Female,199.99.9.61
49,Wanda,Ferguson,wferguson1c@yahoo.co.jp,Female,236.241.135.21
50,Andrea,Pierce,apierce1d@google.co.uk,Female,132.40.10.209
"""
model_sql = """
{{
config(
materialized = "table",
sort = 'first_name',
dist = 'first_name'
)
}}
select * from {{ this.schema }}.seed
"""
class BaseTableMaterialization:
@pytest.fixture(scope="class")
def seeds(self):
return {"seed.csv": seeds__seed_csv}
@pytest.fixture(scope="class")
def models(self):
return {"materialized.sql": model_sql}
def test_table_materialization_sort_dist_no_op(self, project):
# basic table materialization test; sort and dist are not supported by Postgres, so the result table is the same as the input
# check seed
results = run_dbt(["seed"])
assert len(results) == 1
# check run
results = run_dbt(["run"])
assert len(results) == 1
check_relations_equal(project.adapter, ["seed", "materialized"])
class TestTableMat(BaseTableMaterialization):
pass


@@ -1,14 +0,0 @@
import dbt.task.debug
class BaseValidateConnection:
# the project fixture needs to be here, otherwise some other tests might break
def test_validate_connection(self, project, dbt_profile_data):
dbt.task.debug.DebugTask.validate_connection(
dbt_profile_data["test"]["outputs"]["default"]
)
class TestValidateConnection(BaseValidateConnection):
pass


@@ -1,117 +0,0 @@
import pytest
from dbt.tests.util import run_dbt
model_sql = """
{{
config(
materialized='table'
)
}}
select 1 as id
"""
another_schema_model_sql = """
{{
config(
materialized='table',
schema='another_schema'
)
}}
select 1 as id
"""
class BaseCachingTest:
@pytest.fixture(scope="class")
def project_config_update(self):
return {
"config-version": 2,
"quoting": {
"identifier": False,
"schema": False,
},
}
def run_and_inspect_cache(self, project, run_args=None):
run_dbt(run_args)
# the cache was empty at the start of the run.
# the model materialization returned an unquoted relation and added to the cache.
adapter = project.adapter
assert len(adapter.cache.relations) == 1
relation = list(adapter.cache.relations).pop()
assert relation.schema == project.test_schema
assert relation.schema == project.test_schema.lower()
# on the second run, dbt will find a relation in the database during cache population.
# this relation will be quoted, because list_relations_without_caching (by default) uses
# quote_policy = {"database": True, "schema": True, "identifier": True}
# when adding relations to the cache.
run_dbt(run_args)
adapter = project.adapter
assert len(adapter.cache.relations) == 1
second_relation = list(adapter.cache.relations).pop()
# perform a case-insensitive + quote-insensitive comparison
for key in ["database", "schema", "identifier"]:
assert getattr(relation, key).lower() == getattr(second_relation, key).lower()
def test_cache(self, project):
self.run_and_inspect_cache(project, run_args=["run"])
class BaseCachingLowercaseModel(BaseCachingTest):
@pytest.fixture(scope="class")
def models(self):
return {
"model.sql": model_sql,
}
class BaseCachingUppercaseModel(BaseCachingTest):
@pytest.fixture(scope="class")
def models(self):
return {
"MODEL.sql": model_sql,
}
class BaseCachingSelectedSchemaOnly(BaseCachingTest):
@pytest.fixture(scope="class")
def models(self):
return {
"model.sql": model_sql,
"another_schema_model.sql": another_schema_model_sql,
}
def test_cache(self, project):
# this should only cache the schema containing the selected model
run_args = ["--cache-selected-only", "run", "--select", "model"]
self.run_and_inspect_cache(project, run_args)
class TestNoPopulateCache(BaseCachingTest):
@pytest.fixture(scope="class")
def models(self):
return {
"model.sql": model_sql,
}
def test_cache(self, project):
# --no-populate-cache still allows the cache to populate all relations
# under a schema, so the behavior here remains the same as other tests
run_args = ["--no-populate-cache", "run"]
self.run_and_inspect_cache(project, run_args)
class TestCachingLowerCaseModel(BaseCachingLowercaseModel):
pass
class TestCachingUppercaseModel(BaseCachingUppercaseModel):
pass
class TestCachingSelectedSchemaOnly(BaseCachingSelectedSchemaOnly):
pass


@@ -1,33 +0,0 @@
MY_SEED = """
id,value,record_valid_date
1,100,2023-01-01 00:00:00
2,200,2023-01-02 00:00:00
3,300,2023-01-02 00:00:00
""".strip()
MY_TABLE = """
{{ config(
materialized='table',
) }}
select *
from {{ ref('my_seed') }}
"""
MY_VIEW = """
{{ config(
materialized='view',
) }}
select *
from {{ ref('my_seed') }}
"""
MY_MATERIALIZED_VIEW = """
{{ config(
materialized='materialized_view',
) }}
select *
from {{ ref('my_seed') }}
"""


@@ -1,84 +0,0 @@
import pytest
from dbt.artifacts.schemas.catalog import CatalogArtifact
from dbt.tests.util import run_dbt
from tests.functional.adapter.catalog import files
class CatalogRelationTypes:
"""
Many adapters can use this test as-is. However, if your adapter contains different
relation types or uses different strings to describe the node (e.g. 'table' instead of 'BASE TABLE'),
then you'll need to configure this test.
To configure this test, you'll most likely need to update either `models`
and/or `test_relation_types_populate_correctly`. For example, `dbt-snowflake`
supports dynamic tables and does not support materialized views. Its implementation
might look like this:
class TestCatalogRelationTypes:
@pytest.fixture(scope="class", autouse=True)
def models(self):
yield {
"my_table.sql": files.MY_TABLE,
"my_view.sql": files.MY_VIEW,
"my_dynamic_table.sql": files.MY_DYNAMIC_TABLE,
}
@pytest.mark.parametrize(
"node_name,relation_type",
[
("seed.test.my_seed", "BASE TABLE"),
("model.test.my_table", "BASE TABLE"),
("model.test.my_view", "VIEW"),
("model.test.my_dynamic_table", "DYNAMIC TABLE"),
],
)
def test_relation_types_populate_correctly(
self, docs: CatalogArtifact, node_name: str, relation_type: str
):
super().test_relation_types_populate_correctly(
docs, node_name, relation_type
)
Note that we're able to configure the test case using pytest parameterization
and call back to the original test. That way any updates to the test are incorporated
into your adapter.
"""
@pytest.fixture(scope="class", autouse=True)
def seeds(self):
return {"my_seed.csv": files.MY_SEED}
@pytest.fixture(scope="class", autouse=True)
def models(self):
yield {
"my_table.sql": files.MY_TABLE,
"my_view.sql": files.MY_VIEW,
"my_materialized_view.sql": files.MY_MATERIALIZED_VIEW,
}
@pytest.fixture(scope="class", autouse=True)
def docs(self, project):
run_dbt(["seed"])
run_dbt(["run"])
yield run_dbt(["docs", "generate"])
@pytest.mark.parametrize(
"node_name,relation_type",
[
("seed.test.my_seed", "BASE TABLE"),
("model.test.my_table", "BASE TABLE"),
("model.test.my_view", "VIEW"),
("model.test.my_materialized_view", "MATERIALIZED VIEW"),
],
)
def test_relation_types_populate_correctly(
self, docs: CatalogArtifact, node_name: str, relation_type: str
):
"""
This test addresses: https://github.com/dbt-labs/dbt-core/issues/8864
"""
assert node_name in docs.nodes
node = docs.nodes[node_name]
assert node.metadata.type == relation_type


@@ -1,113 +0,0 @@
# macros
macro_test_alter_column_type = """
-- Macro to alter a column type
{% macro test_alter_column_type(model_name, column_name, new_column_type) %}
{% set relation = ref(model_name) %}
{{ alter_column_type(relation, column_name, new_column_type) }}
{% endmacro %}
"""
macro_test_is_type_sql = """
{% macro simple_type_check_column(column, check) %}
{% if check == 'string' %}
{{ return(column.is_string()) }}
{% elif check == 'float' %}
{{ return(column.is_float()) }}
{% elif check == 'number' %}
{{ return(column.is_number()) }}
{% elif check == 'numeric' %}
{{ return(column.is_numeric()) }}
{% elif check == 'integer' %}
{{ return(column.is_integer()) }}
{% else %}
{% do exceptions.raise_compiler_error('invalid type check value: ' ~ check) %}
{% endif %}
{% endmacro %}
{% macro type_check_column(column, type_checks) %}
{% set failures = [] %}
{% for type_check in type_checks %}
{% if type_check.startswith('not ') %}
{% if simple_type_check_column(column, type_check[4:]) %}
{% do log('simple_type_check_column got ', True) %}
{% do failures.append(type_check) %}
{% endif %}
{% else %}
{% if not simple_type_check_column(column, type_check) %}
{% do failures.append(type_check) %}
{% endif %}
{% endif %}
{% endfor %}
{% if (failures | length) > 0 %}
{% do log('column ' ~ column.name ~ ' had failures: ' ~ failures, info=True) %}
{% endif %}
{% do return((failures | length) == 0) %}
{% endmacro %}
{% test is_type(model, column_map) %}
{% if not execute %}
{{ return(None) }}
{% endif %}
{% if not column_map %}
{% do exceptions.raise_compiler_error('test_is_type must have a column name') %}
{% endif %}
{% set columns = adapter.get_columns_in_relation(model) %}
{% if (column_map | length) != (columns | length) %}
{% set column_map_keys = (column_map | list | string) %}
{% set column_names = (columns | map(attribute='name') | list | string) %}
{% do exceptions.raise_compiler_error('did not get all the columns/all columns not specified:\n' ~ column_map_keys ~ '\nvs\n' ~ column_names) %}
{% endif %}
{% set bad_columns = [] %}
{% for column in columns %}
{% set column_key = (column.name | lower) %}
{% if column_key in column_map %}
{% set type_checks = column_map[column_key] %}
{% if not type_checks %}
{% do exceptions.raise_compiler_error('no type checks?') %}
{% endif %}
{% if not type_check_column(column, type_checks) %}
{% do bad_columns.append(column.name) %}
{% endif %}
{% else %}
{% do exceptions.raise_compiler_error('column key ' ~ column_key ~ ' not found in ' ~ (column_map | list | string)) %}
{% endif %}
{% endfor %}
{% do log('bad columns: ' ~ bad_columns, info=True) %}
{% for bad_column in bad_columns %}
select '{{ bad_column }}' as bad_column
{{ 'union all' if not loop.last }}
{% endfor %}
select * from (select 1 limit 0) as nothing
{% endtest %}
"""
# models/schema
model_sql = """
select
1::smallint as smallint_col,
2::integer as int_col,
3::bigint as bigint_col,
4.0::real as real_col,
5.0::double precision as double_col,
6.0::numeric as numeric_col,
'7'::text as text_col,
'8'::varchar(20) as varchar_col
"""
schema_yml = """
version: 2
models:
- name: model
data_tests:
- is_type:
column_map:
smallint_col: ['integer', 'number']
int_col: ['integer', 'number']
bigint_col: ['integer', 'number']
real_col: ['float', 'number']
double_col: ['float', 'number']
numeric_col: ['numeric', 'number']
text_col: ['string', 'not number']
varchar_col: ['string', 'not number']
"""


@@ -1,29 +0,0 @@
import pytest
from dbt.tests.util import run_dbt
from tests.functional.adapter.column_types.fixtures import (
macro_test_is_type_sql,
model_sql,
schema_yml,
)
class BaseColumnTypes:
@pytest.fixture(scope="class")
def macros(self):
return {"test_is_type.sql": macro_test_is_type_sql}
def run_and_test(self):
results = run_dbt(["run"])
assert len(results) == 1
results = run_dbt(["test"])
assert len(results) == 1
class TestPostgresColumnTypes(BaseColumnTypes):
@pytest.fixture(scope="class")
def models(self):
return {"model.sql": model_sql, "schema.yml": schema_yml}
def test_run_and_test(self, project):
self.run_and_test()


@@ -1,328 +0,0 @@
import pytest
from dbt.tests.util import (
check_relations_equal,
check_table_does_not_exist,
rm_file,
run_dbt,
run_dbt_and_capture,
write_file,
)
models__invalid_sql = """
{{
config(
materialized = "table"
)
}}
select a_field_that_does_not_exist from {{ this.schema }}.seed
"""
models__table_a_sql = """
{{
config(
materialized = "table"
)
}}
select * from {{ this.schema }}.seed
"""
models__table_b_sql = """
{{
config(
materialized = "table"
)
}}
select * from {{ this.schema }}.seed
"""
models__view_model_sql = """
{{
config(
materialized = "view"
)
}}
select * from {{ this.schema }}.seed
"""
models__dep_sql = """
{{
config(
materialized = "table"
)
}}
select * from {{ref('view_model')}}
"""
models__view_with_conflicting_cascade_sql = """
select * from {{ref('table_a')}}
union all
select * from {{ref('table_b')}}
"""
models__skip_sql = """
select * from {{ref('invalid')}}
"""
seeds__seed_csv = """id,first_name,last_name,email,gender,ip_address
1,Jack,Hunter,jhunter0@pbs.org,Male,59.80.20.168
2,Kathryn,Walker,kwalker1@ezinearticles.com,Female,194.121.179.35
3,Gerald,Ryan,gryan2@com.com,Male,11.3.212.243
4,Bonnie,Spencer,bspencer3@ameblo.jp,Female,216.32.196.175
5,Harold,Taylor,htaylor4@people.com.cn,Male,253.10.246.136
6,Jacqueline,Griffin,jgriffin5@t.co,Female,16.13.192.220
7,Wanda,Arnold,warnold6@google.nl,Female,232.116.150.64
8,Craig,Ortiz,cortiz7@sciencedaily.com,Male,199.126.106.13
9,Gary,Day,gday8@nih.gov,Male,35.81.68.186
10,Rose,Wright,rwright9@yahoo.co.jp,Female,236.82.178.100
11,Raymond,Kelley,rkelleya@fc2.com,Male,213.65.166.67
12,Gerald,Robinson,grobinsonb@disqus.com,Male,72.232.194.193
13,Mildred,Martinez,mmartinezc@samsung.com,Female,198.29.112.5
14,Dennis,Arnold,darnoldd@google.com,Male,86.96.3.250
15,Judy,Gray,jgraye@opensource.org,Female,79.218.162.245
16,Theresa,Garza,tgarzaf@epa.gov,Female,21.59.100.54
17,Gerald,Robertson,grobertsong@csmonitor.com,Male,131.134.82.96
18,Philip,Hernandez,phernandezh@adobe.com,Male,254.196.137.72
19,Julia,Gonzalez,jgonzalezi@cam.ac.uk,Female,84.240.227.174
20,Andrew,Davis,adavisj@patch.com,Male,9.255.67.25
21,Kimberly,Harper,kharperk@foxnews.com,Female,198.208.120.253
22,Mark,Martin,mmartinl@marketwatch.com,Male,233.138.182.153
23,Cynthia,Ruiz,cruizm@google.fr,Female,18.178.187.201
24,Samuel,Carroll,scarrolln@youtu.be,Male,128.113.96.122
25,Jennifer,Larson,jlarsono@vinaora.com,Female,98.234.85.95
26,Ashley,Perry,aperryp@rakuten.co.jp,Female,247.173.114.52
27,Howard,Rodriguez,hrodriguezq@shutterfly.com,Male,231.188.95.26
28,Amy,Brooks,abrooksr@theatlantic.com,Female,141.199.174.118
29,Louise,Warren,lwarrens@adobe.com,Female,96.105.158.28
30,Tina,Watson,twatsont@myspace.com,Female,251.142.118.177
31,Janice,Kelley,jkelleyu@creativecommons.org,Female,239.167.34.233
32,Terry,Mccoy,tmccoyv@bravesites.com,Male,117.201.183.203
33,Jeffrey,Morgan,jmorganw@surveymonkey.com,Male,78.101.78.149
34,Louis,Harvey,lharveyx@sina.com.cn,Male,51.50.0.167
35,Philip,Miller,pmillery@samsung.com,Male,103.255.222.110
36,Willie,Marshall,wmarshallz@ow.ly,Male,149.219.91.68
37,Patrick,Lopez,plopez10@redcross.org,Male,250.136.229.89
38,Adam,Jenkins,ajenkins11@harvard.edu,Male,7.36.112.81
39,Benjamin,Cruz,bcruz12@linkedin.com,Male,32.38.98.15
40,Ruby,Hawkins,rhawkins13@gmpg.org,Female,135.171.129.255
41,Carlos,Barnes,cbarnes14@a8.net,Male,240.197.85.140
42,Ruby,Griffin,rgriffin15@bravesites.com,Female,19.29.135.24
43,Sean,Mason,smason16@icq.com,Male,159.219.155.249
44,Anthony,Payne,apayne17@utexas.edu,Male,235.168.199.218
45,Steve,Cruz,scruz18@pcworld.com,Male,238.201.81.198
46,Anthony,Garcia,agarcia19@flavors.me,Male,25.85.10.18
47,Doris,Lopez,dlopez1a@sphinn.com,Female,245.218.51.238
48,Susan,Nichols,snichols1b@freewebs.com,Female,199.99.9.61
49,Wanda,Ferguson,wferguson1c@yahoo.co.jp,Female,236.241.135.21
50,Andrea,Pierce,apierce1d@google.co.uk,Female,132.40.10.209
51,Lawrence,Phillips,lphillips1e@jugem.jp,Male,72.226.82.87
52,Judy,Gilbert,jgilbert1f@multiply.com,Female,196.250.15.142
53,Eric,Williams,ewilliams1g@joomla.org,Male,222.202.73.126
54,Ralph,Romero,rromero1h@sogou.com,Male,123.184.125.212
55,Jean,Wilson,jwilson1i@ocn.ne.jp,Female,176.106.32.194
56,Lori,Reynolds,lreynolds1j@illinois.edu,Female,114.181.203.22
57,Donald,Moreno,dmoreno1k@bbc.co.uk,Male,233.249.97.60
58,Steven,Berry,sberry1l@eepurl.com,Male,186.193.50.50
59,Theresa,Shaw,tshaw1m@people.com.cn,Female,120.37.71.222
60,John,Stephens,jstephens1n@nationalgeographic.com,Male,191.87.127.115
61,Richard,Jacobs,rjacobs1o@state.tx.us,Male,66.210.83.155
62,Andrew,Lawson,alawson1p@over-blog.com,Male,54.98.36.94
63,Peter,Morgan,pmorgan1q@rambler.ru,Male,14.77.29.106
64,Nicole,Garrett,ngarrett1r@zimbio.com,Female,21.127.74.68
65,Joshua,Kim,jkim1s@edublogs.org,Male,57.255.207.41
66,Ralph,Roberts,rroberts1t@people.com.cn,Male,222.143.131.109
67,George,Montgomery,gmontgomery1u@smugmug.com,Male,76.75.111.77
68,Gerald,Alvarez,galvarez1v@flavors.me,Male,58.157.186.194
69,Donald,Olson,dolson1w@whitehouse.gov,Male,69.65.74.135
70,Carlos,Morgan,cmorgan1x@pbs.org,Male,96.20.140.87
71,Aaron,Stanley,astanley1y@webnode.com,Male,163.119.217.44
72,Virginia,Long,vlong1z@spiegel.de,Female,204.150.194.182
73,Robert,Berry,rberry20@tripadvisor.com,Male,104.19.48.241
74,Antonio,Brooks,abrooks21@unesco.org,Male,210.31.7.24
75,Ruby,Garcia,rgarcia22@ovh.net,Female,233.218.162.214
76,Jack,Hanson,jhanson23@blogtalkradio.com,Male,31.55.46.199
77,Kathryn,Nelson,knelson24@walmart.com,Female,14.189.146.41
78,Jason,Reed,jreed25@printfriendly.com,Male,141.189.89.255
79,George,Coleman,gcoleman26@people.com.cn,Male,81.189.221.144
80,Rose,King,rking27@ucoz.com,Female,212.123.168.231
81,Johnny,Holmes,jholmes28@boston.com,Male,177.3.93.188
82,Katherine,Gilbert,kgilbert29@altervista.org,Female,199.215.169.61
83,Joshua,Thomas,jthomas2a@ustream.tv,Male,0.8.205.30
84,Julie,Perry,jperry2b@opensource.org,Female,60.116.114.192
85,Richard,Perry,rperry2c@oracle.com,Male,181.125.70.232
86,Kenneth,Ruiz,kruiz2d@wikimedia.org,Male,189.105.137.109
87,Jose,Morgan,jmorgan2e@webnode.com,Male,101.134.215.156
88,Donald,Campbell,dcampbell2f@goo.ne.jp,Male,102.120.215.84
89,Debra,Collins,dcollins2g@uol.com.br,Female,90.13.153.235
90,Jesse,Johnson,jjohnson2h@stumbleupon.com,Male,225.178.125.53
91,Elizabeth,Stone,estone2i@histats.com,Female,123.184.126.221
92,Angela,Rogers,arogers2j@goodreads.com,Female,98.104.132.187
93,Emily,Dixon,edixon2k@mlb.com,Female,39.190.75.57
94,Albert,Scott,ascott2l@tinypic.com,Male,40.209.13.189
95,Barbara,Peterson,bpeterson2m@ow.ly,Female,75.249.136.180
96,Adam,Greene,agreene2n@fastcompany.com,Male,184.173.109.144
97,Earl,Sanders,esanders2o@hc360.com,Male,247.34.90.117
98,Angela,Brooks,abrooks2p@mtv.com,Female,10.63.249.126
99,Harold,Foster,hfoster2q@privacy.gov.au,Male,139.214.40.244
100,Carl,Meyer,cmeyer2r@disqus.com,Male,204.117.7.88
"""
seeds__update_csv = """id,first_name,last_name,email,gender,ip_address
1,Michael,Perez,mperez0@chronoengine.com,Male,106.239.70.175
2,Shawn,Mccoy,smccoy1@reddit.com,Male,24.165.76.182
3,Kathleen,Payne,kpayne2@cargocollective.com,Female,113.207.168.106
4,Jimmy,Cooper,jcooper3@cargocollective.com,Male,198.24.63.114
5,Katherine,Rice,krice4@typepad.com,Female,36.97.186.238
6,Sarah,Ryan,sryan5@gnu.org,Female,119.117.152.40
7,Martin,Mcdonald,mmcdonald6@opera.com,Male,8.76.38.115
8,Frank,Robinson,frobinson7@wunderground.com,Male,186.14.64.194
9,Jennifer,Franklin,jfranklin8@mail.ru,Female,91.216.3.131
10,Henry,Welch,hwelch9@list-manage.com,Male,176.35.182.168
11,Fred,Snyder,fsnydera@reddit.com,Male,217.106.196.54
12,Amy,Dunn,adunnb@nba.com,Female,95.39.163.195
13,Kathleen,Meyer,kmeyerc@cdc.gov,Female,164.142.188.214
14,Steve,Ferguson,sfergusond@reverbnation.com,Male,138.22.204.251
15,Teresa,Hill,thille@dion.ne.jp,Female,82.84.228.235
16,Amanda,Harper,aharperf@mail.ru,Female,16.123.56.176
17,Kimberly,Ray,krayg@xing.com,Female,48.66.48.12
18,Johnny,Knight,jknighth@jalbum.net,Male,99.30.138.123
19,Virginia,Freeman,vfreemani@tiny.cc,Female,225.172.182.63
20,Anna,Austin,aaustinj@diigo.com,Female,62.111.227.148
21,Willie,Hill,whillk@mail.ru,Male,0.86.232.249
22,Sean,Harris,sharrisl@zdnet.com,Male,117.165.133.249
23,Mildred,Adams,madamsm@usatoday.com,Female,163.44.97.46
24,David,Graham,dgrahamn@zimbio.com,Male,78.13.246.202
25,Victor,Hunter,vhuntero@ehow.com,Male,64.156.179.139
26,Aaron,Ruiz,aruizp@weebly.com,Male,34.194.68.78
27,Benjamin,Brooks,bbrooksq@jalbum.net,Male,20.192.189.107
28,Lisa,Wilson,lwilsonr@japanpost.jp,Female,199.152.130.217
29,Benjamin,King,bkings@comsenz.com,Male,29.189.189.213
30,Christina,Williamson,cwilliamsont@boston.com,Female,194.101.52.60
31,Jane,Gonzalez,jgonzalezu@networksolutions.com,Female,109.119.12.87
32,Thomas,Owens,towensv@psu.edu,Male,84.168.213.153
33,Katherine,Moore,kmoorew@naver.com,Female,183.150.65.24
34,Jennifer,Stewart,jstewartx@yahoo.com,Female,38.41.244.58
35,Sara,Tucker,stuckery@topsy.com,Female,181.130.59.184
36,Harold,Ortiz,hortizz@vkontakte.ru,Male,198.231.63.137
37,Shirley,James,sjames10@yelp.com,Female,83.27.160.104
38,Dennis,Johnson,djohnson11@slate.com,Male,183.178.246.101
39,Louise,Weaver,lweaver12@china.com.cn,Female,1.14.110.18
40,Maria,Armstrong,marmstrong13@prweb.com,Female,181.142.1.249
41,Gloria,Cruz,gcruz14@odnoklassniki.ru,Female,178.232.140.243
42,Diana,Spencer,dspencer15@ifeng.com,Female,125.153.138.244
43,Kelly,Nguyen,knguyen16@altervista.org,Female,170.13.201.119
44,Jane,Rodriguez,jrodriguez17@biblegateway.com,Female,12.102.249.81
45,Scott,Brown,sbrown18@geocities.jp,Male,108.174.99.192
46,Norma,Cruz,ncruz19@si.edu,Female,201.112.156.197
47,Marie,Peters,mpeters1a@mlb.com,Female,231.121.197.144
48,Lillian,Carr,lcarr1b@typepad.com,Female,206.179.164.163
49,Judy,Nichols,jnichols1c@t-online.de,Female,158.190.209.194
50,Billy,Long,blong1d@yahoo.com,Male,175.20.23.160
51,Howard,Reid,hreid1e@exblog.jp,Male,118.99.196.20
52,Laura,Ferguson,lferguson1f@tuttocitta.it,Female,22.77.87.110
53,Anne,Bailey,abailey1g@geocities.com,Female,58.144.159.245
54,Rose,Morgan,rmorgan1h@ehow.com,Female,118.127.97.4
55,Nicholas,Reyes,nreyes1i@google.ru,Male,50.135.10.252
56,Joshua,Kennedy,jkennedy1j@house.gov,Male,154.6.163.209
57,Paul,Watkins,pwatkins1k@upenn.edu,Male,177.236.120.87
58,Kathryn,Kelly,kkelly1l@businessweek.com,Female,70.28.61.86
59,Adam,Armstrong,aarmstrong1m@techcrunch.com,Male,133.235.24.202
60,Norma,Wallace,nwallace1n@phoca.cz,Female,241.119.227.128
61,Timothy,Reyes,treyes1o@google.cn,Male,86.28.23.26
62,Elizabeth,Patterson,epatterson1p@sun.com,Female,139.97.159.149
63,Edward,Gomez,egomez1q@google.fr,Male,158.103.108.255
64,David,Cox,dcox1r@friendfeed.com,Male,206.80.80.58
65,Brenda,Wood,bwood1s@over-blog.com,Female,217.207.44.179
66,Adam,Walker,awalker1t@blogs.com,Male,253.211.54.93
67,Michael,Hart,mhart1u@wix.com,Male,230.206.200.22
68,Jesse,Ellis,jellis1v@google.co.uk,Male,213.254.162.52
69,Janet,Powell,jpowell1w@un.org,Female,27.192.194.86
70,Helen,Ford,hford1x@creativecommons.org,Female,52.160.102.168
71,Gerald,Carpenter,gcarpenter1y@about.me,Male,36.30.194.218
72,Kathryn,Oliver,koliver1z@army.mil,Female,202.63.103.69
73,Alan,Berry,aberry20@gov.uk,Male,246.157.112.211
74,Harry,Andrews,handrews21@ameblo.jp,Male,195.108.0.12
75,Andrea,Hall,ahall22@hp.com,Female,149.162.163.28
76,Barbara,Wells,bwells23@behance.net,Female,224.70.72.1
77,Anne,Wells,awells24@apache.org,Female,180.168.81.153
78,Harry,Harper,hharper25@rediff.com,Male,151.87.130.21
79,Jack,Ray,jray26@wufoo.com,Male,220.109.38.178
80,Phillip,Hamilton,phamilton27@joomla.org,Male,166.40.47.30
81,Shirley,Hunter,shunter28@newsvine.com,Female,97.209.140.194
82,Arthur,Daniels,adaniels29@reuters.com,Male,5.40.240.86
83,Virginia,Rodriguez,vrodriguez2a@walmart.com,Female,96.80.164.184
84,Christina,Ryan,cryan2b@hibu.com,Female,56.35.5.52
85,Theresa,Mendoza,tmendoza2c@vinaora.com,Female,243.42.0.210
86,Jason,Cole,jcole2d@ycombinator.com,Male,198.248.39.129
87,Phillip,Bryant,pbryant2e@rediff.com,Male,140.39.116.251
88,Adam,Torres,atorres2f@sun.com,Male,101.75.187.135
89,Margaret,Johnston,mjohnston2g@ucsd.edu,Female,159.30.69.149
90,Paul,Payne,ppayne2h@hhs.gov,Male,199.234.140.220
91,Todd,Willis,twillis2i@businessweek.com,Male,191.59.136.214
92,Willie,Oliver,woliver2j@noaa.gov,Male,44.212.35.197
93,Frances,Robertson,frobertson2k@go.com,Female,31.117.65.136
94,Gregory,Hawkins,ghawkins2l@joomla.org,Male,91.3.22.49
95,Lisa,Perkins,lperkins2m@si.edu,Female,145.95.31.186
96,Jacqueline,Anderson,janderson2n@cargocollective.com,Female,14.176.0.187
97,Shirley,Diaz,sdiaz2o@ucla.edu,Female,207.12.95.46
98,Nicole,Meyer,nmeyer2p@flickr.com,Female,231.79.115.13
99,Mary,Gray,mgray2q@constantcontact.com,Female,210.116.64.253
100,Jean,Mcdonald,jmcdonald2r@baidu.com,Female,122.239.235.117
"""
class BaseConcurrency:
@pytest.fixture(scope="class")
def seeds(self):
return {"seed.csv": seeds__seed_csv}
@pytest.fixture(scope="class")
def models(self):
return {
"invalid.sql": models__invalid_sql,
"table_a.sql": models__table_a_sql,
"table_b.sql": models__table_b_sql,
"view_model.sql": models__view_model_sql,
"dep.sql": models__dep_sql,
"view_with_conflicting_cascade.sql": models__view_with_conflicting_cascade_sql,
"skip.sql": models__skip_sql,
}
class TestConcurrency(BaseConcurrency):
def test_concurrency(self, project):
run_dbt(["seed", "--select", "seed"])
results = run_dbt(["run"], expect_pass=False)
assert len(results) == 7
check_relations_equal(project.adapter, ["seed", "view_model"])
check_relations_equal(project.adapter, ["seed", "dep"])
check_relations_equal(project.adapter, ["seed", "table_a"])
check_relations_equal(project.adapter, ["seed", "table_b"])
check_table_does_not_exist(project.adapter, "invalid")
check_table_does_not_exist(project.adapter, "skip")
rm_file(project.project_root, "seeds", "seed.csv")
write_file(seeds__update_csv, project.project_root, "seeds", "seed.csv")
results, output = run_dbt_and_capture(["run"], expect_pass=False)
assert len(results) == 7
check_relations_equal(project.adapter, ["seed", "view_model"])
check_relations_equal(project.adapter, ["seed", "dep"])
check_relations_equal(project.adapter, ["seed", "table_a"])
check_relations_equal(project.adapter, ["seed", "table_b"])
check_table_does_not_exist(project.adapter, "invalid")
check_table_does_not_exist(project.adapter, "skip")


@@ -1,585 +0,0 @@
# base mode definitions
my_model_sql = """
{{
config(
materialized = "table"
)
}}
select
1 as id,
'blue' as color,
'2019-01-01' as date_day
"""
foreign_key_model_sql = """
{{
config(
materialized = "table"
)
}}
select
1 as id
"""
my_model_view_sql = """
{{
config(
materialized = "view"
)
}}
select
1 as id,
'blue' as color,
'2019-01-01' as date_day
"""
my_incremental_model_sql = """
{{
config(
materialized = "incremental",
on_schema_change='append_new_columns'
)
}}
select
1 as id,
'blue' as color,
'2019-01-01' as date_day
"""
# model columns in a different order to schema definitions
my_model_wrong_order_sql = """
{{
config(
materialized = "table"
)
}}
select
'blue' as color,
1 as id,
'2019-01-01' as date_day
"""
# force dependency on foreign_key_model so that foreign key constraint is enforceable
my_model_wrong_order_depends_on_fk_sql = """
{{
config(
materialized = "table"
)
}}
-- depends_on: {{ ref('foreign_key_model') }}
select
'blue' as color,
1 as id,
'2019-01-01' as date_day
"""
my_model_view_wrong_order_sql = """
{{
config(
materialized = "view"
)
}}
select
'blue' as color,
1 as id,
'2019-01-01' as date_day
"""
my_model_incremental_wrong_order_sql = """
{{
config(
materialized = "incremental",
on_schema_change='append_new_columns'
)
}}
select
'blue' as color,
1 as id,
'2019-01-01' as date_day
"""
# force dependency on foreign_key_model so that foreign key constraint is enforceable
my_model_incremental_wrong_order_depends_on_fk_sql = """
{{
config(
materialized = "incremental",
on_schema_change='append_new_columns'
)
}}
-- depends_on: {{ ref('foreign_key_model') }}
select
'blue' as color,
1 as id,
'2019-01-01' as date_day
"""
# model columns name different to schema definitions
my_model_wrong_name_sql = """
{{
config(
materialized = "table"
)
}}
select
'blue' as color,
1 as error,
'2019-01-01' as date_day
"""
my_model_view_wrong_name_sql = """
{{
config(
materialized = "view"
)
}}
select
'blue' as color,
1 as error,
'2019-01-01' as date_day
"""
my_model_incremental_wrong_name_sql = """
{{
config(
materialized = "incremental",
on_schema_change='append_new_columns'
)
}}
select
'blue' as color,
1 as error,
'2019-01-01' as date_day
"""
# model columns data types different to schema definitions
my_model_data_type_sql = """
{{{{
config(
materialized = "table"
)
}}}}
select
{sql_value} as wrong_data_type_column_name
"""
my_model_contract_sql_header_sql = """
{{
config(
materialized = "table"
)
}}
{% call set_sql_header(config) %}
set session time zone 'Asia/Kolkata';
{%- endcall %}
select current_setting('timezone') as column_name
"""
my_model_incremental_contract_sql_header_sql = """
{{
config(
materialized = "incremental",
on_schema_change="append_new_columns"
)
}}
{% call set_sql_header(config) %}
set session time zone 'Asia/Kolkata';
{%- endcall %}
select current_setting('timezone') as column_name
"""
# model breaking constraints
my_model_with_nulls_sql = """
{{
config(
materialized = "table"
)
}}
select
-- null value for 'id'
cast(null as {{ dbt.type_int() }}) as id,
-- change the color as well (to test rollback)
'red' as color,
'2019-01-01' as date_day
"""
my_model_view_with_nulls_sql = """
{{
config(
materialized = "view"
)
}}
select
-- null value for 'id'
cast(null as {{ dbt.type_int() }}) as id,
-- change the color as well (to test rollback)
'red' as color,
'2019-01-01' as date_day
"""
my_model_incremental_with_nulls_sql = """
{{
config(
materialized = "incremental",
on_schema_change='append_new_columns' )
}}
select
-- null value for 'id'
cast(null as {{ dbt.type_int() }}) as id,
-- change the color as well (to test rollback)
'red' as color,
'2019-01-01' as date_day
"""
# 'from' is a reserved word, so it must be quoted
my_model_with_quoted_column_name_sql = """
select
'blue' as {{ adapter.quote('from') }},
1 as id,
'2019-01-01' as date_day
"""
model_schema_yml = """
version: 2
models:
- name: my_model
config:
contract:
enforced: true
columns:
- name: id
data_type: integer
description: hello
constraints:
- type: not_null
- type: primary_key
- type: check
expression: (id > 0)
- type: check
expression: id >= 1
data_tests:
- unique
- name: color
data_type: text
- name: date_day
data_type: text
- name: my_model_error
config:
contract:
enforced: true
columns:
- name: id
data_type: integer
description: hello
constraints:
- type: not_null
- type: primary_key
- type: check
expression: (id > 0)
data_tests:
- unique
- name: color
data_type: text
- name: date_day
data_type: text
- name: my_model_wrong_order
config:
contract:
enforced: true
columns:
- name: id
data_type: integer
description: hello
constraints:
- type: not_null
- type: primary_key
- type: check
expression: (id > 0)
data_tests:
- unique
- name: color
data_type: text
- name: date_day
data_type: text
- name: my_model_wrong_name
config:
contract:
enforced: true
columns:
- name: id
data_type: integer
description: hello
constraints:
- type: not_null
- type: primary_key
- type: check
expression: (id > 0)
data_tests:
- unique
- name: color
data_type: text
- name: date_day
data_type: text
"""
model_fk_constraint_schema_yml = """
version: 2
models:
- name: my_model
config:
contract:
enforced: true
columns:
- name: id
data_type: integer
description: hello
constraints:
- type: not_null
- type: primary_key
- type: check
expression: (id > 0)
- type: check
expression: id >= 1
- type: foreign_key
expression: {schema}.foreign_key_model (id)
- type: unique
data_tests:
- unique
- name: color
data_type: text
- name: date_day
data_type: text
- name: my_model_error
config:
contract:
enforced: true
columns:
- name: id
data_type: integer
description: hello
constraints:
- type: not_null
- type: primary_key
- type: check
expression: (id > 0)
data_tests:
- unique
- name: color
data_type: text
- name: date_day
data_type: text
- name: my_model_wrong_order
config:
contract:
enforced: true
columns:
- name: id
data_type: integer
description: hello
constraints:
- type: not_null
- type: primary_key
- type: check
expression: (id > 0)
data_tests:
- unique
- name: color
data_type: text
- name: date_day
data_type: text
- name: my_model_wrong_name
config:
contract:
enforced: true
columns:
- name: id
data_type: integer
description: hello
constraints:
- type: not_null
- type: primary_key
- type: check
expression: (id > 0)
data_tests:
- unique
- name: color
data_type: text
- name: date_day
data_type: text
- name: foreign_key_model
config:
contract:
enforced: true
columns:
- name: id
data_type: integer
constraints:
- type: unique
- type: primary_key
"""
constrained_model_schema_yml = """
version: 2
models:
- name: my_model
config:
contract:
enforced: true
constraints:
- type: check
expression: (id > 0)
- type: check
expression: id >= 1
- type: primary_key
columns: [ id ]
- type: unique
columns: [ color, date_day ]
name: strange_uniqueness_requirement
- type: foreign_key
columns: [ id ]
expression: {schema}.foreign_key_model (id)
columns:
- name: id
data_type: integer
description: hello
constraints:
- type: not_null
data_tests:
- unique
- name: color
data_type: text
- name: date_day
data_type: text
- name: foreign_key_model
config:
contract:
enforced: true
columns:
- name: id
data_type: integer
constraints:
- type: unique
- type: primary_key
"""
model_data_type_schema_yml = """
version: 2
models:
- name: my_model_data_type
config:
contract:
enforced: true
columns:
- name: wrong_data_type_column_name
data_type: {data_type}
"""
model_quoted_column_schema_yml = """
version: 2
models:
- name: my_model
config:
contract:
enforced: true
materialized: table
constraints:
- type: check
# quoting the reserved word in this expression is on the user
expression: ("from" = 'blue')
columns: [ '"from"' ]
columns:
- name: id
data_type: integer
description: hello
constraints:
- type: not_null
data_tests:
- unique
- name: from # reserved word
quote: true
data_type: text
constraints:
- type: not_null
- name: date_day
data_type: text
"""
model_contract_header_schema_yml = """
version: 2
models:
- name: my_model_contract_sql_header
config:
contract:
enforced: true
columns:
- name: column_name
data_type: text
"""
create_table_macro_sql = """
{% macro create_table_macro() %}
create table if not exists numbers (n int not null primary key)
{% endmacro %}
"""
incremental_foreign_key_schema_yml = """
version: 2
models:
- name: raw_numbers
config:
contract:
enforced: true
materialized: table
columns:
- name: n
data_type: integer
constraints:
- type: primary_key
- type: not_null
- name: stg_numbers
config:
contract:
enforced: true
materialized: incremental
on_schema_change: append_new_columns
unique_key: n
columns:
- name: n
data_type: integer
constraints:
- type: foreign_key
expression: {schema}.raw_numbers (n)
"""
incremental_foreign_key_model_raw_numbers_sql = """
select 1 as n
"""
incremental_foreign_key_model_stg_numbers_sql = """
select * from {{ ref('raw_numbers') }}
"""


@@ -1,564 +0,0 @@
import re
import pytest
from dbt.tests.util import (
get_manifest,
read_file,
relation_from_name,
run_dbt,
run_dbt_and_capture,
write_file,
)
from tests.functional.adapter.constraints.fixtures import (
constrained_model_schema_yml,
create_table_macro_sql,
foreign_key_model_sql,
incremental_foreign_key_model_raw_numbers_sql,
incremental_foreign_key_model_stg_numbers_sql,
incremental_foreign_key_schema_yml,
model_contract_header_schema_yml,
model_data_type_schema_yml,
model_fk_constraint_schema_yml,
model_quoted_column_schema_yml,
model_schema_yml,
my_incremental_model_sql,
my_model_contract_sql_header_sql,
my_model_data_type_sql,
my_model_incremental_contract_sql_header_sql,
my_model_incremental_with_nulls_sql,
my_model_incremental_wrong_name_sql,
my_model_incremental_wrong_order_depends_on_fk_sql,
my_model_incremental_wrong_order_sql,
my_model_sql,
my_model_view_wrong_name_sql,
my_model_view_wrong_order_sql,
my_model_with_nulls_sql,
my_model_with_quoted_column_name_sql,
my_model_wrong_name_sql,
my_model_wrong_order_depends_on_fk_sql,
my_model_wrong_order_sql,
)
class BaseConstraintsColumnsEqual:
"""
dbt should catch these mismatches during its "preflight" checks.
"""
@pytest.fixture
def string_type(self):
return "TEXT"
@pytest.fixture
def int_type(self):
return "INT"
@pytest.fixture
def schema_string_type(self, string_type):
return string_type
@pytest.fixture
def schema_int_type(self, int_type):
return int_type
@pytest.fixture
def data_types(self, schema_int_type, int_type, string_type):
# sql_column_value, schema_data_type, error_data_type
return [
["1", schema_int_type, int_type],
["'1'", string_type, string_type],
["true", "bool", "BOOL"],
["'2013-11-03 00:00:00-07'::timestamptz", "timestamptz", "DATETIMETZ"],
["'2013-11-03 00:00:00-07'::timestamp", "timestamp", "DATETIME"],
["ARRAY['a','b','c']", "text[]", "STRINGARRAY"],
["ARRAY[1,2,3]", "int[]", "INTEGERARRAY"],
["'1'::numeric", "numeric", "DECIMAL"],
["""'{"bar": "baz", "balance": 7.77, "active": false}'::json""", "json", "JSON"],
]
def test__constraints_wrong_column_order(self, project):
# This no longer causes an error, since we enforce yaml column order
run_dbt(["run", "-s", "my_model_wrong_order"], expect_pass=True)
manifest = get_manifest(project.project_root)
model_id = "model.test.my_model_wrong_order"
my_model_config = manifest.nodes[model_id].config
contract_actual_config = my_model_config.contract
assert contract_actual_config.enforced is True
def test__constraints_wrong_column_names(self, project, string_type, int_type):
_, log_output = run_dbt_and_capture(
["run", "-s", "my_model_wrong_name"], expect_pass=False
)
manifest = get_manifest(project.project_root)
model_id = "model.test.my_model_wrong_name"
my_model_config = manifest.nodes[model_id].config
contract_actual_config = my_model_config.contract
assert contract_actual_config.enforced is True
expected = ["id", "error", "missing in definition", "missing in contract"]
assert all([(exp in log_output or exp.upper() in log_output) for exp in expected])
def test__constraints_wrong_column_data_types(
self, project, string_type, int_type, schema_string_type, schema_int_type, data_types
):
for sql_column_value, schema_data_type, error_data_type in data_types:
# Write parametrized data_type to sql file
write_file(
my_model_data_type_sql.format(sql_value=sql_column_value),
"models",
"my_model_data_type.sql",
)
# Write wrong data_type to corresponding schema file
# Write integer type for all schema yaml values except when testing integer type itself
wrong_schema_data_type = (
schema_int_type
if schema_data_type.upper() != schema_int_type.upper()
else schema_string_type
)
wrong_schema_error_data_type = (
int_type if schema_data_type.upper() != schema_int_type.upper() else string_type
)
write_file(
model_data_type_schema_yml.format(data_type=wrong_schema_data_type),
"models",
"constraints_schema.yml",
)
results, log_output = run_dbt_and_capture(
["run", "-s", "my_model_data_type"], expect_pass=False
)
manifest = get_manifest(project.project_root)
model_id = "model.test.my_model_data_type"
my_model_config = manifest.nodes[model_id].config
contract_actual_config = my_model_config.contract
assert contract_actual_config.enforced is True
expected = [
"wrong_data_type_column_name",
error_data_type,
wrong_schema_error_data_type,
"data type mismatch",
]
assert all([(exp in log_output or exp.upper() in log_output) for exp in expected])
def test__constraints_correct_column_data_types(self, project, data_types):
for sql_column_value, schema_data_type, _ in data_types:
# Write parametrized data_type to sql file
write_file(
my_model_data_type_sql.format(sql_value=sql_column_value),
"models",
"my_model_data_type.sql",
)
# Write correct data_type to corresponding schema file
write_file(
model_data_type_schema_yml.format(data_type=schema_data_type),
"models",
"constraints_schema.yml",
)
run_dbt(["run", "-s", "my_model_data_type"])
manifest = get_manifest(project.project_root)
model_id = "model.test.my_model_data_type"
my_model_config = manifest.nodes[model_id].config
contract_actual_config = my_model_config.contract
assert contract_actual_config.enforced is True
def _normalize_whitespace(input: str) -> str:
subbed = re.sub(r"\s+", " ", input)
return re.sub(r"\s?([\(\),])\s?", r"\1", subbed).lower().strip()
def _find_and_replace(sql, find, replace):
sql_tokens = sql.split()
for idx in [n for n, x in enumerate(sql_tokens) if find in x]:
sql_tokens[idx] = replace
return " ".join(sql_tokens)
class BaseConstraintsRuntimeDdlEnforcement:
"""
These constraints pass muster for dbt's preflight checks. Make sure they're
passed into the DDL statement. If they don't match up with the underlying data,
the data platform should raise an error at runtime.
"""
@pytest.fixture(scope="class")
def models(self):
return {
"my_model.sql": my_model_wrong_order_depends_on_fk_sql,
"foreign_key_model.sql": foreign_key_model_sql,
"constraints_schema.yml": model_fk_constraint_schema_yml,
}
@pytest.fixture(scope="class")
def expected_sql(self):
return """
create table <model_identifier> (
id integer not null primary key check ((id > 0)) check (id >= 1) references <foreign_key_model_identifier> (id) unique,
color text,
date_day text
) ;
insert into <model_identifier> (
id ,
color ,
date_day
)
(
select
id,
color,
date_day
from
(
-- depends_on: <foreign_key_model_identifier>
select
'blue' as color,
1 as id,
'2019-01-01' as date_day
) as model_subq
);
"""
def test__constraints_ddl(self, project, expected_sql):
unformatted_constraint_schema_yml = read_file("models", "constraints_schema.yml")
write_file(
unformatted_constraint_schema_yml.format(schema=project.test_schema),
"models",
"constraints_schema.yml",
)
results = run_dbt(["run", "-s", "+my_model"])
# assert at least my_model was run - additional upstreams may or may not be provided to the test setup via models fixture
assert len(results) >= 1
# grab the sql and replace the model identifier to make it generic for all adapters
# the name is not what we're testing here anyway, and it varies based on materialization
# TODO: consider refactoring this to introspect logs instead
generated_sql = read_file("target", "run", "test", "models", "my_model.sql")
generated_sql_generic = _find_and_replace(generated_sql, "my_model", "<model_identifier>")
generated_sql_generic = _find_and_replace(
generated_sql_generic, "foreign_key_model", "<foreign_key_model_identifier>"
)
assert _normalize_whitespace(expected_sql) == _normalize_whitespace(generated_sql_generic)
class BaseConstraintsRollback:
@pytest.fixture(scope="class")
def models(self):
return {
"my_model.sql": my_model_sql,
"constraints_schema.yml": model_schema_yml,
}
@pytest.fixture(scope="class")
def null_model_sql(self):
return my_model_with_nulls_sql
@pytest.fixture(scope="class")
def expected_color(self):
return "blue"
@pytest.fixture(scope="class")
def expected_error_messages(self):
return ['null value in column "id"', "violates not-null constraint"]
def assert_expected_error_messages(self, error_message, expected_error_messages):
assert all(msg in error_message for msg in expected_error_messages)
def test__constraints_enforcement_rollback(
self, project, expected_color, expected_error_messages, null_model_sql
):
results = run_dbt(["run", "-s", "my_model"])
assert len(results) == 1
# Make a contract-breaking change to the model
write_file(null_model_sql, "models", "my_model.sql")
failing_results = run_dbt(["run", "-s", "my_model"], expect_pass=False)
assert len(failing_results) == 1
# Verify the previous table still exists
relation = relation_from_name(project.adapter, "my_model")
old_model_exists_sql = f"select * from {relation}"
old_model_exists = project.run_sql(old_model_exists_sql, fetch="all")
assert len(old_model_exists) == 1
assert old_model_exists[0][1] == expected_color
# Confirm this model was contracted
# TODO: is this step really necessary?
manifest = get_manifest(project.project_root)
model_id = "model.test.my_model"
my_model_config = manifest.nodes[model_id].config
contract_actual_config = my_model_config.contract
assert contract_actual_config.enforced is True
# Its result includes the expected error messages
self.assert_expected_error_messages(failing_results[0].message, expected_error_messages)
class BaseTableConstraintsColumnsEqual(BaseConstraintsColumnsEqual):
@pytest.fixture(scope="class")
def models(self):
return {
"my_model_wrong_order.sql": my_model_wrong_order_sql,
"my_model_wrong_name.sql": my_model_wrong_name_sql,
"constraints_schema.yml": model_schema_yml,
}
class BaseViewConstraintsColumnsEqual(BaseConstraintsColumnsEqual):
@pytest.fixture(scope="class")
def models(self):
return {
"my_model_wrong_order.sql": my_model_view_wrong_order_sql,
"my_model_wrong_name.sql": my_model_view_wrong_name_sql,
"constraints_schema.yml": model_schema_yml,
}
class BaseIncrementalConstraintsColumnsEqual(BaseConstraintsColumnsEqual):
@pytest.fixture(scope="class")
def models(self):
return {
"my_model_wrong_order.sql": my_model_incremental_wrong_order_sql,
"my_model_wrong_name.sql": my_model_incremental_wrong_name_sql,
"constraints_schema.yml": model_schema_yml,
}
class BaseIncrementalConstraintsRuntimeDdlEnforcement(BaseConstraintsRuntimeDdlEnforcement):
@pytest.fixture(scope="class")
def models(self):
return {
"my_model.sql": my_model_incremental_wrong_order_depends_on_fk_sql,
"foreign_key_model.sql": foreign_key_model_sql,
"constraints_schema.yml": model_fk_constraint_schema_yml,
}
class BaseIncrementalConstraintsRollback(BaseConstraintsRollback):
@pytest.fixture(scope="class")
def models(self):
return {
"my_model.sql": my_incremental_model_sql,
"constraints_schema.yml": model_schema_yml,
}
@pytest.fixture(scope="class")
def null_model_sql(self):
return my_model_incremental_with_nulls_sql
class TestTableConstraintsColumnsEqual(BaseTableConstraintsColumnsEqual):
pass
class TestViewConstraintsColumnsEqual(BaseViewConstraintsColumnsEqual):
pass
class TestIncrementalConstraintsColumnsEqual(BaseIncrementalConstraintsColumnsEqual):
pass
class TestTableConstraintsRuntimeDdlEnforcement(BaseConstraintsRuntimeDdlEnforcement):
pass
class TestTableConstraintsRollback(BaseConstraintsRollback):
pass
class TestIncrementalConstraintsRuntimeDdlEnforcement(
BaseIncrementalConstraintsRuntimeDdlEnforcement
):
pass
class TestIncrementalConstraintsRollback(BaseIncrementalConstraintsRollback):
pass
class BaseContractSqlHeader:
"""Tests a contracted model with a sql header dependency."""
def test__contract_sql_header(self, project):
run_dbt(["run", "-s", "my_model_contract_sql_header"])
manifest = get_manifest(project.project_root)
model_id = "model.test.my_model_contract_sql_header"
model_config = manifest.nodes[model_id].config
assert model_config.contract.enforced
class BaseTableContractSqlHeader(BaseContractSqlHeader):
@pytest.fixture(scope="class")
def models(self):
return {
"my_model_contract_sql_header.sql": my_model_contract_sql_header_sql,
"constraints_schema.yml": model_contract_header_schema_yml,
}
class BaseIncrementalContractSqlHeader(BaseContractSqlHeader):
@pytest.fixture(scope="class")
def models(self):
return {
"my_model_contract_sql_header.sql": my_model_incremental_contract_sql_header_sql,
"constraints_schema.yml": model_contract_header_schema_yml,
}
class TestTableContractSqlHeader(BaseTableContractSqlHeader):
pass
class TestIncrementalContractSqlHeader(BaseIncrementalContractSqlHeader):
pass
class BaseModelConstraintsRuntimeEnforcement:
"""
These model-level constraints pass muster for dbt's preflight checks. Make sure they're
passed into the DDL statement. If they don't match up with the underlying data,
the data platform should raise an error at runtime.
"""
@pytest.fixture(scope="class")
def models(self):
return {
"my_model.sql": my_model_wrong_order_depends_on_fk_sql,
"foreign_key_model.sql": foreign_key_model_sql,
"constraints_schema.yml": constrained_model_schema_yml,
}
@pytest.fixture(scope="class")
def expected_sql(self):
return """
create table <model_identifier> (
id integer not null,
color text,
date_day text,
check ((id > 0)),
check (id >= 1),
primary key (id),
constraint strange_uniqueness_requirement unique (color, date_day),
foreign key (id) references <foreign_key_model_identifier> (id)
) ;
insert into <model_identifier> (
id ,
color ,
date_day
)
(
select
id,
color,
date_day
from
(
-- depends_on: <foreign_key_model_identifier>
select
'blue' as color,
1 as id,
'2019-01-01' as date_day
) as model_subq
);
"""
def test__model_constraints_ddl(self, project, expected_sql):
unformatted_constraint_schema_yml = read_file("models", "constraints_schema.yml")
write_file(
unformatted_constraint_schema_yml.format(schema=project.test_schema),
"models",
"constraints_schema.yml",
)
results = run_dbt(["run", "-s", "+my_model"])
# assert at least my_model was run - additional upstreams may or may not be provided to the test setup via models fixture
assert len(results) >= 1
generated_sql = read_file("target", "run", "test", "models", "my_model.sql")
generated_sql_generic = _find_and_replace(generated_sql, "my_model", "<model_identifier>")
generated_sql_generic = _find_and_replace(
generated_sql_generic, "foreign_key_model", "<foreign_key_model_identifier>"
)
assert _normalize_whitespace(expected_sql) == _normalize_whitespace(generated_sql_generic)
class TestModelConstraintsRuntimeEnforcement(BaseModelConstraintsRuntimeEnforcement):
pass
class BaseConstraintQuotedColumn(BaseConstraintsRuntimeDdlEnforcement):
@pytest.fixture(scope="class")
def models(self):
return {
"my_model.sql": my_model_with_quoted_column_name_sql,
"constraints_schema.yml": model_quoted_column_schema_yml,
}
@pytest.fixture(scope="class")
def expected_sql(self):
return """
create table <model_identifier> (
id integer not null,
"from" text not null,
date_day text,
check (("from" = 'blue'))
) ;
insert into <model_identifier> (
id, "from", date_day
)
(
select id, "from", date_day
from (
select
'blue' as "from",
1 as id,
'2019-01-01' as date_day
) as model_subq
);
"""
class TestConstraintQuotedColumn(BaseConstraintQuotedColumn):
pass
class TestIncrementalForeignKeyConstraint:
@pytest.fixture(scope="class")
def macros(self):
return {
"create_table.sql": create_table_macro_sql,
}
@pytest.fixture(scope="class")
def models(self):
return {
"schema.yml": incremental_foreign_key_schema_yml,
"raw_numbers.sql": incremental_foreign_key_model_raw_numbers_sql,
"stg_numbers.sql": incremental_foreign_key_model_stg_numbers_sql,
}
def test_incremental_foreign_key_constraint(self, project):
unformatted_constraint_schema_yml = read_file("models", "schema.yml")
write_file(
unformatted_constraint_schema_yml.format(schema=project.test_schema),
"models",
"schema.yml",
)
run_dbt(["run", "--select", "raw_numbers"])
run_dbt(["run", "--select", "stg_numbers"])
run_dbt(["run", "--select", "stg_numbers"])


@@ -1,106 +0,0 @@
seed_csv = """id,name
1,Alice
2,Bob
"""
table_model_sql = """
{{ config(materialized='table') }}
select * from {{ ref('ephemeral_model') }}
-- establish a macro dependency to trigger state:modified.macros
-- depends on: {{ my_macro() }}
"""
view_model_sql = """
select * from {{ ref('seed') }}
-- establish a macro dependency that trips infinite recursion if not handled
-- depends on: {{ my_infinitely_recursive_macro() }}
"""
ephemeral_model_sql = """
{{ config(materialized='ephemeral') }}
select * from {{ ref('view_model') }}
"""
exposures_yml = """
version: 2
exposures:
- name: my_exposure
type: application
depends_on:
- ref('view_model')
owner:
email: test@example.com
"""
schema_yml = """
version: 2
models:
- name: view_model
columns:
- name: id
data_tests:
- unique:
severity: error
- not_null
- name: name
"""
get_schema_name_sql = """
{% macro generate_schema_name(custom_schema_name, node) -%}
{%- set default_schema = target.schema -%}
{%- if custom_schema_name is not none -%}
{{ return(default_schema ~ '_' ~ custom_schema_name|trim) }}
-- put seeds into a separate schema in "prod", to verify that cloning in "dev" still works
{%- elif target.name == 'default' and node.resource_type == 'seed' -%}
{{ return(default_schema ~ '_' ~ 'seeds') }}
{%- else -%}
{{ return(default_schema) }}
{%- endif -%}
{%- endmacro %}
"""
snapshot_sql = """
{% snapshot my_cool_snapshot %}
{{
config(
target_database=database,
target_schema=schema,
unique_key='id',
strategy='check',
check_cols=['id'],
)
}}
select * from {{ ref('view_model') }}
{% endsnapshot %}
"""
macros_sql = """
{% macro my_macro() %}
{% do log('in a macro' ) %}
{% endmacro %}
"""
infinite_macros_sql = """
{# trigger infinite recursion if not handled #}
{% macro my_infinitely_recursive_macro() %}
{{ return(adapter.dispatch('my_infinitely_recursive_macro')()) }}
{% endmacro %}
{% macro default__my_infinitely_recursive_macro() %}
{% if unmet_condition %}
{{ my_infinitely_recursive_macro() }}
{% else %}
{{ return('') }}
{% endif %}
{% endmacro %}
"""
custom_can_clone_tables_false_macros_sql = """
{% macro can_clone_table() %}
{{ return(False) }}
{% endmacro %}
"""


@@ -1,232 +0,0 @@
import os
import shutil
from collections import Counter
from copy import deepcopy
import pytest
from dbt.exceptions import DbtRuntimeError
from dbt.tests.util import run_dbt, run_dbt_and_capture
from tests.functional.adapter.dbt_clone.fixtures import (
custom_can_clone_tables_false_macros_sql,
ephemeral_model_sql,
exposures_yml,
get_schema_name_sql,
infinite_macros_sql,
macros_sql,
schema_yml,
seed_csv,
snapshot_sql,
table_model_sql,
view_model_sql,
)
class BaseClone:
@pytest.fixture(scope="class")
def models(self):
return {
"table_model.sql": table_model_sql,
"view_model.sql": view_model_sql,
"ephemeral_model.sql": ephemeral_model_sql,
"schema.yml": schema_yml,
"exposures.yml": exposures_yml,
}
@pytest.fixture(scope="class")
def macros(self):
return {
"macros.sql": macros_sql,
"infinite_macros.sql": infinite_macros_sql,
"get_schema_name.sql": get_schema_name_sql,
}
@pytest.fixture(scope="class")
def seeds(self):
return {
"seed.csv": seed_csv,
}
@pytest.fixture(scope="class")
def snapshots(self):
return {
"snapshot.sql": snapshot_sql,
}
@pytest.fixture(scope="class")
def other_schema(self, unique_schema):
return unique_schema + "_other"
@property
def project_config_update(self):
return {
"seeds": {
"test": {
"quote_columns": False,
}
}
}
@pytest.fixture(scope="class")
def profiles_config_update(self, dbt_profile_target, unique_schema, other_schema):
outputs = {"default": dbt_profile_target, "otherschema": deepcopy(dbt_profile_target)}
outputs["default"]["schema"] = unique_schema
outputs["otherschema"]["schema"] = other_schema
return {"test": {"outputs": outputs, "target": "default"}}
def copy_state(self, project_root):
state_path = os.path.join(project_root, "state")
if not os.path.exists(state_path):
os.makedirs(state_path)
shutil.copyfile(
f"{project_root}/target/manifest.json", f"{project_root}/state/manifest.json"
)
def run_and_save_state(self, project_root, with_snapshot=False):
results = run_dbt(["seed"])
assert len(results) == 1
results = run_dbt(["run"])
assert len(results) == 2
results = run_dbt(["test"])
assert len(results) == 2
if with_snapshot:
results = run_dbt(["snapshot"])
assert len(results) == 1
# copy files
self.copy_state(project_root)
# -- Below are the base classes for these tests; import the one that matches whether your adapter supports dbt clone --
class BaseClonePossible(BaseClone):
def test_can_clone_true(self, project, unique_schema, other_schema):
project.create_test_schema(other_schema)
self.run_and_save_state(project.project_root, with_snapshot=True)
clone_args = [
"clone",
"--state",
"state",
"--target",
"otherschema",
]
results = run_dbt(clone_args)
assert len(results) == 4
schema_relations = project.adapter.list_relations(
database=project.database, schema=other_schema
)
types = [r.type for r in schema_relations]
count_types = Counter(types)
assert count_types == Counter({"table": 3, "view": 1})
# objects already exist, so this is a no-op
results = run_dbt(clone_args)
assert len(results) == 4
assert all("no-op" in r.message.lower() for r in results)
# recreate all objects
results = run_dbt([*clone_args, "--full-refresh"])
assert len(results) == 4
# select only models this time
results = run_dbt([*clone_args, "--resource-type", "model"])
assert len(results) == 2
assert all("no-op" in r.message.lower() for r in results)
def test_clone_no_state(self, project, unique_schema, other_schema):
project.create_test_schema(other_schema)
self.run_and_save_state(project.project_root, with_snapshot=True)
clone_args = [
"clone",
"--target",
"otherschema",
]
with pytest.raises(
DbtRuntimeError,
match="--state or --defer-state are required for deferral, but neither was provided",
):
run_dbt(clone_args)
class BaseCloneNotPossible(BaseClone):
@pytest.fixture(scope="class")
def macros(self):
return {
"macros.sql": macros_sql,
"my_can_clone_tables.sql": custom_can_clone_tables_false_macros_sql,
"infinite_macros.sql": infinite_macros_sql,
"get_schema_name.sql": get_schema_name_sql,
}
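# The custom macro forces can_clone_tables to return false, so dbt falls back
# to recreating relations as views; test_can_clone_false asserts that every
# cloned relation is a view.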
def test_can_clone_false(self, project, unique_schema, other_schema):
project.create_test_schema(other_schema)
self.run_and_save_state(project.project_root, with_snapshot=True)
clone_args = [
"clone",
"--state",
"state",
"--target",
"otherschema",
]
results = run_dbt(clone_args)
assert len(results) == 4
schema_relations = project.adapter.list_relations(
database=project.database, schema=other_schema
)
assert all(r.type == "view" for r in schema_relations)
# objects already exist, so this is a no-op
results = run_dbt(clone_args)
assert len(results) == 4
assert all("no-op" in r.message.lower() for r in results)
# recreate all objects
results = run_dbt([*clone_args, "--full-refresh"])
assert len(results) == 4
# select only models this time
results = run_dbt([*clone_args, "--resource-type", "model"])
assert len(results) == 2
assert all("no-op" in r.message.lower() for r in results)
class TestPostgresCloneNotPossible(BaseCloneNotPossible):
@pytest.fixture(autouse=True)
def clean_up(self, project):
yield
with project.adapter.connection_named("__test"):
relation = project.adapter.Relation.create(
database=project.database, schema=f"{project.test_schema}_seeds"
)
project.adapter.drop_schema(relation)
relation = project.adapter.Relation.create(
database=project.database, schema=project.test_schema
)
project.adapter.drop_schema(relation)
class TestCloneSameTargetAndState(BaseClone):
def test_clone_same_target_and_state(self, project, unique_schema, other_schema):
project.create_test_schema(other_schema)
self.run_and_save_state(project.project_root)
clone_args = [
"clone",
"--defer",
"--state",
"target",
]
results, output = run_dbt_and_capture(clone_args, expect_pass=False)
assert "Warning: The state and target directories are the same: 'target'" in output

@@ -1,128 +0,0 @@
import os
import re
import pytest
import yaml
from dbt.cli.exceptions import DbtUsageException
from dbt.tests.util import run_dbt, run_dbt_and_capture
MODELS__MODEL_SQL = """
select 1 as id
"""
class BaseDebug:
@pytest.fixture(scope="class")
def models(self):
return {"model.sql": MODELS__MODEL_SQL}
@pytest.fixture(autouse=True)
def capsys(self, capsys):
self.capsys = capsys
def assertGotValue(self, linepat, result):
found = False
output = self.capsys.readouterr().out
for line in output.split("\n"):
if linepat.match(line):
found = True
assert result in line
if not found:
pytest.fail(f"linepat {linepat} not found in stdout: {output}")
def check_project(self, splitout, msg="ERROR invalid"):
for line in splitout:
if line.strip().startswith("dbt_project.yml file"):
assert msg in line
elif line.strip().startswith("profiles.yml file"):
assert "ERROR invalid" not in line
class BaseDebugProfileVariable(BaseDebug):
@pytest.fixture(scope="class")
def project_config_update(self):
return {"config-version": 2, "profile": '{{ "te" ~ "st" }}'}
class TestDebugPostgres(BaseDebug):
def test_ok(self, project):
run_dbt(["debug"])
assert "ERROR" not in self.capsys.readouterr().out
def test_connection_flag(self, project):
"""Testing that the --connection flag works as expected, including that output is not lost"""
_, out = run_dbt_and_capture(["debug", "--connection"])
assert "Skipping steps before connection verification" in out
_, out = run_dbt_and_capture(
["debug", "--connection", "--target", "NONE"], expect_pass=False
)
assert "1 check failed" in out
assert "The profile 'test' does not have a target named 'NONE'." in out
_, out = run_dbt_and_capture(
["debug", "--connection", "--profiles-dir", "NONE"], expect_pass=False
)
assert "Using profiles dir at NONE"
assert "1 check failed" in out
assert "dbt looked for a profiles.yml file in NONE" in out
def test_nopass(self, project):
run_dbt(["debug", "--target", "nopass"], expect_pass=False)
self.assertGotValue(re.compile(r"\s+profiles\.yml file"), "ERROR invalid")
def test_wronguser(self, project):
run_dbt(["debug", "--target", "wronguser"], expect_pass=False)
self.assertGotValue(re.compile(r"\s+Connection test"), "ERROR")
def test_empty_target(self, project):
run_dbt(["debug", "--target", "none_target"], expect_pass=False)
self.assertGotValue(re.compile(r"\s+output 'none_target'"), "misconfigured")
class TestDebugProfileVariablePostgres(BaseDebugProfileVariable):
pass
class TestDebugInvalidProjectPostgres(BaseDebug):
def test_empty_project(self, project):
with open("dbt_project.yml", "w") as f: # noqa: F841
pass
run_dbt(["debug", "--profile", "test"], expect_pass=False)
splitout = self.capsys.readouterr().out.split("\n")
self.check_project(splitout)
def test_badproject(self, project):
update_project = {"invalid-key": "not a valid key so this is bad project"}
with open("dbt_project.yml", "w") as f:
yaml.safe_dump(update_project, f)
run_dbt(["debug", "--profile", "test"], expect_pass=False)
splitout = self.capsys.readouterr().out.split("\n")
self.check_project(splitout)
def test_not_found_project(self, project):
with pytest.raises(DbtUsageException):
run_dbt(["debug", "--project-dir", "nopass"])
def test_invalid_project_outside_current_dir(self, project):
# create a dbt_project.yml
project_config = {"invalid-key": "not a valid key in this project"}
os.makedirs("custom", exist_ok=True)
with open("custom/dbt_project.yml", "w") as f:
yaml.safe_dump(project_config, f, default_flow_style=True)
run_dbt(["debug", "--project-dir", "custom"], expect_pass=False)
splitout = self.capsys.readouterr().out.split("\n")
self.check_project(splitout)
def test_profile_not_found(self, project):
_, out = run_dbt_and_capture(
["debug", "--connection", "--profile", "NONE"], expect_pass=False
)
assert "Profile loading failed for the following reason" in out
assert "Could not find profile named 'NONE'" in out

@@ -1,36 +0,0 @@
models__sql_header = """
{% call set_sql_header(config) %}
with _variables as (
select 1 as my_variable
)
{%- endcall %}
select my_variable from _variables
"""
models__ephemeral_model = """
{{ config(materialized = 'ephemeral') }}
select
coalesce(sample_num, 0) + 10 as col_deci
from {{ ref('sample_model') }}
"""
models__second_ephemeral_model = """
{{ config(materialized = 'ephemeral') }}
select
col_deci + 100 as col_hundo
from {{ ref('ephemeral_model') }}
"""
models__sample_model = """
select * from {{ ref('sample_seed') }}
"""
seeds__sample_seed = """sample_num,sample_bool
1,true
2,false
3,true
4,false
5,true
6,false
7,true
"""

@@ -1,61 +0,0 @@
import pytest
from dbt.tests.util import run_dbt
from tests.functional.adapter.dbt_show.fixtures import (
models__ephemeral_model,
models__sample_model,
models__second_ephemeral_model,
models__sql_header,
seeds__sample_seed,
)
# -- Below we define base classes for these tests; import them based on whether your adapter supports dbt show --
class BaseShowLimit:
@pytest.fixture(scope="class")
def models(self):
return {
"sample_model.sql": models__sample_model,
"ephemeral_model.sql": models__ephemeral_model,
}
@pytest.fixture(scope="class")
def seeds(self):
return {"sample_seed.csv": seeds__sample_seed}
@pytest.mark.parametrize(
"args,expected",
[
([], 5), # default limit
(["--limit", 3], 3), # fetch 3 rows
(["--limit", -1], 7), # fetch all rows
],
)
def test_limit(self, project, args, expected):
run_dbt(["build"])
dbt_args = ["show", "--inline", models__second_ephemeral_model, *args]
results = run_dbt(dbt_args)
assert len(results.results[0].agate_table) == expected
# ensure limit was injected in compiled_code when limit specified in command args
limit = results.args.get("limit")
if limit > 0:
assert f"limit {limit}" in results.results[0].node.compiled_code
class BaseShowSqlHeader:
@pytest.fixture(scope="class")
def models(self):
return {
"sql_header.sql": models__sql_header,
}
def test_sql_header(self, project):
run_dbt(["show", "--select", "sql_header", "--vars", "timezone: Asia/Kolkata"])
class TestPostgresShowSqlHeader(BaseShowSqlHeader):
pass
class TestPostgresShowLimit(BaseShowLimit):
pass
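# Illustrative CLI equivalents of the parametrized cases above (hypothetical
# model name):
#   dbt show --inline "select * from my_model" --limit 3   # fetch 3 rows
#   dbt show --inline "select * from my_model" --limit -1  # fetch all rows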

@@ -1,331 +0,0 @@
import os
import re
import pytest
from dbt.tests.util import check_relations_equal, run_dbt
models__dependent_sql = """
-- multiple ephemeral refs should share a cte
select * from {{ref('base')}} where gender = 'Male'
union all
select * from {{ref('base')}} where gender = 'Female'
"""
models__double_dependent_sql = """
-- base_copy just pulls from base. Make sure the listed
-- graph of CTEs all share the same dbt_cte__base cte
select * from {{ref('base')}} where gender = 'Male'
union all
select * from {{ref('base_copy')}} where gender = 'Female'
"""
models__super_dependent_sql = """
select * from {{ref('female_only')}}
union all
select * from {{ref('double_dependent')}} where gender = 'Male'
"""
models__base__female_only_sql = """
{{ config(materialized='ephemeral') }}
select * from {{ ref('base_copy') }} where gender = 'Female'
"""
models__base__base_sql = """
{{ config(materialized='ephemeral') }}
select * from {{ this.schema }}.seed
"""
models__base__base_copy_sql = """
{{ config(materialized='ephemeral') }}
select * from {{ ref('base') }}
"""
ephemeral_errors__dependent_sql = """
-- base copy is an error
select * from {{ref('base_copy')}} where gender = 'Male'
"""
ephemeral_errors__base__base_sql = """
{{ config(materialized='ephemeral') }}
select * from {{ this.schema }}.seed
"""
ephemeral_errors__base__base_copy_sql = """
{{ config(materialized='ephemeral') }}
{{ adapter.invalid_method() }}
select * from {{ ref('base') }}
"""
models_n__ephemeral_level_two_sql = """
{{
config(
materialized = "ephemeral",
)
}}
select * from {{ ref('source_table') }}
"""
models_n__root_view_sql = """
select * from {{ref("ephemeral")}}
"""
models_n__ephemeral_sql = """
{{
config(
materialized = "ephemeral",
)
}}
select * from {{ref("ephemeral_level_two")}}
"""
models_n__source_table_sql = """
{{ config(materialized='table') }}
with source_data as (
select 1 as id
union all
select null as id
)
select *
from source_data
"""
seeds__seed_csv = """id,first_name,last_name,email,gender,ip_address
1,Jack,Hunter,jhunter0@pbs.org,Male,59.80.20.168
2,Kathryn,Walker,kwalker1@ezinearticles.com,Female,194.121.179.35
3,Gerald,Ryan,gryan2@com.com,Male,11.3.212.243
4,Bonnie,Spencer,bspencer3@ameblo.jp,Female,216.32.196.175
5,Harold,Taylor,htaylor4@people.com.cn,Male,253.10.246.136
6,Jacqueline,Griffin,jgriffin5@t.co,Female,16.13.192.220
7,Wanda,Arnold,warnold6@google.nl,Female,232.116.150.64
8,Craig,Ortiz,cortiz7@sciencedaily.com,Male,199.126.106.13
9,Gary,Day,gday8@nih.gov,Male,35.81.68.186
10,Rose,Wright,rwright9@yahoo.co.jp,Female,236.82.178.100
11,Raymond,Kelley,rkelleya@fc2.com,Male,213.65.166.67
12,Gerald,Robinson,grobinsonb@disqus.com,Male,72.232.194.193
13,Mildred,Martinez,mmartinezc@samsung.com,Female,198.29.112.5
14,Dennis,Arnold,darnoldd@google.com,Male,86.96.3.250
15,Judy,Gray,jgraye@opensource.org,Female,79.218.162.245
16,Theresa,Garza,tgarzaf@epa.gov,Female,21.59.100.54
17,Gerald,Robertson,grobertsong@csmonitor.com,Male,131.134.82.96
18,Philip,Hernandez,phernandezh@adobe.com,Male,254.196.137.72
19,Julia,Gonzalez,jgonzalezi@cam.ac.uk,Female,84.240.227.174
20,Andrew,Davis,adavisj@patch.com,Male,9.255.67.25
21,Kimberly,Harper,kharperk@foxnews.com,Female,198.208.120.253
22,Mark,Martin,mmartinl@marketwatch.com,Male,233.138.182.153
23,Cynthia,Ruiz,cruizm@google.fr,Female,18.178.187.201
24,Samuel,Carroll,scarrolln@youtu.be,Male,128.113.96.122
25,Jennifer,Larson,jlarsono@vinaora.com,Female,98.234.85.95
26,Ashley,Perry,aperryp@rakuten.co.jp,Female,247.173.114.52
27,Howard,Rodriguez,hrodriguezq@shutterfly.com,Male,231.188.95.26
28,Amy,Brooks,abrooksr@theatlantic.com,Female,141.199.174.118
29,Louise,Warren,lwarrens@adobe.com,Female,96.105.158.28
30,Tina,Watson,twatsont@myspace.com,Female,251.142.118.177
31,Janice,Kelley,jkelleyu@creativecommons.org,Female,239.167.34.233
32,Terry,Mccoy,tmccoyv@bravesites.com,Male,117.201.183.203
33,Jeffrey,Morgan,jmorganw@surveymonkey.com,Male,78.101.78.149
34,Louis,Harvey,lharveyx@sina.com.cn,Male,51.50.0.167
35,Philip,Miller,pmillery@samsung.com,Male,103.255.222.110
36,Willie,Marshall,wmarshallz@ow.ly,Male,149.219.91.68
37,Patrick,Lopez,plopez10@redcross.org,Male,250.136.229.89
38,Adam,Jenkins,ajenkins11@harvard.edu,Male,7.36.112.81
39,Benjamin,Cruz,bcruz12@linkedin.com,Male,32.38.98.15
40,Ruby,Hawkins,rhawkins13@gmpg.org,Female,135.171.129.255
41,Carlos,Barnes,cbarnes14@a8.net,Male,240.197.85.140
42,Ruby,Griffin,rgriffin15@bravesites.com,Female,19.29.135.24
43,Sean,Mason,smason16@icq.com,Male,159.219.155.249
44,Anthony,Payne,apayne17@utexas.edu,Male,235.168.199.218
45,Steve,Cruz,scruz18@pcworld.com,Male,238.201.81.198
46,Anthony,Garcia,agarcia19@flavors.me,Male,25.85.10.18
47,Doris,Lopez,dlopez1a@sphinn.com,Female,245.218.51.238
48,Susan,Nichols,snichols1b@freewebs.com,Female,199.99.9.61
49,Wanda,Ferguson,wferguson1c@yahoo.co.jp,Female,236.241.135.21
50,Andrea,Pierce,apierce1d@google.co.uk,Female,132.40.10.209
51,Lawrence,Phillips,lphillips1e@jugem.jp,Male,72.226.82.87
52,Judy,Gilbert,jgilbert1f@multiply.com,Female,196.250.15.142
53,Eric,Williams,ewilliams1g@joomla.org,Male,222.202.73.126
54,Ralph,Romero,rromero1h@sogou.com,Male,123.184.125.212
55,Jean,Wilson,jwilson1i@ocn.ne.jp,Female,176.106.32.194
56,Lori,Reynolds,lreynolds1j@illinois.edu,Female,114.181.203.22
57,Donald,Moreno,dmoreno1k@bbc.co.uk,Male,233.249.97.60
58,Steven,Berry,sberry1l@eepurl.com,Male,186.193.50.50
59,Theresa,Shaw,tshaw1m@people.com.cn,Female,120.37.71.222
60,John,Stephens,jstephens1n@nationalgeographic.com,Male,191.87.127.115
61,Richard,Jacobs,rjacobs1o@state.tx.us,Male,66.210.83.155
62,Andrew,Lawson,alawson1p@over-blog.com,Male,54.98.36.94
63,Peter,Morgan,pmorgan1q@rambler.ru,Male,14.77.29.106
64,Nicole,Garrett,ngarrett1r@zimbio.com,Female,21.127.74.68
65,Joshua,Kim,jkim1s@edublogs.org,Male,57.255.207.41
66,Ralph,Roberts,rroberts1t@people.com.cn,Male,222.143.131.109
67,George,Montgomery,gmontgomery1u@smugmug.com,Male,76.75.111.77
68,Gerald,Alvarez,galvarez1v@flavors.me,Male,58.157.186.194
69,Donald,Olson,dolson1w@whitehouse.gov,Male,69.65.74.135
70,Carlos,Morgan,cmorgan1x@pbs.org,Male,96.20.140.87
71,Aaron,Stanley,astanley1y@webnode.com,Male,163.119.217.44
72,Virginia,Long,vlong1z@spiegel.de,Female,204.150.194.182
73,Robert,Berry,rberry20@tripadvisor.com,Male,104.19.48.241
74,Antonio,Brooks,abrooks21@unesco.org,Male,210.31.7.24
75,Ruby,Garcia,rgarcia22@ovh.net,Female,233.218.162.214
76,Jack,Hanson,jhanson23@blogtalkradio.com,Male,31.55.46.199
77,Kathryn,Nelson,knelson24@walmart.com,Female,14.189.146.41
78,Jason,Reed,jreed25@printfriendly.com,Male,141.189.89.255
79,George,Coleman,gcoleman26@people.com.cn,Male,81.189.221.144
80,Rose,King,rking27@ucoz.com,Female,212.123.168.231
81,Johnny,Holmes,jholmes28@boston.com,Male,177.3.93.188
82,Katherine,Gilbert,kgilbert29@altervista.org,Female,199.215.169.61
83,Joshua,Thomas,jthomas2a@ustream.tv,Male,0.8.205.30
84,Julie,Perry,jperry2b@opensource.org,Female,60.116.114.192
85,Richard,Perry,rperry2c@oracle.com,Male,181.125.70.232
86,Kenneth,Ruiz,kruiz2d@wikimedia.org,Male,189.105.137.109
87,Jose,Morgan,jmorgan2e@webnode.com,Male,101.134.215.156
88,Donald,Campbell,dcampbell2f@goo.ne.jp,Male,102.120.215.84
89,Debra,Collins,dcollins2g@uol.com.br,Female,90.13.153.235
90,Jesse,Johnson,jjohnson2h@stumbleupon.com,Male,225.178.125.53
91,Elizabeth,Stone,estone2i@histats.com,Female,123.184.126.221
92,Angela,Rogers,arogers2j@goodreads.com,Female,98.104.132.187
93,Emily,Dixon,edixon2k@mlb.com,Female,39.190.75.57
94,Albert,Scott,ascott2l@tinypic.com,Male,40.209.13.189
95,Barbara,Peterson,bpeterson2m@ow.ly,Female,75.249.136.180
96,Adam,Greene,agreene2n@fastcompany.com,Male,184.173.109.144
97,Earl,Sanders,esanders2o@hc360.com,Male,247.34.90.117
98,Angela,Brooks,abrooks2p@mtv.com,Female,10.63.249.126
99,Harold,Foster,hfoster2q@privacy.gov.au,Male,139.214.40.244
100,Carl,Meyer,cmeyer2r@disqus.com,Male,204.117.7.88
"""
class BaseEphemeral:
@pytest.fixture(scope="class")
def seeds(self):
return {
"seed.csv": seeds__seed_csv,
}
class BaseEphemeralMulti:
@pytest.fixture(scope="class")
def seeds(self):
return {
"seed.csv": seeds__seed_csv,
}
@pytest.fixture(scope="class")
def models(self):
return {
"dependent.sql": models__dependent_sql,
"double_dependent.sql": models__double_dependent_sql,
"super_dependent.sql": models__super_dependent_sql,
"base": {
"female_only.sql": models__base__female_only_sql,
"base.sql": models__base__base_sql,
"base_copy.sql": models__base__base_copy_sql,
},
}
class TestEphemeralMulti(BaseEphemeralMulti):
def test_ephemeral_multi(self, project):
run_dbt(["seed"])
results = run_dbt(["run"])
assert len(results) == 3
check_relations_equal(project.adapter, ["seed", "dependent"])
check_relations_equal(project.adapter, ["seed", "double_dependent"])
check_relations_equal(project.adapter, ["seed", "super_dependent"])
assert os.path.exists("./target/run/test/models/double_dependent.sql")
with open("./target/run/test/models/double_dependent.sql", "r") as fp:
sql_file = fp.read()
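# strip digits so the unique, timestamped test schema name collapses to a
# deterministic value that can be compared against expected_sql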
sql_file = re.sub(r"\d+", "", sql_file)
expected_sql = (
'create view "dbt"."test_test_ephemeral"."double_dependent__dbt_tmp" as ('
"with __dbt__cte__base as ("
"select * from test_test_ephemeral.seed"
"), __dbt__cte__base_copy as ("
"select * from __dbt__cte__base"
")-- base_copy just pulls from base. Make sure the listed"
"-- graph of CTEs all share the same dbt_cte__base cte"
"select * from __dbt__cte__base where gender = 'Male'"
"union all"
"select * from __dbt__cte__base_copy where gender = 'Female'"
");"
)
sql_file = "".join(sql_file.split())
expected_sql = "".join(expected_sql.split())
assert sql_file == expected_sql
class TestEphemeralNested(BaseEphemeral):
@pytest.fixture(scope="class")
def models(self):
return {
"ephemeral_level_two.sql": models_n__ephemeral_level_two_sql,
"root_view.sql": models_n__root_view_sql,
"ephemeral.sql": models_n__ephemeral_sql,
"source_table.sql": models_n__source_table_sql,
}
def test_ephemeral_nested(self, project):
results = run_dbt(["run"])
assert len(results) == 2
assert os.path.exists("./target/run/test/models/root_view.sql")
with open("./target/run/test/models/root_view.sql", "r") as fp:
sql_file = fp.read()
sql_file = re.sub(r"\d+", "", sql_file)
expected_sql = (
'create view "dbt"."test_test_ephemeral"."root_view__dbt_tmp" as ('
"with __dbt__cte__ephemeral_level_two as ("
'select * from "dbt"."test_test_ephemeral"."source_table"'
"), __dbt__cte__ephemeral as ("
"select * from __dbt__cte__ephemeral_level_two"
")select * from __dbt__cte__ephemeral"
");"
)
sql_file = "".join(sql_file.split())
expected_sql = "".join(expected_sql.split())
assert sql_file == expected_sql
class TestEphemeralErrorHandling(BaseEphemeral):
@pytest.fixture(scope="class")
def models(self):
return {
"dependent.sql": ephemeral_errors__dependent_sql,
"base": {
"base.sql": ephemeral_errors__base__base_sql,
"base_copy.sql": ephemeral_errors__base__base_copy_sql,
},
}
def test_ephemeral_error_handling(self, project):
results = run_dbt(["run"], expect_pass=False)
assert len(results) == 1
assert results[0].status == "skipped"
assert "Compilation Error" in results[0].message

@@ -1,17 +0,0 @@
drop table if exists {schema}.on_model_hook;
create table {schema}.on_model_hook (
test_state TEXT, -- start|end
target_dbname TEXT,
target_host TEXT,
target_name TEXT,
target_schema TEXT,
target_type TEXT,
target_user TEXT,
target_pass TEXT,
target_threads INTEGER,
run_started_at TEXT,
invocation_id TEXT,
thread_id TEXT
);

@@ -1,17 +0,0 @@
drop table if exists {schema}.on_run_hook;
create table {schema}.on_run_hook (
test_state TEXT, -- start|end
target_dbname TEXT,
target_host TEXT,
target_name TEXT,
target_schema TEXT,
target_type TEXT,
target_user TEXT,
target_pass TEXT,
target_threads INTEGER,
run_started_at TEXT,
invocation_id TEXT,
thread_id TEXT
);

@@ -1,382 +0,0 @@
macros_missing_column = """
{% macro export_table_check() %}
{% set table = 'test_column' %}
{% set query %}
SELECT column_name
FROM {{ref(table)}}
LIMIT 1
{% endset %}
{%- if flags.WHICH in ('run', 'build') -%}
{% set results = run_query(query) %}
{% if execute %}
{%- if results.rows -%}
{{ exceptions.raise_compiler_error("ON_RUN_START_CHECK_NOT_PASSED: Data already exported. DBT Run aborted.") }}
{% else -%}
{{ log("No data found in " ~ table ~ " for current day and runtime region. Proceeding...", true) }}
{%- endif -%}
{%- endif -%}
{%- endif -%}
{% endmacro %}
"""
models__missing_column = """
select 1 as col
"""
macros__before_and_after = """
{% macro custom_run_hook(state, target, run_started_at, invocation_id, table_name="on_run_hook") %}
insert into {{ target.schema }}.{{ table_name }} (
test_state,
target_dbname,
target_host,
target_name,
target_schema,
target_type,
target_user,
target_pass,
target_threads,
run_started_at,
invocation_id,
thread_id
) VALUES (
'{{ state }}',
'{{ target.dbname }}',
'{{ target.host }}',
'{{ target.name }}',
'{{ target.schema }}',
'{{ target.type }}',
'{{ target.user }}',
'{{ target.get("pass", "") }}',
{{ target.threads }},
'{{ run_started_at }}',
'{{ invocation_id }}',
'{{ thread_id }}'
)
{% endmacro %}
"""
macros__hook = """
{% macro hook() %}
select 1
{% endmacro %}
"""
models__hooks = """
select 1 as id
"""
models__hooks_configured = """
{{
config({
"pre_hook": "\
insert into {{this.schema}}.on_model_hook (\
test_state,\
target_dbname,\
target_host,\
target_name,\
target_schema,\
target_type,\
target_user,\
target_pass,\
target_threads,\
run_started_at,\
invocation_id,\
thread_id
) VALUES (\
'start',\
'{{ target.dbname }}',\
'{{ target.host }}',\
'{{ target.name }}',\
'{{ target.schema }}',\
'{{ target.type }}',\
'{{ target.user }}',\
'{{ target.get(\\"pass\\", \\"\\") }}',\
{{ target.threads }},\
'{{ run_started_at }}',\
'{{ invocation_id }}',\
'{{ thread_id }}'\
)",
"post-hook": "\
insert into {{this.schema}}.on_model_hook (\
test_state,\
target_dbname,\
target_host,\
target_name,\
target_schema,\
target_type,\
target_user,\
target_pass,\
target_threads,\
run_started_at,\
invocation_id,\
thread_id
) VALUES (\
'end',\
'{{ target.dbname }}',\
'{{ target.host }}',\
'{{ target.name }}',\
'{{ target.schema }}',\
'{{ target.type }}',\
'{{ target.user }}',\
'{{ target.get(\\"pass\\", \\"\\") }}',\
{{ target.threads }},\
'{{ run_started_at }}',\
'{{ invocation_id }}',\
'{{ thread_id }}'\
)"
})
}}
select 3 as id
"""
models__hooks_error = """
{{
config({
"pre_hook": "\
insert into {{this.schema}}.on_model_hook (\
test_state,\
target_dbname,\
target_host,\
target_name,\
target_schema,\
target_type,\
target_user,\
target_pass,\
target_threads,\
run_started_at,\
invocation_id,\
thread_id
) VALUES (\
'start',\
'{{ target.dbname }}',\
'{{ target.host }}',\
'{{ target.name }}',\
'{{ target.schema }}',\
'{{ target.type }}',\
'{{ target.user }}',\
'{{ target.get(\\"pass\\", \\"\\") }}',\
{{ target.threads }},\
'{{ run_started_at }}',\
'{{ invocation_id }}',\
'{{ thread_id }}'
)",
"pre-hook": "\
insert into {{this.schema}}.on_model_hook (\
test_state,\
target_dbname,\
target_host,\
target_name,\
target_schema,\
target_type,\
target_user,\
target_pass,\
target_threads,\
run_started_at,\
invocation_id,\
thread_id
) VALUES (\
'start',\
'{{ target.dbname }}',\
'{{ target.host }}',\
'{{ target.name }}',\
'{{ target.schema }}',\
'{{ target.type }}',\
'{{ target.user }}',\
'{{ target.get(\\"pass\\", \\"\\") }}',\
{{ target.threads }},\
'{{ run_started_at }}',\
'{{ invocation_id }}',\
'{{ thread_id }}'
)",
"post-hook": "\
insert into {{this.schema}}.on_model_hook (\
test_state,\
target_dbname,\
target_host,\
target_name,\
target_schema,\
target_type,\
target_user,\
target_pass,\
target_threads,\
run_started_at,\
invocation_id,\
thread_id
) VALUES (\
'end',\
'{{ target.dbname }}',\
'{{ target.host }}',\
'{{ target.name }}',\
'{{ target.schema }}',\
'{{ target.type }}',\
'{{ target.user }}',\
'{{ target.get(\\"pass\\", \\"\\") }}',\
{{ target.threads }},\
'{{ run_started_at }}',\
'{{ invocation_id }}',\
'{{ thread_id }}'\
)"
})
}}
select 3 as id
"""
models__hooks_kwargs = """
{{
config(
pre_hook="\
insert into {{this.schema}}.on_model_hook (\
test_state,\
target_dbname,\
target_host,\
target_name,\
target_schema,\
target_type,\
target_user,\
target_pass,\
target_threads,\
run_started_at,\
invocation_id,\
thread_id
) VALUES (\
'start',\
'{{ target.dbname }}',\
'{{ target.host }}',\
'{{ target.name }}',\
'{{ target.schema }}',\
'{{ target.type }}',\
'{{ target.user }}',\
'{{ target.get(\\"pass\\", \\"\\") }}',\
{{ target.threads }},\
'{{ run_started_at }}',\
'{{ invocation_id }}',\
'{{ thread_id }}'\
)",
post_hook="\
insert into {{this.schema}}.on_model_hook (\
test_state,\
target_dbname,\
target_host,\
target_name,\
target_schema,\
target_type,\
target_user,\
target_pass,\
target_threads,\
run_started_at,\
invocation_id,\
thread_id\
) VALUES (\
'end',\
'{{ target.dbname }}',\
'{{ target.host }}',\
'{{ target.name }}',\
'{{ target.schema }}',\
'{{ target.type }}',\
'{{ target.user }}',\
'{{ target.get(\\"pass\\", \\"\\") }}',\
{{ target.threads }},\
'{{ run_started_at }}',\
'{{ invocation_id }}',\
'{{ thread_id }}'\
)"
)
}}
select 3 as id
"""
models__hooked = """
{{
config({
"pre_hook": "\
insert into {{this.schema}}.on_model_hook select
test_state,
'{{ target.dbname }}' as target_dbname,\
'{{ target.host }}' as target_host,\
'{{ target.name }}' as target_name,\
'{{ target.schema }}' as target_schema,\
'{{ target.type }}' as target_type,\
'{{ target.user }}' as target_user,\
'{{ target.get(\\"pass\\", \\"\\") }}' as target_pass,\
{{ target.threads }} as target_threads,\
'{{ run_started_at }}' as run_started_at,\
'{{ invocation_id }}' as invocation_id,\
'{{ thread_id }}' as thread_id
from {{ ref('pre') }}\
"
})
}}
select 1 as id
"""
models__post = """
select 'end' as test_state
"""
models__pre = """
select 'start' as test_state
"""
snapshots__test_snapshot = """
{% snapshot example_snapshot %}
{{
config(target_schema=schema, unique_key='a', strategy='check', check_cols='all')
}}
select * from {{ ref('example_seed') }}
{% endsnapshot %}
"""
properties__seed_models = """
version: 2
seeds:
- name: example_seed
columns:
- name: new_col
data_tests:
- not_null
"""
properties__test_snapshot_models = """
version: 2
snapshots:
- name: example_snapshot
columns:
- name: new_col
data_tests:
- not_null
"""
properties__model_hooks = """
version: 2
models:
- name: hooks
config:
pre_hook: "{{ custom_run_hook('start', target, run_started_at, invocation_id, table_name='on_model_hook') }}"
post_hook: "{{ custom_run_hook('end', target, run_started_at, invocation_id, table_name='on_model_hook') }}"
"""
properties__model_hooks_list = """
version: 2
models:
- name: hooks
config:
pre_hook:
- "{{ custom_run_hook('start', target, run_started_at, invocation_id, table_name='on_model_hook') }}"
post_hook:
- "{{ custom_run_hook('end', target, run_started_at, invocation_id, table_name='on_model_hook') }}"
"""
seeds__example_seed_csv = """a,b,c
1,2,3
4,5,6
7,8,9
"""

@@ -1,476 +0,0 @@
from pathlib import Path
import pytest
from dbt.exceptions import ParsingError
from dbt.tests.util import run_dbt, write_file
from dbt_common.exceptions import CompilationError
from tests.functional.adapter.hooks.fixtures import (
macros__before_and_after,
models__hooked,
models__hooks,
models__hooks_configured,
models__hooks_error,
models__hooks_kwargs,
models__post,
models__pre,
properties__model_hooks,
properties__model_hooks_list,
properties__seed_models,
properties__test_snapshot_models,
seeds__example_seed_csv,
snapshots__test_snapshot,
)
MODEL_PRE_HOOK = """
insert into {{this.schema}}.on_model_hook (
test_state,
target_dbname,
target_host,
target_name,
target_schema,
target_type,
target_user,
target_pass,
target_threads,
run_started_at,
invocation_id,
thread_id
) VALUES (
'start',
'{{ target.dbname }}',
'{{ target.host }}',
'{{ target.name }}',
'{{ target.schema }}',
'{{ target.type }}',
'{{ target.user }}',
'{{ target.get("pass", "") }}',
{{ target.threads }},
'{{ run_started_at }}',
'{{ invocation_id }}',
'{{ thread_id }}'
)
"""
MODEL_POST_HOOK = """
insert into {{this.schema}}.on_model_hook (
test_state,
target_dbname,
target_host,
target_name,
target_schema,
target_type,
target_user,
target_pass,
target_threads,
run_started_at,
invocation_id,
thread_id
) VALUES (
'end',
'{{ target.dbname }}',
'{{ target.host }}',
'{{ target.name }}',
'{{ target.schema }}',
'{{ target.type }}',
'{{ target.user }}',
'{{ target.get("pass", "") }}',
{{ target.threads }},
'{{ run_started_at }}',
'{{ invocation_id }}',
'{{ thread_id }}'
)
"""
class BaseTestPrePost(object):
@pytest.fixture(scope="class", autouse=True)
def setUp(self, project):
project.run_sql_file(project.test_data_dir / Path("seed_model.sql"))
def get_ctx_vars(self, state, count, project):
fields = [
"test_state",
"target_dbname",
"target_host",
"target_name",
"target_schema",
"target_threads",
"target_type",
"target_user",
"target_pass",
"run_started_at",
"invocation_id",
"thread_id",
]
field_list = ", ".join(['"{}"'.format(f) for f in fields])
query = f"select {field_list} from {project.test_schema}.on_model_hook where test_state = '{state}'"
vals = project.run_sql(query, fetch="all")
assert len(vals) != 0, "nothing inserted into hooks table"
assert len(vals) >= count, "too few rows in hooks table"
assert len(vals) <= count, "too many rows in hooks table"
return [{k: v for k, v in zip(fields, val)} for val in vals]
def check_hooks(self, state, project, host, count=1):
ctxs = self.get_ctx_vars(state, count=count, project=project)
for ctx in ctxs:
assert ctx["test_state"] == state
assert ctx["target_dbname"] == "dbt"
assert ctx["target_host"] == host
assert ctx["target_name"] == "default"
assert ctx["target_schema"] == project.test_schema
assert ctx["target_threads"] == 4
assert ctx["target_type"] == "postgres"
assert ctx["target_user"] == "root"
assert ctx["target_pass"] == ""
assert (
ctx["run_started_at"] is not None and len(ctx["run_started_at"]) > 0
), "run_started_at was not set"
assert (
ctx["invocation_id"] is not None and len(ctx["invocation_id"]) > 0
), "invocation_id was not set"
assert ctx["thread_id"].startswith("Thread-")
class TestPrePostModelHooks(BaseTestPrePost):
@pytest.fixture(scope="class")
def project_config_update(self):
return {
"models": {
"test": {
"pre-hook": [
# inside transaction (runs second)
MODEL_PRE_HOOK,
# outside transaction (runs first)
{"sql": "vacuum {{ this.schema }}.on_model_hook", "transaction": False},
],
"post-hook": [
# outside transaction (runs second)
{"sql": "vacuum {{ this.schema }}.on_model_hook", "transaction": False},
# inside transaction (runs first)
MODEL_POST_HOOK,
],
}
}
}
@pytest.fixture(scope="class")
def models(self):
return {"hooks.sql": models__hooks}
def test_pre_and_post_run_hooks(self, project, dbt_profile_target):
run_dbt()
self.check_hooks("start", project, dbt_profile_target.get("host", None))
self.check_hooks("end", project, dbt_profile_target.get("host", None))
class TestPrePostModelHooksUnderscores(TestPrePostModelHooks):
@pytest.fixture(scope="class")
def project_config_update(self):
return {
"models": {
"test": {
"pre_hook": [
# inside transaction (runs second)
MODEL_PRE_HOOK,
# outside transaction (runs first)
{"sql": "vacuum {{ this.schema }}.on_model_hook", "transaction": False},
],
"post_hook": [
# outside transaction (runs second)
{"sql": "vacuum {{ this.schema }}.on_model_hook", "transaction": False},
# inside transaction (runs first)
MODEL_POST_HOOK,
],
}
}
}
class TestHookRefs(BaseTestPrePost):
@pytest.fixture(scope="class")
def project_config_update(self):
return {
"models": {
"test": {
"hooked": {
"post-hook": [
"""
insert into {{this.schema}}.on_model_hook select
test_state,
'{{ target.dbname }}' as target_dbname,
'{{ target.host }}' as target_host,
'{{ target.name }}' as target_name,
'{{ target.schema }}' as target_schema,
'{{ target.type }}' as target_type,
'{{ target.user }}' as target_user,
'{{ target.get(pass, "") }}' as target_pass,
{{ target.threads }} as target_threads,
'{{ run_started_at }}' as run_started_at,
'{{ invocation_id }}' as invocation_id,
'{{ thread_id }}' as thread_id
from {{ ref('post') }}""".strip()
],
}
},
}
}
@pytest.fixture(scope="class")
def models(self):
return {"hooked.sql": models__hooked, "post.sql": models__post, "pre.sql": models__pre}
def test_pre_post_model_hooks_refed(self, project, dbt_profile_target):
run_dbt()
self.check_hooks("start", project, dbt_profile_target.get("host", None))
self.check_hooks("end", project, dbt_profile_target.get("host", None))
class TestPrePostModelHooksOnSeeds(object):
@pytest.fixture(scope="class")
def seeds(self):
return {"example_seed.csv": seeds__example_seed_csv}
@pytest.fixture(scope="class")
def models(self):
return {"schema.yml": properties__seed_models}
@pytest.fixture(scope="class")
def project_config_update(self):
return {
"seed-paths": ["seeds"],
"models": {},
"seeds": {
"post-hook": [
"alter table {{ this }} add column new_col int",
"update {{ this }} set new_col = 1",
# call any macro to track dependency: https://github.com/dbt-labs/dbt-core/issues/6806
"select null::{{ dbt.type_int() }} as id",
],
"quote_columns": False,
},
}
def test_hooks_on_seeds(self, project):
res = run_dbt(["seed"])
assert len(res) == 1, "Expected exactly one item"
res = run_dbt(["test"])
assert len(res) == 1, "Expected exactly one item"
class TestPrePostModelHooksWithMacros(BaseTestPrePost):
@pytest.fixture(scope="class")
def macros(self):
return {"before-and-after.sql": macros__before_and_after}
@pytest.fixture(scope="class")
def models(self):
return {"schema.yml": properties__model_hooks, "hooks.sql": models__hooks}
def test_pre_and_post_run_hooks(self, project, dbt_profile_target):
run_dbt()
self.check_hooks("start", project, dbt_profile_target.get("host", None))
self.check_hooks("end", project, dbt_profile_target.get("host", None))
class TestPrePostModelHooksListWithMacros(TestPrePostModelHooksWithMacros):
@pytest.fixture(scope="class")
def models(self):
return {"schema.yml": properties__model_hooks_list, "hooks.sql": models__hooks}
class TestHooksRefsOnSeeds:
"""
This should not succeed, and raise an explicit error
https://github.com/dbt-labs/dbt-core/issues/6806
"""
@pytest.fixture(scope="class")
def seeds(self):
return {"example_seed.csv": seeds__example_seed_csv}
@pytest.fixture(scope="class")
def models(self):
return {"schema.yml": properties__seed_models, "post.sql": models__post}
@pytest.fixture(scope="class")
def project_config_update(self):
return {
"seeds": {
"post-hook": [
"select * from {{ ref('post') }}",
],
},
}
def test_hook_with_ref_on_seeds(self, project):
with pytest.raises(ParsingError) as excinfo:
run_dbt(["parse"])
assert "Seeds cannot depend on other nodes" in str(excinfo.value)
class TestPrePostModelHooksOnSeedsPlusPrefixed(TestPrePostModelHooksOnSeeds):
@pytest.fixture(scope="class")
def project_config_update(self):
return {
"seed-paths": ["seeds"],
"models": {},
"seeds": {
"+post-hook": [
"alter table {{ this }} add column new_col int",
"update {{ this }} set new_col = 1",
],
"quote_columns": False,
},
}
class TestPrePostModelHooksOnSeedsPlusPrefixedWhitespace(TestPrePostModelHooksOnSeeds):
@pytest.fixture(scope="class")
def project_config_update(self):
return {
"seed-paths": ["seeds"],
"models": {},
"seeds": {
"+post-hook": [
"alter table {{ this }} add column new_col int",
"update {{ this }} set new_col = 1",
],
"quote_columns": False,
},
}
class TestPrePostModelHooksOnSnapshots(object):
@pytest.fixture(scope="class", autouse=True)
def setUp(self, project):
path = Path(project.project_root) / "test-snapshots"
Path.mkdir(path)
write_file(snapshots__test_snapshot, path, "snapshot.sql")
@pytest.fixture(scope="class")
def models(self):
return {"schema.yml": properties__test_snapshot_models}
@pytest.fixture(scope="class")
def seeds(self):
return {"example_seed.csv": seeds__example_seed_csv}
@pytest.fixture(scope="class")
def project_config_update(self):
return {
"seed-paths": ["seeds"],
"snapshot-paths": ["test-snapshots"],
"models": {},
"snapshots": {
"post-hook": [
"alter table {{ this }} add column new_col int",
"update {{ this }} set new_col = 1",
]
},
"seeds": {
"quote_columns": False,
},
}
def test_hooks_on_snapshots(self, project):
res = run_dbt(["seed"])
assert len(res) == 1, "Expected exactly one item"
res = run_dbt(["snapshot"])
assert len(res) == 1, "Expected exactly one item"
res = run_dbt(["test"])
assert len(res) == 1, "Expected exactly one item"
class PrePostModelHooksInConfigSetup(BaseTestPrePost):
@pytest.fixture(scope="class")
def project_config_update(self):
return {
"macro-paths": ["macros"],
}
@pytest.fixture(scope="class")
def models(self):
return {"hooks.sql": models__hooks_configured}
class TestPrePostModelHooksInConfig(PrePostModelHooksInConfigSetup):
def test_pre_and_post_model_hooks_model(self, project, dbt_profile_target):
run_dbt()
self.check_hooks("start", project, dbt_profile_target.get("host", None))
self.check_hooks("end", project, dbt_profile_target.get("host", None))
class TestPrePostModelHooksInConfigWithCount(PrePostModelHooksInConfigSetup):
@pytest.fixture(scope="class")
def project_config_update(self):
return {
"models": {
"test": {
"pre-hook": [
# inside transaction (runs second)
MODEL_PRE_HOOK,
# outside transaction (runs first)
{"sql": "vacuum {{ this.schema }}.on_model_hook", "transaction": False},
],
"post-hook": [
# outside transaction (runs second)
{"sql": "vacuum {{ this.schema }}.on_model_hook", "transaction": False},
# inside transaction (runs first)
MODEL_POST_HOOK,
],
}
}
}
def test_pre_and_post_model_hooks_model_and_project(self, project, dbt_profile_target):
run_dbt()
self.check_hooks("start", project, dbt_profile_target.get("host", None), count=2)
self.check_hooks("end", project, dbt_profile_target.get("host", None), count=2)
class TestPrePostModelHooksInConfigKwargs(TestPrePostModelHooksInConfig):
@pytest.fixture(scope="class")
def models(self):
return {"hooks.sql": models__hooks_kwargs}
class TestPrePostSnapshotHooksInConfigKwargs(TestPrePostModelHooksOnSnapshots):
@pytest.fixture(scope="class", autouse=True)
def setUp(self, project):
path = Path(project.project_root) / "test-kwargs-snapshots"
Path.mkdir(path)
write_file(snapshots__test_snapshot, path, "snapshot.sql")
@pytest.fixture(scope="class")
def project_config_update(self):
return {
"seed-paths": ["seeds"],
"snapshot-paths": ["test-kwargs-snapshots"],
"models": {},
"snapshots": {
"post-hook": [
"alter table {{ this }} add column new_col int",
"update {{ this }} set new_col = 1",
]
},
"seeds": {
"quote_columns": False,
},
}
class TestDuplicateHooksInConfigs(object):
@pytest.fixture(scope="class")
def models(self):
return {"hooks.sql": models__hooks_error}
def test_run_duplicate_hook_defs(self, project):
with pytest.raises(CompilationError) as exc:
run_dbt()
assert "pre_hook" in str(exc.value)
assert "pre-hook" in str(exc.value)

@@ -1,268 +0,0 @@
import pytest
from dbt.artifacts.schemas.results import RunStatus
from dbt.contracts.graph.nodes import HookNode
from dbt.tests.util import get_artifact, run_dbt, run_dbt_and_capture
from dbt_common.exceptions import CompilationError
class Test__StartHookFail__FlagIsNone__ModelFail:
@pytest.fixture(scope="class")
def flags(self):
return {}
@pytest.fixture(scope="class")
def project_config_update(self, flags):
return {
"on-run-start": [
"create table {{ target.schema }}.my_hook_table ( id int )", # success
"drop table {{ target.schema }}.my_hook_table", # success
"insert into {{ target.schema }}.my_hook_table (id) values (1, 2, 3)", # fail
"create table {{ target.schema }}.my_hook_table ( id int )", # skip
],
"flags": flags,
}
@pytest.fixture(scope="class")
def models(self):
return {
"my_model.sql": "select * from {{ target.schema }}.my_hook_table"
" union all "
"select * from {{ target.schema }}.my_end_table"
}
@pytest.fixture(scope="class")
def my_model_run_status(self):
return RunStatus.Error
def test_results(self, project, my_model_run_status):
results, log_output = run_dbt_and_capture(["run"], expect_pass=False)
expected_results = [
("operation.test.test-on-run-start-0", RunStatus.Success),
("operation.test.test-on-run-start-1", RunStatus.Success),
("operation.test.test-on-run-start-2", RunStatus.Error),
("operation.test.test-on-run-start-3", RunStatus.Skipped),
("model.test.my_model", my_model_run_status),
]
assert [(result.node.unique_id, result.status) for result in results] == expected_results
assert [
(result.node.unique_id, result.node.node_info["node_status"])
for result in results
if isinstance(result.node, HookNode)
] == [(id, str(status)) for id, status in expected_results if id.startswith("operation")]
for result in results:
if result.status == RunStatus.Skipped:
continue
timing_keys = [timing.name for timing in result.timing]
assert timing_keys == ["compile", "execute"]
assert "4 project hooks, 1 view model" in log_output
run_results = get_artifact(project.project_root, "target", "run_results.json")
assert [
(result["unique_id"], result["status"]) for result in run_results["results"]
] == expected_results
assert (
f'relation "{project.test_schema}.my_hook_table" does not exist'
in run_results["results"][2]["message"]
)
class Test__StartHookFail__FlagIsFalse__ModelFail(Test__StartHookFail__FlagIsNone__ModelFail):
@pytest.fixture(scope="class")
def flags(self):
return {"skip_nodes_if_on_run_start_fails": False}
class Test__StartHookFail__FlagIsTrue__ModelSkipped(Test__StartHookFail__FlagIsNone__ModelFail):
@pytest.fixture(scope="class")
def flags(self):
return {"skip_nodes_if_on_run_start_fails": True}
@pytest.fixture(scope="class")
def my_model_run_status(self):
return RunStatus.Skipped
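# With skip_nodes_if_on_run_start_fails set, downstream nodes are skipped once
# an on-run-start hook fails, instead of failing at runtime as above.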
class Test__ModelPass__EndHookFail:
@pytest.fixture(scope="class")
def project_config_update(self):
return {
"on-run-end": [
"create table {{ target.schema }}.my_hook_table ( id int )", # success
"drop table {{ target.schema }}.my_hook_table", # success
"insert into {{ target.schema }}.my_hook_table (id) values (1, 2, 3)", # fail
"create table {{ target.schema }}.my_hook_table ( id int )", # skip
],
}
@pytest.fixture(scope="class")
def models(self):
return {"my_model.sql": "select 1"}
def test_results(self, project):
results, log_output = run_dbt_and_capture(["run"], expect_pass=False)
expected_results = [
("model.test.my_model", RunStatus.Success),
("operation.test.test-on-run-end-0", RunStatus.Success),
("operation.test.test-on-run-end-1", RunStatus.Success),
("operation.test.test-on-run-end-2", RunStatus.Error),
("operation.test.test-on-run-end-3", RunStatus.Skipped),
]
assert [(result.node.unique_id, result.status) for result in results] == expected_results
assert "4 project hooks, 1 view model" in log_output
run_results = get_artifact(project.project_root, "target", "run_results.json")
assert [
(result["unique_id"], result["status"]) for result in run_results["results"]
] == expected_results
assert (
f'relation "{project.test_schema}.my_hook_table" does not exist'
in run_results["results"][3]["message"]
)
class Test__SelectorEmpty__NoHooksRan:
@pytest.fixture(scope="class")
def project_config_update(self):
return {
"on-run-start": [
"create table {{ target.schema }}.my_hook_table ( id int )", # success
"drop table {{ target.schema }}.my_hook_table", # success
],
"on-run-end": [
"create table {{ target.schema }}.my_hook_table ( id int )", # success
"drop table {{ target.schema }}.my_hook_table", # success
],
}
@pytest.fixture(scope="class")
def models(self):
return {"my_model.sql": "select 1"}
def test_results(self, project):
results, log_output = run_dbt_and_capture(
["--debug", "run", "--select", "tag:no_such_tag", "--log-format", "json"]
)
assert results.results == []
assert (
"The selection criterion 'tag:no_such_tag' does not match any enabled nodes"
in log_output
)
run_results = get_artifact(project.project_root, "target", "run_results.json")
assert run_results["results"] == []
class Test__HookContext__HookSuccess:
@pytest.fixture(scope="class")
def project_config_update(self):
return {
"on-run-start": [
"select 1 as id", # success
"select 1 as id", # success
],
"on-run-end": [
'{{ log("Num Results in context: " ~ results|length)}}'
"{{ output_thread_ids(results) }}",
],
}
@pytest.fixture(scope="class")
def macros(self):
return {
"log.sql": """
{% macro output_thread_ids(results) %}
{% for result in results %}
{{ log("Thread ID: " ~ result.thread_id) }}
{% endfor %}
{% endmacro %}
"""
}
@pytest.fixture(scope="class")
def models(self):
return {"my_model.sql": "select 1"}
def test_results_in_context_success(self, project):
results, log_output = run_dbt_and_capture(["--debug", "run"])
assert "Thread ID: " in log_output
assert "Thread ID: main" not in log_output
assert results[0].thread_id == "main" # hook still exists in run results
assert "Num Results in context: 1" in log_output # only model given hook was successful
class Test__HookContext__HookFail:
@pytest.fixture(scope="class")
def project_config_update(self):
return {
"on-run-start": [
"select a as id", # fail
],
"on-run-end": [
'{{ log("Num Results in context: " ~ results|length)}}'
"{{ output_thread_ids(results) }}",
],
}
@pytest.fixture(scope="class")
def macros(self):
return {
"log.sql": """
{% macro output_thread_ids(results) %}
{% for result in results %}
{{ log("Thread ID: " ~ result.thread_id) }}
{% endfor %}
{% endmacro %}
"""
}
@pytest.fixture(scope="class")
def models(self):
return {"my_model.sql": "select 1"}
def test_results_in_context_hook_fail(self, project):
results, log_output = run_dbt_and_capture(["--debug", "run"], expect_pass=False)
assert "Thread ID: main" in log_output
assert results[0].thread_id == "main"
assert "Num Results in context: 2" in log_output # failed hook and model
class Test__HookCompilationError:
@pytest.fixture(scope="class")
def models(self):
return {"my_model.sql": "select 1 as id"}
@pytest.fixture(scope="class")
def macros(self):
return {
"rce.sql": """
{% macro rce(relation) %}
{% if execute %}
{{ exceptions.raise_compiler_error("Always raise a compiler error in execute") }}
{% endif %}
{% endmacro %}
"""
}
@pytest.fixture(scope="class")
def project_config_update(self):
return {
"on-run-end": ["{{ rce() }}"],
}
def test_results(self, project):
with pytest.raises(CompilationError, match="Always raise a compiler error in execute"):
run_dbt(["run"], expect_pass=False)
run_results = get_artifact(project.project_root, "target", "run_results.json")
assert [(result["unique_id"], result["status"]) for result in run_results["results"]] == [
("model.test.my_model", RunStatus.Success)
]

@@ -1,305 +0,0 @@
#
# Models
#
_MODELS__INCREMENTAL_SYNC_REMOVE_ONLY = """
{{
config(
materialized='incremental',
unique_key='id',
on_schema_change='sync_all_columns'
)
}}
WITH source_data AS (SELECT * FROM {{ ref('model_a') }} )
{% set string_type = dbt.type_string() %}
{% if is_incremental() %}
SELECT id,
cast(field1 as {{string_type}}) as field1
FROM source_data WHERE id NOT IN (SELECT id from {{ this }} )
{% else %}
select id,
cast(field1 as {{string_type}}) as field1,
cast(field2 as {{string_type}}) as field2
from source_data where id <= 3
{% endif %}
"""
_MODELS__INCREMENTAL_IGNORE = """
{{
config(
materialized='incremental',
unique_key='id',
on_schema_change='ignore'
)
}}
WITH source_data AS (SELECT * FROM {{ ref('model_a') }} )
{% if is_incremental() %}
SELECT id, field1, field2, field3, field4 FROM source_data WHERE id NOT IN (SELECT id from {{ this }} )
{% else %}
SELECT id, field1, field2 FROM source_data LIMIT 3
{% endif %}
"""
_MODELS__INCREMENTAL_SYNC_REMOVE_ONLY_TARGET = """
{{
config(materialized='table')
}}
with source_data as (
select * from {{ ref('model_a') }}
)
{% set string_type = dbt.type_string() %}
select id
,cast(field1 as {{string_type}}) as field1
from source_data
order by id
"""
_MODELS__INCREMENTAL_IGNORE_TARGET = """
{{
config(materialized='table')
}}
with source_data as (
select * from {{ ref('model_a') }}
)
select id
,field1
,field2
from source_data
"""
_MODELS__INCREMENTAL_FAIL = """
{{
config(
materialized='incremental',
unique_key='id',
on_schema_change='fail'
)
}}
WITH source_data AS (SELECT * FROM {{ ref('model_a') }} )
{% if is_incremental() %}
SELECT id, field1, field2 FROM source_data
{% else %}
SELECT id, field1, field3 FROM source_data
{% endif %}
"""
_MODELS__INCREMENTAL_SYNC_ALL_COLUMNS = """
{{
config(
materialized='incremental',
unique_key='id',
on_schema_change='sync_all_columns'
)
}}
WITH source_data AS (SELECT * FROM {{ ref('model_a') }} )
{% set string_type = dbt.type_string() %}
{% if is_incremental() %}
SELECT id,
cast(field1 as {{string_type}}) as field1,
cast(field3 as {{string_type}}) as field3, -- to validate new fields
cast(field4 as {{string_type}}) AS field4 -- to validate new fields
FROM source_data WHERE id NOT IN (SELECT id from {{ this }} )
{% else %}
select id,
cast(field1 as {{string_type}}) as field1,
cast(field2 as {{string_type}}) as field2
from source_data where id <= 3
{% endif %}
"""
_MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE = """
{{
config(
materialized='incremental',
unique_key='id',
on_schema_change='append_new_columns'
)
}}
{% set string_type = dbt.type_string() %}
WITH source_data AS (SELECT * FROM {{ ref('model_a') }} )
{% if is_incremental() %}
SELECT id,
cast(field1 as {{string_type}}) as field1,
cast(field3 as {{string_type}}) as field3,
cast(field4 as {{string_type}}) as field4
FROM source_data WHERE id NOT IN (SELECT id from {{ this }} )
{% else %}
SELECT id,
cast(field1 as {{string_type}}) as field1,
cast(field2 as {{string_type}}) as field2
FROM source_data where id <= 3
{% endif %}
"""
_MODELS__A = """
{{
config(materialized='table')
}}
with source_data as (
select 1 as id, 'aaa' as field1, 'bbb' as field2, 111 as field3, 'TTT' as field4
union all select 2 as id, 'ccc' as field1, 'ddd' as field2, 222 as field3, 'UUU' as field4
union all select 3 as id, 'eee' as field1, 'fff' as field2, 333 as field3, 'VVV' as field4
union all select 4 as id, 'ggg' as field1, 'hhh' as field2, 444 as field3, 'WWW' as field4
union all select 5 as id, 'iii' as field1, 'jjj' as field2, 555 as field3, 'XXX' as field4
union all select 6 as id, 'kkk' as field1, 'lll' as field2, 666 as field3, 'YYY' as field4
)
select id
,field1
,field2
,field3
,field4
from source_data
"""
_MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_TARGET = """
{{
config(materialized='table')
}}
{% set string_type = dbt.type_string() %}
with source_data as (
select * from {{ ref('model_a') }}
)
select id
,cast(field1 as {{string_type}}) as field1
,cast(field2 as {{string_type}}) as field2
,cast(CASE WHEN id <= 3 THEN NULL ELSE field3 END as {{string_type}}) AS field3
,cast(CASE WHEN id <= 3 THEN NULL ELSE field4 END as {{string_type}}) AS field4
from source_data
"""
_MODELS__INCREMENTAL_APPEND_NEW_COLUMNS = """
{{
config(
materialized='incremental',
unique_key='id',
on_schema_change='append_new_columns'
)
}}
{% set string_type = dbt.type_string() %}
WITH source_data AS (SELECT * FROM {{ ref('model_a') }} )
{% if is_incremental() %}
SELECT id,
cast(field1 as {{string_type}}) as field1,
cast(field2 as {{string_type}}) as field2,
cast(field3 as {{string_type}}) as field3,
cast(field4 as {{string_type}}) as field4
FROM source_data WHERE id NOT IN (SELECT id from {{ this }} )
{% else %}
SELECT id,
cast(field1 as {{string_type}}) as field1,
cast(field2 as {{string_type}}) as field2
FROM source_data where id <= 3
{% endif %}
"""
_MODELS__INCREMENTAL_SYNC_ALL_COLUMNS_TARGET = """
{{
config(materialized='table')
}}
with source_data as (
select * from {{ ref('model_a') }}
)
{% set string_type = dbt.type_string() %}
select id
,cast(field1 as {{string_type}}) as field1
--,field2
,cast(case when id <= 3 then null else field3 end as {{string_type}}) as field3
,cast(case when id <= 3 then null else field4 end as {{string_type}}) as field4
from source_data
order by id
"""
_MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE_TARGET = """
{{
config(materialized='table')
}}
{% set string_type = dbt.type_string() %}
with source_data as (
select * from {{ ref('model_a') }}
)
select id,
cast(field1 as {{string_type}}) as field1,
cast(CASE WHEN id > 3 THEN NULL ELSE field2 END as {{string_type}}) AS field2,
cast(CASE WHEN id <= 3 THEN NULL ELSE field3 END as {{string_type}}) AS field3,
cast(CASE WHEN id <= 3 THEN NULL ELSE field4 END as {{string_type}}) AS field4
from source_data
"""

@@ -1,121 +0,0 @@
from collections import namedtuple
import pytest
from dbt.tests.util import check_relations_equal, run_dbt
models__merge_exclude_columns_sql = """
{{ config(
materialized = 'incremental',
unique_key = 'id',
incremental_strategy='merge',
merge_exclude_columns=['msg']
) }}
{% if not is_incremental() %}
-- data for first invocation of model
select 1 as id, 'hello' as msg, 'blue' as color
union all
select 2 as id, 'goodbye' as msg, 'red' as color
{% else %}
-- data for subsequent incremental update
select 1 as id, 'hey' as msg, 'blue' as color
union all
select 2 as id, 'yo' as msg, 'green' as color
union all
select 3 as id, 'anyway' as msg, 'purple' as color
{% endif %}
"""
seeds__expected_merge_exclude_columns_csv = """id,msg,color
1,hello,blue
2,goodbye,green
3,anyway,purple
"""
ResultHolder = namedtuple(
"ResultHolder",
[
"seed_count",
"model_count",
"seed_rows",
"inc_test_model_count",
"relation",
],
)
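# ResultHolder bundles the row/result counts from each phase so an expected
# scenario can be compared field by field in check_scenario_correctness.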
class BaseMergeExcludeColumns:
@pytest.fixture(scope="class")
def models(self):
return {"merge_exclude_columns.sql": models__merge_exclude_columns_sql}
@pytest.fixture(scope="class")
def seeds(self):
return {"expected_merge_exclude_columns.csv": seeds__expected_merge_exclude_columns_csv}
def update_incremental_model(self, incremental_model):
"""update incremental model after the seed table has been updated"""
model_result_set = run_dbt(["run", "--select", incremental_model])
return len(model_result_set)
def get_test_fields(self, project, seed, incremental_model, update_sql_file):
seed_count = len(run_dbt(["seed", "--select", seed, "--full-refresh"]))
model_count = len(run_dbt(["run", "--select", incremental_model, "--full-refresh"]))
relation = incremental_model
# update seed in anticipation of incremental model update
row_count_query = "select * from {}.{}".format(project.test_schema, seed)
seed_rows = len(project.run_sql(row_count_query, fetch="all"))
# propagate seed state to incremental model according to unique keys
inc_test_model_count = self.update_incremental_model(incremental_model=incremental_model)
return ResultHolder(seed_count, model_count, seed_rows, inc_test_model_count, relation)
def check_scenario_correctness(self, expected_fields, test_case_fields, project):
"""Invoke assertions to verify correct build functionality"""
# 1. test seed(s) should build afresh
assert expected_fields.seed_count == test_case_fields.seed_count
# 2. test model(s) should build afresh
assert expected_fields.model_count == test_case_fields.model_count
# 3. seeds should have intended row counts post update
assert expected_fields.seed_rows == test_case_fields.seed_rows
# 4. incremental test model(s) should be updated
assert expected_fields.inc_test_model_count == test_case_fields.inc_test_model_count
# 5. result table should match intended result set (itself a relation)
check_relations_equal(
project.adapter, [expected_fields.relation, test_case_fields.relation]
)
def test__merge_exclude_columns(self, project):
"""seed should match model after two incremental runs"""
expected_fields = ResultHolder(
seed_count=1,
model_count=1,
inc_test_model_count=1,
seed_rows=3,
relation="expected_merge_exclude_columns",
)
test_case_fields = self.get_test_fields(
project,
seed="expected_merge_exclude_columns",
incremental_model="merge_exclude_columns",
update_sql_file=None,
)
self.check_scenario_correctness(expected_fields, test_case_fields, project)
class TestMergeExcludeColumns(BaseMergeExcludeColumns):
pass

@@ -1,100 +0,0 @@
import pytest
from dbt.tests.util import check_relations_equal, run_dbt
from tests.functional.adapter.incremental.fixtures import (
_MODELS__A,
_MODELS__INCREMENTAL_APPEND_NEW_COLUMNS,
_MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE,
_MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE_TARGET,
_MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_TARGET,
_MODELS__INCREMENTAL_FAIL,
_MODELS__INCREMENTAL_IGNORE,
_MODELS__INCREMENTAL_IGNORE_TARGET,
_MODELS__INCREMENTAL_SYNC_ALL_COLUMNS,
_MODELS__INCREMENTAL_SYNC_ALL_COLUMNS_TARGET,
_MODELS__INCREMENTAL_SYNC_REMOVE_ONLY,
_MODELS__INCREMENTAL_SYNC_REMOVE_ONLY_TARGET,
)
class BaseIncrementalOnSchemaChangeSetup:
@pytest.fixture(scope="class")
def models(self):
return {
"incremental_sync_remove_only.sql": _MODELS__INCREMENTAL_SYNC_REMOVE_ONLY,
"incremental_ignore.sql": _MODELS__INCREMENTAL_IGNORE,
"incremental_sync_remove_only_target.sql": _MODELS__INCREMENTAL_SYNC_REMOVE_ONLY_TARGET,
"incremental_ignore_target.sql": _MODELS__INCREMENTAL_IGNORE_TARGET,
"incremental_fail.sql": _MODELS__INCREMENTAL_FAIL,
"incremental_sync_all_columns.sql": _MODELS__INCREMENTAL_SYNC_ALL_COLUMNS,
"incremental_append_new_columns_remove_one.sql": _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE,
"model_a.sql": _MODELS__A,
"incremental_append_new_columns_target.sql": _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_TARGET,
"incremental_append_new_columns.sql": _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS,
"incremental_sync_all_columns_target.sql": _MODELS__INCREMENTAL_SYNC_ALL_COLUMNS_TARGET,
"incremental_append_new_columns_remove_one_target.sql": _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE_TARGET,
}
def run_twice_and_assert(self, include, compare_source, compare_target, project):
# dbt run (twice)
run_args = ["run"]
if include:
run_args.extend(("--select", include))
results_one = run_dbt(run_args)
assert len(results_one) == 3
results_two = run_dbt(run_args)
assert len(results_two) == 3
check_relations_equal(project.adapter, [compare_source, compare_target])
def run_incremental_append_new_columns(self, project):
select = "model_a incremental_append_new_columns incremental_append_new_columns_target"
compare_source = "incremental_append_new_columns"
compare_target = "incremental_append_new_columns_target"
self.run_twice_and_assert(select, compare_source, compare_target, project)
def run_incremental_append_new_columns_remove_one(self, project):
select = "model_a incremental_append_new_columns_remove_one incremental_append_new_columns_remove_one_target"
compare_source = "incremental_append_new_columns_remove_one"
compare_target = "incremental_append_new_columns_remove_one_target"
self.run_twice_and_assert(select, compare_source, compare_target, project)
def run_incremental_sync_all_columns(self, project):
select = "model_a incremental_sync_all_columns incremental_sync_all_columns_target"
compare_source = "incremental_sync_all_columns"
compare_target = "incremental_sync_all_columns_target"
self.run_twice_and_assert(select, compare_source, compare_target, project)
def run_incremental_sync_remove_only(self, project):
select = "model_a incremental_sync_remove_only incremental_sync_remove_only_target"
compare_source = "incremental_sync_remove_only"
compare_target = "incremental_sync_remove_only_target"
self.run_twice_and_assert(select, compare_source, compare_target, project)
class BaseIncrementalOnSchemaChange(BaseIncrementalOnSchemaChangeSetup):
def test_run_incremental_ignore(self, project):
select = "model_a incremental_ignore incremental_ignore_target"
compare_source = "incremental_ignore"
compare_target = "incremental_ignore_target"
self.run_twice_and_assert(select, compare_source, compare_target, project)
def test_run_incremental_append_new_columns(self, project):
self.run_incremental_append_new_columns(project)
self.run_incremental_append_new_columns_remove_one(project)
def test_run_incremental_sync_all_columns(self, project):
self.run_incremental_sync_all_columns(project)
self.run_incremental_sync_remove_only(project)
def test_run_incremental_fail_on_schema_change(self, project):
select = "model_a incremental_fail"
run_dbt(["run", "--models", select, "--full-refresh"])
results_two = run_dbt(["run", "--models", select], expect_pass=False)
assert "Compilation Error" in results_two[1].message
class TestIncrementalOnSchemaChange(BaseIncrementalOnSchemaChange):
pass


@@ -1,151 +0,0 @@
from collections import namedtuple
import pytest
from dbt.tests.util import check_relations_equal, run_dbt
models__delete_insert_incremental_predicates_sql = """
{{ config(
materialized = 'incremental',
unique_key = 'id'
) }}
{% if not is_incremental() %}
select 1 as id, 'hello' as msg, 'blue' as color
union all
select 2 as id, 'goodbye' as msg, 'red' as color
{% else %}
-- delete will not happen on the above record where id = 2, so a new record will be inserted instead
select 1 as id, 'hey' as msg, 'blue' as color
union all
select 2 as id, 'yo' as msg, 'green' as color
union all
select 3 as id, 'anyway' as msg, 'purple' as color
{% endif %}
"""
seeds__expected_delete_insert_incremental_predicates_csv = """id,msg,color
1,hey,blue
2,goodbye,red
2,yo,green
3,anyway,purple
"""
ResultHolder = namedtuple(
"ResultHolder",
[
"seed_count",
"model_count",
"seed_rows",
"inc_test_model_count",
"opt_model_count",
"relation",
],
)
class BaseIncrementalPredicates:
@pytest.fixture(scope="class")
def models(self):
return {
"delete_insert_incremental_predicates.sql": models__delete_insert_incremental_predicates_sql
}
@pytest.fixture(scope="class")
def seeds(self):
return {
"expected_delete_insert_incremental_predicates.csv": seeds__expected_delete_insert_incremental_predicates_csv
}
@pytest.fixture(scope="class")
def project_config_update(self):
return {
"models": {
"+incremental_predicates": ["id != 2"],
"+incremental_strategy": "delete+insert",
}
}
def update_incremental_model(self, incremental_model):
"""update incremental model after the seed table has been updated"""
model_result_set = run_dbt(["run", "--select", incremental_model])
return len(model_result_set)
def get_test_fields(
self, project, seed, incremental_model, update_sql_file, opt_model_count=None
):
seed_count = len(run_dbt(["seed", "--select", seed, "--full-refresh"]))
model_count = len(run_dbt(["run", "--select", incremental_model, "--full-refresh"]))
        # the incremental model name is reused as the relation for comparison
relation = incremental_model
# update seed in anticipation of incremental model update
row_count_query = "select * from {}.{}".format(project.test_schema, seed)
# project.run_sql_file(Path("seeds") / Path(update_sql_file + ".sql"))
seed_rows = len(project.run_sql(row_count_query, fetch="all"))
# propagate seed state to incremental model according to unique keys
inc_test_model_count = self.update_incremental_model(incremental_model=incremental_model)
return ResultHolder(
seed_count, model_count, seed_rows, inc_test_model_count, opt_model_count, relation
)
def check_scenario_correctness(self, expected_fields, test_case_fields, project):
"""Invoke assertions to verify correct build functionality"""
# 1. test seed(s) should build afresh
assert expected_fields.seed_count == test_case_fields.seed_count
# 2. test model(s) should build afresh
assert expected_fields.model_count == test_case_fields.model_count
# 3. seeds should have intended row counts post update
assert expected_fields.seed_rows == test_case_fields.seed_rows
# 4. incremental test model(s) should be updated
assert expected_fields.inc_test_model_count == test_case_fields.inc_test_model_count
# 5. extra incremental model(s) should be built; optional since
# comparison may be between an incremental model and seed
if expected_fields.opt_model_count and test_case_fields.opt_model_count:
assert expected_fields.opt_model_count == test_case_fields.opt_model_count
# 6. result table should match intended result set (itself a relation)
check_relations_equal(
project.adapter, [expected_fields.relation, test_case_fields.relation]
)
def get_expected_fields(self, relation, seed_rows, opt_model_count=None):
return ResultHolder(
seed_count=1,
model_count=1,
inc_test_model_count=1,
seed_rows=seed_rows,
opt_model_count=opt_model_count,
relation=relation,
)
def test__incremental_predicates(self, project):
"""seed should match model after two incremental runs"""
expected_fields = self.get_expected_fields(
relation="expected_delete_insert_incremental_predicates", seed_rows=4
)
test_case_fields = self.get_test_fields(
project,
seed="expected_delete_insert_incremental_predicates",
incremental_model="delete_insert_incremental_predicates",
update_sql_file=None,
)
self.check_scenario_correctness(expected_fields, test_case_fields, project)
class TestIncrementalPredicatesDeleteInsert(BaseIncrementalPredicates):
pass
class TestPredicatesDeleteInsert(BaseIncrementalPredicates):
@pytest.fixture(scope="class")
def project_config_update(self):
return {"models": {"+predicates": ["id != 2"], "+incremental_strategy": "delete+insert"}}


@@ -1,567 +0,0 @@
from collections import namedtuple
from pathlib import Path
import pytest
from dbt.artifacts.schemas.results import RunStatus
from dbt.tests.util import check_relations_equal, run_dbt
models__trinary_unique_key_list_sql = """
-- a multi-argument unique key list should see overwriting on rows in the model
-- where all unique key fields apply
{{
config(
materialized='incremental',
unique_key=['state', 'county', 'city']
)
}}
select
state as state,
county as county,
city as city,
last_visit_date as last_visit_date
from {{ ref('seed') }}
{% if is_incremental() %}
where last_visit_date > (select max(last_visit_date) from {{ this }})
{% endif %}
"""
models__nontyped_trinary_unique_key_list_sql = """
-- a multi-argument unique key list should see overwriting on rows in the model
-- where all unique key fields apply
-- N.B. needed for direct comparison with seed
{{
config(
materialized='incremental',
unique_key=['state', 'county', 'city']
)
}}
select
state as state,
county as county,
city as city,
last_visit_date as last_visit_date
from {{ ref('seed') }}
{% if is_incremental() %}
where last_visit_date > (select max(last_visit_date) from {{ this }})
{% endif %}
"""
models__unary_unique_key_list_sql = """
-- a one-argument unique key list should result in overwriting semantics for
-- that one matching field
{{
config(
materialized='incremental',
unique_key=['state']
)
}}
select
state as state,
county as county,
city as city,
last_visit_date as last_visit_date
from {{ ref('seed') }}
{% if is_incremental() %}
where last_visit_date > (select max(last_visit_date) from {{ this }})
{% endif %}
"""
models__not_found_unique_key_sql = """
-- a model with a unique key not found in the table itself will error out
{{
config(
materialized='incremental',
unique_key='thisisnotacolumn'
)
}}
select
*
from {{ ref('seed') }}
{% if is_incremental() %}
where last_visit_date > (select max(last_visit_date) from {{ this }})
{% endif %}
"""
models__empty_unique_key_list_sql = """
-- a model with an empty-list unique key should build normally
{{
config(
materialized='incremental',
unique_key=[]
)
}}
select * from {{ ref('seed') }}
{% if is_incremental() %}
where last_visit_date > (select max(last_visit_date) from {{ this }})
{% endif %}
"""
models__no_unique_key_sql = """
-- with no unique key specified, there should be no special build behavior
{{
config(
materialized='incremental'
)
}}
select
*
from {{ ref('seed') }}
{% if is_incremental() %}
where last_visit_date > (select max(last_visit_date) from {{ this }})
{% endif %}
"""
models__empty_str_unique_key_sql = """
-- a model with an empty-string unique key should build normally
{{
config(
materialized='incremental',
unique_key=''
)
}}
select
*
from {{ ref('seed') }}
{% if is_incremental() %}
where last_visit_date > (select max(last_visit_date) from {{ this }})
{% endif %}
"""
models__str_unique_key_sql = """
-- a unique key given as a string should trigger overwrite behavior when
-- the source has entries in conflict (i.e. more than one row per unique key
-- combination)
{{
config(
materialized='incremental',
unique_key='state'
)
}}
select
state as state,
county as county,
city as city,
last_visit_date as last_visit_date
from {{ ref('seed') }}
{% if is_incremental() %}
where last_visit_date > (select max(last_visit_date) from {{ this }})
{% endif %}
"""
models__duplicated_unary_unique_key_list_sql = """
{{
config(
materialized='incremental',
unique_key=['state', 'state']
)
}}
select
state as state,
county as county,
city as city,
last_visit_date as last_visit_date
from {{ ref('seed') }}
{% if is_incremental() %}
where last_visit_date > (select max(last_visit_date) from {{ this }})
{% endif %}
"""
models__not_found_unique_key_list_sql = """
-- a unique key list with any element not in the model itself should error out
{{
config(
materialized='incremental',
unique_key=['state', 'thisisnotacolumn']
)
}}
select * from {{ ref('seed') }}
"""
models__expected__one_str__overwrite_sql = """
{{
config(
materialized='table'
)
}}
select
'CT' as state,
'Hartford' as county,
'Hartford' as city,
cast('2022-02-14' as date) as last_visit_date
union all
select 'MA','Suffolk','Boston','2020-02-12'
union all
select 'NJ','Mercer','Trenton','2022-01-01'
union all
select 'NY','Kings','Brooklyn','2021-04-02'
union all
select 'NY','New York','Manhattan','2021-04-01'
union all
select 'PA','Philadelphia','Philadelphia','2021-05-21'
"""
models__expected__unique_key_list__inplace_overwrite_sql = """
{{
config(
materialized='table'
)
}}
select
'CT' as state,
'Hartford' as county,
'Hartford' as city,
cast('2022-02-14' as date) as last_visit_date
union all
select 'MA','Suffolk','Boston','2020-02-12'
union all
select 'NJ','Mercer','Trenton','2022-01-01'
union all
select 'NY','Kings','Brooklyn','2021-04-02'
union all
select 'NY','New York','Manhattan','2021-04-01'
union all
select 'PA','Philadelphia','Philadelphia','2021-05-21'
"""
seeds__duplicate_insert_sql = """
-- Insert statement which when applied to seed.csv triggers the inplace
-- overwrite strategy of incremental models. Seed and incremental model
-- diverge.
-- insert new row, which should not be in incremental model
-- with primary or first three columns unique
insert into {schema}.seed
(state, county, city, last_visit_date)
values ('CT','Hartford','Hartford','2022-02-14');
"""
seeds__seed_csv = """state,county,city,last_visit_date
CT,Hartford,Hartford,2020-09-23
MA,Suffolk,Boston,2020-02-12
NJ,Mercer,Trenton,2022-01-01
NY,Kings,Brooklyn,2021-04-02
NY,New York,Manhattan,2021-04-01
PA,Philadelphia,Philadelphia,2021-05-21
"""
seeds__add_new_rows_sql = """
-- Insert statement which when applied to seed.csv sees incremental model
-- grow in size while not (necessarily) diverging from the seed itself.
-- insert two new rows, both of which should be in incremental model
-- with any unique columns
insert into {schema}.seed
(state, county, city, last_visit_date)
values ('WA','King','Seattle','2022-02-01');
insert into {schema}.seed
(state, county, city, last_visit_date)
values ('CA','Los Angeles','Los Angeles','2022-02-01');
"""
ResultHolder = namedtuple(
"ResultHolder",
[
"seed_count",
"model_count",
"seed_rows",
"inc_test_model_count",
"opt_model_count",
"relation",
],
)
class BaseIncrementalUniqueKey:
@pytest.fixture(scope="class")
def models(self):
return {
"trinary_unique_key_list.sql": models__trinary_unique_key_list_sql,
"nontyped_trinary_unique_key_list.sql": models__nontyped_trinary_unique_key_list_sql,
"unary_unique_key_list.sql": models__unary_unique_key_list_sql,
"not_found_unique_key.sql": models__not_found_unique_key_sql,
"empty_unique_key_list.sql": models__empty_unique_key_list_sql,
"no_unique_key.sql": models__no_unique_key_sql,
"empty_str_unique_key.sql": models__empty_str_unique_key_sql,
"str_unique_key.sql": models__str_unique_key_sql,
"duplicated_unary_unique_key_list.sql": models__duplicated_unary_unique_key_list_sql,
"not_found_unique_key_list.sql": models__not_found_unique_key_list_sql,
"expected": {
"one_str__overwrite.sql": models__expected__one_str__overwrite_sql,
"unique_key_list__inplace_overwrite.sql": models__expected__unique_key_list__inplace_overwrite_sql,
},
}
@pytest.fixture(scope="class")
def seeds(self):
return {
"duplicate_insert.sql": seeds__duplicate_insert_sql,
"seed.csv": seeds__seed_csv,
"add_new_rows.sql": seeds__add_new_rows_sql,
}
@pytest.fixture(autouse=True)
def clean_up(self, project):
yield
with project.adapter.connection_named("__test"):
relation = project.adapter.Relation.create(
database=project.database, schema=project.test_schema
)
project.adapter.drop_schema(relation)
def update_incremental_model(self, incremental_model):
"""update incremental model after the seed table has been updated"""
model_result_set = run_dbt(["run", "--select", incremental_model])
return len(model_result_set)
def get_test_fields(
self, project, seed, incremental_model, update_sql_file, opt_model_count=None
):
"""build a test case and return values for assertions
[INFO] Models must be in place to test incremental model
construction and merge behavior. Database touches are side
effects to extract counts (which speak to health of unique keys)."""
        # idempotently create some number of seeds and incremental models
seed_count = len(run_dbt(["seed", "--select", seed, "--full-refresh"]))
model_count = len(run_dbt(["run", "--select", incremental_model, "--full-refresh"]))
        # the incremental model name is reused as the relation for comparison
relation = incremental_model
# update seed in anticipation of incremental model update
row_count_query = "select * from {}.{}".format(project.test_schema, seed)
project.run_sql_file(Path("seeds") / Path(update_sql_file + ".sql"))
seed_rows = len(project.run_sql(row_count_query, fetch="all"))
# propagate seed state to incremental model according to unique keys
inc_test_model_count = self.update_incremental_model(incremental_model=incremental_model)
return ResultHolder(
seed_count, model_count, seed_rows, inc_test_model_count, opt_model_count, relation
)
def check_scenario_correctness(self, expected_fields, test_case_fields, project):
"""Invoke assertions to verify correct build functionality"""
# 1. test seed(s) should build afresh
assert expected_fields.seed_count == test_case_fields.seed_count
# 2. test model(s) should build afresh
assert expected_fields.model_count == test_case_fields.model_count
# 3. seeds should have intended row counts post update
assert expected_fields.seed_rows == test_case_fields.seed_rows
# 4. incremental test model(s) should be updated
assert expected_fields.inc_test_model_count == test_case_fields.inc_test_model_count
# 5. extra incremental model(s) should be built; optional since
# comparison may be between an incremental model and seed
if expected_fields.opt_model_count and test_case_fields.opt_model_count:
assert expected_fields.opt_model_count == test_case_fields.opt_model_count
# 6. result table should match intended result set (itself a relation)
check_relations_equal(
project.adapter, [expected_fields.relation, test_case_fields.relation]
)
def get_expected_fields(self, relation, seed_rows, opt_model_count=None):
return ResultHolder(
seed_count=1,
model_count=1,
seed_rows=seed_rows,
inc_test_model_count=1,
opt_model_count=opt_model_count,
relation=relation,
)
def fail_to_build_inc_missing_unique_key_column(self, incremental_model_name):
"""should pass back error state when trying build an incremental
model whose unique key or keylist includes a column missing
from the incremental model"""
seed_count = len(run_dbt(["seed", "--select", "seed", "--full-refresh"])) # noqa:F841
        # unique keys are not applied on the first run, so two runs are needed
run_dbt(
["run", "--select", incremental_model_name, "--full-refresh"],
expect_pass=True,
)
run_result = run_dbt(
["run", "--select", incremental_model_name], expect_pass=False
).results[0]
return run_result.status, run_result.message
# no unique_key test
def test__no_unique_keys(self, project):
"""with no unique keys, seed and model should match"""
expected_fields = self.get_expected_fields(relation="seed", seed_rows=8)
test_case_fields = self.get_test_fields(
project, seed="seed", incremental_model="no_unique_key", update_sql_file="add_new_rows"
)
self.check_scenario_correctness(expected_fields, test_case_fields, project)
# unique_key as str tests
def test__empty_str_unique_key(self, project):
"""with empty string for unique key, seed and model should match"""
expected_fields = self.get_expected_fields(relation="seed", seed_rows=8)
test_case_fields = self.get_test_fields(
project,
seed="seed",
incremental_model="empty_str_unique_key",
update_sql_file="add_new_rows",
)
self.check_scenario_correctness(expected_fields, test_case_fields, project)
def test__one_unique_key(self, project):
"""with one unique key, model will overwrite existing row"""
expected_fields = self.get_expected_fields(
relation="one_str__overwrite", seed_rows=7, opt_model_count=1
)
test_case_fields = self.get_test_fields(
project,
seed="seed",
incremental_model="str_unique_key",
update_sql_file="duplicate_insert",
opt_model_count=self.update_incremental_model("one_str__overwrite"),
)
self.check_scenario_correctness(expected_fields, test_case_fields, project)
def test__bad_unique_key(self, project):
"""expect compilation error from unique key not being a column"""
(status, exc) = self.fail_to_build_inc_missing_unique_key_column(
incremental_model_name="not_found_unique_key"
)
assert status == RunStatus.Error
assert "thisisnotacolumn" in exc.lower()
# test unique_key as list
def test__empty_unique_key_list(self, project):
"""with no unique keys, seed and model should match"""
expected_fields = self.get_expected_fields(relation="seed", seed_rows=8)
test_case_fields = self.get_test_fields(
project,
seed="seed",
incremental_model="empty_unique_key_list",
update_sql_file="add_new_rows",
)
self.check_scenario_correctness(expected_fields, test_case_fields, project)
def test__unary_unique_key_list(self, project):
"""with one unique key, model will overwrite existing row"""
expected_fields = self.get_expected_fields(
relation="unique_key_list__inplace_overwrite", seed_rows=7, opt_model_count=1
)
test_case_fields = self.get_test_fields(
project,
seed="seed",
incremental_model="unary_unique_key_list",
update_sql_file="duplicate_insert",
opt_model_count=self.update_incremental_model("unique_key_list__inplace_overwrite"),
)
self.check_scenario_correctness(expected_fields, test_case_fields, project)
def test__duplicated_unary_unique_key_list(self, project):
"""with two of the same unique key, model will overwrite existing row"""
expected_fields = self.get_expected_fields(
relation="unique_key_list__inplace_overwrite", seed_rows=7, opt_model_count=1
)
test_case_fields = self.get_test_fields(
project,
seed="seed",
incremental_model="duplicated_unary_unique_key_list",
update_sql_file="duplicate_insert",
opt_model_count=self.update_incremental_model("unique_key_list__inplace_overwrite"),
)
self.check_scenario_correctness(expected_fields, test_case_fields, project)
def test__trinary_unique_key_list(self, project):
"""with three unique keys, model will overwrite existing row"""
expected_fields = self.get_expected_fields(
relation="unique_key_list__inplace_overwrite", seed_rows=7, opt_model_count=1
)
test_case_fields = self.get_test_fields(
project,
seed="seed",
incremental_model="trinary_unique_key_list",
update_sql_file="duplicate_insert",
opt_model_count=self.update_incremental_model("unique_key_list__inplace_overwrite"),
)
self.check_scenario_correctness(expected_fields, test_case_fields, project)
def test__trinary_unique_key_list_no_update(self, project):
"""even with three unique keys, adding distinct rows to seed does not
cause seed and model to diverge"""
expected_fields = self.get_expected_fields(relation="seed", seed_rows=8)
test_case_fields = self.get_test_fields(
project,
seed="seed",
incremental_model="nontyped_trinary_unique_key_list",
update_sql_file="add_new_rows",
)
self.check_scenario_correctness(expected_fields, test_case_fields, project)
def test__bad_unique_key_list(self, project):
"""expect compilation error from unique key not being a column"""
(status, exc) = self.fail_to_build_inc_missing_unique_key_column(
incremental_model_name="not_found_unique_key_list"
)
assert status == RunStatus.Error
assert "thisisnotacolumn" in exc.lower()
class TestIncrementalUniqueKey(BaseIncrementalUniqueKey):
pass


@@ -1,241 +0,0 @@
from typing import Optional, Tuple
import pytest
from dbt.adapters.base.relation import BaseRelation
from dbt.adapters.contracts.relation import RelationType
from dbt.tests.util import (
assert_message_in_logs,
get_model_file,
run_dbt,
run_dbt_and_capture,
set_model_file,
)
from tests.functional.adapter.materialized_view.files import (
MY_MATERIALIZED_VIEW,
MY_SEED,
MY_TABLE,
MY_VIEW,
)
class MaterializedViewBasic:
"""
Tests basic functionality:
- create
- idempotent create
- full refresh
- materialized views can replace table/view
- table/view can replace materialized views
- the object is an actual materialized view, not a traditional view
"""
"""
Configure these
"""
@staticmethod
def insert_record(project, table: BaseRelation, record: Tuple[int, int]):
raise NotImplementedError(
"To use this test, please implement `insert_record`, inherited from `MaterializedViewsBasic`."
)
@staticmethod
def refresh_materialized_view(project, materialized_view: BaseRelation):
raise NotImplementedError(
"To use this test, please implement `refresh_materialized_view`, inherited from `MaterializedViewsBasic`."
)
@staticmethod
def query_row_count(project, relation: BaseRelation) -> int:
raise NotImplementedError(
"To use this test, please implement `query_row_count`, inherited from `MaterializedViewsBasic`."
)
@staticmethod
def query_relation_type(project, relation: BaseRelation) -> Optional[str]:
raise NotImplementedError(
"To use this test, please implement `query_relation_type`, inherited from `MaterializedViewsBasic`."
)
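    # A minimal sketch of one such override (hypothetical, assuming a
    # Postgres-style catalog where pg_class.relkind is 'm' for materialized
    # views, 'r' for tables, and 'v' for views):
    #
    #     @staticmethod
    #     def query_relation_type(project, relation: BaseRelation) -> Optional[str]:
    #         kind = project.run_sql(
    #             f"select relkind from pg_class where relname = '{relation.identifier}'",
    #             fetch="one",
    #         )[0]
    #         return {"m": "materialized_view", "r": "table", "v": "view"}.get(kind)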
"""
Configure these if needed
"""
@pytest.fixture(scope="class", autouse=True)
def seeds(self):
return {"my_seed.csv": MY_SEED}
@pytest.fixture(scope="class", autouse=True)
def models(self):
yield {
"my_table.sql": MY_TABLE,
"my_view.sql": MY_VIEW,
"my_materialized_view.sql": MY_MATERIALIZED_VIEW,
}
"""
Don't configure these unless absolutely necessary
"""
@pytest.fixture(scope="class")
def my_materialized_view(self, project) -> BaseRelation:
return project.adapter.Relation.create(
identifier="my_materialized_view",
schema=project.test_schema,
database=project.database,
type=RelationType.MaterializedView,
)
@pytest.fixture(scope="class")
def my_view(self, project) -> BaseRelation:
return project.adapter.Relation.create(
identifier="my_view",
schema=project.test_schema,
database=project.database,
type=RelationType.View,
)
@pytest.fixture(scope="class")
def my_table(self, project) -> BaseRelation:
return project.adapter.Relation.create(
identifier="my_table",
schema=project.test_schema,
database=project.database,
type=RelationType.Table,
)
@pytest.fixture(scope="class")
def my_seed(self, project) -> BaseRelation:
return project.adapter.Relation.create(
identifier="my_seed",
schema=project.test_schema,
database=project.database,
type=RelationType.Table,
)
@staticmethod
def swap_table_to_materialized_view(project, table):
initial_model = get_model_file(project, table)
new_model = initial_model.replace(
"materialized='table'", "materialized='materialized_view'"
)
set_model_file(project, table, new_model)
@staticmethod
def swap_view_to_materialized_view(project, view):
initial_model = get_model_file(project, view)
new_model = initial_model.replace(
"materialized='view'", "materialized='materialized_view'"
)
set_model_file(project, view, new_model)
@staticmethod
def swap_materialized_view_to_table(project, materialized_view):
initial_model = get_model_file(project, materialized_view)
new_model = initial_model.replace(
"materialized='materialized_view'", "materialized='table'"
)
set_model_file(project, materialized_view, new_model)
@staticmethod
def swap_materialized_view_to_view(project, materialized_view):
initial_model = get_model_file(project, materialized_view)
new_model = initial_model.replace(
"materialized='materialized_view'", "materialized='view'"
)
set_model_file(project, materialized_view, new_model)
@pytest.fixture(scope="function", autouse=True)
def setup(self, project, my_materialized_view):
run_dbt(["seed"])
run_dbt(["run", "--models", my_materialized_view.identifier, "--full-refresh"])
# the tests touch these files, store their contents in memory
initial_model = get_model_file(project, my_materialized_view)
yield
# and then reset them after the test runs
set_model_file(project, my_materialized_view, initial_model)
project.run_sql(f"drop schema if exists {project.test_schema} cascade")
def test_materialized_view_create(self, project, my_materialized_view):
# setup creates it; verify it's there
assert self.query_relation_type(project, my_materialized_view) == "materialized_view"
def test_materialized_view_create_idempotent(self, project, my_materialized_view):
# setup creates it once; verify it's there and run once
assert self.query_relation_type(project, my_materialized_view) == "materialized_view"
run_dbt(["run", "--models", my_materialized_view.identifier])
assert self.query_relation_type(project, my_materialized_view) == "materialized_view"
def test_materialized_view_full_refresh(self, project, my_materialized_view):
_, logs = run_dbt_and_capture(
["--debug", "run", "--models", my_materialized_view.identifier, "--full-refresh"]
)
assert self.query_relation_type(project, my_materialized_view) == "materialized_view"
assert_message_in_logs(f"Applying REPLACE to: {my_materialized_view}", logs)
def test_materialized_view_replaces_table(self, project, my_table):
run_dbt(["run", "--models", my_table.identifier])
assert self.query_relation_type(project, my_table) == "table"
self.swap_table_to_materialized_view(project, my_table)
run_dbt(["run", "--models", my_table.identifier])
assert self.query_relation_type(project, my_table) == "materialized_view"
def test_materialized_view_replaces_view(self, project, my_view):
run_dbt(["run", "--models", my_view.identifier])
assert self.query_relation_type(project, my_view) == "view"
self.swap_view_to_materialized_view(project, my_view)
run_dbt(["run", "--models", my_view.identifier])
assert self.query_relation_type(project, my_view) == "materialized_view"
def test_table_replaces_materialized_view(self, project, my_materialized_view):
run_dbt(["run", "--models", my_materialized_view.identifier])
assert self.query_relation_type(project, my_materialized_view) == "materialized_view"
self.swap_materialized_view_to_table(project, my_materialized_view)
run_dbt(["run", "--models", my_materialized_view.identifier])
assert self.query_relation_type(project, my_materialized_view) == "table"
def test_view_replaces_materialized_view(self, project, my_materialized_view):
run_dbt(["run", "--models", my_materialized_view.identifier])
assert self.query_relation_type(project, my_materialized_view) == "materialized_view"
self.swap_materialized_view_to_view(project, my_materialized_view)
run_dbt(["run", "--models", my_materialized_view.identifier])
assert self.query_relation_type(project, my_materialized_view) == "view"
def test_materialized_view_only_updates_after_refresh(
self, project, my_materialized_view, my_seed
):
# poll database
table_start = self.query_row_count(project, my_seed)
view_start = self.query_row_count(project, my_materialized_view)
# insert new record in table
self.insert_record(project, my_seed, (4, 400))
# poll database
table_mid = self.query_row_count(project, my_seed)
view_mid = self.query_row_count(project, my_materialized_view)
# refresh the materialized view
self.refresh_materialized_view(project, my_materialized_view)
# poll database
table_end = self.query_row_count(project, my_seed)
view_end = self.query_row_count(project, my_materialized_view)
# new records were inserted in the table but didn't show up in the view until it was refreshed
assert table_start < table_mid == table_end
assert view_start == view_mid < view_end


@@ -1,263 +0,0 @@
from typing import Optional
import pytest
from dbt.adapters.base.relation import BaseRelation
from dbt.adapters.contracts.relation import RelationType
from dbt.tests.util import (
assert_message_in_logs,
get_model_file,
run_dbt,
run_dbt_and_capture,
set_model_file,
)
from dbt_common.contracts.config.materialization import OnConfigurationChangeOption
from tests.functional.adapter.materialized_view.files import (
MY_MATERIALIZED_VIEW,
MY_SEED,
)
class MaterializedViewChanges:
"""
Tests change management functionality:
- apply small changes via alter
- apply large changes via replace
- small changes will not be made if full refresh is also happening
- continue if changes detected and configured to continue
- full refresh is available even if changes are detected and configured to continue
- fail if changes are detected and configured to fail
- full refresh is available even if changes are detected and configured to fail
To use this class, subclass it and configure it in the adapter. Then create a test class
that inherits both from the adapter's subclass and one of the mixins below. This avoids needing to
configure this class three times.
"""
"""
Configure these
"""
@staticmethod
def check_start_state(project, materialized_view):
"""
Check the starting state; this should align with `files.MY_MATERIALIZED_VIEW`.
"""
raise NotImplementedError(
"To use this test, please implement `check_start_state`,"
" inherited from `MaterializedViewsChanges`."
)
@staticmethod
def change_config_via_alter(project, materialized_view):
"""
        Should be a change that can be applied without dropping the materialized view.
If there are no such changes, inherit the corresponding tests and mark them with
`@pytest.mark.skip()`.
"""
pass
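        # One known implementation (dbt-postgres) rewrites the model file so a
        # single alterable option changes, mirroring the swap_* helpers in
        # `MaterializedViewBasic`; the `auto_refresh` knob below is hypothetical
        # and stands in for whatever config the adapter can ALTER in place:
        #
        #     initial_model = get_model_file(project, materialized_view)
        #     new_model = initial_model.replace(
        #         "auto_refresh='no'", "auto_refresh='yes'"
        #     )
        #     set_model_file(project, materialized_view, new_model)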
@staticmethod
def check_state_alter_change_is_applied(project, materialized_view):
"""
Verify that the changes in `change_config_via_alter` were applied.
"""
raise NotImplementedError(
"To use this test, please implement `change_config_via_alter` and"
" `check_state_alter_change_is_applied`,"
" inherited from `MaterializedViewsChanges`."
)
@staticmethod
def change_config_via_replace(project, materialized_view):
"""
        Should be a change that will trigger dropping the materialized view.
If there are no such changes, inherit the corresponding tests and mark them with
`@pytest.mark.skip()`.
"""
pass
@staticmethod
def check_state_replace_change_is_applied(project, materialized_view):
"""
Verify that the changes in `change_config_via_replace` were applied.
This is independent of `check_state_alter_change_is_applied`.
"""
raise NotImplementedError(
"To use this test, please implement `change_config_via_replace` and"
" `check_state_replace_change_is_applied`,"
" inherited from `MaterializedViewsChanges`."
)
@staticmethod
def query_relation_type(project, relation: BaseRelation) -> Optional[str]:
raise NotImplementedError(
"To use this test, please implement `query_relation_type`, inherited from `MaterializedViewsChanges`."
)
"""
Configure these if needed
"""
@pytest.fixture(scope="class", autouse=True)
def seeds(self):
yield {"my_seed.csv": MY_SEED}
@pytest.fixture(scope="class", autouse=True)
def models(self):
yield {"my_materialized_view.sql": MY_MATERIALIZED_VIEW}
"""
Don't configure these unless absolutely necessary
"""
@pytest.fixture(scope="class")
def my_materialized_view(self, project) -> BaseRelation:
return project.adapter.Relation.create(
identifier="my_materialized_view",
schema=project.test_schema,
database=project.database,
type=RelationType.MaterializedView,
)
@pytest.fixture(scope="function", autouse=True)
def setup(self, project, my_materialized_view):
        # make sure the database object reflects the model file at the start of each test
run_dbt(["seed"])
run_dbt(["run", "--models", my_materialized_view.identifier, "--full-refresh"])
# the tests touch these files, store their contents in memory
initial_model = get_model_file(project, my_materialized_view)
yield
# and then reset them after the test runs
set_model_file(project, my_materialized_view, initial_model)
# ensure clean slate each method
project.run_sql(f"drop schema if exists {project.test_schema} cascade")
def test_full_refresh_occurs_with_changes(self, project, my_materialized_view):
self.change_config_via_alter(project, my_materialized_view)
self.change_config_via_replace(project, my_materialized_view)
_, logs = run_dbt_and_capture(
["--debug", "run", "--models", my_materialized_view.identifier, "--full-refresh"]
)
assert self.query_relation_type(project, my_materialized_view) == "materialized_view"
assert_message_in_logs(f"Applying ALTER to: {my_materialized_view}", logs, False)
assert_message_in_logs(f"Applying REPLACE to: {my_materialized_view}", logs)
class MaterializedViewChangesApplyMixin:
@pytest.fixture(scope="class")
def project_config_update(self):
return {"models": {"on_configuration_change": OnConfigurationChangeOption.Apply.value}}
def test_change_is_applied_via_alter(self, project, my_materialized_view):
self.check_start_state(project, my_materialized_view)
self.change_config_via_alter(project, my_materialized_view)
_, logs = run_dbt_and_capture(["--debug", "run", "--models", my_materialized_view.name])
self.check_state_alter_change_is_applied(project, my_materialized_view)
assert_message_in_logs(f"Applying ALTER to: {my_materialized_view}", logs)
assert_message_in_logs(f"Applying REPLACE to: {my_materialized_view}", logs, False)
def test_change_is_applied_via_replace(self, project, my_materialized_view):
self.check_start_state(project, my_materialized_view)
self.change_config_via_alter(project, my_materialized_view)
self.change_config_via_replace(project, my_materialized_view)
_, logs = run_dbt_and_capture(["--debug", "run", "--models", my_materialized_view.name])
self.check_state_alter_change_is_applied(project, my_materialized_view)
self.check_state_replace_change_is_applied(project, my_materialized_view)
assert_message_in_logs(f"Applying REPLACE to: {my_materialized_view}", logs)
class MaterializedViewChangesContinueMixin:
@pytest.fixture(scope="class")
def project_config_update(self):
return {"models": {"on_configuration_change": OnConfigurationChangeOption.Continue.value}}
def test_change_is_not_applied_via_alter(self, project, my_materialized_view):
self.check_start_state(project, my_materialized_view)
self.change_config_via_alter(project, my_materialized_view)
_, logs = run_dbt_and_capture(["--debug", "run", "--models", my_materialized_view.name])
self.check_start_state(project, my_materialized_view)
assert_message_in_logs(
f"Configuration changes were identified and `on_configuration_change` was set"
f" to `continue` for `{my_materialized_view}`",
logs,
)
assert_message_in_logs(f"Applying ALTER to: {my_materialized_view}", logs, False)
assert_message_in_logs(f"Applying REPLACE to: {my_materialized_view}", logs, False)
def test_change_is_not_applied_via_replace(self, project, my_materialized_view):
self.check_start_state(project, my_materialized_view)
self.change_config_via_alter(project, my_materialized_view)
self.change_config_via_replace(project, my_materialized_view)
_, logs = run_dbt_and_capture(["--debug", "run", "--models", my_materialized_view.name])
self.check_start_state(project, my_materialized_view)
assert_message_in_logs(
f"Configuration changes were identified and `on_configuration_change` was set"
f" to `continue` for `{my_materialized_view}`",
logs,
)
assert_message_in_logs(f"Applying ALTER to: {my_materialized_view}", logs, False)
assert_message_in_logs(f"Applying REPLACE to: {my_materialized_view}", logs, False)
class MaterializedViewChangesFailMixin:
@pytest.fixture(scope="class")
def project_config_update(self):
return {"models": {"on_configuration_change": OnConfigurationChangeOption.Fail.value}}
def test_change_is_not_applied_via_alter(self, project, my_materialized_view):
self.check_start_state(project, my_materialized_view)
self.change_config_via_alter(project, my_materialized_view)
_, logs = run_dbt_and_capture(
["--debug", "run", "--models", my_materialized_view.name], expect_pass=False
)
self.check_start_state(project, my_materialized_view)
assert_message_in_logs(
f"Configuration changes were identified and `on_configuration_change` was set"
f" to `fail` for `{my_materialized_view}`",
logs,
)
assert_message_in_logs(f"Applying ALTER to: {my_materialized_view}", logs, False)
assert_message_in_logs(f"Applying REPLACE to: {my_materialized_view}", logs, False)
def test_change_is_not_applied_via_replace(self, project, my_materialized_view):
self.check_start_state(project, my_materialized_view)
self.change_config_via_alter(project, my_materialized_view)
self.change_config_via_replace(project, my_materialized_view)
_, logs = run_dbt_and_capture(
["--debug", "run", "--models", my_materialized_view.name], expect_pass=False
)
self.check_start_state(project, my_materialized_view)
assert_message_in_logs(
f"Configuration changes were identified and `on_configuration_change` was set"
f" to `fail` for `{my_materialized_view}`",
logs,
)
assert_message_in_logs(f"Applying ALTER to: {my_materialized_view}", logs, False)
assert_message_in_logs(f"Applying REPLACE to: {my_materialized_view}", logs, False)


@@ -1,30 +0,0 @@
MY_SEED = """
id,value
1,100
2,200
3,300
""".strip()
MY_TABLE = """
{{ config(
materialized='table',
) }}
select * from {{ ref('my_seed') }}
"""
MY_VIEW = """
{{ config(
materialized='view',
) }}
select * from {{ ref('my_seed') }}
"""
MY_MATERIALIZED_VIEW = """
{{ config(
materialized='materialized_view',
) }}
select * from {{ ref('my_seed') }}
"""


@@ -1,144 +0,0 @@
_MODELS__VIEW = """
{{ config(materialized='view') }}
select 2 as id, 'Bob' as name
"""
_MODELS__NO_DOCS_MODEL = """
select 1 as id, 'Alice' as name
"""
_DOCS__MY_FUN_DOCS = """
{% docs my_fun_doc %}
name Column description "with double quotes"
and with 'single quotes' as well as other;
'''abc123'''
reserved -- characters
80% of statistics are made up on the spot
--
/* comment */
Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting
{% enddocs %}
"""
_MODELS__TABLE = """
{{ config(materialized='table') }}
select 1 as id, 'Joe' as name
"""
_MODELS__MISSING_COLUMN = """
{{ config(materialized='table') }}
select 1 as id, 'Ed' as name
"""
_MODELS__MODEL_USING_QUOTE_UTIL = """
select 1 as {{ adapter.quote("2id") }}
"""
_PROPERTIES__QUOTE_MODEL = """
version: 2
models:
- name: quote_model
description: "model to test column quotes and comments"
columns:
- name: 2id
description: "XXX My description"
quote: true
"""
_PROPERITES__SCHEMA_MISSING_COL = """
version: 2
models:
- name: missing_column
columns:
- name: id
description: "test id column description"
- name: column_that_does_not_exist
description: "comment that cannot be created"
"""
_PROPERTIES__SCHEMA_YML = """
version: 2
models:
- name: table_model
description: |
Table model description "with double quotes"
      and with 'single quotes' as well as other;
'''abc123'''
reserved -- characters
80% of statistics are made up on the spot
--
/* comment */
Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting
columns:
- name: id
description: |
id Column description "with double quotes"
          and with 'single quotes' as well as other;
'''abc123'''
reserved -- characters
80% of statistics are made up on the spot
--
/* comment */
Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting
- name: name
description: |
Some stuff here and then a call to
{{ doc('my_fun_doc')}}
- name: view_model
description: |
View model description "with double quotes"
      and with 'single quotes' as well as other;
'''abc123'''
reserved -- characters
80% of statistics are made up on the spot
--
/* comment */
Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting
columns:
- name: id
description: |
id Column description "with double quotes"
          and with 'single quotes' as well as other;
'''abc123'''
reserved -- characters
80% of statistics are made up on the spot
--
/* comment */
Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting
seeds:
- name: seed
description: |
Seed model description "with double quotes"
      and with 'single quotes' as well as other;
'''abc123'''
reserved -- characters
80% of statistics are made up on the spot
--
/* comment */
Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting
columns:
- name: id
description: |
id Column description "with double quotes"
          and with 'single quotes' as well as other;
'''abc123'''
reserved -- characters
80% of statistics are made up on the spot
--
/* comment */
Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting
- name: name
description: |
Some stuff here and then a call to
{{ doc('my_fun_doc')}}
"""
_SEEDS__SEED = """id,name
1,Alice
2,Bob
"""


@@ -1,207 +0,0 @@
import json
import os
import pytest
from dbt.tests.util import run_dbt
from tests.functional.adapter.persist_docs.fixtures import (
_DOCS__MY_FUN_DOCS,
_MODELS__MISSING_COLUMN,
_MODELS__MODEL_USING_QUOTE_UTIL,
_MODELS__NO_DOCS_MODEL,
_MODELS__TABLE,
_MODELS__VIEW,
    _PROPERTIES__QUOTE_MODEL,
    _PROPERTIES__SCHEMA_MISSING_COL,
_PROPERTIES__SCHEMA_YML,
_SEEDS__SEED,
)
class BasePersistDocsBase:
@pytest.fixture(scope="class", autouse=True)
def setUp(self, project):
run_dbt(["seed"])
run_dbt()
@pytest.fixture(scope="class")
def seeds(self):
return {"seed.csv": _SEEDS__SEED}
@pytest.fixture(scope="class")
def models(self):
return {
"no_docs_model.sql": _MODELS__NO_DOCS_MODEL,
"table_model.sql": _MODELS__TABLE,
"view_model.sql": _MODELS__VIEW,
}
@pytest.fixture(scope="class")
def properties(self):
return {
"my_fun_docs.md": _DOCS__MY_FUN_DOCS,
"schema.yml": _PROPERTIES__SCHEMA_YML,
}
def _assert_common_comments(self, *comments):
for comment in comments:
assert '"with double quotes"' in comment
assert """'''abc123'''""" in comment
assert "\n" in comment
assert "Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting" in comment
assert "/* comment */" in comment
if os.name == "nt":
assert "--\r\n" in comment or "--\n" in comment
else:
assert "--\n" in comment
def _assert_has_table_comments(self, table_node):
table_comment = table_node["metadata"]["comment"]
assert table_comment.startswith("Table model description")
table_id_comment = table_node["columns"]["id"]["comment"]
assert table_id_comment.startswith("id Column description")
table_name_comment = table_node["columns"]["name"]["comment"]
assert table_name_comment.startswith("Some stuff here and then a call to")
self._assert_common_comments(table_comment, table_id_comment, table_name_comment)
def _assert_has_view_comments(
self, view_node, has_node_comments=True, has_column_comments=True
):
view_comment = view_node["metadata"]["comment"]
if has_node_comments:
assert view_comment.startswith("View model description")
self._assert_common_comments(view_comment)
else:
assert view_comment is None
view_id_comment = view_node["columns"]["id"]["comment"]
if has_column_comments:
assert view_id_comment.startswith("id Column description")
self._assert_common_comments(view_id_comment)
else:
assert view_id_comment is None
view_name_comment = view_node["columns"]["name"]["comment"]
assert view_name_comment is None
class BasePersistDocs(BasePersistDocsBase):
@pytest.fixture(scope="class")
def project_config_update(self):
return {
"models": {
"test": {
"+persist_docs": {
"relation": True,
"columns": True,
},
}
}
}
def test_has_comments_pglike(self, project):
run_dbt(["docs", "generate"])
with open("target/catalog.json") as fp:
catalog_data = json.load(fp)
assert "nodes" in catalog_data
assert len(catalog_data["nodes"]) == 4
table_node = catalog_data["nodes"]["model.test.table_model"]
        self._assert_has_table_comments(table_node)
view_node = catalog_data["nodes"]["model.test.view_model"]
self._assert_has_view_comments(view_node)
no_docs_node = catalog_data["nodes"]["model.test.no_docs_model"]
self._assert_has_view_comments(no_docs_node, False, False)
class BasePersistDocsColumnMissing(BasePersistDocsBase):
@pytest.fixture(scope="class")
def project_config_update(self):
return {
"models": {
"test": {
"+persist_docs": {
"columns": True,
},
}
}
}
@pytest.fixture(scope="class")
def models(self):
return {"missing_column.sql": _MODELS__MISSING_COLUMN}
@pytest.fixture(scope="class")
def properties(self):
return {"schema.yml": _PROPERITES__SCHEMA_MISSING_COL}
def test_missing_column(self, project):
run_dbt(["docs", "generate"])
with open("target/catalog.json") as fp:
catalog_data = json.load(fp)
assert "nodes" in catalog_data
table_node = catalog_data["nodes"]["model.test.missing_column"]
table_id_comment = table_node["columns"]["id"]["comment"]
assert table_id_comment.startswith("test id column description")
class BasePersistDocsCommentOnQuotedColumn:
"""Covers edge case where column with comment must be quoted.
We set this using the `quote:` tag in the property file."""
@pytest.fixture(scope="class")
def models(self):
return {"quote_model.sql": _MODELS__MODEL_USING_QUOTE_UTIL}
@pytest.fixture(scope="class")
def properties(self):
return {"properties.yml": _PROPERTIES__QUOTE_MODEL}
@pytest.fixture(scope="class")
def project_config_update(self):
return {
"models": {
"test": {
"materialized": "table",
"+persist_docs": {
"relation": True,
"columns": True,
},
}
}
}
@pytest.fixture(scope="class")
def run_has_comments(self, project):
def fixt():
run_dbt()
run_dbt(["docs", "generate"])
with open("target/catalog.json") as fp:
catalog_data = json.load(fp)
assert "nodes" in catalog_data
assert len(catalog_data["nodes"]) == 1
column_node = catalog_data["nodes"]["model.test.quote_model"]
column_comment = column_node["columns"]["2id"]["comment"]
assert column_comment.startswith("XXX")
return fixt
def test_quoted_column_comments(self, run_has_comments):
run_has_comments()
class TestPersistDocs(BasePersistDocs):
pass
class TestPersistDocsColumnMissing(BasePersistDocsColumnMissing):
pass
class TestPersistDocsCommentOnQuotedColumn(BasePersistDocsCommentOnQuotedColumn):
pass


@@ -1,154 +0,0 @@
import os
import pytest
import yaml
from dbt.tests.util import run_dbt
basic_sql = """
select 1 as id union all
select 1 as id union all
select 1 as id union all
select 1 as id union all
select 1 as id union all
select 1 as id
"""
basic_python = """
def model(dbt, _):
dbt.config(
materialized='table',
)
df = dbt.ref("my_sql_model")
df2 = dbt.ref("my_versioned_sql_model", v=1)
df3 = dbt.ref("my_versioned_sql_model", version=1)
df4 = dbt.ref("test", "my_versioned_sql_model", v=1)
df5 = dbt.ref("test", "my_versioned_sql_model", version=1)
df6 = dbt.source("test_source", "test_table")
df = df.limit(2)
return df
"""
second_sql = """
select * from {{ref('my_python_model')}}
"""
schema_yml = """version: 2
models:
- name: my_versioned_sql_model
versions:
- v: 1
sources:
- name: test_source
loader: custom
schema: "{{ var(env_var('DBT_TEST_SCHEMA_NAME_VARIABLE')) }}"
quoting:
identifier: True
tags:
- my_test_source_tag
tables:
- name: test_table
identifier: source
"""
seeds__source_csv = """favorite_color,id,first_name,email,ip_address,updated_at
blue,1,Larry,lking0@miitbeian.gov.cn,'69.135.206.194',2008-09-12 19:08:31
blue,2,Larry,lperkins1@toplist.cz,'64.210.133.162',1978-05-09 04:15:14
"""
class BasePythonModelTests:
@pytest.fixture(scope="class", autouse=True)
def setEnvVars(self):
os.environ["DBT_TEST_SCHEMA_NAME_VARIABLE"] = "test_run_schema"
yield
del os.environ["DBT_TEST_SCHEMA_NAME_VARIABLE"]
@pytest.fixture(scope="class")
def seeds(self):
return {"source.csv": seeds__source_csv}
@pytest.fixture(scope="class")
def models(self):
return {
"schema.yml": schema_yml,
"my_sql_model.sql": basic_sql,
"my_versioned_sql_model_v1.sql": basic_sql,
"my_python_model.py": basic_python,
"second_sql_model.sql": second_sql,
}
def test_singular_tests(self, project):
        # seed and run with the schema name supplied via --vars
vars_dict = {
"test_run_schema": project.test_schema,
}
run_dbt(["seed", "--vars", yaml.safe_dump(vars_dict)])
results = run_dbt(["run", "--vars", yaml.safe_dump(vars_dict)])
assert len(results) == 4
m_1 = """
{{config(materialized='table')}}
select 1 as id union all
select 2 as id union all
select 3 as id union all
select 4 as id union all
select 5 as id
"""
incremental_python = """
def model(dbt, session):
dbt.config(materialized="incremental", unique_key='id')
df = dbt.ref("m_1")
if dbt.is_incremental:
# incremental runs should only apply to part of the data
df = df.filter(df.id > 5)
return df
"""
class BasePythonIncrementalTests:
@pytest.fixture(scope="class")
def project_config_update(self):
return {"models": {"+incremental_strategy": "merge"}}
@pytest.fixture(scope="class")
def models(self):
return {"m_1.sql": m_1, "incremental.py": incremental_python}
def test_incremental(self, project):
# create m_1 and run incremental model the first time
run_dbt(["run"])
test_schema_relation = project.adapter.Relation.create(
database=project.database, schema=project.test_schema
)
assert (
project.run_sql(
f"select count(*) from {test_schema_relation}.incremental",
fetch="one",
)[0]
== 5
)
# running incremental model again will not cause any changes in the result model
run_dbt(["run", "-s", "incremental"])
assert (
project.run_sql(
f"select count(*) from {test_schema_relation}.incremental",
fetch="one",
)[0]
== 5
)
# add 3 records with one supposed to be filtered out
project.run_sql(f"insert into {test_schema_relation}.m_1(id) values (0), (6), (7)")
# validate that incremental model would correctly add 2 valid records to result model
run_dbt(["run", "-s", "incremental"])
assert (
project.run_sql(
f"select count(*) from {test_schema_relation}.incremental",
fetch="one",
)[0]
== 7
)


@@ -1,94 +0,0 @@
import pytest
from dbt.tests.util import run_dbt
PANDAS_MODEL = """
import pandas as pd
def model(dbt, session):
dbt.config(
materialized="table",
)
df = pd.DataFrame(
{'City': ['Buenos Aires', 'Brasilia', 'Santiago', 'Bogota', 'Caracas'],
'Country': ['Argentina', 'Brazil', 'Chile', 'Colombia', 'Venezuela'],
'Latitude': [-34.58, -15.78, -33.45, 4.60, 10.48],
'Longitude': [-58.66, -47.91, -70.66, -74.08, -66.86]}
)
return df
"""
PYSPARK_MODEL = """
def model(dbt, session):
dbt.config(
materialized="table",
)
df = spark.createDataFrame(
[
("Buenos Aires", "Argentina", -34.58, -58.66),
("Brasilia", "Brazil", -15.78, -47.91),
("Santiago", "Chile", -33.45, -70.66),
("Bogota", "Colombia", 4.60, -74.08),
("Caracas", "Venezuela", 10.48, -66.86),
],
["City", "Country", "Latitude", "Longitude"]
)
return df
"""
PANDAS_ON_SPARK_MODEL = """
import pyspark.pandas as ps
def model(dbt, session):
dbt.config(
materialized="table",
)
df = ps.DataFrame(
{'City': ['Buenos Aires', 'Brasilia', 'Santiago', 'Bogota', 'Caracas'],
'Country': ['Argentina', 'Brazil', 'Chile', 'Colombia', 'Venezuela'],
'Latitude': [-34.58, -15.78, -33.45, 4.60, 10.48],
'Longitude': [-58.66, -47.91, -70.66, -74.08, -66.86]}
)
return df
"""
KOALAS_MODEL = """
import databricks.koalas as ks
def model(dbt, session):
dbt.config(
materialized="table",
)
df = ks.DataFrame(
{'City': ['Buenos Aires', 'Brasilia', 'Santiago', 'Bogota', 'Caracas'],
'Country': ['Argentina', 'Brazil', 'Chile', 'Colombia', 'Venezuela'],
'Latitude': [-34.58, -15.78, -33.45, 4.60, 10.48],
'Longitude': [-58.66, -47.91, -70.66, -74.08, -66.86]}
)
return df
"""
class BasePySparkTests:
@pytest.fixture(scope="class")
def models(self):
return {
"pandas_df.py": PANDAS_MODEL,
"pyspark_df.py": PYSPARK_MODEL,
"pandas_on_spark_df.py": PANDAS_ON_SPARK_MODEL,
"koalas_df.py": KOALAS_MODEL,
}
def test_different_dataframes(self, project):
        # all four dataframe flavors should materialize successfully
results = run_dbt(["run"])
assert len(results) == 4


@@ -1,32 +0,0 @@
MACROS__MACRO_SQL = """
{%- macro query_header_no_args() -%}
{%- set x = "are pretty cool" -%}
{{ "dbt macros" }}
{{ x }}
{%- endmacro -%}
{%- macro query_header_args(message) -%}
{%- set comment_dict = dict(
app='dbt++',
macro_version='0.1.0',
message='blah: '~ message) -%}
{{ return(comment_dict) }}
{%- endmacro -%}
{%- macro ordered_to_json(dct) -%}
{{ tojson(dct, sort_keys=True) }}
{%- endmacro %}
{% macro invalid_query_header() -%}
{{ "Here is an invalid character for you: */" }}
{% endmacro %}
"""
MODELS__X_SQL = """
{% do run_query('select 2 as inner_id') %}
select 1 as outer_id
"""


@@ -1,127 +0,0 @@
import json
import pytest
from dbt.exceptions import DbtRuntimeError
from dbt.tests.util import run_dbt_and_capture
from tests.functional.adapter.query_comment.fixtures import (
MACROS__MACRO_SQL,
MODELS__X_SQL,
)
class BaseDefaultQueryComments:
@pytest.fixture(scope="class")
def models(self):
return {
"x.sql": MODELS__X_SQL,
}
@pytest.fixture(scope="class")
def macros(self):
return {
"macro.sql": MACROS__MACRO_SQL,
}
def run_get_json(self, expect_pass=True):
res, raw_logs = run_dbt_and_capture(
["--debug", "--log-format=json", "run"], expect_pass=expect_pass
)
# empty lists evaluate as False
assert len(res) > 0
return raw_logs
# Base setup to be inherited #
class BaseQueryComments(BaseDefaultQueryComments):
@pytest.fixture(scope="class")
def project_config_update(self):
return {"query-comment": "dbt\nrules!\n"}
    def test_matches_comment(self, project) -> None:
logs = self.run_get_json()
assert r"/* dbt\nrules! */\n" in logs
class BaseMacroQueryComments(BaseDefaultQueryComments):
@pytest.fixture(scope="class")
def project_config_update(self):
return {"query-comment": "{{ query_header_no_args() }}"}
    def test_matches_comment(self, project) -> None:
logs = self.run_get_json()
assert r"/* dbt macros\nare pretty cool */\n" in logs
class BaseMacroArgsQueryComments(BaseDefaultQueryComments):
@pytest.fixture(scope="class")
def project_config_update(self):
return {"query-comment": "{{ return(ordered_to_json(query_header_args(target.name))) }}"}
    def test_matches_comment(self, project) -> None:
logs = self.run_get_json()
expected_dct = {
"app": "dbt++",
"macro_version": "0.1.0",
"message": f"blah: {project.adapter.config.target_name}",
}
expected = r"/* {} */\n".format(json.dumps(expected_dct, sort_keys=True)).replace(
'"', r"\""
)
assert expected in logs
class BaseMacroInvalidQueryComments(BaseDefaultQueryComments):
@pytest.fixture(scope="class")
def project_config_update(self):
return {"query-comment": "{{ invalid_query_header() }}"}
def test_run_assert_comments(self, project):
with pytest.raises(DbtRuntimeError):
self.run_get_json(expect_pass=False)
class BaseNullQueryComments(BaseDefaultQueryComments):
@pytest.fixture(scope="class")
def project_config_update(self):
return {"query-comment": None}
def test_matches_comment(self, project) -> None:
logs = self.run_get_json()
assert "/*" not in logs or "*/" not in logs
class BaseEmptyQueryComments(BaseDefaultQueryComments):
@pytest.fixture(scope="class")
def project_config_update(self):
return {"query-comment": ""}
def test_matches_comment(self, project) -> None:
logs = self.run_get_json()
assert "/*" not in logs or "*/" not in logs
# Tests #
class TestQueryComments(BaseQueryComments):
pass
class TestMacroQueryComments(BaseMacroQueryComments):
pass
class TestMacroArgsQueryComments(BaseMacroArgsQueryComments):
pass
class TestMacroInvalidQueryComments(BaseMacroInvalidQueryComments):
pass
class TestNullQueryComments(BaseNullQueryComments):
pass
class TestEmptyQueryComments(BaseEmptyQueryComments):
pass
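The `.replace('"', r"\"")` in BaseMacroArgsQueryComments exists because the assertion greps dbt's JSON-formatted debug logs, where the rendered comment's double quotes arrive escaped. A standalone sketch of how that expected string is assembled, assuming a target named "default" (hypothetical; the test reads the real name from project.adapter.config.target_name):

import json

comment_dict = {
    "app": "dbt++",
    "macro_version": "0.1.0",
    "message": "blah: default",  # assumes a target named "default"
}
# ordered_to_json sorts keys, and the JSON log format escapes double quotes
expected = r"/* {} */\n".format(json.dumps(comment_dict, sort_keys=True)).replace('"', r"\"")
print(expected)  # /* {\"app\": \"dbt++\", \"macro_version\": \"0.1.0\", \"message\": \"blah: default\"} */\n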


@@ -1,40 +0,0 @@
from typing import List, Optional
import pytest
from dbt.tests.util import run_dbt
_DEFAULT_CHANGE_RELATION_TYPE_MODEL = """
{{ config(materialized=var('materialized')) }}
select '{{ var("materialized") }}' as materialization
{% if var('materialized') == 'incremental' and is_incremental() %}
where 'abc' != (select max(materialization) from {{ this }})
{% endif %}
"""
class BaseChangeRelationTypeValidator:
@pytest.fixture(scope="class")
def models(self):
return {"model_mc_modelface.sql": _DEFAULT_CHANGE_RELATION_TYPE_MODEL}
def _run_and_check_materialization(self, materialization, extra_args: Optional[List] = None):
run_args = ["run", "--vars", f"materialized: {materialization}"]
if extra_args:
run_args.extend(extra_args)
results = run_dbt(run_args)
assert results[0].node.config.materialized == materialization
assert len(results) == 1
def test_changing_materialization_changes_relation_type(self, project):
self._run_and_check_materialization("view")
self._run_and_check_materialization("table")
self._run_and_check_materialization("view")
self._run_and_check_materialization("incremental")
self._run_and_check_materialization("table", extra_args=["--full-refresh"])
class TestChangeRelationTypes(BaseChangeRelationTypeValidator):
pass
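Unrolled, the validator above drives the following sequence of invocations; each run rebuilds model_mc_modelface under a different materialization, and the final --full-refresh forces the incremental-to-table swap. A sketch using the same run_dbt helper (it assumes the usual dbt test project fixtures are in place):

from dbt.tests.util import run_dbt

run_dbt(["run", "--vars", "materialized: view"])
run_dbt(["run", "--vars", "materialized: table"])
run_dbt(["run", "--vars", "materialized: view"])
run_dbt(["run", "--vars", "materialized: incremental"])
run_dbt(["run", "--vars", "materialized: table", "--full-refresh"])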


@@ -1,35 +0,0 @@
import pytest
from dbt.tests.util import get_connection, run_dbt
class BaseDropSchemaNamed:
@pytest.fixture(scope="class")
def models(self):
return {
"model_a.sql": "select 1 as id",
}
def test_dropped_schema_named_drops_expected_schema(self, project):
results = run_dbt(["run"])
assert len(results) == 1
run_dbt(
[
"run-operation",
"drop_schema_named",
"--args",
f"{{schema_name: {project.test_schema} }}",
]
)
adapter = project.adapter
with get_connection(adapter):
schemas = adapter.list_schemas(project.database)
assert project.test_schema not in schemas
class TestDropSchemaNamed(BaseDropSchemaNamed):
pass
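The --args value in the test above is parsed by dbt as a YAML mapping and passed to the macro as keyword arguments. A standalone sketch with a hypothetical schema name (the test interpolates project.test_schema instead):

run_dbt(
    [
        "run-operation",
        "drop_schema_named",
        "--args",
        "{schema_name: my_test_schema}",  # YAML mapping -> macro kwargs
    ]
)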


@@ -1,430 +0,0 @@
_PROPERTIES__SCHEMA_YML = """
version: 2
models:
- name: disabled
columns:
- name: id
data_tests:
- unique
"""
_SEEDS__SEED_INITIAL = """
id,first_name,last_name,email,gender,ip_address
1,Jack,Hunter,jhunter0@pbs.org,Male,59.80.20.168
2,Kathryn,Walker,kwalker1@ezinearticles.com,Female,194.121.179.35
3,Gerald,Ryan,gryan2@com.com,Male,11.3.212.243
4,Bonnie,Spencer,bspencer3@ameblo.jp,Female,216.32.196.175
5,Harold,Taylor,htaylor4@people.com.cn,Male,253.10.246.136
6,Jacqueline,Griffin,jgriffin5@t.co,Female,16.13.192.220
7,Wanda,Arnold,warnold6@google.nl,Female,232.116.150.64
8,Craig,Ortiz,cortiz7@sciencedaily.com,Male,199.126.106.13
9,Gary,Day,gday8@nih.gov,Male,35.81.68.186
10,Rose,Wright,rwright9@yahoo.co.jp,Female,236.82.178.100
11,Raymond,Kelley,rkelleya@fc2.com,Male,213.65.166.67
12,Gerald,Robinson,grobinsonb@disqus.com,Male,72.232.194.193
13,Mildred,Martinez,mmartinezc@samsung.com,Female,198.29.112.5
14,Dennis,Arnold,darnoldd@google.com,Male,86.96.3.250
15,Judy,Gray,jgraye@opensource.org,Female,79.218.162.245
16,Theresa,Garza,tgarzaf@epa.gov,Female,21.59.100.54
17,Gerald,Robertson,grobertsong@csmonitor.com,Male,131.134.82.96
18,Philip,Hernandez,phernandezh@adobe.com,Male,254.196.137.72
19,Julia,Gonzalez,jgonzalezi@cam.ac.uk,Female,84.240.227.174
20,Andrew,Davis,adavisj@patch.com,Male,9.255.67.25
21,Kimberly,Harper,kharperk@foxnews.com,Female,198.208.120.253
22,Mark,Martin,mmartinl@marketwatch.com,Male,233.138.182.153
23,Cynthia,Ruiz,cruizm@google.fr,Female,18.178.187.201
24,Samuel,Carroll,scarrolln@youtu.be,Male,128.113.96.122
25,Jennifer,Larson,jlarsono@vinaora.com,Female,98.234.85.95
26,Ashley,Perry,aperryp@rakuten.co.jp,Female,247.173.114.52
27,Howard,Rodriguez,hrodriguezq@shutterfly.com,Male,231.188.95.26
28,Amy,Brooks,abrooksr@theatlantic.com,Female,141.199.174.118
29,Louise,Warren,lwarrens@adobe.com,Female,96.105.158.28
30,Tina,Watson,twatsont@myspace.com,Female,251.142.118.177
31,Janice,Kelley,jkelleyu@creativecommons.org,Female,239.167.34.233
32,Terry,Mccoy,tmccoyv@bravesites.com,Male,117.201.183.203
33,Jeffrey,Morgan,jmorganw@surveymonkey.com,Male,78.101.78.149
34,Louis,Harvey,lharveyx@sina.com.cn,Male,51.50.0.167
35,Philip,Miller,pmillery@samsung.com,Male,103.255.222.110
36,Willie,Marshall,wmarshallz@ow.ly,Male,149.219.91.68
37,Patrick,Lopez,plopez10@redcross.org,Male,250.136.229.89
38,Adam,Jenkins,ajenkins11@harvard.edu,Male,7.36.112.81
39,Benjamin,Cruz,bcruz12@linkedin.com,Male,32.38.98.15
40,Ruby,Hawkins,rhawkins13@gmpg.org,Female,135.171.129.255
41,Carlos,Barnes,cbarnes14@a8.net,Male,240.197.85.140
42,Ruby,Griffin,rgriffin15@bravesites.com,Female,19.29.135.24
43,Sean,Mason,smason16@icq.com,Male,159.219.155.249
44,Anthony,Payne,apayne17@utexas.edu,Male,235.168.199.218
45,Steve,Cruz,scruz18@pcworld.com,Male,238.201.81.198
46,Anthony,Garcia,agarcia19@flavors.me,Male,25.85.10.18
47,Doris,Lopez,dlopez1a@sphinn.com,Female,245.218.51.238
48,Susan,Nichols,snichols1b@freewebs.com,Female,199.99.9.61
49,Wanda,Ferguson,wferguson1c@yahoo.co.jp,Female,236.241.135.21
50,Andrea,Pierce,apierce1d@google.co.uk,Female,132.40.10.209
51,Lawrence,Phillips,lphillips1e@jugem.jp,Male,72.226.82.87
52,Judy,Gilbert,jgilbert1f@multiply.com,Female,196.250.15.142
53,Eric,Williams,ewilliams1g@joomla.org,Male,222.202.73.126
54,Ralph,Romero,rromero1h@sogou.com,Male,123.184.125.212
55,Jean,Wilson,jwilson1i@ocn.ne.jp,Female,176.106.32.194
56,Lori,Reynolds,lreynolds1j@illinois.edu,Female,114.181.203.22
57,Donald,Moreno,dmoreno1k@bbc.co.uk,Male,233.249.97.60
58,Steven,Berry,sberry1l@eepurl.com,Male,186.193.50.50
59,Theresa,Shaw,tshaw1m@people.com.cn,Female,120.37.71.222
60,John,Stephens,jstephens1n@nationalgeographic.com,Male,191.87.127.115
61,Richard,Jacobs,rjacobs1o@state.tx.us,Male,66.210.83.155
62,Andrew,Lawson,alawson1p@over-blog.com,Male,54.98.36.94
63,Peter,Morgan,pmorgan1q@rambler.ru,Male,14.77.29.106
64,Nicole,Garrett,ngarrett1r@zimbio.com,Female,21.127.74.68
65,Joshua,Kim,jkim1s@edublogs.org,Male,57.255.207.41
66,Ralph,Roberts,rroberts1t@people.com.cn,Male,222.143.131.109
67,George,Montgomery,gmontgomery1u@smugmug.com,Male,76.75.111.77
68,Gerald,Alvarez,galvarez1v@flavors.me,Male,58.157.186.194
69,Donald,Olson,dolson1w@whitehouse.gov,Male,69.65.74.135
70,Carlos,Morgan,cmorgan1x@pbs.org,Male,96.20.140.87
71,Aaron,Stanley,astanley1y@webnode.com,Male,163.119.217.44
72,Virginia,Long,vlong1z@spiegel.de,Female,204.150.194.182
73,Robert,Berry,rberry20@tripadvisor.com,Male,104.19.48.241
74,Antonio,Brooks,abrooks21@unesco.org,Male,210.31.7.24
75,Ruby,Garcia,rgarcia22@ovh.net,Female,233.218.162.214
76,Jack,Hanson,jhanson23@blogtalkradio.com,Male,31.55.46.199
77,Kathryn,Nelson,knelson24@walmart.com,Female,14.189.146.41
78,Jason,Reed,jreed25@printfriendly.com,Male,141.189.89.255
79,George,Coleman,gcoleman26@people.com.cn,Male,81.189.221.144
80,Rose,King,rking27@ucoz.com,Female,212.123.168.231
81,Johnny,Holmes,jholmes28@boston.com,Male,177.3.93.188
82,Katherine,Gilbert,kgilbert29@altervista.org,Female,199.215.169.61
83,Joshua,Thomas,jthomas2a@ustream.tv,Male,0.8.205.30
84,Julie,Perry,jperry2b@opensource.org,Female,60.116.114.192
85,Richard,Perry,rperry2c@oracle.com,Male,181.125.70.232
86,Kenneth,Ruiz,kruiz2d@wikimedia.org,Male,189.105.137.109
87,Jose,Morgan,jmorgan2e@webnode.com,Male,101.134.215.156
88,Donald,Campbell,dcampbell2f@goo.ne.jp,Male,102.120.215.84
89,Debra,Collins,dcollins2g@uol.com.br,Female,90.13.153.235
90,Jesse,Johnson,jjohnson2h@stumbleupon.com,Male,225.178.125.53
91,Elizabeth,Stone,estone2i@histats.com,Female,123.184.126.221
92,Angela,Rogers,arogers2j@goodreads.com,Female,98.104.132.187
93,Emily,Dixon,edixon2k@mlb.com,Female,39.190.75.57
94,Albert,Scott,ascott2l@tinypic.com,Male,40.209.13.189
95,Barbara,Peterson,bpeterson2m@ow.ly,Female,75.249.136.180
96,Adam,Greene,agreene2n@fastcompany.com,Male,184.173.109.144
97,Earl,Sanders,esanders2o@hc360.com,Male,247.34.90.117
98,Angela,Brooks,abrooks2p@mtv.com,Female,10.63.249.126
99,Harold,Foster,hfoster2q@privacy.gov.au,Male,139.214.40.244
100,Carl,Meyer,cmeyer2r@disqus.com,Male,204.117.7.88
""".lstrip()
_SEEDS__SEED_UPDATE = """
id,first_name,last_name,email,gender,ip_address
1,Jack,Hunter,jhunter0@pbs.org,Male,59.80.20.168
2,Kathryn,Walker,kwalker1@ezinearticles.com,Female,194.121.179.35
3,Gerald,Ryan,gryan2@com.com,Male,11.3.212.243
4,Bonnie,Spencer,bspencer3@ameblo.jp,Female,216.32.196.175
5,Harold,Taylor,htaylor4@people.com.cn,Male,253.10.246.136
6,Jacqueline,Griffin,jgriffin5@t.co,Female,16.13.192.220
7,Wanda,Arnold,warnold6@google.nl,Female,232.116.150.64
8,Craig,Ortiz,cortiz7@sciencedaily.com,Male,199.126.106.13
9,Gary,Day,gday8@nih.gov,Male,35.81.68.186
10,Rose,Wright,rwright9@yahoo.co.jp,Female,236.82.178.100
11,Raymond,Kelley,rkelleya@fc2.com,Male,213.65.166.67
12,Gerald,Robinson,grobinsonb@disqus.com,Male,72.232.194.193
13,Mildred,Martinez,mmartinezc@samsung.com,Female,198.29.112.5
14,Dennis,Arnold,darnoldd@google.com,Male,86.96.3.250
15,Judy,Gray,jgraye@opensource.org,Female,79.218.162.245
16,Theresa,Garza,tgarzaf@epa.gov,Female,21.59.100.54
17,Gerald,Robertson,grobertsong@csmonitor.com,Male,131.134.82.96
18,Philip,Hernandez,phernandezh@adobe.com,Male,254.196.137.72
19,Julia,Gonzalez,jgonzalezi@cam.ac.uk,Female,84.240.227.174
20,Andrew,Davis,adavisj@patch.com,Male,9.255.67.25
21,Kimberly,Harper,kharperk@foxnews.com,Female,198.208.120.253
22,Mark,Martin,mmartinl@marketwatch.com,Male,233.138.182.153
23,Cynthia,Ruiz,cruizm@google.fr,Female,18.178.187.201
24,Samuel,Carroll,scarrolln@youtu.be,Male,128.113.96.122
25,Jennifer,Larson,jlarsono@vinaora.com,Female,98.234.85.95
26,Ashley,Perry,aperryp@rakuten.co.jp,Female,247.173.114.52
27,Howard,Rodriguez,hrodriguezq@shutterfly.com,Male,231.188.95.26
28,Amy,Brooks,abrooksr@theatlantic.com,Female,141.199.174.118
29,Louise,Warren,lwarrens@adobe.com,Female,96.105.158.28
30,Tina,Watson,twatsont@myspace.com,Female,251.142.118.177
31,Janice,Kelley,jkelleyu@creativecommons.org,Female,239.167.34.233
32,Terry,Mccoy,tmccoyv@bravesites.com,Male,117.201.183.203
33,Jeffrey,Morgan,jmorganw@surveymonkey.com,Male,78.101.78.149
34,Louis,Harvey,lharveyx@sina.com.cn,Male,51.50.0.167
35,Philip,Miller,pmillery@samsung.com,Male,103.255.222.110
36,Willie,Marshall,wmarshallz@ow.ly,Male,149.219.91.68
37,Patrick,Lopez,plopez10@redcross.org,Male,250.136.229.89
38,Adam,Jenkins,ajenkins11@harvard.edu,Male,7.36.112.81
39,Benjamin,Cruz,bcruz12@linkedin.com,Male,32.38.98.15
40,Ruby,Hawkins,rhawkins13@gmpg.org,Female,135.171.129.255
41,Carlos,Barnes,cbarnes14@a8.net,Male,240.197.85.140
42,Ruby,Griffin,rgriffin15@bravesites.com,Female,19.29.135.24
43,Sean,Mason,smason16@icq.com,Male,159.219.155.249
44,Anthony,Payne,apayne17@utexas.edu,Male,235.168.199.218
45,Steve,Cruz,scruz18@pcworld.com,Male,238.201.81.198
46,Anthony,Garcia,agarcia19@flavors.me,Male,25.85.10.18
47,Doris,Lopez,dlopez1a@sphinn.com,Female,245.218.51.238
48,Susan,Nichols,snichols1b@freewebs.com,Female,199.99.9.61
49,Wanda,Ferguson,wferguson1c@yahoo.co.jp,Female,236.241.135.21
50,Andrea,Pierce,apierce1d@google.co.uk,Female,132.40.10.209
51,Lawrence,Phillips,lphillips1e@jugem.jp,Male,72.226.82.87
52,Judy,Gilbert,jgilbert1f@multiply.com,Female,196.250.15.142
53,Eric,Williams,ewilliams1g@joomla.org,Male,222.202.73.126
54,Ralph,Romero,rromero1h@sogou.com,Male,123.184.125.212
55,Jean,Wilson,jwilson1i@ocn.ne.jp,Female,176.106.32.194
56,Lori,Reynolds,lreynolds1j@illinois.edu,Female,114.181.203.22
57,Donald,Moreno,dmoreno1k@bbc.co.uk,Male,233.249.97.60
58,Steven,Berry,sberry1l@eepurl.com,Male,186.193.50.50
59,Theresa,Shaw,tshaw1m@people.com.cn,Female,120.37.71.222
60,John,Stephens,jstephens1n@nationalgeographic.com,Male,191.87.127.115
61,Richard,Jacobs,rjacobs1o@state.tx.us,Male,66.210.83.155
62,Andrew,Lawson,alawson1p@over-blog.com,Male,54.98.36.94
63,Peter,Morgan,pmorgan1q@rambler.ru,Male,14.77.29.106
64,Nicole,Garrett,ngarrett1r@zimbio.com,Female,21.127.74.68
65,Joshua,Kim,jkim1s@edublogs.org,Male,57.255.207.41
66,Ralph,Roberts,rroberts1t@people.com.cn,Male,222.143.131.109
67,George,Montgomery,gmontgomery1u@smugmug.com,Male,76.75.111.77
68,Gerald,Alvarez,galvarez1v@flavors.me,Male,58.157.186.194
69,Donald,Olson,dolson1w@whitehouse.gov,Male,69.65.74.135
70,Carlos,Morgan,cmorgan1x@pbs.org,Male,96.20.140.87
71,Aaron,Stanley,astanley1y@webnode.com,Male,163.119.217.44
72,Virginia,Long,vlong1z@spiegel.de,Female,204.150.194.182
73,Robert,Berry,rberry20@tripadvisor.com,Male,104.19.48.241
74,Antonio,Brooks,abrooks21@unesco.org,Male,210.31.7.24
75,Ruby,Garcia,rgarcia22@ovh.net,Female,233.218.162.214
76,Jack,Hanson,jhanson23@blogtalkradio.com,Male,31.55.46.199
77,Kathryn,Nelson,knelson24@walmart.com,Female,14.189.146.41
78,Jason,Reed,jreed25@printfriendly.com,Male,141.189.89.255
79,George,Coleman,gcoleman26@people.com.cn,Male,81.189.221.144
80,Rose,King,rking27@ucoz.com,Female,212.123.168.231
81,Johnny,Holmes,jholmes28@boston.com,Male,177.3.93.188
82,Katherine,Gilbert,kgilbert29@altervista.org,Female,199.215.169.61
83,Joshua,Thomas,jthomas2a@ustream.tv,Male,0.8.205.30
84,Julie,Perry,jperry2b@opensource.org,Female,60.116.114.192
85,Richard,Perry,rperry2c@oracle.com,Male,181.125.70.232
86,Kenneth,Ruiz,kruiz2d@wikimedia.org,Male,189.105.137.109
87,Jose,Morgan,jmorgan2e@webnode.com,Male,101.134.215.156
88,Donald,Campbell,dcampbell2f@goo.ne.jp,Male,102.120.215.84
89,Debra,Collins,dcollins2g@uol.com.br,Female,90.13.153.235
90,Jesse,Johnson,jjohnson2h@stumbleupon.com,Male,225.178.125.53
91,Elizabeth,Stone,estone2i@histats.com,Female,123.184.126.221
92,Angela,Rogers,arogers2j@goodreads.com,Female,98.104.132.187
93,Emily,Dixon,edixon2k@mlb.com,Female,39.190.75.57
94,Albert,Scott,ascott2l@tinypic.com,Male,40.209.13.189
95,Barbara,Peterson,bpeterson2m@ow.ly,Female,75.249.136.180
96,Adam,Greene,agreene2n@fastcompany.com,Male,184.173.109.144
97,Earl,Sanders,esanders2o@hc360.com,Male,247.34.90.117
98,Angela,Brooks,abrooks2p@mtv.com,Female,10.63.249.126
99,Harold,Foster,hfoster2q@privacy.gov.au,Male,139.214.40.244
100,Carl,Meyer,cmeyer2r@disqus.com,Male,204.117.7.88
101,Michael,Perez,mperez0@chronoengine.com,Male,106.239.70.175
102,Shawn,Mccoy,smccoy1@reddit.com,Male,24.165.76.182
103,Kathleen,Payne,kpayne2@cargocollective.com,Female,113.207.168.106
104,Jimmy,Cooper,jcooper3@cargocollective.com,Male,198.24.63.114
105,Katherine,Rice,krice4@typepad.com,Female,36.97.186.238
106,Sarah,Ryan,sryan5@gnu.org,Female,119.117.152.40
107,Martin,Mcdonald,mmcdonald6@opera.com,Male,8.76.38.115
108,Frank,Robinson,frobinson7@wunderground.com,Male,186.14.64.194
109,Jennifer,Franklin,jfranklin8@mail.ru,Female,91.216.3.131
110,Henry,Welch,hwelch9@list-manage.com,Male,176.35.182.168
111,Fred,Snyder,fsnydera@reddit.com,Male,217.106.196.54
112,Amy,Dunn,adunnb@nba.com,Female,95.39.163.195
113,Kathleen,Meyer,kmeyerc@cdc.gov,Female,164.142.188.214
114,Steve,Ferguson,sfergusond@reverbnation.com,Male,138.22.204.251
115,Teresa,Hill,thille@dion.ne.jp,Female,82.84.228.235
116,Amanda,Harper,aharperf@mail.ru,Female,16.123.56.176
117,Kimberly,Ray,krayg@xing.com,Female,48.66.48.12
118,Johnny,Knight,jknighth@jalbum.net,Male,99.30.138.123
119,Virginia,Freeman,vfreemani@tiny.cc,Female,225.172.182.63
120,Anna,Austin,aaustinj@diigo.com,Female,62.111.227.148
121,Willie,Hill,whillk@mail.ru,Male,0.86.232.249
122,Sean,Harris,sharrisl@zdnet.com,Male,117.165.133.249
123,Mildred,Adams,madamsm@usatoday.com,Female,163.44.97.46
124,David,Graham,dgrahamn@zimbio.com,Male,78.13.246.202
125,Victor,Hunter,vhuntero@ehow.com,Male,64.156.179.139
126,Aaron,Ruiz,aruizp@weebly.com,Male,34.194.68.78
127,Benjamin,Brooks,bbrooksq@jalbum.net,Male,20.192.189.107
128,Lisa,Wilson,lwilsonr@japanpost.jp,Female,199.152.130.217
129,Benjamin,King,bkings@comsenz.com,Male,29.189.189.213
130,Christina,Williamson,cwilliamsont@boston.com,Female,194.101.52.60
131,Jane,Gonzalez,jgonzalezu@networksolutions.com,Female,109.119.12.87
132,Thomas,Owens,towensv@psu.edu,Male,84.168.213.153
133,Katherine,Moore,kmoorew@naver.com,Female,183.150.65.24
134,Jennifer,Stewart,jstewartx@yahoo.com,Female,38.41.244.58
135,Sara,Tucker,stuckery@topsy.com,Female,181.130.59.184
136,Harold,Ortiz,hortizz@vkontakte.ru,Male,198.231.63.137
137,Shirley,James,sjames10@yelp.com,Female,83.27.160.104
138,Dennis,Johnson,djohnson11@slate.com,Male,183.178.246.101
139,Louise,Weaver,lweaver12@china.com.cn,Female,1.14.110.18
140,Maria,Armstrong,marmstrong13@prweb.com,Female,181.142.1.249
141,Gloria,Cruz,gcruz14@odnoklassniki.ru,Female,178.232.140.243
142,Diana,Spencer,dspencer15@ifeng.com,Female,125.153.138.244
143,Kelly,Nguyen,knguyen16@altervista.org,Female,170.13.201.119
144,Jane,Rodriguez,jrodriguez17@biblegateway.com,Female,12.102.249.81
145,Scott,Brown,sbrown18@geocities.jp,Male,108.174.99.192
146,Norma,Cruz,ncruz19@si.edu,Female,201.112.156.197
147,Marie,Peters,mpeters1a@mlb.com,Female,231.121.197.144
148,Lillian,Carr,lcarr1b@typepad.com,Female,206.179.164.163
149,Judy,Nichols,jnichols1c@t-online.de,Female,158.190.209.194
150,Billy,Long,blong1d@yahoo.com,Male,175.20.23.160
151,Howard,Reid,hreid1e@exblog.jp,Male,118.99.196.20
152,Laura,Ferguson,lferguson1f@tuttocitta.it,Female,22.77.87.110
153,Anne,Bailey,abailey1g@geocities.com,Female,58.144.159.245
154,Rose,Morgan,rmorgan1h@ehow.com,Female,118.127.97.4
155,Nicholas,Reyes,nreyes1i@google.ru,Male,50.135.10.252
156,Joshua,Kennedy,jkennedy1j@house.gov,Male,154.6.163.209
157,Paul,Watkins,pwatkins1k@upenn.edu,Male,177.236.120.87
158,Kathryn,Kelly,kkelly1l@businessweek.com,Female,70.28.61.86
159,Adam,Armstrong,aarmstrong1m@techcrunch.com,Male,133.235.24.202
160,Norma,Wallace,nwallace1n@phoca.cz,Female,241.119.227.128
161,Timothy,Reyes,treyes1o@google.cn,Male,86.28.23.26
162,Elizabeth,Patterson,epatterson1p@sun.com,Female,139.97.159.149
163,Edward,Gomez,egomez1q@google.fr,Male,158.103.108.255
164,David,Cox,dcox1r@friendfeed.com,Male,206.80.80.58
165,Brenda,Wood,bwood1s@over-blog.com,Female,217.207.44.179
166,Adam,Walker,awalker1t@blogs.com,Male,253.211.54.93
167,Michael,Hart,mhart1u@wix.com,Male,230.206.200.22
168,Jesse,Ellis,jellis1v@google.co.uk,Male,213.254.162.52
169,Janet,Powell,jpowell1w@un.org,Female,27.192.194.86
170,Helen,Ford,hford1x@creativecommons.org,Female,52.160.102.168
171,Gerald,Carpenter,gcarpenter1y@about.me,Male,36.30.194.218
172,Kathryn,Oliver,koliver1z@army.mil,Female,202.63.103.69
173,Alan,Berry,aberry20@gov.uk,Male,246.157.112.211
174,Harry,Andrews,handrews21@ameblo.jp,Male,195.108.0.12
175,Andrea,Hall,ahall22@hp.com,Female,149.162.163.28
176,Barbara,Wells,bwells23@behance.net,Female,224.70.72.1
177,Anne,Wells,awells24@apache.org,Female,180.168.81.153
178,Harry,Harper,hharper25@rediff.com,Male,151.87.130.21
179,Jack,Ray,jray26@wufoo.com,Male,220.109.38.178
180,Phillip,Hamilton,phamilton27@joomla.org,Male,166.40.47.30
181,Shirley,Hunter,shunter28@newsvine.com,Female,97.209.140.194
182,Arthur,Daniels,adaniels29@reuters.com,Male,5.40.240.86
183,Virginia,Rodriguez,vrodriguez2a@walmart.com,Female,96.80.164.184
184,Christina,Ryan,cryan2b@hibu.com,Female,56.35.5.52
185,Theresa,Mendoza,tmendoza2c@vinaora.com,Female,243.42.0.210
186,Jason,Cole,jcole2d@ycombinator.com,Male,198.248.39.129
187,Phillip,Bryant,pbryant2e@rediff.com,Male,140.39.116.251
188,Adam,Torres,atorres2f@sun.com,Male,101.75.187.135
189,Margaret,Johnston,mjohnston2g@ucsd.edu,Female,159.30.69.149
190,Paul,Payne,ppayne2h@hhs.gov,Male,199.234.140.220
191,Todd,Willis,twillis2i@businessweek.com,Male,191.59.136.214
192,Willie,Oliver,woliver2j@noaa.gov,Male,44.212.35.197
193,Frances,Robertson,frobertson2k@go.com,Female,31.117.65.136
194,Gregory,Hawkins,ghawkins2l@joomla.org,Male,91.3.22.49
195,Lisa,Perkins,lperkins2m@si.edu,Female,145.95.31.186
196,Jacqueline,Anderson,janderson2n@cargocollective.com,Female,14.176.0.187
197,Shirley,Diaz,sdiaz2o@ucla.edu,Female,207.12.95.46
198,Nicole,Meyer,nmeyer2p@flickr.com,Female,231.79.115.13
199,Mary,Gray,mgray2q@constantcontact.com,Female,210.116.64.253
200,Jean,Mcdonald,jmcdonald2r@baidu.com,Female,122.239.235.117
""".lstrip()
_MODELS__ADVANCED_INCREMENTAL = """
{{
config(
materialized = "incremental",
unique_key = "id",
persist_docs = {"relation": true}
)
}}
select *
from {{ ref('seed') }}
{% if is_incremental() %}
where id > (select max(id) from {{this}})
{% endif %}
"""
_MODELS__COMPOUND_SORT = """
{{
config(
materialized = "table",
sort = 'first_name',
sort_type = 'compound'
)
}}
select * from {{ ref('seed') }}
"""
_MODELS__DISABLED = """
{{
config(
materialized = "view",
enabled = False
)
}}
select * from {{ ref('seed') }}
"""
_MODELS__EMPTY = """
"""
_MODELS__GET_AND_REF = """
{%- do adapter.get_relation(database=target.database, schema=target.schema, identifier='materialized') -%}
select * from {{ ref('materialized') }}
"""
_MODELS_GET_AND_REF_UPPERCASE = """
{%- do adapter.get_relation(database=target.database, schema=target.schema, identifier='MATERIALIZED') -%}
select * from {{ ref('MATERIALIZED') }}
"""
_MODELS__INCREMENTAL = """
{{
config(
materialized = "incremental"
)
}}
select * from {{ ref('seed') }}
{% if is_incremental() %}
where id > (select max(id) from {{this}})
{% endif %}
"""
_MODELS__INTERLEAVED_SORT = """
{{
config(
materialized = "table",
sort = ['first_name', 'last_name'],
sort_type = 'interleaved'
)
}}
select * from {{ ref('seed') }}
"""
_MODELS__MATERIALIZED = """
{{
config(
materialized = "table"
)
}}
-- ensure that dbt_utils' relation check will work
{% set relation = ref('seed') %}
{%- if not (relation is mapping and relation.get('metadata', {}).get('type', '').endswith('Relation')) -%}
{%- do exceptions.raise_compiler_error("Macro " ~ macro ~ " expected a Relation but received the value: " ~ relation) -%}
{%- endif -%}
-- this is a unicode character: å
select * from {{ relation }}
"""
_MODELS__VIEW_MODEL = """
{{
config(
materialized = "view"
)
}}
select * from {{ ref('seed') }}
"""


@@ -1,68 +0,0 @@
import pytest
from dbt.tests.util import check_relations_equal, run_dbt
from tests.functional.adapter.simple_copy.fixtures import (
_MODELS__ADVANCED_INCREMENTAL,
_MODELS__COMPOUND_SORT,
_MODELS__DISABLED,
_MODELS__EMPTY,
_MODELS__INCREMENTAL,
_MODELS__INTERLEAVED_SORT,
_MODELS__MATERIALIZED,
_MODELS__VIEW_MODEL,
_MODELS_GET_AND_REF_UPPERCASE,
_PROPERTIES__SCHEMA_YML,
_SEEDS__SEED_INITIAL,
)
class TestSimpleCopyUppercase:
@pytest.fixture(scope="class")
def dbt_profile_target(self):
return {
"type": "postgres",
"threads": 4,
"host": "localhost",
"port": 5432,
"user": "root",
"pass": "password",
"dbname": "dbtMixedCase",
}
@pytest.fixture(scope="class")
def models(self):
return {
"ADVANCED_INCREMENTAL.sql": _MODELS__ADVANCED_INCREMENTAL,
"COMPOUND_SORT.sql": _MODELS__COMPOUND_SORT,
"DISABLED.sql": _MODELS__DISABLED,
"EMPTY.sql": _MODELS__EMPTY,
"GET_AND_REF.sql": _MODELS_GET_AND_REF_UPPERCASE,
"INCREMENTAL.sql": _MODELS__INCREMENTAL,
"INTERLEAVED_SORT.sql": _MODELS__INTERLEAVED_SORT,
"MATERIALIZED.sql": _MODELS__MATERIALIZED,
"VIEW_MODEL.sql": _MODELS__VIEW_MODEL,
}
@pytest.fixture(scope="class")
def properties(self):
return {
"schema.yml": _PROPERTIES__SCHEMA_YML,
}
@pytest.fixture(scope="class")
def seeds(self):
return {"seed.csv": _SEEDS__SEED_INITIAL}
def test_simple_copy_uppercase(self, project):
# Load the seed file and check that it worked
results = run_dbt(["seed"])
assert len(results) == 1
# Run the project and ensure that all the models loaded
results = run_dbt()
assert len(results) == 7
check_relations_equal(
project.adapter, ["seed", "VIEW_MODEL", "INCREMENTAL", "MATERIALIZED", "GET_AND_REF"]
)


@@ -1,118 +0,0 @@
# mix in bigquery
# mix in snowflake
from pathlib import Path
import pytest
from dbt.tests.util import check_relations_equal, rm_file, run_dbt, write_file
from tests.functional.adapter.simple_copy.fixtures import (
_MODELS__ADVANCED_INCREMENTAL,
_MODELS__COMPOUND_SORT,
_MODELS__DISABLED,
_MODELS__EMPTY,
_MODELS__GET_AND_REF,
_MODELS__INCREMENTAL,
_MODELS__INTERLEAVED_SORT,
_MODELS__MATERIALIZED,
_MODELS__VIEW_MODEL,
_PROPERTIES__SCHEMA_YML,
_SEEDS__SEED_INITIAL,
_SEEDS__SEED_UPDATE,
)
class SimpleCopySetup:
@pytest.fixture(scope="class")
def models(self):
return {
"advanced_incremental.sql": _MODELS__ADVANCED_INCREMENTAL,
"compound_sort.sql": _MODELS__COMPOUND_SORT,
"disabled.sql": _MODELS__DISABLED,
"empty.sql": _MODELS__EMPTY,
"get_and_ref.sql": _MODELS__GET_AND_REF,
"incremental.sql": _MODELS__INCREMENTAL,
"interleaved_sort.sql": _MODELS__INTERLEAVED_SORT,
"materialized.sql": _MODELS__MATERIALIZED,
"view_model.sql": _MODELS__VIEW_MODEL,
}
@pytest.fixture(scope="class")
def properties(self):
return {
"schema.yml": _PROPERTIES__SCHEMA_YML,
}
@pytest.fixture(scope="class")
def seeds(self):
return {"seed.csv": _SEEDS__SEED_INITIAL}
@pytest.fixture(scope="class")
def project_config_update(self):
return {"seeds": {"quote_columns": False}}
class SimpleCopyBase(SimpleCopySetup):
def test_simple_copy(self, project):
# Load the seed file and check that it worked
results = run_dbt(["seed"])
assert len(results) == 1
# Run the project and ensure that all the models loaded
results = run_dbt()
assert len(results) == 7
check_relations_equal(
project.adapter, ["seed", "view_model", "incremental", "materialized", "get_and_ref"]
)
# Change the seed.csv file and see if everything is the same, i.e. everything has been updated
main_seed_file = project.project_root / Path("seeds") / Path("seed.csv")
rm_file(main_seed_file)
write_file(_SEEDS__SEED_UPDATE, main_seed_file)
results = run_dbt(["seed"])
assert len(results) == 1
results = run_dbt()
assert len(results) == 7
check_relations_equal(
project.adapter, ["seed", "view_model", "incremental", "materialized", "get_and_ref"]
)
def test_simple_copy_with_materialized_views(self, project):
project.run_sql(f"create table {project.test_schema}.unrelated_table (id int)")
sql = f"""
create materialized view {project.test_schema}.unrelated_materialized_view as (
select * from {project.test_schema}.unrelated_table
)
"""
project.run_sql(sql)
sql = f"""
create view {project.test_schema}.unrelated_view as (
select * from {project.test_schema}.unrelated_materialized_view
)
"""
project.run_sql(sql)
results = run_dbt(["seed"])
assert len(results) == 1
results = run_dbt()
assert len(results) == 7
class EmptyModelsArentRunBase(SimpleCopySetup):
def test_dbt_doesnt_run_empty_models(self, project):
results = run_dbt(["seed"])
assert len(results) == 1
results = run_dbt()
assert len(results) == 7
tables = project.get_tables_in_schema()
assert "empty" not in tables.keys()
assert "disabled" not in tables.keys()
class TestSimpleCopyBase(SimpleCopyBase):
pass
class TestEmptyModelsArentRun(EmptyModelsArentRunBase):
pass


@@ -1,99 +0,0 @@
#
# Macros
#
macros__schema_test = """
{% test column_type(model, column_name, type) %}
{% set cols = adapter.get_columns_in_relation(model) %}
{% set col_types = {} %}
{% for col in cols %}
{% do col_types.update({col.name: col.data_type}) %}
{% endfor %}
{% set validation_message = 'Got a column type of ' ~ col_types.get(column_name) ~ ', expected ' ~ type %}
{% set val = 0 if col_types.get(column_name) == type else 1 %}
{% if val == 1 and execute %}
{{ log(validation_message, info=True) }}
{% endif %}
select '{{ validation_message }}' as validation_error
from (select true) as nothing
where {{ val }} = 1
{% endtest %}
"""
#
# Models
#
models__downstream_from_seed_actual = """
select * from {{ ref('seed_actual') }}
"""
models__downstream_from_seed_pipe_separated = """
select * from {{ ref('seed_pipe_separated') }}
"""
models__from_basic_seed = """
select * from {{ this.schema }}.seed_expected
"""
#
# Properties
#
properties__schema_yml = """
version: 2
seeds:
- name: seed_enabled
columns:
- name: birthday
data_tests:
- column_type:
type: date
- name: seed_id
data_tests:
- column_type:
type: text
- name: seed_tricky
columns:
- name: seed_id
data_tests:
- column_type:
type: integer
- name: seed_id_str
data_tests:
- column_type:
type: text
- name: a_bool
data_tests:
- column_type:
type: boolean
- name: looks_like_a_bool
data_tests:
- column_type:
type: text
- name: a_date
data_tests:
- column_type:
type: timestamp without time zone
- name: looks_like_a_date
data_tests:
- column_type:
type: text
- name: relative
data_tests:
- column_type:
type: text
- name: weekday
data_tests:
- column_type:
type: text
"""


@@ -1,501 +0,0 @@
seed_id,first_name,email,ip_address,birthday
1,Larry,lking0@miitbeian.gov.cn,69.135.206.194,2008-09-12 19:08:31
2,Larry,lperkins1@toplist.cz,64.210.133.162,1978-05-09 04:15:14
3,Anna,amontgomery2@miitbeian.gov.cn,168.104.64.114,2011-10-16 04:07:57
4,Sandra,sgeorge3@livejournal.com,229.235.252.98,1973-07-19 10:52:43
5,Fred,fwoods4@google.cn,78.229.170.124,2012-09-30 16:38:29
6,Stephen,shanson5@livejournal.com,182.227.157.105,1995-11-07 21:40:50
7,William,wmartinez6@upenn.edu,135.139.249.50,1982-09-05 03:11:59
8,Jessica,jlong7@hao123.com,203.62.178.210,1991-10-16 11:03:15
9,Douglas,dwhite8@tamu.edu,178.187.247.1,1979-10-01 09:49:48
10,Lisa,lcoleman9@nydailynews.com,168.234.128.249,2011-05-26 07:45:49
11,Ralph,rfieldsa@home.pl,55.152.163.149,1972-11-18 19:06:11
12,Louise,lnicholsb@samsung.com,141.116.153.154,2014-11-25 20:56:14
13,Clarence,cduncanc@sfgate.com,81.171.31.133,2011-11-17 07:02:36
14,Daniel,dfranklind@omniture.com,8.204.211.37,1980-09-13 00:09:04
15,Katherine,klanee@auda.org.au,176.96.134.59,1997-08-22 19:36:56
16,Billy,bwardf@wikia.com,214.108.78.85,2003-10-19 02:14:47
17,Annie,agarzag@ocn.ne.jp,190.108.42.70,1988-10-28 15:12:35
18,Shirley,scolemanh@fastcompany.com,109.251.164.84,1988-08-24 10:50:57
19,Roger,rfrazieri@scribd.com,38.145.218.108,1985-12-31 15:17:15
20,Lillian,lstanleyj@goodreads.com,47.57.236.17,1970-06-08 02:09:05
21,Aaron,arodriguezk@nps.gov,205.245.118.221,1985-10-11 23:07:49
22,Patrick,pparkerl@techcrunch.com,19.8.100.182,2006-03-29 12:53:56
23,Phillip,pmorenom@intel.com,41.38.254.103,2011-11-07 15:35:43
24,Henry,hgarcian@newsvine.com,1.191.216.252,2008-08-28 08:30:44
25,Irene,iturnero@opera.com,50.17.60.190,1994-04-01 07:15:02
26,Andrew,adunnp@pen.io,123.52.253.176,2000-11-01 06:03:25
27,David,dgutierrezq@wp.com,238.23.203.42,1988-01-25 07:29:18
28,Henry,hsanchezr@cyberchimps.com,248.102.2.185,1983-01-01 13:36:37
29,Evelyn,epetersons@gizmodo.com,32.80.46.119,1979-07-16 17:24:12
30,Tammy,tmitchellt@purevolume.com,249.246.167.88,2001-04-03 10:00:23
31,Jacqueline,jlittleu@domainmarket.com,127.181.97.47,1986-02-11 21:35:50
32,Earl,eortizv@opera.com,166.47.248.240,1996-07-06 08:16:27
33,Juan,jgordonw@sciencedirect.com,71.77.2.200,1987-01-31 03:46:44
34,Diane,dhowellx@nyu.edu,140.94.133.12,1994-06-11 02:30:05
35,Randy,rkennedyy@microsoft.com,73.255.34.196,2005-05-26 20:28:39
36,Janice,jriveraz@time.com,22.214.227.32,1990-02-09 04:16:52
37,Laura,lperry10@diigo.com,159.148.145.73,2015-03-17 05:59:25
38,Gary,gray11@statcounter.com,40.193.124.56,1970-01-27 10:04:51
39,Jesse,jmcdonald12@typepad.com,31.7.86.103,2009-03-14 08:14:29
40,Sandra,sgonzalez13@goodreads.com,223.80.168.239,1993-05-21 14:08:54
41,Scott,smoore14@archive.org,38.238.46.83,1980-08-30 11:16:56
42,Phillip,pevans15@cisco.com,158.234.59.34,2011-12-15 23:26:31
43,Steven,sriley16@google.ca,90.247.57.68,2011-10-29 19:03:28
44,Deborah,dbrown17@hexun.com,179.125.143.240,1995-04-10 14:36:07
45,Lori,lross18@ow.ly,64.80.162.180,1980-12-27 16:49:15
46,Sean,sjackson19@tumblr.com,240.116.183.69,1988-06-12 21:24:45
47,Terry,tbarnes1a@163.com,118.38.213.137,1997-09-22 16:43:19
48,Dorothy,dross1b@ebay.com,116.81.76.49,2005-02-28 13:33:24
49,Samuel,swashington1c@house.gov,38.191.253.40,1989-01-19 21:15:48
50,Ralph,rcarter1d@tinyurl.com,104.84.60.174,2007-08-11 10:21:49
51,Wayne,whudson1e@princeton.edu,90.61.24.102,1983-07-03 16:58:12
52,Rose,rjames1f@plala.or.jp,240.83.81.10,1995-06-08 11:46:23
53,Louise,lcox1g@theglobeandmail.com,105.11.82.145,2016-09-19 14:45:51
54,Kenneth,kjohnson1h@independent.co.uk,139.5.45.94,1976-08-17 11:26:19
55,Donna,dbrown1i@amazon.co.uk,19.45.169.45,2006-05-27 16:51:40
56,Johnny,jvasquez1j@trellian.com,118.202.238.23,1975-11-17 08:42:32
57,Patrick,pramirez1k@tamu.edu,231.25.153.198,1997-08-06 11:51:09
58,Helen,hlarson1l@prweb.com,8.40.21.39,1993-08-04 19:53:40
59,Patricia,pspencer1m@gmpg.org,212.198.40.15,1977-08-03 16:37:27
60,Joseph,jspencer1n@marriott.com,13.15.63.238,2005-07-23 20:22:06
61,Phillip,pschmidt1o@blogtalkradio.com,177.98.201.190,1976-05-19 21:47:44
62,Joan,jwebb1p@google.ru,105.229.170.71,1972-09-07 17:53:47
63,Phyllis,pkennedy1q@imgur.com,35.145.8.244,2000-01-01 22:33:37
64,Katherine,khunter1r@smh.com.au,248.168.205.32,1991-01-09 06:40:24
65,Laura,lvasquez1s@wiley.com,128.129.115.152,1997-10-23 12:04:56
66,Juan,jdunn1t@state.gov,44.228.124.51,2004-11-10 05:07:35
67,Judith,jholmes1u@wiley.com,40.227.179.115,1977-08-02 17:01:45
68,Beverly,bbaker1v@wufoo.com,208.34.84.59,2016-03-06 20:07:23
69,Lawrence,lcarr1w@flickr.com,59.158.212.223,1988-09-13 06:07:21
70,Gloria,gwilliams1x@mtv.com,245.231.88.33,1995-03-18 22:32:46
71,Steven,ssims1y@cbslocal.com,104.50.58.255,2001-08-05 21:26:20
72,Betty,bmills1z@arstechnica.com,103.177.214.220,1981-12-14 21:26:54
73,Mildred,mfuller20@prnewswire.com,151.158.8.130,2000-04-19 10:13:55
74,Donald,dday21@icq.com,9.178.102.255,1972-12-03 00:58:24
75,Eric,ethomas22@addtoany.com,85.2.241.227,1992-11-01 05:59:30
76,Joyce,jarmstrong23@sitemeter.com,169.224.20.36,1985-10-24 06:50:01
77,Maria,mmartinez24@amazonaws.com,143.189.167.135,2005-10-05 05:17:42
78,Harry,hburton25@youtube.com,156.47.176.237,1978-03-26 05:53:33
79,Kevin,klawrence26@hao123.com,79.136.183.83,1994-10-12 04:38:52
80,David,dhall27@prweb.com,133.149.172.153,1976-12-15 16:24:24
81,Kathy,kperry28@twitter.com,229.242.72.228,1979-03-04 02:58:56
82,Adam,aprice29@elegantthemes.com,13.145.21.10,1982-11-07 11:46:59
83,Brandon,bgriffin2a@va.gov,73.249.128.212,2013-10-30 05:30:36
84,Henry,hnguyen2b@discovery.com,211.36.214.242,1985-01-09 06:37:27
85,Eric,esanchez2c@edublogs.org,191.166.188.251,2004-05-01 23:21:42
86,Jason,jlee2d@jimdo.com,193.92.16.182,1973-01-08 09:05:39
87,Diana,drichards2e@istockphoto.com,19.130.175.245,1994-10-05 22:50:49
88,Andrea,awelch2f@abc.net.au,94.155.233.96,2002-04-26 08:41:44
89,Louis,lwagner2g@miitbeian.gov.cn,26.217.34.111,2003-08-25 07:56:39
90,Jane,jsims2h@seesaa.net,43.4.220.135,1987-03-20 20:39:04
91,Larry,lgrant2i@si.edu,97.126.79.34,2000-09-07 20:26:19
92,Louis,ldean2j@prnewswire.com,37.148.40.127,2011-09-16 20:12:14
93,Jennifer,jcampbell2k@xing.com,38.106.254.142,1988-07-15 05:06:49
94,Wayne,wcunningham2l@google.com.hk,223.28.26.187,2009-12-15 06:16:54
95,Lori,lstevens2m@icq.com,181.250.181.58,1984-10-28 03:29:19
96,Judy,jsimpson2n@marriott.com,180.121.239.219,1986-02-07 15:18:10
97,Phillip,phoward2o@usa.gov,255.247.0.175,2002-12-26 08:44:45
98,Gloria,gwalker2p@usa.gov,156.140.7.128,1997-10-04 07:58:58
99,Paul,pjohnson2q@umn.edu,183.59.198.197,1991-11-14 12:33:55
100,Frank,fgreene2r@blogspot.com,150.143.68.121,2010-06-12 23:55:39
101,Deborah,dknight2s@reverbnation.com,222.131.211.191,1970-07-08 08:54:23
102,Sandra,sblack2t@tripadvisor.com,254.183.128.254,2000-04-12 02:39:36
103,Edward,eburns2u@dailymotion.com,253.89.118.18,1993-10-10 10:54:01
104,Anthony,ayoung2v@ustream.tv,118.4.193.176,1978-08-26 17:07:29
105,Donald,dlawrence2w@wp.com,139.200.159.227,2007-07-21 20:56:20
106,Matthew,mfreeman2x@google.fr,205.26.239.92,2014-12-05 17:05:39
107,Sean,ssanders2y@trellian.com,143.89.82.108,1993-07-14 21:45:02
108,Sharon,srobinson2z@soundcloud.com,66.234.247.54,1977-04-06 19:07:03
109,Jennifer,jwatson30@t-online.de,196.102.127.7,1998-03-07 05:12:23
110,Clarence,cbrooks31@si.edu,218.93.234.73,2002-11-06 17:22:25
111,Jose,jflores32@goo.gl,185.105.244.231,1995-01-05 06:32:21
112,George,glee33@adobe.com,173.82.249.196,2015-01-04 02:47:46
113,Larry,lhill34@linkedin.com,66.5.206.195,2010-11-02 10:21:17
114,Marie,mmeyer35@mysql.com,151.152.88.107,1990-05-22 20:52:51
115,Clarence,cwebb36@skype.com,130.198.55.217,1972-10-27 07:38:54
116,Sarah,scarter37@answers.com,80.89.18.153,1971-08-24 19:29:30
117,Henry,hhughes38@webeden.co.uk,152.60.114.174,1973-01-27 09:00:42
118,Teresa,thenry39@hao123.com,32.187.239.106,2015-11-06 01:48:44
119,Billy,bgutierrez3a@sun.com,52.37.70.134,2002-03-19 03:20:19
120,Anthony,agibson3b@github.io,154.251.232.213,1991-04-19 01:08:15
121,Sandra,sromero3c@wikia.com,44.124.171.2,1998-09-06 20:30:34
122,Paula,pandrews3d@blogs.com,153.142.118.226,2003-06-24 16:31:24
123,Terry,tbaker3e@csmonitor.com,99.120.45.219,1970-12-09 23:57:21
124,Lois,lwilson3f@reuters.com,147.44.171.83,1971-01-09 22:28:51
125,Sara,smorgan3g@nature.com,197.67.192.230,1992-01-28 20:33:24
126,Charles,ctorres3h@china.com.cn,156.115.216.2,1993-10-02 19:36:34
127,Richard,ralexander3i@marriott.com,248.235.180.59,1999-02-03 18:40:55
128,Christina,charper3j@cocolog-nifty.com,152.114.116.129,1978-09-13 00:37:32
129,Steve,sadams3k@economist.com,112.248.91.98,2004-03-21 09:07:43
130,Katherine,krobertson3l@ow.ly,37.220.107.28,1977-03-18 19:28:50
131,Donna,dgibson3m@state.gov,222.218.76.221,1999-02-01 06:46:16
132,Christina,cwest3n@mlb.com,152.114.6.160,1979-12-24 15:30:35
133,Sandra,swillis3o@meetup.com,180.71.49.34,1984-09-27 08:05:54
134,Clarence,cedwards3p@smugmug.com,10.64.180.186,1979-04-16 16:52:10
135,Ruby,rjames3q@wp.com,98.61.54.20,2007-01-13 14:25:52
136,Sarah,smontgomery3r@tripod.com,91.45.164.172,2009-07-25 04:34:30
137,Sarah,soliver3s@eventbrite.com,30.106.39.146,2012-05-09 22:12:33
138,Deborah,dwheeler3t@biblegateway.com,59.105.213.173,1999-11-09 08:08:44
139,Deborah,dray3u@i2i.jp,11.108.186.217,2014-02-04 03:15:19
140,Paul,parmstrong3v@alexa.com,6.250.59.43,2009-12-21 10:08:53
141,Aaron,abishop3w@opera.com,207.145.249.62,1996-04-25 23:20:23
142,Henry,hsanders3x@google.ru,140.215.203.171,2012-01-29 11:52:32
143,Anne,aanderson3y@1688.com,74.150.102.118,1982-04-03 13:46:17
144,Victor,vmurphy3z@hugedomains.com,222.155.99.152,1987-11-03 19:58:41
145,Evelyn,ereid40@pbs.org,249.122.33.117,1977-12-14 17:09:57
146,Brian,bgonzalez41@wikia.com,246.254.235.141,1991-02-24 00:45:58
147,Sandra,sgray42@squarespace.com,150.73.28.159,1972-07-28 17:26:32
148,Alice,ajones43@a8.net,78.253.12.177,2002-12-05 16:57:46
149,Jessica,jhanson44@mapquest.com,87.229.30.160,1994-01-30 11:40:04
150,Louise,lbailey45@reuters.com,191.219.31.101,2011-09-07 21:11:45
151,Christopher,cgonzalez46@printfriendly.com,83.137.213.239,1984-10-24 14:58:04
152,Gregory,gcollins47@yandex.ru,28.176.10.115,1998-07-25 17:17:10
153,Jane,jperkins48@usnews.com,46.53.164.159,1979-08-19 15:25:00
154,Phyllis,plong49@yahoo.co.jp,208.140.88.2,1985-07-06 02:16:36
155,Adam,acarter4a@scribd.com,78.48.148.204,2005-07-20 03:31:09
156,Frank,fweaver4b@angelfire.com,199.180.255.224,2011-03-04 23:07:54
157,Ronald,rmurphy4c@cloudflare.com,73.42.97.231,1991-01-11 10:39:41
158,Richard,rmorris4d@e-recht24.de,91.9.97.223,2009-01-17 21:05:15
159,Rose,rfoster4e@woothemes.com,203.169.53.16,1991-04-21 02:09:38
160,George,ggarrett4f@uiuc.edu,186.61.5.167,1989-11-11 11:29:42
161,Victor,vhamilton4g@biblegateway.com,121.229.138.38,2012-06-22 18:01:23
162,Mark,mbennett4h@businessinsider.com,209.184.29.203,1980-04-16 15:26:34
163,Martin,mwells4i@ifeng.com,97.223.55.105,2010-05-26 14:08:18
164,Diana,dstone4j@google.ru,90.155.52.47,2013-02-11 00:14:54
165,Walter,wferguson4k@blogger.com,30.63.212.44,1986-02-20 17:46:46
166,Denise,dcoleman4l@vistaprint.com,10.209.153.77,1992-05-13 20:14:14
167,Philip,pknight4m@xing.com,15.28.135.167,2000-09-11 18:41:13
168,Russell,rcarr4n@youtube.com,113.55.165.50,2008-07-10 17:49:27
169,Donna,dburke4o@dion.ne.jp,70.0.105.111,1992-02-10 17:24:58
170,Anne,along4p@squidoo.com,36.154.58.107,2012-08-19 23:35:31
171,Clarence,cbanks4q@webeden.co.uk,94.57.53.114,1972-03-11 21:46:44
172,Betty,bbowman4r@cyberchimps.com,178.115.209.69,2013-01-13 21:34:51
173,Andrew,ahudson4s@nytimes.com,84.32.252.144,1998-09-15 14:20:04
174,Keith,kgordon4t@cam.ac.uk,189.237.211.102,2009-01-22 05:34:38
175,Patrick,pwheeler4u@mysql.com,47.22.117.226,1984-09-05 22:33:15
176,Jesse,jfoster4v@mapquest.com,229.95.131.46,1990-01-20 12:19:15
177,Arthur,afisher4w@jugem.jp,107.255.244.98,1983-10-13 11:08:46
178,Nicole,nryan4x@wsj.com,243.211.33.221,1974-05-30 23:19:14
179,Bruce,bjohnson4y@sfgate.com,17.41.200.101,1992-09-23 02:02:19
180,Terry,tcox4z@reference.com,20.189.120.106,1982-02-13 12:43:14
181,Ashley,astanley50@kickstarter.com,86.3.56.98,1976-05-09 01:27:16
182,Michael,mrivera51@about.me,72.118.249.0,1971-11-11 17:28:37
183,Steven,sgonzalez52@mozilla.org,169.112.247.47,2002-08-24 14:59:25
184,Kathleen,kfuller53@bloglovin.com,80.93.59.30,2002-03-11 13:41:29
185,Nicole,nhenderson54@usda.gov,39.253.60.30,1995-04-24 05:55:07
186,Ralph,rharper55@purevolume.com,167.147.142.189,1980-02-10 18:35:45
187,Heather,hcunningham56@photobucket.com,96.222.196.229,2007-06-15 05:37:50
188,Nancy,nlittle57@cbc.ca,241.53.255.175,2007-07-12 23:42:48
189,Juan,jramirez58@pinterest.com,190.128.84.27,1978-11-07 23:37:37
190,Beverly,bfowler59@chronoengine.com,54.144.230.49,1979-03-31 23:27:28
191,Shirley,sstevens5a@prlog.org,200.97.231.248,2011-12-06 07:08:50
192,Annie,areyes5b@squidoo.com,223.32.182.101,2011-05-28 02:42:09
193,Jack,jkelley5c@tiny.cc,47.34.118.150,1981-12-05 17:31:40
194,Keith,krobinson5d@1und1.de,170.210.209.31,1999-03-09 11:05:43
195,Joseph,jmiller5e@google.com.au,136.74.212.139,1984-10-08 13:18:20
196,Annie,aday5f@blogspot.com,71.99.186.69,1986-02-18 12:27:34
197,Nancy,nperez5g@liveinternet.ru,28.160.6.107,1983-10-20 17:51:20
198,Tammy,tward5h@ucoz.ru,141.43.164.70,1980-03-31 04:45:29
199,Doris,dryan5i@ted.com,239.117.202.188,1985-07-03 03:17:53
200,Rose,rmendoza5j@photobucket.com,150.200.206.79,1973-04-21 21:36:40
201,Cynthia,cbutler5k@hubpages.com,80.153.174.161,2001-01-20 01:42:26
202,Samuel,soliver5l@people.com.cn,86.127.246.140,1970-09-02 02:19:00
203,Carl,csanchez5m@mysql.com,50.149.237.107,1993-12-01 07:02:09
204,Kathryn,kowens5n@geocities.jp,145.166.205.201,2004-07-06 18:39:33
205,Nicholas,nnichols5o@parallels.com,190.240.66.170,2014-11-11 18:52:19
206,Keith,kwillis5p@youtube.com,181.43.206.100,1998-06-13 06:30:51
207,Justin,jwebb5q@intel.com,211.54.245.74,2000-11-04 16:58:26
208,Gary,ghicks5r@wikipedia.org,196.154.213.104,1992-12-01 19:48:28
209,Martin,mpowell5s@flickr.com,153.67.12.241,1983-06-30 06:24:32
210,Brenda,bkelley5t@xinhuanet.com,113.100.5.172,2005-01-08 20:50:22
211,Edward,eray5u@a8.net,205.187.246.65,2011-09-26 08:04:44
212,Steven,slawson5v@senate.gov,238.150.250.36,1978-11-22 02:48:09
213,Robert,rthompson5w@furl.net,70.7.89.236,2001-09-12 08:52:07
214,Jack,jporter5x@diigo.com,220.172.29.99,1976-07-26 14:29:21
215,Lisa,ljenkins5y@oakley.com,150.151.170.180,2010-03-20 19:21:16
216,Theresa,tbell5z@mayoclinic.com,247.25.53.173,2001-03-11 05:36:40
217,Jimmy,jstephens60@weather.com,145.101.93.235,1983-04-12 09:35:30
218,Louis,lhunt61@amazon.co.jp,78.137.6.253,1997-08-29 19:34:34
219,Lawrence,lgilbert62@ted.com,243.132.8.78,2015-04-08 22:06:56
220,David,dgardner63@4shared.com,204.40.46.136,1971-07-09 03:29:11
221,Charles,ckennedy64@gmpg.org,211.83.233.2,2011-02-26 11:55:04
222,Lillian,lbanks65@msu.edu,124.233.12.80,2010-05-16 20:29:02
223,Ernest,enguyen66@baidu.com,82.45.128.148,1996-07-04 10:07:04
224,Ryan,rrussell67@cloudflare.com,202.53.240.223,1983-08-05 12:36:29
225,Donald,ddavis68@ustream.tv,47.39.218.137,1989-05-27 02:30:56
226,Joe,jscott69@blogspot.com,140.23.131.75,1973-03-16 12:21:31
227,Anne,amarshall6a@google.ca,113.162.200.197,1988-12-09 03:38:29
228,Willie,wturner6b@constantcontact.com,85.83.182.249,1991-10-06 01:51:10
229,Nicole,nwilson6c@sogou.com,30.223.51.135,1977-05-29 19:54:56
230,Janet,jwheeler6d@stumbleupon.com,153.194.27.144,2011-03-13 12:48:47
231,Lois,lcarr6e@statcounter.com,0.41.36.53,1993-02-06 04:52:01
232,Shirley,scruz6f@tmall.com,37.156.39.223,2007-02-18 17:47:01
233,Patrick,pford6g@reverbnation.com,36.198.200.89,1977-03-06 15:47:24
234,Lisa,lhudson6h@usatoday.com,134.213.58.137,2014-10-28 01:56:56
235,Pamela,pmartinez6i@opensource.org,5.151.127.202,1987-11-30 16:44:47
236,Larry,lperez6j@infoseek.co.jp,235.122.96.148,1979-01-18 06:33:45
237,Pamela,pramirez6k@census.gov,138.233.34.163,2012-01-29 10:35:20
238,Daniel,dcarr6l@php.net,146.21.152.242,1984-11-17 08:22:59
239,Patrick,psmith6m@indiegogo.com,136.222.199.36,2001-05-30 22:16:44
240,Raymond,rhenderson6n@hc360.com,116.31.112.38,2000-01-05 20:35:41
241,Teresa,treynolds6o@miitbeian.gov.cn,198.126.205.220,1996-11-08 01:27:31
242,Johnny,jmason6p@flickr.com,192.8.232.114,2013-05-14 05:35:50
243,Angela,akelly6q@guardian.co.uk,234.116.60.197,1977-08-20 02:05:17
244,Douglas,dcole6r@cmu.edu,128.135.212.69,2016-10-26 17:40:36
245,Frances,fcampbell6s@twitpic.com,94.22.243.235,1987-04-26 07:07:13
246,Donna,dgreen6t@chron.com,227.116.46.107,2011-07-25 12:59:54
247,Benjamin,bfranklin6u@redcross.org,89.141.142.89,1974-05-03 20:28:18
248,Randy,rpalmer6v@rambler.ru,70.173.63.178,2011-12-20 17:40:18
249,Melissa,mmurray6w@bbb.org,114.234.118.137,1991-02-26 12:45:44
250,Jean,jlittle6x@epa.gov,141.21.163.254,1991-08-16 04:57:09
251,Daniel,dolson6y@nature.com,125.75.104.97,2010-04-23 06:25:54
252,Kathryn,kwells6z@eventbrite.com,225.104.28.249,2015-01-31 02:21:50
253,Theresa,tgonzalez70@ox.ac.uk,91.93.156.26,1971-12-11 10:31:31
254,Beverly,broberts71@bluehost.com,244.40.158.89,2013-09-21 13:02:31
255,Pamela,pmurray72@netscape.com,218.54.95.216,1985-04-16 00:34:00
256,Timothy,trichardson73@amazonaws.com,235.49.24.229,2000-11-11 09:48:28
257,Mildred,mpalmer74@is.gd,234.125.95.132,1992-05-25 02:25:02
258,Jessica,jcampbell75@google.it,55.98.30.140,2014-08-26 00:26:34
259,Beverly,bthomas76@cpanel.net,48.78.228.176,1970-08-18 10:40:05
260,Eugene,eward77@cargocollective.com,139.226.204.2,1996-12-04 23:17:00
261,Andrea,aallen78@webnode.com,160.31.214.38,2009-07-06 07:22:37
262,Justin,jruiz79@merriam-webster.com,150.149.246.122,2005-06-06 11:44:19
263,Kenneth,kedwards7a@networksolutions.com,98.82.193.128,2001-07-03 02:00:10
264,Rachel,rday7b@miibeian.gov.cn,114.15.247.221,1994-08-18 19:45:40
265,Russell,rmiller7c@instagram.com,184.130.152.253,1977-11-06 01:58:12
266,Bonnie,bhudson7d@cornell.edu,235.180.186.206,1990-12-03 22:45:24
267,Raymond,rknight7e@yandex.ru,161.2.44.252,1995-08-25 04:31:19
268,Bonnie,brussell7f@elpais.com,199.237.57.207,1991-03-29 08:32:06
269,Marie,mhenderson7g@elpais.com,52.203.131.144,2004-06-04 21:50:28
270,Alan,acarr7h@trellian.com,147.51.205.72,2005-03-03 10:51:31
271,Barbara,bturner7i@hugedomains.com,103.160.110.226,2004-08-04 13:42:40
272,Christina,cdaniels7j@census.gov,0.238.61.251,1972-10-18 12:47:33
273,Jeremy,jgomez7k@reuters.com,111.26.65.56,2013-01-13 10:41:35
274,Laura,lwood7l@icio.us,149.153.38.205,2011-06-25 09:33:59
275,Matthew,mbowman7m@auda.org.au,182.138.206.172,1999-03-05 03:25:36
276,Denise,dparker7n@icq.com,0.213.88.138,2011-11-04 09:43:06
277,Phillip,pparker7o@discuz.net,219.242.165.240,1973-10-19 04:22:29
278,Joan,jpierce7p@salon.com,63.31.213.202,1989-04-09 22:06:24
279,Irene,ibaker7q@cbc.ca,102.33.235.114,1992-09-04 13:00:57
280,Betty,bbowman7r@ted.com,170.91.249.242,2015-09-28 08:14:22
281,Teresa,truiz7s@boston.com,82.108.158.207,1999-07-18 05:17:09
282,Helen,hbrooks7t@slideshare.net,102.87.162.187,2003-01-06 15:45:29
283,Karen,kgriffin7u@wunderground.com,43.82.44.184,2010-05-28 01:56:37
284,Lisa,lfernandez7v@mtv.com,200.238.218.220,1993-04-03 20:33:51
285,Jesse,jlawrence7w@timesonline.co.uk,95.122.105.78,1990-01-05 17:28:43
286,Terry,tross7x@macromedia.com,29.112.114.133,2009-08-29 21:32:17
287,Angela,abradley7y@icq.com,177.44.27.72,1989-10-04 21:46:06
288,Maria,mhart7z@dailymotion.com,55.27.55.202,1975-01-21 01:22:57
289,Raymond,randrews80@pinterest.com,88.90.78.67,1992-03-16 21:37:40
290,Kathy,krice81@bluehost.com,212.63.196.102,2000-12-14 03:06:44
291,Cynthia,cramos82@nymag.com,107.89.190.6,2005-06-28 02:02:33
292,Kimberly,kjones83@mysql.com,86.169.101.101,2007-06-13 22:56:49
293,Timothy,thansen84@microsoft.com,108.100.254.90,2003-04-04 10:31:57
294,Carol,cspencer85@berkeley.edu,75.118.144.187,1999-03-30 14:53:21
295,Louis,lmedina86@latimes.com,141.147.163.24,1991-04-11 17:53:13
296,Margaret,mcole87@google.fr,53.184.26.83,1991-12-19 01:54:10
297,Mary,mgomez88@yellowpages.com,208.56.57.99,1976-05-21 18:05:08
298,Amanda,aanderson89@geocities.com,147.73.15.252,1987-08-22 15:05:28
299,Kathryn,kgarrett8a@nature.com,27.29.177.220,1976-07-15 04:25:04
300,Dorothy,dmason8b@shareasale.com,106.210.99.193,1990-09-03 21:39:31
301,Lois,lkennedy8c@amazon.de,194.169.29.187,2007-07-29 14:09:31
302,Irene,iburton8d@washingtonpost.com,196.143.110.249,2013-09-05 11:32:46
303,Betty,belliott8e@wired.com,183.105.222.199,1979-09-19 19:29:13
304,Bobby,bmeyer8f@census.gov,36.13.161.145,2014-05-24 14:34:39
305,Ann,amorrison8g@sfgate.com,72.154.54.137,1978-10-05 14:22:34
306,Daniel,djackson8h@wunderground.com,144.95.32.34,1990-07-27 13:23:05
307,Joe,jboyd8i@alibaba.com,187.105.86.178,2011-09-28 16:46:32
308,Ralph,rdunn8j@fc2.com,3.19.87.255,1984-10-18 08:00:40
309,Craig,ccarter8k@gizmodo.com,235.152.76.215,1998-07-04 12:15:21
310,Paula,pdean8l@hhs.gov,161.100.173.197,1973-02-13 09:38:55
311,Andrew,agarrett8m@behance.net,199.253.123.218,1991-02-14 13:36:32
312,Janet,jhowell8n@alexa.com,39.189.139.79,2012-11-24 20:17:33
313,Keith,khansen8o@godaddy.com,116.186.223.196,1987-08-23 21:22:05
314,Nicholas,nedwards8p@state.gov,142.175.142.11,1977-03-28 18:27:27
315,Jacqueline,jallen8q@oaic.gov.au,189.66.135.192,1994-10-26 11:44:26
316,Frank,fgardner8r@mapy.cz,154.77.119.169,1983-01-29 19:19:51
317,Eric,eharrison8s@google.cn,245.139.65.123,1984-02-04 09:54:36
318,Gregory,gcooper8t@go.com,171.147.0.221,2004-06-14 05:22:08
319,Jean,jfreeman8u@rakuten.co.jp,67.243.121.5,1977-01-07 18:23:43
320,Juan,jlewis8v@shinystat.com,216.181.171.189,2001-08-23 17:32:43
321,Randy,rwilliams8w@shinystat.com,105.152.146.28,1983-02-17 00:05:50
322,Stephen,shart8x@sciencedirect.com,196.131.205.148,2004-02-15 10:12:03
323,Annie,ahunter8y@example.com,63.36.34.103,2003-07-23 21:15:25
324,Melissa,mflores8z@cbc.ca,151.230.217.90,1983-11-02 14:53:56
325,Jane,jweaver90@about.me,0.167.235.217,1987-07-29 00:13:44
326,Anthony,asmith91@oracle.com,97.87.48.41,2001-05-31 18:44:11
327,Terry,tdavis92@buzzfeed.com,46.20.12.51,2015-09-12 23:13:55
328,Brandon,bmontgomery93@gravatar.com,252.101.48.186,2010-10-28 08:26:27
329,Chris,cmurray94@bluehost.com,25.158.167.97,2004-05-05 16:10:31
330,Denise,dfuller95@hugedomains.com,216.210.149.28,1979-04-20 08:57:24
331,Arthur,amcdonald96@sakura.ne.jp,206.42.36.213,2009-08-15 03:26:16
332,Jesse,jhoward97@google.cn,46.181.118.30,1974-04-18 14:08:41
333,Frank,fsimpson98@domainmarket.com,163.220.211.87,2006-06-30 14:46:52
334,Janice,jwoods99@pen.io,229.245.237.182,1988-04-06 11:52:58
335,Rebecca,rroberts9a@huffingtonpost.com,148.96.15.80,1976-10-05 08:44:16
336,Joshua,jray9b@opensource.org,192.253.12.198,1971-12-25 22:27:07
337,Joyce,jcarpenter9c@statcounter.com,125.171.46.215,2001-12-31 22:08:13
338,Andrea,awest9d@privacy.gov.au,79.101.180.201,1983-02-18 20:07:47
339,Christine,chudson9e@yelp.com,64.198.43.56,1997-09-08 08:03:43
340,Joe,jparker9f@earthlink.net,251.215.148.153,1973-11-04 05:08:18
341,Thomas,tkim9g@answers.com,49.187.34.47,1991-08-07 21:13:48
342,Janice,jdean9h@scientificamerican.com,4.197.117.16,2009-12-08 02:35:49
343,James,jmitchell9i@umich.edu,43.121.18.147,2011-04-28 17:04:09
344,Charles,cgardner9j@purevolume.com,197.78.240.240,1998-02-11 06:47:07
345,Robert,rhenderson9k@friendfeed.com,215.84.180.88,2002-05-10 15:33:14
346,Chris,cgray9l@4shared.com,249.70.192.240,1998-10-03 16:43:42
347,Gloria,ghayes9m@hibu.com,81.103.138.26,1999-12-26 11:23:13
348,Edward,eramirez9n@shareasale.com,38.136.90.136,2010-08-19 08:01:06
349,Cheryl,cbutler9o@google.ca,172.180.78.172,1995-05-27 20:03:52
350,Margaret,mwatkins9p@sfgate.com,3.20.198.6,2014-10-21 01:42:58
351,Rebecca,rwelch9q@examiner.com,45.81.42.208,2001-02-08 12:19:06
352,Joe,jpalmer9r@phpbb.com,163.202.92.190,1970-01-05 11:29:12
353,Sandra,slewis9s@dyndns.org,77.215.201.236,1974-01-05 07:04:04
354,Todd,tfranklin9t@g.co,167.125.181.82,2009-09-28 10:13:58
355,Joseph,jlewis9u@webmd.com,244.204.6.11,1990-10-21 15:49:57
356,Alan,aknight9v@nydailynews.com,152.197.95.83,1996-03-08 08:43:17
357,Sharon,sdean9w@123-reg.co.uk,237.46.40.26,1985-11-30 12:09:24
358,Annie,awright9x@cafepress.com,190.45.231.111,2000-08-24 11:56:06
359,Diane,dhamilton9y@youtube.com,85.146.171.196,2015-02-24 02:03:57
360,Antonio,alane9z@auda.org.au,61.63.146.203,2001-05-13 03:43:34
361,Matthew,mallena0@hhs.gov,29.97.32.19,1973-02-19 23:43:32
362,Bonnie,bfowlera1@soup.io,251.216.99.53,2013-08-01 15:35:41
363,Margaret,mgraya2@examiner.com,69.255.151.79,1998-01-23 22:24:59
364,Joan,jwagnera3@printfriendly.com,192.166.120.61,1973-07-13 00:30:22
365,Catherine,cperkinsa4@nytimes.com,58.21.24.214,2006-11-19 11:52:26
366,Mark,mcartera5@cpanel.net,220.33.102.142,2007-09-09 09:43:27
367,Paula,ppricea6@msn.com,36.182.238.124,2009-11-11 09:13:05
368,Catherine,cgreena7@army.mil,228.203.58.19,2005-08-09 16:52:15
369,Helen,hhamiltona8@symantec.com,155.56.194.99,2005-02-01 05:40:36
370,Jane,jmeyera9@ezinearticles.com,133.244.113.213,2013-11-06 22:10:23
371,Wanda,wevansaa@bloglovin.com,233.125.192.48,1994-12-26 23:43:42
372,Mark,mmarshallab@tumblr.com,114.74.60.47,2016-09-29 18:03:01
373,Andrew,amartinezac@google.cn,182.54.37.130,1976-06-06 17:04:17
374,Helen,hmoralesad@e-recht24.de,42.45.4.123,1977-03-28 19:06:59
375,Bonnie,bstoneae@php.net,196.149.79.137,1970-02-05 17:05:58
376,Douglas,dfreemanaf@nasa.gov,215.65.124.218,2008-11-20 21:51:55
377,Willie,wwestag@army.mil,35.189.92.118,1992-07-24 05:08:08
378,Cheryl,cwagnerah@upenn.edu,228.239.222.141,2010-01-25 06:29:01
379,Sandra,swardai@baidu.com,63.11.113.240,1985-05-23 08:07:37
380,Julie,jrobinsonaj@jugem.jp,110.58.202.50,2015-03-05 09:42:07
381,Larry,lwagnerak@shop-pro.jp,98.234.25.24,1975-07-22 22:22:02
382,Juan,jcastilloal@yelp.com,24.174.74.202,2007-01-17 09:32:43
383,Donna,dfrazieram@artisteer.com,205.26.147.45,1990-02-11 20:55:46
384,Rachel,rfloresan@w3.org,109.60.216.162,1983-05-22 22:42:18
385,Robert,rreynoldsao@theguardian.com,122.65.209.130,2009-05-01 18:02:51
386,Donald,dbradleyap@etsy.com,42.54.35.126,1997-01-16 16:31:52
387,Rachel,rfisheraq@nih.gov,160.243.250.45,2006-02-17 22:05:49
388,Nicholas,nhamiltonar@princeton.edu,156.211.37.111,1976-06-21 03:36:29
389,Timothy,twhiteas@ca.gov,36.128.23.70,1975-09-24 03:51:18
390,Diana,dbradleyat@odnoklassniki.ru,44.102.120.184,1983-04-27 09:02:50
391,Billy,bfowlerau@jimdo.com,91.200.68.196,1995-01-29 06:57:35
392,Bruce,bandrewsav@ucoz.com,48.12.101.125,1992-10-27 04:31:39
393,Linda,lromeroaw@usa.gov,100.71.233.19,1992-06-08 15:13:18
394,Debra,dwatkinsax@ucoz.ru,52.160.233.193,2001-11-11 06:51:01
395,Katherine,kburkeay@wix.com,151.156.242.141,2010-06-14 19:54:28
396,Martha,mharrisonaz@youku.com,21.222.10.199,1989-10-16 14:17:55
397,Dennis,dwellsb0@youtu.be,103.16.29.3,1985-12-21 06:05:51
398,Gloria,grichardsb1@bloglines.com,90.147.120.234,1982-08-27 01:04:43
399,Brenda,bfullerb2@t.co,33.253.63.90,2011-04-20 05:00:35
400,Larry,lhendersonb3@disqus.com,88.95.132.128,1982-08-31 02:15:12
401,Richard,rlarsonb4@wisc.edu,13.48.231.150,1979-04-15 14:08:09
402,Terry,thuntb5@usa.gov,65.91.103.240,1998-05-15 11:50:49
403,Harry,hburnsb6@nasa.gov,33.38.21.244,1981-04-12 14:02:20
404,Diana,dellisb7@mlb.com,218.229.81.135,1997-01-29 00:17:25
405,Jack,jburkeb8@tripadvisor.com,210.227.182.216,1984-03-09 17:24:03
406,Julia,jlongb9@fotki.com,10.210.12.104,2005-10-26 03:54:13
407,Lois,lscottba@msu.edu,188.79.136.138,1973-02-02 18:40:39
408,Sandra,shendersonbb@shareasale.com,114.171.220.108,2012-06-09 18:22:26
409,Irene,isanchezbc@cdbaby.com,109.255.50.119,1983-09-28 21:11:27
410,Emily,ebrooksbd@bandcamp.com,227.81.93.79,1970-08-31 21:08:01
411,Michelle,mdiazbe@businessweek.com,236.249.6.226,1993-05-22 08:07:07
412,Tammy,tbennettbf@wisc.edu,145.253.239.152,1978-12-31 20:24:51
413,Christine,cgreenebg@flickr.com,97.25.140.118,1978-07-17 12:55:30
414,Patricia,pgarzabh@tuttocitta.it,139.246.192.211,1984-02-27 13:40:08
415,Kimberly,kromerobi@aol.com,73.56.88.247,1976-09-16 14:22:04
416,George,gjohnstonbj@fda.gov,240.36.245.185,1979-07-24 14:36:02
417,Eugene,efullerbk@sciencedaily.com,42.38.105.140,2012-09-12 01:56:41
418,Andrea,astevensbl@goo.gl,31.152.207.204,1979-05-24 11:06:21
419,Shirley,sreidbm@scientificamerican.com,103.60.31.241,1984-02-23 04:07:41
420,Terry,tmorenobn@blinklist.com,92.161.34.42,1994-06-25 14:01:35
421,Christopher,cmorenobo@go.com,158.86.176.82,1973-09-05 09:18:47
422,Dennis,dhansonbp@ning.com,40.160.81.75,1982-01-20 10:19:41
423,Beverly,brussellbq@de.vu,138.32.56.204,1997-11-06 07:20:19
424,Howard,hparkerbr@163.com,103.171.134.171,2015-06-24 15:37:10
425,Helen,hmccoybs@fema.gov,61.200.4.71,1995-06-20 08:59:10
426,Ann,ahudsonbt@cafepress.com,239.187.71.125,1977-04-11 07:59:28
427,Tina,twestbu@nhs.uk,80.213.117.74,1992-08-19 05:54:44
428,Terry,tnguyenbv@noaa.gov,21.93.118.95,1991-09-19 23:22:55
429,Ashley,aburtonbw@wix.com,233.176.205.109,2009-11-10 05:01:20
430,Eric,emyersbx@1und1.de,168.91.212.67,1987-08-10 07:16:20
431,Barbara,blittleby@lycos.com,242.14.189.239,2008-08-02 12:13:04
432,Sean,sevansbz@instagram.com,14.39.177.13,2007-04-16 17:28:49
433,Shirley,sburtonc0@newsvine.com,34.107.138.76,1980-12-10 02:19:29
434,Patricia,pfreemanc1@so-net.ne.jp,219.213.142.117,1987-03-01 02:25:45
435,Paula,pfosterc2@vkontakte.ru,227.14.138.141,1972-09-22 12:59:34
436,Nicole,nstewartc3@1688.com,8.164.23.115,1998-10-27 00:10:17
437,Earl,ekimc4@ovh.net,100.26.244.177,2013-01-22 10:05:46
438,Beverly,breedc5@reuters.com,174.12.226.27,1974-09-22 07:29:36
439,Lawrence,lbutlerc6@a8.net,105.164.42.164,1992-06-05 00:43:40
440,Charles,cmoorec7@ucoz.com,252.197.131.69,1990-04-09 02:34:05
441,Alice,alawsonc8@live.com,183.73.220.232,1989-02-28 09:11:04
442,Dorothy,dcarpenterc9@arstechnica.com,241.47.200.14,2005-05-02 19:57:21
443,Carolyn,cfowlerca@go.com,213.109.55.202,1978-09-10 20:18:20
444,Anthony,alongcb@free.fr,169.221.158.204,1984-09-13 01:59:23
445,Annie,amoorecc@e-recht24.de,50.34.148.61,2009-03-26 03:41:07
446,Carlos,candrewscd@ihg.com,236.69.59.212,1972-03-29 22:42:48
447,Beverly,bramosce@google.ca,164.250.184.49,1982-11-10 04:34:01
448,Teresa,tlongcf@umich.edu,174.88.53.223,1987-05-17 12:48:00
449,Roy,rboydcg@uol.com.br,91.58.243.215,1974-06-16 17:59:54
450,Ashley,afieldsch@tamu.edu,130.138.11.126,1983-09-15 05:52:36
451,Judith,jhawkinsci@cmu.edu,200.187.103.245,2003-10-22 12:24:03
452,Rebecca,rwestcj@ocn.ne.jp,72.85.3.103,1980-11-13 11:01:26
453,Raymond,rporterck@infoseek.co.jp,146.33.216.151,1982-05-17 23:58:03
454,Janet,jmarshallcl@odnoklassniki.ru,52.46.193.166,1998-10-04 00:02:21
455,Shirley,speterscm@salon.com,248.126.31.15,1987-01-30 06:04:59
456,Annie,abowmancn@economist.com,222.213.248.59,2006-03-14 23:52:59
457,Jean,jlarsonco@blogspot.com,71.41.25.195,2007-09-08 23:49:45
458,Phillip,pmoralescp@stanford.edu,74.119.87.28,2011-03-14 20:25:40
459,Norma,nrobinsoncq@economist.com,28.225.21.54,1989-10-21 01:22:43
460,Kimberly,kclarkcr@dion.ne.jp,149.171.132.153,2008-06-27 02:27:30
461,Ruby,rmorriscs@ucla.edu,177.85.163.249,2016-01-28 16:43:44
462,Jonathan,jcastilloct@tripod.com,78.4.28.77,2000-05-24 17:33:06
463,Edward,ebryantcu@jigsy.com,140.31.98.193,1992-12-17 08:32:47
464,Chris,chamiltoncv@eepurl.com,195.171.234.206,1970-12-05 03:42:19
465,Michael,mweavercw@reference.com,7.233.133.213,1987-03-29 02:30:54
466,Howard,hlawrencecx@businessweek.com,113.225.124.224,1990-07-30 07:20:57
467,Philip,phowardcy@comsenz.com,159.170.247.249,2010-10-15 10:18:37
468,Mary,mmarshallcz@xing.com,125.132.189.70,2007-07-19 13:48:47
469,Scott,salvarezd0@theguardian.com,78.49.103.230,1987-10-31 06:10:44
470,Wayne,wcarrolld1@blog.com,238.1.120.204,1980-11-19 03:26:10
471,Jennifer,jwoodsd2@multiply.com,92.20.224.49,2010-05-06 22:17:04
472,Raymond,rwelchd3@toplist.cz,176.158.35.240,2007-12-12 19:02:51
473,Steven,sdixond4@wisc.edu,167.55.237.52,1984-05-05 11:44:37
474,Ralph,rjamesd5@ameblo.jp,241.190.50.133,2000-07-06 08:44:37
475,Jason,jrobinsond6@hexun.com,138.119.139.56,2006-02-03 05:27:45
476,Doris,dwoodd7@fema.gov,180.220.156.190,1978-05-11 20:14:20
477,Elizabeth,eberryd8@youtu.be,74.188.53.229,2006-11-18 08:29:06
478,Irene,igilbertd9@privacy.gov.au,194.152.218.1,1985-09-17 02:46:52
479,Jessica,jdeanda@ameblo.jp,178.103.93.118,1974-06-07 19:04:05
480,Rachel,ralvarezdb@phoca.cz,17.22.223.174,1999-03-08 02:43:25
481,Kenneth,kthompsondc@shinystat.com,229.119.91.234,2007-05-15 13:17:32
482,Harold,hmurraydd@parallels.com,133.26.188.80,1993-11-15 03:42:07
483,Paula,phowellde@samsung.com,34.215.28.216,1993-11-29 15:55:00
484,Ruth,rpiercedf@tripadvisor.com,111.30.130.123,1986-08-17 10:19:38
485,Phyllis,paustindg@vk.com,50.84.34.178,1994-04-13 03:05:24
486,Laura,lfosterdh@usnews.com,37.8.101.33,2001-06-30 08:58:59
487,Eric,etaylordi@com.com,103.183.253.45,2006-09-15 20:18:46
488,Doris,driveradj@prweb.com,247.16.2.199,1989-05-08 09:27:09
489,Ryan,rhughesdk@elegantthemes.com,103.234.153.232,1989-08-01 18:36:06
490,Steve,smoralesdl@jigsy.com,3.76.84.207,2011-03-13 17:01:05
491,Louis,lsullivandm@who.int,78.135.44.208,1975-11-26 16:01:23
492,Catherine,ctuckerdn@seattletimes.com,93.137.106.21,1990-03-13 16:14:56
493,Ann,adixondo@gmpg.org,191.136.222.111,2002-06-05 14:22:18
494,Johnny,jhartdp@amazon.com,103.252.198.39,1988-07-30 23:54:49
495,Susan,srichardsdq@skype.com,126.247.192.11,2005-01-09 12:08:14
496,Brenda,bparkerdr@skype.com,63.232.216.86,1974-05-18 05:58:29
497,Tammy,tmurphyds@constantcontact.com,56.56.37.112,2014-08-05 18:22:25
498,Larry,lhayesdt@wordpress.com,162.146.13.46,1997-02-26 14:01:53
499,,ethomasdu@hhs.gov,6.241.88.250,2007-09-14 13:03:34
500,Paula,pshawdv@networksolutions.com,123.27.47.249,2003-10-30 21:19:20

File diff suppressed because it is too large

View File

@@ -1,388 +0,0 @@
import csv
from codecs import BOM_UTF8
from pathlib import Path

import pytest

from dbt.tests.util import (
    check_relations_equal,
    check_table_does_exist,
    check_table_does_not_exist,
    copy_file,
    mkdir,
    read_file,
    rm_dir,
    run_dbt,
)
from tests.functional.adapter.simple_seed.fixtures import (
    models__downstream_from_seed_actual,
    models__downstream_from_seed_pipe_separated,
    models__from_basic_seed,
)
from tests.functional.adapter.simple_seed.seeds import (
    seed__actual_csv,
    seed__unicode_csv,
    seed__with_dots_csv,
    seeds__disabled_in_config_csv,
    seeds__enabled_in_config_csv,
    seeds__expected_sql,
    seeds__pipe_separated_csv,
    seeds__tricky_csv,
    seeds__wont_parse_csv,
)


class SeedConfigBase(object):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "seeds": {
                "quote_columns": False,
            },
        }


class SeedTestBase(SeedConfigBase):
    @pytest.fixture(scope="class", autouse=True)
    def setUp(self, project):
        """Create table for ensuring seeds and models used in tests build correctly"""
        project.run_sql(seeds__expected_sql)

    @pytest.fixture(scope="class")
    def seeds(self, test_data_dir):
        return {"seed_actual.csv": seed__actual_csv}

    @pytest.fixture(scope="class")
    def models(self):
        return {
            "models__downstream_from_seed_actual.sql": models__downstream_from_seed_actual,
        }

    def _build_relations_for_test(self, project):
        """The testing environment needs seeds and models to interact with"""
        seed_result = run_dbt(["seed"])
        assert len(seed_result) == 1
        check_relations_equal(project.adapter, ["seed_expected", "seed_actual"])

        run_result = run_dbt()
        assert len(run_result) == 1
        check_relations_equal(
            project.adapter, ["models__downstream_from_seed_actual", "seed_expected"]
        )

    def _check_relation_end_state(self, run_result, project, exists: bool):
        assert len(run_result) == 1
        check_relations_equal(project.adapter, ["seed_actual", "seed_expected"])
        if exists:
            check_table_does_exist(project.adapter, "models__downstream_from_seed_actual")
        else:
            check_table_does_not_exist(project.adapter, "models__downstream_from_seed_actual")


class TestBasicSeedTests(SeedTestBase):
    def test_simple_seed(self, project):
        """Build models and observe that run truncates a seed and re-inserts rows"""
        self._build_relations_for_test(project)
        self._check_relation_end_state(run_result=run_dbt(["seed"]), project=project, exists=True)

    def test_simple_seed_full_refresh_flag(self, project):
        """Drop the seed_actual table and re-create. Verifies correct behavior by the absence of the
        model which depends on seed_actual."""
        self._build_relations_for_test(project)
        self._check_relation_end_state(
            run_result=run_dbt(["seed", "--full-refresh"]), project=project, exists=False
        )


class TestSeedConfigFullRefreshOn(SeedTestBase):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "seeds": {"quote_columns": False, "full_refresh": True},
        }

    def test_simple_seed_full_refresh_config(self, project):
        """config option should drop current model and cascade drop to downstream models"""
        self._build_relations_for_test(project)
        self._check_relation_end_state(run_result=run_dbt(["seed"]), project=project, exists=False)


class TestSeedConfigFullRefreshOff(SeedTestBase):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "seeds": {"quote_columns": False, "full_refresh": False},
        }

    def test_simple_seed_full_refresh_config(self, project):
        """Config options should override a full-refresh flag because config is higher priority"""
        self._build_relations_for_test(project)
        self._check_relation_end_state(run_result=run_dbt(["seed"]), project=project, exists=True)
        self._check_relation_end_state(
            run_result=run_dbt(["seed", "--full-refresh"]), project=project, exists=True
        )


class TestSeedCustomSchema(SeedTestBase):
    @pytest.fixture(scope="class", autouse=True)
    def setUp(self, project):
        """Create table for ensuring seeds and models used in tests build correctly"""
        project.run_sql(seeds__expected_sql)

    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "seeds": {
                "schema": "custom_schema",
                "quote_columns": False,
            },
        }

    def test_simple_seed_with_schema(self, project):
        seed_results = run_dbt(["seed"])
        assert len(seed_results) == 1
        custom_schema = f"{project.test_schema}_custom_schema"
        check_relations_equal(project.adapter, [f"{custom_schema}.seed_actual", "seed_expected"])

        # this should truncate the seed_actual table, then re-insert
        results = run_dbt(["seed"])
        assert len(results) == 1
        custom_schema = f"{project.test_schema}_custom_schema"
        check_relations_equal(project.adapter, [f"{custom_schema}.seed_actual", "seed_expected"])

    def test_simple_seed_with_drop_and_schema(self, project):
        seed_results = run_dbt(["seed"])
        assert len(seed_results) == 1
        custom_schema = f"{project.test_schema}_custom_schema"
        check_relations_equal(project.adapter, [f"{custom_schema}.seed_actual", "seed_expected"])

        # this should drop the seed table, then re-create
        results = run_dbt(["seed", "--full-refresh"])
        assert len(results) == 1
        custom_schema = f"{project.test_schema}_custom_schema"
        check_relations_equal(project.adapter, [f"{custom_schema}.seed_actual", "seed_expected"])


class SeedUniqueDelimiterTestBase(SeedConfigBase):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "seeds": {"quote_columns": False, "delimiter": "|"},
        }

    @pytest.fixture(scope="class", autouse=True)
    def setUp(self, project):
        """Create table for ensuring seeds and models used in tests build correctly"""
        project.run_sql(seeds__expected_sql)

    @pytest.fixture(scope="class")
    def seeds(self, test_data_dir):
        return {"seed_pipe_separated.csv": seeds__pipe_separated_csv}

    @pytest.fixture(scope="class")
    def models(self):
        return {
            "models__downstream_from_seed_pipe_separated.sql": models__downstream_from_seed_pipe_separated,
        }

    def _build_relations_for_test(self, project):
        """The testing environment needs seeds and models to interact with"""
        seed_result = run_dbt(["seed"])
        assert len(seed_result) == 1
        check_relations_equal(project.adapter, ["seed_expected", "seed_pipe_separated"])

        run_result = run_dbt()
        assert len(run_result) == 1
        check_relations_equal(
            project.adapter, ["models__downstream_from_seed_pipe_separated", "seed_expected"]
        )

    def _check_relation_end_state(self, run_result, project, exists: bool):
        assert len(run_result) == 1
        check_relations_equal(project.adapter, ["seed_pipe_separated", "seed_expected"])
        if exists:
            check_table_does_exist(project.adapter, "models__downstream_from_seed_pipe_separated")
        else:
            check_table_does_not_exist(
                project.adapter, "models__downstream_from_seed_pipe_separated"
            )


class TestSeedWithUniqueDelimiter(SeedUniqueDelimiterTestBase):
    def test_seed_with_unique_delimiter(self, project):
        """Testing correct run of seeds with a unique delimiter (pipe in this case)"""
        self._build_relations_for_test(project)
        self._check_relation_end_state(run_result=run_dbt(["seed"]), project=project, exists=True)


class TestSeedWithWrongDelimiter(SeedUniqueDelimiterTestBase):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "seeds": {"quote_columns": False, "delimiter": ";"},
        }

    def test_seed_with_wrong_delimiter(self, project):
        """Testing failure of running dbt seed with a wrongly configured delimiter"""
        seed_result = run_dbt(["seed"], expect_pass=False)
        assert "syntax error" in seed_result.results[0].message.lower()


class TestSeedWithEmptyDelimiter(SeedUniqueDelimiterTestBase):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "seeds": {"quote_columns": False, "delimiter": ""},
        }

    def test_seed_with_empty_delimiter(self, project):
        """Testing failure of running dbt seed with an empty configured delimiter value"""
        seed_result = run_dbt(["seed"], expect_pass=False)
        assert "compilation error" in seed_result.results[0].message.lower()


class TestSimpleSeedEnabledViaConfig(object):
    @pytest.fixture(scope="session")
    def seeds(self):
        return {
            "seed_enabled.csv": seeds__enabled_in_config_csv,
            "seed_disabled.csv": seeds__disabled_in_config_csv,
            "seed_tricky.csv": seeds__tricky_csv,
        }

    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "seeds": {
                "test": {"seed_enabled": {"enabled": True}, "seed_disabled": {"enabled": False}},
                "quote_columns": False,
            },
        }

    @pytest.fixture(scope="function")
    def clear_test_schema(self, project):
        yield
        project.run_sql(f"drop schema if exists {project.test_schema} cascade")

    def test_simple_seed_with_disabled(self, clear_test_schema, project):
        results = run_dbt(["seed"])
        assert len(results) == 2
        check_table_does_exist(project.adapter, "seed_enabled")
        check_table_does_not_exist(project.adapter, "seed_disabled")
        check_table_does_exist(project.adapter, "seed_tricky")

    def test_simple_seed_selection(self, clear_test_schema, project):
        results = run_dbt(["seed", "--select", "seed_enabled"])
        assert len(results) == 1
        check_table_does_exist(project.adapter, "seed_enabled")
        check_table_does_not_exist(project.adapter, "seed_disabled")
        check_table_does_not_exist(project.adapter, "seed_tricky")

    def test_simple_seed_exclude(self, clear_test_schema, project):
        results = run_dbt(["seed", "--exclude", "seed_enabled"])
        assert len(results) == 1
        check_table_does_not_exist(project.adapter, "seed_enabled")
        check_table_does_not_exist(project.adapter, "seed_disabled")
        check_table_does_exist(project.adapter, "seed_tricky")


class TestSeedParsing(SeedConfigBase):
    @pytest.fixture(scope="class", autouse=True)
    def setUp(self, project):
        """Create table for ensuring seeds and models used in tests build correctly"""
        project.run_sql(seeds__expected_sql)

    @pytest.fixture(scope="class")
    def seeds(self):
        return {"seed.csv": seeds__wont_parse_csv}

    @pytest.fixture(scope="class")
    def models(self):
        return {"model.sql": models__from_basic_seed}

    def test_dbt_run_skips_seeds(self, project):
        # run does not try to parse the seed files
        assert len(run_dbt()) == 1

        # make sure 'dbt seed' fails, otherwise our test is invalid!
        run_dbt(["seed"], expect_pass=False)


class TestSimpleSeedWithBOM(SeedConfigBase):
    # BOM = byte order mark; see https://www.ibm.com/docs/en/netezza?topic=formats-byte-order-mark
    # Tests that a hidden unicode character (the BOM) at the start of the csv is handled
    @pytest.fixture(scope="class", autouse=True)
    def setUp(self, project):
        """Create table for ensuring seeds and models used in tests build correctly"""
        project.run_sql(seeds__expected_sql)
        copy_file(
            project.test_dir,
            "seed_bom.csv",
            project.project_root / Path("seeds") / "seed_bom.csv",
            "",
        )

    def test_simple_seed(self, project):
        seed_result = run_dbt(["seed"])
        assert len(seed_result) == 1

        # The encoding param must be specified explicitly: open() otherwise falls back
        # to a platform-default encoding, which can mishandle characters beyond
        # extended ASCII.
        with open(
            project.project_root / Path("seeds") / Path("seed_bom.csv"), encoding="utf-8"
        ) as fp:
            assert fp.read(1) == BOM_UTF8.decode("utf-8")
        check_relations_equal(project.adapter, ["seed_expected", "seed_bom"])


class TestSeedSpecificFormats(SeedConfigBase):
    """Expect all edge cases to build"""

    @staticmethod
    def _make_big_seed(test_data_dir):
        mkdir(test_data_dir)
        big_seed_path = test_data_dir / Path("tmp.csv")
        with open(big_seed_path, "w") as f:
            writer = csv.writer(f)
            writer.writerow(["seed_id"])
            for i in range(0, 20000):
                writer.writerow([i])
        return big_seed_path

    @pytest.fixture(scope="class")
    def seeds(self, test_data_dir):
        big_seed_path = self._make_big_seed(test_data_dir)
        big_seed = read_file(big_seed_path)
        yield {
            "big_seed.csv": big_seed,
            "seed.with.dots.csv": seed__with_dots_csv,
            "seed_unicode.csv": seed__unicode_csv,
        }
        rm_dir(test_data_dir)

    def test_simple_seed(self, project):
        results = run_dbt(["seed"])
        assert len(results) == 3


class BaseTestEmptySeed:
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "seeds": {
                "quote_columns": False,
            },
        }

    @pytest.fixture(scope="class")
    def seeds(self):
        return {"empty_with_header.csv": "a,b,c"}

    def test_empty_seeds(self, project):
        # Should create an empty table and not fail
        results = run_dbt(["seed"])
        assert len(results) == 1


class TestEmptySeed(BaseTestEmptySeed):
    pass
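
These base classes are written for reuse: an adapter repository keeps the same coverage by subclassing them instead of copying this file. A minimal sketch of that pattern, assuming the maintained equivalents ship with the dbt-tests-adapter package under dbt.tests.adapter.simple_seed.test_seed (the module path is an assumption, not something this diff confirms):

# hypothetical adapter-side test module
from dbt.tests.adapter.simple_seed.test_seed import BaseTestEmptySeed


class TestEmptySeedMyAdapter(BaseTestEmptySeed):
    # inherits the empty-seed fixture and assertion unchanged
    pass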

View File

@@ -1,73 +0,0 @@
import pytest

from dbt.tests.util import run_dbt
from tests.functional.adapter.simple_seed.fixtures import (
    macros__schema_test,
    properties__schema_yml,
)
from tests.functional.adapter.simple_seed.seeds import (
    seeds__disabled_in_config_csv,
    seeds__enabled_in_config_csv,
    seeds__tricky_csv,
)


class BaseSimpleSeedColumnOverride:
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "schema.yml": properties__schema_yml,
        }

    @pytest.fixture(scope="class")
    def seeds(self):
        return {
            "seed_enabled.csv": seeds__enabled_in_config_csv,
            "seed_disabled.csv": seeds__disabled_in_config_csv,
            "seed_tricky.csv": seeds__tricky_csv,
        }

    @pytest.fixture(scope="class")
    def macros(self):
        return {"schema_test.sql": macros__schema_test}

    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "seeds": {
                "test": {
                    "enabled": False,
                    "quote_columns": True,
                    "seed_enabled": {"enabled": True, "+column_types": self.seed_enabled_types()},
                    "seed_tricky": {
                        "enabled": True,
                        "+column_types": self.seed_tricky_types(),
                    },
                },
            },
        }

    @staticmethod
    def seed_enabled_types():
        return {
            "seed_id": "text",
            "birthday": "date",
        }

    @staticmethod
    def seed_tricky_types():
        return {
            "seed_id_str": "text",
            "looks_like_a_bool": "text",
            "looks_like_a_date": "text",
        }

    def test_simple_seed_with_column_override(self, project):
        seed_results = run_dbt(["seed", "--show"])
        assert len(seed_results) == 2
        test_results = run_dbt(["test"])
        assert len(test_results) == 10


class TestSimpleSeedColumnOverride(BaseSimpleSeedColumnOverride):
    pass
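
The column-override base is reused the same way; an adapter subclass only swaps in the type names its SQL dialect expects. A sketch under the same packaging assumption (class name and column types here are illustrative):

from dbt.tests.adapter.simple_seed.test_seed_type_override import BaseSimpleSeedColumnOverride


class TestSimpleSeedColumnOverrideMyAdapter(BaseSimpleSeedColumnOverride):
    @staticmethod
    def seed_enabled_types():
        # dialect-specific spellings for the overridden columns
        return {"seed_id": "varchar(64)", "birthday": "date"}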

View File

@@ -1,146 +0,0 @@
from typing import Dict, List, Optional

from dbt.tests.fixtures.project import TestProjInfo
from dbt.tests.util import relation_from_name


def get_records(
    project: TestProjInfo, table: str, select: Optional[str] = None, where: Optional[str] = None
) -> List[tuple]:
    """
    Gets records from a single table in a dbt project

    Args:
        project: the dbt project that contains the table
        table: the name of the table without a schema
        select: the selection clause; defaults to all columns (*)
        where: the where clause to apply, if any; defaults to all records

    Returns:
        A list of records with each record as a tuple
    """
    table_name = relation_from_name(project.adapter, table)
    select_clause = select or "*"
    where_clause = where or "1 = 1"
    sql = f"""
        select {select_clause}
        from {table_name}
        where {where_clause}
    """
    return [tuple(record) for record in project.run_sql(sql, fetch="all")]
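
# Illustrative usage (table name and filter are hypothetical, not from this module):
#   get_records(project, "seed", select="email", where="id = 1")
# returns the matching rows as a list of one-column tuples.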


def update_records(
    project: TestProjInfo, table: str, updates: Dict[str, str], where: Optional[str] = None
):
    """
    Applies updates to a table in a dbt project

    Args:
        project: the dbt project that contains the table
        table: the name of the table without a schema
        updates: the updates to be applied in the form {'field_name': 'expression to be applied'}
        where: the where clause to apply, if any; defaults to all records
    """
    table_name = relation_from_name(project.adapter, table)
    set_clause = ", ".join(
        [" = ".join([field, expression]) for field, expression in updates.items()]
    )
    where_clause = where or "1 = 1"
    sql = f"""
        update {table_name}
        set {set_clause}
        where {where_clause}
    """
    project.run_sql(sql)
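
# Illustrative usage: the update expressions are raw SQL, so string literals need
# their own quotes (table and values here are hypothetical):
#   update_records(project, "seed", {"updated_at": "'2016-09-01 00:00:00'"}, where="id = 1")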


def insert_records(
    project: TestProjInfo,
    to_table: str,
    from_table: str,
    select: Optional[str] = None,
    where: Optional[str] = None,
):
    """
    Inserts records from one table into another table in a dbt project

    Args:
        project: the dbt project that contains the table
        to_table: the name of the table, without a schema, in which the records will be inserted
        from_table: the name of the table, without a schema, which contains the records to be inserted
        select: the selection clause to apply on `from_table`; defaults to all columns (*)
        where: the where clause to apply on `from_table`, if any; defaults to all records
    """
    to_table_name = relation_from_name(project.adapter, to_table)
    from_table_name = relation_from_name(project.adapter, from_table)
    select_clause = select or "*"
    where_clause = where or "1 = 1"
    sql = f"""
        insert into {to_table_name}
        select {select_clause}
        from {from_table_name}
        where {where_clause}
    """
    project.run_sql(sql)
def delete_records(project: TestProjInfo, table: str, where: Optional[str] = None):
"""
Deletes records from a table in a dbt project
Args:
project: the dbt project that contains the table
table: the name of the table without a schema
where: the where clause to apply, if any; defaults to all records
"""
table_name = relation_from_name(project.adapter, table)
where_clause = where or "1 = 1"
sql = f"""
delete from {table_name}
where {where_clause}
"""
project.run_sql(sql)
def clone_table(
project: TestProjInfo, to_table: str, from_table: str, select: str, where: Optional[str] = None
):
"""
Creates a new table based on another table in a dbt project
Args:
project: the dbt project that contains the table
to_table: the name of the table, without a schema, to be created
from_table: the name of the table, without a schema, to be cloned
select: the selection clause to apply on `from_table`; defaults to all columns (*)
where: the where clause to apply on `from_table`, if any; defaults to all records
"""
to_table_name = relation_from_name(project.adapter, to_table)
from_table_name = relation_from_name(project.adapter, from_table)
select_clause = select or "*"
where_clause = where or "1 = 1"
sql = f"drop table if exists {to_table_name}"
project.run_sql(sql)
sql = f"""
create table {to_table_name} as
select {select_clause}
from {from_table_name}
where {where_clause}
"""
project.run_sql(sql)
def add_column(project: TestProjInfo, table: str, column: str, definition: str):
"""
Applies updates to a table in a dbt project
Args:
project: the dbt project that contains the table
table: the name of the table without a schema
column: the name of the new column
definition: the definition of the new column, e.g. 'varchar(20) default null'
"""
# BigQuery doesn't like 'varchar' in the definition
if project.adapter.type() == "bigquery" and "varchar" in definition.lower():
definition = "string"
table_name = relation_from_name(project.adapter, table)
sql = f"""
alter table {table_name}
add column {column} {definition}
"""
project.run_sql(sql)
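# --- Illustration (not part of the original module) ---
# A minimal sketch of how the helpers above compose inside an adapter test;
# `project` is the standard dbt test fixture, and the table names and values
# are hypothetical.
def _example_update_is_visible(project: TestProjInfo) -> None:
    # build a working copy of the seed, mutate one record, then read it back
    clone_table(project, "fact", "seed", "*", "id between 1 and 20")
    update_records(project, "fact", {"email": "'updated@example.com'"}, "id = 1")
    assert get_records(project, "fact", "id, email", "id = 1") == [(1, "updated@example.com")]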

View File

@@ -1,430 +0,0 @@
create_seed_sql = """
create table {schema}.seed (
id INTEGER,
first_name VARCHAR(50),
last_name VARCHAR(50),
email VARCHAR(50),
gender VARCHAR(50),
ip_address VARCHAR(20),
updated_at TIMESTAMP
);
"""
create_snapshot_expected_sql = """
create table {schema}.snapshot_expected (
id INTEGER,
first_name VARCHAR(50),
last_name VARCHAR(50),
email VARCHAR(50),
gender VARCHAR(50),
ip_address VARCHAR(20),
-- snapshotting fields
updated_at TIMESTAMP,
test_valid_from TIMESTAMP,
test_valid_to TIMESTAMP,
test_scd_id TEXT,
test_updated_at TIMESTAMP
);
"""
seed_insert_sql = """
-- seed inserts
-- use the same email for two users to verify that duplicated check_cols values
-- are handled appropriately
insert into {schema}.seed (id, first_name, last_name, email, gender, ip_address, updated_at) values
(1, 'Judith', 'Kennedy', '(not provided)', 'Female', '54.60.24.128', '2015-12-24 12:19:28'),
(2, 'Arthur', 'Kelly', '(not provided)', 'Male', '62.56.24.215', '2015-10-28 16:22:15'),
(3, 'Rachel', 'Moreno', 'rmoreno2@msu.edu', 'Female', '31.222.249.23', '2016-04-05 02:05:30'),
(4, 'Ralph', 'Turner', 'rturner3@hp.com', 'Male', '157.83.76.114', '2016-08-08 00:06:51'),
(5, 'Laura', 'Gonzales', 'lgonzales4@howstuffworks.com', 'Female', '30.54.105.168', '2016-09-01 08:25:38'),
(6, 'Katherine', 'Lopez', 'klopez5@yahoo.co.jp', 'Female', '169.138.46.89', '2016-08-30 18:52:11'),
(7, 'Jeremy', 'Hamilton', 'jhamilton6@mozilla.org', 'Male', '231.189.13.133', '2016-07-17 02:09:46'),
(8, 'Heather', 'Rose', 'hrose7@goodreads.com', 'Female', '87.165.201.65', '2015-12-29 22:03:56'),
(9, 'Gregory', 'Kelly', 'gkelly8@trellian.com', 'Male', '154.209.99.7', '2016-03-24 21:18:16'),
(10, 'Rachel', 'Lopez', 'rlopez9@themeforest.net', 'Female', '237.165.82.71', '2016-08-20 15:44:49'),
(11, 'Donna', 'Welch', 'dwelcha@shutterfly.com', 'Female', '103.33.110.138', '2016-02-27 01:41:48'),
(12, 'Russell', 'Lawrence', 'rlawrenceb@qq.com', 'Male', '189.115.73.4', '2016-06-11 03:07:09'),
(13, 'Michelle', 'Montgomery', 'mmontgomeryc@scientificamerican.com', 'Female', '243.220.95.82', '2016-06-18 16:27:19'),
(14, 'Walter', 'Castillo', 'wcastillod@pagesperso-orange.fr', 'Male', '71.159.238.196', '2016-10-06 01:55:44'),
(15, 'Robin', 'Mills', 'rmillse@vkontakte.ru', 'Female', '172.190.5.50', '2016-10-31 11:41:21'),
(16, 'Raymond', 'Holmes', 'rholmesf@usgs.gov', 'Male', '148.153.166.95', '2016-10-03 08:16:38'),
(17, 'Gary', 'Bishop', 'gbishopg@plala.or.jp', 'Male', '161.108.182.13', '2016-08-29 19:35:20'),
(18, 'Anna', 'Riley', 'arileyh@nasa.gov', 'Female', '253.31.108.22', '2015-12-11 04:34:27'),
(19, 'Sarah', 'Knight', 'sknighti@foxnews.com', 'Female', '222.220.3.177', '2016-09-26 00:49:06'),
(20, 'Phyllis', 'Fox', null, 'Female', '163.191.232.95', '2016-08-21 10:35:19');
"""
populate_snapshot_expected_sql = """
-- populate snapshot table
insert into {schema}.snapshot_expected (
id,
first_name,
last_name,
email,
gender,
ip_address,
updated_at,
test_valid_from,
test_valid_to,
test_updated_at,
test_scd_id
)
select
id,
first_name,
last_name,
email,
gender,
ip_address,
updated_at,
-- fields added by snapshotting
updated_at as test_valid_from,
null::timestamp as test_valid_to,
updated_at as test_updated_at,
md5(id || '-' || first_name || '|' || updated_at::text) as test_scd_id
from {schema}.seed;
"""
populate_snapshot_expected_valid_to_current_sql = """
-- populate snapshot table
insert into {schema}.snapshot_expected (
id,
first_name,
last_name,
email,
gender,
ip_address,
updated_at,
test_valid_from,
test_valid_to,
test_updated_at,
test_scd_id
)
select
id,
first_name,
last_name,
email,
gender,
ip_address,
updated_at,
-- fields added by snapshotting
updated_at as test_valid_from,
date('2099-12-31') as test_valid_to,
updated_at as test_updated_at,
md5(id || '-' || first_name || '|' || updated_at::text) as test_scd_id
from {schema}.seed;
"""
snapshot_actual_sql = """
{% snapshot snapshot_actual %}
{{
config(
unique_key='id || ' ~ "'-'" ~ ' || first_name',
)
}}
select * from {{target.database}}.{{target.schema}}.seed
{% endsnapshot %}
"""
snapshots_yml = """
snapshots:
- name: snapshot_actual
config:
strategy: timestamp
updated_at: updated_at
snapshot_meta_column_names:
dbt_valid_to: test_valid_to
dbt_valid_from: test_valid_from
dbt_scd_id: test_scd_id
dbt_updated_at: test_updated_at
"""
snapshots_no_column_names_yml = """
snapshots:
- name: snapshot_actual
config:
strategy: timestamp
updated_at: updated_at
"""
ref_snapshot_sql = """
select * from {{ ref('snapshot_actual') }}
"""
invalidate_sql = """
-- update records 10 - 20: change the email and updated_at fields
update {schema}.seed set
updated_at = updated_at + interval '1 hour',
email = case when id = 20 then 'pfoxj@creativecommons.org' else 'new_' || email end
where id >= 10 and id <= 20;
-- invalidate records 10 - 20
update {schema}.snapshot_expected set
test_valid_to = updated_at + interval '1 hour'
where id >= 10 and id <= 20;
"""
update_sql = """
-- insert v2 of records 10 - 20
insert into {schema}.snapshot_expected (
id,
first_name,
last_name,
email,
gender,
ip_address,
updated_at,
test_valid_from,
test_valid_to,
test_updated_at,
test_scd_id
)
select
id,
first_name,
last_name,
email,
gender,
ip_address,
updated_at,
-- fields added by snapshotting
updated_at as test_valid_from,
null::timestamp as test_valid_to,
updated_at as test_updated_at,
md5(id || '-' || first_name || '|' || updated_at::text) as test_scd_id
from {schema}.seed
where id >= 10 and id <= 20;
"""
# valid_to_current fixtures
snapshots_valid_to_current_yml = """
snapshots:
- name: snapshot_actual
config:
strategy: timestamp
updated_at: updated_at
dbt_valid_to_current: "date('2099-12-31')"
snapshot_meta_column_names:
dbt_valid_to: test_valid_to
dbt_valid_from: test_valid_from
dbt_scd_id: test_scd_id
dbt_updated_at: test_updated_at
"""
update_with_current_sql = """
-- insert v2 of records 10 - 20
insert into {schema}.snapshot_expected (
id,
first_name,
last_name,
email,
gender,
ip_address,
updated_at,
test_valid_from,
test_valid_to,
test_updated_at,
test_scd_id
)
select
id,
first_name,
last_name,
email,
gender,
ip_address,
updated_at,
-- fields added by snapshotting
updated_at as test_valid_from,
date('2099-12-31') as test_valid_to,
updated_at as test_updated_at,
md5(id || '-' || first_name || '|' || updated_at::text) as test_scd_id
from {schema}.seed
where id >= 10 and id <= 20;
"""
# multi-key snapshot fixtures
create_multi_key_seed_sql = """
create table {schema}.seed (
id1 INTEGER,
id2 INTEGER,
first_name VARCHAR(50),
last_name VARCHAR(50),
email VARCHAR(50),
gender VARCHAR(50),
ip_address VARCHAR(20),
updated_at TIMESTAMP
);
"""
create_multi_key_snapshot_expected_sql = """
create table {schema}.snapshot_expected (
id1 INTEGER,
id2 INTEGER,
first_name VARCHAR(50),
last_name VARCHAR(50),
email VARCHAR(50),
gender VARCHAR(50),
ip_address VARCHAR(20),
-- snapshotting fields
updated_at TIMESTAMP,
test_valid_from TIMESTAMP,
test_valid_to TIMESTAMP,
test_scd_id TEXT,
test_updated_at TIMESTAMP
);
"""
seed_multi_key_insert_sql = """
-- seed inserts
-- use the same email for two users to verify that duplicated check_cols values
-- are handled appropriately
insert into {schema}.seed (id1, id2, first_name, last_name, email, gender, ip_address, updated_at) values
(1, 100, 'Judith', 'Kennedy', '(not provided)', 'Female', '54.60.24.128', '2015-12-24 12:19:28'),
(2, 200, 'Arthur', 'Kelly', '(not provided)', 'Male', '62.56.24.215', '2015-10-28 16:22:15'),
(3, 300, 'Rachel', 'Moreno', 'rmoreno2@msu.edu', 'Female', '31.222.249.23', '2016-04-05 02:05:30'),
(4, 400, 'Ralph', 'Turner', 'rturner3@hp.com', 'Male', '157.83.76.114', '2016-08-08 00:06:51'),
(5, 500, 'Laura', 'Gonzales', 'lgonzales4@howstuffworks.com', 'Female', '30.54.105.168', '2016-09-01 08:25:38'),
(6, 600, 'Katherine', 'Lopez', 'klopez5@yahoo.co.jp', 'Female', '169.138.46.89', '2016-08-30 18:52:11'),
(7, 700, 'Jeremy', 'Hamilton', 'jhamilton6@mozilla.org', 'Male', '231.189.13.133', '2016-07-17 02:09:46'),
(8, 800, 'Heather', 'Rose', 'hrose7@goodreads.com', 'Female', '87.165.201.65', '2015-12-29 22:03:56'),
(9, 900, 'Gregory', 'Kelly', 'gkelly8@trellian.com', 'Male', '154.209.99.7', '2016-03-24 21:18:16'),
(10, 1000, 'Rachel', 'Lopez', 'rlopez9@themeforest.net', 'Female', '237.165.82.71', '2016-08-20 15:44:49'),
(11, 1100, 'Donna', 'Welch', 'dwelcha@shutterfly.com', 'Female', '103.33.110.138', '2016-02-27 01:41:48'),
(12, 1200, 'Russell', 'Lawrence', 'rlawrenceb@qq.com', 'Male', '189.115.73.4', '2016-06-11 03:07:09'),
(13, 1300, 'Michelle', 'Montgomery', 'mmontgomeryc@scientificamerican.com', 'Female', '243.220.95.82', '2016-06-18 16:27:19'),
(14, 1400, 'Walter', 'Castillo', 'wcastillod@pagesperso-orange.fr', 'Male', '71.159.238.196', '2016-10-06 01:55:44'),
(15, 1500, 'Robin', 'Mills', 'rmillse@vkontakte.ru', 'Female', '172.190.5.50', '2016-10-31 11:41:21'),
(16, 1600, 'Raymond', 'Holmes', 'rholmesf@usgs.gov', 'Male', '148.153.166.95', '2016-10-03 08:16:38'),
(17, 1700, 'Gary', 'Bishop', 'gbishopg@plala.or.jp', 'Male', '161.108.182.13', '2016-08-29 19:35:20'),
(18, 1800, 'Anna', 'Riley', 'arileyh@nasa.gov', 'Female', '253.31.108.22', '2015-12-11 04:34:27'),
(19, 1900, 'Sarah', 'Knight', 'sknighti@foxnews.com', 'Female', '222.220.3.177', '2016-09-26 00:49:06'),
(20, 2000, 'Phyllis', 'Fox', null, 'Female', '163.191.232.95', '2016-08-21 10:35:19');
"""
populate_multi_key_snapshot_expected_sql = """
-- populate snapshot table
insert into {schema}.snapshot_expected (
id1,
id2,
first_name,
last_name,
email,
gender,
ip_address,
updated_at,
test_valid_from,
test_valid_to,
test_updated_at,
test_scd_id
)
select
id1,
id2,
first_name,
last_name,
email,
gender,
ip_address,
updated_at,
-- fields added by snapshotting
updated_at as test_valid_from,
null::timestamp as test_valid_to,
updated_at as test_updated_at,
md5(id1::text || '|' || id2::text || '|' || updated_at::text) as test_scd_id
from {schema}.seed;
"""
model_seed_sql = """
select * from {{target.database}}.{{target.schema}}.seed
"""
snapshots_multi_key_yml = """
snapshots:
- name: snapshot_actual
relation: "ref('seed')"
config:
strategy: timestamp
updated_at: updated_at
unique_key:
- id1
- id2
snapshot_meta_column_names:
dbt_valid_to: test_valid_to
dbt_valid_from: test_valid_from
dbt_scd_id: test_scd_id
dbt_updated_at: test_updated_at
"""
invalidate_multi_key_sql = """
-- update records 10 - 20: change the email and updated_at fields
update {schema}.seed set
updated_at = updated_at + interval '1 hour',
email = case when id1 = 20 then 'pfoxj@creativecommons.org' else 'new_' || email end
where id1 >= 10 and id1 <= 20;
-- invalidate records 10 - 20
update {schema}.snapshot_expected set
test_valid_to = updated_at + interval '1 hour'
where id1 >= 10 and id1 <= 20;
"""
update_multi_key_sql = """
-- insert v2 of records 10 - 20
insert into {schema}.snapshot_expected (
id1,
id2,
first_name,
last_name,
email,
gender,
ip_address,
updated_at,
test_valid_from,
test_valid_to,
test_updated_at,
test_scd_id
)
select
id1,
id2,
first_name,
last_name,
email,
gender,
ip_address,
updated_at,
-- fields added by snapshotting
updated_at as test_valid_from,
null::timestamp as test_valid_to,
updated_at as test_updated_at,
md5(id1::text || '|' || id2::text || '|' || updated_at::text) as test_scd_id
from {schema}.seed
where id1 >= 10 and id1 <= 20;
"""

View File

@@ -1,33 +0,0 @@
SEED_CSV = """
id,first_name,last_name,email,gender,ip_address,updated_at
1,Judith,Kennedy,jkennedy0@phpbb.com,Female,54.60.24.128,2015-12-24
2,Arthur,Kelly,akelly1@eepurl.com,Male,62.56.24.215,2015-10-28
3,Rachel,Moreno,rmoreno2@msu.edu,Female,31.222.249.23,2016-04-05
4,Ralph,Turner,rturner3@hp.com,Male,157.83.76.114,2016-08-08
5,Laura,Gonzales,lgonzales4@howstuffworks.com,Female,30.54.105.168,2016-09-01
6,Katherine,Lopez,klopez5@yahoo.co.jp,Female,169.138.46.89,2016-08-30
7,Jeremy,Hamilton,jhamilton6@mozilla.org,Male,231.189.13.133,2016-07-17
8,Heather,Rose,hrose7@goodreads.com,Female,87.165.201.65,2015-12-29
9,Gregory,Kelly,gkelly8@trellian.com,Male,154.209.99.7,2016-03-24
10,Rachel,Lopez,rlopez9@themeforest.net,Female,237.165.82.71,2016-08-20
11,Donna,Welch,dwelcha@shutterfly.com,Female,103.33.110.138,2016-02-27
12,Russell,Lawrence,rlawrenceb@qq.com,Male,189.115.73.4,2016-06-11
13,Michelle,Montgomery,mmontgomeryc@scientificamerican.com,Female,243.220.95.82,2016-06-18
14,Walter,Castillo,wcastillod@pagesperso-orange.fr,Male,71.159.238.196,2016-10-06
15,Robin,Mills,rmillse@vkontakte.ru,Female,172.190.5.50,2016-10-31
16,Raymond,Holmes,rholmesf@usgs.gov,Male,148.153.166.95,2016-10-03
17,Gary,Bishop,gbishopg@plala.or.jp,Male,161.108.182.13,2016-08-29
18,Anna,Riley,arileyh@nasa.gov,Female,253.31.108.22,2015-12-11
19,Sarah,Knight,sknighti@foxnews.com,Female,222.220.123.177,2016-09-26
20,Phyllis,Fox,null,Female,163.191.232.95,2016-08-21
21,Judy,Robinson,jrobinsonk@blogs.com,Female,208.21.192.232,2016-09-18
22,Kevin,Alvarez,kalvarezl@buzzfeed.com,Male,228.106.146.9,2016-07-29
23,Barbara,Carr,bcarrm@pen.io,Female,106.165.140.17,2015-09-24
24,William,Watkins,wwatkinsn@guardian.co.uk,Male,78.155.84.6,2016-03-08
25,Judy,Cooper,jcoopero@google.com.au,Female,24.149.123.184,2016-10-05
26,Shirley,Castillo,scastillop@samsung.com,Female,129.252.181.12,2016-06-20
27,Justin,Harper,jharperq@opera.com,Male,131.172.103.218,2016-05-21
28,Marie,Medina,mmedinar@nhs.uk,Female,188.119.125.67,2015-10-08
29,Kelly,Edwards,kedwardss@phoca.cz,Female,47.121.157.66,2015-09-15
30,Carl,Coleman,ccolemant@wikipedia.org,Male,82.227.154.83,2016-05-26
""".lstrip()

View File

@@ -1,27 +0,0 @@
SNAPSHOT_TIMESTAMP_SQL = """
{% snapshot snapshot %}
{{ config(
target_database=database,
target_schema=schema,
unique_key='id',
strategy='timestamp',
updated_at='updated_at',
invalidate_hard_deletes=True,
) }}
select * from {{ ref('fact') }}
{% endsnapshot %}
"""
SNAPSHOT_CHECK_SQL = """
{% snapshot snapshot %}
{{ config(
target_database=database,
target_schema=schema,
unique_key='id',
strategy='check',
check_cols=['email'],
) }}
select * from {{ ref('fact') }}
{% endsnapshot %}
"""

View File

@@ -1,211 +0,0 @@
from typing import Dict, Iterable, List, Optional
import pytest
from dbt.tests.util import run_dbt
from tests.functional.adapter.simple_snapshot import common, seeds, snapshots
MODEL_FACT_SQL = """
{{ config(materialized="table") }}
select * from {{ ref('seed') }}
where id between 1 and 20
"""
class BaseSimpleSnapshotBase:
@pytest.fixture(scope="class")
def seeds(self):
"""
This seed file contains all records needed for the tests, including records that will be inserted after the
initial snapshot. The table only needs to be loaded once, at the class level; it is never altered, so it
requires no further setup or teardown.
"""
return {"seed.csv": seeds.SEED_CSV}
@pytest.fixture(scope="class")
def models(self):
"""
This is the working base table. It is altered by each test, so it requires setup and teardown at the
test-case level; see `_setup_method`.
"""
return {"fact.sql": MODEL_FACT_SQL}
@pytest.fixture(scope="class", autouse=True)
def _setup_class(self, project):
"""
Load `seed` once for the whole class
"""
run_dbt(["seed"])
@pytest.fixture(scope="function", autouse=True)
def _setup_method(self, project):
"""
Initialize `fact` and `snapshot` for every test case.
Only load the first 20 `seed` records into `fact`; withhold 10 records as "new" (e.g. to test inserts).
Store the project on `self` to simplify the helper calls and make the code more readable.
This must happen in a function-scoped fixture: pytest creates a fresh class instance for each test,
so attributes set on `self` in a class-scoped fixture would not be visible to the tests.
"""
self.project = project
self.create_fact_from_seed("id between 1 and 20")
run_dbt(["snapshot"])
yield
self.delete_snapshot_records()
self.delete_fact_records()
def update_fact_records(self, updates: Dict[str, str], where: Optional[str] = None):
common.update_records(self.project, "fact", updates, where)
def insert_fact_records(self, where: Optional[str] = None):
common.insert_records(self.project, "fact", "seed", "*", where)
def delete_fact_records(self, where: Optional[str] = None):
common.delete_records(self.project, "fact", where)
def add_fact_column(self, column: Optional[str] = None, definition: Optional[str] = None):
common.add_column(self.project, "fact", column, definition)
def create_fact_from_seed(self, where: Optional[str] = None):
common.clone_table(self.project, "fact", "seed", "*", where)
def get_snapshot_records(self, select: Optional[str] = None, where: Optional[str] = None) -> List[tuple]:
return common.get_records(self.project, "snapshot", select, where)
def delete_snapshot_records(self):
common.delete_records(self.project, "snapshot")
def _assert_results(
self,
ids_with_current_snapshot_records: Iterable,
ids_with_closed_out_snapshot_records: Iterable,
):
"""
All test cases are checked by considering whether a source record's id has a value in `dbt_valid_to`
in `snapshot`. Each id can fall into one of the following cases:
- The id has only one record in `snapshot`; it has a value in `dbt_valid_to`
- the record was hard deleted in the source
- The id has only one record in `snapshot`; it does not have a value in `dbt_valid_to`
- the record was not updated in the source
- the record was updated in the source, but not in a way that is tracked (e.g. via `strategy='check'`)
- The id has two records in `snapshot`; one has a value in `dbt_valid_to`, the other does not
- the record was altered in the source in a way that is tracked
- the record was hard deleted and revived
Note: Because of the third scenario, ids may show up in both arguments of this method.
Args:
ids_with_current_snapshot_records: a list/set/etc. of ids which aren't end-dated
ids_with_closed_out_snapshot_records: a list/set/etc. of ids which are end-dated
"""
records = set(self.get_snapshot_records("id, dbt_valid_to is null as is_current"))
expected_records = set().union(
{(i, True) for i in ids_with_current_snapshot_records},
{(i, False) for i in ids_with_closed_out_snapshot_records},
)
for record in records:
assert record in expected_records
class BaseSimpleSnapshot(BaseSimpleSnapshotBase):
@pytest.fixture(scope="class")
def snapshots(self):
return {"snapshot.sql": snapshots.SNAPSHOT_TIMESTAMP_SQL}
def test_updates_are_captured_by_snapshot(self, project):
"""
Update the last 5 records. Show that all ids are current, but the last 5 reflect updates.
"""
self.update_fact_records(
{"updated_at": "updated_at + interval '1 day'"}, "id between 16 and 20"
)
run_dbt(["snapshot"])
self._assert_results(
ids_with_current_snapshot_records=range(1, 21),
ids_with_closed_out_snapshot_records=range(16, 21),
)
def test_inserts_are_captured_by_snapshot(self, project):
"""
Insert 10 records. Show that there are 30 records in `snapshot`, all of which are current.
"""
self.insert_fact_records("id between 21 and 30")
run_dbt(["snapshot"])
self._assert_results(
ids_with_current_snapshot_records=range(1, 31), ids_with_closed_out_snapshot_records=[]
)
def test_deletes_are_captured_by_snapshot(self, project):
"""
Hard delete the last five records. Show that there are now only 15 current records and 5 expired records.
"""
self.delete_fact_records("id between 16 and 20")
run_dbt(["snapshot"])
self._assert_results(
ids_with_current_snapshot_records=range(1, 16),
ids_with_closed_out_snapshot_records=range(16, 21),
)
def test_revives_are_captured_by_snapshot(self, project):
"""
Delete the last five records and run snapshot to collect that information, then revive 3 of those records.
Show that there are now 18 current records and 5 expired records.
"""
self.delete_fact_records("id between 16 and 20")
run_dbt(["snapshot"])
self.insert_fact_records("id between 16 and 18")
run_dbt(["snapshot"])
self._assert_results(
ids_with_current_snapshot_records=range(1, 19),
ids_with_closed_out_snapshot_records=range(16, 21),
)
def test_new_column_captured_by_snapshot(self, project):
"""
Add a column to `fact` and populate the last 10 records with a non-null value.
Show that all ids are current, but the last 10 reflect updates and the first 10 don't,
i.e. adding the column alone, without updating a record, does not mark that record as updated.
"""
self.add_fact_column("full_name", "varchar(200) default null")
self.update_fact_records(
{
"full_name": "first_name || ' ' || last_name",
"updated_at": "updated_at + interval '1 day'",
},
"id between 11 and 20",
)
run_dbt(["snapshot"])
self._assert_results(
ids_with_current_snapshot_records=range(1, 21),
ids_with_closed_out_snapshot_records=range(11, 21),
)
class BaseSnapshotCheck(BaseSimpleSnapshotBase):
@pytest.fixture(scope="class")
def snapshots(self):
return {"snapshot.sql": snapshots.SNAPSHOT_CHECK_SQL}
def test_column_selection_is_reflected_in_snapshot(self, project):
"""
Update the first 10 records on a non-tracked column.
Update the middle 10 records on a tracked column (so records 6-10 are updated on both columns).
Show that all ids are current, and only the tracked column updates are reflected in `snapshot`.
"""
self.update_fact_records(
{"last_name": "left(last_name, 3)"}, "id between 1 and 10"
) # not tracked
self.update_fact_records({"email": "left(email, 3)"}, "id between 6 and 15") # tracked
run_dbt(["snapshot"])
self._assert_results(
ids_with_current_snapshot_records=range(1, 21),
ids_with_closed_out_snapshot_records=range(6, 16),
)
class TestSnapshot(BaseSimpleSnapshot):
pass
class TestSnapshotCheck(BaseSnapshotCheck):
pass
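# --- Illustration (not part of the original module) ---
# These duplicated classes are deleted in favor of shared base classes that
# adapter repos subclass directly. A minimal sketch, assuming the bases ship
# in the dbt-tests-adapter package under dbt.tests.adapter:
#
#     from dbt.tests.adapter.simple_snapshot.test_snapshot import (
#         BaseSimpleSnapshot,
#         BaseSnapshotCheck,
#     )
#
#     class TestSnapshotMyAdapter(BaseSimpleSnapshot):
#         pass
#
#     class TestSnapshotCheckMyAdapter(BaseSnapshotCheck):
#         pass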

View File

@@ -1,277 +0,0 @@
import datetime
import pytest
from dbt.tests.util import (
check_relations_equal,
get_manifest,
run_dbt,
run_dbt_and_capture,
run_sql_with_adapter,
update_config_file,
)
from tests.functional.adapter.simple_snapshot.fixtures import (
create_multi_key_seed_sql,
create_multi_key_snapshot_expected_sql,
create_seed_sql,
create_snapshot_expected_sql,
invalidate_multi_key_sql,
invalidate_sql,
model_seed_sql,
populate_multi_key_snapshot_expected_sql,
populate_snapshot_expected_sql,
populate_snapshot_expected_valid_to_current_sql,
ref_snapshot_sql,
seed_insert_sql,
seed_multi_key_insert_sql,
snapshot_actual_sql,
snapshots_multi_key_yml,
snapshots_no_column_names_yml,
snapshots_valid_to_current_yml,
snapshots_yml,
update_multi_key_sql,
update_sql,
update_with_current_sql,
)
class BaseSnapshotColumnNames:
@pytest.fixture(scope="class")
def snapshots(self):
return {"snapshot.sql": snapshot_actual_sql}
@pytest.fixture(scope="class")
def models(self):
return {
"snapshots.yml": snapshots_yml,
"ref_snapshot.sql": ref_snapshot_sql,
}
def test_snapshot_column_names(self, project):
project.run_sql(create_seed_sql)
project.run_sql(create_snapshot_expected_sql)
project.run_sql(seed_insert_sql)
project.run_sql(populate_snapshot_expected_sql)
results = run_dbt(["snapshot"])
assert len(results) == 1
project.run_sql(invalidate_sql)
project.run_sql(update_sql)
results = run_dbt(["snapshot"])
assert len(results) == 1
# run_dbt(["test"])
check_relations_equal(project.adapter, ["snapshot_actual", "snapshot_expected"])
class TestSnapshotColumnNames(BaseSnapshotColumnNames):
pass
class BaseSnapshotColumnNamesFromDbtProject:
@pytest.fixture(scope="class")
def snapshots(self):
return {"snapshot.sql": snapshot_actual_sql}
@pytest.fixture(scope="class")
def models(self):
return {
"snapshots.yml": snapshots_no_column_names_yml,
"ref_snapshot.sql": ref_snapshot_sql,
}
@pytest.fixture(scope="class")
def project_config_update(self):
return {
"snapshots": {
"test": {
"+snapshot_meta_column_names": {
"dbt_valid_to": "test_valid_to",
"dbt_valid_from": "test_valid_from",
"dbt_scd_id": "test_scd_id",
"dbt_updated_at": "test_updated_at",
}
}
}
}
def test_snapshot_column_names_from_project(self, project):
project.run_sql(create_seed_sql)
project.run_sql(create_snapshot_expected_sql)
project.run_sql(seed_insert_sql)
project.run_sql(populate_snapshot_expected_sql)
results = run_dbt(["snapshot"])
assert len(results) == 1
project.run_sql(invalidate_sql)
project.run_sql(update_sql)
results = run_dbt(["snapshot"])
assert len(results) == 1
# run_dbt(["test"])
check_relations_equal(project.adapter, ["snapshot_actual", "snapshot_expected"])
class TestSnapshotColumnNamesFromDbtProject(BaseSnapshotColumnNamesFromDbtProject):
pass
class BaseSnapshotInvalidColumnNames:
@pytest.fixture(scope="class")
def snapshots(self):
return {"snapshot.sql": snapshot_actual_sql}
@pytest.fixture(scope="class")
def models(self):
return {
"snapshots.yml": snapshots_no_column_names_yml,
"ref_snapshot.sql": ref_snapshot_sql,
}
@pytest.fixture(scope="class")
def project_config_update(self):
return {
"snapshots": {
"test": {
"+snapshot_meta_column_names": {
"dbt_valid_to": "test_valid_to",
"dbt_valid_from": "test_valid_from",
"dbt_scd_id": "test_scd_id",
"dbt_updated_at": "test_updated_at",
}
}
}
}
def test_snapshot_invalid_column_names(self, project):
project.run_sql(create_seed_sql)
project.run_sql(create_snapshot_expected_sql)
project.run_sql(seed_insert_sql)
project.run_sql(populate_snapshot_expected_sql)
results = run_dbt(["snapshot"])
assert len(results) == 1
manifest = get_manifest(project.project_root)
snapshot_node = manifest.nodes["snapshot.test.snapshot_actual"]
assert snapshot_node.config.snapshot_meta_column_names == {
"dbt_valid_to": "test_valid_to",
"dbt_valid_from": "test_valid_from",
"dbt_scd_id": "test_scd_id",
"dbt_updated_at": "test_updated_at",
}
project.run_sql(invalidate_sql)
project.run_sql(update_sql)
# Change snapshot_meta_column_names and look for an error
different_columns = {
"snapshots": {
"test": {
"+snapshot_meta_column_names": {
"dbt_valid_to": "test_valid_to",
"dbt_updated_at": "test_updated_at",
}
}
}
}
update_config_file(different_columns, "dbt_project.yml")
results, log_output = run_dbt_and_capture(["snapshot"], expect_pass=False)
assert len(results) == 1
assert "Compilation Error in snapshot snapshot_actual" in log_output
assert "Snapshot target is missing configured columns" in log_output
class TestSnapshotInvalidColumnNames(BaseSnapshotInvalidColumnNames):
pass
class BaseSnapshotDbtValidToCurrent:
@pytest.fixture(scope="class")
def snapshots(self):
return {"snapshot.sql": snapshot_actual_sql}
@pytest.fixture(scope="class")
def models(self):
return {
"snapshots.yml": snapshots_valid_to_current_yml,
"ref_snapshot.sql": ref_snapshot_sql,
}
def test_valid_to_current(self, project):
project.run_sql(create_seed_sql)
project.run_sql(create_snapshot_expected_sql)
project.run_sql(seed_insert_sql)
project.run_sql(populate_snapshot_expected_valid_to_current_sql)
results = run_dbt(["snapshot"])
assert len(results) == 1
original_snapshot = run_sql_with_adapter(
project.adapter,
"select id, test_scd_id, test_valid_to from {schema}.snapshot_actual",
"all",
)
assert original_snapshot[0][2] == datetime.datetime(2099, 12, 31, 0, 0)
assert original_snapshot[9][2] == datetime.datetime(2099, 12, 31, 0, 0)
project.run_sql(invalidate_sql)
project.run_sql(update_with_current_sql)
results = run_dbt(["snapshot"])
assert len(results) == 1
updated_snapshot = run_sql_with_adapter(
project.adapter,
"select id, test_scd_id, test_valid_to from {schema}.snapshot_actual",
"all",
)
assert updated_snapshot[0][2] == datetime.datetime(2099, 12, 31, 0, 0)
# The original row that was updated is now closed out with a real timestamp (no longer 2099-12-31)
assert updated_snapshot[9][2] == datetime.datetime(2016, 8, 20, 16, 44, 49)
# Updated row has a current date
assert updated_snapshot[20][2] == datetime.datetime(2099, 12, 31, 0, 0)
check_relations_equal(project.adapter, ["snapshot_actual", "snapshot_expected"])
class TestSnapshotDbtValidToCurrent(BaseSnapshotDbtValidToCurrent):
pass
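# --- Illustration (not part of the original module) ---
# With dbt_valid_to_current configured, "current" rows carry the sentinel
# date instead of NULL, so a currency check has to account for both forms;
# a minimal sketch:
def _is_current(valid_to, sentinel=datetime.datetime(2099, 12, 31)):
    return valid_to is None or valid_to == sentinel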
# This uses snapshot_meta_column_names, yaml-only snapshot def,
# and multiple keys
class BaseSnapshotMultiUniqueKey:
@pytest.fixture(scope="class")
def models(self):
return {
"seed.sql": model_seed_sql,
"snapshots.yml": snapshots_multi_key_yml,
"ref_snapshot.sql": ref_snapshot_sql,
}
def test_multi_column_unique_key(self, project):
project.run_sql(create_multi_key_seed_sql)
project.run_sql(create_multi_key_snapshot_expected_sql)
project.run_sql(seed_multi_key_insert_sql)
project.run_sql(populate_multi_key_snapshot_expected_sql)
results = run_dbt(["snapshot"])
assert len(results) == 1
project.run_sql(invalidate_multi_key_sql)
project.run_sql(update_multi_key_sql)
results = run_dbt(["snapshot"])
assert len(results) == 1
# run_dbt(["test"])
check_relations_equal(project.adapter, ["snapshot_actual", "snapshot_expected"])
class TestSnapshotMultiUniqueKey(BaseSnapshotMultiUniqueKey):
pass

View File

@@ -1,150 +0,0 @@
SEED__CHIPMUNKS = """
name,shirt
alvin,red
simon,blue
theodore,green
dave,
""".strip()
MODEL__CHIPMUNKS = """
{{ config(materialized='table') }}
select *
from {{ ref('chipmunks_stage') }}
"""
TEST__VIEW_TRUE = """
{{ config(store_failures_as="view", store_failures=True) }}
select *
from {{ ref('chipmunks') }}
where shirt = 'green'
"""
TEST__VIEW_FALSE = """
{{ config(store_failures_as="view", store_failures=False) }}
select *
from {{ ref('chipmunks') }}
where shirt = 'green'
"""
TEST__VIEW_UNSET = """
{{ config(store_failures_as="view") }}
select *
from {{ ref('chipmunks') }}
where shirt = 'green'
"""
TEST__TABLE_TRUE = """
{{ config(store_failures_as="table", store_failures=True) }}
select *
from {{ ref('chipmunks') }}
where shirt = 'green'
"""
TEST__TABLE_FALSE = """
{{ config(store_failures_as="table", store_failures=False) }}
select *
from {{ ref('chipmunks') }}
where shirt = 'green'
"""
TEST__TABLE_UNSET = """
{{ config(store_failures_as="table") }}
select *
from {{ ref('chipmunks') }}
where shirt = 'green'
"""
TEST__EPHEMERAL_TRUE = """
{{ config(store_failures_as="ephemeral", store_failures=True) }}
select *
from {{ ref('chipmunks') }}
where shirt = 'green'
"""
TEST__EPHEMERAL_FALSE = """
{{ config(store_failures_as="ephemeral", store_failures=False) }}
select *
from {{ ref('chipmunks') }}
where shirt = 'green'
"""
TEST__EPHEMERAL_UNSET = """
{{ config(store_failures_as="ephemeral") }}
select *
from {{ ref('chipmunks') }}
where shirt = 'green'
"""
TEST__UNSET_TRUE = """
{{ config(store_failures=True) }}
select *
from {{ ref('chipmunks') }}
where shirt = 'green'
"""
TEST__UNSET_FALSE = """
{{ config(store_failures=False) }}
select *
from {{ ref('chipmunks') }}
where shirt = 'green'
"""
TEST__UNSET_UNSET = """
select *
from {{ ref('chipmunks') }}
where shirt = 'green'
"""
TEST__VIEW_UNSET_PASS = """
{{ config(store_failures_as="view") }}
select *
from {{ ref('chipmunks') }}
where shirt = 'purple'
"""
TEST__ERROR_UNSET = """
{{ config(store_failures_as="error") }}
select *
from {{ ref('chipmunks') }}
where shirt = 'green'
"""
SCHEMA_YML = """
version: 2
models:
- name: chipmunks
columns:
- name: name
data_tests:
- not_null:
store_failures_as: view
- accepted_values:
store_failures: false
store_failures_as: table
values:
- alvin
- simon
- theodore
- name: shirt
data_tests:
- not_null:
store_failures: true
store_failures_as: view
"""

View File

@@ -1,303 +0,0 @@
from collections import namedtuple
from typing import Set
import pytest
from dbt.artifacts.schemas.results import TestStatus
from dbt.tests.util import check_relation_types, run_dbt
from tests.functional.adapter.store_test_failures_tests import _files
TestResult = namedtuple("TestResult", ["name", "status", "type"])
class StoreTestFailuresAsBase:
seed_table: str = "chipmunks_stage"
model_table: str = "chipmunks"
audit_schema_suffix: str = "dbt_test__audit"
audit_schema: str
@pytest.fixture(scope="class", autouse=True)
def setup_class(self, project):
# the seed doesn't get touched, load it once
run_dbt(["seed"])
yield
@pytest.fixture(scope="function", autouse=True)
def setup_method(self, project, setup_class):
# rebuild the model so each test starts from a known-good state
run_dbt(["run"])
# the audit schema name is constant within a class, but it is set here because
# attributes on `self` only persist when assigned in a function-scoped fixture
self.audit_schema = f"{project.test_schema}_{self.audit_schema_suffix}"
yield
@pytest.fixture(scope="function", autouse=True)
def teardown_method(self, project):
yield
# clear out the audit schema after each test case
with project.adapter.connection_named("__test"):
audit_schema = project.adapter.Relation.create(
database=project.database, schema=self.audit_schema
)
project.adapter.drop_schema(audit_schema)
@pytest.fixture(scope="class")
def seeds(self):
return {f"{self.seed_table}.csv": _files.SEED__CHIPMUNKS}
@pytest.fixture(scope="class")
def models(self):
return {f"{self.model_table}.sql": _files.MODEL__CHIPMUNKS}
def run_and_assert(
self, project, expected_results: Set[TestResult], expect_pass: bool = False
) -> None:
"""
Run `dbt test` and assert that the results match the expected results
Args:
project: the `project` fixture; needed since we invoke `run_dbt`
expected_results: the expected results of the tests as instances of TestResult
expect_pass: passed directly into `run_dbt`; set to True only when every expected result is a passing test
"""
# run the tests
results = run_dbt(["test"], expect_pass=expect_pass)
# show that the statuses are what we expect
actual = {(result.node.name, result.status) for result in results}
expected = {(result.name, result.status) for result in expected_results}
assert actual == expected
# show that the results are persisted in the correct database objects
check_relation_types(
project.adapter, {result.name: result.type for result in expected_results}
)
class StoreTestFailuresAsInteractions(StoreTestFailuresAsBase):
"""
These scenarios test interactions between `store_failures` and `store_failures_as` at the model level.
Granularity (e.g. setting one at the project level and another at the model level) is not considered;
a sketch of the resolution rule these scenarios encode follows this class.
Test Scenarios:
- If `store_failures_as = "view"` and `store_failures = True`, then store the failures in a view.
- If `store_failures_as = "view"` and `store_failures = False`, then store the failures in a view.
- If `store_failures_as = "view"` and `store_failures` is not set, then store the failures in a view.
- If `store_failures_as = "table"` and `store_failures = True`, then store the failures in a table.
- If `store_failures_as = "table"` and `store_failures = False`, then store the failures in a table.
- If `store_failures_as = "table"` and `store_failures` is not set, then store the failures in a table.
- If `store_failures_as = "ephemeral"` and `store_failures = True`, then do not store the failures.
- If `store_failures_as = "ephemeral"` and `store_failures = False`, then do not store the failures.
- If `store_failures_as = "ephemeral"` and `store_failures` is not set, then do not store the failures.
- If `store_failures_as` is not set and `store_failures = True`, then store the failures in a table.
- If `store_failures_as` is not set and `store_failures = False`, then do not store the failures.
- If `store_failures_as` is not set and `store_failures` is not set, then do not store the failures.
"""
@pytest.fixture(scope="class")
def tests(self):
return {
"view_unset_pass.sql": _files.TEST__VIEW_UNSET_PASS, # control
"view_true.sql": _files.TEST__VIEW_TRUE,
"view_false.sql": _files.TEST__VIEW_FALSE,
"view_unset.sql": _files.TEST__VIEW_UNSET,
"table_true.sql": _files.TEST__TABLE_TRUE,
"table_false.sql": _files.TEST__TABLE_FALSE,
"table_unset.sql": _files.TEST__TABLE_UNSET,
"ephemeral_true.sql": _files.TEST__EPHEMERAL_TRUE,
"ephemeral_false.sql": _files.TEST__EPHEMERAL_FALSE,
"ephemeral_unset.sql": _files.TEST__EPHEMERAL_UNSET,
"unset_true.sql": _files.TEST__UNSET_TRUE,
"unset_false.sql": _files.TEST__UNSET_FALSE,
"unset_unset.sql": _files.TEST__UNSET_UNSET,
}
def test_tests_run_successfully_and_are_stored_as_expected(self, project):
expected_results = {
TestResult("view_unset_pass", TestStatus.Pass, "view"), # control
TestResult("view_true", TestStatus.Fail, "view"),
TestResult("view_false", TestStatus.Fail, "view"),
TestResult("view_unset", TestStatus.Fail, "view"),
TestResult("table_true", TestStatus.Fail, "table"),
TestResult("table_false", TestStatus.Fail, "table"),
TestResult("table_unset", TestStatus.Fail, "table"),
TestResult("ephemeral_true", TestStatus.Fail, None),
TestResult("ephemeral_false", TestStatus.Fail, None),
TestResult("ephemeral_unset", TestStatus.Fail, None),
TestResult("unset_true", TestStatus.Fail, "table"),
TestResult("unset_false", TestStatus.Fail, None),
TestResult("unset_unset", TestStatus.Fail, None),
}
self.run_and_assert(project, expected_results)
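# --- Illustration (not part of the original module) ---
# A minimal sketch (not dbt-core's actual implementation) of the resolution
# rule the scenario matrix above encodes: `store_failures_as` always wins
# when set; otherwise `store_failures=True` falls back to a table.
def _resolve_failure_storage(store_failures, store_failures_as):
    if store_failures_as == "ephemeral":
        return None  # failures are not persisted
    if store_failures_as in ("view", "table"):
        return store_failures_as
    return "table" if store_failures else None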
class StoreTestFailuresAsProjectLevelOff(StoreTestFailuresAsBase):
"""
These scenarios test that `store_failures_as` at the model level takes precedence over `store_failures`
at the project level.
Test Scenarios:
- If `store_failures = False` in the project and `store_failures_as = "view"` in the model,
then store the failures in a view.
- If `store_failures = False` in the project and `store_failures_as = "table"` in the model,
then store the failures in a table.
- If `store_failures = False` in the project and `store_failures_as = "ephemeral"` in the model,
then do not store the failures.
- If `store_failures = False` in the project and `store_failures_as` is not set,
then do not store the failures.
"""
@pytest.fixture(scope="class")
def tests(self):
return {
"results_view.sql": _files.TEST__VIEW_UNSET,
"results_table.sql": _files.TEST__TABLE_UNSET,
"results_ephemeral.sql": _files.TEST__EPHEMERAL_UNSET,
"results_unset.sql": _files.TEST__UNSET_UNSET,
}
@pytest.fixture(scope="class")
def project_config_update(self):
return {"data_tests": {"store_failures": False}}
def test_tests_run_successfully_and_are_stored_as_expected(self, project):
expected_results = {
TestResult("results_view", TestStatus.Fail, "view"),
TestResult("results_table", TestStatus.Fail, "table"),
TestResult("results_ephemeral", TestStatus.Fail, None),
TestResult("results_unset", TestStatus.Fail, None),
}
self.run_and_assert(project, expected_results)
class StoreTestFailuresAsProjectLevelView(StoreTestFailuresAsBase):
"""
These scenarios test that `store_failures_as` at the project level takes precedence over `store_failures`
at the model level.
Test Scenarios:
- If `store_failures_as = "view"` in the project and `store_failures = False` in the model,
then store the failures in a view.
- If `store_failures_as = "view"` in the project and `store_failures = True` in the model,
then store the failures in a view.
- If `store_failures_as = "view"` in the project and `store_failures` is not set,
then store the failures in a view.
"""
@pytest.fixture(scope="class")
def tests(self):
return {
"results_true.sql": _files.TEST__VIEW_TRUE,
"results_false.sql": _files.TEST__VIEW_FALSE,
"results_unset.sql": _files.TEST__VIEW_UNSET,
}
@pytest.fixture(scope="class")
def project_config_update(self):
return {"data_tests": {"store_failures_as": "view"}}
def test_tests_run_successfully_and_are_stored_as_expected(self, project):
expected_results = {
TestResult("results_true", TestStatus.Fail, "view"),
TestResult("results_false", TestStatus.Fail, "view"),
TestResult("results_unset", TestStatus.Fail, "view"),
}
self.run_and_assert(project, expected_results)
class StoreTestFailuresAsProjectLevelEphemeral(StoreTestFailuresAsBase):
"""
This scenario tests that `store_failures_as` at the project level takes precedence over `store_failures`
at the model level. In particular, setting `store_failures_as = "ephemeral"` at the project level
turns off `store_failures` regardless of the setting of `store_failures` anywhere. Turning `store_failures`
back on at the model level requires `store_failures_as` to be set at the model level.
Test Scenarios:
- If `store_failures_as = "ephemeral"` in the project and `store_failures = True` in the project,
then do not store the failures.
- If `store_failures_as = "ephemeral"` in the project and `store_failures = True` in the project and the model,
then do not store the failures.
- If `store_failures_as = "ephemeral"` in the project and `store_failures_as = "view"` in the model,
then store the failures in a view.
"""
@pytest.fixture(scope="class")
def tests(self):
return {
"results_unset.sql": _files.TEST__UNSET_UNSET,
"results_true.sql": _files.TEST__UNSET_TRUE,
"results_view.sql": _files.TEST__VIEW_UNSET,
}
@pytest.fixture(scope="class")
def project_config_update(self):
return {"data_tests": {"store_failures_as": "ephemeral", "store_failures": True}}
def test_tests_run_successfully_and_are_stored_as_expected(self, project):
expected_results = {
TestResult("results_unset", TestStatus.Fail, None),
TestResult("results_true", TestStatus.Fail, None),
TestResult("results_view", TestStatus.Fail, "view"),
}
self.run_and_assert(project, expected_results)
class StoreTestFailuresAsGeneric(StoreTestFailuresAsBase):
"""
This tests that `store_failures_as` works with generic tests.
Test Scenarios:
- If `store_failures_as = "view"` is used with the `not_null` test in the model, then store the failures in a view.
"""
@pytest.fixture(scope="class")
def models(self):
return {
f"{self.model_table}.sql": _files.MODEL__CHIPMUNKS,
"schema.yml": _files.SCHEMA_YML,
}
def test_tests_run_successfully_and_are_stored_as_expected(self, project):
expected_results = {
# `store_failures` unset, `store_failures_as = "view"`
TestResult("not_null_chipmunks_name", TestStatus.Pass, "view"),
# `store_failures = False`, `store_failures_as = "table"`
TestResult(
"accepted_values_chipmunks_name__alvin__simon__theodore", TestStatus.Fail, "table"
),
# `store_failures = True`, `store_failures_as = "view"`
TestResult("not_null_chipmunks_shirt", TestStatus.Fail, "view"),
}
self.run_and_assert(project, expected_results)
class StoreTestFailuresAsExceptions(StoreTestFailuresAsBase):
"""
This tests that `store_failures_as` raises exceptions in appropriate scenarios.
Test Scenarios:
- If `store_failures_as = "error"`, a helpful exception is raised.
"""
@pytest.fixture(scope="class")
def tests(self):
return {
"store_failures_as_error.sql": _files.TEST__ERROR_UNSET,
}
def test_tests_run_unsuccessfully_and_raise_appropriate_exception(self, project):
results = run_dbt(["test"], expect_pass=False)
assert len(results) == 1
result = results[0]
assert "Compilation Error" in result.message
assert "'error' is not a valid value" in result.message
assert "Accepted values are: ['ephemeral', 'table', 'view']" in result.message

View File

@@ -1,126 +0,0 @@
#
# Seeds
#
seeds__people = """id,first_name,last_name,email,gender,ip_address
1,Jack,Hunter,jhunter0@pbs.org,Male,59.80.20.168
2,Kathryn,Walker,kwalker1@ezinearticles.com,Female,194.121.179.35
3,Gerald,Ryan,gryan2@com.com,Male,11.3.212.243
4,Bonnie,Spencer,bspencer3@ameblo.jp,Female,216.32.196.175
5,Harold,Taylor,htaylor4@people.com.cn,Male,253.10.246.136
6,Jacqueline,Griffin,jgriffin5@t.co,Female,16.13.192.220
7,Wanda,Arnold,warnold6@google.nl,Female,232.116.150.64
8,Craig,Ortiz,cortiz7@sciencedaily.com,Male,199.126.106.13
9,Gary,Day,gday8@nih.gov,Male,35.81.68.186
10,Rose,Wright,rwright9@yahoo.co.jp,Female,236.82.178.100
"""
seeds__expected_accepted_values = """value_field,n_records
Gary,1
Rose,1
"""
seeds__expected_failing_test = """id,first_name,last_name,email,gender,ip_address
1,Jack,Hunter,jhunter0@pbs.org,Male,59.80.20.168
2,Kathryn,Walker,kwalker1@ezinearticles.com,Female,194.121.179.35
3,Gerald,Ryan,gryan2@com.com,Male,11.3.212.243
4,Bonnie,Spencer,bspencer3@ameblo.jp,Female,216.32.196.175
5,Harold,Taylor,htaylor4@people.com.cn,Male,253.10.246.136
6,Jacqueline,Griffin,jgriffin5@t.co,Female,16.13.192.220
7,Wanda,Arnold,warnold6@google.nl,Female,232.116.150.64
8,Craig,Ortiz,cortiz7@sciencedaily.com,Male,199.126.106.13
9,Gary,Day,gday8@nih.gov,Male,35.81.68.186
10,Rose,Wright,rwright9@yahoo.co.jp,Female,236.82.178.100
"""
seeds__expected_not_null_problematic_model_id = """id,first_name,last_name,email,gender,ip_address
,Gerald,Ryan,gryan2@com.com,Male,11.3.212.243
,Bonnie,Spencer,bspencer3@ameblo.jp,Female,216.32.196.175
"""
seeds__expected_unique_problematic_model_id = """unique_field,n_records
2,2
1,2
"""
#
# Schema
#
properties__schema_yml = """
version: 2
models:
- name: fine_model
columns:
- name: id
data_tests:
- unique
- not_null
- name: problematic_model
columns:
- name: id
data_tests:
- unique:
store_failures: true
- not_null
- name: first_name
data_tests:
# test truncation of really long test name
- accepted_values:
values:
- Jack
- Kathryn
- Gerald
- Bonnie
- Harold
- Jacqueline
- Wanda
- Craig
# - Gary
# - Rose
- name: fine_model_but_with_a_no_good_very_long_name
columns:
- name: quite_long_column_name
data_tests:
# test truncation of really long test name with builtin
- unique
"""
#
# Models
#
models__fine_model = """
select * from {{ ref('people') }}
"""
models__fine_model_but_with_a_no_good_very_long_name = """
select 1 as quite_long_column_name
"""
models__problematic_model = """
select * from {{ ref('people') }}
union all
select * from {{ ref('people') }}
where id in (1,2)
union all
select null as id, first_name, last_name, email, gender, ip_address from {{ ref('people') }}
where id in (3,4)
"""
#
# Tests
#
tests__failing_test = """
select * from {{ ref('fine_model') }}
"""
tests__passing_test = """
select * from {{ ref('fine_model') }}
where false
"""

View File

@@ -1,163 +0,0 @@
import pytest
from dbt.tests.util import check_relations_equal, run_dbt
from tests.functional.adapter.store_test_failures_tests.fixtures import (
models__fine_model_but_with_a_no_good_very_long_name,
models__fine_model,
models__problematic_model,
properties__schema_yml,
seeds__expected_accepted_values,
seeds__expected_failing_test,
seeds__expected_not_null_problematic_model_id,
seeds__expected_unique_problematic_model_id,
seeds__people,
tests__failing_test,
tests__passing_test,
)
# used to rename the test audit schema so the full schema name fits within the identifier limit
# the default suffix is _dbt_test__audit, which pushes the name over Postgres' 63-character limit;
# beyond that limit the idempotency conditions no longer hold (i.e. dbt can't drop the schema properly)
TEST_AUDIT_SCHEMA_SUFFIX = "dbt_test__aud"
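# --- Illustration (not part of the original module) ---
# Why the shortened suffix matters: Postgres truncates identifiers longer
# than 63 characters, so the combined schema name must stay under that
# limit. A sketch with a hypothetical test schema name:
#
#     schema = "test_1700000000_functional"  # hypothetical
#     assert len(f"{schema}_{TEST_AUDIT_SCHEMA_SUFFIX}") <= 63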
class StoreTestFailuresBase:
@pytest.fixture(scope="function", autouse=True)
def setUp(self, project):
self.test_audit_schema = f"{project.test_schema}_{TEST_AUDIT_SCHEMA_SUFFIX}"
run_dbt(["seed"])
run_dbt(["run"])
@pytest.fixture(scope="class")
def seeds(self):
return {
"people.csv": seeds__people,
"expected_accepted_values.csv": seeds__expected_accepted_values,
"expected_failing_test.csv": seeds__expected_failing_test,
"expected_not_null_problematic_model_id.csv": seeds__expected_not_null_problematic_model_id,
"expected_unique_problematic_model_id.csv": seeds__expected_unique_problematic_model_id,
}
@pytest.fixture(scope="class")
def tests(self):
return {
"failing_test.sql": tests__failing_test,
"passing_test.sql": tests__passing_test,
}
@pytest.fixture(scope="class")
def properties(self):
return {"schema.yml": properties__schema_yml}
@pytest.fixture(scope="class")
def models(self):
return {
"fine_model.sql": models__fine_model,
"fine_model_but_with_a_no_good_very_long_name.sql": models__file_model_but_with_a_no_good_very_long_name,
"problematic_model.sql": models__problematic_model,
}
@pytest.fixture(scope="class")
def project_config_update(self):
return {
"seeds": {
"quote_columns": False,
"test": self.column_type_overrides(),
},
"data_tests": {"+schema": TEST_AUDIT_SCHEMA_SUFFIX},
}
def column_type_overrides(self):
return {}
def run_tests_store_one_failure(self, project):
run_dbt(["test"], expect_pass=False)
# one test is configured with store_failures: true, make sure it worked
check_relations_equal(
project.adapter,
[
f"{self.test_audit_schema}.unique_problematic_model_id",
"expected_unique_problematic_model_id",
],
)
def run_tests_store_failures_and_assert(self, project):
# make sure this works idempotently for all tests
run_dbt(["test", "--store-failures"], expect_pass=False)
results = run_dbt(["test", "--store-failures"], expect_pass=False)
# compare test results
actual = [(r.status, r.failures) for r in results]
expected = [
("pass", 0),
("pass", 0),
("pass", 0),
("pass", 0),
("fail", 2),
("fail", 2),
("fail", 2),
("fail", 10),
]
assert sorted(actual) == sorted(expected)
# compare test results stored in database
check_relations_equal(
project.adapter, [f"{self.test_audit_schema}.failing_test", "expected_failing_test"]
)
check_relations_equal(
project.adapter,
[
f"{self.test_audit_schema}.not_null_problematic_model_id",
"expected_not_null_problematic_model_id",
],
)
check_relations_equal(
project.adapter,
[
f"{self.test_audit_schema}.unique_problematic_model_id",
"expected_unique_problematic_model_id",
],
)
check_relations_equal(
project.adapter,
[
f"{self.test_audit_schema}.accepted_values_problemat"
"ic_mo_c533ab4ca65c1a9dbf14f79ded49b628",
"expected_accepted_values",
],
)
class TestStoreTestFailures(StoreTestFailuresBase):
@pytest.fixture(scope="function")
def clean_up(self, project):
yield
with project.adapter.connection_named("__test"):
relation = project.adapter.Relation.create(
database=project.database, schema=self.test_audit_schema
)
project.adapter.drop_schema(relation)
relation = project.adapter.Relation.create(
database=project.database, schema=project.test_schema
)
project.adapter.drop_schema(relation)
def column_type_overrides(self):
return {
"expected_unique_problematic_model_id": {
"+column_types": {
"n_records": "bigint",
},
},
"expected_accepted_values": {
"+column_types": {
"n_records": "bigint",
},
},
}
def test__store_and_assert(self, project, clean_up):
self.run_tests_store_one_failure(project)
self.run_tests_store_failures_and_assert(project)

View File

@@ -1,69 +0,0 @@
import pytest
from dbt.adapters.postgres.relation_configs import MAX_CHARACTERS_IN_IDENTIFIER
from dbt.tests.util import run_dbt, write_file
my_model_a_sql = """
SELECT
1 as a,
1 as id,
2 as not_testing,
'a' as string_a,
DATE '2020-01-02' as date_a
"""
test_model_a_long_test_name_yml = """
unit_tests:
- name: {test_name}
model: my_model_a
given: []
expect:
rows:
- {{a: 1, id: 1, not_testing: 2, string_a: "a", date_a: "2020-01-02"}}
"""
class BaseUnitTestLongTestName:
@pytest.fixture(scope="class")
def models(self):
return {
"my_model_a.sql": my_model_a_sql,
"test_model_a.yml": test_model_a_long_test_name_yml,
}
@pytest.fixture
def max_unit_test_name_length(self) -> int:
return -1
def test_long_unit_test_name(self, project, max_unit_test_name_length):
# max test name == passing unit test
write_file(
test_model_a_long_test_name_yml.format(test_name="a" * max_unit_test_name_length),
"models",
"test_model_a.yml",
)
results = run_dbt(["run"])
assert len(results) == 1
results = run_dbt(["test"], expect_pass=True)
assert len(results) == 1
# one over the max test name length == failing command
write_file(
test_model_a_long_test_name_yml.format(
test_name="a" * (max_unit_test_name_length + 1)
),
"models",
"test_model_a.yml",
)
results = run_dbt(["run"])
assert len(results) == 1
run_dbt(["test"], expect_pass=False)
class TestPostgresUnitTestLongTestNames(BaseUnitTestLongTestName):
@pytest.fixture
def max_unit_test_name_length(self) -> int:
return MAX_CHARACTERS_IN_IDENTIFIER

View File

@@ -1,22 +0,0 @@
from dbt.tests.util import check_relations_equal, get_relation_columns, run_dbt
from tests.functional.adapter.utils.base_utils import BaseUtils
class BaseArrayUtils(BaseUtils):
def assert_columns_equal(self, project, expected_cols, actual_cols):
assert (
expected_cols == actual_cols
), f"Type difference detected: {expected_cols} vs. {actual_cols}"
def test_expected_actual(self, project):
run_dbt(["build"])
# check contents equal
check_relations_equal(project.adapter, ["expected", "actual"])
# check types equal
expected_cols = get_relation_columns(project.adapter, "expected")
actual_cols = get_relation_columns(project.adapter, "actual")
print(f"Expected: {expected_cols}")
print(f"Actual: {actual_cols}")
self.assert_columns_equal(project, expected_cols, actual_cols)

View File

@@ -1,56 +0,0 @@
import pytest
from dbt.tests.util import run_dbt
macros__equals_sql = """
{% macro equals(expr1, expr2) -%}
case when (({{ expr1 }} = {{ expr2 }}) or ({{ expr1 }} is null and {{ expr2 }} is null))
then 0
else 1
end = 0
{% endmacro %}
"""
macros__test_assert_equal_sql = """
{% test assert_equal(model, actual, expected) %}
select * from {{ model }}
where not {{ equals(actual, expected) }}
{% endtest %}
"""
macros__replace_empty_sql = """
{% macro replace_empty(expr) -%}
case
when {{ expr }} = 'EMPTY' then ''
else {{ expr }}
end
{% endmacro %}
"""
class BaseUtils:
# setup
@pytest.fixture(scope="class")
def macros(self):
return {
"equals.sql": macros__equals_sql,
"test_assert_equal.sql": macros__test_assert_equal_sql,
"replace_empty.sql": macros__replace_empty_sql,
}
# make it possible to dynamically prefix the macro call with a namespace
# (e.g. 'dateadd' -> 'dbt.dateadd' or 'dbt_utils.dateadd')
def macro_namespace(self):
return ""
def interpolate_macro_namespace(self, model_sql, macro_name):
macro_namespace = self.macro_namespace()
return (
model_sql.replace(f"{macro_name}(", f"{macro_namespace}.{macro_name}(")
if macro_namespace
else model_sql
)
# actual test sequence
def test_build_assert_equal(self, project):
run_dbt(["build"]) # seed, model, test

Some files were not shown because too many files have changed in this diff.