Mirror of https://github.com/dbt-labs/dbt-project-evaluator.git, synced 2025-12-18 02:11:27 +00:00
Merge branch 'main' into fix/505-rename-to-data-tests-for-v1
@@ -13,11 +13,11 @@ jobs:
      - run:
          name: "Run Tests - Postgres"
          environment:
            POSTGRES_TEST_HOST: localhost
            POSTGRES_TEST_USER: root
            POSTGRES_TEST_PASS: ''
            POSTGRES_TEST_PORT: 5432
            POSTGRES_TEST_DBNAME: circle_test
            POSTGRES_HOST: localhost
            POSTGRES_USER: root
            DBT_ENV_SECRET_POSTGRES_PASS: ''
            POSTGRES_PORT: 5432
            POSTGRES_DATABASE: circle_test
          command: ./run_test.sh postgres
      - store_artifacts:
          path: ./integration_tests/logs

49 .github/workflows/ci.yml vendored Normal file
@@ -0,0 +1,49 @@
# **what?**
# Run tests for dbt-utils against supported adapters

# **why?**
# To ensure that dbt-utils works as expected with all supported adapters

# **when?**
# On every PR, on every push to main, and when manually triggered

name: Package Integration Tests

on:
  push:
    branches:
      - main
  pull_request:
  workflow_dispatch:

jobs:
  run-tests:
    uses: dbt-labs/dbt-package-testing/.github/workflows/run_tox.yml@v1
    with:
      # no need to pass postgres vars in; we can just use the defaults in the local container
      # redshift
      REDSHIFT_HOST: ${{ vars.REDSHIFT_HOST }}
      REDSHIFT_USER: ${{ vars.REDSHIFT_USER }}
      REDSHIFT_DATABASE: ${{ vars.REDSHIFT_DATABASE }}
      REDSHIFT_SCHEMA: "dpe_integration_tests_redshift_${{ github.run_number }}"
      REDSHIFT_PORT: 5439
      # bigquery
      BIGQUERY_PROJECT: ${{ vars.BIGQUERY_PROJECT }}
      BIGQUERY_SCHEMA: "dpe_integration_tests_bigquery_${{ github.run_number }}"
      # snowflake
      SNOWFLAKE_USER: ${{ vars.SNOWFLAKE_USER }}
      SNOWFLAKE_ROLE: ${{ vars.SNOWFLAKE_ROLE }}
      SNOWFLAKE_DATABASE: ${{ vars.SNOWFLAKE_DATABASE }}
      SNOWFLAKE_WAREHOUSE: ${{ vars.SNOWFLAKE_WAREHOUSE }}
      SNOWFLAKE_SCHEMA: "dbt_utils_integration_tests_snowflake_${{ github.run_number }}"
      # databricks
      DATABRICKS_SCHEMA: "integration_tests_databricks_${{ github.run_number }}"
      DATABRICKS_HOST: ${{ vars.DATABRICKS_HOST }}
      DATABRICKS_HTTP_PATH: ${{ vars.DATABRICKS_HTTP_PATH }}
    secrets:
      DBT_ENV_SECRET_REDSHIFT_PASS: ${{ secrets.REDSHIFT_PASS }}
      BIGQUERY_KEYFILE_JSON: ${{ secrets.BIGQUERY_KEYFILE_JSON }}
      SNOWFLAKE_ACCOUNT: ${{ secrets.SNOWFLAKE_ACCOUNT }}
      DBT_ENV_SECRET_SNOWFLAKE_PASS: ${{ secrets.SNOWFLAKE_PASS }}
      DBT_ENV_SECRET_DATABRICKS_TOKEN: ${{ secrets.DBT_ENV_SECRET_DATABRICKS_TOKEN }}
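For a fork of this repo, the `vars.*` and `secrets.*` values referenced above must exist at the repository level before the reusable workflow can resolve them. A minimal sketch using the GitHub CLI, assuming a recent `gh` version; all values are hypothetical placeholders, not real credentials:

    # hypothetical values shown; repeat for the other variables and secrets listed above
    gh variable set REDSHIFT_HOST --body "my-cluster.example.us-east-1.redshift.amazonaws.com"
    gh variable set REDSHIFT_USER --body "ci_user"
    gh secret set REDSHIFT_PASS --body "not-a-real-password"
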
83 .github/workflows/local_only.yml vendored Normal file
@@ -0,0 +1,83 @@
# **what?**
# Run tests for packages not supported for cloud testing
#
# **why?**
# To ensure that packages work as expected with all supported adapters

# **when?**
# On push, PR, or manual trigger

name: Package Integration Tests - Local Only

on:
  push:
    branches:
      - main
  pull_request:
  workflow_dispatch:

env:
  PYTHON_VERSION: "3.11"
  POSTGRES_HOST: "localhost"
  POSTGRES_USER: "root"
  POSTGRES_PORT: "5432"
  POSTGRES_DATABASE: "postgres_test"
  DBT_ENV_SECRET_POSTGRES_PASS: "password" # this isn't actually a secret since it's only used on the runner

jobs:
  run-tests:
    runs-on: ubuntu-latest
    services:
      postgres:
        image: postgres
        env:
          POSTGRES_USER: ${{ env.POSTGRES_USER }}
          POSTGRES_PASSWORD: ${{ env.DBT_ENV_SECRET_POSTGRES_PASS }}
          POSTGRES_DB: ${{ env.POSTGRES_DATABASE }}
          POSTGRES_HOST: ${{ env.POSTGRES_HOST }}
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          - 5432:5432
    strategy:
      fail-fast: false
      matrix:
        # these adapters are tested in this repo but are not tested as part of the dbt Cloud images.
        # This list should include anything not listed in supported_adapters.env
        adapter: [duckdb, postgres]

    steps:
      - name: "Checkout ${{ github.event.repository }}"
        uses: actions/checkout@v4

      - name: "Set up Python ${{ env.PYTHON_VERSION }}"
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}

      - name: "Install ${{ matrix.adapter }}"
        run: |
          python -m pip install --upgrade pip
          pip install dbt-${{ matrix.adapter }}

      - name: "Install tox"
        run: |
          python -m pip install --upgrade pip
          pip install tox

      - name: "Run integration tests with tox on ${{ matrix.adapter }}"
        run: |
          tox -e dbt_integration_${{ matrix.adapter }}
        env:
          # postgres
          POSTGRES_HOST: ${{ env.POSTGRES_HOST }}
          POSTGRES_USER: ${{ env.POSTGRES_USER }}
          DBT_ENV_SECRET_POSTGRES_PASS: ${{ env.DBT_ENV_SECRET_POSTGRES_PASS }}
          POSTGRES_PORT: ${{ env.POSTGRES_PORT }}
          POSTGRES_DATABASE: ${{ env.POSTGRES_DATABASE }}
          POSTGRES_SCHEMA: "integration_tests_postgres_${{ github.run_number }}"
          # duckdb - needs no vars
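The same flow can be reproduced on a dev machine. A minimal sketch, assuming Docker is available, that mirrors the service container and env block above (the values are the workflow's throwaway test defaults, not real credentials):

    # start a disposable Postgres matching the service container above
    docker run -d --name dpe-pg -p 5432:5432 \
      -e POSTGRES_USER=root -e POSTGRES_PASSWORD=password \
      -e POSTGRES_DB=postgres_test postgres

    # export the vars the tox env passes through, then run the postgres env
    export POSTGRES_HOST=localhost POSTGRES_USER=root POSTGRES_PORT=5432
    export POSTGRES_DATABASE=postgres_test DBT_ENV_SECRET_POSTGRES_PASS=password
    export POSTGRES_SCHEMA=integration_tests_postgres_local
    pip install dbt-postgres tox
    tox -e dbt_integration_postgres
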
5 .gitignore vendored
@@ -3,7 +3,8 @@
venv/

# Environment variables
.env
# .env
test.env

# DBT artifacts
target/
@@ -13,7 +14,7 @@ dbt_packages/
integration_tests/state/
site/
env/
profiles.yml
# profiles.yml
package-lock.yml

# IDE

@@ -19,8 +19,9 @@ hide:
|Modeling |[Root Models](../rules/modeling/#root-models) |`fct_root_models`|
|Modeling |[Staging Models Dependent on Downstream Models](../rules/modeling/#staging-models-dependent-on-downstream-models) |`fct_staging_dependent_on_marts_or_intermediate`|
|Modeling |[Unused Sources](../rules/modeling/#unused-sources) |`fct_unused_sources`|
|Modeling |[Models with Too Many Joins](../rules/modeling/#models-with-too-many-joins) |`fct_too_many_joins`|
|Testing |[Missing Primary Key Tests](../rules/testing/#missing-primary-key-tests) |`fct_missing_primary_key_tests`|
|Testing |[Missing Source Freshness](../rules/testing/#missing-source-freshness) |`fct_sources_without_freshness`|
|Testing |[Test Coverage](../rules/testing/#test-coverage) |`fct_test_coverage`|
|Documentation |[Undocumented Models](../rules/documentation/#undocumented-models) |`fct_undocumented_models`|
|Documentation |[Documentation Coverage](../rules/documentation/#documentation-coverage) |`fct_documentation_coverage`|

@@ -40,8 +40,8 @@ Source freshness is useful for understanding if your data pipelines are in a hea
**How to Remediate**

Apply a [source freshness block](https://docs.getdbt.com/docs/build/sources#declaring-source-freshness) to the source definition. This can be implemented at either the source name or table name level.

---

## Test Coverage

`fct_test_coverage` ([source](https://github.com/dbt-labs/dbt-project-evaluator/tree/main/models/marts/tests/fct_test_coverage.sql)) contains metrics pertaining to project-wide test coverage.

@@ -1,84 +0,0 @@

# HEY! This file is used in the integration tests with CircleCI.
# You should __NEVER__ check credentials into version control. Thanks for reading :)

config:
  send_anonymous_usage_stats: False
  use_colors: True

integration_tests:
  target: postgres
  outputs:
    postgres:
      type: postgres
      host: "{{ env_var('POSTGRES_TEST_HOST') }}"
      user: "{{ env_var('POSTGRES_TEST_USER') }}"
      pass: "{{ env_var('POSTGRES_TEST_PASS') }}"
      port: "{{ env_var('POSTGRES_TEST_PORT') | as_number }}"
      dbname: "{{ env_var('POSTGRES_TEST_DBNAME') }}"
      schema: dbt_project_evaluator_integration_tests_postgres
      threads: 5

    redshift:
      type: redshift
      host: "{{ env_var('REDSHIFT_TEST_HOST') }}"
      user: "{{ env_var('REDSHIFT_TEST_USER') }}"
      pass: "{{ env_var('REDSHIFT_TEST_PASS') }}"
      dbname: "{{ env_var('REDSHIFT_TEST_DBNAME') }}"
      port: "{{ env_var('REDSHIFT_TEST_PORT') | as_number }}"
      schema: dbt_project_evaluator_integration_tests_redshift
      threads: 5

    bigquery:
      type: bigquery
      method: service-account
      keyfile: "{{ env_var('BIGQUERY_SERVICE_KEY_PATH') }}"
      project: "{{ env_var('BIGQUERY_TEST_DATABASE') }}"
      schema: dbt_project_evaluator_integration_tests_bigquery
      threads: 10

    snowflake:
      type: snowflake
      account: "{{ env_var('SNOWFLAKE_TEST_ACCOUNT') }}"
      user: "{{ env_var('SNOWFLAKE_TEST_USER') }}"
      password: "{{ env_var('SNOWFLAKE_TEST_PASSWORD') }}"
      role: "{{ env_var('SNOWFLAKE_TEST_ROLE') }}"
      database: "{{ env_var('SNOWFLAKE_TEST_DATABASE') }}"
      warehouse: "{{ env_var('SNOWFLAKE_TEST_WAREHOUSE') }}"
      schema: dbt_project_evaluator_integration_tests_snowflake
      threads: 10

    databricks:
      type: databricks
      schema: dbt_project_evaluator_integration_tests_databricks
      host: "{{ env_var('DATABRICKS_TEST_HOST') }}"
      http_path: "{{ env_var('DATABRICKS_TEST_HTTP_PATH') }}"
      token: "{{ env_var('DATABRICKS_TEST_ACCESS_TOKEN') }}"
      threads: 10

    duckdb:
      type: duckdb
      path: ./duck.db

    trino:
      type: trino
      host: "{{ env_var('TRINO_TEST_HOST') }}"
      port: "{{ env_var('TRINO_TEST_PORT') | as_number }}"
      method: ldap
      user: "{{ env_var('TRINO_TEST_USER') }}"
      password: "{{ env_var('TRINO_TEST_PASS') }}"
      catalog: "{{ env_var('TRINO_TEST_CATALOG_NAME') }}"
      schema: dbt_project_evaluator_integration_tests_trino
      threads: 5
      session_properties:
        query_max_stage_count: 275

    clickhouse:
      type: clickhouse
      host: "{{ env_var('CLICKHOUSE_TEST_HOST') }}"
      port: "{{ env_var('CLICKHOUSE_TEST_PORT') | as_number }}"
      user: "{{ env_var('CLICKHOUSE_TEST_USER') }}"
      password: "{{ env_var('CLICKHOUSE_TEST_PASS') }}"
      dbname: "{{ env_var('CLICKHOUSE_TEST_DBNAME') }}"
      schema: dbt_project_evaluator_integration_tests_clickhouse
      threads: 5

84 integration_tests/profiles.yml Normal file
@@ -0,0 +1,84 @@

# HEY! This file is used in the integration tests with CI.
# You should __NEVER__ check credentials into version control. Thanks for reading :)

config:
  send_anonymous_usage_stats: False
  use_colors: True

integration_tests:
  target: postgres
  outputs:
    postgres:
      type: postgres
      host: "{{ env_var('POSTGRES_HOST') }}"
      user: "{{ env_var('POSTGRES_USER') }}"
      pass: "{{ env_var('DBT_ENV_SECRET_POSTGRES_PASS') }}"
      port: "{{ env_var('POSTGRES_PORT') | as_number }}"
      dbname: "{{ env_var('POSTGRES_DATABASE') }}"
      schema: "{{ env_var('POSTGRES_SCHEMA', 'dbt_project_evaluator_integration_tests_postgres') }}"
      threads: 5

    redshift:
      type: redshift
      host: "{{ env_var('REDSHIFT_HOST') }}"
      user: "{{ env_var('REDSHIFT_USER') }}"
      pass: "{{ env_var('DBT_ENV_SECRET_REDSHIFT_PASS') }}"
      dbname: "{{ env_var('REDSHIFT_DATABASE') }}"
      port: "{{ env_var('REDSHIFT_PORT') | as_number }}"
      schema: "{{ env_var('REDSHIFT_SCHEMA', 'dbt_project_evaluator_integration_tests_redshift') }}"
      threads: 5

    bigquery:
      type: bigquery
      method: service-account-json
      keyfile_json: "{{ env_var('BIGQUERY_KEYFILE_JSON') | as_native }}"
      project: "{{ env_var('BIGQUERY_PROJECT') }}"
      dataset: "{{ env_var('BIGQUERY_SCHEMA', 'dbt_project_evaluator_integration_tests_bigquery') }}"
      threads: 10

    snowflake:
      type: snowflake
      account: "{{ env_var('SNOWFLAKE_ACCOUNT') }}"
      user: "{{ env_var('SNOWFLAKE_USER') }}"
      password: "{{ env_var('DBT_ENV_SECRET_SNOWFLAKE_PASS') }}"
      role: "{{ env_var('SNOWFLAKE_ROLE') }}"
      database: "{{ env_var('SNOWFLAKE_DATABASE') }}"
      warehouse: "{{ env_var('SNOWFLAKE_WAREHOUSE') }}"
      schema: "{{ env_var('SNOWFLAKE_SCHEMA', 'dbt_project_evaluator_integration_tests_snowflake') }}"
      threads: 10

    databricks:
      type: databricks
      schema: "{{ env_var('DATABRICKS_SCHEMA', 'dbt_project_evaluator_integration_tests_databricks') }}"
      host: "{{ env_var('DATABRICKS_HOST') }}"
      http_path: "{{ env_var('DATABRICKS_HTTP_PATH') }}"
      token: "{{ env_var('DBT_ENV_SECRET_DATABRICKS_TOKEN') }}"
      threads: 10

    duckdb:
      type: duckdb
      path: ./duck.db

    trino:
      type: trino
      host: "{{ env_var('TRINO_TEST_HOST') }}"
      port: "{{ env_var('TRINO_TEST_PORT') | as_number }}"
      method: ldap
      user: "{{ env_var('TRINO_TEST_USER') }}"
      password: "{{ env_var('TRINO_TEST_PASS') }}"
      catalog: "{{ env_var('TRINO_TEST_CATALOG_NAME') }}"
      schema: dbt_project_evaluator_integration_tests_trino
      threads: 5
      session_properties:
        query_max_stage_count: 275

    clickhouse:
      type: clickhouse
      host: "{{ env_var('CLICKHOUSE_TEST_HOST') }}"
      port: "{{ env_var('CLICKHOUSE_TEST_PORT') | as_number }}"
      user: "{{ env_var('CLICKHOUSE_TEST_USER') }}"
      password: "{{ env_var('CLICKHOUSE_TEST_PASS') }}"
      dbname: "{{ env_var('CLICKHOUSE_TEST_DBNAME') }}"
      schema: dbt_project_evaluator_integration_tests_clickhouse
      threads: 5
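Because every connection detail is read through `env_var()`, this profile can be smoke-tested locally without editing any YAML. A minimal sketch for the postgres target (placeholder values; recent dbt versions also pick up a profiles.yml in the current working directory, otherwise point `DBT_PROFILES_DIR` at it):

    # placeholder local values; POSTGRES_SCHEMA can be omitted thanks to the env_var default
    export POSTGRES_HOST=localhost POSTGRES_USER=root POSTGRES_PORT=5432
    export POSTGRES_DATABASE=postgres_test DBT_ENV_SECRET_POSTGRES_PASS=password
    cd integration_tests
    dbt debug --target postgres   # validates the connection against this profile
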
@@ -8,7 +8,7 @@ seeds:
          compare_model: ref('fct_multiple_sources_joined')
          compare_columns:
            - child
            - source_parents
            - "{{ 'source_parents' if target.type != 'databricks' else 'child' }}"

  - name: test_fct_direct_join_to_source
    data_tests:
@@ -49,7 +49,7 @@ seeds:
          compare_model: ref('fct_source_fanout')
          compare_columns:
            - parent
            - model_children
            - "{{ 'model_children' if target.type != 'databricks' else 'parent' }}"

  - name: test_fct_model_fanout
    data_tests:
@@ -59,7 +59,7 @@ seeds:
          compare_columns:
            - parent
            - parent_model_type
            - leaf_children
            - "{{ 'leaf_children' if target.type != 'databricks' else 'parent_model_type' }}"

  - name: test_fct_staging_dependent_on_staging
    data_tests:

@@ -18,7 +18,7 @@ seeds:
          compare_columns:
            - resource_name
            - model_type
            - appropriate_prefixes
            - "{{ 'appropriate_prefixes' if target.type != 'databricks' else 'model_type' }}"
  - name: test_fct_source_directories
    data_tests:
      - dbt_utils.equality:

84 integration_tests_2/profiles.yml Normal file
@@ -0,0 +1,84 @@

# HEY! This file is used in the integration tests with CI.
# You should __NEVER__ check credentials into version control. Thanks for reading :)

config:
  send_anonymous_usage_stats: False
  use_colors: True

integration_tests:
  target: postgres
  outputs:
    postgres:
      type: postgres
      host: "{{ env_var('POSTGRES_HOST') }}"
      user: "{{ env_var('POSTGRES_USER') }}"
      pass: "{{ env_var('DBT_ENV_SECRET_POSTGRES_PASS') }}"
      port: "{{ env_var('POSTGRES_PORT') | as_number }}"
      dbname: "{{ env_var('POSTGRES_DATABASE') }}"
      schema: "{{ env_var('POSTGRES_SCHEMA', 'dbt_project_evaluator_integration_tests_postgres') }}"
      threads: 5

    redshift:
      type: redshift
      host: "{{ env_var('REDSHIFT_HOST') }}"
      user: "{{ env_var('REDSHIFT_USER') }}"
      pass: "{{ env_var('DBT_ENV_SECRET_REDSHIFT_PASS') }}"
      dbname: "{{ env_var('REDSHIFT_DATABASE') }}"
      port: "{{ env_var('REDSHIFT_PORT') | as_number }}"
      schema: "{{ env_var('REDSHIFT_SCHEMA', 'dbt_project_evaluator_integration_tests_redshift') }}"
      threads: 5

    bigquery:
      type: bigquery
      method: service-account-json
      keyfile_json: "{{ env_var('BIGQUERY_KEYFILE_JSON') | as_native }}"
      project: "{{ env_var('BIGQUERY_PROJECT') }}"
      dataset: "{{ env_var('BIGQUERY_SCHEMA', 'dbt_project_evaluator_integration_tests_bigquery') }}"
      threads: 10

    snowflake:
      type: snowflake
      account: "{{ env_var('SNOWFLAKE_ACCOUNT') }}"
      user: "{{ env_var('SNOWFLAKE_USER') }}"
      password: "{{ env_var('DBT_ENV_SECRET_SNOWFLAKE_PASS') }}"
      role: "{{ env_var('SNOWFLAKE_ROLE') }}"
      database: "{{ env_var('SNOWFLAKE_DATABASE') }}"
      warehouse: "{{ env_var('SNOWFLAKE_WAREHOUSE') }}"
      schema: "{{ env_var('SNOWFLAKE_SCHEMA', 'dbt_project_evaluator_integration_tests_snowflake') }}"
      threads: 10

    databricks:
      type: databricks
      schema: "{{ env_var('DATABRICKS_SCHEMA', 'dbt_project_evaluator_integration_tests_databricks') }}"
      host: "{{ env_var('DATABRICKS_HOST') }}"
      http_path: "{{ env_var('DATABRICKS_HTTP_PATH') }}"
      token: "{{ env_var('DBT_ENV_SECRET_DATABRICKS_TOKEN') }}"
      threads: 10

    duckdb:
      type: duckdb
      path: ./duck.db

    trino:
      type: trino
      host: "{{ env_var('TRINO_TEST_HOST') }}"
      port: "{{ env_var('TRINO_TEST_PORT') | as_number }}"
      method: ldap
      user: "{{ env_var('TRINO_TEST_USER') }}"
      password: "{{ env_var('TRINO_TEST_PASS') }}"
      catalog: "{{ env_var('TRINO_TEST_CATALOG_NAME') }}"
      schema: dbt_project_evaluator_integration_tests_trino
      threads: 5
      session_properties:
        query_max_stage_count: 275

    clickhouse:
      type: clickhouse
      host: "{{ env_var('CLICKHOUSE_TEST_HOST') }}"
      port: "{{ env_var('CLICKHOUSE_TEST_PORT') | as_number }}"
      user: "{{ env_var('CLICKHOUSE_TEST_USER') }}"
      password: "{{ env_var('CLICKHOUSE_TEST_PASS') }}"
      dbname: "{{ env_var('CLICKHOUSE_TEST_DBNAME') }}"
      schema: dbt_project_evaluator_integration_tests_clickhouse
      threads: 5

@@ -2,12 +2,18 @@

{# flatten the sets of permissible primary key test sets to one level for later iteration #}
{%- set test_macro_list = [] %}
{%- set test_macro_names_list = [] %}
{%- for test_set in var('primary_key_test_macros') -%}
    {%- for test in test_set %}
        {%- do test_macro_list.append(test) -%}
    {%- endfor %}
{%- endfor -%}
{%- do test_macro_list.append("dbt.test_unique") -%}
{% for test in test_macro_list %}
    {%- do test_macro_names_list.append(test.split('.')[1]) -%}
{%- endfor -%}
{%- if "test_unique" not in test_macro_names_list -%}
    {%- do test_macro_list.append("dbt.test_unique") -%}
{%- endif -%}
{%- set test_macro_set = set_strict(test_macro_list) -%}

{%- set quoted_directory_pattern = wrap_string_with_quotes(get_directory_pattern()) %}

@@ -13,12 +13,6 @@ fi
. $VENV

cd integration_tests

if [[ ! -e ~/.dbt/profiles.yml ]]; then
    mkdir -p ~/.dbt
    cp ci/sample.profiles.yml ~/.dbt/profiles.yml
fi

dbt deps --target $1 || exit 1
dbt build -x --target $1 --full-refresh || exit 1

12 run_tox_tests.sh Executable file
@@ -0,0 +1,12 @@
#!/bin/bash

echo "Running tests for the first project"
cd integration_tests
dbt deps --target $1 || exit 1
dbt build -x --target $1 --full-refresh || exit 1

echo "Running tests for the second project"
cd ../integration_tests_2
dbt deps --target $1 || exit 1
dbt seed --full-refresh --target $1 || exit 1
dbt run -x --target $1 --full-refresh || exit 1
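The script takes the target name as its only argument, so each tox env below boils down to one call. For example, from the repo root, assuming the matching adapter and credentials are already set up:

    ./run_tox_tests.sh postgres
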
1 supported_adapters.env Normal file
@@ -0,0 +1 @@
SUPPORTED_ADAPTERS=snowflake,bigquery,redshift,databricks
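The file is plain KEY=value, so the centralized testing workflow (or a local shell) can presumably just source it and split the list on commas. A minimal sketch; the loop body is illustrative only:

    # source the file, then iterate over the comma-separated adapter list
    source supported_adapters.env
    for adapter in ${SUPPORTED_ADAPTERS//,/ }; do
      echo "would run: tox -e dbt_integration_${adapter}"
    done
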
88 tox.ini Normal file
@@ -0,0 +1,88 @@
[tox]
skipsdist = True
envlist = lint_all, testenv

[testenv]
passenv =
    # postgres env vars
    POSTGRES_HOST
    POSTGRES_USER
    DBT_ENV_SECRET_POSTGRES_PASS
    POSTGRES_PORT
    POSTGRES_DATABASE
    POSTGRES_SCHEMA
    # snowflake env vars
    SNOWFLAKE_ACCOUNT
    SNOWFLAKE_USER
    DBT_ENV_SECRET_SNOWFLAKE_PASS
    SNOWFLAKE_ROLE
    SNOWFLAKE_DATABASE
    SNOWFLAKE_WAREHOUSE
    SNOWFLAKE_SCHEMA
    # redshift
    REDSHIFT_HOST
    REDSHIFT_USER
    DBT_ENV_SECRET_REDSHIFT_PASS
    REDSHIFT_DATABASE
    REDSHIFT_SCHEMA
    REDSHIFT_PORT
    # bigquery
    BIGQUERY_PROJECT
    BIGQUERY_KEYFILE_JSON
    BIGQUERY_SCHEMA
    # databricks
    DATABRICKS_SCHEMA
    DATABRICKS_HOST
    DATABRICKS_HTTP_PATH
    DBT_ENV_SECRET_DATABRICKS_TOKEN

# Snowflake integration tests for centralized dbt testing
# run dbt commands directly, assumes dbt is already installed in environment
[testenv:dbt_integration_snowflake]
allowlist_externals =
    bash
skip_install = true
commands =
    bash ./run_tox_tests.sh snowflake

# Postgres integration tests for centralized dbt testing
# run dbt commands directly, assumes dbt is already installed in environment
[testenv:dbt_integration_postgres]
allowlist_externals =
    bash
skip_install = true
commands =
    bash ./run_tox_tests.sh postgres

# BigQuery integration tests for centralized dbt testing
# run dbt commands directly, assumes dbt is already installed in environment
[testenv:dbt_integration_bigquery]
allowlist_externals =
    bash
skip_install = true
commands =
    bash ./run_tox_tests.sh bigquery

# Redshift integration tests for centralized dbt testing
# run dbt commands directly, assumes dbt is already installed in environment
[testenv:dbt_integration_redshift]
allowlist_externals =
    bash
skip_install = true
commands =
    bash ./run_tox_tests.sh redshift

# note that duckdb is not a supported dbt target for dbt Cloud testing
[testenv:dbt_integration_duckdb]
allowlist_externals =
    bash
skip_install = true
commands =
    bash ./run_tox_tests.sh duckdb

[testenv:dbt_integration_databricks]
allowlist_externals =
    bash
skip_install = true
commands =
    bash ./run_tox_tests.sh databricks
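Since the duckdb profile needs no warehouse credentials (it writes to ./duck.db), its env is the quickest smoke test of the whole tox wiring. A minimal sketch, run from the repo root:

    pip install tox dbt-duckdb
    tox -e dbt_integration_duckdb
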