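# CircleCI config for the dbt integration test suite. Each job invokes
# ./run_test.sh <adapter> (the script itself is not shown here) against a
# single warehouse adapter; the workflow at the bottom runs Postgres first
# and gates the cloud-warehouse jobs on its success.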
version: 2.1
jobs:
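  # Postgres runs against the service container declared alongside the
  # primary image below; the env vars on the run step are the connection
  # details for that container.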
  integration-postgres:
    docker:
      - image: cimg/python:3.9.9
      - image: circleci/postgres:9.6.5-alpine-ram
    steps:
      - checkout
      - run:
          name: "Run Tests - Postgres"
          environment:
            POSTGRES_HOST: localhost
            POSTGRES_USER: root
            DBT_ENV_SECRET_POSTGRES_PASS: ''
            POSTGRES_PORT: 5432
            POSTGRES_DATABASE: circle_test
          command: ./run_test.sh postgres
      - store_artifacts:
          path: ./integration_tests/logs
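  # DuckDB is an embedded, in-process database, so this job needs no service
  # container and no credentials context.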
  integration-duckdb:
    docker:
      - image: cimg/python:3.9.9
    steps:
      - checkout
      - run:
          name: "Run Tests - DuckDB"
          command: ./run_test.sh duckdb
      - store_artifacts:
          path: ./integration_tests/logs
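  # The cloud-warehouse jobs below are identical apart from the adapter name;
  # their credentials come from per-adapter CircleCI contexts attached in the
  # workflow (profile-redshift, profile-snowflake, and so on).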
  integration-redshift:
    docker:
      - image: cimg/python:3.9.9
    steps:
      - checkout
      - run:
          name: "Run Tests - Redshift"
          command: ./run_test.sh redshift
      - store_artifacts:
          path: ./integration_tests/logs
  integration-snowflake:
    docker:
      - image: cimg/python:3.9.9
    steps:
      - checkout
      - run:
          name: "Run Tests - Snowflake"
          command: ./run_test.sh snowflake
      - store_artifacts:
          path: ./integration_tests/logs
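  # BigQuery additionally needs its service-account key written to disk
  # before the tests run. BIGQUERY_SERVICE_ACCOUNT_JSON is assumed to be
  # supplied by the profile-bigquery context; BIGQUERY_SERVICE_KEY_PATH
  # presumably points the test profile at the resulting file.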
  integration-bigquery:
    environment:
      BIGQUERY_SERVICE_KEY_PATH: "/home/circleci/bigquery-service-key.json"
    docker:
      - image: cimg/python:3.9.9
    steps:
      - checkout
      - run:
          name: "Set up credentials"
          command: echo $BIGQUERY_SERVICE_ACCOUNT_JSON > ${HOME}/bigquery-service-key.json
      - run:
          name: "Run Tests - BigQuery"
          command: ./run_test.sh bigquery
      - store_artifacts:
          path: ./integration_tests/logs
  integration-databricks:
    docker:
      - image: cimg/python:3.9.9
    steps:
      - checkout
      - run:
          name: "Run Tests - Databricks"
          command: ./run_test.sh databricks
      - store_artifacts:
          path: ./integration_tests/logs
  integration-trino:
    docker:
      - image: cimg/python:3.9.9
    steps:
      - checkout
      - run:
          name: "Run Tests - Trino"
          command: ./run_test.sh trino
      - store_artifacts:
          path: ./integration_tests/logs
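# Postgres acts as a smoke test: every cloud-warehouse job requires it to
# pass before starting.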
workflows:
  version: 2
  test-all:
    jobs:
      - integration-postgres
      - integration-duckdb
      - integration-redshift:
          context: profile-redshift
          requires:
            - integration-postgres
      - integration-snowflake:
          context: profile-snowflake
          requires:
            - integration-postgres
      - integration-bigquery:
          context: profile-bigquery
          requires:
            - integration-postgres
      - integration-databricks:
          context: profile-databricks
          requires:
            - integration-postgres
      - integration-trino:
          context: profile-trino
          requires:
            - integration-postgres