Compare commits

...

47 Commits

Author SHA1 Message Date
Kyle Wigley
5de72709d4 test w postgres change 2021-08-20 16:27:27 -04:00
Kyle Wigley
9171ab59d5 non python code change test 2021-08-20 16:27:26 -04:00
Kyle Wigley
d4ca0db74e handle case when no changes require integration tests 2021-08-20 16:10:01 -04:00
Kyle Wigley
6db2ba7957 update matrix for all event triggers 2021-08-20 14:33:47 -04:00
Kyle Wigley
016a17f580 revert bumping num of proc 2021-08-20 09:50:56 -04:00
Kyle Wigley
55d9da3574 test bump num of proc 2021-08-20 09:25:46 -04:00
Kyle Wigley
14ec3fe7d8 slim down push matrix, there are a lot of jobs 2021-08-20 09:07:43 -04:00
Kyle Wigley
a9a0ae020a pass opts to pytest 2021-08-20 01:15:48 -04:00
Kyle Wigley
6e8d5414ef align naming 2021-08-20 01:04:00 -04:00
Kyle Wigley
4dd8447a2f finishing touches 2021-08-20 01:02:22 -04:00
Kyle Wigley
a3ceb7fe3d update label logic 2021-08-20 00:33:00 -04:00
Kyle Wigley
fc027775a0 test dynamic matrix generation 2021-08-20 00:27:59 -04:00
Kyle Wigley
7d352c1c85 testing push event 2021-08-19 09:54:48 -04:00
Kyle Wigley
5d21fb5be7 rm branches-ignore filter from pull request trigger 2021-08-19 09:54:48 -04:00
Kyle Wigley
60088f500a split up lint and unit workflows for clear resposibilites 2021-08-19 09:54:48 -04:00
Kyle Wigley
aeaf18ed1f fix path to cli tool 2021-08-19 09:54:47 -04:00
Kyle Wigley
bb67e33d75 test pull_request_target action 2021-08-19 09:54:47 -04:00
Kyle Wigley
eab3394eda more review feedback 2021-08-19 09:54:47 -04:00
Kyle Wigley
9eeb298e6c review feedback: rm unnecessary changes 2021-08-19 09:54:47 -04:00
Kyle Wigley
74edaa677b review feedback: add reasoning for not using secrets in workflow 2021-08-19 09:54:47 -04:00
Kyle Wigley
6a73374e83 review feeback: update order of script for setting up postgres on macos runner 2021-08-19 09:54:46 -04:00
Kyle Wigley
09c26566f3 rm auto formatting changes 2021-08-19 09:54:46 -04:00
Kyle Wigley
91e5ae774d fix build checks 2021-08-19 09:54:46 -04:00
Kyle Wigley
f30abc8c19 updates 2021-08-19 09:54:46 -04:00
Kyle Wigley
2c6f0751f0 dev review feedback 2021-08-19 09:54:46 -04:00
Kyle Wigley
23dab9a068 cosmetic updates 2021-08-19 09:54:45 -04:00
Kyle Wigley
bdbd2fc48e temp commit to test 2021-08-19 09:54:45 -04:00
Kyle Wigley
fa1b9c25ed create actions for common code 2021-08-19 09:54:45 -04:00
Kyle Wigley
43aa3437c5 Update .github/workflows/main.yml 2021-08-19 09:54:45 -04:00
Kyle Wigley
1514a243ca clean up todo 2021-08-19 09:54:45 -04:00
Kyle Wigley
e84c78d39f add changelog 2021-08-19 09:54:44 -04:00
Kyle Wigley
6d27f158ac turn off python3.9 unit tests 2021-08-19 09:54:44 -04:00
Kyle Wigley
70a9201997 update descriptions 2021-08-19 09:54:44 -04:00
Kyle Wigley
0e8fafa0ef set up postgres for other OS 2021-08-19 09:54:44 -04:00
Kyle Wigley
cf5bc834b1 has changes 2021-08-19 09:54:44 -04:00
Kyle Wigley
1ee5d51660 test slim ci 2021-08-19 09:54:43 -04:00
Kyle Wigley
df7f492cf0 fix build workflow 2021-08-19 09:54:43 -04:00
Kyle Wigley
a276b47e4e fix random 'Cannot track experimental parser info when active user is None' error 2021-08-19 09:54:43 -04:00
Kyle Wigley
073233e9e9 add concurrency 2021-08-19 09:54:43 -04:00
Kyle Wigley
74032668bb fix windows build 2021-08-19 09:54:43 -04:00
Kyle Wigley
444b3b20bd fix sh script for building dists 2021-08-19 09:54:42 -04:00
Kyle Wigley
761cf94815 formatting 2021-08-19 09:54:42 -04:00
Kyle Wigley
378e4f0ce2 nvm revert 2021-08-19 09:54:42 -04:00
Kyle Wigley
fde156da0c test actions in same repo 2021-08-19 09:54:42 -04:00
Kyle Wigley
920702a5ab try this again 2021-08-19 09:54:42 -04:00
Kyle Wigley
fa82ebab73 test test 2021-08-19 09:54:41 -04:00
Kyle Wigley
2364f2f65b test 2021-08-19 09:54:41 -04:00
27 changed files with 667 additions and 535 deletions


@@ -1,101 +0,0 @@
version: 2.1
jobs:
build-wheels:
docker: &test_only
- image: fishtownanalytics/test-container:12
environment:
DBT_INVOCATION_ENV: circle
DOCKER_TEST_DATABASE_HOST: "database"
TOX_PARALLEL_NO_SPINNER: 1
steps:
- checkout
- run:
name: Build wheels
command: |
python3.8 -m venv "${PYTHON_ENV}"
export PYTHON_BIN="${PYTHON_ENV}/bin/python"
$PYTHON_BIN -m pip install -U pip setuptools
$PYTHON_BIN -m pip install -r requirements.txt
$PYTHON_BIN -m pip install -r dev-requirements.txt
/bin/bash ./scripts/build-wheels.sh
$PYTHON_BIN ./scripts/collect-dbt-contexts.py > ./dist/context_metadata.json
$PYTHON_BIN ./scripts/collect-artifact-schema.py > ./dist/artifact_schemas.json
environment:
PYTHON_ENV: /home/tox/build_venv/
- store_artifacts:
path: ./dist
destination: dist
integration-postgres:
docker:
- image: fishtownanalytics/test-container:12
environment:
DBT_INVOCATION_ENV: circle
DOCKER_TEST_DATABASE_HOST: "database"
TOX_PARALLEL_NO_SPINNER: 1
- image: postgres
name: database
environment:
POSTGRES_USER: "root"
POSTGRES_PASSWORD: "password"
POSTGRES_DB: "dbt"
steps:
- checkout
- run:
name: Setup postgres
command: bash test/setup_db.sh
environment:
PGHOST: database
PGUSER: root
PGPASSWORD: password
PGDATABASE: postgres
- run:
name: Postgres integration tests
command: tox -p -e py36-postgres,py38-postgres -- -v -n4
no_output_timeout: 30m
- store_artifacts:
path: ./logs
integration-snowflake:
docker: *test_only
steps:
- checkout
- run:
name: Snowflake integration tests
command: tox -p -e py36-snowflake,py38-snowflake -- -v -n4
no_output_timeout: 30m
- store_artifacts:
path: ./logs
integration-redshift:
docker: *test_only
steps:
- checkout
- run:
name: Redshift integration tests
command: tox -p -e py36-redshift,py38-redshift -- -v -n4
no_output_timeout: 30m
- store_artifacts:
path: ./logs
integration-bigquery:
docker: *test_only
steps:
- checkout
- run:
name: Bigquery integration test
command: tox -p -e py36-bigquery,py38-bigquery -- -v -n4
no_output_timeout: 30m
- store_artifacts:
path: ./logs
workflows:
version: 2
test-everything:
jobs:
- integration-postgres
- integration-redshift
- integration-bigquery
- integration-snowflake
- build-wheels:
requires:
- integration-postgres
- integration-redshift
- integration-bigquery
- integration-snowflake


@@ -0,0 +1,10 @@
name: "Set up postgres (linux)"
description: "Set up postgres service on linux vm for dbt integration tests"
runs:
using: "composite"
steps:
- shell: bash
run: |
sudo systemctl start postgresql.service
pg_isready
sudo -u postgres bash ${{ github.action_path }}/setup_db.sh


@@ -0,0 +1 @@
../../../test/setup_db.sh
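
The one-line file above (and the identical ones under the macos and windows actions below) is a git symlink: each action directory ships a setup_db.sh that points back at the canonical test/setup_db.sh, so all three composite actions share the same database bootstrap without copying it. Recreating one by hand would look like:

ln -s ../../../test/setup_db.sh .github/actions/setup-postgres-linux/setup_db.sh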


@@ -0,0 +1,24 @@
name: "Set up postgres (macos)"
description: "Set up postgres service on macos vm for dbt integration tests"
runs:
using: "composite"
steps:
- shell: bash
run: |
brew services start postgresql
echo "Check PostgreSQL service is running"
i=10
COMMAND='pg_isready'
while [ $i -gt -1 ]; do
if [ $i == 0 ]; then
echo "PostgreSQL service not ready, all attempts exhausted"
exit 1
fi
echo "Check PostgreSQL service status"
eval $COMMAND && break
echo "PostgreSQL service not ready, wait 10 more sec, attempts left: $i"
sleep 10
((i--))
done
createuser -s postgres
bash ${{ github.action_path }}/setup_db.sh


@@ -0,0 +1 @@
../../../test/setup_db.sh


@@ -0,0 +1,12 @@
name: "Set up postgres (windows)"
description: "Set up postgres service on windows vm for dbt integration tests"
runs:
using: "composite"
steps:
- shell: pwsh
run: |
$pgService = Get-Service -Name postgresql*
Set-Service -InputObject $pgService -Status running -StartupType automatic
Start-Process -FilePath "$env:PGBIN\pg_isready" -Wait -PassThru
$env:Path += ";$env:PGBIN"
bash ${{ github.action_path }}/setup_db.sh


@@ -0,0 +1 @@
../../../test/setup_db.sh


@@ -9,14 +9,13 @@ resolves #
resolves #1234
-->
### Description
<!--- Describe the Pull Request here -->
### Checklist
- [ ] I have signed the [CLA](https://docs.getdbt.com/docs/contributor-license-agreements)
- [ ] I have run this code in development and it appears to resolve the stated issue
- [ ] This PR includes tests, or tests are not required/relevant for this PR
- [ ] I have updated the `CHANGELOG.md` and added information about my change to the "dbt next" section.


@@ -0,0 +1,90 @@
module.exports = ({ context }) => {
const defaultPythonVersion = "3.8";
const supportedPythonVersions = ["3.6", "3.7", "3.8", "3.9"];
const supportedAdapters = ["snowflake", "postgres", "bigquery", "redshift"];
if (context.eventName.includes("pull_request")) {
// if PR, generate matrix based on files changed and PR labels
const changes = JSON.parse(process.env.CHANGES);
const labels = context.payload.pull_request.labels.map(({ name }) => name);
console.log("labels", labels);
console.log("changes", labels);
const testAllLabel = labels.includes("test all");
const include = [];
for (const adapter of supportedAdapters) {
if (
changes.includes(adapter) ||
testAllLabel ||
labels.includes(`test ${adapter}`)
) {
for (const pythonVersion of supportedPythonVersions) {
if (
pythonVersion === defaultPythonVersion ||
labels.includes(`test python${pythonVersion}`) ||
testAllLabel
) {
// always run tests on ubuntu by default
include.push({
os: "ubuntu-latest",
adapter,
"python-version": pythonVersion,
});
if (labels.includes("test windows") || testAllLabel) {
include.push({
os: "windows-latest",
adapter,
"python-version": pythonVersion,
});
}
if (labels.includes("test macos") || testAllLabel) {
include.push({
os: "macos-latest",
adapter,
"python-version": pythonVersion,
});
}
}
}
}
}
console.log("matrix", { include });
return {
include,
};
}
const include = [];
// run for all adapters and python versions on ubuntu
for (const adapter of supportedAdapters) {
for (const pythonVersion of supportedPythonVersions) {
include.push({
os: 'ubuntu-latest',
adapter: adapter,
"python-version": pythonVersion,
});
}
}
// additionally include runs for all adapters, on macos and windows,
// but only for the default python version
for (const adapter of supportedAdapters) {
for (const operatingSystem of ["windows-latest", "macos-latest"]) {
include.push({
os: operatingSystem,
adapter: adapter,
"python-version": defaultPythonVersion,
});
}
}
console.log("matrix", { include });
return {
include,
};
};
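
The script's behavior is easiest to see by feeding it a fake context. A minimal local dry run might look like this (assumes Node.js is available; the CHANGES value mimics the dorny/paths-filter output for a PR touching plugins/postgres):

export CHANGES='["postgres"]'
node -e '
const script = require("./.github/scripts/integration-test-matrix.js");
const context = {
  eventName: "pull_request",
  payload: { pull_request: { labels: [{ name: "test macos" }] } },
};
console.log(JSON.stringify(script({ context }), null, 2));
'

After the script's own debug logging, this should print an include list with two postgres entries, one for ubuntu-latest and one for macos-latest, both on the default python 3.8.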

.github/workflows/integration.yml

@@ -0,0 +1,244 @@
# **what?**
# This workflow runs all integration tests for the supported OSes,
# python versions, and core adapters. If triggered by a PR, it
# only runs tests for the adapters related to the code changes.
# Use the `test all` and `test ${adapter}` labels to run all or
# additional tests. Use the `ok to test` label to mark PRs from
# forked repositories that are safe to run integration tests for.
# Requires secrets to run against the different warehouses.
# **why?**
# This checks the functionality of dbt from a user's perspective
# and attempts to catch functional regressions.
# **when?**
# This workflow runs on every push to a protected branch, on manual
# triggers, and for all PRs, including PRs from forks. For fork PRs
# it is skipped until a maintainer applies the `ok to test` label.
name: Integration Tests
on:
push:
branches:
- "main"
- "develop"
- "*.latest"
- "releases/*"
- "github-actions-matrix" # TODO: rm after testing
pull_request_target:
workflow_dispatch:
permissions: read-all
concurrency:
group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.ref }}
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
test-metadata:
# run unless this is a PR from a forked repository that lacks the `ok to test` label
if: >-
github.event_name != 'pull_request_target' ||
github.event.pull_request.head.repo.full_name == github.repository ||
contains(github.event.pull_request.labels.*.name, 'ok to test')
runs-on: ubuntu-latest
outputs:
matrix: ${{ steps.generate-matrix.outputs.result }}
steps:
- name: Check out the repository (non-PR)
if: github.event_name != 'pull_request_target'
uses: actions/checkout@v2
with:
persist-credentials: false
- name: Check out the repository (PR)
if: github.event_name == 'pull_request_target'
uses: actions/checkout@v2
with:
persist-credentials: false
ref: ${{ github.event.pull_request.head.sha }}
- name: Check if relevant files changed
# https://github.com/marketplace/actions/paths-changes-filter
uses: dorny/paths-filter@v2
id: get-changes
with:
token: ${{ secrets.GITHUB_TOKEN }}
filters: |
postgres:
- 'core/**'
- 'plugins/postgres/**'
snowflake:
- 'core/**'
- 'plugins/snowflake/**'
bigquery:
- 'core/**'
- 'plugins/bigquery/**'
redshift:
- 'core/**'
- 'plugins/redshift/**'
- 'plugins/postgres/**'
- name: Generate integration test matrix
id: generate-matrix
uses: actions/github-script@v4
env:
CHANGES: ${{ steps.get-changes.outputs.changes }}
with:
script: |
const script = require('./.github/scripts/integration-test-matrix.js')
const matrix = script({ context })
console.log(matrix)
return matrix
test:
name: ${{ matrix.adapter }} ${{ matrix.python-version }} / ${{ matrix.os }}
# run unless this is a PR from a forked repository that lacks the `ok to test` label
if: >-
needs.test-metadata.outputs.matrix &&
fromJSON( needs.test-metadata.outputs.matrix ).include[0] &&
(
github.event_name != 'pull_request_target' ||
github.event.pull_request.head.repo.full_name == github.repository ||
contains(github.event.pull_request.labels.*.name, 'ok to test')
)
runs-on: ${{ matrix.os }}
needs: test-metadata
# only serialize the non-postgres jobs, to reduce concurrent load on the warehouses
concurrency: ${{ matrix.adapter != 'postgres' && matrix.adapter || github.job }}-${{ matrix.python-version }}-${{ matrix.os }}
strategy:
fail-fast: false
matrix: ${{ fromJSON(needs.test-metadata.outputs.matrix) }}
env:
TOXENV: integration-${{ matrix.adapter }}
PYTEST_ADDOPTS: "-v --color=yes -n4"
DBT_INVOCATION_ENV: github-actions
steps:
- name: Check out the repository
if: github.event_name != 'pull_request_target'
uses: actions/checkout@v2
with:
persist-credentials: false
- name: Check out the repository (PR)
if: github.event_name == 'pull_request_target'
uses: actions/checkout@v2
with:
persist-credentials: false
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Set up postgres (linux)
if: |
matrix.adapter == 'postgres' &&
runner.os == 'Linux'
uses: ./.github/actions/setup-postgres-linux
- name: Set up postgres (macos)
if: |
matrix.adapter == 'postgres' &&
runner.os == 'macOS'
uses: ./.github/actions/setup-postgres-macos
- name: Set up postgres (windows)
if: |
matrix.adapter == 'postgres' &&
runner.os == 'Windows'
uses: ./.github/actions/setup-postgres-windows
- name: Upgrade pip
run: |
python -m pip install --upgrade pip
pip --version
- name: Install tox
run: |
pip install tox
tox --version
- name: Run tox (postgres)
if: matrix.adapter == 'postgres'
run: tox
- name: Run tox (redshift)
if: matrix.adapter == 'redshift'
env:
REDSHIFT_TEST_DBNAME: ${{ secrets.REDSHIFT_TEST_DBNAME }}
REDSHIFT_TEST_PASS: ${{ secrets.REDSHIFT_TEST_PASS }}
REDSHIFT_TEST_USER: ${{ secrets.REDSHIFT_TEST_USER }}
REDSHIFT_TEST_PORT: ${{ secrets.REDSHIFT_TEST_PORT }}
REDSHIFT_TEST_HOST: ${{ secrets.REDSHIFT_TEST_HOST }}
run: tox
- name: Run tox (snowflake)
if: matrix.adapter == 'snowflake'
env:
SNOWFLAKE_TEST_ACCOUNT: ${{ secrets.SNOWFLAKE_TEST_ACCOUNT }}
SNOWFLAKE_TEST_PASSWORD: ${{ secrets.SNOWFLAKE_TEST_PASSWORD }}
SNOWFLAKE_TEST_USER: ${{ secrets.SNOWFLAKE_TEST_USER }}
SNOWFLAKE_TEST_WAREHOUSE: ${{ secrets.SNOWFLAKE_TEST_WAREHOUSE }}
SNOWFLAKE_TEST_OAUTH_REFRESH_TOKEN: ${{ secrets.SNOWFLAKE_TEST_OAUTH_REFRESH_TOKEN }}
SNOWFLAKE_TEST_OAUTH_CLIENT_ID: ${{ secrets.SNOWFLAKE_TEST_OAUTH_CLIENT_ID }}
SNOWFLAKE_TEST_OAUTH_CLIENT_SECRET: ${{ secrets.SNOWFLAKE_TEST_OAUTH_CLIENT_SECRET }}
SNOWFLAKE_TEST_ALT_DATABASE: ${{ secrets.SNOWFLAKE_TEST_ALT_DATABASE }}
SNOWFLAKE_TEST_ALT_WAREHOUSE: ${{ secrets.SNOWFLAKE_TEST_ALT_WAREHOUSE }}
SNOWFLAKE_TEST_DATABASE: ${{ secrets.SNOWFLAKE_TEST_DATABASE }}
SNOWFLAKE_TEST_QUOTED_DATABASE: ${{ secrets.SNOWFLAKE_TEST_QUOTED_DATABASE }}
SNOWFLAKE_TEST_ROLE: ${{ secrets.SNOWFLAKE_TEST_ROLE }}
run: tox
- name: Run tox (bigquery)
if: matrix.adapter == 'bigquery'
env:
BIGQUERY_TEST_SERVICE_ACCOUNT_JSON: ${{ secrets.BIGQUERY_TEST_SERVICE_ACCOUNT_JSON }}
BIGQUERY_TEST_ALT_DATABASE: ${{ secrets.BIGQUERY_TEST_ALT_DATABASE }}
run: tox
- uses: actions/upload-artifact@v2
with:
name: logs
path: ./logs
require-label-comment:
runs-on: ubuntu-latest
needs: test
permissions:
pull-requests: write
steps:
- name: Needs permission PR comment
if: >-
needs.test.result == 'skipped' &&
github.event_name == 'pull_request_target' &&
github.event.pull_request.head.repo.full_name != github.repository
uses: unsplash/comment-on-pr@master
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
msg: |
"You do not have permissions to run integration tests, @dbt-labs/core "\
"needs to label this PR with `ok to test` in order to run integration tests!"
check_for_duplicate_msg: true
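
In day-to-day use, maintainers drive this workflow entirely through labels. A sketch with the GitHub CLI (the PR number is illustrative):

gh pr edit 1234 --add-label "ok to test"        # allow a fork PR to run integration tests
gh pr edit 1234 --add-label "test snowflake"    # add snowflake tests even without snowflake changes
gh pr edit 1234 --add-label "test python3.9" --add-label "test windows"
gh pr edit 1234 --add-label "test all"          # full matrix: every adapter, python version, and OS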

.github/workflows/main.yml

@@ -0,0 +1,223 @@
# **what?**
# Runs code quality checks and unit tests, and verifies the python build,
# for all code committed to the repository. This workflow should not
# require any secrets since it runs for PRs from forked repos.
# By default, secrets are not passed to workflows running from
# a forked repo.
# **why?**
# Ensure code for dbt meets a certain quality standard.
# **when?**
# This will run for all PRs, when code is pushed to a release
# branch, and when manually triggered.
name: Main
on:
push:
branches:
- "main"
- "develop"
- "*.latest"
- "releases/*"
- "github-actions-matrix" # TODO: rm after testing
pull_request:
workflow_dispatch:
permissions: read-all
concurrency:
group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.ref }}
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
code-quality:
name: ${{ matrix.toxenv }}
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
toxenv: [flake8, mypy]
env:
TOXENV: ${{ matrix.toxenv }}
PYTEST_ADDOPTS: "-v --color=yes"
steps:
- name: Check out the repository
if: github.event_name != 'pull_request_target'
uses: actions/checkout@v2
with:
persist-credentials: false
- name: Check out the repository
if: github.event_name == 'pull_request_target'
uses: actions/checkout@v2
with:
persist-credentials: false
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up Python
uses: actions/setup-python@v2
- name: Upgrade python dependencies
run: |
python -m pip install --upgrade pip
pip --version
- name: Install tox
run: |
pip install tox
tox --version
- name: Run tox
run: tox
unit:
name: unit ${{ matrix.python-version }}
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version: [3.6, 3.7, 3.8] # TODO: support unit testing for python 3.9 (https://github.com/dbt-labs/dbt/issues/3689)
env:
TOXENV: "unit"
PYTEST_ADDOPTS: "-v --color=yes"
steps:
- name: Check out the repository
if: github.event_name != 'pull_request_target'
uses: actions/checkout@v2
with:
persist-credentials: false
- name: Check out the repository
if: github.event_name == 'pull_request_target'
uses: actions/checkout@v2
with:
persist-credentials: false
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Upgrade python dependencies
run: |
python -m pip install --upgrade pip
pip --version
- name: Install tox
run: |
pip install tox
tox --version
- name: Run tox
run: tox
build:
name: build packages
runs-on: ubuntu-latest
steps:
- name: Check out the repository
if: github.event_name != 'pull_request_target'
uses: actions/checkout@v2
with:
persist-credentials: false
- name: Check out the repository
if: github.event_name == 'pull_request_target'
uses: actions/checkout@v2
with:
persist-credentials: false
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: 3.8
- name: Upgrade python dependencies
run: |
python -m pip install --upgrade pip
pip --version
pip install --upgrade setuptools wheel twine check-wheel-contents
- name: Build distributions
run: ./scripts/build-dist.sh
- name: Show distributions
run: ls -lh dist/
- name: Check distribution descriptions
run: |
twine check dist/*
- name: Check wheel contents
run: |
check-wheel-contents dist/*.whl --ignore W007,W008
- uses: actions/upload-artifact@v2
with:
name: dist
path: dist/
test-build:
name: verify packages ${{ matrix.python-version }} / ${{ matrix.os }}
needs: build
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
python-version: [3.6, 3.7, 3.8, 3.9]
steps:
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Upgrade python dependencies
run: |
python -m pip install --upgrade pip
- uses: actions/download-artifact@v2
with:
name: dist
path: dist/
- name: Show distributions
run: ls -lh dist/
- name: Install wheel distributions
run: |
find ./dist/*.whl -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/
- name: Check wheel distributions
run: |
dbt --version
- name: Install source distributions
run: |
find ./dist/*.gz -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/
- name: Check source distributions
run: |
dbt --version
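
The build and test-build jobs can be reproduced locally before pushing; a rough equivalent of their steps, assuming a python 3.8 environment at the repo root:

python -m pip install --upgrade pip setuptools wheel twine check-wheel-contents
./scripts/build-dist.sh
twine check dist/*
check-wheel-contents dist/*.whl --ignore W007,W008
pip install --force-reinstall --find-links=dist/ dist/*.whl
dbt --version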


@@ -1,15 +1,13 @@
name: Performance Regression Testing
name: Performance Regression Tests
# Schedule triggers
on:
# runs twice a day at 10:05am and 10:05pm
schedule:
- cron: '5 10,22 * * *'
- cron: "5 10,22 * * *"
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
# checks fmt of runner code
# purposefully not a dependency of any other job
# will block merging, but not prevent developing
@@ -83,7 +81,7 @@ jobs:
- name: Setup Python
uses: actions/setup-python@v2.2.2
with:
python-version: '3.8'
python-version: "3.8"
- name: install dbt
run: pip install -r dev-requirements.txt -r editable-requirements.txt
- name: install hyperfine
@@ -116,11 +114,11 @@ jobs:
- name: checkout latest
uses: actions/checkout@v2
with:
ref: '0.20.latest'
ref: "0.20.latest"
- name: Setup Python
uses: actions/setup-python@v2.2.2
with:
python-version: '3.8'
python-version: "3.8"
- name: move repo up a level
run: mkdir ${{ github.workspace }}/../baseline/ && cp -r ${{ github.workspace }} ${{ github.workspace }}/../baseline
- name: "[debug] ls new dbt location"


@@ -1,139 +0,0 @@
# This is a workflow to run our integration tests for windows and mac
name: dbt Tests
# Triggers
on:
# Triggers the workflow on push or pull request events and also adds a manual trigger
push:
branches:
- 'develop'
- '*.latest'
- 'releases/*'
pull_request:
branches:
- 'develop'
- '*.latest'
- 'pr/*'
- 'releases/*'
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
PostgresIntegrationTest:
runs-on: 'windows-latest' #TODO: Add Mac support
environment: 'Postgres'
steps:
- uses: actions/checkout@v2
- name: 'Install postgresql and set up database'
shell: pwsh
run: |
$serviceName = Get-Service -Name postgresql*
Set-Service -InputObject $serviceName -StartupType Automatic
Start-Service -InputObject $serviceName
& $env:PGBIN\createdb.exe -U postgres dbt
& $env:PGBIN\psql.exe -U postgres -c "CREATE ROLE root WITH PASSWORD '$env:ROOT_PASSWORD';"
& $env:PGBIN\psql.exe -U postgres -c "ALTER ROLE root WITH LOGIN;"
& $env:PGBIN\psql.exe -U postgres -c "GRANT CREATE, CONNECT ON DATABASE dbt TO root WITH GRANT OPTION;"
& $env:PGBIN\psql.exe -U postgres -c "CREATE ROLE noaccess WITH PASSWORD '$env:NOACCESS_PASSWORD' NOSUPERUSER;"
& $env:PGBIN\psql.exe -U postgres -c "ALTER ROLE noaccess WITH LOGIN;"
& $env:PGBIN\psql.exe -U postgres -c "GRANT CONNECT ON DATABASE dbt TO noaccess;"
env:
ROOT_PASSWORD: ${{ secrets.ROOT_PASSWORD }}
NOACCESS_PASSWORD: ${{ secrets.NOACCESS_PASSWORD }}
- name: Setup Python
uses: actions/setup-python@v2.2.2
with:
python-version: '3.7'
architecture: 'x64'
- name: 'Install dependencies'
run: python -m pip install --upgrade pip && pip install tox
- name: 'Run integration tests'
run: python -m tox -e py-postgres -- -v -n4
# These three are all similar except secure environment variables, which MUST be passed along to their tasks,
# but there's probably a better way to do this!
SnowflakeIntegrationTest:
strategy:
matrix:
os: [windows-latest, macos-latest]
runs-on: ${{ matrix.os }}
environment: 'Snowflake'
steps:
- uses: actions/checkout@v2
- name: Setup Python
uses: actions/setup-python@v2.2.2
with:
python-version: '3.7'
architecture: 'x64'
- name: 'Install dependencies'
run: python -m pip install --upgrade pip && pip install tox
- name: 'Run integration tests'
run: python -m tox -e py-snowflake -- -v -n4
env:
SNOWFLAKE_TEST_ACCOUNT: ${{ secrets.SNOWFLAKE_TEST_ACCOUNT }}
SNOWFLAKE_TEST_PASSWORD: ${{ secrets.SNOWFLAKE_TEST_PASSWORD }}
SNOWFLAKE_TEST_USER: ${{ secrets.SNOWFLAKE_TEST_USER }}
SNOWFLAKE_TEST_WAREHOUSE: ${{ secrets.SNOWFLAKE_TEST_WAREHOUSE }}
SNOWFLAKE_TEST_OAUTH_REFRESH_TOKEN: ${{ secrets.SNOWFLAKE_TEST_OAUTH_REFRESH_TOKEN }}
SNOWFLAKE_TEST_OAUTH_CLIENT_ID: ${{ secrets.SNOWFLAKE_TEST_OAUTH_CLIENT_ID }}
SNOWFLAKE_TEST_OAUTH_CLIENT_SECRET: ${{ secrets.SNOWFLAKE_TEST_OAUTH_CLIENT_SECRET }}
SNOWFLAKE_TEST_ALT_DATABASE: ${{ secrets.SNOWFLAKE_TEST_ALT_DATABASE }}
SNOWFLAKE_TEST_ALT_WAREHOUSE: ${{ secrets.SNOWFLAKE_TEST_ALT_WAREHOUSE }}
SNOWFLAKE_TEST_DATABASE: ${{ secrets.SNOWFLAKE_TEST_DATABASE }}
SNOWFLAKE_TEST_QUOTED_DATABASE: ${{ secrets.SNOWFLAKE_TEST_QUOTED_DATABASE }}
SNOWFLAKE_TEST_ROLE: ${{ secrets.SNOWFLAKE_TEST_ROLE }}
BigQueryIntegrationTest:
strategy:
matrix:
os: [windows-latest, macos-latest]
runs-on: ${{ matrix.os }}
environment: 'Bigquery'
steps:
- uses: actions/checkout@v2
- name: Setup Python
uses: actions/setup-python@v2.2.2
with:
python-version: '3.7'
architecture: 'x64'
- name: 'Install dependencies'
run: python -m pip install --upgrade pip && pip install tox
- name: 'Run integration tests'
run: python -m tox -e py-bigquery -- -v -n4
env:
BIGQUERY_SERVICE_ACCOUNT_JSON: ${{ secrets.BIGQUERY_SERVICE_ACCOUNT_JSON }}
BIGQUERY_TEST_ALT_DATABASE: ${{ secrets.BIGQUERY_TEST_ALT_DATABASE }}
RedshiftIntegrationTest:
strategy:
matrix:
os: [windows-latest, macos-latest]
runs-on: ${{ matrix.os }}
environment: 'Redshift'
steps:
- uses: actions/checkout@v2
- name: Setup Python
uses: actions/setup-python@v2.2.2
with:
python-version: '3.7'
architecture: 'x64'
- name: 'Install dependencies'
run: python -m pip install --upgrade pip && pip install tox
- name: 'Run integration tests'
run: python -m tox -e py-redshift -- -v -n4
env:
REDSHIFT_TEST_DBNAME: ${{ secrets.REDSHIFT_TEST_DBNAME }}
REDSHIFT_TEST_PASS: ${{ secrets.REDSHIFT_TEST_PASS }}
REDSHIFT_TEST_USER: ${{ secrets.REDSHIFT_TEST_USER }}
REDSHIFT_TEST_PORT: ${{ secrets.REDSHIFT_TEST_PORT }}
REDSHIFT_TEST_HOST: ${{ secrets.REDSHIFT_TEST_HOST }}


@@ -1,61 +0,0 @@
# This is a workflow to run our linting and unit tests for windows, mac, and linux
name: Linting and Unit Tests
# Triggers
on:
# Trigger on commits to develop and releases branches
push:
branches:
- 'develop'
- '*.latest'
- 'releases/*'
pull_request: # Trigger for all PRs
workflow_dispatch: # Allow manual triggers
jobs:
Linting:
runs-on: ubuntu-latest #no need to run on every OS
steps:
- uses: actions/checkout@v2
- name: Setup Python
uses: actions/setup-python@v2.2.2
with:
python-version: '3.6'
architecture: 'x64'
- name: 'Install dependencies'
run: python -m pip install --upgrade pip && pip install tox
- name: 'Linting'
run: tox -e mypy,flake8 -- -v
UnitTest:
strategy:
matrix:
os: [windows-latest, ubuntu-latest, macos-latest]
runs-on: ${{ matrix.os }}
needs: Linting
steps:
- uses: actions/checkout@v2
- name: Setup Python 3.6
uses: actions/setup-python@v2.2.2
with:
python-version: '3.6'
architecture: 'x64'
- name: Setup Python 3.7
uses: actions/setup-python@v2.2.2
with:
python-version: '3.7'
architecture: 'x64'
- name: Setup Python 3.8
uses: actions/setup-python@v2.2.2
with:
python-version: '3.8'
architecture: 'x64'
- name: 'Install dependencies'
run: python -m pip install --upgrade pip && pip install tox
- name: 'Run unit tests'
run: tox -p -e py36,py37,py38


@@ -14,8 +14,7 @@
- Add `build` RPC method, and a subset of flags for `build` task ([#3595](https://github.com/dbt-labs/dbt/issues/3595), [#3674](https://github.com/dbt-labs/dbt/pull/3674))
- Get more information on partial parsing version mismatches ([#3757](https://github.com/dbt-labs/dbt/issues/3757), [#3758](https://github.com/dbt-labs/dbt/pull/3758))
## dbt 0.21.0b1 (August 03, 2021)
- Use GitHub Actions for CI ([#3688](https://github.com/dbt-labs/dbt/issues/3688), [#3669](https://github.com/dbt-labs/dbt/pull/3669))
## dbt 0.21.0b1 (August 03, 2021)
@@ -59,18 +58,18 @@ Contributors:
- [@jmriego](https://github.com/jmriego) ([#3526](https://github.com/dbt-labs/dbt/pull/3526))
- [@danielefrigo](https://github.com/danielefrigo) ([#3547](https://github.com/dbt-labs/dbt/pull/3547))
## dbt 0.20.2 (Release TBD)
### Under the hood
- Switch to full reparse on partial parsing exceptions. Log and report exception information. ([#3725](https://github.com/dbt-labs/dbt/issues/3725), [#3733](https://github.com/dbt-labs/dbt/pull/3733))
- Better error handling for BigQuery job labels that are too long. [#3703](https://github.com/dbt-labs/dbt/pull/3703)
### Fixes
- Check for existence of test node when removing. ([#3711](https://github.com/dbt-labs/dbt/issues/3711), [#3750](https://github.com/dbt-labs/dbt/pull/3750))
- Fix bug in finding analysis nodes when applying analysis patch ([#3764](https://github.com/dbt-labs/dbt/issues/3764), [#3767](https://github.com/dbt-labs/dbt/pull/3767))
## dbt 0.20.1 (August 11, 2021)
## dbt 0.20.1rc1 (August 02, 2021)


@@ -13,10 +13,6 @@
</a>
</p>
**[dbt](https://www.getdbt.com/)** enables data analysts and engineers to transform their data using the same practices that software engineers use to build applications.
![dbt architecture](https://raw.githubusercontent.com/dbt-labs/dbt/6c6649f9129d5d108aa3b0526f634cd8f3a9d1ed/etc/dbt-arch.png)
## Understanding dbt
Analysts using dbt can transform their data by simply writing select statements, while dbt handles turning these statements into tables and views in a data warehouse.


@@ -1,154 +0,0 @@
# Python package
# Create and test a Python package on multiple Python versions.
# Add steps that analyze code, save the dist with the build record, publish to a PyPI-compatible index, and more:
# https://docs.microsoft.com/azure/devops/pipelines/languages/python
trigger:
branches:
include:
- develop
- '*.latest'
- pr/*
jobs:
- job: UnitTest
pool:
vmImage: 'vs2017-win2016'
steps:
- task: UsePythonVersion@0
inputs:
versionSpec: '3.7'
architecture: 'x64'
- script: python -m pip install --upgrade pip && pip install tox
displayName: 'Install dependencies'
- script: python -m tox -e py -- -v
displayName: Run unit tests
- job: PostgresIntegrationTest
pool:
vmImage: 'vs2017-win2016'
dependsOn: UnitTest
steps:
- pwsh: |
$serviceName = Get-Service -Name postgresql*
Set-Service -InputObject $serviceName -StartupType Automatic
Start-Service -InputObject $serviceName
& $env:PGBIN\createdb.exe -U postgres dbt
& $env:PGBIN\psql.exe -U postgres -c "CREATE ROLE root WITH PASSWORD 'password';"
& $env:PGBIN\psql.exe -U postgres -c "ALTER ROLE root WITH LOGIN;"
& $env:PGBIN\psql.exe -U postgres -c "GRANT CREATE, CONNECT ON DATABASE dbt TO root WITH GRANT OPTION;"
& $env:PGBIN\psql.exe -U postgres -c "CREATE ROLE noaccess WITH PASSWORD 'password' NOSUPERUSER;"
& $env:PGBIN\psql.exe -U postgres -c "ALTER ROLE noaccess WITH LOGIN;"
& $env:PGBIN\psql.exe -U postgres -c "GRANT CONNECT ON DATABASE dbt TO noaccess;"
displayName: Install postgresql and set up database
- task: UsePythonVersion@0
inputs:
versionSpec: '3.7'
architecture: 'x64'
- script: python -m pip install --upgrade pip && pip install tox
displayName: 'Install dependencies'
- script: python -m tox -e py-postgres -- -v -n4
displayName: Run integration tests
# These three are all similar except secure environment variables, which MUST be passed along to their tasks,
# but there's probably a better way to do this!
- job: SnowflakeIntegrationTest
pool:
vmImage: 'vs2017-win2016'
dependsOn: UnitTest
condition: succeeded()
steps:
- task: UsePythonVersion@0
inputs:
versionSpec: '3.7'
architecture: 'x64'
- script: python -m pip install --upgrade pip && pip install tox
displayName: 'Install dependencies'
- script: python -m tox -e py-snowflake -- -v -n4
env:
SNOWFLAKE_TEST_ACCOUNT: $(SNOWFLAKE_TEST_ACCOUNT)
SNOWFLAKE_TEST_PASSWORD: $(SNOWFLAKE_TEST_PASSWORD)
SNOWFLAKE_TEST_USER: $(SNOWFLAKE_TEST_USER)
SNOWFLAKE_TEST_WAREHOUSE: $(SNOWFLAKE_TEST_WAREHOUSE)
SNOWFLAKE_TEST_OAUTH_REFRESH_TOKEN: $(SNOWFLAKE_TEST_OAUTH_REFRESH_TOKEN)
SNOWFLAKE_TEST_OAUTH_CLIENT_ID: $(SNOWFLAKE_TEST_OAUTH_CLIENT_ID)
SNOWFLAKE_TEST_OAUTH_CLIENT_SECRET: $(SNOWFLAKE_TEST_OAUTH_CLIENT_SECRET)
displayName: Run integration tests
- job: BigQueryIntegrationTest
pool:
vmImage: 'vs2017-win2016'
dependsOn: UnitTest
condition: succeeded()
steps:
- task: UsePythonVersion@0
inputs:
versionSpec: '3.7'
architecture: 'x64'
- script: python -m pip install --upgrade pip && pip install tox
displayName: 'Install dependencies'
- script: python -m tox -e py-bigquery -- -v -n4
env:
BIGQUERY_SERVICE_ACCOUNT_JSON: $(BIGQUERY_SERVICE_ACCOUNT_JSON)
displayName: Run integration tests
- job: RedshiftIntegrationTest
pool:
vmImage: 'vs2017-win2016'
dependsOn: UnitTest
condition: succeeded()
steps:
- task: UsePythonVersion@0
inputs:
versionSpec: '3.7'
architecture: 'x64'
- script: python -m pip install --upgrade pip && pip install tox
displayName: 'Install dependencies'
- script: python -m tox -e py-redshift -- -v -n4
env:
REDSHIFT_TEST_DBNAME: $(REDSHIFT_TEST_DBNAME)
REDSHIFT_TEST_PASS: $(REDSHIFT_TEST_PASS)
REDSHIFT_TEST_USER: $(REDSHIFT_TEST_USER)
REDSHIFT_TEST_PORT: $(REDSHIFT_TEST_PORT)
REDSHIFT_TEST_HOST: $(REDSHIFT_TEST_HOST)
displayName: Run integration tests
- job: BuildWheel
pool:
vmImage: 'vs2017-win2016'
dependsOn:
- UnitTest
- PostgresIntegrationTest
- RedshiftIntegrationTest
- SnowflakeIntegrationTest
- BigQueryIntegrationTest
condition: succeeded()
steps:
- task: UsePythonVersion@0
inputs:
versionSpec: '3.7'
architecture: 'x64'
- script: python -m pip install --upgrade pip setuptools && python -m pip install -r requirements.txt && python -m pip install -r dev-requirements.txt
displayName: Install dependencies
- task: ShellScript@2
inputs:
scriptPath: scripts/build-wheels.sh
- task: CopyFiles@2
inputs:
contents: 'dist\?(*.whl|*.tar.gz)'
TargetFolder: '$(Build.ArtifactStagingDirectory)'
- task: PublishBuildArtifacts@1
inputs:
pathtoPublish: '$(Build.ArtifactStagingDirectory)'
artifactName: dists

core/cli.py

@@ -0,0 +1,4 @@
from dbt.main import main
if __name__ == '__main__':
main()
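
This small wrapper gives a stable path for invoking dbt directly from a source checkout (the "fix path to cli tool" commit above appears to refer to it). With the editable requirements installed, something like the following should work, since CLI arguments are handled by dbt.main.main:

python core/cli.py --version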


@@ -19,7 +19,7 @@ services:
GROUP_ID: ${GROUP_ID:-}
command: "/root/.virtualenvs/dbt/bin/pytest"
environment:
DOCKER_TEST_DATABASE_HOST: "database"
POSTGRES_TEST_HOST: "database"
volumes:
- .:/usr/app
working_dir: /usr/app


@@ -50,6 +50,9 @@ with open(os.path.join(this_directory, 'README.md')) as f:
DBT_PSYCOPG2_NAME = _dbt_psycopg2_name()
setup(
name=package_name,
version=package_version,


@@ -1,11 +1,12 @@
#!/bin/bash -eo pipefail
#!/bin/bash
set -eo pipefail
DBT_PATH="$( cd "$(dirname "$0")/.." ; pwd -P )"
PYTHON_BIN=${PYTHON_BIN:-python}
echo $SCRIPTPATH
echo $PYTHON_BIN
echo "$PYTHON_BIN"
set -x
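
The shebang change above is a real fix, not cosmetics: Linux passes everything after the interpreter path as a single argument, so option lists like "-eo pipefail" are unreliable in a shebang, and the shebang is bypassed entirely when the script is invoked as "bash script.sh". Setting the options in the body behaves the same either way:

set -eo pipefail
false | true   # with pipefail this pipeline exits nonzero, and -e then aborts the script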


@@ -1,23 +0,0 @@
#!/bin/bash -eo pipefail
DBT_PATH="$( cd "$(dirname "$0")/.." ; pwd -P )"
echo $SCRIPTPATH
set -x
rm -rf "$DBT_PATH"/dist
mkdir -p "$DBT_PATH"/dist
for SUBPATH in core plugins/postgres plugins/redshift plugins/bigquery plugins/snowflake
do
rm -rf "$DBT_PATH"/"$SUBPATH"/dist
cd "$DBT_PATH"/"$SUBPATH"
python setup.py sdist
cp -r "$DBT_PATH"/"$SUBPATH"/dist/* "$DBT_PATH"/dist/
done
cd "$DBT_PATH"
python setup.py sdist
set +x


@@ -12,9 +12,9 @@ SNOWFLAKE_TEST_OAUTH_REFRESH_TOKEN=
SNOWFLAKE_TEST_OAUTH_CLIENT_ID=
SNOWFLAKE_TEST_OAUTH_CLIENT_SECRET=
BIGQUERY_SERVICE_ACCOUNT_JSON=
BIGQUERY_TEST_SERVICE_ACCOUNT_JSON=
BIGQUERY_TEST_ALT_DATABASE=
BIGQUERY_POLICY_TAG=
BIGQUERY_TEST_POLICY_TAG=
REDSHIFT_TEST_HOST=
REDSHIFT_TEST_USER=


@@ -142,7 +142,7 @@ class DBTIntegrationTest(unittest.TestCase):
@property
def database_host(self):
return os.environ.get('DOCKER_TEST_DATABASE_HOST', 'localhost')
return os.getenv('POSTGRES_TEST_HOST', 'localhost')
def postgres_profile(self):
return {
@@ -155,20 +155,20 @@ class DBTIntegrationTest(unittest.TestCase):
'type': 'postgres',
'threads': 4,
'host': self.database_host,
'port': 5432,
'user': 'root',
'pass': 'password',
'dbname': 'dbt',
'port': int(os.getenv('POSTGRES_TEST_PORT', 5432)),
'user': os.getenv('POSTGRES_TEST_USER', 'root'),
'pass': os.getenv('POSTGRES_TEST_PASS', 'password'),
'dbname': os.getenv('POSTGRES_TEST_DATABASE', 'dbt'),
'schema': self.unique_schema()
},
'noaccess': {
'type': 'postgres',
'threads': 4,
'host': self.database_host,
'port': 5432,
'port': int(os.getenv('POSTGRES_TEST_PORT', 5432)),
'user': 'noaccess',
'pass': 'password',
'dbname': 'dbt',
'dbname': os.getenv('POSTGRES_TEST_DATABASE', 'dbt'),
'schema': self.unique_schema()
}
},
@@ -244,7 +244,7 @@ class DBTIntegrationTest(unittest.TestCase):
}
def bigquery_profile(self):
credentials_json_str = os.getenv('BIGQUERY_SERVICE_ACCOUNT_JSON').replace("'", '')
credentials_json_str = os.getenv('BIGQUERY_TEST_SERVICE_ACCOUNT_JSON').replace("'", '')
credentials = json.loads(credentials_json_str)
project_id = credentials.get('project_id')
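
With the hardcoded connection details replaced by environment lookups, the integration suite can target any reachable postgres instead of the docker-compose database. Illustrative values (these match the defaults, so only deviations need exporting):

export POSTGRES_TEST_HOST=localhost
export POSTGRES_TEST_PORT=5432
export POSTGRES_TEST_USER=root
export POSTGRES_TEST_PASS=password
export POSTGRES_TEST_DATABASE=dbt
tox -e integration-postgres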


@@ -64,7 +64,6 @@ def project_root(tmpdir):
return tmpdir.mkdir('project')
def postgres_profile_data(unique_schema):
database_host = os.environ.get('DOCKER_TEST_DATABASE_HOST', 'localhost')
return {
'config': {
@@ -75,21 +74,21 @@ def postgres_profile_data(unique_schema):
'default': {
'type': 'postgres',
'threads': 4,
'host': database_host,
'port': 5432,
'user': 'root',
'pass': 'password',
'dbname': 'dbt',
'host': os.environ.get('POSTGRES_TEST_HOST', 'localhost'),
'port': int(os.environ.get('POSTGRES_TEST_PORT', 5432)),
'user': os.environ.get('POSTGRES_TEST_USER', 'root'),
'pass': os.environ.get('POSTGRES_TEST_PASS', 'password'),
'dbname': os.environ.get('POSTGRES_TEST_DATABASE', 'dbt'),
'schema': unique_schema,
},
'other_schema': {
'type': 'postgres',
'threads': 4,
'host': database_host,
'port': 5432,
'user': 'root',
'pass': 'password',
'dbname': 'dbt',
'host': os.environ.get('POSTGRES_TEST_HOST', 'localhost'),
'port': int(os.environ.get('POSTGRES_TEST_PORT', 5432)),
'user': os.environ.get('POSTGRES_TEST_USER', 'root'),
'pass': os.environ.get('POSTGRES_TEST_PASS', 'password'),
'dbname': os.environ.get('POSTGRES_TEST_DATABASE', 'dbt'),
'schema': unique_schema+'_alt',
}
},


@@ -7,6 +7,7 @@ import yaml
import dbt.flags
import dbt.parser
from dbt import tracking
from dbt.exceptions import CompilationException
from dbt.parser import (
ModelParser, MacroParser, DataTestParser, SchemaParser,
@@ -69,6 +70,9 @@ class BaseParserTest(unittest.TestCase):
def setUp(self):
dbt.flags.STRICT_MODE = True
dbt.flags.WARN_ERROR = True
# HACK: this is needed since tracking events can
# be sent when using the model parser
tracking.do_not_track()
self.maxDiff = None

tox.ini

@@ -1,14 +1,6 @@
[tox]
skipsdist = True
envlist = py36,py37,py38,flake8,mypy
[testenv]
description = unit testing
skip_install = true
commands = {envpython} -m pytest {posargs} test/unit
deps =
-rdev-requirements.txt
-reditable-requirements.txt
envlist = py36,py37,py38,py39,flake8,mypy
[testenv:flake8]
description = flake8 code checks
@@ -32,10 +24,19 @@ deps =
-rdev-requirements.txt
-reditable-requirements.txt
[testenv:py{36,37,38,39,}-{postgres,redshift,snowflake,bigquery}]
[testenv:{unit,py36,py37,py38,py39,py}]
description = unit testing
skip_install = true
passenv = DBT_* PYTEST_ADDOPTS
commands = {envpython} -m pytest {posargs} test/unit
deps =
-rdev-requirements.txt
-reditable-requirements.txt
[testenv:{integration,py36,py37,py38,py39,py}-{postgres,redshift,snowflake,bigquery}]
description = adapter plugin integration testing
skip_install = true
passenv = *
passenv = DBT_* REDSHIFT_TEST_* BIGQUERY_TEST_* SNOWFLAKE_TEST_* POSTGRES_TEST_* PYTEST_ADDOPTS
commands =
postgres: {envpython} -m pytest {posargs} -m profile_postgres test/integration
postgres: {envpython} -m pytest {posargs} --profile=postgres test/rpc
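
The reworked tox config gives CI the generic env names it sets in TOXENV, and the same entrypoints work locally; a sketch:

export PYTEST_ADDOPTS="-v --color=yes -n4"   # mirrors the workflows' pytest options
tox -e unit                   # unit tests on the default interpreter
tox -e py38                   # unit tests pinned to python 3.8
tox -e integration-postgres   # what CI runs via TOXENV=integration-${adapter}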