Mirror of https://github.com/dlt-hub/dlt.git — synced 2025-12-17 19:31:30 +00:00
Changes in this commit:
* add python 3.12 linting
* update locked versions to make project installable on py 3.12
* update flake8
* downgrade poetry for all tests relying on python 3.8
* drop python 3.8
* enable python 3.13
* copy test updates from python 3.13 branch
* update locked sentry version
* pin poetry to 1.8.5
* install ibis outside of poetry
* rename to workflows for consistency
* switch to published alpha version of dlt-pendulum for python 3.13
* fix images
* add note to readme
184 lines · 6.7 KiB · YAML
name: common | common

# Run on PRs targeting the main branches, or manually.
on:
  pull_request:
    branches:
      - master
      - devel
  workflow_dispatch:

# Cancel in-flight runs for the same PR/ref so only the latest commit is tested.
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

env:
  RUNTIME__LOG_LEVEL: ERROR
  RUNTIME__DLTHUB_TELEMETRY_ENDPOINT: ${{ secrets.RUNTIME__DLTHUB_TELEMETRY_ENDPOINT }}

  # we need the secrets only for the rest_api_pipeline tests which are in tests/sources
  # so we inject them only at the end
  SOURCES__GITHUB__ACCESS_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  # and also for the github_api_pipeline tests
  SOURCES__GITHUB_API_PIPELINE__ACCESS_TOKEN: ${{ secrets.GITHUB_TOKEN }}
jobs:
  # Reusable workflow that detects whether the PR touches anything outside docs;
  # downstream jobs are skipped for docs-only changes.
  get_docs_changes:
    name: docs changes
    uses: ./.github/workflows/get_docs_changes.yml
run_common:
|
|
name: test
|
|
needs: get_docs_changes
|
|
if: needs.get_docs_changes.outputs.changes_outside_docs == 'true'
|
|
strategy:
|
|
fail-fast: false
|
|
matrix:
|
|
os: ["ubuntu-latest", "macos-latest", "windows-latest"]
|
|
python-version: ["3.11.x"]
|
|
# Test all python versions on ubuntu only
|
|
include:
|
|
- python-version: "3.9.x"
|
|
os: "ubuntu-latest"
|
|
- python-version: "3.10.x"
|
|
os: "ubuntu-latest"
|
|
- python-version: "3.12.x"
|
|
os: "ubuntu-latest"
|
|
- python-version: "3.13.x"
|
|
os: "ubuntu-latest"
|
|
- python-version: "3.13.x"
|
|
os: "windows-latest"
|
|
|
|
defaults:
|
|
run:
|
|
shell: bash
|
|
runs-on: ${{ matrix.os }}
|
|
|
|
steps:
|
|
- name: Check out
|
|
uses: actions/checkout@master
|
|
|
|
- name: Setup Python
|
|
uses: actions/setup-python@v4
|
|
with:
|
|
python-version: ${{ matrix.python-version }}
|
|
|
|
- name: Install tzdata on windows
|
|
run: |
|
|
cd %USERPROFILE%
|
|
curl https://data.iana.org/time-zones/releases/tzdata2021e.tar.gz --output tzdata.tar.gz
|
|
mkdir tzdata
|
|
tar --extract --file tzdata.tar.gz --directory tzdata
|
|
mkdir %USERPROFILE%\Downloads\tzdata
|
|
copy tzdata %USERPROFILE%\Downloads\tzdata
|
|
curl https://raw.githubusercontent.com/unicode-org/cldr/master/common/supplemental/windowsZones.xml --output %USERPROFILE%\Downloads\tzdata\windowsZones.xml
|
|
if: runner.os == 'Windows'
|
|
shell: cmd
|
|
|
|
- name: Install Poetry
|
|
# https://github.com/snok/install-poetry#running-on-windows
|
|
uses: snok/install-poetry@v1.3.2
|
|
with:
|
|
virtualenvs-create: true
|
|
virtualenvs-in-project: true
|
|
installer-parallel: true
|
|
version: 1.8.5
|
|
|
|
# NOTE: do not cache. we want to have a clean state each run and we upgrade depdendencies later
|
|
# - name: Load cached venv
|
|
# id: cached-poetry-dependencies
|
|
# uses: actions/cache@v3
|
|
# with:
|
|
# # path: ${{ steps.pip-cache.outputs.dir }}
|
|
# path: .venv
|
|
# key: venv-${{ matrix.os }}-${{ matrix.python-version }}-${{ hashFiles('**/poetry.lock') }}
|
|
|
|
- name: Install dependencies
|
|
run: poetry install --no-interaction --with sentry-sdk
|
|
|
|
- run: |
|
|
poetry run pytest tests/common tests/normalize tests/reflection tests/load/test_dummy_client.py tests/extract/test_extract.py tests/extract/test_sources.py tests/pipeline/test_pipeline_state.py
|
|
if: runner.os != 'Windows'
|
|
name: Run common tests with minimum dependencies Linux/MAC
|
|
- run: |
|
|
poetry run pytest tests/common tests/normalize tests/reflection tests/load/test_dummy_client.py tests/extract/test_extract.py tests/extract/test_sources.py tests/pipeline/test_pipeline_state.py -m "not forked"
|
|
if: runner.os == 'Windows'
|
|
name: Run common tests with minimum dependencies Windows
|
|
shell: cmd
|
|
|
|
- name: Install duckdb dependencies
|
|
run: poetry install --no-interaction -E duckdb --with sentry-sdk
|
|
|
|
- run: |
|
|
poetry run pytest tests/pipeline/test_pipeline.py tests/pipeline/test_import_export_schema.py
|
|
if: runner.os != 'Windows'
|
|
name: Run pipeline smoke tests with minimum deps Linux/MAC
|
|
- run: |
|
|
poetry run pytest tests/pipeline/test_pipeline.py tests/pipeline/test_import_export_schema.py -m "not forked"
|
|
if: runner.os == 'Windows'
|
|
name: Run smoke tests with minimum deps Windows
|
|
shell: cmd
|
|
|
|
- name: Install pyarrow
|
|
run: poetry install --no-interaction -E duckdb -E cli -E parquet --with sentry-sdk
|
|
|
|
- run: |
|
|
poetry run pytest tests/pipeline/test_pipeline_extra.py -k arrow
|
|
if: runner.os != 'Windows'
|
|
name: Run pipeline tests with pyarrow but no pandas installed
|
|
- run: |
|
|
poetry run pytest tests/pipeline/test_pipeline_extra.py -k arrow -m "not forked"
|
|
if: runner.os == 'Windows'
|
|
name: Run pipeline tests with pyarrow but no pandas installed Windows
|
|
shell: cmd
|
|
|
|
- name: Install pipeline and sources dependencies
|
|
run: poetry install --no-interaction -E duckdb -E cli -E parquet -E deltalake -E sql_database --with sentry-sdk,pipeline,sources
|
|
|
|
- run: |
|
|
poetry run pytest tests/extract tests/pipeline tests/libs tests/cli/common tests/destinations tests/sources
|
|
if: runner.os != 'Windows'
|
|
name: Run extract and pipeline tests Linux/MAC
|
|
- run: |
|
|
poetry run pytest tests/extract tests/pipeline tests/libs tests/cli/common tests/destinations tests/sources -m "not forked"
|
|
if: runner.os == 'Windows'
|
|
name: Run extract tests Windows
|
|
shell: cmd
|
|
|
|
# here we upgrade sql alchemy to 2 an run the sql_database tests again
|
|
- name: Upgrade sql alchemy
|
|
run: poetry run pip install sqlalchemy==2.0.32
|
|
|
|
- run: |
|
|
poetry run pytest tests/sources/sql_database
|
|
if: runner.os != 'Windows'
|
|
name: Run extract and pipeline tests Linux/MAC
|
|
- run: |
|
|
poetry run pytest tests/sources/sql_database
|
|
if: runner.os == 'Windows'
|
|
name: Run extract tests Windows
|
|
shell: cmd
|
|
|
|
# - name: Install Pydantic 1.0
|
|
# run: pip install "pydantic<2"
|
|
|
|
# - run: |
|
|
# poetry run pytest tests/libs
|
|
# if: runner.os != 'Windows'
|
|
# name: Run extract and pipeline tests Linux/MAC
|
|
# - run: |
|
|
# poetry run pytest tests/libs
|
|
# if: runner.os == 'Windows'
|
|
# name: Run extract tests Windows
|
|
# shell: cmd
|
|
|
|
matrix_job_required_check:
|
|
name: common | common tests
|
|
needs: run_common
|
|
runs-on: ubuntu-latest
|
|
if: always()
|
|
steps:
|
|
- name: Check matrix job results
|
|
if: contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled')
|
|
run: |
|
|
echo "One or more matrix job tests failed or were cancelled. You may need to re-run them." && exit 1
|