dlt/.github/workflows/test_destinations_local.yml
David Scharf 3ebbfa1f9e migrate to uv (#2766)
* move pyproject.toml and makefile from old branch and add in-between changes

* update workflow files to use uv

* run new version of formatter

* fix building of images with uv

* possibly fix docs linting

* downgrade lancedb dependency to fix tests

* fix gcs compat mode for s3 for newest boto

* fix docstrings in examples

* add some uv constraints

* update readme.md and contributing.md and some other places

* allow duckdb 0.8 in range

* add link-mode copy to uv venv on windows

* remove poetry lockfile and unneeded lockfile checker

* fix chess api related failures

* sleep after dremio start..

* set correct package in pyproject

* Revert "add some uv constraints"

This reverts commit d611e9ecce.

# Conflicts:
#	pyproject.toml
#	uv.lock

* add missing databricks sql connector version bounds

# Tests destinations that can run without credentials.
# i.e. local postgres, duckdb, filesystem (with local fs/memory bucket)
name: dest | postgres, duckdb, fs, weaviate, qdrant

on:
  workflow_call:
  workflow_dispatch:

env:
  # RUNTIME__SENTRY_DSN: https://6f6f7b6f8e0f458a89be4187603b55fe@o1061158.ingest.sentry.io/4504819859914752
  RUNTIME__LOG_LEVEL: ERROR
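  # NOTE: dlt resolves RUNTIME__* environment variables as runtime configuration,
  # so this limits CI log output to errors only.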

jobs:
  run_destinations_local:
    name: dest | local
    strategy:
      fail-fast: false
      matrix:
        include:
          # Duckdb, Postgres, Filesystem, Weaviate, Qdrant (could be split?)
          - name: filesystem, weaviate, qdrant
            destinations: "[\"filesystem\", \"weaviate\", \"qdrant\"]"
            filesystem_drivers: "[\"memory\", \"file\", \"sftp\"]"
            extras: "--extra parquet --extra cli --extra filesystem --extra qdrant --extra weaviate --extra deltalake --extra pyiceberg --extra sftp"
            needs_weaviate: true
            needs_qdrant: true
            needs_ftp: true
            post_install_commands: "uv run pip install sqlalchemy==2.0.18" # minimum version required by `pyiceberg`

          - name: postgres, duckdb and dummy with cli commands
            destinations: "[\"postgres\", \"duckdb\", \"dummy\"]"
            filesystem_drivers: "[\"memory\", \"file\"]"
            extras: "--group adbc --extra postgres --extra postgis --extra parquet --extra duckdb --extra cli --extra filesystem"
            needs_postgres: true
            additional_tests: "pytest tests/cli"

          # Clickhouse OSS (TODO: test with minio s3)
          - name: clickhouse
            destinations: "[\"clickhouse\"]"
            filesystem_drivers: "[\"memory\", \"file\"]"
            extras: "--extra clickhouse --extra parquet"
            needs_clickhouse: true
            # NOTE: we only run non-staging tests, as staging tests require credentials for s3 and azure
            excluded_destination_configurations: "[\"clickhouse-parquet-staging-s3-authorization\", \"clickhouse-parquet-staging-az-authorization\", \"clickhouse-jsonl-staging-az-authorization\", \"clickhouse-jsonl-staging-s3-authorization\"]"

          # Dremio
          - name: dremio
            destinations: "[\"dremio\"]"
            filesystem_drivers: "[\"memory\"]"
            extras: "--extra s3 --extra gs --extra az --extra parquet"
            needs_dremio: true

          # SQLAlchemy 1.4
          - name: sqlalchemy
            destinations: "[\"sqlalchemy\"]"
            filesystem_drivers: "[\"memory\", \"file\"]"
            extras: "--extra sqlalchemy --extra filesystem --extra parquet"
            needs_mysql: true
            post_install_commands: "uv run pip install pymysql && uv run pip install sqlalchemy==1.4"

          # SQLAlchemy 2.0 (same as above but with sqlalchemy 2.0)
          - name: sqlalchemy
            destinations: "[\"sqlalchemy\"]"
            filesystem_drivers: "[\"memory\", \"file\"]"
            extras: "--extra sqlalchemy --extra filesystem --extra parquet"
            needs_mysql: true
            post_install_commands: "uv run pip install pymysql && uv run pip install sqlalchemy==2.0"

    env:
      ACTIVE_DESTINATIONS: ${{ matrix.destinations }}
      ALL_FILESYSTEM_DRIVERS: ${{ matrix.filesystem_drivers }}
      EXCLUDED_DESTINATION_CONFIGURATIONS: ${{ matrix.excluded_destination_configurations || '[]' }}
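      # NOTE: these variables are read by the dlt test suite to select which destinations,
      # filesystem drivers and destination configurations get exercised; an empty JSON list
      # means nothing is excluded.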

    defaults:
      run:
        shell: bash
    runs-on: "ubuntu-latest"

    # Service containers to run with `container-job`
    services:
      # Label used to access the service container
      postgres:
        # Docker Hub image
        image: postgis/postgis
        # Provide the password for postgres
        env:
          POSTGRES_DB: dlt_data
          POSTGRES_USER: loader
          POSTGRES_PASSWORD: loader
        ports:
          - 5432:5432
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
      qdrant:
        image: qdrant/qdrant:v1.8.4
        ports:
          - 6333:6333
      mysql:
        image: mysql:8
        env:
          MYSQL_ROOT_PASSWORD: root
          MYSQL_DATABASE: dlt_data
          MYSQL_USER: loader
          MYSQL_PASSWORD: loader
        ports:
          - 3306:3306
        # Wait for the service to be ready before completing the job
        options: >-
          --health-cmd="mysqladmin ping -h localhost -u root -proot"
          --health-interval=10s
          --health-timeout=5s
          --health-retries=5

    steps:
      - name: Check out
        uses: actions/checkout@master

      #
      # Start required services
      #
      - name: Start weaviate
        run: docker compose -f "tests/load/weaviate/docker-compose.yml" up -d
        if: ${{ matrix.needs_weaviate }}

      - name: Start SFTP server
        run: docker compose -f "tests/load/filesystem_sftp/docker-compose.yml" up -d
        if: ${{ matrix.needs_ftp }}

      - name: Configure SSH Agent for sftp tests
        run: |
          mkdir -p /home/runner/.ssh
          cp tests/load/filesystem_sftp/bootstrap/bobby_rsa /home/runner/.ssh/id_rsa
          cp tests/load/filesystem_sftp/bootstrap/bobby_rsa.pub /home/runner/.ssh/id_rsa.pub
        if: ${{ matrix.needs_ftp }}

      - name: Start dremio
        run: docker compose -f "tests/load/dremio/docker-compose.yml" up -d && sleep 30
        if: ${{ matrix.needs_dremio }}

      - name: Start ClickHouse OSS
        run: |
          docker compose -f "tests/load/clickhouse/docker-compose.yml" up -d
          echo "Waiting for ClickHouse to be healthy..."
          timeout 30s bash -c 'until docker compose -f "tests/load/clickhouse/docker-compose.yml" ps | grep -q "healthy"; do sleep 1; done'
          echo "ClickHouse is up and running"
        if: ${{ matrix.needs_clickhouse }}

      #
      # Setup python and run tests
      #
      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.10"

      - name: Install uv
        uses: astral-sh/setup-uv@v6
        with:
          python-version: "3.10"
          activate-environment: true
          enable-cache: true
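        # NOTE: activate-environment puts the uv-managed virtualenv on PATH so later steps
        # can call pytest directly; enable-cache reuses the uv cache between workflow runs.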

      - name: Install dependencies
        run: uv sync --group sentry-sdk --group pipeline --group ibis --group providers ${{ matrix.extras }}
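        # NOTE: `uv sync` installs the locked dependency groups above plus the per-matrix
        # groups/extras passed in via `matrix.extras`.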

      - name: Copy secrets for local tests
        run: |
          cp tests/.dlt/dev.secrets.toml tests/.dlt/secrets.toml

      - name: Run post install commands
        run: ${{ matrix.post_install_commands }}
        if: ${{ matrix.post_install_commands }}

      # always run full suite, also on branches
      - name: Run tests Linux
        run: |
          eval "$(ssh-agent -s)"
          pytest tests/load --ignore tests/load/sources --ignore tests/load/filesystem_sftp
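      # NOTE: ssh-agent is started in the same shell for the sftp filesystem driver tests,
      # which use the key pair copied to ~/.ssh in the SFTP setup step above.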

      - name: Run additional tests
        run: ${{ matrix.additional_tests }}
        if: ${{ matrix.additional_tests }}

      #
      # Tear down services
      #
      - name: Stop weaviate
        run: docker compose -f "tests/load/weaviate/docker-compose.yml" down -v
        if: ${{ always() }}

      - name: Stop SFTP server
        run: docker compose -f "tests/load/filesystem_sftp/docker-compose.yml" down -v
        if: ${{ always() }}

      - name: Stop dremio
        run: docker compose -f "tests/load/dremio/docker-compose.yml" down -v
        if: ${{ always() }}

      - name: Stop ClickHouse OSS
        run: docker compose -f "tests/load/clickhouse/docker-compose.yml" down -v
        if: ${{ always() }}