Mirror of https://github.com/dbt-labs/dbt-core, synced 2025-12-20 12:21:27 +00:00

Compare commits: enable-pos… ... er/hatch-i… (16 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 616ad02b5b |  |
|  | e4abd2b365 |  |
|  | 7c49913998 |  |
|  | 25027d22ca |  |
|  | 85da858e34 |  |
|  | ad002ecff6 |  |
|  | b3b914c664 |  |
|  | 977f766195 |  |
|  | 95038afcbe |  |
|  | b5f157a7f8 |  |
|  | a158e39ebc |  |
|  | d1bf4d2fa1 |  |
|  | 12c238c626 |  |
|  | 34c3e4c9b3 |  |
|  | f53e953d06 |  |
|  | 4de1e78f78 |  |
@@ -32,6 +32,6 @@ first_value = 1

[bumpversion:part:nightly]

[bumpversion:file:core/pyproject.toml]
[bumpversion:file:core/dbt/__version__.py]
search = version = "{current_version}"
replace = version = "{new_version}"
.changes/unreleased/Under the Hood-20251121-140515.yaml (new file, 6 lines)

@@ -0,0 +1,6 @@
kind: Under the Hood
body: Switch hatchling for build tooling.
time: 2025-11-21T14:05:15.838252-05:00
custom:
  Author: emmyoop
  Issue: "12151"
.github/workflows/cut-release-branch.yml (vendored, 11 lines changed)

@@ -200,16 +200,15 @@ jobs:

      - name: "Install Python Dependencies"
        run: |
          python -m venv env
          source env/bin/activate
          python -m pip install --upgrade pip
          python -m pip install hatch bumpversion

      - name: "Bump Version To ${{ needs.cleanup_changelog.outputs.next-version }}"
        run: |
          source env/bin/activate
          python -m pip install -r dev-requirements.txt
          env/bin/bumpversion --allow-dirty --new-version ${{ needs.cleanup_changelog.outputs.next-version }} major
          git status
          cd core
          hatch run setup
          bumpversion --allow-dirty --new-version ${{ needs.cleanup_changelog.outputs.next-version }} major
          dbt --version

      - name: "Commit Version Bump to Branch"
        run: |
.github/workflows/main.yml (vendored, 72 lines changed)
@@ -54,19 +54,25 @@ jobs:
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # actions/setup-python@v6
|
||||
with:
|
||||
python-version: '3.10'
|
||||
python-version: "3.10"
|
||||
|
||||
- name: Install python dependencies
|
||||
run: |
|
||||
python -m pip install --user --upgrade pip
|
||||
python -m pip --version
|
||||
make dev
|
||||
make dev_req
|
||||
mypy --version
|
||||
dbt --version
|
||||
python -m pip install hatch
|
||||
cd core
|
||||
hatch run setup
|
||||
|
||||
- name: Verify dbt installation
|
||||
run: |
|
||||
cd core
|
||||
hatch run dbt --version
|
||||
|
||||
- name: Run pre-commit hooks
|
||||
run: pre-commit run --all-files --show-diff-on-failure
|
||||
run: |
|
||||
cd core
|
||||
hatch run code-quality
|
||||
|
||||
unit:
|
||||
name: unit test / python ${{ matrix.python-version }}
|
||||
@@ -77,7 +83,7 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
python-version: ["3.10", "3.11", "3.12", "3.13" ]
|
||||
python-version: ["3.10", "3.11", "3.12", "3.13"]
|
||||
|
||||
env:
|
||||
TOXENV: "unit"
|
||||
@@ -95,15 +101,15 @@ jobs:
|
||||
run: |
|
||||
python -m pip install --user --upgrade pip
|
||||
python -m pip --version
|
||||
python -m pip install tox
|
||||
tox --version
|
||||
python -m pip install hatch
|
||||
hatch --version
|
||||
|
||||
- name: Run unit tests
|
||||
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 10
|
||||
max_attempts: 3
|
||||
command: tox -e unit
|
||||
command: cd core && hatch run ci:unit-tests
|
||||
|
||||
- name: Get current date
|
||||
if: always()
|
||||
@@ -164,7 +170,7 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
python-version: [ "3.10", "3.11", "3.12", "3.13" ]
|
||||
python-version: ["3.10", "3.11", "3.12", "3.13"]
|
||||
os: ["ubuntu-latest"]
|
||||
split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
|
||||
env:
|
||||
@@ -218,15 +224,16 @@ jobs:
|
||||
run: |
|
||||
python -m pip install --user --upgrade pip
|
||||
python -m pip --version
|
||||
python -m pip install tox
|
||||
tox --version
|
||||
python -m pip install hatch
|
||||
hatch --version
|
||||
|
||||
- name: Run integration tests
|
||||
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 30
|
||||
max_attempts: 3
|
||||
command: tox -- --ddtrace
|
||||
shell: bash
|
||||
command: cd core && hatch run ci:integration-tests -- --ddtrace
|
||||
env:
|
||||
PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}
|
||||
|
||||
@@ -300,15 +307,16 @@ jobs:
|
||||
run: |
|
||||
python -m pip install --user --upgrade pip
|
||||
python -m pip --version
|
||||
python -m pip install tox
|
||||
tox --version
|
||||
python -m pip install hatch
|
||||
hatch --version
|
||||
|
||||
- name: Run integration tests
|
||||
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # nick-fields/retry@v3
|
||||
with:
|
||||
timeout_minutes: 30
|
||||
max_attempts: 3
|
||||
command: tox -- --ddtrace
|
||||
shell: bash
|
||||
command: cd core && hatch run ci:integration-tests -- --ddtrace
|
||||
env:
|
||||
PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}
|
||||
|
||||
@@ -361,12 +369,12 @@ jobs:
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # actions/setup-python@v6
|
||||
with:
|
||||
python-version: '3.10'
|
||||
python-version: "3.10"
|
||||
|
||||
- name: Install python dependencies
|
||||
run: |
|
||||
python -m pip install --user --upgrade pip
|
||||
python -m pip install --upgrade setuptools wheel twine check-wheel-contents
|
||||
python -m pip install --upgrade hatch twine check-wheel-contents
|
||||
python -m pip --version
|
||||
|
||||
- name: Build distributions
|
||||
@@ -375,27 +383,7 @@ jobs:
|
||||
- name: Show distributions
|
||||
run: ls -lh dist/
|
||||
|
||||
- name: Check distribution descriptions
|
||||
- name: Check and verify distributions
|
||||
run: |
|
||||
twine check dist/*
|
||||
|
||||
- name: Check wheel contents
|
||||
run: |
|
||||
check-wheel-contents dist/*.whl --ignore W007,W008
|
||||
|
||||
- name: Install wheel distributions
|
||||
run: |
|
||||
find ./dist/*.whl -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/
|
||||
|
||||
- name: Check wheel distributions
|
||||
run: |
|
||||
dbt --version
|
||||
|
||||
- name: Install source distributions
|
||||
# ignore dbt-1.0.0, which intentionally raises an error when installed from source
|
||||
run: |
|
||||
find ./dist/*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/
|
||||
|
||||
- name: Check source distributions
|
||||
run: |
|
||||
dbt --version
|
||||
cd core
|
||||
hatch run build:check-all
|
||||
|
||||
.github/workflows/release.yml (vendored, 40 lines changed)
@@ -76,8 +76,6 @@ jobs:
|
||||
job-setup:
|
||||
name: Log Inputs
|
||||
runs-on: ${{ vars.UBUNTU_LATEST }}
|
||||
outputs:
|
||||
starting_sha: ${{ steps.set_sha.outputs.starting_sha }}
|
||||
steps:
|
||||
- name: "[DEBUG] Print Variables"
|
||||
run: |
|
||||
@@ -88,34 +86,21 @@ jobs:
|
||||
echo Nightly release: ${{ inputs.nightly_release }}
|
||||
echo Only Docker: ${{ inputs.only_docker }}
|
||||
|
||||
- name: "Checkout target branch"
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.target_branch }}
|
||||
|
||||
# release-prep.yml really shouldn't take in the sha but since core + all adapters
|
||||
# depend on it now this workaround lets us not input it manually with risk of error.
|
||||
# The changes always get merged into the head so we can't use a specific commit for
|
||||
# releases anyways.
|
||||
- name: "Capture sha"
|
||||
id: set_sha
|
||||
run: |
|
||||
echo "starting_sha=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT
|
||||
|
||||
bump-version-generate-changelog:
|
||||
name: Bump package version, Generate changelog
|
||||
needs: [job-setup]
|
||||
if: ${{ !inputs.only_docker }}
|
||||
|
||||
uses: dbt-labs/dbt-release/.github/workflows/release-prep.yml@main
|
||||
uses: dbt-labs/dbt-release/.github/workflows/release-prep.yml@er/hatch-release
|
||||
|
||||
with:
|
||||
sha: ${{ needs.job-setup.outputs.starting_sha }}
|
||||
version_number: ${{ inputs.version_number }}
|
||||
hatch_directory: "core"
|
||||
target_branch: ${{ inputs.target_branch }}
|
||||
env_setup_script_path: "scripts/env-setup.sh"
|
||||
test_run: ${{ inputs.test_run }}
|
||||
nightly_release: ${{ inputs.nightly_release }}
|
||||
use_hatch: true
|
||||
|
||||
secrets: inherit
|
||||
|
||||
@@ -138,21 +123,18 @@ jobs:
|
||||
if: ${{ !failure() && !cancelled() && !inputs.only_docker }}
|
||||
needs: [job-setup, bump-version-generate-changelog]
|
||||
|
||||
uses: dbt-labs/dbt-release/.github/workflows/build.yml@main
|
||||
uses: dbt-labs/dbt-release/.github/workflows/build.yml@er/hatch-release
|
||||
|
||||
with:
|
||||
sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
|
||||
version_number: ${{ inputs.version_number }}
|
||||
hatch_directory: "core"
|
||||
changelog_path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}
|
||||
build_script_path: "scripts/build-dist.sh"
|
||||
s3_bucket_name: "core-team-artifacts"
|
||||
package_test_command: "dbt --version"
|
||||
test_run: ${{ inputs.test_run }}
|
||||
nightly_release: ${{ inputs.nightly_release }}
|
||||
|
||||
secrets:
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
use_hatch: true
|
||||
|
||||
github-release:
|
||||
name: GitHub Release
|
||||
@@ -160,7 +142,7 @@ jobs:
|
||||
|
||||
needs: [bump-version-generate-changelog, build-test-package]
|
||||
|
||||
uses: dbt-labs/dbt-release/.github/workflows/github-release.yml@main
|
||||
uses: dbt-labs/dbt-release/.github/workflows/github-release.yml@er/hatch-release
|
||||
|
||||
with:
|
||||
sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
|
||||
@@ -173,7 +155,7 @@ jobs:
|
||||
|
||||
needs: [github-release]
|
||||
|
||||
uses: dbt-labs/dbt-release/.github/workflows/pypi-release.yml@main
|
||||
uses: dbt-labs/dbt-release/.github/workflows/pypi-release.yml@er/hatch-release
|
||||
|
||||
with:
|
||||
version_number: ${{ inputs.version_number }}
|
||||
@@ -222,7 +204,7 @@ jobs:
|
||||
permissions:
|
||||
packages: write
|
||||
|
||||
uses: dbt-labs/dbt-release/.github/workflows/release-docker.yml@main
|
||||
uses: dbt-labs/dbt-release/.github/workflows/release-docker.yml@er/hatch-release
|
||||
with:
|
||||
package: ${{ matrix.package }}
|
||||
version_number: ${{ inputs.version_number }}
|
||||
@@ -241,7 +223,7 @@ jobs:
|
||||
docker-release,
|
||||
]
|
||||
|
||||
uses: dbt-labs/dbt-release/.github/workflows/slack-post-notification.yml@main
|
||||
uses: dbt-labs/dbt-release/.github/workflows/slack-post-notification.yml@er/hatch-release
|
||||
with:
|
||||
status: "failure"
|
||||
|
||||
@@ -262,7 +244,7 @@ jobs:
|
||||
docker-release,
|
||||
]
|
||||
|
||||
uses: dbt-labs/dbt-release/.github/workflows/slack-post-notification.yml@main
|
||||
uses: dbt-labs/dbt-release/.github/workflows/slack-post-notification.yml@er/hatch-release
|
||||
with:
|
||||
status: "failure"
|
||||
|
||||
|
||||
.github/workflows/schema-check.yml (vendored, 22 lines changed)
@@ -22,7 +22,7 @@ on:
|
||||
target_branch:
|
||||
description: "The branch to check against"
|
||||
type: string
|
||||
default: 'main'
|
||||
default: "main"
|
||||
required: true
|
||||
|
||||
# no special access is needed
|
||||
@@ -48,8 +48,8 @@ jobs:
|
||||
- name: Checkout dbt repo
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
||||
with:
|
||||
path: ${{ env.DBT_REPO_DIRECTORY }}
|
||||
ref: ${{ inputs.target_branch }}
|
||||
path: ${{ env.DBT_REPO_DIRECTORY }}
|
||||
ref: ${{ inputs.target_branch }}
|
||||
|
||||
- name: Check for changes in core/dbt/artifacts
|
||||
# https://github.com/marketplace/actions/paths-changes-filter
|
||||
@@ -72,18 +72,16 @@ jobs:
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
||||
with:
|
||||
repository: dbt-labs/schemas.getdbt.com
|
||||
ref: 'main'
|
||||
ref: "main"
|
||||
path: ${{ env.SCHEMA_REPO_DIRECTORY }}
|
||||
|
||||
- name: Generate current schema
|
||||
if: steps.check_artifact_changes.outputs.artifacts_changed == 'true'
|
||||
run: |
|
||||
cd ${{ env.DBT_REPO_DIRECTORY }}
|
||||
python3 -m venv env
|
||||
source env/bin/activate
|
||||
pip install --upgrade pip
|
||||
pip install -r dev-requirements.txt -r editable-requirements.txt
|
||||
python scripts/collect-artifact-schema.py --path ${{ env.LATEST_SCHEMA_PATH }}
|
||||
cd ${{ env.DBT_REPO_DIRECTORY }}/core
|
||||
pip install --upgrade pip hatch
|
||||
hatch run setup
|
||||
hatch run json-schema -- --path ${{ env.LATEST_SCHEMA_PATH }}
|
||||
|
||||
# Copy generated schema files into the schemas.getdbt.com repo
|
||||
# Do a git diff to find any changes
|
||||
@@ -99,5 +97,5 @@ jobs:
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # actions/upload-artifact@v4
|
||||
if: ${{ failure() && steps.check_artifact_changes.outputs.artifacts_changed == 'true' }}
|
||||
with:
|
||||
name: 'schema_changes.txt'
|
||||
path: '${{ env.SCHEMA_DIFF_ARTIFACT }}'
|
||||
name: "schema_changes.txt"
|
||||
path: "${{ env.SCHEMA_DIFF_ARTIFACT }}"
|
||||
|
||||
@@ -102,8 +102,8 @@ jobs:
|
||||
run: |
|
||||
pip install --user --upgrade pip
|
||||
pip --version
|
||||
pip install tox
|
||||
tox --version
|
||||
pip install hatch
|
||||
hatch --version
|
||||
|
||||
- name: Run postgres setup script
|
||||
run: |
|
||||
@@ -123,7 +123,7 @@ jobs:
|
||||
with:
|
||||
timeout_minutes: 30
|
||||
max_attempts: 3
|
||||
command: tox -e integration -- -nauto
|
||||
command: cd core && hatch run ci:integration-tests -- -nauto
|
||||
env:
|
||||
PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}
|
||||
|
||||
|
||||
.github/workflows/test-repeater.yml (vendored, 41 lines changed)
@@ -14,33 +14,33 @@ on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
branch:
|
||||
description: 'Branch to check out'
|
||||
description: "Branch to check out"
|
||||
type: string
|
||||
required: true
|
||||
default: 'main'
|
||||
default: "main"
|
||||
test_path:
|
||||
description: 'Path to single test to run (ex: tests/functional/retry/test_retry.py::TestRetry::test_fail_fast)'
|
||||
description: "Path to single test to run (ex: tests/functional/retry/test_retry.py::TestRetry::test_fail_fast)"
|
||||
type: string
|
||||
required: true
|
||||
default: 'tests/functional/...'
|
||||
default: "tests/functional/..."
|
||||
python_version:
|
||||
description: 'Version of Python to Test Against'
|
||||
description: "Version of Python to Test Against"
|
||||
type: choice
|
||||
options:
|
||||
- '3.10'
|
||||
- '3.11'
|
||||
- "3.10"
|
||||
- "3.11"
|
||||
os:
|
||||
description: 'OS to run test in'
|
||||
description: "OS to run test in"
|
||||
type: choice
|
||||
options:
|
||||
- 'ubuntu-latest'
|
||||
- 'macos-14'
|
||||
- 'windows-latest'
|
||||
- "ubuntu-latest"
|
||||
- "macos-14"
|
||||
- "windows-latest"
|
||||
num_runs_per_batch:
|
||||
description: 'Max number of times to run the test per batch. We always run 10 batches.'
|
||||
description: "Max number of times to run the test per batch. We always run 10 batches."
|
||||
type: number
|
||||
required: true
|
||||
default: '50'
|
||||
default: "50"
|
||||
|
||||
permissions: read-all
|
||||
|
||||
@@ -90,12 +90,19 @@ jobs:
|
||||
with:
|
||||
python-version: "${{ inputs.python_version }}"
|
||||
|
||||
- name: "Install hatch"
|
||||
run: python -m pip install --user --upgrade pip hatch
|
||||
|
||||
- name: "Setup Dev Environment"
|
||||
run: make dev
|
||||
run: |
|
||||
cd core
|
||||
hatch run setup
|
||||
|
||||
- name: "Set up postgres (linux)"
|
||||
if: inputs.os == '${{ vars.UBUNTU_LATEST }}'
|
||||
run: make setup-db
|
||||
run: |
|
||||
cd core
|
||||
hatch run setup-db
|
||||
|
||||
# mac and windows don't use make due to limitations with docker with those runners in GitHub
|
||||
- name: Set up postgres (macos)
|
||||
@@ -153,5 +160,5 @@ jobs:
|
||||
- name: "Error for Failures"
|
||||
if: ${{ steps.pytest.outputs.failure }}
|
||||
run: |
|
||||
echo "Batch ${{ matrix.batch }} failed ${{ steps.pytest.outputs.failure }} of ${{ inputs.num_runs_per_batch }} tests"
|
||||
exit 1
|
||||
echo "Batch ${{ matrix.batch }} failed ${{ steps.pytest.outputs.failure }} of ${{ inputs.num_runs_per_batch }} tests"
|
||||
exit 1
|
||||
|
||||
.gitignore (vendored, 2 lines changed)
@@ -15,6 +15,7 @@ build/
|
||||
!core/dbt/docs/build
|
||||
develop-eggs/
|
||||
dist/
|
||||
dist-*/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
@@ -95,6 +96,7 @@ target/
|
||||
# pycharm
|
||||
.idea/
|
||||
venv/
|
||||
.venv*/
|
||||
|
||||
# AWS credentials
|
||||
.aws/
|
||||
|
||||
@@ -35,7 +35,7 @@

There are many ways to contribute to the ongoing development of `dbt-core`, such as by participating in discussions and issues. We encourage you to first read our higher-level document: ["Expectations for Open Source Contributors"](https://docs.getdbt.com/docs/contributing/oss-expectations).

The rest of this document serves as a more granular guide for contributing code changes to `dbt-core` (this repository). It is not intended as a guide for using `dbt-core`, and some pieces assume a level of familiarity with Python development (virtualenvs, `pip`, etc). Specific code snippets in this guide assume you are using macOS or Linux and are comfortable with the command line.
The rest of this document serves as a more granular guide for contributing code changes to `dbt-core` (this repository). It is not intended as a guide for using `dbt-core`, and some pieces assume a level of familiarity with Python development and package managers. Specific code snippets in this guide assume you are using macOS or Linux and are comfortable with the command line.

If you get stuck, we're happy to help! Drop us a line in the `#dbt-core-development` channel in the [dbt Community Slack](https://community.getdbt.com).

@@ -74,13 +74,14 @@ There are some tools that will be helpful to you in developing locally. While th

These are the tools used in `dbt-core` development and testing:

- [`tox`](https://tox.readthedocs.io/en/latest/) to manage virtualenvs across python versions. We currently target the latest patch releases for Python 3.10, 3.11, 3.12, and 3.13
- [`tox`](https://tox.readthedocs.io/en/latest/) to manage isolated test environments across python versions. We currently target the latest patch releases for Python 3.10, 3.11, 3.12, and 3.13
- [`pytest`](https://docs.pytest.org/en/latest/) to define, discover, and run tests
- [`flake8`](https://flake8.pycqa.org/en/latest/) for code linting
- [`black`](https://github.com/psf/black) for code formatting
- [`mypy`](https://mypy.readthedocs.io/en/stable/) for static type checking
- [`pre-commit`](https://pre-commit.com) to easily run those checks
- [`changie`](https://changie.dev/) to create changelog entries, without merge conflicts
- [`hatchling`](https://hatch.pypa.io/) as the build backend for creating distribution packages
- [`make`](https://users.cs.duke.edu/~ola/courses/programming/Makefiles/Makefiles.html) to run multiple setup or test steps in combination. Don't worry too much, nobody _really_ understands how `make` works, and our Makefile aims to be super simple.
- [GitHub Actions](https://github.com/features/actions) for automating tests and checks, once a PR is pushed to the `dbt-core` repository

@@ -88,14 +89,9 @@ A deep understanding of these tools is not required to effectively contribute to

#### Virtual environments

We strongly recommend using virtual environments when developing code in `dbt-core`. We recommend creating this virtualenv
in the root of the `dbt-core` repository. To create a new virtualenv, run:
```sh
python3 -m venv env
source env/bin/activate
```
dbt-core uses [Hatch](https://hatch.pypa.io/) for dependency and environment management. Hatch automatically creates and manages isolated environments for development, testing, and building, so you don't need to manually create virtual environments.

This will create and activate a new Python virtual environment.
For more information on how Hatch manages environments, see the [Hatch environment documentation](https://hatch.pypa.io/latest/environment/).
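
For example, a minimal sketch of day-to-day Hatch usage (the `setup` script name comes from `core/hatch.toml`; the other commands are standard Hatch CLI):

```sh
cd core
# List the environments and scripts Hatch knows about for this project
hatch env show
# Run a project-defined script inside the managed default environment
hatch run setup
# Open a shell with the default environment activated
hatch shell
```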

#### Docker and `docker-compose`

@@ -114,12 +110,15 @@ brew install postgresql

### Installation

First make sure that you set up your `virtualenv` as described in [Setting up an environment](#setting-up-an-environment). Also ensure you have the latest version of pip installed with `pip install --upgrade pip`. Next, install `dbt-core` (and its dependencies):
First make sure you have Python 3.10 or later installed. Ensure you have the latest version of pip installed with `pip install --upgrade pip`. Next, install `hatch`. Finally, set up `dbt-core` for development:

```sh
make dev
cd core
hatch run setup
```
or, alternatively:

This will install all development dependencies and set up pre-commit hooks. Alternatively, you can install dependencies directly:

```sh
pip install -r dev-requirements.txt -r editable-requirements.txt
pre-commit install
```

@@ -127,9 +126,22 @@ pre-commit install

When installed in this way, any changes you make to your local copy of the source code will be reflected immediately in your next `dbt` run.

#### Building dbt-core

dbt-core uses [Hatch](https://hatch.pypa.io/) (specifically `hatchling`) as its build backend. To build distribution packages:

```sh
cd core
hatch build
```

This will create both wheel (`.whl`) and source distribution (`.tar.gz`) files in the `dist/` directory.

The build configuration is defined in `core/pyproject.toml`. You can also use the standard `python -m build` command if you prefer.
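
For instance, without Hatch installed you can produce the same artifacts with the standard `build` frontend (a sketch; these are essentially the commands the old release script ran, and the output directory is your choice):

```sh
cd core
python -m pip install --upgrade build
# Write the wheel and sdist to the repo-level dist/ directory
python -m build --outdir ../dist
```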

### Running `dbt-core`

With your virtualenv activated, the `dbt` script should point back to the source code you've cloned on your machine. You can verify this by running `which dbt`. This command should show you a path to an executable in your virtualenv.
Once you've run `hatch run setup`, the `dbt` command will be available in your PATH. You can verify this by running `which dbt`.

Configure your [profile](https://docs.getdbt.com/docs/configure-your-profile) as necessary to connect to your target databases. It may be a good idea to add a new profile pointing to a local Postgres instance, or a specific test sandbox within your data warehouse if appropriate. Make sure to create a profile before running integration tests.

@@ -147,9 +159,12 @@ Although `dbt-core` works with a number of different databases, you won't need t

Postgres offers the easiest way to test most `dbt-core` functionality today. They are the fastest to run, and the easiest to set up. To run the Postgres integration tests, you'll have to do one extra step of setting up the test database:

```sh
make setup-db
cd core
hatch run setup-db
```
or, alternatively:

Alternatively, you can run the setup commands directly:

```sh
docker-compose up -d database
PGHOST=localhost PGUSER=root PGPASSWORD=password PGDATABASE=postgres bash test/setup_db.sh
```

@@ -159,33 +174,51 @@ PGHOST=localhost PGUSER=root PGPASSWORD=password PGDATABASE=postgres bash test/setup_db.s

There are a few methods for running tests locally.

#### Makefile
#### Hatch scripts

There are multiple targets in the Makefile to run common test suites and code
checks, most notably:
The primary way to run tests and checks is using hatch scripts (defined in `core/hatch.toml`):

```sh
# Runs unit tests with py38 and code checks in parallel.
make test
# Runs postgres integration tests with py38 in "fail fast" mode.
make integration
```
> These make targets assume you have a local installation of a recent version of [`tox`](https://tox.readthedocs.io/en/latest/) for unit/integration testing and pre-commit for code quality checks,
> unless you use choose a Docker container to run tests. Run `make help` for more info.

Check out the other targets in the Makefile to see other commonly used test
suites.

```sh
cd core

# Run all unit tests
hatch run unit-tests

# Run unit tests and all code quality checks
hatch run test

# Run integration tests
hatch run integration-tests

# Run integration tests in fail-fast mode
hatch run integration-tests-fail-fast

# Run linting checks only
hatch run lint
hatch run flake8
hatch run mypy
hatch run black

# Run all pre-commit hooks
hatch run code-quality

# Clean build artifacts
hatch run clean
```

> These hatch scripts handle environment management and dependency installation automatically; under the hood they use [`tox`](https://tox.readthedocs.io/en/latest/) for unit/integration testing and `pre-commit` for code quality checks.

#### `pre-commit`
[`pre-commit`](https://pre-commit.com) takes care of running all code-checks for formatting and linting. Run `make dev` to install `pre-commit` in your local environment (we recommend running this command with a python virtual environment active). This command installs several pip executables including black, mypy, and flake8. Once this is done you can use any of the linter-based make targets as well as a git pre-commit hook that will ensure proper formatting and linting.

[`pre-commit`](https://pre-commit.com) takes care of running all code-checks for formatting and linting. Run `hatch run setup` (or `pip install -r dev-requirements.txt && pre-commit install`) to install `pre-commit` in your local environment (we recommend running this command with a python virtual environment active). This installs several pip executables including black, mypy, and flake8. Once installed, hooks will run automatically on `git commit`, or you can run them manually with `hatch run code-quality`.
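
For example, you can invoke an individual check either through its hatch script or by calling `pre-commit` directly (hook ids come from the repository's `.pre-commit-config.yaml`; `flake8-check` and `mypy-check` are the ones the hatch scripts wrap):

```sh
cd core
# Equivalent to: pre-commit run flake8-check --hook-stage manual --all-files
hatch run flake8
# Or call a hook directly
pre-commit run mypy-check --hook-stage manual --all-files
```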

#### `tox`

[`tox`](https://tox.readthedocs.io/en/latest/) takes care of managing virtualenvs and install dependencies in order to run tests. You can also run tests in parallel, for example, you can run unit tests for Python 3.8, Python 3.9, Python 3.10 and Python 3.11 checks in parallel with `tox -p`. Also, you can run unit tests for specific python versions with `tox -e py38`. The configuration for these tests in located in `tox.ini`.
[`tox`](https://tox.readthedocs.io/en/latest/) takes care of managing isolated test environments and installing dependencies in order to run tests. You can also run tests in parallel: for example, you can run unit tests for Python 3.10, 3.11, 3.12, and 3.13 in parallel with `tox -p`. You can also run unit tests for a specific python version with `tox -e py310`. The configuration for these tests is located in `tox.ini`.

#### `pytest`

Finally, you can also run a specific test or group of tests using [`pytest`](https://docs.pytest.org/en/latest/) directly. With a virtualenv active and dev dependencies installed you can do things like:
Finally, you can also run a specific test or group of tests using [`pytest`](https://docs.pytest.org/en/latest/) directly. After running `hatch run setup`, you can run pytest commands like:

```sh
# run all unit tests in a file
```
Makefile (163 lines changed)
@@ -1,146 +1,95 @@
|
||||
# ============================================================================
|
||||
# DEPRECATED: This Makefile is maintained for backwards compatibility only.
|
||||
#
|
||||
# dbt-core now uses Hatch for task management and development workflows.
|
||||
# Please migrate to using hatch commands directly:
|
||||
#
|
||||
# make dev → cd core && hatch run setup
|
||||
# make unit → cd core && hatch run unit-tests
|
||||
# make test → cd core && hatch run test
|
||||
# make integration → cd core && hatch run integration-tests
|
||||
# make lint → cd core && hatch run lint
|
||||
# make code_quality → cd core && hatch run code-quality
|
||||
# make setup-db → cd core && hatch run setup-db
|
||||
# make clean → cd core && hatch run clean
|
||||
#
|
||||
# See core/hatch.toml [envs.default.scripts] for all available
|
||||
# commands and CONTRIBUTING.md for detailed usage instructions.
|
||||
#
|
||||
# This Makefile will be removed in a future version of dbt-core.
|
||||
# ============================================================================
|
||||
|
||||
.DEFAULT_GOAL:=help
|
||||
|
||||
# Optional flag to run target in a docker container.
|
||||
# (example `make test USE_DOCKER=true`)
|
||||
ifeq ($(USE_DOCKER),true)
|
||||
DOCKER_CMD := docker-compose run --rm test
|
||||
endif
|
||||
|
||||
#
|
||||
# To override CI_flags, create a file at this repo's root dir named `makefile.test.env`. Fill it
|
||||
# with any ENV_VAR overrides required by your test environment, e.g.
|
||||
# DBT_TEST_USER_1=user
|
||||
# LOG_DIR="dir with a space in it"
|
||||
#
|
||||
# Warn: Restrict each line to one variable only.
|
||||
#
|
||||
ifeq (./makefile.test.env,$(wildcard ./makefile.test.env))
|
||||
include ./makefile.test.env
|
||||
endif
|
||||
|
||||
CI_FLAGS =\
|
||||
DBT_TEST_USER_1=$(if $(DBT_TEST_USER_1),$(DBT_TEST_USER_1),dbt_test_user_1)\
|
||||
DBT_TEST_USER_2=$(if $(DBT_TEST_USER_2),$(DBT_TEST_USER_2),dbt_test_user_2)\
|
||||
DBT_TEST_USER_3=$(if $(DBT_TEST_USER_3),$(DBT_TEST_USER_3),dbt_test_user_3)\
|
||||
RUSTFLAGS=$(if $(RUSTFLAGS),$(RUSTFLAGS),"-D warnings")\
|
||||
LOG_DIR=$(if $(LOG_DIR),$(LOG_DIR),./logs)\
|
||||
DBT_LOG_FORMAT=$(if $(DBT_LOG_FORMAT),$(DBT_LOG_FORMAT),json)
|
||||
|
||||
|
||||
.PHONY: dev_req
|
||||
dev_req: ## Installs dbt-* packages in develop mode along with only development dependencies.
|
||||
@\
|
||||
pip install -r dev-requirements.txt -r editable-requirements.txt
|
||||
@cd core && hatch run dev-req
|
||||
|
||||
.PHONY: dev
|
||||
dev: dev_req ## Installs dbt-* packages in develop mode along with development dependencies and pre-commit.
|
||||
@\
|
||||
$(DOCKER_CMD) pre-commit install
|
||||
dev: ## Installs dbt-* packages in develop mode along with development dependencies and pre-commit.
|
||||
@cd core && hatch run setup
|
||||
|
||||
.PHONY: dev-uninstall
|
||||
dev-uninstall: ## Uninstall all packages in venv except for build tools
|
||||
@\
|
||||
pip freeze | grep -v "^-e" | cut -d "@" -f1 | xargs pip uninstall -y; \
|
||||
pip uninstall -y dbt-core
|
||||
@pip freeze | grep -v "^-e" | cut -d "@" -f1 | xargs pip uninstall -y; \
|
||||
pip uninstall -y dbt-core
|
||||
|
||||
.PHONY: mypy
|
||||
mypy: .env ## Runs mypy against staged changes for static type checking.
|
||||
@\
|
||||
$(DOCKER_CMD) pre-commit run --hook-stage manual mypy-check | grep -v "INFO"
|
||||
mypy: ## Runs mypy against staged changes for static type checking.
|
||||
@cd core && hatch run mypy
|
||||
|
||||
.PHONY: flake8
|
||||
flake8: .env ## Runs flake8 against staged changes to enforce style guide.
|
||||
@\
|
||||
$(DOCKER_CMD) pre-commit run --hook-stage manual flake8-check | grep -v "INFO"
|
||||
flake8: ## Runs flake8 against staged changes to enforce style guide.
|
||||
@cd core && hatch run flake8
|
||||
|
||||
.PHONY: black
|
||||
black: .env ## Runs black against staged changes to enforce style guide.
|
||||
@\
|
||||
$(DOCKER_CMD) pre-commit run --hook-stage manual black-check -v | grep -v "INFO"
|
||||
black: ## Runs black against staged changes to enforce style guide.
|
||||
@cd core && hatch run black
|
||||
|
||||
.PHONY: lint
|
||||
lint: .env ## Runs flake8 and mypy code checks against staged changes.
|
||||
@\
|
||||
$(DOCKER_CMD) pre-commit run flake8-check --hook-stage manual | grep -v "INFO"; \
|
||||
$(DOCKER_CMD) pre-commit run mypy-check --hook-stage manual | grep -v "INFO"
|
||||
lint: ## Runs flake8 and mypy code checks against staged changes.
|
||||
@cd core && hatch run lint
|
||||
|
||||
.PHONY: code_quality
|
||||
code_quality: ## Runs all pre-commit hooks against all files.
|
||||
@cd core && hatch run code-quality
|
||||
|
||||
.PHONY: unit
|
||||
unit: .env ## Runs unit tests with py
|
||||
@\
|
||||
$(DOCKER_CMD) tox -e py
|
||||
unit: ## Runs unit tests with py
|
||||
@cd core && hatch run unit-tests
|
||||
|
||||
.PHONY: test
|
||||
test: .env ## Runs unit tests with py and code checks against staged changes.
|
||||
@\
|
||||
$(DOCKER_CMD) tox -e py; \
|
||||
$(DOCKER_CMD) pre-commit run black-check --hook-stage manual | grep -v "INFO"; \
|
||||
$(DOCKER_CMD) pre-commit run flake8-check --hook-stage manual | grep -v "INFO"; \
|
||||
$(DOCKER_CMD) pre-commit run mypy-check --hook-stage manual | grep -v "INFO"
|
||||
test: ## Runs unit tests with py and code checks against staged changes.
|
||||
@cd core && hatch run test
|
||||
|
||||
.PHONY: integration
|
||||
integration: .env ## Runs core integration tests using postgres with py-integration
|
||||
@\
|
||||
$(CI_FLAGS) $(DOCKER_CMD) tox -e py-integration -- -nauto
|
||||
integration: ## Runs core integration tests using postgres with py-integration
|
||||
@cd core && hatch run integration-tests
|
||||
|
||||
.PHONY: integration-fail-fast
|
||||
integration-fail-fast: .env ## Runs core integration tests using postgres with py-integration in "fail fast" mode.
|
||||
@\
|
||||
$(DOCKER_CMD) tox -e py-integration -- -x -nauto
|
||||
|
||||
.PHONY: interop
|
||||
interop: clean
|
||||
@\
|
||||
mkdir $(LOG_DIR) && \
|
||||
$(CI_FLAGS) $(DOCKER_CMD) tox -e py-integration -- -nauto && \
|
||||
LOG_DIR=$(LOG_DIR) cargo run --manifest-path test/interop/log_parsing/Cargo.toml
|
||||
integration-fail-fast: ## Runs core integration tests using postgres with py-integration in "fail fast" mode.
|
||||
@cd core && hatch run integration-tests-fail-fast
|
||||
|
||||
.PHONY: setup-db
|
||||
setup-db: ## Setup Postgres database with docker-compose for system testing.
|
||||
@\
|
||||
docker compose up -d database && \
|
||||
PGHOST=localhost PGUSER=root PGPASSWORD=password PGDATABASE=postgres SKIP_HOMEBREW=true bash test/setup_db.sh
|
||||
|
||||
# This rule creates a file named .env that is used by docker-compose for passing
|
||||
# the USER_ID and GROUP_ID arguments to the Docker image.
|
||||
.env: ## Setup step for using using docker-compose with make target.
|
||||
@touch .env
|
||||
ifneq ($(OS),Windows_NT)
|
||||
ifneq ($(shell uname -s), Darwin)
|
||||
@echo USER_ID=$(shell id -u) > .env
|
||||
@echo GROUP_ID=$(shell id -g) >> .env
|
||||
endif
|
||||
endif
|
||||
@cd core && hatch run setup-db
|
||||
|
||||
.PHONY: clean
|
||||
clean: ## Resets development environment.
|
||||
@echo 'cleaning repo...'
|
||||
@rm -f .coverage
|
||||
@rm -f .coverage.*
|
||||
@rm -rf .eggs/
|
||||
@rm -f .env
|
||||
@rm -rf .tox/
|
||||
@rm -rf build/
|
||||
@rm -rf dbt.egg-info/
|
||||
@rm -f dbt_project.yml
|
||||
@rm -rf dist/
|
||||
@rm -f htmlcov/*.{css,html,js,json,png}
|
||||
@rm -rf logs/
|
||||
@rm -rf target/
|
||||
@find . -type f -name '*.pyc' -delete
|
||||
@find . -type d -name '__pycache__' -depth -delete
|
||||
@echo 'done.'
|
||||
@cd core && hatch run clean
|
||||
|
||||
.PHONY: json_schema
|
||||
json_schema: ## Update generated JSON schema using code changes.
|
||||
@cd core && hatch run json-schema
|
||||
|
||||
.PHONY: help
|
||||
help: ## Show this help message.
|
||||
@echo 'usage: make [target] [USE_DOCKER=true]'
|
||||
@echo 'usage: make [target]'
|
||||
@echo
|
||||
@echo 'DEPRECATED: This Makefile is a compatibility shim.'
|
||||
@echo 'Please use "cd core && hatch run <command>" directly.'
|
||||
@echo
|
||||
@echo 'targets:'
|
||||
@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
|
||||
@echo
|
||||
@echo 'options:'
|
||||
@echo 'use USE_DOCKER=true to run target in a docker container'
|
||||
|
||||
.PHONY: json_schema
|
||||
json_schema: ## Update generated JSON schema using code changes.
|
||||
scripts/collect-artifact-schema.py --path schemas
|
||||
@echo 'For more information, see CONTRIBUTING.md'
|
||||
|
||||
@@ -1,4 +0,0 @@
recursive-include dbt/include *.py *.sql *.yml *.html *.md .gitkeep .gitignore
include dbt/py.typed
recursive-include dbt/task/docs *.html
recursive-include dbt/jsonschemas *.json

core/dbt/__version__.py (new file, 1 line)

@@ -0,0 +1 @@
version = "1.12.0a1"
@@ -3,14 +3,13 @@ import importlib
import importlib.util
import json
import os
import re
from importlib import metadata as importlib_metadata
from pathlib import Path
from typing import Iterator, List, Optional, Tuple

import requests

import dbt_common.semver as semver
from dbt.__version__ import version as __version_string
from dbt_common.ui import green, yellow

PYPI_VERSION_URL = "https://pypi.org/pypi/dbt-core/json"

@@ -233,16 +232,8 @@ def _resolve_version() -> str:
    try:
        return importlib_metadata.version("dbt-core")
    except importlib_metadata.PackageNotFoundError:
        pyproject_path = Path(__file__).resolve().parents[1] / "pyproject.toml"
        if not pyproject_path.exists():
            raise RuntimeError("Unable to locate pyproject.toml to determine dbt-core version")

        text = pyproject_path.read_text(encoding="utf-8")
        match = re.search(r'^version\s*=\s*"(?P<version>[^"]+)"', text, re.MULTILINE)
        if match:
            return match.group("version")

        raise RuntimeError("Unable to determine dbt-core version from pyproject.toml")
        # When running from source (not installed), use version from __version__.py
        return __version_string


__version__ = _resolve_version()
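The practical effect of this change: when `dbt-core` is not installed as a distribution (for example, dev dependencies are installed but the package itself is not), the reported version now falls back to `dbt/__version__.py` instead of being parsed out of `pyproject.toml`. A quick, hypothetical way to see this from a source checkout (assuming this module is `core/dbt/version.py`):

```sh
cd core
# With no dbt-core distribution installed, importlib.metadata raises PackageNotFoundError
# and the module falls back to the in-repo version string ("1.12.0a1").
python -c "from dbt.version import __version__; print(__version__)"
```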
core/hatch.toml (new file, 137 lines)
@@ -0,0 +1,137 @@
|
||||
[version]
|
||||
path = "dbt/__version__.py"
|
||||
|
||||
[build.targets.wheel]
|
||||
packages = ["dbt"]
|
||||
only-packages = true
|
||||
artifacts = [
|
||||
"dbt/include/**/*",
|
||||
"dbt/task/docs/**/*.html",
|
||||
"dbt/jsonschemas/**/*.json",
|
||||
"dbt/py.typed",
|
||||
]
|
||||
|
||||
[build.targets.sdist]
|
||||
include = [
|
||||
"/dbt",
|
||||
"/README.md",
|
||||
]
|
||||
|
||||
[build.targets.sdist.force-include]
|
||||
"../LICENSE" = "LICENSE"
|
||||
"dbt/task/docs/index.html" = "dbt/task/docs/index.html"
|
||||
|
||||
[envs.default]
|
||||
dependencies = [
|
||||
"pre-commit~=3.7.0",
|
||||
"pytest>=7.0,<8.0",
|
||||
"pytest-xdist~=3.6",
|
||||
"pytest-csv~=3.0",
|
||||
"pytest-logbook~=1.2",
|
||||
"logbook<1.9",
|
||||
"mypy>=1.9,<2.0",
|
||||
"flake8>=6.0,<7.0",
|
||||
"black>=24.3,<25.0",
|
||||
"tox~=4.16",
|
||||
]
|
||||
|
||||
[envs.default.scripts]
|
||||
# Setup commands
|
||||
setup = [
|
||||
"pip install -r ../dev-requirements.txt",
|
||||
"pip install -e .",
|
||||
"pre-commit install",
|
||||
]
|
||||
dev-req = [
|
||||
"pip install -r ../dev-requirements.txt",
|
||||
"pip install -e .",
|
||||
]
|
||||
|
||||
# Code quality commands
|
||||
code-quality = "pre-commit run --all-files --show-diff-on-failure"
|
||||
lint = [
|
||||
"pre-commit run flake8-check --hook-stage manual --all-files",
|
||||
"pre-commit run mypy-check --hook-stage manual --all-files",
|
||||
]
|
||||
flake8 = "pre-commit run flake8-check --hook-stage manual --all-files"
|
||||
mypy = "pre-commit run mypy-check --hook-stage manual --all-files"
|
||||
black = "pre-commit run black-check --hook-stage manual --all-files"
|
||||
|
||||
# Testing commands
|
||||
unit-tests = "tox -e unit"
|
||||
integration-tests = "tox -e py-integration -- -nauto"
|
||||
integration-tests-fail-fast = "tox -e py-integration -- -x -nauto"
|
||||
test = [
|
||||
"tox -e unit",
|
||||
"pre-commit run black-check --hook-stage manual --all-files",
|
||||
"pre-commit run flake8-check --hook-stage manual --all-files",
|
||||
"pre-commit run mypy-check --hook-stage manual --all-files",
|
||||
]
|
||||
|
||||
# Database setup
|
||||
setup-db = [
|
||||
"docker compose up -d database",
|
||||
"bash ../test/setup_db.sh",
|
||||
]
|
||||
|
||||
# Utility commands
|
||||
clean = [
|
||||
"rm -f .coverage",
|
||||
"rm -f .coverage.*",
|
||||
"rm -rf .eggs/",
|
||||
"rm -rf .tox/",
|
||||
"rm -rf build/",
|
||||
"rm -rf dbt.egg-info/",
|
||||
"rm -f dbt_project.yml",
|
||||
"rm -rf dist/",
|
||||
"find . -type f -name '*.pyc' -delete",
|
||||
"find . -type d -name __pycache__ -exec rm -rf {} +",
|
||||
]
|
||||
json-schema = "python ../scripts/collect-artifact-schema.py --path ../schemas"
|
||||
|
||||
[envs.build]
|
||||
detached = true
|
||||
dependencies = [
|
||||
"wheel",
|
||||
"twine",
|
||||
"check-wheel-contents",
|
||||
]
|
||||
|
||||
[envs.build.scripts]
|
||||
check-all = [
|
||||
"- check-wheel",
|
||||
"- check-sdist",
|
||||
]
|
||||
check-wheel = [
|
||||
"twine check dist/*",
|
||||
"find ./dist/dbt_core-*.whl -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/",
|
||||
"pip freeze | grep dbt-core",
|
||||
"dbt --version",
|
||||
]
|
||||
check-sdist = [
|
||||
"check-wheel-contents dist/*.whl --ignore W007,W008",
|
||||
"find ./dist/dbt_core-*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/",
|
||||
"pip freeze | grep dbt-core",
|
||||
"dbt --version",
|
||||
]
|
||||
|
||||
[envs.ci]
|
||||
dependencies = [
|
||||
"pytest>=7.0,<8.0",
|
||||
"pytest-xdist~=3.6",
|
||||
"tox~=4.16",
|
||||
]
|
||||
|
||||
[envs.ci.scripts]
|
||||
unit-tests = "tox -e unit"
|
||||
integration-tests = "tox -- {args}"
|
||||
|
||||
[envs.cd]
|
||||
dependencies = [
|
||||
"pytest>=7.0,<8.0",
|
||||
"pytest-xdist~=3.6",
|
||||
]
|
||||
|
||||
[envs.cd.scripts]
|
||||
unit-tests = "python -m pytest tests/unit"
|
||||
integration-tests = "python -m pytest tests/functional"
|
||||
@@ -1,38 +1,12 @@
|
||||
[tool.setuptools]
|
||||
package-dir = {"" = "."}
|
||||
include-package-data = true
|
||||
zip-safe = false
|
||||
|
||||
[tool.setuptools.packages.find]
|
||||
where = ["."]
|
||||
include = [
|
||||
"dbt",
|
||||
"dbt.*",
|
||||
]
|
||||
|
||||
# this needs to match MANIFEST.in for the wheels
|
||||
[tool.setuptools.package-data]
|
||||
"dbt" = [
|
||||
"include/**/*.py",
|
||||
"include/**/*.sql",
|
||||
"include/**/*.yml",
|
||||
"include/**/*.html",
|
||||
"include/**/*.md",
|
||||
"include/**/.gitkeep",
|
||||
"include/**/.gitignore",
|
||||
"task/docs/**/*.html",
|
||||
"jsonschemas/**/*.json",
|
||||
"py.typed",
|
||||
]
|
||||
|
||||
[project]
|
||||
name = "dbt-core"
|
||||
version = "1.12.0a1"
|
||||
dynamic = ["version"]
|
||||
description = "With dbt, data analysts and engineers can build analytics the way engineers build applications."
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.10"
|
||||
license = "Apache-2.0"
|
||||
license-files = ["License.md"] # License.md copied to core/ by build script even though it lives at the root by convention
|
||||
license-files = { globs = ["../LICENSE"] }
|
||||
keywords = []
|
||||
authors = [
|
||||
{ name = "dbt Labs", email = "info@dbtlabs.com" },
|
||||
@@ -102,9 +76,9 @@ Changelog = "https://github.com/dbt-labs/dbt-core/blob/main/CHANGELOG.md"
|
||||
[project.scripts]
|
||||
dbt = "dbt.cli.main:cli"
|
||||
|
||||
[tool.hatch.version]
|
||||
path = "dbt/__version__.py"
|
||||
|
||||
[build-system]
|
||||
requires = [
|
||||
"setuptools>=61",
|
||||
"wheel",
|
||||
]
|
||||
build-backend = "setuptools.build_meta"
|
||||
requires = ["hatchling"]
|
||||
build-backend = "hatchling.build"
|
||||
|
||||
@@ -1,26 +1,16 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""Legacy setuptools shim retained for compatibility with existing workflows. Will be removed in a future version."""
|
||||
"""
|
||||
DEPRECATED: This setup.py is maintained for backwards compatibility only.
|
||||
|
||||
dbt-core now uses hatchling as its build backend (defined in pyproject.toml).
|
||||
Please use `python -m build` or `pip install` directly instead of setup.py commands.
|
||||
|
||||
This file will be maintained indefinitely for legacy tooling support but is no
|
||||
longer the primary build interface.
|
||||
"""
|
||||
|
||||
from setuptools import setup
|
||||
|
||||
# the user has a downlevel version of setuptools.
|
||||
# ----
|
||||
# dbt-core uses these packages deeply, throughout the codebase, and there have been breaking changes in past patch releases (even though these are major-version-one).
|
||||
# Pin to the patch or minor version, and bump in each new minor version of dbt-core.
|
||||
# ----
|
||||
# dbt-core uses these packages in standard ways. Pin to the major version, and check compatibility
|
||||
# with major versions in each new minor version of dbt-core.
|
||||
# ----
|
||||
# These packages are major-version-0. Keep upper bounds on upcoming minor versions (which could have breaking changes)
|
||||
# and check compatibility / bump in each new minor version of dbt-core.
|
||||
# ----
|
||||
# These are major-version-0 packages also maintained by dbt-labs.
|
||||
# Accept patches but avoid automatically updating past a set minor version range.
|
||||
# Minor versions for these are expected to be backwards-compatible
|
||||
# ----
|
||||
# Expect compatibility with all new versions of these packages, so lower bounds only.
|
||||
# ----
|
||||
|
||||
if __name__ == "__main__":
|
||||
setup()
|
||||
|
||||
@@ -17,15 +17,11 @@ rm -rf "$DBT_PATH"/core/build

mkdir -p "$DBT_PATH"/dist

# Copy License.md to core/ for inclusion in distribution (required by Apache 2.0)
# The license-files in pyproject.toml references it relative to core/
cp "$DBT_PATH"/License.md "$DBT_PATH"/core/License.md

cd "$DBT_PATH"/core
$PYTHON_BIN -m pip install --upgrade build
$PYTHON_BIN -m build --outdir "$DBT_PATH/dist"
$PYTHON_BIN -m pip install --upgrade hatch
hatch build --clean

# Clean up License.md that was copied to core/ for build
rm -f "$DBT_PATH/core/License.md"
# Move built distributions to top-level dist/
mv "$DBT_PATH"/core/dist/* "$DBT_PATH"/dist/

set +x
scripts/compare-wheels.sh (new executable file, 84 lines)
@@ -0,0 +1,84 @@
|
||||
#!/bin/bash
|
||||
# Compare wheel contents between setuptools and hatch builds
|
||||
|
||||
set -e
|
||||
|
||||
BASELINE=$1
|
||||
HATCH=$2
|
||||
|
||||
if [ -z "$BASELINE" ] || [ -z "$HATCH" ]; then
|
||||
echo "Usage: $0 <baseline-dir> <hatch-dir>"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "=== File List Comparison ==="
|
||||
echo "Comparing file lists..."
|
||||
DIFF_OUTPUT=$(diff <(cd "$BASELINE" && find . -type f | sort) \
|
||||
<(cd "$HATCH" && find . -type f | sort) || true)
|
||||
|
||||
if [ -z "$DIFF_OUTPUT" ]; then
|
||||
echo "✓ File lists are identical"
|
||||
else
|
||||
echo "✗ File lists differ:"
|
||||
echo "$DIFF_OUTPUT"
|
||||
fi
|
||||
|
||||
echo ""
|
||||
echo "=== File Count ==="
|
||||
BASELINE_COUNT=$(cd "$BASELINE" && find . -type f | wc -l | tr -d ' ')
|
||||
HATCH_COUNT=$(cd "$HATCH" && find . -type f | wc -l | tr -d ' ')
|
||||
echo "Baseline: $BASELINE_COUNT files"
|
||||
echo "Hatch: $HATCH_COUNT files"
|
||||
|
||||
if [ "$BASELINE_COUNT" = "$HATCH_COUNT" ]; then
|
||||
echo "✓ File counts match"
|
||||
else
|
||||
echo "✗ File counts differ"
|
||||
fi
|
||||
|
||||
echo ""
|
||||
echo "=== Metadata Comparison ==="
|
||||
METADATA_DIFF=$(diff <(cat "$BASELINE"/dbt_core-*.dist-info/METADATA | grep -v "^Generator:") \
|
||||
<(cat "$HATCH"/dbt_core-*.dist-info/METADATA | grep -v "^Generator:") || true)
|
||||
|
||||
if [ -z "$METADATA_DIFF" ]; then
|
||||
echo "✓ Metadata is identical (ignoring Generator line)"
|
||||
else
|
||||
echo "✗ Metadata differs:"
|
||||
echo "$METADATA_DIFF"
|
||||
fi
|
||||
|
||||
echo ""
|
||||
echo "=== Entry Points ==="
|
||||
ENTRY_DIFF=$(diff <(cat "$BASELINE"/dbt_core-*.dist-info/entry_points.txt) \
|
||||
<(cat "$HATCH"/dbt_core-*.dist-info/entry_points.txt) || true)
|
||||
|
||||
if [ -z "$ENTRY_DIFF" ]; then
|
||||
echo "✓ Entry points are identical"
|
||||
else
|
||||
echo "✗ Entry points differ:"
|
||||
echo "$ENTRY_DIFF"
|
||||
fi
|
||||
|
||||
echo ""
|
||||
echo "=== Package Contents (from RECORD) ==="
|
||||
echo "Comparing package contents..."
|
||||
RECORD_DIFF=$(diff <(cat "$BASELINE"/dbt_core-*.dist-info/RECORD | cut -d',' -f1 | sort) \
|
||||
<(cat "$HATCH"/dbt_core-*.dist-info/RECORD | cut -d',' -f1 | sort) || true)
|
||||
|
||||
if [ -z "$RECORD_DIFF" ]; then
|
||||
echo "✓ Package contents are identical"
|
||||
else
|
||||
echo "✗ Package contents differ:"
|
||||
echo "$RECORD_DIFF"
|
||||
fi
|
||||
|
||||
echo ""
|
||||
echo "=== Summary ==="
|
||||
if [ -z "$DIFF_OUTPUT" ] && [ -z "$METADATA_DIFF" ] && [ -z "$ENTRY_DIFF" ] && [ -z "$RECORD_DIFF" ]; then
|
||||
echo "✓✓✓ All checks passed! Wheels are equivalent."
|
||||
exit 0
|
||||
else
|
||||
echo "✗✗✗ Some differences found. Review output above."
|
||||
exit 1
|
||||
fi
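One way to exercise this comparison script locally (a sketch; the wheel file names and staging directories below are illustrative, and wheels are plain zip archives so `unzip` works on them):

```sh
# Build the wheel with each backend into separate dist directories first,
# then unpack both and compare the unpacked trees.
unzip -q dist-setuptools/dbt_core-*.whl -d /tmp/baseline
unzip -q dist-hatch/dbt_core-*.whl -d /tmp/hatch
./scripts/compare-wheels.sh /tmp/baseline /tmp/hatch
```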
@@ -1,5 +1,6 @@
#!/bin/bash
# Set environment variables required for integration tests
# This is used in the release workflow to set the environment variables for the integration tests
echo "DBT_INVOCATION_ENV=github-actions" >> $GITHUB_ENV
echo "DBT_TEST_USER_1=dbt_test_user_1" >> $GITHUB_ENV
echo "DBT_TEST_USER_2=dbt_test_user_2" >> $GITHUB_ENV