Move to hatch for build tooling (#12192)

* initial hatch implementation

* cleanup docs

* replacing makefile

* cleanup hatch commands to match adapters

reorganize more to match adapters setup

script comment

dont pip install

fix test commands

* changelog

improve changelog

* CI fix

* fix for env

* use a standard version file

* remove odd license logic

* fix bumpversion

* remove sha input

* more cleanup

* fix legacy build path

* define version for pyproject.toml

* use hatch hook for license

* remove tox

* ensure tests are split

* remove temp file for testing

* explicitly match old version in pyproject.toml

* fix up testing

* get rid of bumpversion

* put dev_dependencies.txt in hatch

* setup.py is now dead

* set python version for local dev

* local dev fixes

* temp script to compare wheels

* parity with existing wheel builds

* Revert "temp script to compare wheels"

This reverts commit c31417a092.

* fix docker test file
This commit is contained in:
Emily Rockman
2025-12-05 21:59:44 -05:00
committed by GitHub
parent 748d352b6b
commit 97df9278c0
26 changed files with 446 additions and 433 deletions

View File

@@ -1,37 +0,0 @@
[bumpversion]
current_version = 1.12.0a1
parse = (?P<major>[\d]+) # major version number
\.(?P<minor>[\d]+) # minor version number
\.(?P<patch>[\d]+) # patch version number
(?P<prerelease> # optional pre-release - ex: a1, b2, rc25
(?P<prekind>a|b|rc) # pre-release type
(?P<num>[\d]+) # pre-release version number
)?
( # optional nightly release indicator
\.(?P<nightly>dev[0-9]+) # ex: .dev02142023
)? # expected matches: `1.15.0`, `1.5.0a11`, `1.5.0a1.dev123`, `1.5.0.dev123457`, expected failures: `1`, `1.5`, `1.5.2-a1`, `text1.5.0`
serialize =
{major}.{minor}.{patch}{prekind}{num}.{nightly}
{major}.{minor}.{patch}.{nightly}
{major}.{minor}.{patch}{prekind}{num}
{major}.{minor}.{patch}
commit = False
tag = False
[bumpversion:part:prekind]
first_value = a
optional_value = final
values =
a
b
rc
final
[bumpversion:part:num]
first_value = 1
[bumpversion:part:nightly]
[bumpversion:file:core/pyproject.toml]
search = version = "{current_version}"
replace = version = "{new_version}"

View File

@@ -0,0 +1,6 @@
kind: Under the Hood
body: Replace setuptools and tox with hatch for build, test, and environment management.
time: 2025-11-21T14:05:15.838252-05:00
custom:
Author: emmyoop
Issue: "12151"

View File

@@ -200,16 +200,15 @@ jobs:
- name: "Install Python Dependencies" - name: "Install Python Dependencies"
run: | run: |
python -m venv env
source env/bin/activate
python -m pip install --upgrade pip python -m pip install --upgrade pip
python -m pip install hatch
- name: "Bump Version To ${{ needs.cleanup_changelog.outputs.next-version }}" - name: "Bump Version To ${{ needs.cleanup_changelog.outputs.next-version }}"
run: | run: |
source env/bin/activate cd core
python -m pip install -r dev-requirements.txt hatch version ${{ needs.cleanup_changelog.outputs.next-version }}
env/bin/bumpversion --allow-dirty --new-version ${{ needs.cleanup_changelog.outputs.next-version }} major hatch run dev-req
git status dbt --version
- name: "Commit Version Bump to Branch" - name: "Commit Version Bump to Branch"
run: | run: |
@@ -250,13 +249,13 @@ jobs:
- name: "Cleanup - Remove Trailing Whitespace Via Pre-commit" - name: "Cleanup - Remove Trailing Whitespace Via Pre-commit"
continue-on-error: true continue-on-error: true
run: | run: |
pre-commit run trailing-whitespace --files .bumpversion.cfg CHANGELOG.md .changes/* || true pre-commit run trailing-whitespace --files CHANGELOG.md .changes/* || true
# this step will fail on newline errors but also correct them # this step will fail on newline errors but also correct them
- name: "Cleanup - Remove Extra Newlines Via Pre-commit" - name: "Cleanup - Remove Extra Newlines Via Pre-commit"
continue-on-error: true continue-on-error: true
run: | run: |
pre-commit run end-of-file-fixer --files .bumpversion.cfg CHANGELOG.md .changes/* || true pre-commit run end-of-file-fixer --files CHANGELOG.md .changes/* || true
- name: "Commit Version Bump to Branch" - name: "Commit Version Bump to Branch"
run: | run: |

View File

@@ -60,16 +60,22 @@ jobs:
run: | run: |
python -m pip install --user --upgrade pip python -m pip install --user --upgrade pip
python -m pip --version python -m pip --version
make dev python -m pip install hatch
make dev_req cd core
mypy --version hatch run setup
dbt --version
- name: Verify dbt installation
run: |
cd core
hatch run dbt --version
- name: Run pre-commit hooks - name: Run pre-commit hooks
run: pre-commit run --all-files --show-diff-on-failure run: |
cd core
hatch run code-quality
unit: unit:
name: unit test / python ${{ matrix.python-version }} name: "unit test / python ${{ matrix.python-version }}"
runs-on: ubuntu-latest runs-on: ubuntu-latest
timeout-minutes: 10 timeout-minutes: 10
@@ -79,9 +85,6 @@ jobs:
matrix: matrix:
python-version: ["3.10", "3.11", "3.12", "3.13"] python-version: ["3.10", "3.11", "3.12", "3.13"]
env:
TOXENV: "unit"
steps: steps:
- name: Check out the repository - name: Check out the repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4 uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
@@ -95,15 +98,15 @@ jobs:
run: | run: |
python -m pip install --user --upgrade pip python -m pip install --user --upgrade pip
python -m pip --version python -m pip --version
python -m pip install tox python -m pip install hatch
tox --version hatch --version
- name: Run unit tests - name: Run unit tests
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # nick-fields/retry@v3 uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # nick-fields/retry@v3
with: with:
timeout_minutes: 10 timeout_minutes: 10
max_attempts: 3 max_attempts: 3
command: tox -e unit command: cd core && hatch run ci:unit-tests
- name: Get current date - name: Get current date
if: always() if: always()
@@ -156,7 +159,7 @@ jobs:
echo "include=${INCLUDE_GROUPS}" >> $GITHUB_OUTPUT echo "include=${INCLUDE_GROUPS}" >> $GITHUB_OUTPUT
integration-postgres: integration-postgres:
name: (${{ matrix.split-group }}) integration test / python ${{ matrix.python-version }} / ${{ matrix.os }} name: "(${{ matrix.split-group }}) integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}"
runs-on: ${{ matrix.os }} runs-on: ${{ matrix.os }}
timeout-minutes: 30 timeout-minutes: 30
@@ -169,7 +172,6 @@ jobs:
os: ["ubuntu-latest"] os: ["ubuntu-latest"]
split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }} split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
env: env:
TOXENV: integration
DBT_INVOCATION_ENV: github-actions DBT_INVOCATION_ENV: github-actions
DBT_TEST_USER_1: dbt_test_user_1 DBT_TEST_USER_1: dbt_test_user_1
DBT_TEST_USER_2: dbt_test_user_2 DBT_TEST_USER_2: dbt_test_user_2
@@ -219,17 +221,16 @@ jobs:
run: | run: |
python -m pip install --user --upgrade pip python -m pip install --user --upgrade pip
python -m pip --version python -m pip --version
python -m pip install tox python -m pip install hatch
tox --version hatch --version
- name: Run integration tests - name: Run integration tests
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # nick-fields/retry@v3 uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # nick-fields/retry@v3
with: with:
timeout_minutes: 30 timeout_minutes: 30
max_attempts: 3 max_attempts: 3
command: tox -- --ddtrace shell: bash
env: command: cd core && hatch run ci:integration-tests -- --ddtrace --splits ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }} --group ${{ matrix.split-group }}
PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}
- name: Get current date - name: Get current date
if: always() if: always()
@@ -265,7 +266,6 @@ jobs:
# already includes split group and runs mac + windows # already includes split group and runs mac + windows
include: ${{ fromJson(needs.integration-metadata.outputs.include) }} include: ${{ fromJson(needs.integration-metadata.outputs.include) }}
env: env:
TOXENV: integration
DBT_INVOCATION_ENV: github-actions DBT_INVOCATION_ENV: github-actions
DBT_TEST_USER_1: dbt_test_user_1 DBT_TEST_USER_1: dbt_test_user_1
DBT_TEST_USER_2: dbt_test_user_2 DBT_TEST_USER_2: dbt_test_user_2
@@ -302,17 +302,16 @@ jobs:
run: | run: |
python -m pip install --user --upgrade pip python -m pip install --user --upgrade pip
python -m pip --version python -m pip --version
python -m pip install tox python -m pip install hatch
tox --version hatch --version
- name: Run integration tests - name: Run integration tests
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # nick-fields/retry@v3 uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # nick-fields/retry@v3
with: with:
timeout_minutes: 30 timeout_minutes: 30
max_attempts: 3 max_attempts: 3
command: tox -- --ddtrace shell: bash
env: command: cd core && hatch run ci:integration-tests -- --ddtrace --splits ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }} --group ${{ matrix.split-group }}
PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}
- name: Get current date - name: Get current date
if: always() if: always()
@@ -369,7 +368,7 @@ jobs:
- name: Install python dependencies - name: Install python dependencies
run: | run: |
python -m pip install --user --upgrade pip python -m pip install --user --upgrade pip
python -m pip install --upgrade setuptools wheel twine check-wheel-contents python -m pip install --upgrade hatch twine check-wheel-contents
python -m pip --version python -m pip --version
- name: Build distributions - name: Build distributions
@@ -378,27 +377,7 @@ jobs:
- name: Show distributions - name: Show distributions
run: ls -lh dist/ run: ls -lh dist/
- name: Check distribution descriptions - name: Check and verify distributions
run: | run: |
twine check dist/* cd core
hatch run build:check-all
- name: Check wheel contents
run: |
check-wheel-contents dist/*.whl --ignore W007,W008
- name: Install wheel distributions
run: |
find ./dist/*.whl -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/
- name: Check wheel distributions
run: |
dbt --version
- name: Install source distributions
# ignore dbt-1.0.0, which intentionally raises an error when installed from source
run: |
find ./dist/*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/
- name: Check source distributions
run: |
dbt --version

View File

@@ -46,7 +46,7 @@ jobs:
- name: "Get Current Version Number" - name: "Get Current Version Number"
id: version-number-sources id: version-number-sources
run: | run: |
current_version=`awk -F"current_version = " '{print $2}' .bumpversion.cfg | tr '\n' ' '` current_version=$(grep '^version = ' core/dbt/__version__.py | sed 's/version = "\(.*\)"/\1/')
echo "current_version=$current_version" >> $GITHUB_OUTPUT echo "current_version=$current_version" >> $GITHUB_OUTPUT
- name: "Audit Version And Parse Into Parts" - name: "Audit Version And Parse Into Parts"

View File

@@ -22,7 +22,7 @@ on:
target_branch: target_branch:
description: "The branch to check against" description: "The branch to check against"
type: string type: string
default: 'main' default: "main"
required: true required: true
# no special access is needed # no special access is needed
@@ -48,8 +48,8 @@ jobs:
- name: Checkout dbt repo - name: Checkout dbt repo
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4 uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
with: with:
path: ${{ env.DBT_REPO_DIRECTORY }} path: ${{ env.DBT_REPO_DIRECTORY }}
ref: ${{ inputs.target_branch }} ref: ${{ inputs.target_branch }}
- name: Check for changes in core/dbt/artifacts - name: Check for changes in core/dbt/artifacts
# https://github.com/marketplace/actions/paths-changes-filter # https://github.com/marketplace/actions/paths-changes-filter
@@ -72,18 +72,16 @@ jobs:
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4 uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
with: with:
repository: dbt-labs/schemas.getdbt.com repository: dbt-labs/schemas.getdbt.com
ref: 'main' ref: "main"
path: ${{ env.SCHEMA_REPO_DIRECTORY }} path: ${{ env.SCHEMA_REPO_DIRECTORY }}
- name: Generate current schema - name: Generate current schema
if: steps.check_artifact_changes.outputs.artifacts_changed == 'true' if: steps.check_artifact_changes.outputs.artifacts_changed == 'true'
run: | run: |
cd ${{ env.DBT_REPO_DIRECTORY }} cd ${{ env.DBT_REPO_DIRECTORY }}/core
python3 -m venv env pip install --upgrade pip hatch
source env/bin/activate hatch run setup
pip install --upgrade pip hatch run json-schema -- --path ${{ env.LATEST_SCHEMA_PATH }}
pip install -r dev-requirements.txt -r editable-requirements.txt
python scripts/collect-artifact-schema.py --path ${{ env.LATEST_SCHEMA_PATH }}
# Copy generated schema files into the schemas.getdbt.com repo # Copy generated schema files into the schemas.getdbt.com repo
# Do a git diff to find any changes # Do a git diff to find any changes
@@ -99,5 +97,5 @@ jobs:
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # actions/upload-artifact@v4 uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # actions/upload-artifact@v4
if: ${{ failure() && steps.check_artifact_changes.outputs.artifacts_changed == 'true' }} if: ${{ failure() && steps.check_artifact_changes.outputs.artifacts_changed == 'true' }}
with: with:
name: 'schema_changes.txt' name: "schema_changes.txt"
path: '${{ env.SCHEMA_DIFF_ARTIFACT }}' path: "${{ env.SCHEMA_DIFF_ARTIFACT }}"

View File

@@ -102,8 +102,8 @@ jobs:
run: | run: |
pip install --user --upgrade pip pip install --user --upgrade pip
pip --version pip --version
pip install tox pip install hatch
tox --version hatch --version
- name: Run postgres setup script - name: Run postgres setup script
run: | run: |
@@ -123,7 +123,7 @@ jobs:
with: with:
timeout_minutes: 30 timeout_minutes: 30
max_attempts: 3 max_attempts: 3
command: tox -e integration -- -nauto command: cd core && hatch run ci:integration-tests -- -nauto
env: env:
PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }} PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}

View File

@@ -14,33 +14,33 @@ on:
workflow_dispatch: workflow_dispatch:
inputs: inputs:
branch: branch:
description: 'Branch to check out' description: "Branch to check out"
type: string type: string
required: true required: true
default: 'main' default: "main"
test_path: test_path:
description: 'Path to single test to run (ex: tests/functional/retry/test_retry.py::TestRetry::test_fail_fast)' description: "Path to single test to run (ex: tests/functional/retry/test_retry.py::TestRetry::test_fail_fast)"
type: string type: string
required: true required: true
default: 'tests/functional/...' default: "tests/functional/..."
python_version: python_version:
description: 'Version of Python to Test Against' description: "Version of Python to Test Against"
type: choice type: choice
options: options:
- '3.10' - "3.10"
- '3.11' - "3.11"
os: os:
description: 'OS to run test in' description: "OS to run test in"
type: choice type: choice
options: options:
- 'ubuntu-latest' - "ubuntu-latest"
- 'macos-14' - "macos-14"
- 'windows-latest' - "windows-latest"
num_runs_per_batch: num_runs_per_batch:
description: 'Max number of times to run the test per batch. We always run 10 batches.' description: "Max number of times to run the test per batch. We always run 10 batches."
type: number type: number
required: true required: true
default: '50' default: "50"
permissions: read-all permissions: read-all
@@ -90,12 +90,19 @@ jobs:
with: with:
python-version: "${{ inputs.python_version }}" python-version: "${{ inputs.python_version }}"
- name: "Install hatch"
run: python -m pip install --user --upgrade pip hatch
- name: "Setup Dev Environment" - name: "Setup Dev Environment"
run: make dev run: |
cd core
hatch run setup
- name: "Set up postgres (linux)" - name: "Set up postgres (linux)"
if: inputs.os == '${{ vars.UBUNTU_LATEST }}' if: inputs.os == '${{ vars.UBUNTU_LATEST }}'
run: make setup-db run: |
cd core
hatch run setup-db
# mac and windows don't use make due to limitations with docker with those runners in GitHub # mac and windows don't use make due to limitations with docker with those runners in GitHub
- name: Set up postgres (macos) - name: Set up postgres (macos)
@@ -153,5 +160,5 @@ jobs:
- name: "Error for Failures" - name: "Error for Failures"
if: ${{ steps.pytest.outputs.failure }} if: ${{ steps.pytest.outputs.failure }}
run: | run: |
echo "Batch ${{ matrix.batch }} failed ${{ steps.pytest.outputs.failure }} of ${{ inputs.num_runs_per_batch }} tests" echo "Batch ${{ matrix.batch }} failed ${{ steps.pytest.outputs.failure }} of ${{ inputs.num_runs_per_batch }} tests"
exit 1 exit 1

2
.gitignore vendored
View File

@@ -15,6 +15,7 @@ build/
!core/dbt/docs/build !core/dbt/docs/build
develop-eggs/ develop-eggs/
dist/ dist/
dist-*/
downloads/ downloads/
eggs/ eggs/
.eggs/ .eggs/
@@ -95,6 +96,7 @@ target/
# pycharm # pycharm
.idea/ .idea/
venv/ venv/
.venv*/
# AWS credentials # AWS credentials
.aws/ .aws/

View File

@@ -20,9 +20,8 @@
- [Testing](#testing) - [Testing](#testing)
- [Initial setup](#initial-setup) - [Initial setup](#initial-setup)
- [Test commands](#test-commands) - [Test commands](#test-commands)
- [Makefile](#makefile) - [Hatch scripts](#hatch-scripts)
- [`pre-commit`](#pre-commit) - [`pre-commit`](#pre-commit)
- [`tox`](#tox)
- [`pytest`](#pytest) - [`pytest`](#pytest)
- [Unit, Integration, Functional?](#unit-integration-functional) - [Unit, Integration, Functional?](#unit-integration-functional)
- [Debugging](#debugging) - [Debugging](#debugging)
@@ -35,7 +34,7 @@
There are many ways to contribute to the ongoing development of `dbt-core`, such as by participating in discussions and issues. We encourage you to first read our higher-level document: ["Expectations for Open Source Contributors"](https://docs.getdbt.com/docs/contributing/oss-expectations). There are many ways to contribute to the ongoing development of `dbt-core`, such as by participating in discussions and issues. We encourage you to first read our higher-level document: ["Expectations for Open Source Contributors"](https://docs.getdbt.com/docs/contributing/oss-expectations).
The rest of this document serves as a more granular guide for contributing code changes to `dbt-core` (this repository). It is not intended as a guide for using `dbt-core`, and some pieces assume a level of familiarity with Python development (virtualenvs, `pip`, etc). Specific code snippets in this guide assume you are using macOS or Linux and are comfortable with the command line. The rest of this document serves as a more granular guide for contributing code changes to `dbt-core` (this repository). It is not intended as a guide for using `dbt-core`, and some pieces assume a level of familiarity with Python development and package managers. Specific code snippets in this guide assume you are using macOS or Linux and are comfortable with the command line.
If you get stuck, we're happy to help! Drop us a line in the `#dbt-core-development` channel in the [dbt Community Slack](https://community.getdbt.com). If you get stuck, we're happy to help! Drop us a line in the `#dbt-core-development` channel in the [dbt Community Slack](https://community.getdbt.com).
@@ -74,28 +73,22 @@ There are some tools that will be helpful to you in developing locally. While th
These are the tools used in `dbt-core` development and testing: These are the tools used in `dbt-core` development and testing:
- [`tox`](https://tox.readthedocs.io/en/latest/) to manage virtualenvs across python versions. We currently target the latest patch releases for Python 3.10, 3.11, 3.12, and 3.13 - [`hatch`](https://hatch.pypa.io/) for build backend, environment management, and running tests across Python versions (3.10, 3.11, 3.12, and 3.13)
- [`pytest`](https://docs.pytest.org/en/latest/) to define, discover, and run tests - [`pytest`](https://docs.pytest.org/en/latest/) to define, discover, and run tests
- [`flake8`](https://flake8.pycqa.org/en/latest/) for code linting - [`flake8`](https://flake8.pycqa.org/en/latest/) for code linting
- [`black`](https://github.com/psf/black) for code formatting - [`black`](https://github.com/psf/black) for code formatting
- [`mypy`](https://mypy.readthedocs.io/en/stable/) for static type checking - [`mypy`](https://mypy.readthedocs.io/en/stable/) for static type checking
- [`pre-commit`](https://pre-commit.com) to easily run those checks - [`pre-commit`](https://pre-commit.com) to easily run those checks
- [`changie`](https://changie.dev/) to create changelog entries, without merge conflicts - [`changie`](https://changie.dev/) to create changelog entries, without merge conflicts
- [`make`](https://users.cs.duke.edu/~ola/courses/programming/Makefiles/Makefiles.html) to run multiple setup or test steps in combination. Don't worry too much, nobody _really_ understands how `make` works, and our Makefile aims to be super simple.
- [GitHub Actions](https://github.com/features/actions) for automating tests and checks, once a PR is pushed to the `dbt-core` repository - [GitHub Actions](https://github.com/features/actions) for automating tests and checks, once a PR is pushed to the `dbt-core` repository
A deep understanding of these tools is not required to effectively contribute to `dbt-core`, but we recommend checking out the attached documentation if you're interested in learning more about each one. A deep understanding of these tools is not required to effectively contribute to `dbt-core`, but we recommend checking out the attached documentation if you're interested in learning more about each one.
#### Virtual environments #### Virtual environments
We strongly recommend using virtual environments when developing code in `dbt-core`. We recommend creating this virtualenv dbt-core uses [Hatch](https://hatch.pypa.io/) for dependency and environment management. Hatch automatically creates and manages isolated environments for development, testing, and building, so you don't need to manually create virtual environments.
in the root of the `dbt-core` repository. To create a new virtualenv, run:
```sh
python3 -m venv env
source env/bin/activate
```
This will create and activate a new Python virtual environment. For more information on how Hatch manages environments, see the [Hatch environment documentation](https://hatch.pypa.io/latest/environment/).
#### Docker and `docker-compose` #### Docker and `docker-compose`
@@ -114,22 +107,42 @@ brew install postgresql
### Installation ### Installation
First make sure that you set up your `virtualenv` as described in [Setting up an environment](#setting-up-an-environment). Also ensure you have the latest version of pip installed with `pip install --upgrade pip`. Next, install `dbt-core` (and its dependencies): First make sure you have Python 3.10 or later installed. Ensure you have the latest version of pip installed with `pip install --upgrade pip`. Next, install `hatch`. Finally set up `dbt-core` for development:
```sh ```sh
make dev cd core
hatch run setup
``` ```
or, alternatively:
This will install all development dependencies and set up pre-commit hooks.
By default, hatch will use whatever Python version is active in your environment. To specify a particular Python version, set the `HATCH_PYTHON` environment variable:
```sh ```sh
pip install -r dev-requirements.txt -r editable-requirements.txt export HATCH_PYTHON=3.12
pre-commit install hatch env create
``` ```
Or add it to your shell profile (e.g., `~/.zshrc` or `~/.bashrc`) for persistence.
When installed in this way, any changes you make to your local copy of the source code will be reflected immediately in your next `dbt` run. When installed in this way, any changes you make to your local copy of the source code will be reflected immediately in your next `dbt` run.
#### Building dbt-core
dbt-core uses [Hatch](https://hatch.pypa.io/) (specifically `hatchling`) as its build backend. To build distribution packages:
```sh
cd core
hatch build
```
This will create both wheel (`.whl`) and source distribution (`.tar.gz`) files in the `dist/` directory.
The build configuration is defined in `core/pyproject.toml`. You can also use the standard `python -m build` command if you prefer.
### Running `dbt-core` ### Running `dbt-core`
With your virtualenv activated, the `dbt` script should point back to the source code you've cloned on your machine. You can verify this by running `which dbt`. This command should show you a path to an executable in your virtualenv. Once you've run `hatch run setup`, the `dbt` command will be available in your PATH. You can verify this by running `which dbt`.
Configure your [profile](https://docs.getdbt.com/docs/configure-your-profile) as necessary to connect to your target databases. It may be a good idea to add a new profile pointing to a local Postgres instance, or a specific test sandbox within your data warehouse if appropriate. Make sure to create a profile before running integration tests. Configure your [profile](https://docs.getdbt.com/docs/configure-your-profile) as necessary to connect to your target databases. It may be a good idea to add a new profile pointing to a local Postgres instance, or a specific test sandbox within your data warehouse if appropriate. Make sure to create a profile before running integration tests.
@@ -147,9 +160,12 @@ Although `dbt-core` works with a number of different databases, you won't need t
Postgres offers the easiest way to test most `dbt-core` functionality today. They are the fastest to run, and the easiest to set up. To run the Postgres integration tests, you'll have to do one extra step of setting up the test database: Postgres offers the easiest way to test most `dbt-core` functionality today. They are the fastest to run, and the easiest to set up. To run the Postgres integration tests, you'll have to do one extra step of setting up the test database:
```sh ```sh
make setup-db cd core
hatch run setup-db
``` ```
or, alternatively:
Alternatively, you can run the setup commands directly:
```sh ```sh
docker-compose up -d database docker-compose up -d database
PGHOST=localhost PGUSER=root PGPASSWORD=password PGDATABASE=postgres bash test/setup_db.sh PGHOST=localhost PGUSER=root PGPASSWORD=password PGDATABASE=postgres bash test/setup_db.sh
@@ -159,33 +175,63 @@ PGHOST=localhost PGUSER=root PGPASSWORD=password PGDATABASE=postgres bash test/s
There are a few methods for running tests locally. There are a few methods for running tests locally.
#### Makefile #### Hatch scripts
There are multiple targets in the Makefile to run common test suites and code The primary way to run tests and checks is using hatch scripts (defined in `core/hatch.toml`):
checks, most notably:
```sh ```sh
# Runs unit tests with py38 and code checks in parallel. cd core
make test
# Runs postgres integration tests with py38 in "fail fast" mode.
make integration
```
> These make targets assume you have a local installation of a recent version of [`tox`](https://tox.readthedocs.io/en/latest/) for unit/integration testing and pre-commit for code quality checks,
> unless you use choose a Docker container to run tests. Run `make help` for more info.
Check out the other targets in the Makefile to see other commonly used test # Run all unit tests
suites. hatch run unit-tests
# Run unit tests and all code quality checks
hatch run test
# Run integration tests
hatch run integration-tests
# Run integration tests in fail-fast mode
hatch run integration-tests-fail-fast
# Run linting checks only
hatch run lint
hatch run flake8
hatch run mypy
hatch run black
# Run all pre-commit hooks
hatch run code-quality
# Clean build artifacts
hatch run clean
```
Hatch manages isolated environments and dependencies automatically. The commands above use the `default` environment which is recommended for most local development.
**Using the `ci` environment (optional)**
If you need to replicate exactly what runs in GitHub Actions (e.g., with coverage reporting), use the `ci` environment:
```sh
cd core
# Run unit tests with coverage
hatch run ci:unit-tests
# Run unit tests with a specific Python version
hatch run +py=3.11 ci:unit-tests
```
> **Note:** Most developers should use the default environment (`hatch run unit-tests`). The `ci` environment is primarily for debugging CI failures or running tests with coverage.
#### `pre-commit` #### `pre-commit`
[`pre-commit`](https://pre-commit.com) takes care of running all code-checks for formatting and linting. Run `make dev` to install `pre-commit` in your local environment (we recommend running this command with a python virtual environment active). This command installs several pip executables including black, mypy, and flake8. Once this is done you can use any of the linter-based make targets as well as a git pre-commit hook that will ensure proper formatting and linting.
#### `tox` [`pre-commit`](https://pre-commit.com) takes care of running all code-checks for formatting and linting. Run `hatch run setup` to install `pre-commit` in your local environment (we recommend running this command with a python virtual environment active). This installs several pip executables including black, mypy, and flake8. Once installed, hooks will run automatically on `git commit`, or you can run them manually with `hatch run code-quality`.
[`tox`](https://tox.readthedocs.io/en/latest/) takes care of managing virtualenvs and install dependencies in order to run tests. You can also run tests in parallel, for example, you can run unit tests for Python 3.8, Python 3.9, Python 3.10 and Python 3.11 checks in parallel with `tox -p`. Also, you can run unit tests for specific python versions with `tox -e py38`. The configuration for these tests in located in `tox.ini`.
#### `pytest` #### `pytest`
Finally, you can also run a specific test or group of tests using [`pytest`](https://docs.pytest.org/en/latest/) directly. With a virtualenv active and dev dependencies installed you can do things like: Finally, you can also run a specific test or group of tests using [`pytest`](https://docs.pytest.org/en/latest/) directly. After running `hatch run setup`, you can run pytest commands like:
```sh ```sh
# run all unit tests in a file # run all unit tests in a file

View File

@@ -47,7 +47,7 @@ RUN curl -LO https://github.com/jwilder/dockerize/releases/download/$DOCKERIZE_V
&& tar -C /usr/local/bin -xzvf dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \ && tar -C /usr/local/bin -xzvf dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \
&& rm dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz && rm dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz
RUN pip3 install -U tox wheel six setuptools pre-commit RUN pip3 install -U hatch wheel pre-commit
# These args are passed in via docker-compose, which reads then from the .env file. # These args are passed in via docker-compose, which reads then from the .env file.
# On Linux, run `make .env` to create the .env file for the current user. # On Linux, run `make .env` to create the .env file for the current user.
@@ -62,7 +62,6 @@ RUN if [ ${USER_ID:-0} -ne 0 ] && [ ${GROUP_ID:-0} -ne 0 ]; then \
useradd -mU -l dbt_test_user; \ useradd -mU -l dbt_test_user; \
fi fi
RUN mkdir /usr/app && chown dbt_test_user /usr/app RUN mkdir /usr/app && chown dbt_test_user /usr/app
RUN mkdir /home/tox && chown dbt_test_user /home/tox
WORKDIR /usr/app WORKDIR /usr/app
VOLUME /usr/app VOLUME /usr/app

1
LICENSE Symbolic link
View File

@@ -0,0 +1 @@
core/LICENSE

163
Makefile
View File

@@ -1,146 +1,95 @@
# ============================================================================
# DEPRECATED: This Makefile is maintained for backwards compatibility only.
#
# dbt-core now uses Hatch for task management and development workflows.
# Please migrate to using hatch commands directly:
#
# make dev → cd core && hatch run setup
# make unit → cd core && hatch run unit-tests
# make test → cd core && hatch run test
# make integration → cd core && hatch run integration-tests
# make lint → cd core && hatch run lint
# make code_quality → cd core && hatch run code-quality
# make setup-db → cd core && hatch run setup-db
# make clean → cd core && hatch run clean
#
# See core/pyproject.toml [tool.hatch.envs.default.scripts] for all available
# commands and CONTRIBUTING.md for detailed usage instructions.
#
# This Makefile will be removed in a future version of dbt-core.
# ============================================================================
.DEFAULT_GOAL:=help .DEFAULT_GOAL:=help
# Optional flag to run target in a docker container.
# (example `make test USE_DOCKER=true`)
ifeq ($(USE_DOCKER),true)
DOCKER_CMD := docker-compose run --rm test
endif
#
# To override CI_flags, create a file at this repo's root dir named `makefile.test.env`. Fill it
# with any ENV_VAR overrides required by your test environment, e.g.
# DBT_TEST_USER_1=user
# LOG_DIR="dir with a space in it"
#
# Warn: Restrict each line to one variable only.
#
ifeq (./makefile.test.env,$(wildcard ./makefile.test.env))
include ./makefile.test.env
endif
CI_FLAGS =\
DBT_TEST_USER_1=$(if $(DBT_TEST_USER_1),$(DBT_TEST_USER_1),dbt_test_user_1)\
DBT_TEST_USER_2=$(if $(DBT_TEST_USER_2),$(DBT_TEST_USER_2),dbt_test_user_2)\
DBT_TEST_USER_3=$(if $(DBT_TEST_USER_3),$(DBT_TEST_USER_3),dbt_test_user_3)\
RUSTFLAGS=$(if $(RUSTFLAGS),$(RUSTFLAGS),"-D warnings")\
LOG_DIR=$(if $(LOG_DIR),$(LOG_DIR),./logs)\
DBT_LOG_FORMAT=$(if $(DBT_LOG_FORMAT),$(DBT_LOG_FORMAT),json)
.PHONY: dev_req .PHONY: dev_req
dev_req: ## Installs dbt-* packages in develop mode along with only development dependencies. dev_req: ## Installs dbt-* packages in develop mode along with only development dependencies.
@\ @cd core && hatch run dev-req
pip install -r dev-requirements.txt -r editable-requirements.txt
.PHONY: dev .PHONY: dev
dev: dev_req ## Installs dbt-* packages in develop mode along with development dependencies and pre-commit. dev: ## Installs dbt-* packages in develop mode along with development dependencies and pre-commit.
@\ @cd core && hatch run setup
$(DOCKER_CMD) pre-commit install
.PHONY: dev-uninstall .PHONY: dev-uninstall
dev-uninstall: ## Uninstall all packages in venv except for build tools dev-uninstall: ## Uninstall all packages in venv except for build tools
@\ @pip freeze | grep -v "^-e" | cut -d "@" -f1 | xargs pip uninstall -y; \
pip freeze | grep -v "^-e" | cut -d "@" -f1 | xargs pip uninstall -y; \ pip uninstall -y dbt-core
pip uninstall -y dbt-core
.PHONY: mypy .PHONY: mypy
mypy: .env ## Runs mypy against staged changes for static type checking. mypy: ## Runs mypy against staged changes for static type checking.
@\ @cd core && hatch run mypy
$(DOCKER_CMD) pre-commit run --hook-stage manual mypy-check | grep -v "INFO"
.PHONY: flake8 .PHONY: flake8
flake8: .env ## Runs flake8 against staged changes to enforce style guide. flake8: ## Runs flake8 against staged changes to enforce style guide.
@\ @cd core && hatch run flake8
$(DOCKER_CMD) pre-commit run --hook-stage manual flake8-check | grep -v "INFO"
.PHONY: black .PHONY: black
black: .env ## Runs black against staged changes to enforce style guide. black: ## Runs black against staged changes to enforce style guide.
@\ @cd core && hatch run black
$(DOCKER_CMD) pre-commit run --hook-stage manual black-check -v | grep -v "INFO"
.PHONY: lint .PHONY: lint
lint: .env ## Runs flake8 and mypy code checks against staged changes. lint: ## Runs flake8 and mypy code checks against staged changes.
@\ @cd core && hatch run lint
$(DOCKER_CMD) pre-commit run flake8-check --hook-stage manual | grep -v "INFO"; \
$(DOCKER_CMD) pre-commit run mypy-check --hook-stage manual | grep -v "INFO" .PHONY: code_quality
code_quality: ## Runs all pre-commit hooks against all files.
@cd core && hatch run code-quality
.PHONY: unit .PHONY: unit
unit: .env ## Runs unit tests with py unit: ## Runs unit tests with py
@\ @cd core && hatch run unit-tests
$(DOCKER_CMD) tox -e py
.PHONY: test .PHONY: test
test: .env ## Runs unit tests with py and code checks against staged changes. test: ## Runs unit tests with py and code checks against staged changes.
@\ @cd core && hatch run test
$(DOCKER_CMD) tox -e py; \
$(DOCKER_CMD) pre-commit run black-check --hook-stage manual | grep -v "INFO"; \
$(DOCKER_CMD) pre-commit run flake8-check --hook-stage manual | grep -v "INFO"; \
$(DOCKER_CMD) pre-commit run mypy-check --hook-stage manual | grep -v "INFO"
.PHONY: integration .PHONY: integration
integration: .env ## Runs core integration tests using postgres with py-integration integration: ## Runs core integration tests using postgres with py-integration
@\ @cd core && hatch run integration-tests
$(CI_FLAGS) $(DOCKER_CMD) tox -e py-integration -- -nauto
.PHONY: integration-fail-fast .PHONY: integration-fail-fast
integration-fail-fast: .env ## Runs core integration tests using postgres with py-integration in "fail fast" mode. integration-fail-fast: ## Runs core integration tests using postgres with py-integration in "fail fast" mode.
@\ @cd core && hatch run integration-tests-fail-fast
$(DOCKER_CMD) tox -e py-integration -- -x -nauto
.PHONY: interop
interop: clean
@\
mkdir $(LOG_DIR) && \
$(CI_FLAGS) $(DOCKER_CMD) tox -e py-integration -- -nauto && \
LOG_DIR=$(LOG_DIR) cargo run --manifest-path test/interop/log_parsing/Cargo.toml
.PHONY: setup-db .PHONY: setup-db
setup-db: ## Setup Postgres database with docker-compose for system testing. setup-db: ## Setup Postgres database with docker-compose for system testing.
@\ @cd core && hatch run setup-db
docker compose up -d database && \
PGHOST=localhost PGUSER=root PGPASSWORD=password PGDATABASE=postgres SKIP_HOMEBREW=true bash test/setup_db.sh
# This rule creates a file named .env that is used by docker-compose for passing
# the USER_ID and GROUP_ID arguments to the Docker image.
.env: ## Setup step for using using docker-compose with make target.
@touch .env
ifneq ($(OS),Windows_NT)
ifneq ($(shell uname -s), Darwin)
@echo USER_ID=$(shell id -u) > .env
@echo GROUP_ID=$(shell id -g) >> .env
endif
endif
.PHONY: clean .PHONY: clean
clean: ## Resets development environment. clean: ## Resets development environment.
@echo 'cleaning repo...' @cd core && hatch run clean
@rm -f .coverage
@rm -f .coverage.*
@rm -rf .eggs/
@rm -f .env
@rm -rf .tox/
@rm -rf build/
@rm -rf dbt.egg-info/
@rm -f dbt_project.yml
@rm -rf dist/
@rm -f htmlcov/*.{css,html,js,json,png}
@rm -rf logs/
@rm -rf target/
@find . -type f -name '*.pyc' -delete
@find . -type d -name '__pycache__' -depth -delete
@echo 'done.'
.PHONY: json_schema
json_schema: ## Update generated JSON schema using code changes.
@cd core && hatch run json-schema
.PHONY: help .PHONY: help
help: ## Show this help message. help: ## Show this help message.
@echo 'usage: make [target] [USE_DOCKER=true]' @echo 'usage: make [target]'
@echo
@echo 'DEPRECATED: This Makefile is a compatibility shim.'
@echo 'Please use "cd core && hatch run <command>" directly.'
@echo @echo
@echo 'targets:' @echo 'targets:'
@grep -E '^[8+a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' @grep -E '^[8+a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
@echo @echo
@echo 'options:' @echo 'For more information, see CONTRIBUTING.md'
@echo 'use USE_DOCKER=true to run target in a docker container'
.PHONY: json_schema
json_schema: ## Update generated JSON schema using code changes.
scripts/collect-artifact-schema.py --path schemas

View File

@@ -1,4 +0,0 @@
recursive-include dbt/include *.py *.sql *.yml *.html *.md .gitkeep .gitignore
include dbt/py.typed
recursive-include dbt/task/docs *.html
recursive-include dbt/jsonschemas *.json

1
core/dbt/__version__.py Normal file
View File

@@ -0,0 +1 @@
version = "1.12.0a1"

View File

@@ -3,14 +3,13 @@ import importlib
import importlib.util import importlib.util
import json import json
import os import os
import re
from importlib import metadata as importlib_metadata from importlib import metadata as importlib_metadata
from pathlib import Path
from typing import Iterator, List, Optional, Tuple from typing import Iterator, List, Optional, Tuple
import requests import requests
import dbt_common.semver as semver import dbt_common.semver as semver
from dbt.__version__ import version as __version_string
from dbt_common.ui import green, yellow from dbt_common.ui import green, yellow
PYPI_VERSION_URL = "https://pypi.org/pypi/dbt-core/json" PYPI_VERSION_URL = "https://pypi.org/pypi/dbt-core/json"
@@ -233,16 +232,8 @@ def _resolve_version() -> str:
try: try:
return importlib_metadata.version("dbt-core") return importlib_metadata.version("dbt-core")
except importlib_metadata.PackageNotFoundError: except importlib_metadata.PackageNotFoundError:
pyproject_path = Path(__file__).resolve().parents[1] / "pyproject.toml" # When running from source (not installed), use version from __version__.py
if not pyproject_path.exists(): return __version_string
raise RuntimeError("Unable to locate pyproject.toml to determine dbt-core version")
text = pyproject_path.read_text(encoding="utf-8")
match = re.search(r'^version\s*=\s*"(?P<version>[^"]+)"', text, re.MULTILINE)
if match:
return match.group("version")
raise RuntimeError("Unable to determine dbt-core version from pyproject.toml")
__version__ = _resolve_version() __version__ = _resolve_version()

206
core/hatch.toml Normal file
View File

@@ -0,0 +1,206 @@
[version]
path = "dbt/__version__.py"
[build.targets.wheel]
packages = ["dbt"]
only-packages = true
exclude = [
"**/*.md",
]
artifacts = [
"dbt/include/**/*.py",
"dbt/include/**/*.sql",
"dbt/include/**/*.yml",
"dbt/include/**/*.html",
"dbt/include/**/*.md",
"dbt/include/**/.gitkeep",
"dbt/include/**/.gitignore",
"dbt/task/docs/**/*.html",
"dbt/jsonschemas/**/*.json",
"dbt/py.typed",
# Directories without __init__.py (namespace packages)
"dbt/artifacts/resources/v1/**/*.py",
"dbt/artifacts/utils/**/*.py",
"dbt/event_time/**/*.py",
"dbt/docs/source/**/*.py",
"dbt/tests/util.py",
]
[build.targets.sdist]
include = [
"/dbt",
"/README.md",
]
[build.targets.sdist.force-include]
"dbt/task/docs/index.html" = "dbt/task/docs/index.html"
[envs.default]
# Python 3.10-3.11 required locally due to flake8==4.0.1 compatibility
# CI uses [envs.ci] which doesn't set python, allowing matrix testing
python = "3.11"
dependencies = [
# Git dependencies for development against main branches
"dbt-adapters @ git+https://github.com/dbt-labs/dbt-adapters.git@main#subdirectory=dbt-adapters",
"dbt-tests-adapter @ git+https://github.com/dbt-labs/dbt-adapters.git@main#subdirectory=dbt-tests-adapter",
"dbt-common @ git+https://github.com/dbt-labs/dbt-common.git@main",
"dbt-postgres @ git+https://github.com/dbt-labs/dbt-adapters.git@main#subdirectory=dbt-postgres",
# Code quality
"pre-commit~=3.7.0",
"black>=24.3,<25.0",
"flake8==4.0.1", # requires python <3.12
"mypy==1.4.1", # update requires code fixes
"isort==5.13.2",
# Testing
"pytest>=7.0,<8.0",
"pytest-xdist~=3.6",
"pytest-csv~=3.0",
"pytest-cov",
"pytest-dotenv",
"pytest-mock",
"pytest-split",
"pytest-logbook~=1.2",
"logbook<1.9",
"flaky",
"freezegun>=1.5.1",
"hypothesis",
"mocker",
# Debugging
"ipdb",
"ddtrace==2.21.3",
# Documentation
"docutils",
"sphinx",
# Type stubs
"types-docutils",
"types-PyYAML",
"types-Jinja2",
"types-jsonschema",
"types-mock",
"types-protobuf>=5.0,<6.0",
"types-python-dateutil",
"types-pytz",
"types-requests",
"types-setuptools",
# Other
"pip-tools",
"protobuf>=6.0,<7.0",
]
[envs.default.scripts]
# Setup commands
setup = [
"pip install -e .",
"pre-commit install",
]
# Code quality commands
code-quality = "pre-commit run --all-files --show-diff-on-failure"
lint = [
"pre-commit run flake8-check --hook-stage manual --all-files",
"pre-commit run mypy-check --hook-stage manual --all-files",
]
flake8 = "pre-commit run flake8-check --hook-stage manual --all-files"
mypy = "pre-commit run mypy-check --hook-stage manual --all-files"
black = "pre-commit run black-check --hook-stage manual --all-files"
# Testing commands
unit-tests = "python -m pytest {args} ../tests/unit"
integration-tests = "python -m pytest -nauto {args} ../tests/functional"
integration-tests-fail-fast = "python -m pytest -x -nauto {args} ../tests/functional"
test = [
"python -m pytest ../tests/unit",
"pre-commit run black-check --hook-stage manual --all-files",
"pre-commit run flake8-check --hook-stage manual --all-files",
"pre-commit run mypy-check --hook-stage manual --all-files",
]
# Database setup
setup-db = [
"docker compose up -d database",
"bash ../test/setup_db.sh",
]
# Utility commands
clean = [
"rm -f .coverage",
"rm -f .coverage.*",
"rm -rf .eggs/",
"rm -rf build/",
"rm -rf dbt.egg-info/",
"rm -f dbt_project.yml",
"rm -rf dist/",
"find . -type f -name '*.pyc' -delete",
"find . -type d -name __pycache__ -exec rm -rf {} +",
]
json-schema = "python ../scripts/collect-artifact-schema.py --path ../schemas"
[envs.build]
python = "3.11"
detached = true
dependencies = [
"wheel",
"twine",
"check-wheel-contents",
]
[envs.build.scripts]
check-all = [
"- check-wheel",
"- check-sdist",
]
check-wheel = [
"twine check dist/*",
"find ./dist/dbt_core-*.whl -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/",
"pip freeze | grep dbt-core",
"dbt --version",
]
check-sdist = [
"check-wheel-contents dist/*.whl --ignore W007,W008",
"find ./dist/dbt_core-*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/",
"pip freeze | grep dbt-core",
"dbt --version",
]
# CI environment - isolated environment with test dependencies
[envs.ci]
dependencies = [
# Git dependencies for development against main branches
"dbt-adapters @ git+https://github.com/dbt-labs/dbt-adapters.git@main#subdirectory=dbt-adapters",
"dbt-tests-adapter @ git+https://github.com/dbt-labs/dbt-adapters.git@main#subdirectory=dbt-tests-adapter",
"dbt-common @ git+https://github.com/dbt-labs/dbt-common.git@main",
"dbt-postgres @ git+https://github.com/dbt-labs/dbt-adapters.git@main#subdirectory=dbt-postgres",
# Testing
"pytest>=7.0,<8.0",
"pytest-cov",
"pytest-xdist~=3.6",
"pytest-csv~=3.0",
"pytest-dotenv",
"pytest-mock",
"pytest-split",
"ddtrace==2.21.3",
"flaky",
"freezegun>=1.5.1",
"hypothesis",
]
pre-install-commands = [
"pip install -e .",
]
[envs.ci.env-vars]
DBT_TEST_USER_1 = "dbt_test_user_1"
DBT_TEST_USER_2 = "dbt_test_user_2"
DBT_TEST_USER_3 = "dbt_test_user_3"
[envs.ci.scripts]
unit-tests = "python -m pytest --cov=dbt --cov-report=xml {args} ../tests/unit"
# Run as single command to avoid pre-install-commands running twice
integration-tests = """
python -m pytest --cov=dbt --cov-append --cov-report=xml {args} ../tests/functional -k "not tests/functional/graph_selection" && \
python -m pytest --cov=dbt --cov-append --cov-report=xml {args} ../tests/functional/graph_selection
"""
# Note: Python version matrix is handled by GitHub Actions CI, not hatch.
# This avoids running tests 4x per job. The CI sets up the Python version
# and hatch uses whatever Python is active.

View File

@@ -1,38 +1,12 @@
[tool.setuptools]
package-dir = {"" = "."}
include-package-data = true
zip-safe = false
[tool.setuptools.packages.find]
where = ["."]
include = [
"dbt",
"dbt.*",
]
# this needs to match MANIFEST.in for the wheels
[tool.setuptools.package-data]
"dbt" = [
"include/**/*.py",
"include/**/*.sql",
"include/**/*.yml",
"include/**/*.html",
"include/**/*.md",
"include/**/.gitkeep",
"include/**/.gitignore",
"task/docs/**/*.html",
"jsonschemas/**/*.json",
"py.typed",
]
[project] [project]
name = "dbt-core" name = "dbt-core"
version = "1.12.0a1" dynamic = ["version"]
description = "With dbt, data analysts and engineers can build analytics the way engineers build applications." description = "With dbt, data analysts and engineers can build analytics the way engineers build applications."
readme = "README.md" readme = "README.md"
requires-python = ">=3.10" requires-python = ">=3.10"
license = "Apache-2.0" license = "Apache-2.0"
license-files = ["License.md"] # License.md copied to core/ by build script even though it lives at the root by convention license-files = { globs = ["LICENSE"] }
keywords = [] keywords = []
authors = [ authors = [
{ name = "dbt Labs", email = "info@dbtlabs.com" }, { name = "dbt Labs", email = "info@dbtlabs.com" },
@@ -102,9 +76,10 @@ Changelog = "https://github.com/dbt-labs/dbt-core/blob/main/CHANGELOG.md"
[project.scripts] [project.scripts]
dbt = "dbt.cli.main:cli" dbt = "dbt.cli.main:cli"
[tool.hatch.version]
path = "dbt/__version__.py"
[build-system] [build-system]
requires = [ requires = ["hatchling"]
"setuptools>=61", build-backend = "hatchling.build"
"wheel",
]
build-backend = "setuptools.build_meta"

View File

@@ -1,26 +0,0 @@
#!/usr/bin/env python
"""Legacy setuptools shim retained for compatibility with existing workflows. Will be removed in a future version."""
from setuptools import setup
# the user has a downlevel version of setuptools.
# ----
# dbt-core uses these packages deeply, throughout the codebase, and there have been breaking changes in past patch releases (even though these are major-version-one).
# Pin to the patch or minor version, and bump in each new minor version of dbt-core.
# ----
# dbt-core uses these packages in standard ways. Pin to the major version, and check compatibility
# with major versions in each new minor version of dbt-core.
# ----
# These packages are major-version-0. Keep upper bounds on upcoming minor versions (which could have breaking changes)
# and check compatibility / bump in each new minor version of dbt-core.
# ----
# These are major-version-0 packages also maintained by dbt-labs.
# Accept patches but avoid automatically updating past a set minor version range.
# Minor versions for these are expected to be backwards-compatible
# ----
# Expect compatibility with all new versions of these packages, so lower bounds only.
# ----
if __name__ == "__main__":
setup()

View File

@@ -1,38 +0,0 @@
git+https://github.com/dbt-labs/dbt-adapters.git@main#subdirectory=dbt-adapters
git+https://github.com/dbt-labs/dbt-adapters.git@main#subdirectory=dbt-tests-adapter
git+https://github.com/dbt-labs/dbt-common.git@main
git+https://github.com/dbt-labs/dbt-adapters.git@main#subdirectory=dbt-postgres
black==24.3.0
bumpversion
ddtrace==2.21.3
docutils
flake8==4.0.1
flaky
freezegun>=1.5.1
hypothesis
ipdb
isort==5.13.2
mypy==1.4.1
pip-tools
pre-commit
protobuf>=6.0,<7.0
pytest>=7.4,<8.0
pytest-cov
pytest-csv>=3.0,<4.0
pytest-dotenv
pytest-mock
pytest-split
pytest-xdist
sphinx
tox>=3.13
types-docutils
types-PyYAML
types-Jinja2
types-jsonschema
types-mock
types-protobuf>=5.0,<6.0
types-python-dateutil
types-pytz
types-requests
types-setuptools
mocker

View File

@@ -23,7 +23,7 @@ services:
# Run `make .env` to set $USER_ID and $GROUP_ID # Run `make .env` to set $USER_ID and $GROUP_ID
USER_ID: ${USER_ID:-} USER_ID: ${USER_ID:-}
GROUP_ID: ${GROUP_ID:-} GROUP_ID: ${GROUP_ID:-}
command: "/root/.virtualenvs/dbt/bin/pytest" command: "bash -c 'cd core && hatch run ci:unit-tests'"
environment: environment:
POSTGRES_TEST_HOST: "database" POSTGRES_TEST_HOST: "database"
volumes: volumes:

View File

@@ -1 +0,0 @@
-e ./core

View File

@@ -17,15 +17,11 @@ rm -rf "$DBT_PATH"/core/build
mkdir -p "$DBT_PATH"/dist mkdir -p "$DBT_PATH"/dist
# Copy License.md to core/ for inclusion in distribution (required by Apache 2.0)
# The license-files in pyproject.toml references it relative to core/
cp "$DBT_PATH"/License.md "$DBT_PATH"/core/License.md
cd "$DBT_PATH"/core cd "$DBT_PATH"/core
$PYTHON_BIN -m pip install --upgrade build $PYTHON_BIN -m pip install --upgrade hatch
$PYTHON_BIN -m build --outdir "$DBT_PATH/dist" hatch build --clean
# Clean up License.md that was copied to core/ for build # Move built distributions to top-level dist/
rm -f "$DBT_PATH/core/License.md" mv "$DBT_PATH"/core/dist/* "$DBT_PATH"/dist/
set +x set +x

View File

@@ -1,5 +1,6 @@
#!/bin/bash #!/bin/bash
# Set environment variables required for integration tests # Set environment variables required for integration tests
# This is used in the release workflow to set the environment variables for the integration tests
echo "DBT_INVOCATION_ENV=github-actions" >> $GITHUB_ENV echo "DBT_INVOCATION_ENV=github-actions" >> $GITHUB_ENV
echo "DBT_TEST_USER_1=dbt_test_user_1" >> $GITHUB_ENV echo "DBT_TEST_USER_1=dbt_test_user_1" >> $GITHUB_ENV
echo "DBT_TEST_USER_2=dbt_test_user_2" >> $GITHUB_ENV echo "DBT_TEST_USER_2=dbt_test_user_2" >> $GITHUB_ENV

37
tox.ini
View File

@@ -1,37 +0,0 @@
[tox]
skipsdist = True
envlist = unit,integration
[testenv:{unit,py38,py39,py310,py311,py}]
description = unit testing
download = true
skip_install = true
passenv =
DBT_*
PYTEST_ADDOPTS
commands =
{envpython} -m pytest --cov=core --cov-report=xml {posargs} tests/unit
deps =
-rdev-requirements.txt
-reditable-requirements.txt
[testenv:{integration,py38-integration,py39-integration,py310-integration,py311-integration,py-integration}]
description = functional testing
download = true
skip_install = true
passenv =
DBT_*
POSTGRES_TEST_*
PYTEST_ADDOPTS
DD_CIVISIBILITY_AGENTLESS_ENABLED
DD_API_KEY
DD_SITE
DD_ENV
DD_SERVICE
commands =
{envpython} -m pytest --cov=core --cov-append --cov-report=xml {posargs} tests/functional -k "not tests/functional/graph_selection"
{envpython} -m pytest --cov=core --cov-append --cov-report=xml {posargs} tests/functional/graph_selection
deps =
-rdev-requirements.txt
-reditable-requirements.txt