Compare commits


12 Commits

Author  SHA1  Message  Date
Emily Rockman  d1f57566a9  use make  2025-03-04 11:19:37 -06:00
Peter Allen Webb  e6a0a1a86a  Remove source indicator.  2025-03-04 11:43:12 -05:00
Peter Allen Webb  e8d10ea3c3  Add source indicator.  2025-03-04 11:33:06 -05:00
Peter Allen Webb  c6a18a3fb0  Change script invocation path.  2025-03-04 11:23:41 -05:00
Peter Allen Webb  18ea5d1c73  More debug logging.  2025-03-04 11:15:50 -05:00
Peter Allen Webb  0e5dc412c6  Set execute bit on scripts.  2025-03-04 11:11:44 -05:00
Peter Allen Webb  463bb6c1d0  Add debug logging.  2025-03-04 11:07:25 -05:00
Peter Allen Webb  f1fc49ba8c  Add sudos.  2025-03-04 10:57:27 -05:00
Peter Allen Webb  2e4eccb55c  Change owner of db creation script so postgres can run it.  2025-03-04 10:53:56 -05:00
Emily Rockman  0e5761dbbb  try postgres update  2025-03-04 08:58:06 -06:00
Emily Rockman  8c3b1799a7  updates to ubuntu-latest instead  2025-03-04 08:43:31 -06:00
Emily Rockman  466ee24b86  update ubuntu 20.04 to 24.04  2025-03-04 08:35:25 -06:00
4602 changed files with 76687 additions and 58664 deletions

.bumpversion.cfg Normal file
View File

@@ -0,0 +1,37 @@
[bumpversion]
current_version = 1.10.0a1
parse = (?P<major>[\d]+) # major version number
\.(?P<minor>[\d]+) # minor version number
\.(?P<patch>[\d]+) # patch version number
(?P<prerelease> # optional pre-release - ex: a1, b2, rc25
(?P<prekind>a|b|rc) # pre-release type
(?P<num>[\d]+) # pre-release version number
)?
( # optional nightly release indicator
\.(?P<nightly>dev[0-9]+) # ex: .dev02142023
)? # expected matches: `1.15.0`, `1.5.0a11`, `1.5.0a1.dev123`, `1.5.0.dev123457`, expected failures: `1`, `1.5`, `1.5.2-a1`, `text1.5.0`
serialize =
{major}.{minor}.{patch}{prekind}{num}.{nightly}
{major}.{minor}.{patch}.{nightly}
{major}.{minor}.{patch}{prekind}{num}
{major}.{minor}.{patch}
commit = False
tag = False
[bumpversion:part:prekind]
first_value = a
optional_value = final
values =
a
b
rc
final
[bumpversion:part:num]
first_value = 1
[bumpversion:part:nightly]
[bumpversion:file:core/setup.py]
[bumpversion:file:core/dbt/version.py]
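As an aside (not part of the diff): the `parse` setting above is a verbose regular expression, and the inline comment lists the version strings it should and should not match. A minimal Python sketch, assuming an equivalent pattern compiled with `re.VERBOSE`, that exercises those documented cases:

```python
import re

# Equivalent of the bumpversion `parse` pattern above, compiled in verbose mode
# so the inline comments survive. Illustrative sketch only, not repo code.
VERSION_RE = re.compile(
    r"""
    (?P<major>\d+)                 # major version number
    \.(?P<minor>\d+)               # minor version number
    \.(?P<patch>\d+)               # patch version number
    (?P<prerelease>                # optional pre-release - ex: a1, b2, rc25
        (?P<prekind>a|b|rc)        # pre-release type
        (?P<num>\d+)               # pre-release version number
    )?
    (                              # optional nightly release indicator
        \.(?P<nightly>dev[0-9]+)   # ex: .dev02142023
    )?
    """,
    re.VERBOSE,
)

# Expected matches from the config comment
for version in ["1.15.0", "1.5.0a11", "1.5.0a1.dev123", "1.5.0.dev123457"]:
    assert VERSION_RE.fullmatch(version), version

# Expected failures from the config comment
for version in ["1", "1.5", "1.5.2-a1", "text1.5.0"]:
    assert VERSION_RE.fullmatch(version) is None, version
```

bumpversion applies its `parse` pattern when reading `current_version` and the version strings in the listed files; the sketch only illustrates which strings the documented pattern is meant to accept.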

View File

@@ -3,9 +3,6 @@
For information on prior major and minor releases, see their changelogs: For information on prior major and minor releases, see their changelogs:
* [1.10](https://github.com/dbt-labs/dbt-core/blob/1.10.latest/CHANGELOG.md)
* [1.9](https://github.com/dbt-labs/dbt-core/blob/1.9.latest/CHANGELOG.md)
* [1.8](https://github.com/dbt-labs/dbt-core/blob/1.8.latest/CHANGELOG.md)
* [1.7](https://github.com/dbt-labs/dbt-core/blob/1.7.latest/CHANGELOG.md) * [1.7](https://github.com/dbt-labs/dbt-core/blob/1.7.latest/CHANGELOG.md)
* [1.6](https://github.com/dbt-labs/dbt-core/blob/1.6.latest/CHANGELOG.md) * [1.6](https://github.com/dbt-labs/dbt-core/blob/1.6.latest/CHANGELOG.md)
* [1.5](https://github.com/dbt-labs/dbt-core/blob/1.5.latest/CHANGELOG.md) * [1.5](https://github.com/dbt-labs/dbt-core/blob/1.5.latest/CHANGELOG.md)

View File

@@ -0,0 +1,6 @@
kind: Breaking Changes
body: Add invocations_started_at field to artifact metadata
time: 2025-02-10T12:33:06.722803-05:00
custom:
Author: gshank
Issue: "11272"

View File

@@ -0,0 +1,6 @@
kind: Dependencies
body: Upgrading dbt-semantic-interfaces to 0.8.3 for custom grain support in offset windows
time: 2024-11-12T16:38:15.351519-05:00
custom:
Author: WilliamDee
Issue: None

View File

@@ -0,0 +1,6 @@
kind: "Dependencies"
body: "Bump codecov/codecov-action from 4 to 5"
time: 2024-11-18T00:11:13.00000Z
custom:
Author: dependabot[bot]
Issue: 11009

View File

@@ -1,6 +0,0 @@
kind: Dependencies
body: Use EventCatcher from dbt-common instead of maintaining a local copy
time: 2025-11-18T15:53:54.284561+05:30
custom:
Author: 3loka
Issue: "12124"

View File

@@ -0,0 +1,6 @@
kind: Features
body: Add new hard_deletes="new_record" mode for snapshots.
time: 2024-11-04T12:00:53.95191-05:00
custom:
Author: peterallenwebb
Issue: "10235"

View File

@@ -0,0 +1,6 @@
kind: Features
body: Add `batch` context object to model jinja context
time: 2024-11-21T12:56:30.715473-06:00
custom:
Author: QMalcolm
Issue: "11025"

View File

@@ -0,0 +1,7 @@
kind: Features
body: Ensure pre/post hooks only run on first/last batch respectively for microbatch
model batches
time: 2024-12-06T19:53:08.928793-06:00
custom:
Author: MichelleArk QMalcolm
Issue: 11094 11104

View File

@@ -0,0 +1,6 @@
kind: Features
body: Support "tags" in Saved Queries
time: 2024-12-16T09:54:35.327675-08:00
custom:
Author: theyostalservice
Issue: "11155"

View File

@@ -0,0 +1,6 @@
kind: Features
body: Calculate source freshness via a SQL query
time: 2024-12-17T17:16:31.841076-08:00
custom:
Author: ChenyuLInx
Issue: "8797"

View File

@@ -0,0 +1,6 @@
kind: Features
body: Add freshness definition on model for adaptive job
time: 2024-12-18T17:07:29.55754-08:00
custom:
Author: ChenyuLInx
Issue: "11123"

View File

@@ -0,0 +1,6 @@
kind: Features
body: Meta config for dimensions measures and entities
time: 2025-01-06T13:28:29.176439-06:00
custom:
Author: DevonFulcher
Issue: None

View File

@@ -0,0 +1,6 @@
kind: Features
body: Add doc_blocks to manifest for nodes and columns
time: 2025-01-22T17:03:28.866522Z
custom:
Author: aranke
Issue: 11000 11001

View File

@@ -0,0 +1,6 @@
kind: Features
body: Initial implementation of sample mode
time: 2025-02-02T14:00:54.074209-06:00
custom:
Author: QMalcolm
Issue: 11227 11230 11231 11248 11252 11254 11258

View File

@@ -0,0 +1,6 @@
kind: Features
body: Combine `--sample` and `--sample-window` CLI params
time: 2025-02-12T15:56:58.546879-06:00
custom:
Author: QMalcolm
Issue: "11299"

View File

@@ -0,0 +1,6 @@
kind: Features
body: Allow for sampling of ref'd seeds
time: 2025-02-12T17:37:43.554156-06:00
custom:
Author: QMalcolm
Issue: "11300"

View File

@@ -0,0 +1,6 @@
kind: Features
body: Enable sample mode for 'build' command
time: 2025-02-13T18:29:32.238857-06:00
custom:
Author: QMalcolm
Issue: "11298"

View File

@@ -0,0 +1,6 @@
kind: Features
body: Allow sampling nodes snapshots depend on and of snapshots as a dependency
time: 2025-02-14T15:29:57.118017-06:00
custom:
Author: QMalcolm
Issue: "11301"

View File

@@ -1,6 +0,0 @@
kind: Features
body: Support partial parsing for function nodes
time: 2025-10-06T14:03:52.258104-05:00
custom:
Author: QMalcolm
Issue: "12072"

View File

@@ -1,6 +0,0 @@
kind: Features
body: Allow for defining function arguments with default values
time: 2025-11-17T14:10:53.860178-06:00
custom:
Author: QMalcolm
Issue: "12044"

View File

@@ -1,6 +0,0 @@
kind: Features
body: Raise jsonschema-based deprecation warnings by default
time: 2025-12-01T16:52:09.354436-05:00
custom:
Author: michelleark
Issue: 12240

View File

@@ -1,6 +0,0 @@
kind: Features
body: ':bug: :snowman: Disable unit tests whose model is disabled'
time: 2025-12-03T12:29:26.209248-05:00
custom:
Author: michelleark
Issue: "10540"

View File

@@ -1,6 +0,0 @@
kind: Features
body: Implement config.meta_get and config.meta_require
time: 2025-12-10T20:20:01.354288-05:00
custom:
Author: gshank
Issue: "12012"

View File

@@ -0,0 +1,6 @@
kind: Fixes
body: dbt retry does not respect --threads
time: 2024-08-22T12:21:32.358066+05:30
custom:
Author: donjin-master
Issue: "10584"

View File

@@ -0,0 +1,6 @@
kind: Fixes
body: update adapter version messages
time: 2024-10-25T10:43:39.274723-05:00
custom:
Author: dave-connors-3
Issue: "10230"

View File

@@ -0,0 +1,6 @@
kind: Fixes
body: Catch DbtRuntimeError for hooks
time: 2024-11-21T18:17:39.753235Z
custom:
Author: aranke
Issue: "11012"

View File

@@ -0,0 +1,6 @@
kind: Fixes
body: Access DEBUG flag more consistently with the rest of the codebase in ManifestLoader
time: 2024-11-28T16:29:36.236729+01:00
custom:
Author: Threynaud
Issue: "11068"

View File

@@ -0,0 +1,6 @@
kind: Fixes
body: Improve the performance characteristics of add_test_edges()
time: 2024-12-04T10:04:29.096231-05:00
custom:
Author: peterallenwebb
Issue: "10950"

View File

@@ -0,0 +1,6 @@
kind: Fixes
body: Implement partial parsing for singular data test configs in yaml files
time: 2024-12-05T14:53:07.295536-05:00
custom:
Author: gshank
Issue: "10801"

View File

@@ -0,0 +1,6 @@
kind: Fixes
body: Fix debug log messages for microbatch batch execution information
time: 2024-12-09T11:38:06.972743-06:00
custom:
Author: MichelleArk QMalcolm
Issue: "11111"

View File

@@ -0,0 +1,6 @@
kind: Fixes
body: Fix running of extra "last" batch when there is only one batch
time: 2024-12-09T13:33:17.253326-06:00
custom:
Author: QMalcolm
Issue: "11112"

View File

@@ -0,0 +1,6 @@
kind: Fixes
body: Fix interpretation of `PartialSuccess` to result in non-zero exit code
time: 2024-12-09T15:07:11.391313-06:00
custom:
Author: QMalcolm
Issue: "11114"

View File

@@ -0,0 +1,6 @@
kind: Fixes
body: Warn about invalid usages of `concurrent_batches` config
time: 2024-12-12T11:36:11.451962-06:00
custom:
Author: QMalcolm
Issue: "11122"

View File

@@ -0,0 +1,6 @@
kind: Fixes
body: Error writing generic test at run time
time: 2024-12-16T13:46:45.936573-05:00
custom:
Author: gshank
Issue: "11110"

View File

@@ -0,0 +1,6 @@
kind: Fixes
body: Run check_modified_contract for state:modified
time: 2024-12-17T15:48:48.053054-05:00
custom:
Author: gshank
Issue: "11034"

View File

@@ -0,0 +1,6 @@
kind: Fixes
body: Fix unrendered_config for tests from dbt_project.yml
time: 2024-12-18T11:26:40.270022-05:00
custom:
Author: gshank
Issue: "11146"

View File

@@ -0,0 +1,6 @@
kind: Fixes
body: Make partial parsing reparse referencing nodes of newly versioned models.
time: 2025-01-02T14:05:43.629959-05:00
custom:
Author: d-cole
Issue: "8872"

View File

@@ -0,0 +1,6 @@
kind: Fixes
body: Ensure warning about microbatch lacking filter inputs is always fired
time: 2025-01-07T17:37:19.373261-06:00
custom:
Author: QMalcolm
Issue: "11159"

View File

@@ -0,0 +1,6 @@
kind: Fixes
body: Fix microbatch dbt list --output json
time: 2025-01-09T12:33:09.958795+01:00
custom:
Author: internetcoffeephone
Issue: 10556 11098

View File

@@ -0,0 +1,6 @@
kind: Fixes
body: Fix for custom fields in generic test config for not_null and unique tests
time: 2025-01-10T15:58:24.479245-05:00
custom:
Author: gshank
Issue: "11208"

View File

@@ -0,0 +1,6 @@
kind: Fixes
body: Loosen validation on freshness to accommodate previously wrong but harmless config.
time: 2025-01-28T13:55:09.318833-08:00
custom:
Author: ChenyuLInx peterallenwebb
Issue: "11123"

View File

@@ -0,0 +1,6 @@
kind: Fixes
body: Handle `--limit -1` properly in `ShowTaskDirect` so that it propagates None instead of a negative int
time: 2025-02-07T13:14:24.725503-05:00
custom:
Author: WilliamDee
Issue: None

View File

@@ -0,0 +1,6 @@
kind: Fixes
body: _get_doc_blocks is crashing parsing if .format is called
time: 2025-02-18T13:47:45.659731Z
custom:
Author: aranke
Issue: "11310"

View File

@@ -0,0 +1,6 @@
kind: Fixes
body: Fix microbatch execution to not block main thread nor hang
time: 2025-03-03T13:14:40.432874-06:00
custom:
Author: QMalcolm
Issue: 11243 11306

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: Address Click 8.2+ deprecation warning
time: 2025-09-22T15:17:26.983151-06:00
custom:
Author: edgarrmondragon
Issue: "12038"

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: Include macros in unit test parsing
time: 2025-11-17T14:06:49.518566-05:00
custom:
Author: michelleark nathanskone
Issue: "10157"

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: Allow dbt deps to run when vars lack defaults in dbt_project.yml
time: 2025-11-17T18:50:25.759091+05:30
custom:
Author: 3loka
Issue: "8913"

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: Restore DuplicateResourceNameError for intra-project node name duplication, behind behavior flag `require_unique_project_resource_names`
time: 2025-11-18T17:11:06.454784-05:00
custom:
Author: michelleark
Issue: "12152"

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: Allow the usage of `function` with `--exclude-resource-type` flag
time: 2025-11-19T19:50:34.703236-06:00
custom:
Author: QMalcolm
Issue: "12143"

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: Fix bug where schemas of functions weren't guaranteed to exist
time: 2025-11-24T15:56:29.467004-06:00
custom:
Author: QMalcolm
Issue: "12142"

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: Fix generation of deprecations summary
time: 2025-11-24T15:57:56.544123-08:00
custom:
Author: asiunov
Issue: "12146"

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: ':bug: :snowman: Correctly reference foreign key references when --defer and --state provided'
time: 2025-11-24T17:08:55.387946-05:00
custom:
Author: michelleark
Issue: "11885"

View File

@@ -1,7 +0,0 @@
kind: Fixes
body: ':bug: :snowman: Add exception when using --state and referring to a removed
test'
time: 2025-11-25T12:02:46.635026-05:00
custom:
Author: emmyoop
Issue: "10630"

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: ':bug: :snowman: Stop emitting `NoNodesForSelectionCriteria` three times during `build` command'
time: 2025-11-25T12:20:20.132379-06:00
custom:
Author: QMalcolm
Issue: "11627"

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: ":bug: :snowman: Fix long Python stack traces appearing when package dependencies have incompatible version requirements"
time: 2025-11-27T14:13:08.082542-05:00
custom:
Author: emmyoop
Issue: "12049"

View File

@@ -1,7 +0,0 @@
kind: Fixes
body: ':bug: :snowman: Fixed issue where changing data type size/precision/scale (e.g.,
varchar(3) to varchar(10)) incorrectly triggered a breaking change error fo'
time: 2025-11-27T14:59:29.256274-05:00
custom:
Author: emmyoop
Issue: "11186"

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: ':bug: :snowman: Support unit testing models that depend on sources with the same name'
time: 2025-11-27T17:01:24.193516-05:00
custom:
Author: michelleark
Issue: 11975 10433

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: Fix bug in partial parsing when updating a model with a schema file that is referenced by a singular test
time: 2025-11-28T10:21:29.911147Z
custom:
Author: mattogburke
Issue: "12223"

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: ':bug: :snowman: Avoid retrying successful run-operation commands'
time: 2025-11-28T12:28:38.546261-05:00
custom:
Author: michelleark
Issue: "11850"

View File

@@ -1,7 +0,0 @@
kind: Fixes
body: ':bug: :snowman: Fix `dbt deps --add-package` crash when packages.yml contains `warn-unpinned:
false`'
time: 2025-11-28T16:19:37.608722-05:00
custom:
Author: emmyoop
Issue: "9104"

View File

@@ -1,7 +0,0 @@
kind: Fixes
body: ':bug: :snowman: Improve `dbt deps --add-package` duplicate detection with better
cross-source matching and word boundaries'
time: 2025-11-28T16:31:44.344099-05:00
custom:
Author: emmyoop
Issue: "12239"

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: ':bug: :snowman: Fix false positive deprecation warning of pre/post-hook SQL configs'
time: 2025-12-02T13:37:05.012112-05:00
custom:
Author: michelleark
Issue: "12244"

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: Ensure recent deprecation warnings include event name in message
time: 2025-12-09T17:50:31.334618-06:00
custom:
Author: QMalcolm
Issue: "12264"

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: Improve error message clarity when detecting nodes with space in name
time: 2025-12-10T14:39:35.107841-08:00
custom:
Author: michelleark
Issue: "11835"

View File

@@ -0,0 +1,6 @@
kind: Under the Hood
body: Create a no-op exposure runner
time: 2024-12-02T16:47:15.766574Z
custom:
Author: aranke
Issue: ' '

View File

@@ -0,0 +1,7 @@
kind: Under the Hood
body: Improve selection performance by optimizing the select_children() and select_parents()
functions.
time: 2024-12-05T14:31:44.584216-05:00
custom:
Author: peterallenwebb
Issue: "11099"

View File

@@ -0,0 +1,7 @@
kind: Under the Hood
body: Change exception type from DbtInternalException to UndefinedMacroError when
macro not found in 'run operation' command
time: 2025-01-07T12:39:55.234321-05:00
custom:
Author: michelleark
Issue: "11192"

View File

@@ -0,0 +1,6 @@
kind: Under the Hood
body: Create LogNodeResult event
time: 2025-01-07T20:58:38.821036Z
custom:
Author: aranke
Issue: ' '

View File

@@ -0,0 +1,6 @@
kind: Under the Hood
body: Fix error counts for exposures
time: 2025-01-10T20:20:57.01632Z
custom:
Author: aranke
Issue: ' '

View File

@@ -0,0 +1,6 @@
kind: Under the Hood
body: Misc fixes for group info in logging
time: 2025-01-17T15:22:15.497485Z
custom:
Author: aranke
Issue: '11218'

View File

@@ -0,0 +1,6 @@
kind: Under the Hood
body: Add secondary profiles to profile.py
time: 2025-02-14T12:38:53.964266Z
custom:
Author: aranke
Issue: XPLAT-241

View File

@@ -1,6 +0,0 @@
kind: Under the Hood
body: Update jsonschemas for schema.yml and dbt_project.yml deprecations
time: 2025-11-19T11:01:10.616676-05:00
custom:
Author: michelleark
Issue: "12180"

View File

@@ -1,6 +0,0 @@
kind: Under the Hood
body: Replace setuptools and tox with hatch for build, test, and environment management.
time: 2025-11-21T14:05:15.838252-05:00
custom:
Author: emmyoop
Issue: "12151"

View File

@@ -1,6 +0,0 @@
kind: Under the Hood
body: Add add_catalog_integration call even if we have a pre-existing manifest
time: 2025-12-09T13:18:57.043254-08:00
custom:
Author: colin-rogers-dbt
Issue: "12262"

View File

@@ -41,26 +41,32 @@ newlines:
endOfVersion: 1 endOfVersion: 1
custom: custom:
- key: Author - key: Author
label: GitHub Username(s) (separated by a single space if multiple) label: GitHub Username(s) (separated by a single space if multiple)
type: string type: string
minLength: 3 minLength: 3
- key: Issue - key: Issue
label: GitHub Issue Number (separated by a single space if multiple) label: GitHub Issue Number (separated by a single space if multiple)
type: string type: string
minLength: 1 minLength: 1
footerFormat: | footerFormat: |
{{- $contributorDict := dict }} {{- $contributorDict := dict }}
{{- /* ensure we always skip snyk and dependabot */}} {{- /* ensure all names in this list are all lowercase for later matching purposes */}}
{{- $bots := list "dependabot[bot]" "snyk-bot"}} {{- $core_team := splitList " " .Env.CORE_TEAM }}
{{- /* ensure we always skip snyk and dependabot in addition to the core team */}}
{{- $maintainers := list "dependabot[bot]" "snyk-bot"}}
{{- range $team_member := $core_team }}
{{- $team_member_lower := lower $team_member }}
{{- $maintainers = append $maintainers $team_member_lower }}
{{- end }}
{{- range $change := .Changes }} {{- range $change := .Changes }}
{{- $authorList := splitList " " $change.Custom.Author }} {{- $authorList := splitList " " $change.Custom.Author }}
{{- /* loop through all authors for a single changelog */}} {{- /* loop through all authors for a single changelog */}}
{{- range $author := $authorList }} {{- range $author := $authorList }}
{{- $authorLower := lower $author }} {{- $authorLower := lower $author }}
{{- /* we only want to include non-bot contributors */}} {{- /* we only want to include non-core team contributors */}}
{{- if not (has $authorLower $bots)}} {{- if not (has $authorLower $maintainers)}}
{{- $changeList := splitList " " $change.Custom.Author }} {{- $changeList := splitList " " $change.Custom.Author }}
{{- $IssueList := list }} {{- $IssueList := list }}
{{- $changeLink := $change.Kind }} {{- $changeLink := $change.Kind }}
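The template change above replaces the hard-coded bot list with a `$maintainers` list seeded from the bots plus a space-separated `CORE_TEAM` environment variable, all lowercased before matching, so core team members are excluded from the contributor footer alongside bots. A rough Python sketch of that filtering logic (the data shape and example entries are illustrative assumptions, not repo code):

```python
import os

def external_contributors(changes: list[dict]) -> set[str]:
    """Collect changelog authors who are not bots or core team members.

    Mirrors the changie footerFormat logic above: the maintainer list is the
    bot accounts plus a space-separated CORE_TEAM env var, compared lowercase.
    """
    maintainers = {"dependabot[bot]", "snyk-bot"}
    maintainers |= {m.lower() for m in os.environ.get("CORE_TEAM", "").split()}

    contributors = set()
    for change in changes:
        for author in change["Author"].split():       # entries can list multiple authors
            if author.lower() not in maintainers:     # skip bots and core team
                contributors.add(author)
    return contributors

# Example usage with hypothetical changelog entries
print(external_contributors([
    {"Author": "dependabot[bot]"},
    {"Author": "3loka michelleark"},
]))
```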

View File

@@ -10,5 +10,6 @@ ignore =
E704 # makes Flake8 work like black E704 # makes Flake8 work like black
E741 E741
E501 # long line checking is done in black E501 # long line checking is done in black
exclude = test/
per-file-ignores = per-file-ignores =
*/__init__.py: F401 */__init__.py: F401

View File

@@ -62,7 +62,7 @@ body:
description: | description: |
examples: examples:
- **OS**: Ubuntu 24.04 - **OS**: Ubuntu 24.04
- **Python**: 3.10.12 (`python3 --version`) - **Python**: 3.9.12 (`python3 --version`)
- **dbt-core**: 1.1.1 (`dbt --version`) - **dbt-core**: 1.1.1 (`dbt --version`)
value: | value: |
- OS: - OS:

View File

@@ -12,6 +12,15 @@ contact_links:
- name: Participate in Discussions - name: Participate in Discussions
url: https://github.com/dbt-labs/dbt-core/discussions url: https://github.com/dbt-labs/dbt-core/discussions
about: Do you have a Big Idea for dbt? Read open discussions, or start a new one about: Do you have a Big Idea for dbt? Read open discussions, or start a new one
- name: Create an issue for adapters - name: Create an issue for dbt-redshift
url: https://github.com/dbt-labs/dbt-adapters/issues/new/choose url: https://github.com/dbt-labs/dbt-redshift/issues/new/choose
about: Report a bug or request a feature for an adapter about: Report a bug or request a feature for dbt-redshift
- name: Create an issue for dbt-bigquery
url: https://github.com/dbt-labs/dbt-bigquery/issues/new/choose
about: Report a bug or request a feature for dbt-bigquery
- name: Create an issue for dbt-snowflake
url: https://github.com/dbt-labs/dbt-snowflake/issues/new/choose
about: Report a bug or request a feature for dbt-snowflake
- name: Create an issue for dbt-spark
url: https://github.com/dbt-labs/dbt-spark/issues/new/choose
about: Report a bug or request a feature for dbt-spark

View File

@@ -56,7 +56,7 @@ body:
description: | description: |
examples: examples:
- **OS**: Ubuntu 24.04 - **OS**: Ubuntu 24.04
- **Python**: 3.10.12 (`python3 --version`) - **Python**: 3.9.12 (`python3 --version`)
- **dbt-core (working version)**: 1.1.1 (`dbt --version`) - **dbt-core (working version)**: 1.1.1 (`dbt --version`)
- **dbt-core (regression version)**: 1.2.0 (`dbt --version`) - **dbt-core (regression version)**: 1.2.0 (`dbt --version`)
value: | value: |

.github/_README.md vendored
View File

@@ -120,7 +120,7 @@ Some triggers of note that we use:
```yaml ```yaml
jobs: jobs:
dependency_changelog: dependency_changelog:
runs-on: ${{ vars.UBUNTU_LATEST }} runs-on: ubuntu-latest
steps: steps:
- name: Get File Name Timestamp - name: Get File Name Timestamp
@@ -188,12 +188,6 @@ ___
- The [GitHub CLI](https://cli.github.com/) is available in the default runners - The [GitHub CLI](https://cli.github.com/) is available in the default runners
- Actions run in your context. ie, using an action from the marketplace that uses the GITHUB_TOKEN uses the GITHUB_TOKEN generated by your workflow run. - Actions run in your context. ie, using an action from the marketplace that uses the GITHUB_TOKEN uses the GITHUB_TOKEN generated by your workflow run.
### Runners
- We dynamically set runners based on repository vars. Admins can view repository vars and reset them. Current values are the following but are subject to change:
- `vars.UBUNTU_LATEST` -> `ubuntu-latest`
- `vars.WINDOWS_LATEST` -> `windows-latest`
- `vars.MACOS_LATEST` -> `macos-14`
### Actions from the Marketplace ### Actions from the Marketplace
- Don't use external actions for things that can easily be accomplished manually. - Don't use external actions for things that can easily be accomplished manually.
- Always read through what an external action does before using it! Often an action in the GitHub Actions Marketplace can be replaced with a few lines in bash. This is much more maintainable (and won't change under us) and clear as to what's actually happening. It also prevents any - Always read through what an external action does before using it! Often an action in the GitHub Actions Marketplace can be replaced with a few lines in bash. This is much more maintainable (and won't change under us) and clear as to what's actually happening. It also prevents any

View File

@@ -33,7 +33,7 @@ on:
jobs: jobs:
build: build:
runs-on: ${{ vars.UBUNTU_LATEST }} runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v3
- name: Wrangle latest tag - name: Wrangle latest tag

View File

@@ -3,24 +3,24 @@ on:
workflow_dispatch: workflow_dispatch:
inputs: inputs:
package: package:
description: The package to publish description: The package to publish
required: true required: true
version_number: version_number:
description: The version number description: The version number
required: true required: true
jobs: jobs:
build: build:
runs-on: ${{ vars.UBUNTU_LATEST }} runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v3
- name: Wrangle latest tag - name: Wrangle latest tag
id: is_latest id: is_latest
uses: ./.github/actions/latest-wrangler uses: ./.github/actions/latest-wrangler
with: with:
package: ${{ github.event.inputs.package }} package: ${{ github.event.inputs.package }}
new_version: ${{ github.event.inputs.new_version }} new_version: ${{ github.event.inputs.new_version }}
gh_token: ${{ secrets.GITHUB_TOKEN }} gh_token: ${{ secrets.GITHUB_TOKEN }}
- name: Print the results - name: Print the results
run: | run: |
echo "Is it latest? Survey says: ${{ steps.is_latest.outputs.latest }} !" echo "Is it latest? Survey says: ${{ steps.is_latest.outputs.latest }} !"

View File

@@ -1,10 +1,9 @@
import os import os
from packaging.version import Version, parse
import requests
import sys import sys
from typing import List from typing import List
import requests
from packaging.version import Version, parse
def main(): def main():
package_name: str = os.environ["INPUT_PACKAGE_NAME"] package_name: str = os.environ["INPUT_PACKAGE_NAME"]

View File

@@ -0,0 +1,27 @@
name: "Set up postgres (linux)"
description: "Set up postgres service on linux vm for dbt integration tests"
runs:
using: "composite"
steps:
- shell: bash
run: |
sudo apt-get --purge remove postgresql postgresql-*
sudo apt update -y
sudo apt install gnupg2 wget vim -y
sudo sh -c 'echo "deb https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
curl -fsSL https://www.postgresql.org/media/keys/ACCC4CF8.asc|sudo gpg --dearmor -o /etc/apt/trusted.gpg.d/postgresql.gpg
sudo apt update -y
sudo apt install postgresql-17
sudo apt-get -y install postgresql postgresql-contrib
sudo systemctl start postgresql
sudo systemctl enable postgresql
pg_isready
echo "Updating setup_db.sh script ownership and execute bit"
sudo chown postgres ${{ github.action_path }}/../../../test/setup_db.sh
sudo chown postgres ${{ github.action_path }}/setup_db.sh
sudo chmod +x ${{ github.action_path }}/../../../test/setup_db.sh
sudo chmod +x ${{ github.action_path }}/setup_db.sh
ls -la ${{ github.action_path }}/../../../test/setup_db.sh
ls -la ${{ github.action_path }}/setup_db.sh
echo "Running setup_db.sh"
sudo -u postgres bash ${{ github.action_path }}/../../../test/setup_db.sh

View File

@@ -0,0 +1 @@
../../../test/setup_db.sh

View File

@@ -0,0 +1,26 @@
name: "Set up postgres (macos)"
description: "Set up postgres service on macos vm for dbt integration tests"
runs:
using: "composite"
steps:
- shell: bash
run: |
brew install postgresql@16
brew link postgresql@16 --force
brew services start postgresql@16
echo "Check PostgreSQL service is running"
i=10
COMMAND='pg_isready'
while [ $i -gt -1 ]; do
if [ $i == 0 ]; then
echo "PostgreSQL service not ready, all attempts exhausted"
exit 1
fi
echo "Check PostgreSQL service status"
eval $COMMAND && break
echo "PostgreSQL service not ready, wait 10 more sec, attempts left: $i"
sleep 10
((i--))
done
createuser -s postgres
bash ${{ github.action_path }}/setup_db.sh

View File

@@ -0,0 +1 @@
../../../test/setup_db.sh

View File

@@ -1 +1 @@
../../../scripts/setup_db.sh ../../../test/setup_db.sh

View File

@@ -1,169 +0,0 @@
# **what?**
# Runs all tests in dbt-postgres with this branch of dbt-core to ensure nothing is broken
# **why?**
# Ensure dbt-core changes do not break dbt-postgres, as a basic proxy for other adapters
# **when?**
# This will run when trying to merge a PR into main.
# It can also be manually triggered.
# This workflow can be skipped by adding the "Skip Postgres Testing" label to the PR. This is
# useful when making a change in both `dbt-postgres` and `dbt-core` where the changes are dependent
# and cause the other repository to break.
name: "dbt-postgres Tests"
run-name: >-
${{ (github.event_name == 'workflow_dispatch' || github.event_name == 'workflow_call')
&& format('dbt-postgres@{0} with dbt-core@{1}', inputs.dbt-postgres-ref, inputs.dbt-core-ref)
|| 'dbt-postgres@main with dbt-core branch' }}
on:
push:
branches:
- "main"
- "*.latest"
- "releases/*"
pull_request:
merge_group:
types: [checks_requested]
workflow_dispatch:
inputs:
dbt-postgres-ref:
description: "The branch of dbt-postgres to test against"
default: "main"
dbt-core-ref:
description: "The branch of dbt-core to test against"
default: "main"
workflow_call:
inputs:
dbt-postgres-ref:
description: "The branch of dbt-postgres to test against"
type: string
required: true
default: "main"
dbt-core-ref:
description: "The branch of dbt-core to test against"
type: string
required: true
default: "main"
permissions: read-all
# will cancel previous workflows triggered by the same event
# and for the same ref for PRs/merges or same SHA otherwise
# and for the same inputs on workflow_dispatch or workflow_call
concurrency:
group: ${{ github.workflow }}-${{ github.event_name }}-${{ contains(fromJson('["pull_request", "merge_group"]'), github.event_name) && github.event.pull_request.head.ref || github.sha }}-${{ contains(fromJson('["workflow_call", "workflow_dispatch"]'), github.event_name) && github.event.inputs.dbt-postgres-ref && github.event.inputs.dbt-core-ref || github.sha }}
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
job-prep:
# This allows us to run the workflow on pull_requests as well so we can always run unit tests
# and only run integration tests on merge for time purposes
name: Setup Repo Refs
runs-on: ubuntu-latest
outputs:
dbt-postgres-ref: ${{ steps.core-ref.outputs.ref }}
dbt-core-ref: ${{ steps.common-ref.outputs.ref }}
steps:
- name: "Input Refs"
id: job-inputs
run: |
echo "inputs.dbt-postgres-ref=${{ inputs.dbt-postgres-ref }}"
echo "inputs.dbt-core-ref=${{ inputs.dbt-core-ref }}"
- name: "Determine dbt-postgres ref"
id: core-ref
run: |
if [[ -z "${{ inputs.dbt-postgres-ref }}" ]]; then
REF="main"
else
REF=${{ inputs.dbt-postgres-ref }}
fi
echo "ref=$REF" >> $GITHUB_OUTPUT
- name: "Determine dbt-core ref"
id: common-ref
run: |
if [[ -z "${{ inputs.dbt-core-ref }}" ]]; then
# these will be commits instead of branches
if [[ "${{ github.event_name }}" == "merge_group" ]]; then
REF=${{ github.event.merge_group.head_sha }}
else
REF=${{ github.event.pull_request.base.sha }}
fi
else
REF=${{ inputs.dbt-core-ref }}
fi
echo "ref=$REF" >> $GITHUB_OUTPUT
- name: "Final Refs"
run: |
echo "dbt-postgres-ref=${{ steps.core-ref.outputs.ref }}"
echo "dbt-core-ref=${{ steps.common-ref.outputs.ref }}"
integration-tests-postgres:
name: "dbt-postgres integration tests"
needs: [job-prep]
runs-on: ubuntu-latest
defaults:
run:
working-directory: "./dbt-postgres"
environment:
name: "dbt-postgres"
env:
POSTGRES_TEST_HOST: ${{ vars.POSTGRES_TEST_HOST }}
POSTGRES_TEST_PORT: ${{ vars.POSTGRES_TEST_PORT }}
POSTGRES_TEST_USER: ${{ vars.POSTGRES_TEST_USER }}
POSTGRES_TEST_PASS: ${{ secrets.POSTGRES_TEST_PASS }}
POSTGRES_TEST_DATABASE: ${{ vars.POSTGRES_TEST_DATABASE }}
POSTGRES_TEST_THREADS: ${{ vars.POSTGRES_TEST_THREADS }}
services:
postgres:
image: postgres
env:
POSTGRES_PASSWORD: postgres
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- ${{ vars.POSTGRES_TEST_PORT }}:5432
steps:
- name: "Check out dbt-adapters@${{ needs.job-prep.outputs.dbt-postgres-ref }}"
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
with:
repository: dbt-labs/dbt-adapters
ref: ${{ needs.job-prep.outputs.dbt-postgres-ref }}
- name: "Set up Python"
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # actions/setup-python@v5
with:
python-version: ${{ inputs.python-version }}
- name: "Set environment variables"
run: |
echo "HATCH_PYTHON=${{ inputs.python-version }}" >> $GITHUB_ENV
echo "PIP_ONLY_BINARY=psycopg2-binary" >> $GITHUB_ENV
- name: "Setup test database"
run: psql -f ./scripts/setup_test_database.sql
env:
PGHOST: ${{ vars.POSTGRES_TEST_HOST }}
PGPORT: ${{ vars.POSTGRES_TEST_PORT }}
PGUSER: postgres
PGPASSWORD: postgres
PGDATABASE: postgres
- name: "Install hatch"
uses: pypa/hatch@257e27e51a6a5616ed08a39a408a21c35c9931bc # pypa/hatch@install
- name: "Run integration tests"
run: hatch run ${{ inputs.hatch-env }}:integration-tests

View File

@@ -6,70 +6,92 @@
# multiple reviews on a single PR based on files changed, so we need to enforce this manually. # multiple reviews on a single PR based on files changed, so we need to enforce this manually.
# **when?** # **when?**
# This will run when reviews are submitted and dismissed. # This will run when PRs are opened, synchronized, reopened, edited, or when reviews
# are submitted and dismissed.
name: "Enforce Additional Reviews on Artifact and Validations Changes" name: "Enforce Additional Reviews on Artifact and Validations Changes"
permissions:
checks: write
pull-requests: write
contents: read
on: on:
# trigger check on review events. use pull_request_target for forks.
pull_request_target: pull_request_target:
types: [opened, reopened, ready_for_review, synchronize, review_requested] types: [opened, synchronize, reopened, edited]
# retrigger check on review events
pull_request_review: pull_request_review:
types: [submitted, edited, dismissed] types: [submitted, edited, dismissed]
# only run this once per PR at a time # only run this once per PR at a time
concurrency: concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }} group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: true cancel-in-progress: false # wait for in-progress runs to complete to prevent race condition
env: env:
required_approvals: 2 required_approvals: 2
team: "core-group" team: "core-group"
jobs: jobs:
check-reviews: cleanup-old-runs:
name: "Validate Additional Reviews" # this job is only run once per PR at a time. Since it uses two types of triggers,
# when the pull_request trigger fails, that run stays around when the pull_request_review
# triggers a new run. This job will clean up those old runs so we only end up with a single run.
name: "Cleanup Previous Runs"
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: "Dismiss previous workflow runs"
run: |
# Get all check runs for this PR's SHA
cleanup_checks=$(gh api repos/${{ github.repository }}/commits/${{ github.event.pull_request.head.sha }}/check-runs \
--jq '.check_runs[] | select(.name == "Cleanup Previous Runs")')
review_checks=$(gh api repos/${{ github.repository }}/commits/${{ github.event.pull_request.head.sha }}/check-runs \
--jq '.check_runs[] | select(.name == "Validate Additional Reviews")')
# For each check run from this workflow (except current), dismiss it
{ echo "$cleanup_checks"; echo "$review_checks"; } | jq -r '. | select(.id != ${{ github.run_id }}) | .id' | \
while read -r check_id; do
echo "Dismissing check $check_id"
gh api repos/${{ github.repository }}/check-runs/$check_id \
-X PATCH \
-F status="completed" \
-F conclusion="neutral" \
-F "output[title]=Superseded" \
-F "output[summary]=This check was superseded by a newer run"
done
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
check-reviews:
name: "Validate Additional Reviews"
needs: [cleanup-old-runs]
runs-on: ubuntu-latest
steps:
- name: "Checkout code"
uses: actions/checkout@v4
- name: "Get list of changed files" - name: "Get list of changed files"
id: changed_files id: changed_files
run: | run: |
# Fetch files as JSON and process with jq to sanitize output CHANGED_FILES=$(gh api repos/${{ github.repository }}/pulls/${{ github.event.pull_request.number }}/files | jq -r '.[].filename')
gh api repos/${{ github.repository }}/pulls/${{ github.event.pull_request.number }}/files \ echo "Changed files:"
| jq -r '.[].filename' \ echo "$CHANGED_FILES"
| while IFS= read -r file; do
# Sanitize the filename by removing any special characters and command injection attempts
clean_file=$(echo "$file" | sed 's/[^a-zA-Z0-9\.\/\-_]//g')
echo "$clean_file"
done > changed_files.txt
echo "CHANGED_FILES<<EOF" >> $GITHUB_OUTPUT echo "CHANGED_FILES<<EOF" >> $GITHUB_OUTPUT
cat changed_files.txt >> $GITHUB_OUTPUT echo "$CHANGED_FILES" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT echo "EOF" >> $GITHUB_OUTPUT
env: env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: "Check if any artifact files changed" - name: "Check if any artifact files were changed"
id: artifact_files_changed id: artifact_files_changed
run: | run: |
artifact_changes=false artifact_changes=false
while IFS= read -r file; do while IFS= read -r file; do
# Only process if file path looks legitimate echo "Debug: Checking file: '$file'"
if [[ "$file" =~ ^[a-zA-Z0-9\.\/\-_]+$ ]]; then if [[ "$file" == "core/dbt/artifacts/"* ]] ; then
if [[ "$file" == "core/dbt/artifacts/"* ]] ; then artifact_changes=true
artifact_changes=true break
break
fi
fi fi
done < changed_files.txt done <<< "${{ steps.changed_files.outputs.CHANGED_FILES }}"
echo "artifact_changes=$artifact_changes" >> $GITHUB_OUTPUT echo "artifact_changes=$artifact_changes" >> $GITHUB_OUTPUT
- name: "Get Core Team Members" - name: "Get Core Team Members"
if: steps.artifact_files_changed.outputs.artifact_changes == 'true' if: ${{ steps.artifact_files_changed.outputs.artifact_changes == 'true' }}
id: core_members id: core_members
run: | run: |
gh api -H "Accept: application/vnd.github+json" \ gh api -H "Accept: application/vnd.github+json" \
@@ -83,104 +105,49 @@ jobs:
GH_TOKEN: ${{ secrets.IT_TEAM_MEMBERSHIP }} GH_TOKEN: ${{ secrets.IT_TEAM_MEMBERSHIP }}
- name: "Verify ${{ env.required_approvals }} core team approvals" - name: "Verify ${{ env.required_approvals }} core team approvals"
if: steps.artifact_files_changed.outputs.artifact_changes == 'true'
id: check_approvals id: check_approvals
if: ${{ steps.artifact_files_changed.outputs.artifact_changes == 'true' }}
run: | run: |
# Get all reviews # Get all reviews
REVIEWS=$(gh api repos/${{ github.repository }}/pulls/${{ github.event.pull_request.number }}/reviews) REVIEWS=$(gh api repos/${{ github.repository }}/pulls/${{ github.event.pull_request.number }}/reviews)
echo "All reviews:"
echo "$REVIEWS"
# Count approved reviews from core team members (only most recent review per user) # Count approved reviews from core team members (only most recent review per user)
CORE_APPROVALS=0 CORE_APPROVALS=0
while IFS= read -r member; do while IFS= read -r member; do
echo "Checking member: $member" echo "member: $member"
APPROVED=$(echo "$REVIEWS" | jq --arg user "$member" ' APPROVED=$(echo "$REVIEWS" | jq --arg user "$member" '
group_by(.user.login) | group_by(.user.login) |
map(select(.[0].user.login == $user) | map(select(.[0].user.login == $user) |
sort_by(.submitted_at) | sort_by(.submitted_at) |
last) | last) |
map(select(.state == "APPROVED" and (.state != "DISMISSED"))) | map(select(.state == "APPROVED")) |
length') length')
echo "Latest review state for $member: $APPROVED"
CORE_APPROVALS=$((CORE_APPROVALS + APPROVED)) CORE_APPROVALS=$((CORE_APPROVALS + APPROVED))
echo "Running total: $CORE_APPROVALS"
done <<< "${{ steps.core_members.outputs.membership }}" done <<< "${{ steps.core_members.outputs.membership }}"
echo "CORE_APPROVALS=$CORE_APPROVALS" >> $GITHUB_OUTPUT echo "CORE_APPROVALS=$CORE_APPROVALS" >> $GITHUB_OUTPUT
echo "CORE_APPROVALS=$CORE_APPROVALS" echo $CORE_APPROVALS
env: env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: "Find Comment" - name: "Notify and fail if not enough approvals"
if: steps.artifact_files_changed.outputs.artifact_changes == 'true' && steps.check_approvals.outputs.CORE_APPROVALS < env.required_approvals if: ${{ steps.artifact_files_changed.outputs.artifact_changes == 'true' && steps.check_approvals.outputs.CORE_APPROVALS < fromJSON(env.required_approvals) }}
uses: peter-evans/find-comment@a54c31d7fa095754bfef525c0c8e5e5674c4b4b1 # peter-evans/find-comment@v2
id: find-comment
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: 'github-actions[bot]'
body-includes: "### Additional Artifact Review Required"
- name: "Create Comment"
if: steps.artifact_files_changed.outputs.artifact_changes == 'true' && steps.find-comment.outputs.comment-id == '' && steps.check_approvals.outputs.CORE_APPROVALS < env.required_approvals
uses: peter-evans/create-or-update-comment@23ff15729ef2fc348714a3bb66d2f655ca9066f2 # peter-evans/create-or-update-comment@v3
with:
issue-number: ${{ github.event.pull_request.number }}
body: |
### Additional Artifact Review Required
Changes to artifact directory files requires at least ${{ env.required_approvals }} approvals from core team members.
- name: "Notify if not enough approvals"
if: steps.artifact_files_changed.outputs.artifact_changes == 'true'
run: | run: |
if [[ "${{ steps.check_approvals.outputs.CORE_APPROVALS }}" -ge "${{ env.required_approvals }}" ]]; then title="PR Approval Requirements Not Met"
title="Extra requirements met" message="Changes to artifact directory files requires at least ${{ env.required_approvals }} approvals from core team members. Current number of core team approvals: ${{ steps.check_approvals.outputs.CORE_APPROVALS }} "
message="Changes to artifact directory files requires at least ${{ env.required_approvals }} approvals from core team members. Current number of core team approvals: ${{ steps.check_approvals.outputs.CORE_APPROVALS }} " echo "::error title=$title::$message"
echo "::notice title=$title::$message" exit 1
echo "REVIEW_STATUS=success" >> $GITHUB_OUTPUT
else
title="PR Approval Requirements Not Met"
message="Changes to artifact directory files requires at least ${{ env.required_approvals }} approvals from core team members. Current number of core team approvals: ${{ steps.check_approvals.outputs.CORE_APPROVALS }} "
echo "::notice title=$title::$message"
echo "REVIEW_STATUS=neutral" >> $GITHUB_OUTPUT
fi
id: review_check
- name: "Set check status" - name: "Notify of sufficient approvals"
id: status_check if: ${{ steps.artifact_files_changed.outputs.artifact_changes == 'true' && steps.check_approvals.outputs.CORE_APPROVALS >= fromJSON(env.required_approvals) }}
run: | run: |
if [[ "${{ steps.artifact_files_changed.outputs.artifact_changes }}" == 'false' ]]; then title="Extra requirements met"
# no extra review required message="Changes to artifact directory files requires at least ${{ env.required_approvals }} approvals from core team members. Current number of core team approvals: ${{ steps.check_approvals.outputs.CORE_APPROVALS }} "
echo "current_status=success" >> $GITHUB_OUTPUT echo "::notice title=$title::$message"
elif [[ "${{ steps.review_check.outputs.REVIEW_STATUS }}" == "success" ]]; then
# we have all the required reviews
echo "current_status=success" >> $GITHUB_OUTPUT
else
# neutral exit - neither success nor failure
# we can't fail here because we use multiple triggers for this workflow and they won't reset the check
# workaround is to use a neutral exit to skip the check run until it's actually successful
echo "current_status=neutral" >> $GITHUB_OUTPUT
fi
- name: "Post Event" - name: "Notify of no extra requirements"
# This step posts the status of the check because the workflow is triggered by multiple events if: ${{ steps.artifact_files_changed.outputs.artifact_changes != 'true' }}
# and we need to ensure the check is always updated. Otherwise we would end up with duplicate
# checks in the GitHub UI.
run: | run: |
if [[ "${{ steps.status_check.outputs.current_status }}" == "success" ]]; then title="No extra requirements"
state="success" message="No additional reviews required"
else echo "::notice title=$title::$message"
state="failure"
fi
gh api \
--method POST \
-H "Accept: application/vnd.github+json" \
/repos/${{ github.repository }}/statuses/${{ github.event.pull_request.base.sha }} \
-f state="$state" \
-f description="Artifact Review Check" \
-f context="Artifact Review Check" \
-f target_url="${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
env:
GH_TOKEN: ${{ secrets.FISHTOWN_BOT_PAT }}
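For reference, the approval-counting step in this workflow keeps only each core team member's most recent review and counts it when its state is APPROVED. A small Python sketch of the same idea, with assumed data shapes mirroring the GitHub reviews API payload (not repo code):

```python
def count_core_approvals(reviews: list[dict], core_members: list[str]) -> int:
    """Count core team members whose latest review on the PR is an approval.

    `reviews` mimics the GitHub reviews API payload: each item carries
    user.login, state, and submitted_at. Only the most recent review per
    member counts, matching the jq logic in the workflow step above.
    """
    latest: dict[str, dict] = {}
    for review in sorted(reviews, key=lambda r: r["submitted_at"]):
        latest[review["user"]["login"]] = review   # later reviews overwrite earlier ones

    return sum(
        1
        for member in core_members
        if member in latest and latest[member]["state"] == "APPROVED"
    )

# Example: one member approved then dismissed, another approved
reviews = [
    {"user": {"login": "alice"}, "state": "APPROVED", "submitted_at": "2025-03-01T10:00:00Z"},
    {"user": {"login": "alice"}, "state": "DISMISSED", "submitted_at": "2025-03-02T10:00:00Z"},
    {"user": {"login": "bob"}, "state": "APPROVED", "submitted_at": "2025-03-01T11:00:00Z"},
]
print(count_core_approvals(reviews, ["alice", "bob"]))  # prints 1
```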

View File

@@ -22,7 +22,7 @@ permissions:
jobs: jobs:
auto-response: auto-response:
runs-on: ${{ vars.UBUNTU_LATEST }} runs-on: ubuntu-latest
steps: steps:
- name: Check if current date is within holiday period - name: Check if current date is within holiday period
id: date-check id: date-check

View File

@@ -28,13 +28,13 @@ permissions:
jobs: jobs:
backport: backport:
name: Backport name: Backport
runs-on: ${{ vars.UBUNTU_LATEST }} runs-on: ubuntu-latest
# Only react to merged PRs for security reasons. # Only react to merged PRs for security reasons.
# See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target. # See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target.
if: > if: >
github.event.pull_request.merged github.event.pull_request.merged
&& contains(github.event.label.name, 'backport') && contains(github.event.label.name, 'backport')
steps: steps:
- uses: tibdex/backport@9565281eda0731b1d20c4025c43339fb0a23812e # tibdex/backport@v2.0.4 - uses: tibdex/backport@v2.0.4
with: with:
github_token: ${{ secrets.GITHUB_TOKEN }} github_token: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -41,14 +41,14 @@ jobs:
include: include:
- label: "dependencies" - label: "dependencies"
changie_kind: "Dependencies" changie_kind: "Dependencies"
runs-on: ${{ vars.UBUNTU_LATEST }} runs-on: ubuntu-latest
steps: steps:
- name: Create and commit changelog on bot PR - name: Create and commit changelog on bot PR
if: ${{ contains(github.event.pull_request.labels.*.name, matrix.label) }} if: ${{ contains(github.event.pull_request.labels.*.name, matrix.label) }}
id: bot_changelog id: bot_changelog
uses: emmyoop/changie_bot@22b70618b13d0d1c64ea95212bafca2d2bf6b764 # emmyoop/changie_bot@v1.1.0 uses: emmyoop/changie_bot@v1.1.0
with: with:
GITHUB_TOKEN: ${{ secrets.FISHTOWN_BOT_PAT }} GITHUB_TOKEN: ${{ secrets.FISHTOWN_BOT_PAT }}
commit_author_name: "Github Build Bot" commit_author_name: "Github Build Bot"

View File

@@ -4,26 +4,22 @@ on:
pull_request: pull_request:
types: [ opened, reopened, labeled, unlabeled, synchronize ] types: [ opened, reopened, labeled, unlabeled, synchronize ]
paths-ignore: [ '.changes/**', '.github/**', 'tests/**', '**.md', '**.yml' ] paths-ignore: [ '.changes/**', '.github/**', 'tests/**', '**.md', '**.yml' ]
merge_group:
types: [checks_requested]
workflow_dispatch:
permissions: workflow_dispatch:
contents: read
jobs: jobs:
check-artifact-changes: check-artifact-changes:
runs-on: ${{ vars.UBUNTU_LATEST }} runs-on: ubuntu-latest
if: ${{ !contains(github.event.pull_request.labels.*.name, 'artifact_minor_upgrade') }} if: ${{ !contains(github.event.pull_request.labels.*.name, 'artifact_minor_upgrade') }}
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4 uses: actions/checkout@v4
with: with:
fetch-depth: 0 fetch-depth: 0
- name: Check for changes in core/dbt/artifacts - name: Check for changes in core/dbt/artifacts
# https://github.com/marketplace/actions/paths-changes-filter # https://github.com/marketplace/actions/paths-changes-filter
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # dorny/paths-filter@v3 uses: dorny/paths-filter@v3
id: check_artifact_changes id: check_artifact_changes
with: with:
filters: | filters: |

View File

@@ -7,6 +7,7 @@
# **when?** # **when?**
# When a PR is opened, not in draft or moved from draft to ready for review # When a PR is opened, not in draft or moved from draft to ready for review
name: Label community PRs name: Label community PRs
on: on:
@@ -28,15 +29,9 @@ jobs:
# If this PR is opened and not draft, determine if it needs to be labeled # If this PR is opened and not draft, determine if it needs to be labeled
# if the PR is converted out of draft, determine if it needs to be labeled # if the PR is converted out of draft, determine if it needs to be labeled
if: | if: |
( (!contains(github.event.pull_request.labels.*.name, 'community') &&
!contains(github.event.pull_request.labels.*.name, 'community') (github.event.action == 'opened' && github.event.pull_request.draft == false ) ||
&& ( github.event.action == 'ready_for_review' )
(github.event.action == 'opened' && github.event.pull_request.draft == false)
|| github.event.action == 'ready_for_review'
)
&& github.event.pull_request.user.type != 'Bot'
&& github.event.pull_request.user.login != 'dependabot[bot]'
)
uses: dbt-labs/actions/.github/workflows/label-community.yml@main uses: dbt-labs/actions/.github/workflows/label-community.yml@main
with: with:
github_team: 'core-group' github_team: 'core-group'

View File

@@ -1,44 +1,25 @@
# **what?** # **what?**
# Cuts the `*.latest` branch, bumps dependencies on it, cleans up all files in `.changes/unreleased` # Cuts a new `*.latest` branch
# and `.changes/previous version on main and bumps main to the input version. # Also cleans up all files in `.changes/unreleased` and `.changes/previous version on
# `main` and bumps `main` to the input version.
# **why?** # **why?**
# Clean up the main branch after a release branch is cut and automate cutting the release branch. # Generally reduces the workload of engineers and reduces error. Allow automation.
# Generally reduces the workload of engineers and reducing error.
# **when?** # **when?**
# This will run when called manually or when triggered in another workflow. # This will run when called manually.
# Example Usage including required permissions: TODO: update once finalized
# permissions:
# contents: read
# pull-requests: write
#
# name: Cut Release Branch
# jobs:
# changelog:
# uses: dbt-labs/actions/.github/workflows/cut-release-branch.yml@main
# with:
# new_branch_name: 1.7.latest
# PR_title: "Cleanup main after cutting new 1.7.latest branch"
# PR_body: "All adapter PRs will fail CI until the dbt-core PR has been merged due to release version conflicts."
# secrets:
# FISHTOWN_BOT_PAT: ${{ secrets.FISHTOWN_BOT_PAT }}
# TODOs
# add note to eventually commit changes directly and bypass checks - same as release - when we move to this model run test action after merge
name: Cut new release branch name: Cut new release branch
run-name: "Cutting New Branch: ${{ inputs.new_branch_name }}"
on: on:
workflow_dispatch: workflow_dispatch:
inputs: inputs:
new_branch_name: version_to_bump_main:
description: "The full name of the new branch (ex. 1.5.latest)" description: 'The alpha version main should bump to (ex. 1.6.0a1)'
required: true
new_branch_name:
description: 'The full name of the new branch (ex. 1.5.latest)'
required: true required: true
type: string
defaults: defaults:
run: run:
@@ -46,346 +27,15 @@ defaults:
permissions: permissions:
contents: write contents: write
pull-requests: write
env:
PYTHON_TARGET_VERSION: "3.10"
PR_TITLE: "Cleanup main after cutting new ${{ inputs.new_branch_name }} branch"
PR_BODY: "All adapter PRs will fail CI until the dbt-core PR has been merged due to release version conflicts."
jobs: jobs:
prep_work: cut_branch:
name: "Prep Work" name: "Cut branch and clean up main for dbt-core"
runs-on: ubuntu-latest uses: dbt-labs/actions/.github/workflows/cut-release-branch.yml@main
steps: with:
- name: "[DEBUG] Print Inputs" version_to_bump_main: ${{ inputs.version_to_bump_main }}
run: | new_branch_name: ${{ inputs.new_branch_name }}
echo "new_branch_name: ${{ inputs.new_branch_name }}" PR_title: "Cleanup main after cutting new ${{ inputs.new_branch_name }} branch"
echo "PR_title: ${{ env.PR_TITLE }}" PR_body: "All adapter PRs will fail CI until the dbt-core PR has been merged due to release version conflicts."
echo "PR_body: ${{ env.PR_BODY }}" secrets:
FISHTOWN_BOT_PAT: ${{ secrets.FISHTOWN_BOT_PAT }}
create_temp_branch:
name: "Create Temp branch off main"
runs-on: ubuntu-latest
outputs:
temp_branch_name: ${{ steps.variables.outputs.BRANCH_NAME }}
steps:
- name: "Set Branch Value"
id: variables
run: |
echo "BRANCH_NAME=cutting_release_branch/main_cleanup_$GITHUB_RUN_ID" >> $GITHUB_OUTPUT
- name: "Checkout ${{ github.repository }}"
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
with:
ref: "main"
token: ${{ secrets.FISHTOWN_BOT_PAT }}
- name: "Create PR Branch"
run: |
user="Github Build Bot"
email="buildbot@fishtownanalytics.com"
git config user.name "$user"
git config user.email "$email"
git checkout -b ${{ steps.variables.outputs.BRANCH_NAME }}
git push --set-upstream origin ${{ steps.variables.outputs.BRANCH_NAME }}
- name: "[Notification] Temp branch created"
run: |
message="Temp branch ${{ steps.variables.outputs.BRANCH_NAME }} created"
echo "::notice title="Temporary branch created": $title::$message"
cleanup_changelog:
name: "Clean Up Changelog"
needs: ["create_temp_branch"]
runs-on: ubuntu-latest
outputs:
next-version: ${{ steps.semver-current.outputs.next-minor-alpha-version }}
steps:
- name: "Checkout ${{ github.repository }}"
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
with:
ref: ${{ needs.create_temp_branch.outputs.temp_branch_name }}
token: ${{ secrets.FISHTOWN_BOT_PAT }}
- name: "Add Homebrew To PATH"
run: |
echo "/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin" >> $GITHUB_PATH
- name: "Install Homebrew Packages"
run: |
brew install pre-commit
brew tap miniscruff/changie https://github.com/miniscruff/changie
brew install changie
- name: "Check Current Version In Code"
id: determine_version
run: |
current_version=$(grep '^version = ' core/pyproject.toml | sed 's/version = "\(.*\)"/\1/')
echo "current_version=$current_version" >> $GITHUB_OUTPUT
- name: "[Notification] Check Current Version In Code"
run: |
message="The current version is ${{ steps.determine_version.outputs.current_version }}"
echo "::notice title="Version Bump Check": $title::$message"
- name: "Parse Current Version Into Parts for Changelog Directories"
id: semver-current
uses: dbt-labs/actions/parse-semver@main
with:
version: ${{ steps.determine_version.outputs.current_version }}
- name: "[Notification] Next Alpha Version"
run: |
message="The next alpha version is ${{ steps.semver-current.outputs.next-minor-alpha-version }}"
echo "::notice title="Version Bump Check": $title::$message"
- name: "Delete Unreleased Changelog YAMLs"
# removal fails if no files exist. OK to continue since we're just cleaning up the files.
continue-on-error: true
run: |
rm .changes/unreleased/*.yaml || true
- name: "Delete Pre Release Changelogs and YAMLs"
# removal fails if no files exist. OK to continue since we're just cleaning up the files.
continue-on-error: true
run: |
rm .changes/${{ steps.semver-current.outputs.base-version }}/*.yaml || true
rm .changes/${{ steps.semver-current.outputs.major }}.${{ steps.semver-current.outputs.minor }}.*.md || true
- name: "Cleanup CHANGELOG.md"
run: |
changie merge
- name: "Commit Changelog Cleanup to Branch"
run: |
user="Github Build Bot"
email="buildbot@fishtownanalytics.com"
git config user.name "$user"
git config user.email "$email"
git status
git add .
git commit -m "Clean up changelog on main"
git push
- name: "[Notification] Changelog cleaned up"
run: |
message="Changelog on ${{ needs.create_temp_branch.outputs.temp_branch_name }} cleaned up"
echo "::notice title="Changelog cleaned up": $title::$message"
bump_version:
name: "Bump to next minor version"
needs: ["cleanup_changelog", "create_temp_branch"]
runs-on: ubuntu-latest
steps:
- name: "Checkout ${{ github.repository }}"
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
with:
ref: ${{ needs.create_temp_branch.outputs.temp_branch_name }}
token: ${{ secrets.FISHTOWN_BOT_PAT }}
- name: "Set up Python - ${{ env.PYTHON_TARGET_VERSION }}"
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # actions/setup-python@v5
with:
python-version: "${{ env.PYTHON_TARGET_VERSION }}"
- name: "Install Spark Dependencies"
if: ${{ contains(github.repository, 'dbt-labs/dbt-spark') }}
run: |
sudo apt-get update
sudo apt-get install libsasl2-dev
- name: "Install Python Dependencies"
run: |
python -m pip install --upgrade pip
python -m pip install hatch
- name: "Bump Version To ${{ needs.cleanup_changelog.outputs.next-version }}"
run: |
cd core
hatch version ${{ needs.cleanup_changelog.outputs.next-version }}
hatch run dev-req
dbt --version
- name: "Commit Version Bump to Branch"
run: |
user="Github Build Bot"
email="buildbot@fishtownanalytics.com"
git config user.name "$user"
git config user.email "$email"
git status
git add .
git commit -m "Bumping version to ${{ needs.cleanup_changelog.outputs.next-version }}"
git push
- name: "[Notification] Version Bump completed"
run: |
message="Version on ${{ needs.create_temp_branch.outputs.temp_branch_name }} bumped to ${{ needs.cleanup_changelog.outputs.next-version }}"
echo "::notice title="Version Bump Completed": $title::$message"
cleanup:
name: "Cleanup Code Quality"
needs: ["create_temp_branch", "bump_version"]
runs-on: ubuntu-latest
steps:
- name: "Checkout ${{ github.repository }}"
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
with:
ref: ${{ needs.create_temp_branch.outputs.temp_branch_name }}
token: ${{ secrets.FISHTOWN_BOT_PAT }}
- name: "Add Homebrew To PATH"
run: |
echo "/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin" >> $GITHUB_PATH
- name: "brew install pre-commit"
run: |
brew install pre-commit
# this step will fail on whitespace errors but also correct them
- name: "Cleanup - Remove Trailing Whitespace Via Pre-commit"
continue-on-error: true
run: |
pre-commit run trailing-whitespace --files CHANGELOG.md .changes/* || true
# this step will fail on newline errors but also correct them
- name: "Cleanup - Remove Extra Newlines Via Pre-commit"
continue-on-error: true
run: |
pre-commit run end-of-file-fixer --files CHANGELOG.md .changes/* || true
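# Both hooks exit non-zero whenever they modify files, which is why these steps use
# continue-on-error and `|| true`; the fixes they apply are captured by the commit
# step that follows.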
- name: "Commit Version Bump to Branch"
run: |
user="Github Build Bot"
email="buildbot@fishtownanalytics.com"
git config user.name "$user"
git config user.email "$email"
git status
git add .
git commit -m "Code quality cleanup"
git push
open_pr:
name: "Open PR Against main"
needs: ["cleanup_changelog", "create_temp_branch", "cleanup"]
runs-on: ubuntu-latest
outputs:
pr_number: ${{ steps.create_pr.outputs.pull-request-number }}
steps:
- name: "Checkout ${{ github.repository }}"
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
with:
ref: ${{ needs.create_temp_branch.outputs.temp_branch_name }}
token: ${{ secrets.FISHTOWN_BOT_PAT }}
- name: "Determine PR Title"
id: pr_title
run: |
echo "pr_title=${{ env.PR_TITLE }}" >> $GITHUB_OUTPUT
if [ "${{ env.PR_TITLE }}" == "" ]; then
echo "pr_title='Clean up changelogs and bump to version ${{ needs.cleanup_changelog.outputs.next-version }}'" >> $GITHUB_OUTPUT
fi
- name: "Determine PR Body"
id: pr_body
run: |
echo "pr_body=${{ env.PR_BODY }}" >> $GITHUB_OUTPUT
if [ "${{ env.PR_BODY }}" == "" ]; then
echo "pr_body='Clean up changelogs and bump to version ${{ needs.cleanup_changelog.outputs.next-version }}'" >> $GITHUB_OUTPUT
fi
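# Both steps write their output twice on purpose: when the PR_TITLE / PR_BODY env
# vars are empty, the second write supplies a default value.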
- name: "Add Branch Details"
id: pr_body_branch
run: |
branch_details="The workflow that generated this PR also created a new branch: ${{ inputs.new_branch_name }}"
full_body="${{ steps.pr_body.outputs.pr_body }} $branch_details"
echo "pr_full_body=$full_body" >> $GITHUB_OUTPUT
- name: "Open Pull Request"
id: create_pr
run: |
pr_url=$(gh pr create -B main -H ${{ needs.create_temp_branch.outputs.temp_branch_name }} -l "Skip Changelog" -t "${{ steps.pr_title.outputs.pr_title }}" -b "${{ steps.pr_body_branch.outputs.pr_full_body }}")
echo "pr_url=$pr_url" >> $GITHUB_OUTPUT
env:
GH_TOKEN: ${{ secrets.FISHTOWN_BOT_PAT }}
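# gh pr create flags: -B sets the base branch (main), -H the head branch, -l applies
# the "Skip Changelog" label, and -t / -b supply the title and body resolved in the
# previous steps.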
- name: "[Notification] Pull Request Opened"
run: |
message="PR opened at ${{ steps.create_pr.outputs.pr_url }}"
echo "::notice title="Pull Request Opened": $title::$message"
cut_new_branch:
# don't cut the new branch until we're done opening the PR against main
name: "Cut New Branch ${{ inputs.new_branch_name }}"
needs: [open_pr]
runs-on: ubuntu-latest
steps:
- name: "Checkout ${{ github.repository }}"
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
with:
token: ${{ secrets.FISHTOWN_BOT_PAT }}
fetch-depth: 0
- name: "Ensure New Branch Does Not Exist"
id: check_new_branch
run: |
title="Check New Branch Existence"
if git show-ref --quiet ${{ inputs.new_branch_name }}; then
message="Branch ${{ inputs.new_branch_name }} already exists. Exiting."
echo "::error $title::$message"
exit 1
fi
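# git show-ref exits 0 only when a matching ref already exists, so the job aborts
# here rather than overwriting an existing release branch.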
- name: "Create New Release Branch"
run: |
git checkout -b ${{ inputs.new_branch_name }}
- name: "Push up New Branch"
run: |
# Data for commit
user="Github Build Bot"
email="buildbot@fishtownanalytics.com"
git config user.name "$user"
git config user.email "$email"
git push --set-upstream origin ${{ inputs.new_branch_name }}
- name: "[Notification] New branch created"
run: |
message="New branch ${{ inputs.new_branch_name }} created"
echo "::notice title="New branch created": $title::$message"
- name: "Bump dependencies via script"
# This bumps the dependency on dbt-core in the adapters
if: ${{ !contains(github.repository, 'dbt-core') }}
run: |
echo ${{ github.repository }}
echo "running update_dependencies script"
bash ${GITHUB_WORKSPACE}/.github/scripts/update_dependencies.sh ${{ inputs.new_branch_name }}
commit_message="bumping .latest branch variable in update_dependencies.sh to ${{ inputs.new_branch_name }}"
git status
git add .
git commit -m "$commit_message"
git push
- name: "Bump env variable via script"
# bumps the RELEASE_BRANCH variable in nightly-release.yml in adapters
if: ${{ !contains(github.repository, 'dbt-core') }}
run: |
file="./.github/scripts/update_release_branch.sh"
if test -f "$file"; then
echo ${{ github.repository }}
echo "running some script yet to be written now"
bash $file ${{ inputs.new_branch_name }}
commit_message="updating env variable to ${{ inputs.new_branch_name }} in nightly-release.yml"
git status
git add .
git commit -m "$commit_message"
git push
else
echo "no $file seen skipping step"
fi


@@ -20,8 +20,6 @@ on:
- "*.latest" - "*.latest"
- "releases/*" - "releases/*"
pull_request: pull_request:
merge_group:
types: [checks_requested]
workflow_dispatch: workflow_dispatch:
permissions: read-all permissions: read-all
@@ -49,33 +47,27 @@ jobs:
steps: steps:
- name: Check out the repository - name: Check out the repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4 uses: actions/checkout@v4
- name: Set up Python - name: Set up Python
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # actions/setup-python@v6 uses: actions/setup-python@v5
with: with:
python-version: "3.10" python-version: '3.9'
- name: Install python dependencies - name: Install python dependencies
run: | run: |
python -m pip install --user --upgrade pip python -m pip install --user --upgrade pip
python -m pip --version python -m pip --version
python -m pip install hatch make dev
cd core make dev_req
hatch run setup mypy --version
dbt --version
- name: Verify dbt installation
run: |
cd core
hatch run dbt --version
- name: Run pre-commit hooks - name: Run pre-commit hooks
run: | run: pre-commit run --all-files --show-diff-on-failure
cd core
hatch run code-quality
unit: unit:
name: "unit test / python ${{ matrix.python-version }}" name: unit test / python ${{ matrix.python-version }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
timeout-minutes: 10 timeout-minutes: 10
@@ -83,14 +75,17 @@ jobs:
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
python-version: ["3.10", "3.11", "3.12", "3.13"] python-version: [ "3.9", "3.10", "3.11", "3.12" ]
env:
TOXENV: "unit"
steps: steps:
- name: Check out the repository - name: Check out the repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4 uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }} - name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # actions/setup-python@v6 uses: actions/setup-python@v5
with: with:
python-version: ${{ matrix.python-version }} python-version: ${{ matrix.python-version }}
@@ -98,15 +93,15 @@ jobs:
run: | run: |
python -m pip install --user --upgrade pip python -m pip install --user --upgrade pip
python -m pip --version python -m pip --version
python -m pip install hatch python -m pip install tox
hatch --version tox --version
- name: Run unit tests - name: Run unit tests
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # nick-fields/retry@v3 uses: nick-fields/retry@v3
with: with:
timeout_minutes: 10 timeout_minutes: 10
max_attempts: 3 max_attempts: 3
command: cd core && hatch run ci:unit-tests command: tox -e unit
- name: Get current date - name: Get current date
if: always() if: always()
@@ -117,11 +112,10 @@ jobs:
- name: Upload Unit Test Coverage to Codecov - name: Upload Unit Test Coverage to Codecov
if: ${{ matrix.python-version == '3.11' }} if: ${{ matrix.python-version == '3.11' }}
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # codecov/codecov-action@v5 uses: codecov/codecov-action@v5
with: with:
token: ${{ secrets.CODECOV_TOKEN }} token: ${{ secrets.CODECOV_TOKEN }}
flags: unit flags: unit
fail_ci_if_error: false
integration-metadata: integration-metadata:
name: integration test metadata generation name: integration test metadata generation
@@ -146,7 +140,7 @@ jobs:
- name: generate include - name: generate include
id: generate-include id: generate-include
run: | run: |
INCLUDE=('"python-version":"3.10","os":"windows-latest"' '"python-version":"3.10","os":"macos-14"' ) INCLUDE=('"python-version":"3.9","os":"windows-latest"' '"python-version":"3.9","os":"macos-14"' )
INCLUDE_GROUPS="[" INCLUDE_GROUPS="["
for include in ${INCLUDE[@]}; do for include in ${INCLUDE[@]}; do
for group in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do for group in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
@@ -158,102 +152,7 @@ jobs:
echo "include=${INCLUDE_GROUPS}" echo "include=${INCLUDE_GROUPS}"
echo "include=${INCLUDE_GROUPS}" >> $GITHUB_OUTPUT echo "include=${INCLUDE_GROUPS}" >> $GITHUB_OUTPUT
integration-postgres: integration:
name: "(${{ matrix.split-group }}) integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}"
runs-on: ${{ matrix.os }}
timeout-minutes: 30
needs:
- integration-metadata
strategy:
fail-fast: false
matrix:
python-version: ["3.10", "3.11", "3.12", "3.13"]
os: ["ubuntu-latest"]
split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
env:
DBT_INVOCATION_ENV: github-actions
DBT_TEST_USER_1: dbt_test_user_1
DBT_TEST_USER_2: dbt_test_user_2
DBT_TEST_USER_3: dbt_test_user_3
DD_CIVISIBILITY_AGENTLESS_ENABLED: true
DD_API_KEY: ${{ secrets.DATADOG_API_KEY }}
DD_SITE: datadoghq.com
DD_ENV: ci
DD_SERVICE: ${{ github.event.repository.name }}
services:
# Label used to access the service container
postgres:
# Docker Hub image
image: postgres
# Provide the password for postgres
env:
POSTGRES_PASSWORD: password
POSTGRES_USER: postgres
# Set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
steps:
- name: Check out the repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # actions/setup-python@v6
with:
python-version: ${{ matrix.python-version }}
- name: Run postgres setup script
run: |
./scripts/setup_db.sh
env:
PGHOST: localhost
PGPORT: 5432
PGPASSWORD: password
- name: Install python tools
run: |
python -m pip install --user --upgrade pip
python -m pip --version
python -m pip install hatch
hatch --version
- name: Run integration tests
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # nick-fields/retry@v3
with:
timeout_minutes: 30
max_attempts: 3
shell: bash
command: cd core && hatch run ci:integration-tests -- --ddtrace --splits ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }} --group ${{ matrix.split-group }}
- name: Get current date
if: always()
id: date
run: |
CURRENT_DATE=$(date +'%Y-%m-%dT%H_%M_%S') # no colons allowed for artifacts
echo "date=$CURRENT_DATE" >> $GITHUB_OUTPUT
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # actions/upload-artifact@v4
if: always()
with:
name: logs_${{ matrix.python-version }}_${{ matrix.os }}_${{ matrix.split-group }}_${{ steps.date.outputs.date }}
path: ./logs
- name: Upload Integration Test Coverage to Codecov
if: ${{ matrix.python-version == '3.11' }}
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}
flags: integration
fail_ci_if_error: false
integration-mac-windows:
name: (${{ matrix.split-group }}) integration test / python ${{ matrix.python-version }} / ${{ matrix.os }} name: (${{ matrix.split-group }}) integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}
runs-on: ${{ matrix.os }} runs-on: ${{ matrix.os }}
@@ -263,9 +162,13 @@ jobs:
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
# already includes split group and runs mac + windows python-version: [ "3.9", "3.10", "3.11", "3.12" ]
os: [ubuntu-latest]
split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
# this include is where we add the mac and windows os
include: ${{ fromJson(needs.integration-metadata.outputs.include) }} include: ${{ fromJson(needs.integration-metadata.outputs.include) }}
env: env:
TOXENV: integration
DBT_INVOCATION_ENV: github-actions DBT_INVOCATION_ENV: github-actions
DBT_TEST_USER_1: dbt_test_user_1 DBT_TEST_USER_1: dbt_test_user_1
DBT_TEST_USER_2: dbt_test_user_2 DBT_TEST_USER_2: dbt_test_user_2
@@ -278,21 +181,20 @@ jobs:
steps: steps:
- name: Check out the repository - name: Check out the repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4 uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }} - name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # actions/setup-python@v6 uses: actions/setup-python@v5
with: with:
python-version: ${{ matrix.python-version }} python-version: ${{ matrix.python-version }}
- name: "Set up postgres (linux)"
if: runner.os == 'Linux'
run: make setup-db
- name: Set up postgres (macos) - name: Set up postgres (macos)
if: runner.os == 'macOS' if: runner.os == 'macOS'
uses: ./.github/actions/setup-postgres-macos
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # nick-fields/retry@v3
with:
timeout_minutes: 10
max_attempts: 3
command: ./scripts/setup_db.sh
- name: Set up postgres (windows) - name: Set up postgres (windows)
if: runner.os == 'Windows' if: runner.os == 'Windows'
@@ -302,16 +204,17 @@ jobs:
run: | run: |
python -m pip install --user --upgrade pip python -m pip install --user --upgrade pip
python -m pip --version python -m pip --version
python -m pip install hatch python -m pip install tox
hatch --version tox --version
- name: Run integration tests - name: Run integration tests
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # nick-fields/retry@v3 uses: nick-fields/retry@v3
with: with:
timeout_minutes: 30 timeout_minutes: 30
max_attempts: 3 max_attempts: 3
shell: bash command: tox -- --ddtrace
command: cd core && hatch run ci:integration-tests -- --ddtrace --splits ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }} --group ${{ matrix.split-group }} env:
PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}
- name: Get current date - name: Get current date
if: always() if: always()
@@ -320,7 +223,7 @@ jobs:
CURRENT_DATE=$(date +'%Y-%m-%dT%H_%M_%S') # no colons allowed for artifacts CURRENT_DATE=$(date +'%Y-%m-%dT%H_%M_%S') # no colons allowed for artifacts
echo "date=$CURRENT_DATE" >> $GITHUB_OUTPUT echo "date=$CURRENT_DATE" >> $GITHUB_OUTPUT
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # actions/upload-artifact@v4 - uses: actions/upload-artifact@v4
if: always() if: always()
with: with:
name: logs_${{ matrix.python-version }}_${{ matrix.os }}_${{ matrix.split-group }}_${{ steps.date.outputs.date }} name: logs_${{ matrix.python-version }}_${{ matrix.os }}_${{ matrix.split-group }}_${{ steps.date.outputs.date }}
@@ -328,20 +231,19 @@ jobs:
- name: Upload Integration Test Coverage to Codecov - name: Upload Integration Test Coverage to Codecov
if: ${{ matrix.python-version == '3.11' }} if: ${{ matrix.python-version == '3.11' }}
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # codecov/codecov-action@v5 uses: codecov/codecov-action@v5
with: with:
token: ${{ secrets.CODECOV_TOKEN }} token: ${{ secrets.CODECOV_TOKEN }}
flags: integration flags: integration
fail_ci_if_error: false
integration-report: integration-report:
if: ${{ always() }} if: ${{ always() }}
name: Integration Test Suite name: Integration Test Suite
runs-on: ubuntu-latest runs-on: ubuntu-latest
needs: [integration-mac-windows, integration-postgres] needs: integration
steps: steps:
- name: "Integration Tests Failed" - name: "Integration Tests Failed"
if: ${{ contains(needs.integration-mac-windows.result, 'failure') || contains(needs.integration-mac-windows.result, 'cancelled') || contains(needs.integration-postgres.result, 'failure') || contains(needs.integration-postgres.result, 'cancelled') }} if: ${{ contains(needs.integration.result, 'failure') || contains(needs.integration.result, 'cancelled') }}
# when this is true the next step won't execute # when this is true the next step won't execute
run: | run: |
echo "::notice title='Integration test suite failed'" echo "::notice title='Integration test suite failed'"
@@ -358,17 +260,17 @@ jobs:
steps: steps:
- name: Check out the repository - name: Check out the repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4 uses: actions/checkout@v4
- name: Set up Python - name: Set up Python
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # actions/setup-python@v6 uses: actions/setup-python@v5
with: with:
python-version: "3.10" python-version: '3.9'
- name: Install python dependencies - name: Install python dependencies
run: | run: |
python -m pip install --user --upgrade pip python -m pip install --user --upgrade pip
python -m pip install --upgrade hatch twine check-wheel-contents python -m pip install --upgrade setuptools wheel twine check-wheel-contents
python -m pip --version python -m pip --version
- name: Build distributions - name: Build distributions
@@ -377,7 +279,27 @@ jobs:
- name: Show distributions - name: Show distributions
run: ls -lh dist/ run: ls -lh dist/
- name: Check and verify distributions - name: Check distribution descriptions
run: | run: |
cd core twine check dist/*
hatch run build:check-all
- name: Check wheel contents
run: |
check-wheel-contents dist/*.whl --ignore W007,W008
- name: Install wheel distributions
run: |
find ./dist/*.whl -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/
- name: Check wheel distributions
run: |
dbt --version
- name: Install source distributions
# ignore dbt-1.0.0, which intentionally raises an error when installed from source
run: |
find ./dist/*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/
- name: Check source distributions
run: |
dbt --version

265
.github/workflows/model_performance.yml vendored Normal file
View File

@@ -0,0 +1,265 @@
# **what?**
# This workflow models the performance characteristics of a point in time in dbt.
# It runs specific dbt commands on committed projects multiple times to create and
# commit information about the distribution to the current branch. For more information
# see the readme in the performance module at /performance/README.md.
#
# **why?**
# When developing new features, we can take quick performance samples and compare
# them against the committed baseline measurements produced by this workflow to detect
# some performance regressions at development time before they reach users.
#
# **when?**
# This is only run once directly after each release (for non-prereleases). If for some
# reason the results of a run are not satisfactory, it can also be triggered manually.
name: Model Performance Characteristics
on:
# runs after non-prereleases are published.
release:
types: [released]
# run manually from the actions tab
workflow_dispatch:
inputs:
release_id:
description: 'dbt version to model (must be non-prerelease in PyPI)'
type: string
required: true
env:
RUNNER_CACHE_PATH: performance/runner/target/release/runner
# both jobs need to write
permissions:
contents: write
pull-requests: write
jobs:
set-variables:
name: Setting Variables
runs-on: ubuntu-latest
outputs:
cache_key: ${{ steps.variables.outputs.cache_key }}
release_id: ${{ steps.semver.outputs.base-version }}
release_branch: ${{ steps.variables.outputs.release_branch }}
steps:
# explicitly checkout the performance runner from main regardless of which
# version we are modeling.
- name: Checkout
uses: actions/checkout@v4
with:
ref: main
- name: Parse version into parts
id: semver
uses: dbt-labs/actions/parse-semver@v1
with:
version: ${{ github.event.inputs.release_id || github.event.release.tag_name }}
# collect all the variables that need to be used in subsequent jobs
- name: Set variables
id: variables
run: |
# create a cache key that will be used in the next job. without this the
# next job would have to checkout from main and hash the files itself.
echo "cache_key=${{ runner.os }}-${{ hashFiles('performance/runner/Cargo.toml')}}-${{ hashFiles('performance/runner/src/*') }}" >> $GITHUB_OUTPUT
branch_name="${{steps.semver.outputs.major}}.${{steps.semver.outputs.minor}}.latest"
echo "release_branch=$branch_name" >> $GITHUB_OUTPUT
echo "release branch is inferred to be ${branch_name}"
latest-runner:
name: Build or Fetch Runner
runs-on: ubuntu-latest
needs: [set-variables]
env:
RUSTFLAGS: "-D warnings"
steps:
- name: '[DEBUG] print variables'
run: |
echo "all variables defined in set-variables"
echo "cache_key: ${{ needs.set-variables.outputs.cache_key }}"
echo "release_id: ${{ needs.set-variables.outputs.release_id }}"
echo "release_branch: ${{ needs.set-variables.outputs.release_branch }}"
# explicitly checkout the performance runner from main regardless of which
# version we are modeling.
- name: Checkout
uses: actions/checkout@v4
with:
ref: main
# attempts to access a previously cached runner
- uses: actions/cache@v4
id: cache
with:
path: ${{ env.RUNNER_CACHE_PATH }}
key: ${{ needs.set-variables.outputs.cache_key }}
- name: Fetch Rust Toolchain
if: steps.cache.outputs.cache-hit != 'true'
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- name: Add fmt
if: steps.cache.outputs.cache-hit != 'true'
run: rustup component add rustfmt
- name: Cargo fmt
if: steps.cache.outputs.cache-hit != 'true'
uses: actions-rs/cargo@v1
with:
command: fmt
args: --manifest-path performance/runner/Cargo.toml --all -- --check
- name: Test
if: steps.cache.outputs.cache-hit != 'true'
uses: actions-rs/cargo@v1
with:
command: test
args: --manifest-path performance/runner/Cargo.toml
- name: Build (optimized)
if: steps.cache.outputs.cache-hit != 'true'
uses: actions-rs/cargo@v1
with:
command: build
args: --release --manifest-path performance/runner/Cargo.toml
# the cache action automatically caches this binary at the end of the job
model:
# depends on `latest-runner` as a separate job so that failures in this job do not prevent
# a successfully tested and built binary from being cached.
needs: [set-variables, latest-runner]
name: Model a release
runs-on: ubuntu-latest
steps:
- name: '[DEBUG] print variables'
run: |
echo "all variables defined in set-variables"
echo "cache_key: ${{ needs.set-variables.outputs.cache_key }}"
echo "release_id: ${{ needs.set-variables.outputs.release_id }}"
echo "release_branch: ${{ needs.set-variables.outputs.release_branch }}"
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: "3.9"
- name: Install dbt
run: pip install dbt-postgres==${{ needs.set-variables.outputs.release_id }}
- name: Install Hyperfine
run: wget https://github.com/sharkdp/hyperfine/releases/download/v1.11.0/hyperfine_1.11.0_amd64.deb && sudo dpkg -i hyperfine_1.11.0_amd64.deb
# explicitly checkout main to get the latest project definitions
- name: Checkout
uses: actions/checkout@v4
with:
ref: main
# this was built in the previous job so it will be there.
- name: Fetch Runner
uses: actions/cache@v4
id: cache
with:
path: ${{ env.RUNNER_CACHE_PATH }}
key: ${{ needs.set-variables.outputs.cache_key }}
- name: Move Runner
run: mv performance/runner/target/release/runner performance/app
- name: Change Runner Permissions
run: chmod +x ./performance/app
- name: '[DEBUG] ls baseline directory before run'
run: ls -R performance/baselines/
# `${{ github.workspace }}` is used to pass the absolute path
- name: Create directories
run: |
mkdir ${{ github.workspace }}/performance/tmp/
mkdir -p performance/baselines/${{ needs.set-variables.outputs.release_id }}/
# Run modeling, taking 20 samples
- name: Run Measurement
run: |
performance/app model -v ${{ needs.set-variables.outputs.release_id }} -b ${{ github.workspace }}/performance/baselines/ -p ${{ github.workspace }}/performance/projects/ -t ${{ github.workspace }}/performance/tmp/ -n 20
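# Flags as used here: -v is the dbt version being modeled, -b the baselines output
# directory, -p the committed test projects, -t a scratch directory, and -n 20 the
# number of samples per measurement (per the comment above).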
- name: '[DEBUG] ls baseline directory after run'
run: ls -R performance/baselines/
- uses: actions/upload-artifact@v4
with:
name: baseline
path: performance/baselines/${{ needs.set-variables.outputs.release_id }}/
create-pr:
name: Open PR for ${{ matrix.base-branch }}
# depends on `model` as a separate job so that the baseline can be committed to more than one branch
# i.e. release branch and main
needs: [set-variables, latest-runner, model]
runs-on: ubuntu-latest
strategy:
matrix:
include:
- base-branch: refs/heads/main
target-branch: performance-bot/main_${{ needs.set-variables.outputs.release_id }}_${{GITHUB.RUN_ID}}
- base-branch: refs/heads/${{ needs.set-variables.outputs.release_branch }}
target-branch: performance-bot/release_${{ needs.set-variables.outputs.release_id }}_${{GITHUB.RUN_ID}}
steps:
- name: '[DEBUG] print variables'
run: |
echo "all variables defined in set-variables"
echo "cache_key: ${{ needs.set-variables.outputs.cache_key }}"
echo "release_id: ${{ needs.set-variables.outputs.release_id }}"
echo "release_branch: ${{ needs.set-variables.outputs.release_branch }}"
- name: Checkout
uses: actions/checkout@v4
with:
ref: ${{ matrix.base-branch }}
- name: Create PR branch
run: |
git checkout -b ${{ matrix.target-branch }}
git push origin ${{ matrix.target-branch }}
git branch --set-upstream-to=origin/${{ matrix.target-branch }} ${{ matrix.target-branch }}
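# The branch is pushed and its upstream set first so the commit step below has a
# remote branch to push the baseline to.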
- uses: actions/download-artifact@v4
with:
name: baseline
path: performance/baselines/${{ needs.set-variables.outputs.release_id }}
- name: '[DEBUG] ls baselines after artifact download'
run: ls -R performance/baselines/
- name: Commit baseline
uses: EndBug/add-and-commit@v9
with:
add: 'performance/baselines/*'
author_name: 'Github Build Bot'
author_email: 'buildbot@fishtownanalytics.com'
message: 'adding performance baseline for ${{ needs.set-variables.outputs.release_id }}'
push: 'origin origin/${{ matrix.target-branch }}'
- name: Create Pull Request
uses: peter-evans/create-pull-request@v7
with:
author: 'Github Build Bot <buildbot@fishtownanalytics.com>'
base: ${{ matrix.base-branch }}
branch: '${{ matrix.target-branch }}'
title: 'Adding performance modeling for ${{needs.set-variables.outputs.release_id}} to ${{ matrix.base-branch }}'
body: 'Committing perf results for tracking for the ${{needs.set-variables.outputs.release_id}} release'
labels: |
Skip Changelog
Performance


@@ -31,7 +31,7 @@ env:
 jobs:
   aggregate-release-data:
-    runs-on: ${{ vars.UBUNTU_LATEST }}
+    runs-on: ubuntu-latest
     outputs:
       version_number: ${{ steps.nightly-release-version.outputs.number }}
@@ -39,14 +39,14 @@ jobs:
     steps:
       - name: "Checkout ${{ github.repository }} Branch ${{ env.RELEASE_BRANCH }}"
-        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
+        uses: actions/checkout@v4
         with:
           ref: ${{ env.RELEASE_BRANCH }}
       - name: "Get Current Version Number"
         id: version-number-sources
         run: |
-          current_version=$(grep '^version = ' core/dbt/__version__.py | sed 's/version = "\(.*\)"/\1/')
+          current_version=`awk -F"current_version = " '{print $2}' .bumpversion.cfg | tr '\n' ' '`
           echo "current_version=$current_version" >> $GITHUB_OUTPUT
       - name: "Audit Version And Parse Into Parts"
@@ -76,7 +76,7 @@ jobs:
echo "name=${{ env.RELEASE_BRANCH }}" >> $GITHUB_OUTPUT echo "name=${{ env.RELEASE_BRANCH }}" >> $GITHUB_OUTPUT
log-outputs-aggregate-release-data: log-outputs-aggregate-release-data:
runs-on: ${{ vars.UBUNTU_LATEST }} runs-on: ubuntu-latest
needs: [aggregate-release-data] needs: [aggregate-release-data]
steps: steps:

Some files were not shown because too many files have changed in this diff.