Compare commits

16 Commits

Author | SHA1 | Message | Date
Jeremy Cohen | 5cce911842 | Ongoing experiment | 2023-01-29 21:32:19 +01:00
lostmygithubaccount | 158aa81b0c | update per suggestions | 2022-11-23 09:06:33 -08:00
lostmygithubaccount | 5ddb088049 | Merge remote-tracking branch 'origin/main' into cody/ibis | 2022-11-22 21:54:05 -08:00
lostmygithubaccount | 3edc9e53ad | initial implementation based on prql pr | 2022-11-20 17:55:34 -08:00
Maximilian Roos | e0c32f425d | Merge branch 'main' into prql | 2022-10-11 11:18:13 -07:00
Maximilian Roos | 90223ed279 | Merge branch 'main' into prql | 2022-10-06 13:00:37 -07:00
Maximilian Roos | 472940423c | Remove unused PrqlNode & friends | 2022-10-05 18:35:06 -07:00
Maximilian Roos | dddb0bff5a | Merge branch 'main' into prql | 2022-10-05 18:02:20 -07:00
Maximilian Roos | bc8b65095e | Add language on error nodes | 2022-10-05 11:34:15 -07:00
Maximilian Roos | 86eb68f40d | Add test to test_graph.py | 2022-10-05 11:34:15 -07:00
Maximilian Roos | 8eece383ea | flake | 2022-10-05 11:34:15 -07:00
Maximilian Roos | c9572c3106 | Always use the mock method to align the snapshot tests | 2022-10-05 11:34:15 -07:00
Maximilian Roos | ebff2ceb72 | Revert to importing builtins from typing | 2022-10-05 11:34:15 -07:00
Maximilian Roos | 5a8fd1e90d | Ignore types in the import hacks (tests still fail b/c typing_extensions is not installed) | 2022-10-05 11:34:15 -07:00
Maximilian Roos | fa3f17200f | Add a mock return from prql_python | 2022-10-05 11:34:15 -07:00
Maximilian Roos | 506f2c939a | A very-WIP implementation of the PRQL parser | 2022-10-05 11:34:08 -07:00
5522 changed files with 133526 additions and 207484 deletions

.bumpversion.cfg (new file, 39 lines)
View File

@@ -0,0 +1,39 @@
[bumpversion]
current_version = 1.4.0a1
parse = (?P<major>\d+)
\.(?P<minor>\d+)
\.(?P<patch>\d+)
((?P<prekind>a|b|rc)
(?P<pre>\d+) # pre-release version num
)?
serialize =
{major}.{minor}.{patch}{prekind}{pre}
{major}.{minor}.{patch}
commit = False
tag = False
[bumpversion:part:prekind]
first_value = a
optional_value = final
values =
a
b
rc
final
[bumpversion:part:pre]
first_value = 1
[bumpversion:file:core/setup.py]
[bumpversion:file:core/dbt/version.py]
[bumpversion:file:plugins/postgres/setup.py]
[bumpversion:file:plugins/postgres/dbt/adapters/postgres/__version__.py]
[bumpversion:file:docker/Dockerfile]
[bumpversion:file:tests/adapter/setup.py]
[bumpversion:file:tests/adapter/dbt/tests/adapter/__version__.py]
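The new `.bumpversion.cfg` above drives version bumps: the `parse` regex splits a version string into named parts, and the two `serialize` formats rebuild it with or without a pre-release suffix. As a rough, hypothetical illustration (this is not bump2version's own implementation), the round trip behaves roughly like this:

```python
# Sketch only: mimics the parse/serialize settings in the config above.
import re

PARSE = re.compile(
    r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"
    r"((?P<prekind>a|b|rc)(?P<pre>\d+))?"  # optional pre-release suffix
)

def serialize(parts: dict) -> str:
    # The first serialize format applies when prekind/pre are present;
    # otherwise fall back to plain major.minor.patch.
    if parts.get("prekind") and parts.get("pre"):
        return "{major}.{minor}.{patch}{prekind}{pre}".format(**parts)
    return "{major}.{minor}.{patch}".format(**parts)

parts = PARSE.match("1.4.0a1").groupdict()
assert serialize(parts) == "1.4.0a1"
assert serialize(PARSE.match("1.4.0").groupdict()) == "1.4.0"
```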

View File

@@ -3,13 +3,6 @@
For information on prior major and minor releases, see their changelogs:
* [1.10](https://github.com/dbt-labs/dbt-core/blob/1.10.latest/CHANGELOG.md)
* [1.9](https://github.com/dbt-labs/dbt-core/blob/1.9.latest/CHANGELOG.md)
* [1.8](https://github.com/dbt-labs/dbt-core/blob/1.8.latest/CHANGELOG.md)
* [1.7](https://github.com/dbt-labs/dbt-core/blob/1.7.latest/CHANGELOG.md)
* [1.6](https://github.com/dbt-labs/dbt-core/blob/1.6.latest/CHANGELOG.md)
* [1.5](https://github.com/dbt-labs/dbt-core/blob/1.5.latest/CHANGELOG.md)
* [1.4](https://github.com/dbt-labs/dbt-core/blob/1.4.latest/CHANGELOG.md)
* [1.3](https://github.com/dbt-labs/dbt-core/blob/1.3.latest/CHANGELOG.md)
* [1.2](https://github.com/dbt-labs/dbt-core/blob/1.2.latest/CHANGELOG.md)
* [1.1](https://github.com/dbt-labs/dbt-core/blob/1.1.latest/CHANGELOG.md)

View File

@@ -1,6 +1,6 @@
# dbt Core Changelog
- This file provides a full account of all changes to `dbt-core`
- This file provides a full account of all changes to `dbt-core` and `dbt-postgres`
- Changes are listed under the (pre)release in which they first appear. Subsequent releases include changes from previous releases.
- "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
- Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)

View File

@@ -1,6 +0,0 @@
kind: Dependencies
body: Use EventCatcher from dbt-common instead of maintaining a local copy
time: 2025-11-18T15:53:54.284561+05:30
custom:
Author: 3loka
Issue: "12124"

View File

@@ -0,0 +1,7 @@
kind: "Dependency"
body: "Update pathspec requirement from ~=0.9.0 to >=0.9,<0.11 in /core"
time: 2022-09-23T00:06:46.00000Z
custom:
Author: dependabot[bot]
Issue: 4904
PR: 5917

View File

@@ -0,0 +1,7 @@
kind: "Dependency"
body: "Bump black from 22.8.0 to 22.10.0"
time: 2022-10-07T00:08:48.00000Z
custom:
Author: dependabot[bot]
Issue: 4904
PR: 6019

View File

@@ -0,0 +1,7 @@
kind: "Dependency"
body: "Update colorama requirement from <0.4.6,>=0.3.9 to >=0.3.9,<0.4.7 in /core"
time: 2022-10-26T00:09:10.00000Z
custom:
Author: dependabot[bot]
Issue: 4904
PR: 6144

View File

@@ -0,0 +1,7 @@
kind: Docs
body: minor doc correction
time: 2022-09-08T15:41:57.689162-04:00
custom:
Author: andy-clapson
Issue: "5791"
PR: "5684"

View File

@@ -0,0 +1,7 @@
kind: Docs
body: Generate API docs for new CLI interface
time: 2022-10-07T09:06:56.446078-05:00
custom:
Author: stu-k
Issue: "5528"
PR: "6022"

View File

@@ -0,0 +1,6 @@
kind: Docs
time: 2022-10-17T17:14:11.715348-05:00
custom:
Author: paulbenschmidt
Issue: "5880"
PR: "324"

View File

@@ -0,0 +1,7 @@
kind: Docs
body: Fix rendering of sample code for metrics
time: 2022-11-16T15:57:43.204201+01:00
custom:
Author: jtcohen6
Issue: "323"
PR: "346"

View File

@@ -0,0 +1,8 @@
kind: Features
body: Added favor-state flag to optionally favor state nodes even if unselected node
exists
time: 2022-04-08T16:54:59.696564+01:00
custom:
Author: daniel-murray josephberni
Issue: "2968"
PR: "5859"

View File

@@ -0,0 +1,7 @@
kind: Features
body: Proto logging messages
time: 2022-08-17T15:48:57.225267-04:00
custom:
Author: gshank
Issue: "5610"
PR: "5643"

View File

@@ -0,0 +1,7 @@
kind: Features
body: Friendlier error messages when packages.yml is malformed
time: 2022-09-12T12:59:35.121188+01:00
custom:
Author: jared-rimmer
Issue: "5486"
PR: "5812"

View File

@@ -0,0 +1,7 @@
kind: Features
body: Migrate dbt-utils current_timestamp macros into core + adapters
time: 2022-09-14T09:56:25.97818-07:00
custom:
Author: colin-rogers-dbt
Issue: "5521"
PR: "5838"

View File

@@ -0,0 +1,7 @@
kind: Features
body: Allow partitions in external tables to be supplied as a list
time: 2022-09-25T21:16:51.051239654+02:00
custom:
Author: pgoslatara
Issue: "5929"
PR: "5930"

View File

@@ -0,0 +1,7 @@
kind: Features
body: extend -f flag shorthand for seed command
time: 2022-10-03T11:07:05.381632-05:00
custom:
Author: dave-connors-3
Issue: "5990"
PR: "5991"

View File

@@ -0,0 +1,8 @@
kind: Features
body: This pulls the profile name from args when constructing a RuntimeConfig in lib.py,
enabling the dbt-server to override the value that's in the dbt_project.yml
time: 2022-11-02T15:00:03.000805-05:00
custom:
Author: racheldaniel
Issue: "6201"
PR: "6202"

View File

@@ -1,6 +0,0 @@
kind: Features
body: Support partial parsing for function nodes
time: 2025-10-06T14:03:52.258104-05:00
custom:
Author: QMalcolm
Issue: "12072"

View File

@@ -1,6 +0,0 @@
kind: Features
body: Allow for defining function arguments with default values
time: 2025-11-17T14:10:53.860178-06:00
custom:
Author: QMalcolm
Issue: "12044"

View File

@@ -1,6 +0,0 @@
kind: Features
body: Raise jsonschema-based deprecation warnings by default
time: 2025-12-01T16:52:09.354436-05:00
custom:
Author: michelleark
Issue: 12240

View File

@@ -1,6 +0,0 @@
kind: Features
body: ':bug: :snowman: Disable unit tests whose model is disabled'
time: 2025-12-03T12:29:26.209248-05:00
custom:
Author: michelleark
Issue: "10540"

View File

@@ -1,6 +0,0 @@
kind: Features
body: Implement config.meta_get and config.meta_require
time: 2025-12-10T20:20:01.354288-05:00
custom:
Author: gshank
Issue: "12012"

View File

@@ -0,0 +1,7 @@
kind: Fixes
body: Account for disabled flags on models in schema files more completely
time: 2022-09-16T10:48:54.162273-05:00
custom:
Author: emmyoop
Issue: "3992"
PR: "5868"

View File

@@ -0,0 +1,7 @@
kind: Fixes
body: Add validation of enabled config for metrics, exposures and sources
time: 2022-10-10T11:32:18.752322-05:00
custom:
Author: emmyoop
Issue: "6030"
PR: "6038"

View File

@@ -0,0 +1,7 @@
kind: Fixes
body: check length of args of python model function before accessing it
time: 2022-10-11T16:07:15.464093-04:00
custom:
Author: chamini2
Issue: "6041"
PR: "6042"

View File

@@ -0,0 +1,8 @@
kind: Fixes
body: Add functors to ensure event types with str-type attributes are initialized
to spec, even when provided non-str type params.
time: 2022-10-16T17:37:42.846683-07:00
custom:
Author: versusfacit
Issue: "5436"
PR: "5874"

View File

@@ -0,0 +1,7 @@
kind: Fixes
body: Allow hooks to fail without halting execution flow
time: 2022-11-07T09:53:14.340257-06:00
custom:
Author: ChenyuLInx
Issue: "5625"
PR: "6059"

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: Address Click 8.2+ deprecation warning
time: 2025-09-22T15:17:26.983151-06:00
custom:
Author: edgarrmondragon
Issue: "12038"

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: Include macros in unit test parsing
time: 2025-11-17T14:06:49.518566-05:00
custom:
Author: michelleark nathanskone
Issue: "10157"

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: Allow dbt deps to run when vars lack defaults in dbt_project.yml
time: 2025-11-17T18:50:25.759091+05:30
custom:
Author: 3loka
Issue: "8913"

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: Restore DuplicateResourceNameError for intra-project node name duplication, behind behavior flag `require_unique_project_resource_names`
time: 2025-11-18T17:11:06.454784-05:00
custom:
Author: michelleark
Issue: "12152"

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: Allow the usage of `function` with `--exclude-resource-type` flag
time: 2025-11-19T19:50:34.703236-06:00
custom:
Author: QMalcolm
Issue: "12143"

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: Fix bug where schemas of functions weren't guaranteed to exist
time: 2025-11-24T15:56:29.467004-06:00
custom:
Author: QMalcolm
Issue: "12142"

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: Fix generation of deprecations summary
time: 2025-11-24T15:57:56.544123-08:00
custom:
Author: asiunov
Issue: "12146"

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: ':bug: :snowman: Correctly reference foreign key references when --defer and --state provided'
time: 2025-11-24T17:08:55.387946-05:00
custom:
Author: michelleark
Issue: "11885"

View File

@@ -1,7 +0,0 @@
kind: Fixes
body: ':bug: :snowman: Add exception when using --state and referring to a removed
test'
time: 2025-11-25T12:02:46.635026-05:00
custom:
Author: emmyoop
Issue: "10630"

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: ':bug: :snowman: Stop emitting `NoNodesForSelectionCriteria` three times during `build` command'
time: 2025-11-25T12:20:20.132379-06:00
custom:
Author: QMalcolm
Issue: "11627"

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: ":bug: :snowman: Fix long Python stack traces appearing when package dependencies have incompatible version requirements"
time: 2025-11-27T14:13:08.082542-05:00
custom:
Author: emmyoop
Issue: "12049"

View File

@@ -1,7 +0,0 @@
kind: Fixes
body: ':bug: :snowman: Fixed issue where changing data type size/precision/scale (e.g.,
varchar(3) to varchar(10)) incorrectly triggered a breaking change error fo'
time: 2025-11-27T14:59:29.256274-05:00
custom:
Author: emmyoop
Issue: "11186"

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: ':bug: :snowman: Support unit testing models that depend on sources with the same name'
time: 2025-11-27T17:01:24.193516-05:00
custom:
Author: michelleark
Issue: 11975 10433

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: Fix bug in partial parsing when updating a model with a schema file that is referenced by a singular test
time: 2025-11-28T10:21:29.911147Z
custom:
Author: mattogburke
Issue: "12223"

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: ':bug: :snowman: Avoid retrying successful run-operation commands'
time: 2025-11-28T12:28:38.546261-05:00
custom:
Author: michelleark
Issue: "11850"

View File

@@ -1,7 +0,0 @@
kind: Fixes
body: ':bug: :snowman: Fix `dbt deps --add-package` crash when packages.yml contains `warn-unpinned:
false`'
time: 2025-11-28T16:19:37.608722-05:00
custom:
Author: emmyoop
Issue: "9104"

View File

@@ -1,7 +0,0 @@
kind: Fixes
body: ':bug: :snowman: Improve `dbt deps --add-package` duplicate detection with better
cross-source matching and word boundaries'
time: 2025-11-28T16:31:44.344099-05:00
custom:
Author: emmyoop
Issue: "12239"

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: ':bug: :snowman: Fix false positive deprecation warning of pre/post-hook SQL configs'
time: 2025-12-02T13:37:05.012112-05:00
custom:
Author: michelleark
Issue: "12244"

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: Ensure recent deprecation warnings include event name in message
time: 2025-12-09T17:50:31.334618-06:00
custom:
Author: QMalcolm
Issue: "12264"

View File

@@ -1,6 +0,0 @@
kind: Fixes
body: Improve error message clarity when detecting nodes with space in name
time: 2025-12-10T14:39:35.107841-08:00
custom:
Author: michelleark
Issue: "11835"

View File

@@ -0,0 +1,7 @@
kind: Under the Hood
body: Put black config in explicit config
time: 2022-09-27T19:42:59.241433-07:00
custom:
Author: max-sixty
Issue: "5946"
PR: "5947"

View File

@@ -0,0 +1,7 @@
kind: Under the Hood
body: Added flat_graph attribute to the Manifest class's deepcopy() coverage
time: 2022-09-29T13:44:06.275941-04:00
custom:
Author: peterallenwebb
Issue: "5809"
PR: "5975"

View File

@@ -0,0 +1,7 @@
kind: Under the Hood
body: Add mypy configs so `mypy` passes from CLI
time: 2022-10-05T12:03:10.061263-07:00
custom:
Author: max-sixty
Issue: "5983"
PR: "5983"

View File

@@ -0,0 +1,7 @@
kind: Under the Hood
body: Exception message cleanup.
time: 2022-10-07T09:46:27.682872-05:00
custom:
Author: emmyoop
Issue: "6023"
PR: "6024"

View File

@@ -0,0 +1,7 @@
kind: Under the Hood
body: Add dmypy cache to gitignore
time: 2022-10-07T14:00:44.227644-07:00
custom:
Author: max-sixty
Issue: "6028"
PR: "5978"

View File

@@ -0,0 +1,7 @@
kind: Under the Hood
body: Provide useful errors when the value of 'materialized' is invalid
time: 2022-10-13T18:19:12.167548-04:00
custom:
Author: peterallenwebb
Issue: "5229"
PR: "6025"

View File

@@ -0,0 +1,7 @@
kind: Under the Hood
body: Fixed extra whitespace in strings introduced by black.
time: 2022-10-17T15:15:11.499246-05:00
custom:
Author: luke-bassett
Issue: "1350"
PR: "6086"

View File

@@ -0,0 +1,7 @@
kind: Under the Hood
body: Clean up string formatting
time: 2022-10-17T15:58:44.676549-04:00
custom:
Author: eve-johns
Issue: "6068"
PR: "6082"

View File

@@ -0,0 +1,7 @@
kind: Under the Hood
body: Remove the 'root_path' field from most nodes
time: 2022-10-28T10:48:37.687886-04:00
custom:
Author: gshank
Issue: "6171"
PR: "6172"

View File

@@ -0,0 +1,7 @@
kind: Under the Hood
body: Combine certain logging events with different levels
time: 2022-10-28T11:03:44.887836-04:00
custom:
Author: gshank
Issue: "6173"
PR: "6174"

View File

@@ -0,0 +1,7 @@
kind: Under the Hood
body: Convert threading tests to pytest
time: 2022-11-08T07:45:50.589147-06:00
custom:
Author: stu-k
Issue: "5942"
PR: "6226"

View File

@@ -0,0 +1,7 @@
kind: Under the Hood
body: Convert postgres index tests to pytest
time: 2022-11-08T11:56:33.743042-06:00
custom:
Author: stu-k
Issue: "5770"
PR: "6228"

View File

@@ -0,0 +1,7 @@
kind: Under the Hood
body: Convert use color tests to pytest
time: 2022-11-08T13:31:04.788547-06:00
custom:
Author: stu-k
Issue: "5771"
PR: "6230"

View File

@@ -1,6 +0,0 @@
kind: Under the Hood
body: Update jsonschemas for schema.yml and dbt_project.yml deprecations
time: 2025-11-19T11:01:10.616676-05:00
custom:
Author: michelleark
Issue: "12180"

View File

@@ -1,6 +0,0 @@
kind: Under the Hood
body: Replace setuptools and tox with hatch for build, test, and environment management.
time: 2025-11-21T14:05:15.838252-05:00
custom:
Author: emmyoop
Issue: "12151"

View File

@@ -1,6 +0,0 @@
kind: Under the Hood
body: Add add_catalog_integration call even if we have a pre-existing manifest
time: 2025-12-09T13:18:57.043254-08:00
custom:
Author: colin-rogers-dbt
Issue: "12262"

View File

@@ -4,34 +4,21 @@ headerPath: header.tpl.md
versionHeaderPath: ""
changelogPath: CHANGELOG.md
versionExt: md
envPrefix: "CHANGIE_"
versionFormat: '## dbt-core {{.Version}} - {{.Time.Format "January 02, 2006"}}'
kindFormat: '### {{.Kind}}'
changeFormat: |-
{{- $IssueList := list }}
{{- $changes := splitList " " $.Custom.Issue }}
{{- range $issueNbr := $changes }}
{{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/issues/nbr)" | replace "nbr" $issueNbr }}
{{- $IssueList = append $IssueList $changeLink }}
{{- end -}}
- {{.Body}} ({{ range $index, $element := $IssueList }}{{if $index}}, {{end}}{{$element}}{{end}})
changeFormat: '- {{.Body}} ([#{{.Custom.Issue}}](https://github.com/dbt-labs/dbt-core/issues/{{.Custom.Issue}}), [#{{.Custom.PR}}](https://github.com/dbt-labs/dbt-core/pull/{{.Custom.PR}}))'
kinds:
- label: Breaking Changes
- label: Features
- label: Fixes
- label: Docs
changeFormat: |-
{{- $IssueList := list }}
{{- $changes := splitList " " $.Custom.Issue }}
{{- range $issueNbr := $changes }}
{{- $changeLink := "[dbt-docs/#nbr](https://github.com/dbt-labs/dbt-docs/issues/nbr)" | replace "nbr" $issueNbr }}
{{- $IssueList = append $IssueList $changeLink }}
{{- end -}}
- {{.Body}} ({{ range $index, $element := $IssueList }}{{if $index}}, {{end}}{{$element}}{{end}})
changeFormat: '- {{.Body}} ([dbt-docs/#{{.Custom.Issue}}](https://github.com/dbt-labs/dbt-docs/issues/{{.Custom.Issue}}), [dbt-docs/#{{.Custom.PR}}](https://github.com/dbt-labs/dbt-docs/pull/{{.Custom.PR}}))'
- label: Under the Hood
- label: Dependencies
changeFormat: '- {{.Body}} ({{if ne .Custom.Issue ""}}[#{{.Custom.Issue}}](https://github.com/dbt-labs/dbt-core/issues/{{.Custom.Issue}}), {{end}}[#{{.Custom.PR}}](https://github.com/dbt-labs/dbt-core/pull/{{.Custom.PR}}))'
- label: Security
changeFormat: '- {{.Body}} ({{if ne .Custom.Issue ""}}[#{{.Custom.Issue}}](https://github.com/dbt-labs/dbt-core/issues/{{.Custom.Issue}}), {{end}}[#{{.Custom.PR}}](https://github.com/dbt-labs/dbt-core/pull/{{.Custom.PR}}))'
newlines:
afterChangelogHeader: 1
@@ -41,44 +28,47 @@ newlines:
endOfVersion: 1
custom:
- key: Author
label: GitHub Username(s) (separated by a single space if multiple)
type: string
minLength: 3
- key: Issue
label: GitHub Issue Number (separated by a single space if multiple)
type: string
minLength: 1
- key: Author
label: GitHub Username(s) (separated by a single space if multiple)
type: string
minLength: 3
- key: Issue
label: GitHub Issue Number
type: int
minInt: 1
- key: PR
label: GitHub Pull Request Number
type: int
minInt: 1
footerFormat: |
{{- $contributorDict := dict }}
{{- /* ensure we always skip snyk and dependabot */}}
{{- $bots := list "dependabot[bot]" "snyk-bot"}}
{{- /* any names added to this list should be all lowercase for later matching purposes */}}
{{- $core_team := list "michelleark" "peterallenwebb" "emmyoop" "nathaniel-may" "gshank" "leahwicz" "chenyulinx" "stu-k" "iknox-fa" "versusfacit" "mcknight-42" "jtcohen6" "dependabot[bot]" "snyk-bot" "colin-rogers-dbt" }}
{{- range $change := .Changes }}
{{- $authorList := splitList " " $change.Custom.Author }}
{{- /* loop through all authors for a single changelog */}}
{{- /* loop through all authors for a PR */}}
{{- range $author := $authorList }}
{{- $authorLower := lower $author }}
{{- /* we only want to include non-bot contributors */}}
{{- if not (has $authorLower $bots)}}
{{- $changeList := splitList " " $change.Custom.Author }}
{{- $IssueList := list }}
{{- $changeLink := $change.Kind }}
{{- $changes := splitList " " $change.Custom.Issue }}
{{- range $issueNbr := $changes }}
{{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/issues/nbr)" | replace "nbr" $issueNbr }}
{{- $IssueList = append $IssueList $changeLink }}
{{- end }}
{{- /* check if this contributor has other changes associated with them already */}}
{{- if hasKey $contributorDict $author }}
{{- $contributionList := get $contributorDict $author }}
{{- $contributionList = concat $contributionList $IssueList }}
{{- $contributorDict := set $contributorDict $author $contributionList }}
{{- else }}
{{- $contributionList := $IssueList }}
{{- $contributorDict := set $contributorDict $author $contributionList }}
{{- end }}
{{- end}}
{{- /* we only want to include non-core team contributors */}}
{{- if not (has $authorLower $core_team)}}
{{- /* Docs kind link back to dbt-docs instead of dbt-core PRs */}}
{{- $prLink := $change.Kind }}
{{- if eq $change.Kind "Docs" }}
{{- $prLink = "[dbt-docs/#pr](https://github.com/dbt-labs/dbt-docs/pull/pr)" | replace "pr" $change.Custom.PR }}
{{- else }}
{{- $prLink = "[#pr](https://github.com/dbt-labs/dbt-core/pull/pr)" | replace "pr" $change.Custom.PR }}
{{- end }}
{{- /* check if this contributor has other PRs associated with them already */}}
{{- if hasKey $contributorDict $author }}
{{- $prList := get $contributorDict $author }}
{{- $prList = append $prList $prLink }}
{{- $contributorDict := set $contributorDict $author $prList }}
{{- else }}
{{- $prList := list $prLink }}
{{- $contributorDict := set $contributorDict $author $prList }}
{{- end }}
{{- end}}
{{- end}}
{{- end }}
{{- /* no indentation here for formatting so the final markdown doesn't have unneeded indentations */}}
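The replaced `changeFormat` blocks above do one thing: split the space-separated `Issue` field from each changelog YAML entry and substitute every number into an issue-link template. A rough Python equivalent of that templating logic (illustration only, not part of changie itself) is:

```python
# Illustration only: mirrors the link building in the new changeFormat template.
def render_change(body: str, issues: str, repo: str = "dbt-labs/dbt-core") -> str:
    links = [
        f"[#{n}](https://github.com/{repo}/issues/{n})" for n in issues.split(" ")
    ]
    return f"- {body} ({', '.join(links)})"

# Using the Issue field "11975 10433" from one of the entries above:
print(render_change(
    "Support unit testing models that depend on sources with the same name",
    "11975 10433",
))
# - Support unit testing models that depend on sources with the same name
#   ([#11975](https://github.com/dbt-labs/dbt-core/issues/11975), [#10433](https://github.com/dbt-labs/dbt-core/issues/10433))
```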

View File

@@ -7,8 +7,6 @@ ignore =
W503 # makes Flake8 work like black
W504
E203 # makes Flake8 work like black
E704 # makes Flake8 work like black
E741
E501 # long line checking is done in black
per-file-ignores =
*/__init__.py: F401
exclude = test

.gitattributes (vendored, 6 lines removed)
View File

@@ -1,6 +0,0 @@
core/dbt/task/docs/index.html binary
tests/functional/artifacts/data/state/*/manifest.json binary
core/dbt/docs/build/html/searchindex.js binary
core/dbt/docs/build/html/index.html binary
performance/runner/Cargo.lock binary
core/dbt/events/types_pb2.py binary

.github/CODEOWNERS (vendored, 63 lines changed)
View File

@@ -11,8 +11,65 @@
# As a default for areas with no assignment,
# the core team as a whole will be assigned
* @dbt-labs/core-team
* @dbt-labs/core
### ARTIFACTS
# Changes to GitHub configurations including Actions
/.github/ @leahwicz
/schemas/dbt @dbt-labs/cloud-artifacts
### LANGUAGE
# Language core modules
/core/dbt/config/ @dbt-labs/core-language
/core/dbt/context/ @dbt-labs/core-language
/core/dbt/contracts/ @dbt-labs/core-language
/core/dbt/deps/ @dbt-labs/core-language
/core/dbt/events/ @dbt-labs/core-language # structured logging
/core/dbt/parser/ @dbt-labs/core-language
# Language misc files
/core/dbt/dataclass_schema.py @dbt-labs/core-language
/core/dbt/hooks.py @dbt-labs/core-language
/core/dbt/node_types.py @dbt-labs/core-language
/core/dbt/semver.py @dbt-labs/core-language
### EXECUTION
# Execution core modules
/core/dbt/graph/ @dbt-labs/core-execution
/core/dbt/task/ @dbt-labs/core-execution
# Execution misc files
/core/dbt/compilation.py @dbt-labs/core-execution
/core/dbt/flags.py @dbt-labs/core-execution
/core/dbt/lib.py @dbt-labs/core-execution
/core/dbt/main.py @dbt-labs/core-execution
/core/dbt/profiler.py @dbt-labs/core-execution
/core/dbt/selected_resources.py @dbt-labs/core-execution
/core/dbt/tracking.py @dbt-labs/core-execution
/core/dbt/version.py @dbt-labs/core-execution
### ADAPTERS
# Adapter interface ("base" + "sql" adapter defaults, cache)
/core/dbt/adapters @dbt-labs/core-adapters
# Global project (default macros + materializations), starter project
/core/dbt/include @dbt-labs/core-adapters
# Postgres plugin
/plugins/ @dbt-labs/core-adapters
# Functional tests for adapter plugins
/tests/adapter @dbt-labs/core-adapters
### TESTS
# Overlapping ownership for vast majority of unit + functional tests
# Perf regression testing framework
# This excludes the test project files themselves since those aren't specific
# framework changes (excluded by not setting an owner next to them: no owner)
/performance @nathaniel-may
/performance/projects

View File

@@ -61,8 +61,8 @@ body:
label: Environment
description: |
examples:
- **OS**: Ubuntu 24.04
- **Python**: 3.10.12 (`python3 --version`)
- **OS**: Ubuntu 20.04
- **Python**: 3.9.12 (`python3 --version`)
- **dbt-core**: 1.1.1 (`dbt --version`)
value: |
- OS:

View File

@@ -1,18 +0,0 @@
name: 📄 Code docs
description: Report an issue for markdown files within this repo, such as README, ARCHITECTURE, etc.
title: "[Code docs] <title>"
labels: ["triage"]
body:
- type: markdown
attributes:
value: |
Thanks for taking the time to fill out this code docs issue!
- type: textarea
attributes:
label: Please describe the issue and your proposals.
description: |
Links? References? Anything that will give us more context about the issue you are encountering!
Tip: You can attach images by clicking this area to highlight it and then dragging files in.
validations:
required: false

View File

@@ -1,8 +1,5 @@
blank_issues_enabled: false
contact_links:
- name: Documentation
url: https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose
about: Problems and issues with dbt product documentation hosted on docs.getdbt.com. Issues for markdown files within this repo, such as README, should be opened using the "Code docs" template.
- name: Ask the community for help
url: https://github.com/dbt-labs/docs.getdbt.com/discussions
about: Need help troubleshooting? Check out our guide on how to ask
@@ -12,6 +9,15 @@ contact_links:
- name: Participate in Discussions
url: https://github.com/dbt-labs/dbt-core/discussions
about: Do you have a Big Idea for dbt? Read open discussions, or start a new one
- name: Create an issue for adapters
url: https://github.com/dbt-labs/dbt-adapters/issues/new/choose
about: Report a bug or request a feature for an adapter
- name: Create an issue for dbt-redshift
url: https://github.com/dbt-labs/dbt-redshift/issues/new/choose
about: Report a bug or request a feature for dbt-redshift
- name: Create an issue for dbt-bigquery
url: https://github.com/dbt-labs/dbt-bigquery/issues/new/choose
about: Report a bug or request a feature for dbt-bigquery
- name: Create an issue for dbt-snowflake
url: https://github.com/dbt-labs/dbt-snowflake/issues/new/choose
about: Report a bug or request a feature for dbt-snowflake
- name: Create an issue for dbt-spark
url: https://github.com/dbt-labs/dbt-spark/issues/new/choose
about: Report a bug or request a feature for dbt-spark

View File

@@ -1,67 +0,0 @@
name: 🛠️ Implementation
description: This is an implementation ticket intended for use by the maintainers of dbt-core
title: "[<project>] <title>"
labels: ["user docs"]
body:
- type: markdown
attributes:
value: This is an implementation ticket intended for use by the maintainers of dbt-core
- type: checkboxes
attributes:
label: Housekeeping
description: >
A couple friendly reminders:
1. Remove the `user docs` label if the scope of this work does not require changes to https://docs.getdbt.com/docs: no end-user interface (e.g. yml spec, CLI, error messages, etc) or functional changes
2. Link any blocking issues in the "Blocked on" field under the "Core devs & maintainers" project.
options:
- label: I am a maintainer of dbt-core
required: true
- type: textarea
attributes:
label: Short description
description: |
Describe the scope of the ticket, a high-level implementation approach and any tradeoffs to consider
validations:
required: true
- type: textarea
attributes:
label: Acceptance criteria
description: |
What is the definition of done for this ticket? Include any relevant edge cases and/or test cases
validations:
required: true
- type: textarea
attributes:
label: Suggested Tests
description: |
Provide scenarios to test. Link to existing similar tests if appropriate.
placeholder: |
1. Test with no version specified in the schema file and use selection logic on a versioned model for a specific version. Expect pass.
2. Test with a version specified in the schema file that is not valid. Expect ParsingError.
validations:
required: true
- type: textarea
attributes:
label: Impact to Other Teams
description: |
Will this change impact other teams? Include details of the kinds of changes required (new tests, code changes, related tickets) and _add the relevant `Impact:[team]` label_.
placeholder: |
Example: This change impacts `dbt-redshift` because the tests will need to be modified. The `Impact:[Adapter]` label has been added.
validations:
required: true
- type: textarea
attributes:
label: Will backports be required?
description: |
Will this change need to be backported to previous versions? Add details, possible blockers to backporting and _add the relevant backport labels `backport 1.x.latest`_
placeholder: |
Example: Backport to 1.6.latest, 1.5.latest and 1.4.latest. Since 1.4 isn't using click, the backport may be complicated. The `backport 1.6.latest`, `backport 1.5.latest` and `backport 1.4.latest` labels have been added.
validations:
required: true
- type: textarea
attributes:
label: Context
description: |
Provide the "why", motivation, and alternative approaches considered -- linking to previous refinement issues, spikes and documentation as appropriate
validations:
required: false

View File

@@ -55,8 +55,8 @@ body:
label: Environment
description: |
examples:
- **OS**: Ubuntu 24.04
- **Python**: 3.10.12 (`python3 --version`)
- **OS**: Ubuntu 20.04
- **Python**: 3.9.12 (`python3 --version`)
- **dbt-core (working version)**: 1.1.1 (`dbt --version`)
- **dbt-core (regression version)**: 1.2.0 (`dbt --version`)
value: |

.github/_README.md (vendored, 33 lines changed)
View File

@@ -47,8 +47,7 @@ ___
### How to re-run jobs
- From the UI you can rerun from failure
- You can retrigger the cla check by commenting on the PR with `@cla-bot check`
- Some actions cannot be rerun in the GitHub UI. Namely the snyk checks and the cla check. Snyk checks are rerun by closing and reopening the PR. You can retrigger the cla check by commenting on the PR with `@cla-bot check`
___
@@ -64,12 +63,12 @@ permissions:
contents: read
pull-requests: write
```
### Secrets
- When to use a [Personal Access Token (PAT)](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token) vs the [GITHUB_TOKEN](https://docs.github.com/en/actions/security-guides/automatic-token-authentication) generated for the action?
The `GITHUB_TOKEN` is used by default. In most cases it is sufficient for what you need.
If you expect the workflow to result in a commit that should retrigger workflows, you will need to use a Personal Access Token for the bot to commit the file. When using the GITHUB_TOKEN, the resulting commit will not trigger another GitHub Actions Workflow run. This is due to limitations set by GitHub. See [the docs](https://docs.github.com/en/actions/security-guides/automatic-token-authentication#using-the-github_token-in-a-workflow) for a more detailed explanation.
For example, we must use a PAT in our workflow to commit a new changelog yaml file for bot PRs. Once the file has been committed to the branch, it should retrigger the check to validate that a changelog exists on the PR. Otherwise, it would stay in a failed state since the check would never retrigger.
@@ -106,7 +105,7 @@ Some triggers of note that we use:
```
# **what?**
# Describe what the action does.
# Describe what the action does.
# **why?**
# Why does this action exist?
@@ -120,7 +119,7 @@ Some triggers of note that we use:
```yaml
jobs:
dependency_changelog:
runs-on: ${{ vars.UBUNTU_LATEST }}
runs-on: ubuntu-latest
steps:
- name: Get File Name Timestamp
@@ -139,7 +138,7 @@ Some triggers of note that we use:
id: fp
run: |
FILEPATH=.changes/unreleased/Dependencies-${{ steps.filename_time.outputs.time }}.yaml
echo "FILEPATH=$FILEPATH" >> $GITHUB_OUTPUT
echo "::set-output name=FILEPATH::$FILEPATH"
```
- Print out all variables you will reference as the first step of a job. This allows for easier debugging. The first job should log all inputs. Subsequent jobs should reference outputs of other jobs, if present.
@@ -159,14 +158,14 @@ Some triggers of note that we use:
echo "The build_script_path: ${{ inputs.build_script_path }}"
echo "The s3_bucket_name: ${{ inputs.s3_bucket_name }}"
echo "The package_test_command: ${{ inputs.package_test_command }}"
# collect all the variables that need to be used in subsequent jobs
- name: Set Variables
id: variables
run: |
echo "important_path='performance/runner/Cargo.toml'" >> $GITHUB_OUTPUT
echo "release_id=${{github.event.inputs.release_id}}" >> $GITHUB_OUTPUT
echo "open_prs=${{github.event.inputs.open_prs}}" >> $GITHUB_OUTPUT
echo "::set-output name=important_path::'performance/runner/Cargo.toml'"
echo "::set-output name=release_id::${{github.event.inputs.release_id}}"
echo "::set-output name=open_prs::${{github.event.inputs.open_prs}}"
job2:
needs: [job1]
@@ -188,23 +187,17 @@ ___
- The [GitHub CLI](https://cli.github.com/) is available in the default runners
- Actions run in your context. ie, using an action from the marketplace that uses the GITHUB_TOKEN uses the GITHUB_TOKEN generated by your workflow run.
### Runners
- We dynamically set runners based on repository vars. Admins can view repository vars and reset them. Current values are the following but are subject to change:
- `vars.UBUNTU_LATEST` -> `ubuntu-latest`
- `vars.WINDOWS_LATEST` -> `windows-latest`
- `vars.MACOS_LATEST` -> `macos-14`
### Actions from the Marketplace
- Don't use external actions for things that can easily be accomplished manually.
- Always read through what an external action does before using it! Often an action in the GitHub Actions Marketplace can be replaced with a few lines in bash. This is much more maintainable (and won't change under us) and clear as to what's actually happening. It also prevents any
- Pin actions _we don't control_ to tags.
- Pin actions _we don't control_ to tags.
### Connecting to AWS
- Authenticate with the aws managed workflow
```yaml
- name: Configure AWS credentials from Test account
uses: aws-actions/configure-aws-credentials@v2
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
@@ -215,7 +208,7 @@ ___
```yaml
- name: Copy Artifacts from S3 via CLI
run: aws s3 cp ${{ env.s3_bucket }} . --recursive
run: aws s3 cp ${{ env.s3_bucket }} . --recursive
```
### Testing

View File

@@ -33,9 +33,9 @@ on:
jobs:
build:
runs-on: ${{ vars.UBUNTU_LATEST }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v1
- name: Wrangle latest tag
id: is_latest
uses: ./.github/actions/latest-wrangler

View File

@@ -1,21 +1,20 @@
name: "GitHub package `latest` tag wrangler for containers"
description: "Determines if the published image should include `latest` tags"
name: "Github package 'latest' tag wrangler for containers"
description: "Determines wether or not a given dbt container should be given a bare 'latest' tag (I.E. dbt-core:latest)"
inputs:
package_name:
description: "Package being published (i.e. `dbt-core`, `dbt-redshift`, etc.)"
description: "Package to check (I.E. dbt-core, dbt-redshift, etc)"
required: true
new_version:
description: "SemVer of the package being published (i.e. 1.7.2, 1.8.0a1, etc.)"
description: "Semver of the container being built (I.E. 1.0.4)"
required: true
github_token:
description: "Auth token for GitHub (must have view packages scope)"
gh_token:
description: "Auth token for github (must have view packages scope)"
required: true
outputs:
tags:
description: "A list of tags to associate with this version"
latest:
description: "Whether or not built container should be tagged latest (bool)"
minor_latest:
description: "Whether or not built container should be tagged minor.latest (bool)"
runs:
using: "docker"
image: "Dockerfile"

View File

@@ -3,24 +3,24 @@ on:
workflow_dispatch:
inputs:
package:
description: The package to publish
required: true
description: The package to publish
required: true
version_number:
description: The version number
required: true
description: The version number
required: true
jobs:
build:
runs-on: ${{ vars.UBUNTU_LATEST }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Wrangle latest tag
id: is_latest
uses: ./.github/actions/latest-wrangler
with:
package: ${{ github.event.inputs.package }}
new_version: ${{ github.event.inputs.new_version }}
gh_token: ${{ secrets.GITHUB_TOKEN }}
- name: Print the results
run: |
echo "Is it latest? Survey says: ${{ steps.is_latest.outputs.latest }} !"
- uses: actions/checkout@v1
- name: Wrangle latest tag
id: is_latest
uses: ./.github/actions/latest-wrangler
with:
package: ${{ github.event.inputs.package }}
new_version: ${{ github.event.inputs.new_version }}
gh_token: ${{ secrets.GITHUB_TOKEN }}
- name: Print the results
run: |
echo "Is it latest? Survey says: ${{ steps.is_latest.outputs.latest }} !"

View File

@@ -1,72 +1,95 @@
import os
import sys
from typing import List
import requests
from packaging.version import Version, parse
def main():
package_name: str = os.environ["INPUT_PACKAGE_NAME"]
new_version: Version = parse(os.environ["INPUT_NEW_VERSION"])
github_token: str = os.environ["INPUT_GITHUB_TOKEN"]
response = _package_metadata(package_name, github_token)
published_versions = _published_versions(response)
new_version_tags = _new_version_tags(new_version, published_versions)
_register_tags(new_version_tags, package_name)
def _package_metadata(package_name: str, github_token: str) -> requests.Response:
url = f"https://api.github.com/orgs/dbt-labs/packages/container/{package_name}/versions"
return requests.get(url, auth=("", github_token))
def _published_versions(response: requests.Response) -> List[Version]:
package_metadata = response.json()
return [
parse(tag)
for version in package_metadata
for tag in version["metadata"]["container"]["tags"]
if "latest" not in tag
]
def _new_version_tags(new_version: Version, published_versions: List[Version]) -> List[str]:
# the package version is always a tag
tags = [str(new_version)]
# pre-releases don't get tagged with `latest`
if new_version.is_prerelease:
return tags
if new_version > max(published_versions):
tags.append("latest")
published_patches = [
version
for version in published_versions
if version.major == new_version.major and version.minor == new_version.minor
]
if new_version > max(published_patches):
tags.append(f"{new_version.major}.{new_version.minor}.latest")
return tags
def _register_tags(tags: List[str], package_name: str) -> None:
fully_qualified_tags = ",".join([f"ghcr.io/dbt-labs/{package_name}:{tag}" for tag in tags])
github_output = os.environ.get("GITHUB_OUTPUT")
with open(github_output, "at", encoding="utf-8") as gh_output:
gh_output.write(f"fully_qualified_tags={fully_qualified_tags}")
def _validate_response(response: requests.Response) -> None:
message = response["message"]
if response.status_code != 200:
print(f"Call to GitHub API failed: {response.status_code} - {message}")
sys.exit(1)
from distutils.util import strtobool
from typing import Union
from packaging.version import parse, Version
if __name__ == "__main__":
main()
# get inputs
package = os.environ["INPUT_PACKAGE"]
new_version = parse(os.environ["INPUT_NEW_VERSION"])
gh_token = os.environ["INPUT_GH_TOKEN"]
halt_on_missing = strtobool(os.environ.get("INPUT_HALT_ON_MISSING", "False"))
# get package metadata from github
package_request = requests.get(
f"https://api.github.com/orgs/dbt-labs/packages/container/{package}/versions",
auth=("", gh_token),
)
package_meta = package_request.json()
# Log info if we don't get a 200
if package_request.status_code != 200:
print(f"Call to GH API failed: {package_request.status_code} {package_meta['message']}")
# Make an early exit if there is no matching package in github
if package_request.status_code == 404:
if halt_on_missing:
sys.exit(1)
else:
# everything is the latest if the package doesn't exist
print(f"::set-output name=latest::{True}")
print(f"::set-output name=minor_latest::{True}")
sys.exit(0)
# TODO: verify package meta is "correct"
# https://github.com/dbt-labs/dbt-core/issues/4640
# map versions and tags
version_tag_map = {
version["id"]: version["metadata"]["container"]["tags"] for version in package_meta
}
# is pre-release
pre_rel = True if any(x in str(new_version) for x in ["a", "b", "rc"]) else False
# semver of current latest
for version, tags in version_tag_map.items():
if "latest" in tags:
# N.B. This seems counterintuitive, but we expect any version tagged
# 'latest' to have exactly three associated tags:
# latest, major.minor.latest, and major.minor.patch.
# Subtracting everything that contains the string 'latest' gets us
# the major.minor.patch which is what's needed for comparison.
current_latest = parse([tag for tag in tags if "latest" not in tag][0])
else:
current_latest = False
# semver of current_minor_latest
for version, tags in version_tag_map.items():
if f"{new_version.major}.{new_version.minor}.latest" in tags:
# Similar to above, only now we expect exactly two tags:
# major.minor.patch and major.minor.latest
current_minor_latest = parse([tag for tag in tags if "latest" not in tag][0])
else:
current_minor_latest = False
def is_latest(
pre_rel: bool, new_version: Version, remote_latest: Union[bool, Version]
) -> bool:
"""Determine if a given contaier should be tagged 'latest' based on:
- it's pre-release status
- it's version
- the version of a previously identified container tagged 'latest'
:param pre_rel: Wether or not the version of the new container is a pre-release
:param new_version: The version of the new container
:param remote_latest: The version of the previously identified container that's
already tagged latest or False
"""
# is a pre-release = not latest
if pre_rel:
return False
# + no latest tag found = is latest
if not remote_latest:
return True
# + if remote version is lower than current = is latest, else not latest
return True if remote_latest <= new_version else False
latest = is_latest(pre_rel, new_version, current_latest)
minor_latest = is_latest(pre_rel, new_version, current_minor_latest)
print(f"::set-output name=latest::{latest}")
print(f"::set-output name=minor_latest::{minor_latest}")

View File

@@ -0,0 +1,10 @@
name: "Set up postgres (linux)"
description: "Set up postgres service on linux vm for dbt integration tests"
runs:
using: "composite"
steps:
- shell: bash
run: |
sudo systemctl start postgresql.service
pg_isready
sudo -u postgres bash ${{ github.action_path }}/setup_db.sh

View File

@@ -0,0 +1 @@
../../../test/setup_db.sh

View File

@@ -0,0 +1,24 @@
name: "Set up postgres (macos)"
description: "Set up postgres service on macos vm for dbt integration tests"
runs:
using: "composite"
steps:
- shell: bash
run: |
brew services start postgresql
echo "Check PostgreSQL service is running"
i=10
COMMAND='pg_isready'
while [ $i -gt -1 ]; do
if [ $i == 0 ]; then
echo "PostgreSQL service not ready, all attempts exhausted"
exit 1
fi
echo "Check PostgreSQL service status"
eval $COMMAND && break
echo "PostgreSQL service not ready, wait 10 more sec, attempts left: $i"
sleep 10
((i--))
done
createuser -s postgres
bash ${{ github.action_path }}/setup_db.sh

View File

@@ -0,0 +1 @@
../../../test/setup_db.sh

View File

@@ -5,22 +5,8 @@ runs:
steps:
- shell: pwsh
run: |
Write-Host -Object "Installing PostgreSQL 16 as windows service..."
$installerArgs = @("--install_runtimes 0", "--superpassword root", "--enable_acledit 1", "--unattendedmodeui none", "--mode unattended")
$filePath = Invoke-DownloadWithRetry -Url "https://get.enterprisedb.com/postgresql/postgresql-16.1-1-windows-x64.exe" -Path "$env:PGROOT/postgresql-16.1-1-windows-x64.exe"
Start-Process -FilePath $filePath -ArgumentList $installerArgs -Wait -PassThru
Write-Host -Object "Validating PostgreSQL 16 Install..."
Get-Service -Name postgresql*
$pgReady = Start-Process -FilePath "$env:PGBIN\pg_isready" -Wait -PassThru
$exitCode = $pgReady.ExitCode
if ($exitCode -ne 0) {
Write-Host -Object "PostgreSQL is not ready. Exitcode: $exitCode"
exit $exitCode
}
Write-Host -Object "Starting PostgreSQL 16 Service..."
$pgService = Get-Service -Name postgresql-x64-16
$pgService = Get-Service -Name postgresql*
Set-Service -InputObject $pgService -Status running -StartupType automatic
Start-Process -FilePath "$env:PGBIN\pg_isready" -Wait -PassThru
$env:Path += ";$env:PGBIN"
bash ${{ github.action_path }}/setup_db.sh

View File

@@ -1 +1 @@
../../../scripts/setup_db.sh
../../../test/setup_db.sh

View File

@@ -1,169 +0,0 @@
# **what?**
# Runs all tests in dbt-postgres with this branch of dbt-core to ensure nothing is broken
# **why?**
# Ensure dbt-core changes do not break dbt-postgres, as a basic proxy for other adapters
# **when?**
# This will run when trying to merge a PR into main.
# It can also be manually triggered.
# This workflow can be skipped by adding the "Skip Postgres Testing" label to the PR. This is
# useful when making a change in both `dbt-postgres` and `dbt-core` where the changes are dependent
# and cause the other repository to break.
name: "dbt-postgres Tests"
run-name: >-
${{ (github.event_name == 'workflow_dispatch' || github.event_name == 'workflow_call')
&& format('dbt-postgres@{0} with dbt-core@{1}', inputs.dbt-postgres-ref, inputs.dbt-core-ref)
|| 'dbt-postgres@main with dbt-core branch' }}
on:
push:
branches:
- "main"
- "*.latest"
- "releases/*"
pull_request:
merge_group:
types: [checks_requested]
workflow_dispatch:
inputs:
dbt-postgres-ref:
description: "The branch of dbt-postgres to test against"
default: "main"
dbt-core-ref:
description: "The branch of dbt-core to test against"
default: "main"
workflow_call:
inputs:
dbt-postgres-ref:
description: "The branch of dbt-postgres to test against"
type: string
required: true
default: "main"
dbt-core-ref:
description: "The branch of dbt-core to test against"
type: string
required: true
default: "main"
permissions: read-all
# will cancel previous workflows triggered by the same event
# and for the same ref for PRs/merges or same SHA otherwise
# and for the same inputs on workflow_dispatch or workflow_call
concurrency:
group: ${{ github.workflow }}-${{ github.event_name }}-${{ contains(fromJson('["pull_request", "merge_group"]'), github.event_name) && github.event.pull_request.head.ref || github.sha }}-${{ contains(fromJson('["workflow_call", "workflow_dispatch"]'), github.event_name) && github.event.inputs.dbt-postgres-ref && github.event.inputs.dbt-core-ref || github.sha }}
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
job-prep:
# This allows us to run the workflow on pull_requests as well so we can always run unit tests
# and only run integration tests on merge for time purposes
name: Setup Repo Refs
runs-on: ubuntu-latest
outputs:
dbt-postgres-ref: ${{ steps.core-ref.outputs.ref }}
dbt-core-ref: ${{ steps.common-ref.outputs.ref }}
steps:
- name: "Input Refs"
id: job-inputs
run: |
echo "inputs.dbt-postgres-ref=${{ inputs.dbt-postgres-ref }}"
echo "inputs.dbt-core-ref=${{ inputs.dbt-core-ref }}"
- name: "Determine dbt-postgres ref"
id: core-ref
run: |
if [[ -z "${{ inputs.dbt-postgres-ref }}" ]]; then
REF="main"
else
REF=${{ inputs.dbt-postgres-ref }}
fi
echo "ref=$REF" >> $GITHUB_OUTPUT
- name: "Determine dbt-core ref"
id: common-ref
run: |
if [[ -z "${{ inputs.dbt-core-ref }}" ]]; then
# these will be commits instead of branches
if [[ "${{ github.event_name }}" == "merge_group" ]]; then
REF=${{ github.event.merge_group.head_sha }}
else
REF=${{ github.event.pull_request.base.sha }}
fi
else
REF=${{ inputs.dbt-core-ref }}
fi
echo "ref=$REF" >> $GITHUB_OUTPUT
- name: "Final Refs"
run: |
echo "dbt-postgres-ref=${{ steps.core-ref.outputs.ref }}"
echo "dbt-core-ref=${{ steps.common-ref.outputs.ref }}"
integration-tests-postgres:
name: "dbt-postgres integration tests"
needs: [job-prep]
runs-on: ubuntu-latest
defaults:
run:
working-directory: "./dbt-postgres"
environment:
name: "dbt-postgres"
env:
POSTGRES_TEST_HOST: ${{ vars.POSTGRES_TEST_HOST }}
POSTGRES_TEST_PORT: ${{ vars.POSTGRES_TEST_PORT }}
POSTGRES_TEST_USER: ${{ vars.POSTGRES_TEST_USER }}
POSTGRES_TEST_PASS: ${{ secrets.POSTGRES_TEST_PASS }}
POSTGRES_TEST_DATABASE: ${{ vars.POSTGRES_TEST_DATABASE }}
POSTGRES_TEST_THREADS: ${{ vars.POSTGRES_TEST_THREADS }}
services:
postgres:
image: postgres
env:
POSTGRES_PASSWORD: postgres
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- ${{ vars.POSTGRES_TEST_PORT }}:5432
steps:
- name: "Check out dbt-adapters@${{ needs.job-prep.outputs.dbt-postgres-ref }}"
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
with:
repository: dbt-labs/dbt-adapters
ref: ${{ needs.job-prep.outputs.dbt-postgres-ref }}
- name: "Set up Python"
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # actions/setup-python@v5
with:
python-version: ${{ inputs.python-version }}
- name: "Set environment variables"
run: |
echo "HATCH_PYTHON=${{ inputs.python-version }}" >> $GITHUB_ENV
echo "PIP_ONLY_BINARY=psycopg2-binary" >> $GITHUB_ENV
- name: "Setup test database"
run: psql -f ./scripts/setup_test_database.sql
env:
PGHOST: ${{ vars.POSTGRES_TEST_HOST }}
PGPORT: ${{ vars.POSTGRES_TEST_PORT }}
PGUSER: postgres
PGPASSWORD: postgres
PGDATABASE: postgres
- name: "Install hatch"
uses: pypa/hatch@257e27e51a6a5616ed08a39a408a21c35c9931bc # pypa/hatch@install
- name: "Run integration tests"
run: hatch run ${{ inputs.hatch-env }}:integration-tests

View File

@@ -11,6 +11,11 @@ updates:
schedule:
interval: "daily"
rebase-strategy: "disabled"
- package-ecosystem: "pip"
directory: "/plugins/postgres"
schedule:
interval: "daily"
rebase-strategy: "disabled"
# docker dependencies
- package-ecosystem: "docker"
@@ -23,10 +28,3 @@ updates:
schedule:
interval: "weekly"
rebase-strategy: "disabled"
# github dependencies
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
rebase-strategy: "disabled"

View File

@@ -1,33 +1,23 @@
Resolves #
resolves #
<!---
Include the number of the issue addressed by this PR above, if applicable.
Include the number of the issue addressed by this PR above if applicable.
PRs for code changes without an associated issue *will not be merged*.
See CONTRIBUTING.md for more information.
Add the `user docs` label to this PR if it will need docs changes. An
issue will get opened in docs.getdbt.com upon successful merge of this PR.
-->
### Problem
### Description
<!---
Describe the problem this PR is solving. What is the application state
before this PR is merged?
-->
### Solution
<!---
Describe the way this PR solves the above problem. Add as much detail as you
can to help reviewers understand your changes. Include any alternatives and
tradeoffs you considered.
Describe the Pull Request here. Add any references and info to help reviewers
understand your changes. Include any tradeoffs you considered.
-->
### Checklist
- [ ] I have read [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md) and understand what's expected of me.
- [ ] I have run this code in development, and it appears to resolve the stated issue.
- [ ] This PR includes tests, or tests are not required or relevant for this PR.
- [ ] This PR has no interface changes (e.g., macros, CLI, logs, JSON artifacts, config files, adapter interface, etc.) or this PR has already received feedback and approval from Product or DX.
- [ ] This PR includes [type annotations](https://docs.python.org/3/library/typing.html) for new and modified functions.
- [ ] I have read [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md) and understand what's expected of me
- [ ] I have signed the [CLA](https://docs.getdbt.com/docs/contributor-license-agreements)
- [ ] I have run this code in development and it appears to resolve the stated issue
- [ ] This PR includes tests, or tests are not required/relevant for this PR
- [ ] I have [opened an issue to add/update docs](https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose), or docs changes are not required/relevant for this PR
- [ ] I have run `changie new` to [create a changelog entry](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-a-changelog-entry)

View File

@@ -1,186 +0,0 @@
# **what?**
# Enforces 2 reviews when artifact or validation files are modified.
# **why?**
# Ensure artifact changes receive proper review from designated team members. GitHub doesn't support
# multiple reviews on a single PR based on files changed, so we need to enforce this manually.
# **when?**
# This will run when reviews are submitted and dismissed.
name: "Enforce Additional Reviews on Artifact and Validations Changes"
permissions:
checks: write
pull-requests: write
contents: read
on:
# trigger check on review events. use pull_request_target for forks.
pull_request_target:
types: [opened, reopened, ready_for_review, synchronize, review_requested]
pull_request_review:
types: [submitted, edited, dismissed]
# only run this once per PR at a time
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: true
env:
required_approvals: 2
team: "core-group"
jobs:
check-reviews:
name: "Validate Additional Reviews"
runs-on: ubuntu-latest
steps:
- name: "Get list of changed files"
id: changed_files
run: |
# Fetch files as JSON and process with jq to sanitize output
gh api repos/${{ github.repository }}/pulls/${{ github.event.pull_request.number }}/files \
| jq -r '.[].filename' \
| while IFS= read -r file; do
# Sanitize the filename by removing any special characters and command injection attempts
clean_file=$(echo "$file" | sed 's/[^a-zA-Z0-9\.\/\-_]//g')
echo "$clean_file"
done > changed_files.txt
echo "CHANGED_FILES<<EOF" >> $GITHUB_OUTPUT
cat changed_files.txt >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: "Check if any artifact files changed"
id: artifact_files_changed
run: |
artifact_changes=false
while IFS= read -r file; do
# Only process if file path looks legitimate
if [[ "$file" =~ ^[a-zA-Z0-9\.\/\-_]+$ ]]; then
if [[ "$file" == "core/dbt/artifacts/"* ]] ; then
artifact_changes=true
break
fi
fi
done < changed_files.txt
echo "artifact_changes=$artifact_changes" >> $GITHUB_OUTPUT
- name: "Get Core Team Members"
if: steps.artifact_files_changed.outputs.artifact_changes == 'true'
id: core_members
run: |
gh api -H "Accept: application/vnd.github+json" \
/orgs/dbt-labs/teams/${{ env.team }}/members > core_members.json
# Extract usernames and set as multiline output
echo "membership<<EOF" >> $GITHUB_OUTPUT
jq -r '.[].login' core_members.json >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
env:
GH_TOKEN: ${{ secrets.IT_TEAM_MEMBERSHIP }}
- name: "Verify ${{ env.required_approvals }} core team approvals"
if: steps.artifact_files_changed.outputs.artifact_changes == 'true'
id: check_approvals
run: |
# Get all reviews
REVIEWS=$(gh api repos/${{ github.repository }}/pulls/${{ github.event.pull_request.number }}/reviews)
echo "All reviews:"
echo "$REVIEWS"
# Count approved reviews from core team members (only most recent review per user)
CORE_APPROVALS=0
while IFS= read -r member; do
echo "Checking member: $member"
APPROVED=$(echo "$REVIEWS" | jq --arg user "$member" '
group_by(.user.login) |
map(select(.[0].user.login == $user) |
sort_by(.submitted_at) |
last) |
map(select(.state == "APPROVED" and (.state != "DISMISSED"))) |
length')
echo "Latest review state for $member: $APPROVED"
CORE_APPROVALS=$((CORE_APPROVALS + APPROVED))
echo "Running total: $CORE_APPROVALS"
done <<< "${{ steps.core_members.outputs.membership }}"
echo "CORE_APPROVALS=$CORE_APPROVALS" >> $GITHUB_OUTPUT
echo "CORE_APPROVALS=$CORE_APPROVALS"
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: "Find Comment"
if: steps.artifact_files_changed.outputs.artifact_changes == 'true' && steps.check_approvals.outputs.CORE_APPROVALS < env.required_approvals
uses: peter-evans/find-comment@a54c31d7fa095754bfef525c0c8e5e5674c4b4b1 # peter-evans/find-comment@v2
id: find-comment
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: 'github-actions[bot]'
body-includes: "### Additional Artifact Review Required"
- name: "Create Comment"
if: steps.artifact_files_changed.outputs.artifact_changes == 'true' && steps.find-comment.outputs.comment-id == '' && steps.check_approvals.outputs.CORE_APPROVALS < env.required_approvals
uses: peter-evans/create-or-update-comment@23ff15729ef2fc348714a3bb66d2f655ca9066f2 # peter-evans/create-or-update-comment@v3
with:
issue-number: ${{ github.event.pull_request.number }}
body: |
### Additional Artifact Review Required
Changes to artifact directory files require at least ${{ env.required_approvals }} approvals from core team members.
- name: "Notify if not enough approvals"
if: steps.artifact_files_changed.outputs.artifact_changes == 'true'
run: |
if [[ "${{ steps.check_approvals.outputs.CORE_APPROVALS }}" -ge "${{ env.required_approvals }}" ]]; then
title="Extra requirements met"
message="Changes to artifact directory files requires at least ${{ env.required_approvals }} approvals from core team members. Current number of core team approvals: ${{ steps.check_approvals.outputs.CORE_APPROVALS }} "
echo "::notice title=$title::$message"
echo "REVIEW_STATUS=success" >> $GITHUB_OUTPUT
else
title="PR Approval Requirements Not Met"
message="Changes to artifact directory files requires at least ${{ env.required_approvals }} approvals from core team members. Current number of core team approvals: ${{ steps.check_approvals.outputs.CORE_APPROVALS }} "
echo "::notice title=$title::$message"
echo "REVIEW_STATUS=neutral" >> $GITHUB_OUTPUT
fi
id: review_check
- name: "Set check status"
id: status_check
run: |
if [[ "${{ steps.artifact_files_changed.outputs.artifact_changes }}" == 'false' ]]; then
# no extra review required
echo "current_status=success" >> $GITHUB_OUTPUT
elif [[ "${{ steps.review_check.outputs.REVIEW_STATUS }}" == "success" ]]; then
# we have all the required reviews
echo "current_status=success" >> $GITHUB_OUTPUT
else
# neutral exit - neither success nor failure
# we can't fail here because we use multiple triggers for this workflow and they won't reset the check
# workaround is to use a neutral exit to skip the check run until it's actually successful
echo "current_status=neutral" >> $GITHUB_OUTPUT
fi
- name: "Post Event"
# This step posts the status of the check because the workflow is triggered by multiple events
# and we need to ensure the check is always updated. Otherwise we would end up with duplicate
# checks in the GitHub UI.
run: |
if [[ "${{ steps.status_check.outputs.current_status }}" == "success" ]]; then
state="success"
else
state="failure"
fi
gh api \
--method POST \
-H "Accept: application/vnd.github+json" \
/repos/${{ github.repository }}/statuses/${{ github.event.pull_request.base.sha }} \
-f state="$state" \
-f description="Artifact Review Check" \
-f context="Artifact Review Check" \
-f target_url="${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
env:
GH_TOKEN: ${{ secrets.FISHTOWN_BOT_PAT }}
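
For reference, the latest-review-per-user counting used above can be exercised locally with plain jq. A minimal sketch (assumes jq is installed; the username and review data below are invented):

    member="some-core-team-member"
    reviews='[
      {"user":{"login":"some-core-team-member"},"state":"CHANGES_REQUESTED","submitted_at":"2024-01-01T00:00:00Z"},
      {"user":{"login":"some-core-team-member"},"state":"APPROVED","submitted_at":"2024-01-02T00:00:00Z"},
      {"user":{"login":"someone-else"},"state":"APPROVED","submitted_at":"2024-01-03T00:00:00Z"}
    ]'
    # Group reviews by author, keep only $member's most recent review, and count
    # it if that latest review is an approval (prints 1 for the data above).
    echo "$reviews" | jq --arg user "$member" '
      group_by(.user.login) |
      map(select(.[0].user.login == $user) | sort_by(.submitted_at) | last) |
      map(select(.state == "APPROVED")) |
      length'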


@@ -1,50 +0,0 @@
# **what?**
# Check if an issue is opened near or during an extended holiday period.
# If so, post an automatically-generated comment about the holiday for bug reports.
# Also provide specific information to customers of dbt Cloud.
# **why?**
# Explain why responses will be delayed during our holiday period.
# **when?**
# This will run when new issues are opened.
name: Auto-Respond to Bug Reports During Holiday Period
on:
issues:
types:
- opened
permissions:
contents: read
issues: write
jobs:
auto-response:
runs-on: ${{ vars.UBUNTU_LATEST }}
steps:
- name: Check if current date is within holiday period
id: date-check
run: |
current_date=$(date -u +"%Y-%m-%d")
start_date="2024-12-23"
end_date="2025-01-05"
if [[ "$current_date" < "$start_date" || "$current_date" > "$end_date" ]]; then
echo "outside_holiday=true" >> $GITHUB_ENV
else
echo "outside_holiday=false" >> $GITHUB_ENV
fi
- name: Post comment
if: ${{ env.outside_holiday == 'false' && contains(github.event.issue.labels.*.name, 'bug') }}
run: |
          gh issue comment ${{ github.event.issue.number }} --repo ${{ github.repository }} --body "Thank you for your bug report! Our team will be out of the office for [Christmas and our Global Week of Rest](https://handbook.getdbt.com/docs/time_off#2024-us-holidays), from December 25, 2024, through January 3, 2025.
We will review your issue as soon as possible after returning.
Thank you for your understanding, and happy holidays! 🎄🎉
If you are a customer of dbt Cloud, please contact our Customer Support team via the dbt Cloud web interface or email **support@dbtlabs.com**."
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
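
A note on the date window check above: ISO-8601 dates (YYYY-MM-DD) sort chronologically as plain strings, so the [[ ... ]] test needs no date arithmetic. A minimal bash sketch:

    # Lexicographic string comparison inside [[ ]] is enough for YYYY-MM-DD dates.
    d="2024-12-24"
    if [[ "$d" < "2024-12-23" || "$d" > "2025-01-05" ]]; then
      echo "outside_holiday=true"
    else
      echo "outside_holiday=false"   # this branch runs for 2024-12-24
    fi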


@@ -28,13 +28,13 @@ permissions:
jobs:
backport:
name: Backport
runs-on: ${{ vars.UBUNTU_LATEST }}
runs-on: ubuntu-latest
# Only react to merged PRs for security reasons.
# See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target.
if: >
github.event.pull_request.merged
&& contains(github.event.label.name, 'backport')
steps:
- uses: tibdex/backport@9565281eda0731b1d20c4025c43339fb0a23812e # tibdex/backport@v2.0.4
- uses: tibdex/backport@v2.0.2
with:
github_token: ${{ secrets.GITHUB_TOKEN }}


@@ -40,15 +40,17 @@ jobs:
matrix:
include:
- label: "dependencies"
changie_kind: "Dependencies"
runs-on: ${{ vars.UBUNTU_LATEST }}
changie_kind: "Dependency"
- label: "snyk"
changie_kind: "Security"
runs-on: ubuntu-latest
steps:
- name: Create and commit changelog on bot PR
if: ${{ contains(github.event.pull_request.labels.*.name, matrix.label) }}
id: bot_changelog
uses: emmyoop/changie_bot@22b70618b13d0d1c64ea95212bafca2d2bf6b764 # emmyoop/changie_bot@v1.1.0
uses: emmyoop/changie_bot@v1.0.1
with:
GITHUB_TOKEN: ${{ secrets.FISHTOWN_BOT_PAT }}
commit_author_name: "Github Build Bot"
@@ -56,4 +58,4 @@ jobs:
commit_message: "Add automated changelog yaml from template for bot PR"
changie_kind: ${{ matrix.changie_kind }}
label: ${{ matrix.label }}
custom_changelog_string: "custom:\n Author: ${{ github.event.pull_request.user.login }}\n Issue: ${{ github.event.pull_request.number }}"
custom_changelog_string: "custom:\n Author: ${{ github.event.pull_request.user.login }}\n Issue: 4904\n PR: ${{ github.event.pull_request.number }}"


@@ -2,8 +2,10 @@
# Checks that a file has been committed under the /.changes directory
# as a new CHANGELOG entry. Cannot check for a specific filename as
# it is dynamically generated by change type and timestamp.
# This workflow runs on pull_request_target because it requires
# secrets to post comments.
# This workflow should not require any secrets since it runs for PRs
# from forked repos.
# By default, secrets are not passed to workflows running from
# a forked repo.
# **why?**
# Ensure code change gets reflected in the CHANGELOG.
@@ -17,10 +19,8 @@
name: Check Changelog Entry
on:
pull_request_target:
pull_request:
types: [opened, reopened, labeled, unlabeled, synchronize]
paths-ignore: ['.changes/**', '.github/**', 'tests/**', '**.md', '**.yml']
workflow_dispatch:
defaults:


@@ -1,45 +0,0 @@
name: Check Artifact Changes
on:
pull_request:
types: [ opened, reopened, labeled, unlabeled, synchronize ]
paths-ignore: [ '.changes/**', '.github/**', 'tests/**', '**.md', '**.yml' ]
merge_group:
types: [checks_requested]
workflow_dispatch:
permissions:
contents: read
jobs:
check-artifact-changes:
runs-on: ${{ vars.UBUNTU_LATEST }}
if: ${{ !contains(github.event.pull_request.labels.*.name, 'artifact_minor_upgrade') }}
steps:
- name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
with:
fetch-depth: 0
- name: Check for changes in core/dbt/artifacts
# https://github.com/marketplace/actions/paths-changes-filter
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # dorny/paths-filter@v3
id: check_artifact_changes
with:
filters: |
artifacts_changed:
- 'core/dbt/artifacts/**'
list-files: shell
- name: Fail CI if artifacts have changed
if: steps.check_artifact_changes.outputs.artifacts_changed == 'true'
run: |
echo "CI failure: Artifact changes checked in core/dbt/artifacts directory."
echo "Files changed: ${{ steps.check_artifact_changes.outputs.artifacts_changed_files }}"
echo "To bypass this check, confirm that the change is not breaking (https://github.com/dbt-labs/dbt-core/blob/main/core/dbt/artifacts/README.md#breaking-changes) and add the 'artifact_minor_upgrade' label to the PR. Modifications and additions to all fields require updates to https://github.com/dbt-labs/dbt-jsonschema."
exit 1
- name: CI check passed
if: steps.check_artifact_changes.outputs.artifacts_changed == 'false'
run: |
echo "No prohibited artifact changes found in core/dbt/artifacts. CI check passed."


@@ -1,44 +0,0 @@
# **what?**
# Label a PR with a `community` label when a PR is opened by a user outside core/adapters
# **why?**
# To streamline triage and ensure that community contributions are recognized and prioritized
# **when?**
# When a PR is opened, not in draft or moved from draft to ready for review
name: Label community PRs
on:
# have to use pull_request_target since community PRs come from forks
pull_request_target:
types: [opened, ready_for_review]
defaults:
run:
shell: bash
permissions:
pull-requests: write # labels PRs
contents: read # reads team membership
jobs:
open_issues:
# If this PR already has the community label, no need to relabel it
# If this PR is opened and not draft, determine if it needs to be labeled
# if the PR is converted out of draft, determine if it needs to be labeled
if: |
(
!contains(github.event.pull_request.labels.*.name, 'community')
&& (
(github.event.action == 'opened' && github.event.pull_request.draft == false)
|| github.event.action == 'ready_for_review'
)
&& github.event.pull_request.user.type != 'Bot'
&& github.event.pull_request.user.login != 'dependabot[bot]'
)
uses: dbt-labs/actions/.github/workflows/label-community.yml@main
with:
github_team: 'core-group'
label: 'community'
secrets: inherit


@@ -1,391 +0,0 @@
# **what?**
# Cuts the `*.latest` branch, bumps dependencies on it, cleans up all files in `.changes/unreleased`
# and `.changes/previous version` on main, and bumps main to the input version.
# **why?**
# Clean up the main branch after a release branch is cut and automate cutting the release branch.
# Generally reduces the workload of engineers and reduces the chance of error.
# **when?**
# This will run when called manually or when triggered in another workflow.
# Example Usage including required permissions: TODO: update once finalized
# permissions:
# contents: read
# pull-requests: write
#
# name: Cut Release Branch
# jobs:
# changelog:
# uses: dbt-labs/actions/.github/workflows/cut-release-branch.yml@main
# with:
# new_branch_name: 1.7.latest
# PR_title: "Cleanup main after cutting new 1.7.latest branch"
# PR_body: "All adapter PRs will fail CI until the dbt-core PR has been merged due to release version conflicts."
# secrets:
# FISHTOWN_BOT_PAT: ${{ secrets.FISHTOWN_BOT_PAT }}
# TODOs
# add note to eventually commit changes directly and bypass checks (same as release); when we move to this model, run the test action after merge
name: Cut new release branch
run-name: "Cutting New Branch: ${{ inputs.new_branch_name }}"
on:
workflow_dispatch:
inputs:
new_branch_name:
description: "The full name of the new branch (ex. 1.5.latest)"
required: true
type: string
defaults:
run:
shell: bash
permissions:
contents: write
pull-requests: write
env:
PYTHON_TARGET_VERSION: "3.10"
PR_TITLE: "Cleanup main after cutting new ${{ inputs.new_branch_name }} branch"
PR_BODY: "All adapter PRs will fail CI until the dbt-core PR has been merged due to release version conflicts."
jobs:
prep_work:
name: "Prep Work"
runs-on: ubuntu-latest
steps:
- name: "[DEBUG] Print Inputs"
run: |
echo "new_branch_name: ${{ inputs.new_branch_name }}"
echo "PR_title: ${{ env.PR_TITLE }}"
echo "PR_body: ${{ env.PR_BODY }}"
create_temp_branch:
name: "Create Temp branch off main"
runs-on: ubuntu-latest
outputs:
temp_branch_name: ${{ steps.variables.outputs.BRANCH_NAME }}
steps:
- name: "Set Branch Value"
id: variables
run: |
echo "BRANCH_NAME=cutting_release_branch/main_cleanup_$GITHUB_RUN_ID" >> $GITHUB_OUTPUT
- name: "Checkout ${{ github.repository }}"
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
with:
ref: "main"
token: ${{ secrets.FISHTOWN_BOT_PAT }}
- name: "Create PR Branch"
run: |
user="Github Build Bot"
email="buildbot@fishtownanalytics.com"
git config user.name "$user"
git config user.email "$email"
git checkout -b ${{ steps.variables.outputs.BRANCH_NAME }}
git push --set-upstream origin ${{ steps.variables.outputs.BRANCH_NAME }}
- name: "[Notification] Temp branch created"
run: |
message="Temp branch ${{ steps.variables.outputs.BRANCH_NAME }} created"
echo "::notice title="Temporary branch created": $title::$message"
cleanup_changelog:
name: "Clean Up Changelog"
needs: ["create_temp_branch"]
runs-on: ubuntu-latest
outputs:
next-version: ${{ steps.semver-current.outputs.next-minor-alpha-version }}
steps:
- name: "Checkout ${{ github.repository }}"
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
with:
ref: ${{ needs.create_temp_branch.outputs.temp_branch_name }}
token: ${{ secrets.FISHTOWN_BOT_PAT }}
- name: "Add Homebrew To PATH"
run: |
echo "/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin" >> $GITHUB_PATH
- name: "Install Homebrew Packages"
run: |
brew install pre-commit
brew tap miniscruff/changie https://github.com/miniscruff/changie
brew install changie
- name: "Check Current Version In Code"
id: determine_version
run: |
current_version=$(grep '^version = ' core/pyproject.toml | sed 's/version = "\(.*\)"/\1/')
echo "current_version=$current_version" >> $GITHUB_OUTPUT
- name: "[Notification] Check Current Version In Code"
run: |
message="The current version is ${{ steps.determine_version.outputs.current_version }}"
echo "::notice title="Version Bump Check": $title::$message"
- name: "Parse Current Version Into Parts for Changelog Directories"
id: semver-current
uses: dbt-labs/actions/parse-semver@main
with:
version: ${{ steps.determine_version.outputs.current_version }}
- name: "[Notification] Next Alpha Version"
run: |
message="The next alpha version is ${{ steps.semver-current.outputs.next-minor-alpha-version }}"
echo "::notice title="Version Bump Check": $title::$message"
- name: "Delete Unreleased Changelog YAMLs"
# removal fails if no files exist. OK to continue since we're just cleaning up the files.
continue-on-error: true
run: |
rm .changes/unreleased/*.yaml || true
- name: "Delete Pre Release Changelogs and YAMLs"
# removal fails if no files exist. OK to continue since we're just cleaning up the files.
continue-on-error: true
run: |
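          # e.g. if the current version were 1.5.0a1, the globs below would expand to
          # .changes/1.5.0/*.yaml and .changes/1.5.*.md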
rm .changes/${{ steps.semver-current.outputs.base-version }}/*.yaml || true
rm .changes/${{ steps.semver-current.outputs.major }}.${{ steps.semver-current.outputs.minor }}.*.md || true
- name: "Cleanup CHANGELOG.md"
run: |
changie merge
- name: "Commit Changelog Cleanup to Branch"
run: |
user="Github Build Bot"
email="buildbot@fishtownanalytics.com"
git config user.name "$user"
git config user.email "$email"
git status
git add .
git commit -m "Clean up changelog on main"
git push
- name: "[Notification] Changelog cleaned up"
run: |
message="Changelog on ${{ needs.create_temp_branch.outputs.temp_branch_name }} cleaned up"
echo "::notice title="Changelog cleaned up": $title::$message"
bump_version:
name: "Bump to next minor version"
needs: ["cleanup_changelog", "create_temp_branch"]
runs-on: ubuntu-latest
steps:
- name: "Checkout ${{ github.repository }}"
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
with:
ref: ${{ needs.create_temp_branch.outputs.temp_branch_name }}
token: ${{ secrets.FISHTOWN_BOT_PAT }}
- name: "Set up Python - ${{ env.PYTHON_TARGET_VERSION }}"
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # actions/setup-python@v5
with:
python-version: "${{ env.PYTHON_TARGET_VERSION }}"
- name: "Install Spark Dependencies"
if: ${{ contains(github.repository, 'dbt-labs/dbt-spark') }}
run: |
sudo apt-get update
sudo apt-get install libsasl2-dev
- name: "Install Python Dependencies"
run: |
python -m pip install --upgrade pip
python -m pip install hatch
- name: "Bump Version To ${{ needs.cleanup_changelog.outputs.next-version }}"
run: |
cd core
hatch version ${{ needs.cleanup_changelog.outputs.next-version }}
hatch run dev-req
dbt --version
- name: "Commit Version Bump to Branch"
run: |
user="Github Build Bot"
email="buildbot@fishtownanalytics.com"
git config user.name "$user"
git config user.email "$email"
git status
git add .
git commit -m "Bumping version to ${{ needs.cleanup_changelog.outputs.next-version }}"
git push
- name: "[Notification] Version Bump completed"
run: |
message="Version on ${{ needs.create_temp_branch.outputs.temp_branch_name }} bumped to ${{ needs.cleanup_changelog.outputs.next-version }}"
echo "::notice title="Version Bump Completed": $title::$message"
cleanup:
name: "Cleanup Code Quality"
needs: ["create_temp_branch", "bump_version"]
runs-on: ubuntu-latest
steps:
- name: "Checkout ${{ github.repository }}"
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
with:
ref: ${{ needs.create_temp_branch.outputs.temp_branch_name }}
token: ${{ secrets.FISHTOWN_BOT_PAT }}
- name: "Add Homebrew To PATH"
run: |
echo "/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin" >> $GITHUB_PATH
- name: "brew install pre-commit"
run: |
brew install pre-commit
# this step will fail on whitespace errors but also correct them
- name: "Cleanup - Remove Trailing Whitespace Via Pre-commit"
continue-on-error: true
run: |
pre-commit run trailing-whitespace --files CHANGELOG.md .changes/* || true
# this step will fail on newline errors but also correct them
- name: "Cleanup - Remove Extra Newlines Via Pre-commit"
continue-on-error: true
run: |
pre-commit run end-of-file-fixer --files CHANGELOG.md .changes/* || true
- name: "Commit Version Bump to Branch"
run: |
user="Github Build Bot"
email="buildbot@fishtownanalytics.com"
git config user.name "$user"
git config user.email "$email"
git status
git add .
git commit -m "Code quality cleanup"
git push
open_pr:
name: "Open PR Against main"
needs: ["cleanup_changelog", "create_temp_branch", "cleanup"]
runs-on: ubuntu-latest
outputs:
pr_number: ${{ steps.create_pr.outputs.pull-request-number }}
steps:
- name: "Checkout ${{ github.repository }}"
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
with:
ref: ${{ needs.create_temp_branch.outputs.temp_branch_name }}
token: ${{ secrets.FISHTOWN_BOT_PAT }}
- name: "Determine PR Title"
id: pr_title
run: |
echo "pr_title=${{ env.PR_TITLE }}" >> $GITHUB_OUTPUT
          if [ "${{ env.PR_TITLE }}" == "" ]; then
echo "pr_title='Clean up changelogs and bump to version ${{ needs.cleanup_changelog.outputs.next-version }}'" >> $GITHUB_OUTPUT
fi
- name: "Determine PR Body"
id: pr_body
run: |
echo "pr_body=${{ env.PR_BODY }}" >> $GITHUB_OUTPUT
          if [ "${{ env.PR_BODY }}" == "" ]; then
echo "pr_body='Clean up changelogs and bump to version ${{ needs.cleanup_changelog.outputs.next-version }}'" >> $GITHUB_OUTPUT
fi
- name: "Add Branch Details"
id: pr_body_branch
run: |
branch_details="The workflow that generated this PR also created a new branch: ${{ inputs.new_branch_name }}"
full_body="${{ steps.pr_body.outputs.pr_body }} $branch_details"
echo "pr_full_body=$full_body" >> $GITHUB_OUTPUT
- name: "Open Pull Request"
id: create_pr
run: |
pr_url=$(gh pr create -B main -H ${{ needs.create_temp_branch.outputs.temp_branch_name }} -l "Skip Changelog" -t "${{ steps.pr_title.outputs.pr_title }}" -b "${{ steps.pr_body_branch.outputs.pr_full_body }}")
echo "pr_url=$pr_url" >> $GITHUB_OUTPUT
env:
GH_TOKEN: ${{ secrets.FISHTOWN_BOT_PAT }}
- name: "[Notification] Pull Request Opened"
run: |
message="PR opened at ${{ steps.create_pr.outputs.pr_url }}"
echo "::notice title="Pull Request Opened": $title::$message"
cut_new_branch:
# don't cut the new branch until we're done opening the PR against main
name: "Cut New Branch ${{ inputs.new_branch_name }}"
needs: [open_pr]
runs-on: ubuntu-latest
steps:
- name: "Checkout ${{ github.repository }}"
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
with:
token: ${{ secrets.FISHTOWN_BOT_PAT }}
fetch-depth: 0
- name: "Ensure New Branch Does Not Exist"
id: check_new_branch
run: |
title="Check New Branch Existence"
if git show-ref --quiet ${{ inputs.new_branch_name }}; then
message="Branch ${{ inputs.new_branch_name }} already exists. Exiting."
echo "::error $title::$message"
exit 1
fi
- name: "Create New Release Branch"
run: |
git checkout -b ${{ inputs.new_branch_name }}
- name: "Push up New Branch"
run: |
#Data for commit
user="Github Build Bot"
email="buildbot@fishtownanalytics.com"
git config user.name "$user"
git config user.email "$email"
git push --set-upstream origin ${{ inputs.new_branch_name }}
- name: "[Notification] New branch created"
run: |
message="New branch ${{ inputs.new_branch_name }} created"
echo "::notice title="New branch created": $title::$message"
- name: "Bump dependencies via script"
# This bumps the dependency on dbt-core in the adapters
if: ${{ !contains(github.repository, 'dbt-core') }}
run: |
echo ${{ github.repository }}
echo "running update_dependencies script"
bash ${GITHUB_WORKSPACE}/.github/scripts/update_dependencies.sh ${{ inputs.new_branch_name }}
commit_message="bumping .latest branch variable in update_dependencies.sh to ${{ inputs.new_branch_name }}"
git status
git add .
git commit -m "$commit_message"
git push
- name: "Bump env variable via script"
# bumps the RELEASE_BRANCH variable in nightly-release.yml in adapters
if: ${{ !contains(github.repository, 'dbt-core') }}
run: |
file="./.github/scripts/update_release_branch.sh"
if test -f "$file"; then
echo ${{ github.repository }}
echo "running some script yet to be written now"
bash $file ${{ inputs.new_branch_name }}
commit_message="updating env variable to ${{ inputs.new_branch_name }} in nightly-release.yml"
git status
git add .
git commit -m "$commit_message"
git push
else
echo "no $file seen skipping step"
fi


@@ -1,41 +0,0 @@
# **what?**
# Open an issue in docs.getdbt.com when an issue is labeled `user docs` and closed as completed
# **why?**
# To reduce barriers for keeping docs up to date
# **when?**
# When an issue is labeled `user docs` and is closed as completed. Can be labeled before or after the issue is closed.
name: Open issues in docs.getdbt.com repo when an issue is labeled
run-name: "Open an issue in docs.getdbt.com for issue #${{ github.event.issue.number }}"
on:
issues:
types: [labeled, closed]
defaults:
run:
shell: bash
permissions:
issues: write # comments on issues
jobs:
open_issues:
# we only want to run this when the issue is closed as completed and the label `user docs` has been assigned.
# If this logic does not exist in this workflow, it runs the
    # risk of duplication of issues being created due to merge and label both triggering this workflow to run and neither having
    # generated the comment before the other runs. This lives here instead of the shared workflow because this is where we
# decide if it should run or not.
if: |
(github.event.issue.state == 'closed' &&
github.event.issue.state_reason == 'completed' &&
contains( github.event.issue.labels.*.name, 'user docs'))
uses: dbt-labs/actions/.github/workflows/open-issue-in-repo.yml@main
with:
issue_repository: "dbt-labs/docs.getdbt.com"
issue_title: "[Core] Docs Changes Needed from ${{ github.event.repository.name }} Issue #${{ github.event.issue.number }}"
issue_body: "At a minimum, update body to include a link to the page on docs.getdbt.com requiring updates and what part(s) of the page you would like to see updated.\n Originating from this issue: https://github.com/dbt-labs/dbt-core/issues/${{ github.event.issue.number }}"
secrets: inherit

.github/workflows/jira-creation.yml

@@ -0,0 +1,26 @@
# **what?**
# Mirrors issues into Jira. Includes the information: title,
# GitHub Issue ID and URL
# **why?**
# Jira is our tool for tracking and we need to see these issues in there
# **when?**
# On issue creation or when an issue is labeled `Jira`
name: Jira Issue Creation
on:
issues:
types: [opened, labeled]
permissions:
issues: write
jobs:
call-label-action:
uses: dbt-labs/jira-actions/.github/workflows/jira-creation.yml@main
secrets:
JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }}

.github/workflows/jira-label.yml

@@ -0,0 +1,26 @@
# **what?**
# Calls mirroring Jira label Action. Includes adding a new label
# to an existing issue or removing a label as well
# **why?**
# Jira is our tool for tracking and we need to see these labels in there
# **when?**
# On labels being added or removed from issues
name: Jira Label Mirroring
on:
issues:
types: [labeled, unlabeled]
permissions:
issues: read
jobs:
call-label-action:
uses: dbt-labs/jira-actions/.github/workflows/jira-label.yml@main
secrets:
JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }}

.github/workflows/jira-transition.yml

@@ -0,0 +1,27 @@
# **what?**
# Transition a Jira issue to a new state
# Only supports these GitHub Issue transitions:
# closed, deleted, reopened
# **why?**
# Jira needs to be kept up-to-date
# **when?**
# On issue closing, deletion, reopened
name: Jira Issue Transition
on:
issues:
types: [closed, deleted, reopened]
# no special access is needed
permissions: read-all
jobs:
call-label-action:
uses: dbt-labs/jira-actions/.github/workflows/jira-transition.yml@main
secrets:
JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }}


@@ -20,8 +20,6 @@ on:
- "*.latest"
- "releases/*"
pull_request:
merge_group:
types: [checks_requested]
workflow_dispatch:
permissions: read-all
@@ -35,11 +33,6 @@ defaults:
run:
shell: bash
# top-level adjustments can be made here
env:
# number of parallel processes to spawn for python integration testing
PYTHON_INTEGRATION_TEST_WORKERS: 5
jobs:
code-quality:
name: code-quality
@@ -49,33 +42,30 @@ jobs:
steps:
- name: Check out the repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
uses: actions/checkout@v2
- name: Set up Python
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # actions/setup-python@v6
uses: actions/setup-python@v4.3.0
with:
python-version: "3.10"
python-version: '3.8'
- name: Install python dependencies
run: |
python -m pip install --user --upgrade pip
python -m pip --version
python -m pip install hatch
cd core
hatch run setup
- name: Verify dbt installation
run: |
cd core
hatch run dbt --version
python -m pip install pre-commit
pre-commit --version
python -m pip install mypy==0.942
mypy --version
python -m pip install -r requirements.txt
python -m pip install -r dev-requirements.txt
dbt --version
- name: Run pre-commit hooks
run: |
cd core
hatch run code-quality
run: pre-commit run --all-files --show-diff-on-failure
unit:
name: "unit test / python ${{ matrix.python-version }}"
name: unit test / python ${{ matrix.python-version }}
runs-on: ubuntu-latest
timeout-minutes: 10
@@ -83,14 +73,18 @@ jobs:
strategy:
fail-fast: false
matrix:
python-version: ["3.10", "3.11", "3.12", "3.13"]
python-version: ["3.7", "3.8", "3.9", "3.10"]
env:
TOXENV: "unit"
PYTEST_ADDOPTS: "-v --color=yes --csv unit_results.csv"
steps:
- name: Check out the repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # actions/setup-python@v6
uses: actions/setup-python@v4.3.0
with:
python-version: ${{ matrix.python-version }}
@@ -98,201 +92,64 @@ jobs:
run: |
python -m pip install --user --upgrade pip
python -m pip --version
python -m pip install hatch
hatch --version
python -m pip install tox
tox --version
- name: Run unit tests
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # nick-fields/retry@v3
with:
timeout_minutes: 10
max_attempts: 3
command: cd core && hatch run ci:unit-tests
- name: Run tox
run: tox
- name: Get current date
if: always()
id: date
run: |
CURRENT_DATE=$(date +'%Y-%m-%dT%H_%M_%S') # no colons allowed for artifacts
echo "date=$CURRENT_DATE" >> $GITHUB_OUTPUT
run: echo "::set-output name=date::$(date +'%Y-%m-%dT%H_%M_%S')" #no colons allowed for artifacts
- name: Upload Unit Test Coverage to Codecov
if: ${{ matrix.python-version == '3.11' }}
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # codecov/codecov-action@v5
- uses: actions/upload-artifact@v2
if: always()
with:
token: ${{ secrets.CODECOV_TOKEN }}
flags: unit
fail_ci_if_error: false
name: unit_results_${{ matrix.python-version }}-${{ steps.date.outputs.date }}.csv
path: unit_results.csv
integration-metadata:
name: integration test metadata generation
runs-on: ubuntu-latest
outputs:
split-groups: ${{ steps.generate-split-groups.outputs.split-groups }}
include: ${{ steps.generate-include.outputs.include }}
steps:
- name: generate split-groups
id: generate-split-groups
run: |
MATRIX_JSON="["
for B in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
MATRIX_JSON+=$(sed 's/^/"/;s/$/"/' <<< "${B}")
done
MATRIX_JSON="${MATRIX_JSON//\"\"/\", \"}"
MATRIX_JSON+="]"
echo "split-groups=${MATRIX_JSON}"
echo "split-groups=${MATRIX_JSON}" >> $GITHUB_OUTPUT
- name: generate include
id: generate-include
run: |
INCLUDE=('"python-version":"3.10","os":"windows-latest"' '"python-version":"3.10","os":"macos-14"' )
INCLUDE_GROUPS="["
for include in ${INCLUDE[@]}; do
for group in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
INCLUDE_GROUPS+=$(sed 's/$/, /' <<< "{\"split-group\":\"${group}\",${include}}")
done
done
INCLUDE_GROUPS=$(echo $INCLUDE_GROUPS | sed 's/,*$//g')
INCLUDE_GROUPS+="]"
echo "include=${INCLUDE_GROUPS}"
echo "include=${INCLUDE_GROUPS}" >> $GITHUB_OUTPUT
integration-postgres:
name: "(${{ matrix.split-group }}) integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}"
integration:
name: integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}
runs-on: ${{ matrix.os }}
timeout-minutes: 30
needs:
- integration-metadata
timeout-minutes: 45
strategy:
fail-fast: false
matrix:
python-version: ["3.10", "3.11", "3.12", "3.13"]
os: ["ubuntu-latest"]
split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
python-version: ["3.7", "3.8", "3.9", "3.10"]
os: [ubuntu-latest]
include:
- python-version: 3.8
os: windows-latest
- python-version: 3.8
os: macos-latest
env:
TOXENV: integration
PYTEST_ADDOPTS: "-v --color=yes -n4 --csv integration_results.csv"
DBT_INVOCATION_ENV: github-actions
DBT_TEST_USER_1: dbt_test_user_1
DBT_TEST_USER_2: dbt_test_user_2
DBT_TEST_USER_3: dbt_test_user_3
DD_CIVISIBILITY_AGENTLESS_ENABLED: true
DD_API_KEY: ${{ secrets.DATADOG_API_KEY }}
DD_SITE: datadoghq.com
DD_ENV: ci
DD_SERVICE: ${{ github.event.repository.name }}
services:
# Label used to access the service container
postgres:
# Docker Hub image
image: postgres
# Provide the password for postgres
env:
POSTGRES_PASSWORD: password
POSTGRES_USER: postgres
# Set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
steps:
- name: Check out the repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # actions/setup-python@v6
uses: actions/setup-python@v4.3.0
with:
python-version: ${{ matrix.python-version }}
- name: Run postgres setup script
run: |
./scripts/setup_db.sh
env:
PGHOST: localhost
PGPORT: 5432
PGPASSWORD: password
- name: Install python tools
run: |
python -m pip install --user --upgrade pip
python -m pip --version
python -m pip install hatch
hatch --version
- name: Run integration tests
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # nick-fields/retry@v3
with:
timeout_minutes: 30
max_attempts: 3
shell: bash
command: cd core && hatch run ci:integration-tests -- --ddtrace --splits ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }} --group ${{ matrix.split-group }}
- name: Get current date
if: always()
id: date
run: |
CURRENT_DATE=$(date +'%Y-%m-%dT%H_%M_%S') # no colons allowed for artifacts
echo "date=$CURRENT_DATE" >> $GITHUB_OUTPUT
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # actions/upload-artifact@v4
if: always()
with:
name: logs_${{ matrix.python-version }}_${{ matrix.os }}_${{ matrix.split-group }}_${{ steps.date.outputs.date }}
path: ./logs
- name: Upload Integration Test Coverage to Codecov
if: ${{ matrix.python-version == '3.11' }}
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}
flags: integration
fail_ci_if_error: false
integration-mac-windows:
name: (${{ matrix.split-group }}) integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}
runs-on: ${{ matrix.os }}
timeout-minutes: 30
needs:
- integration-metadata
strategy:
fail-fast: false
matrix:
# already includes split group and runs mac + windows
include: ${{ fromJson(needs.integration-metadata.outputs.include) }}
env:
DBT_INVOCATION_ENV: github-actions
DBT_TEST_USER_1: dbt_test_user_1
DBT_TEST_USER_2: dbt_test_user_2
DBT_TEST_USER_3: dbt_test_user_3
DD_CIVISIBILITY_AGENTLESS_ENABLED: true
DD_API_KEY: ${{ secrets.DATADOG_API_KEY }}
DD_SITE: datadoghq.com
DD_ENV: ci
DD_SERVICE: ${{ github.event.repository.name }}
steps:
- name: Check out the repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # actions/setup-python@v6
with:
python-version: ${{ matrix.python-version }}
- name: Set up postgres (linux)
if: runner.os == 'Linux'
uses: ./.github/actions/setup-postgres-linux
- name: Set up postgres (macos)
if: runner.os == 'macOS'
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # nick-fields/retry@v3
with:
timeout_minutes: 10
max_attempts: 3
command: ./scripts/setup_db.sh
uses: ./.github/actions/setup-postgres-macos
- name: Set up postgres (windows)
if: runner.os == 'Windows'
@@ -302,54 +159,28 @@ jobs:
run: |
python -m pip install --user --upgrade pip
python -m pip --version
python -m pip install hatch
hatch --version
python -m pip install tox
tox --version
- name: Run integration tests
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # nick-fields/retry@v3
with:
timeout_minutes: 30
max_attempts: 3
shell: bash
command: cd core && hatch run ci:integration-tests -- --ddtrace --splits ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }} --group ${{ matrix.split-group }}
- name: Run tests
run: tox
- name: Get current date
if: always()
id: date
run: |
CURRENT_DATE=$(date +'%Y-%m-%dT%H_%M_%S') # no colons allowed for artifacts
echo "date=$CURRENT_DATE" >> $GITHUB_OUTPUT
run: echo "::set-output name=date::$(date +'%Y_%m_%dT%H_%M_%S')" #no colons allowed for artifacts
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # actions/upload-artifact@v4
- uses: actions/upload-artifact@v2
if: always()
with:
name: logs_${{ matrix.python-version }}_${{ matrix.os }}_${{ matrix.split-group }}_${{ steps.date.outputs.date }}
name: logs_${{ matrix.python-version }}_${{ matrix.os }}_${{ steps.date.outputs.date }}
path: ./logs
- name: Upload Integration Test Coverage to Codecov
if: ${{ matrix.python-version == '3.11' }}
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # codecov/codecov-action@v5
- uses: actions/upload-artifact@v2
if: always()
with:
token: ${{ secrets.CODECOV_TOKEN }}
flags: integration
fail_ci_if_error: false
integration-report:
if: ${{ always() }}
name: Integration Test Suite
runs-on: ubuntu-latest
needs: [integration-mac-windows, integration-postgres]
steps:
- name: "Integration Tests Failed"
if: ${{ contains(needs.integration-mac-windows.result, 'failure') || contains(needs.integration-mac-windows.result, 'cancelled') || contains(needs.integration-postgres.result, 'failure') || contains(needs.integration-postgres.result, 'cancelled') }}
# when this is true the next step won't execute
run: |
echo "::notice title='Integration test suite failed'"
exit 1
- name: "Integration Tests Passed"
run: |
echo "::notice title='Integration test suite passed'"
name: integration_results_${{ matrix.python-version }}_${{ matrix.os }}_${{ steps.date.outputs.date }}.csv
path: integration_results.csv
build:
name: build packages
@@ -358,17 +189,17 @@ jobs:
steps:
- name: Check out the repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
uses: actions/checkout@v2
- name: Set up Python
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # actions/setup-python@v6
uses: actions/setup-python@v4.3.0
with:
python-version: "3.10"
python-version: '3.8'
- name: Install python dependencies
run: |
python -m pip install --user --upgrade pip
python -m pip install --upgrade hatch twine check-wheel-contents
python -m pip install --upgrade setuptools wheel twine check-wheel-contents
python -m pip --version
- name: Build distributions
@@ -377,7 +208,27 @@ jobs:
- name: Show distributions
run: ls -lh dist/
- name: Check and verify distributions
- name: Check distribution descriptions
run: |
cd core
hatch run build:check-all
twine check dist/*
- name: Check wheel contents
run: |
check-wheel-contents dist/*.whl --ignore W007,W008
- name: Install wheel distributions
run: |
find ./dist/*.whl -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/
- name: Check wheel distributions
run: |
dbt --version
- name: Install source distributions
# ignore dbt-1.0.0, which intentionally raises an error when installed from source
run: |
find ./dist/dbt-[a-z]*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/
- name: Check source distributions
run: |
dbt --version

Some files were not shown because too many files have changed in this diff.