Mirror of https://github.com/dbt-labs/dbt-core, synced 2025-12-19 22:51:28 +00:00

Compare commits: `remove-ssl` ... `db-setup-w` (1 commit)

| Author | SHA1 | Date |
|---|---|---|
|  | 03dfb11d2b |  |
@@ -1,19 +1,13 @@
[bumpversion]
current_version = 1.7.0b1
parse = (?P<major>[\d]+) # major version number
    \.(?P<minor>[\d]+) # minor version number
    \.(?P<patch>[\d]+) # patch version number
    (?P<prerelease> # optional pre-release - ex: a1, b2, rc25
    (?P<prekind>a|b|rc) # pre-release type
    (?P<num>[\d]+) # pre-release version number
current_version = 0.21.0b1
parse = (?P<major>\d+)
    \.(?P<minor>\d+)
    \.(?P<patch>\d+)
    ((?P<prekind>a|b|rc)
    (?P<pre>\d+) # pre-release version num
    )?
    ( # optional nightly release indicator
    \.(?P<nightly>dev[0-9]+) # ex: .dev02142023
    )? # expected matches: `1.15.0`, `1.5.0a11`, `1.5.0a1.dev123`, `1.5.0.dev123457`, expected failures: `1`, `1.5`, `1.5.2-a1`, `text1.5.0`
serialize =
    {major}.{minor}.{patch}{prekind}{num}.{nightly}
    {major}.{minor}.{patch}.{nightly}
    {major}.{minor}.{patch}{prekind}{num}
serialize =
    {major}.{minor}.{patch}{prekind}{pre}
    {major}.{minor}.{patch}
commit = False
tag = False
@@ -21,27 +15,36 @@ tag = False
[bumpversion:part:prekind]
first_value = a
optional_value = final
values =
values =
    a
    b
    rc
    final

[bumpversion:part:num]
[bumpversion:part:pre]
first_value = 1

[bumpversion:part:nightly]
[bumpversion:file:setup.py]

[bumpversion:file:core/setup.py]

[bumpversion:file:core/dbt/version.py]

[bumpversion:file:core/scripts/create_adapter_plugins.py]

[bumpversion:file:plugins/postgres/setup.py]

[bumpversion:file:plugins/redshift/setup.py]

[bumpversion:file:plugins/snowflake/setup.py]

[bumpversion:file:plugins/bigquery/setup.py]

[bumpversion:file:plugins/postgres/dbt/adapters/postgres/__version__.py]

[bumpversion:file:docker/Dockerfile]
[bumpversion:file:plugins/redshift/dbt/adapters/redshift/__version__.py]

[bumpversion:file:tests/adapter/setup.py]
[bumpversion:file:plugins/snowflake/dbt/adapters/snowflake/__version__.py]

[bumpversion:file:plugins/bigquery/dbt/adapters/bigquery/__version__.py]

[bumpversion:file:tests/adapter/dbt/tests/adapter/__version__.py]
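The comment on the newer parse pattern doubles as a test spec. As a quick sketch (not part of the diff), the pattern can be re-assembled into a single verbose Python regex and checked against the expected matches and failures listed in that comment:

```
import re

# A minimal sketch: the nightly-aware parse pattern from the config above,
# re-assembled into one verbose regex (not part of the original file).
VERSION_RE = re.compile(
    r"""(?P<major>\d+)               # major version number
    \.(?P<minor>\d+)                 # minor version number
    \.(?P<patch>\d+)                 # patch version number
    (?P<prerelease>                  # optional pre-release - ex: a1, b2, rc25
        (?P<prekind>a|b|rc)          # pre-release type
        (?P<num>\d+)                 # pre-release version number
    )?
    (                                # optional nightly release indicator
        \.(?P<nightly>dev[0-9]+)     # ex: .dev02142023
    )?""",
    re.VERBOSE,
)

# Expected matches and failures, copied from the config comment.
for ok in ("1.15.0", "1.5.0a11", "1.5.0a1.dev123", "1.5.0.dev123457"):
    assert VERSION_RE.fullmatch(ok), ok
for bad in ("1", "1.5", "1.5.2-a1", "text1.5.0"):
    assert VERSION_RE.fullmatch(bad) is None, bad
```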
@@ -1,23 +0,0 @@
## Previous Releases

For information on prior major and minor releases, see their changelogs:

* [1.6](https://github.com/dbt-labs/dbt-core/blob/1.6.latest/CHANGELOG.md)
* [1.5](https://github.com/dbt-labs/dbt-core/blob/1.5.latest/CHANGELOG.md)
* [1.4](https://github.com/dbt-labs/dbt-core/blob/1.4.latest/CHANGELOG.md)
* [1.3](https://github.com/dbt-labs/dbt-core/blob/1.3.latest/CHANGELOG.md)
* [1.2](https://github.com/dbt-labs/dbt-core/blob/1.2.latest/CHANGELOG.md)
* [1.1](https://github.com/dbt-labs/dbt-core/blob/1.1.latest/CHANGELOG.md)
* [1.0](https://github.com/dbt-labs/dbt-core/blob/1.0.latest/CHANGELOG.md)
* [0.21](https://github.com/dbt-labs/dbt-core/blob/0.21.latest/CHANGELOG.md)
* [0.20](https://github.com/dbt-labs/dbt-core/blob/0.20.latest/CHANGELOG.md)
* [0.19](https://github.com/dbt-labs/dbt-core/blob/0.19.latest/CHANGELOG.md)
* [0.18](https://github.com/dbt-labs/dbt-core/blob/0.18.latest/CHANGELOG.md)
* [0.17](https://github.com/dbt-labs/dbt-core/blob/0.17.latest/CHANGELOG.md)
* [0.16](https://github.com/dbt-labs/dbt-core/blob/0.16.latest/CHANGELOG.md)
* [0.15](https://github.com/dbt-labs/dbt-core/blob/0.15.latest/CHANGELOG.md)
* [0.14](https://github.com/dbt-labs/dbt-core/blob/0.14.latest/CHANGELOG.md)
* [0.13](https://github.com/dbt-labs/dbt-core/blob/0.13.latest/CHANGELOG.md)
* [0.12](https://github.com/dbt-labs/dbt-core/blob/0.12.latest/CHANGELOG.md)
* [0.11 and earlier](https://github.com/dbt-labs/dbt-core/blob/0.11.latest/CHANGELOG.md)
@@ -1,70 +0,0 @@
## dbt-core 1.7.0-b1 - August 17, 2023

### Breaking Changes

- Removed the FirstRunResultError and AfterFirstRunResultError event types, using the existing RunResultError in their place. ([#7963](https://github.com/dbt-labs/dbt-core/issues/7963))

### Features

- Enable re-population of metadata vars post-environment change during programmatic invocation ([#8010](https://github.com/dbt-labs/dbt-core/issues/8010))
- Added support to configure a delimiter for a seed file, defaults to comma ([#3990](https://github.com/dbt-labs/dbt-core/issues/3990))
- Allow specification of `create_metric: true` on measures ([#8125](https://github.com/dbt-labs/dbt-core/issues/8125))

### Fixes

- Copy dir during `dbt deps` if symlink fails ([#7428](https://github.com/dbt-labs/dbt-core/issues/7428), [#8223](https://github.com/dbt-labs/dbt-core/issues/8223))
- Fixed double-underline ([#5301](https://github.com/dbt-labs/dbt-core/issues/5301))
- Copy target_schema from config into snapshot node ([#6745](https://github.com/dbt-labs/dbt-core/issues/6745))
- Enable converting deprecation warnings to errors ([#8130](https://github.com/dbt-labs/dbt-core/issues/8130))
- Add status to Parse Inline Error ([#8173](https://github.com/dbt-labs/dbt-core/issues/8173))
- Ensure `warn_error_options` get serialized in `invocation_args_dict` ([#7694](https://github.com/dbt-labs/dbt-core/issues/7694))
- Stop detecting materialization macros based on macro name ([#6231](https://github.com/dbt-labs/dbt-core/issues/6231))
- Update `dbt deps` download retry logic to handle `EOFError` exceptions ([#6653](https://github.com/dbt-labs/dbt-core/issues/6653))
- Improve handling of CTE injection with ephemeral models ([#8213](https://github.com/dbt-labs/dbt-core/issues/8213))
- Fix unbound local variable error in `checked_agg_time_dimension_for_measure` ([#8230](https://github.com/dbt-labs/dbt-core/issues/8230))
- Ensure runtime errors are raised for graph runnable tasks (compile, show, run, etc) ([#8166](https://github.com/dbt-labs/dbt-core/issues/8166))
- Fix retry not working with log-file-max-bytes ([#8297](https://github.com/dbt-labs/dbt-core/issues/8297))
- Detect changes to model access, version, or latest_version in state:modified ([#8189](https://github.com/dbt-labs/dbt-core/issues/8189))
- Add connection status into list of statuses for dbt debug ([#8350](https://github.com/dbt-labs/dbt-core/issues/8350))
- fix fqn-selection for external versioned models ([#8374](https://github.com/dbt-labs/dbt-core/issues/8374))
- Fix: DbtInternalError after model that previously ref'd external model is deleted ([#8375](https://github.com/dbt-labs/dbt-core/issues/8375))
- Fix using list command with path selector and project-dir ([#8385](https://github.com/dbt-labs/dbt-core/issues/8385))
- Remedy performance regression by only writing run_results.json once. ([#8360](https://github.com/dbt-labs/dbt-core/issues/8360))

### Docs

- Corrected spelling of "Partiton" ([dbt-docs/#8100](https://github.com/dbt-labs/dbt-docs/issues/8100))
- Remove static SQL codeblock for metrics ([dbt-docs/#436](https://github.com/dbt-labs/dbt-docs/issues/436))
- fixed comment util.py ([dbt-docs/#None](https://github.com/dbt-labs/dbt-docs/issues/None))
- Display contract and column constraints on the model page ([dbt-docs/#433](https://github.com/dbt-labs/dbt-docs/issues/433))
- Display semantic model details in docs ([dbt-docs/#431](https://github.com/dbt-labs/dbt-docs/issues/431))

### Under the Hood

- Refactor flaky test pp_versioned_models ([#7781](https://github.com/dbt-labs/dbt-core/issues/7781))
- format exception from dbtPlugin.initialize ([#8152](https://github.com/dbt-labs/dbt-core/issues/8152))
- A way to control maxBytes for a single dbt.log file ([#8199](https://github.com/dbt-labs/dbt-core/issues/8199))
- Ref expressions with version can now be processed by the latest version of the high-performance dbt-extractor library. ([#7688](https://github.com/dbt-labs/dbt-core/issues/7688))
- Bump manifest schema version to v11, freeze manifest v10 ([#8333](https://github.com/dbt-labs/dbt-core/issues/8333))
- add tracking for plugin.get_nodes calls ([#8344](https://github.com/dbt-labs/dbt-core/issues/8344))
- add internal flag: --no-partial-parse-file-diff to inform whether to compute a file diff during partial parsing ([#8363](https://github.com/dbt-labs/dbt-core/issues/8363))
- Add return values to a number of functions for mypy ([#8389](https://github.com/dbt-labs/dbt-core/issues/8389))
- Fix mypy warnings for ManifestLoader.load() ([#8401](https://github.com/dbt-labs/dbt-core/issues/8401))
- Use python version 3.10.7 in Docker image. ([#8444](https://github.com/dbt-labs/dbt-core/issues/8444))

### Dependencies

- Bump mypy from 1.3.0 to 1.4.0 ([#7912](https://github.com/dbt-labs/dbt-core/pull/7912))
- Bump mypy from 1.4.0 to 1.4.1 ([#8219](https://github.com/dbt-labs/dbt-core/pull/8219))
- Update pin for click<9 ([#8232](https://github.com/dbt-labs/dbt-core/pull/8232))
- Add upper bound to sqlparse pin of <0.5 ([#8236](https://github.com/dbt-labs/dbt-core/pull/8236))
- Support dbt-semantic-interfaces 0.2.0 ([#8250](https://github.com/dbt-labs/dbt-core/pull/8250))

### Contributors
- [@anjutiwari](https://github.com/anjutiwari) ([#7428](https://github.com/dbt-labs/dbt-core/issues/7428), [#8223](https://github.com/dbt-labs/dbt-core/issues/8223))
- [@d-kaneshiro](https://github.com/d-kaneshiro) ([#None](https://github.com/dbt-labs/dbt-core/issues/None))
- [@gem7318](https://github.com/gem7318) ([#8010](https://github.com/dbt-labs/dbt-core/issues/8010))
- [@lllong33](https://github.com/lllong33) ([#5301](https://github.com/dbt-labs/dbt-core/issues/5301))
- [@marcodamore](https://github.com/marcodamore) ([#436](https://github.com/dbt-labs/dbt-core/issues/436))
- [@pgoslatara](https://github.com/pgoslatara) ([#8100](https://github.com/dbt-labs/dbt-core/issues/8100))
- [@ramonvermeulen](https://github.com/ramonvermeulen) ([#3990](https://github.com/dbt-labs/dbt-core/issues/3990))
@@ -1,7 +0,0 @@
kind: Breaking Changes
body: Removed the FirstRunResultError and AfterFirstRunResultError event types, using
  the existing RunResultError in their place.
time: 2023-07-25T17:13:59.441682-04:00
custom:
  Author: peterallenwebb
  Issue: "7963"
@@ -1,6 +0,0 @@
kind: "Dependencies"
body: "Bump mypy from 1.3.0 to 1.4.0"
time: 2023-06-21T00:57:52.00000Z
custom:
  Author: dependabot[bot]
  PR: 7912
@@ -1,6 +0,0 @@
kind: "Dependencies"
body: "Bump mypy from 1.4.0 to 1.4.1"
time: 2023-07-26T20:17:40.00000Z
custom:
  Author: dependabot[bot]
  PR: 8219
@@ -1,6 +0,0 @@
kind: Dependencies
body: Update pin for click<9
time: 2023-07-27T14:57:03.180458-05:00
custom:
  Author: emmyoop
  PR: "8232"
@@ -1,6 +0,0 @@
kind: Dependencies
body: Add upper bound to sqlparse pin of <0.5
time: 2023-07-27T14:57:26.40416-05:00
custom:
  Author: emmyoop
  PR: "8236"
@@ -1,6 +0,0 @@
kind: Dependencies
body: Support dbt-semantic-interfaces 0.2.0
time: 2023-07-28T13:52:27.207241-07:00
custom:
  Author: QMalcolm
  PR: "8250"
@@ -1,6 +0,0 @@
kind: Docs
body: Corrected spelling of "Partiton"
time: 2023-07-15T20:09:07.057361092+02:00
custom:
  Author: pgoslatara
  Issue: "8100"
@@ -1,6 +0,0 @@
kind: Docs
body: Remove static SQL codeblock for metrics
time: 2023-07-18T19:24:22.155323+02:00
custom:
  Author: marcodamore
  Issue: "436"
@@ -1,6 +0,0 @@
kind: Docs
body: fixed comment util.py
time: 2023-07-27T17:09:00.089237+09:00
custom:
  Author: d-kaneshiro
  Issue: None
@@ -1,6 +0,0 @@
kind: Docs
body: Display contract and column constraints on the model page
time: 2023-08-04T13:18:15.627005-05:00
custom:
  Author: emmyoop
  Issue: "433"
@@ -1,6 +0,0 @@
kind: Docs
body: Display semantic model details in docs
time: 2023-08-07T15:25:48.711627-05:00
custom:
  Author: emmyoop
  Issue: "431"
@@ -1,7 +0,0 @@
kind: Features
body: Enable re-population of metadata vars post-environment change during programmatic
  invocation
time: 2023-07-02T12:28:13.416305-04:00
custom:
  Author: gem7318
  Issue: "8010"
@@ -1,6 +0,0 @@
kind: Features
body: Added support to configure a delimiter for a seed file, defaults to comma
time: 2023-07-14T20:24:45.513847165+02:00
custom:
  Author: ramonvermeulen
  Issue: "3990"
@@ -1,6 +0,0 @@
kind: Features
body: 'Allow specification of `create_metric: true` on measures'
time: 2023-08-03T15:18:24.351003-07:00
custom:
  Author: QMalcolm
  Issue: "8125"
@@ -1,6 +0,0 @@
kind: Fixes
body: Copy dir during `dbt deps` if symlink fails
time: 2023-04-24T21:07:34.336797+05:30
custom:
  Author: anjutiwari
  Issue: "7428 8223"
@@ -1,6 +0,0 @@
kind: Fixes
body: Fixed double-underline
time: 2023-06-25T14:27:31.231253719+08:00
custom:
  Author: lllong33
  Issue: "5301"
@@ -1,6 +0,0 @@
kind: Fixes
body: Copy target_schema from config into snapshot node
time: 2023-07-17T16:06:52.957724-04:00
custom:
  Author: gshank
  Issue: "6745"
@@ -1,6 +0,0 @@
kind: Fixes
body: Enable converting deprecation warnings to errors
time: 2023-07-18T12:55:18.03914-04:00
custom:
  Author: michelleark
  Issue: "8130"
@@ -1,6 +0,0 @@
kind: Fixes
body: Add status to Parse Inline Error
time: 2023-07-20T12:27:23.085084-07:00
custom:
  Author: ChenyuLInx
  Issue: "8173"
@@ -1,6 +0,0 @@
kind: Fixes
body: Ensure `warn_error_options` get serialized in `invocation_args_dict`
time: 2023-07-20T16:15:13.761813-07:00
custom:
  Author: QMalcolm
  Issue: "7694"
@@ -1,6 +0,0 @@
kind: Fixes
body: Stop detecting materialization macros based on macro name
time: 2023-07-20T17:01:12.496238-07:00
custom:
  Author: QMalcolm
  Issue: "6231"
@@ -1,6 +0,0 @@
kind: Fixes
body: Update `dbt deps` download retry logic to handle `EOFError` exceptions
time: 2023-07-20T17:24:22.969951-07:00
custom:
  Author: QMalcolm
  Issue: "6653"
@@ -1,6 +0,0 @@
kind: Fixes
body: Improve handling of CTE injection with ephemeral models
time: 2023-07-26T10:44:48.888451-04:00
custom:
  Author: gshank
  Issue: "8213"
@@ -1,6 +0,0 @@
kind: Fixes
body: Fix unbound local variable error in `checked_agg_time_dimension_for_measure`
time: 2023-07-27T12:58:30.673803-07:00
custom:
  Author: QMalcolm
  Issue: "8230"
@@ -1,7 +0,0 @@
kind: Fixes
body: Ensure runtime errors are raised for graph runnable tasks (compile, show, run,
  etc)
time: 2023-07-28T11:56:20.863718-04:00
custom:
  Author: michelleark
  Issue: "8166"
@@ -1,6 +0,0 @@
kind: Fixes
body: Fix retry not working with log-file-max-bytes
time: 2023-08-02T14:15:56.306027-07:00
custom:
  Author: ChenyuLInx
  Issue: "8297"
@@ -1,6 +0,0 @@
kind: Fixes
body: Detect changes to model access, version, or latest_version in state:modified
time: 2023-08-06T22:23:19.166334-04:00
custom:
  Author: michelleark
  Issue: "8189"
@@ -1,6 +0,0 @@
kind: Fixes
body: Add connection status into list of statuses for dbt debug
time: 2023-08-10T18:48:59.221344+01:00
custom:
  Author: aranke
  Issue: "8350"
@@ -1,6 +0,0 @@
kind: Fixes
body: fix fqn-selection for external versioned models
time: 2023-08-11T20:41:44.725144-04:00
custom:
  Author: michelleark
  Issue: "8374"
@@ -1,7 +0,0 @@
kind: Fixes
body: 'Fix: DbtInternalError after model that previously ref''d external model is
  deleted'
time: 2023-08-11T21:20:08.145554-04:00
custom:
  Author: michelleark
  Issue: "8375"
@@ -1,6 +0,0 @@
kind: Fixes
body: Fix using list command with path selector and project-dir
time: 2023-08-14T14:57:02.02816-04:00
custom:
  Author: gshank
  Issue: "8385"
@@ -1,6 +0,0 @@
kind: Fixes
body: Remedy performance regression by only writing run_results.json once.
time: 2023-08-15T10:44:44.836991-04:00
custom:
  Author: peterallenwebb
  Issue: "8360"
@@ -1,6 +0,0 @@
kind: Under the Hood
body: Use python version 3.10.7 in Docker image.
time: 2023-08-17T13:09:15.936349-05:00
custom:
  Author: McKnight-42
  Issue: "8444"
@@ -1,6 +0,0 @@
kind: Under the Hood
body: Refactor flaky test pp_versioned_models
time: 2023-07-19T12:46:11.972481-04:00
custom:
  Author: gshank
  Issue: "7781"
@@ -1,6 +0,0 @@
kind: Under the Hood
body: format exception from dbtPlugin.initialize
time: 2023-07-19T16:33:34.586377-04:00
custom:
  Author: michelleark
  Issue: "8152"
@@ -1,6 +0,0 @@
kind: Under the Hood
body: A way to control maxBytes for a single dbt.log file
time: 2023-07-24T15:06:54.263822-07:00
custom:
  Author: ChenyuLInx
  Issue: "8199"
@@ -1,7 +0,0 @@
kind: Under the Hood
body: Ref expressions with version can now be processed by the latest version of the
  high-performance dbt-extractor library.
time: 2023-07-25T10:26:09.902878-04:00
custom:
  Author: peterallenwebb
  Issue: "7688"
@@ -1,6 +0,0 @@
kind: Under the Hood
body: Bump manifest schema version to v11, freeze manifest v10
time: 2023-08-07T16:45:09.712744-04:00
custom:
  Author: gshank
  Issue: "8333"
@@ -1,6 +0,0 @@
kind: Under the Hood
body: add tracking for plugin.get_nodes calls
time: 2023-08-09T09:48:34.819445-04:00
custom:
  Author: michelleark
  Issue: "8344"
@@ -1,7 +0,0 @@
kind: Under the Hood
body: 'add internal flag: --no-partial-parse-file-diff to inform whether to compute
  a file diff during partial parsing'
time: 2023-08-11T10:09:02.832241-04:00
custom:
  Author: michelleark
  Issue: "8363"
@@ -1,6 +0,0 @@
kind: Under the Hood
body: Add return values to a number of functions for mypy
time: 2023-08-15T17:03:07.895252-04:00
custom:
  Author: gshank
  Issue: "8389"
@@ -1,6 +0,0 @@
kind: Under the Hood
body: Fix mypy warnings for ManifestLoader.load()
time: 2023-08-17T13:45:48.937252-04:00
custom:
  Author: gshank
  Issue: "8401"
@@ -1,53 +0,0 @@
# CHANGELOG Automation

We use [changie](https://changie.dev/) to automate `CHANGELOG` generation. For installation and format/command specifics, see the documentation.

### Quick Tour

- All new change entries get generated under `/.changes/unreleased` as a yaml file
- `header.tpl.md` contains the contents of the entire CHANGELOG file
- `0.0.0.md` contains the contents of the footer for the entire CHANGELOG file. changie looks to be in the process of supporting a footer file the same way it supports a header file; switch to that when available. For now, the 0.0.0 in the file name forces it to the bottom of the changelog no matter what version we are releasing.
- `.changie.yaml` contains the fields in a change, the format of a single change, as well as the format of the Contributors section for each version.

### Workflow

#### Daily workflow
Almost every code change we make associated with an issue will require a `CHANGELOG` entry. After you have created the PR in GitHub, run `changie new` and follow the command prompts to generate a yaml file with your change details. This only needs to be done once per PR.

The `changie new` command will ensure the correct file format and file name. There is a one-to-one mapping of issues to changes; multiple issues cannot be lumped into a single entry. If you make a mistake, the yaml file may be directly modified and saved as long as the format is preserved.
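For illustration, a generated entry is a small yaml file like the ones removed in this diff, for example (values copied from one of those entries; changie nests `Author` and `Issue` under `custom`):

```
kind: Fixes
body: Fix using list command with path selector and project-dir
time: 2023-08-14T14:57:02.02816-04:00
custom:
  Author: gshank
  Issue: "8385"
```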

Note: If your PR has been cleared by the Core Team as not needing a changelog entry, the `Skip Changelog` label may be put on the PR to bypass the GitHub action that blocks PRs from being merged when they are missing a `CHANGELOG` entry.

#### Prerelease Workflow
These commands batch up changes in `/.changes/unreleased` to be included in this prerelease and move those files to a directory named for the release version. The `--move-dir` will be created if it does not exist and is created in `/.changes`.

```
changie batch <version> --move-dir '<version>' --prerelease 'rc1'
changie merge
```

Example
```
changie batch 1.0.5 --move-dir '1.0.5' --prerelease 'rc1'
changie merge
```

#### Final Release Workflow
These commands batch up changes in `/.changes/unreleased` as well as `/.changes/<version>` to be included in this final release and delete all prereleases. This rolls all prereleases up into a single final release. All `yaml` files in `/unreleased` and `<version>` will be deleted at this point.

```
changie batch <version> --include '<version>' --remove-prereleases
changie merge
```

Example
```
changie batch 1.0.5 --include '1.0.5' --remove-prereleases
changie merge
```

### A Note on Manual Edits & Gotchas
- Changie generates markdown files in the `.changes` directory that are parsed together with the `changie merge` command. Every time `changie merge` is run, it regenerates the entire file. For this reason, any changes made directly to `CHANGELOG.md` will be overwritten on the next run of `changie merge`.
- If changes need to be made to the `CHANGELOG.md`, make the changes to the relevant `<version>.md` file located in the `/.changes` directory. You will then run `changie merge` to regenerate the `CHANGELOG.md`.
- Do not run `changie batch` again on released versions. Our final release workflow deletes all of the yaml files associated with individual changes. If for some reason modifications to the `CHANGELOG.md` are required after we've generated the final release `CHANGELOG.md`, the modifications need to be made manually to the `<version>.md` file in the `/.changes` directory.
- changie can modify, create and delete files depending on the command you run. This is expected. Be sure to commit everything that has been modified and deleted.
@@ -1,6 +0,0 @@
# dbt Core Changelog

- This file provides a full account of all changes to `dbt-core` and `dbt-postgres`
- Changes are listed under the (pre)release in which they first appear. Subsequent releases include changes from previous releases.
- "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
- Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)
@@ -1,6 +0,0 @@
kind: Docs
body: Fix newline escapes and improve formatting in docker README
time: 2023-07-28T19:34:38.351042747+02:00
custom:
  Author: jamezrin
  Issue: "8211"
@@ -1,6 +0,0 @@
kind: Fixes
body: Ensure parsing does not break when `window_groupings` is not specified for `non_additive_dimension`
time: 2023-08-18T09:53:48.154848-07:00
custom:
  Author: QMalcolm
  Issue: "8453"
@@ -1,6 +0,0 @@
kind: Fixes
body: Turn breaking changes to contracted models into warnings for unversioned models
time: 2023-08-18T10:38:02.251286-05:00
custom:
  Author: emmyoop
  Issue: 8384 8282
@@ -1,7 +0,0 @@
kind: Fixes
body: fix ambiguous reference error for tests and versions when model name is duplicated across
  packages
time: 2023-08-24T16:10:24.437362-04:00
custom:
  Author: michelleark
  Issue: "8327 8493"
@@ -1,7 +0,0 @@
kind: Under the Hood
body: 'Re-organize jinja macros: relation-specific in /macros/adapters/relations/<relation>,
  relation agnostic in /macros/relations'
time: 2023-08-21T13:48:01.474731-04:00
custom:
  Author: mikealfare
  Issue: "8449"
@@ -1,6 +0,0 @@
kind: Under the Hood
body: Update typing to meet mypy standards
time: 2023-08-23T19:42:37.130694-04:00
custom:
  Author: mikealfare
  Issue: "8396"
140 .changie.yaml
@@ -1,140 +0,0 @@
changesDir: .changes
unreleasedDir: unreleased
headerPath: header.tpl.md
versionHeaderPath: ""
changelogPath: CHANGELOG.md
versionExt: md
envPrefix: "CHANGIE_"
versionFormat: '## dbt-core {{.Version}} - {{.Time.Format "January 02, 2006"}}'
kindFormat: '### {{.Kind}}'
changeFormat: |-
  {{- $IssueList := list }}
  {{- $changes := splitList " " $.Custom.Issue }}
  {{- range $issueNbr := $changes }}
  {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/issues/nbr)" | replace "nbr" $issueNbr }}
  {{- $IssueList = append $IssueList $changeLink }}
  {{- end -}}
  - {{.Body}} ({{ range $index, $element := $IssueList }}{{if $index}}, {{end}}{{$element}}{{end}})
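# For illustration (this comment is not part of the original file): a change
# with body "Copy dir during `dbt deps` if symlink fails" and Custom.Issue
# "7428 8223" renders through the changeFormat above as:
#   - Copy dir during `dbt deps` if symlink fails ([#7428](https://github.com/dbt-labs/dbt-core/issues/7428), [#8223](https://github.com/dbt-labs/dbt-core/issues/8223))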

kinds:
- label: Breaking Changes
- label: Features
- label: Fixes
- label: Docs
  changeFormat: |-
    {{- $IssueList := list }}
    {{- $changes := splitList " " $.Custom.Issue }}
    {{- range $issueNbr := $changes }}
    {{- $changeLink := "[dbt-docs/#nbr](https://github.com/dbt-labs/dbt-docs/issues/nbr)" | replace "nbr" $issueNbr }}
    {{- $IssueList = append $IssueList $changeLink }}
    {{- end -}}
    - {{.Body}} ({{ range $index, $element := $IssueList }}{{if $index}}, {{end}}{{$element}}{{end}})
- label: Under the Hood
- label: Dependencies
  changeFormat: |-
    {{- $PRList := list }}
    {{- $changes := splitList " " $.Custom.PR }}
    {{- range $pullrequest := $changes }}
    {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $pullrequest }}
    {{- $PRList = append $PRList $changeLink }}
    {{- end -}}
    - {{.Body}} ({{ range $index, $element := $PRList }}{{if $index}}, {{end}}{{$element}}{{end}})
  skipGlobalChoices: true
  additionalChoices:
    - key: Author
      label: GitHub Username(s) (separated by a single space if multiple)
      type: string
      minLength: 3
    - key: PR
      label: GitHub Pull Request Number (separated by a single space if multiple)
      type: string
      minLength: 1
- label: Security
  changeFormat: |-
    {{- $PRList := list }}
    {{- $changes := splitList " " $.Custom.PR }}
    {{- range $pullrequest := $changes }}
    {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $pullrequest }}
    {{- $PRList = append $PRList $changeLink }}
    {{- end -}}
    - {{.Body}} ({{ range $index, $element := $PRList }}{{if $index}}, {{end}}{{$element}}{{end}})
  skipGlobalChoices: true
  additionalChoices:
    - key: Author
      label: GitHub Username(s) (separated by a single space if multiple)
      type: string
      minLength: 3
    - key: PR
      label: GitHub Pull Request Number (separated by a single space if multiple)
      type: string
      minLength: 1

newlines:
  afterChangelogHeader: 1
  afterKind: 1
  afterChangelogVersion: 1
  beforeKind: 1
  endOfVersion: 1

custom:
- key: Author
  label: GitHub Username(s) (separated by a single space if multiple)
  type: string
  minLength: 3
- key: Issue
  label: GitHub Issue Number (separated by a single space if multiple)
  type: string
  minLength: 1

footerFormat: |
  {{- $contributorDict := dict }}
  {{- /* ensure all names in this list are all lowercase for later matching purposes */}}
  {{- $core_team := splitList " " .Env.CORE_TEAM }}
  {{- /* ensure we always skip snyk and dependabot in addition to the core team */}}
  {{- $maintainers := list "dependabot[bot]" "snyk-bot"}}
  {{- range $team_member := $core_team }}
    {{- $team_member_lower := lower $team_member }}
    {{- $maintainers = append $maintainers $team_member_lower }}
  {{- end }}
  {{- range $change := .Changes }}
    {{- $authorList := splitList " " $change.Custom.Author }}
    {{- /* loop through all authors for a single changelog */}}
    {{- range $author := $authorList }}
      {{- $authorLower := lower $author }}
      {{- /* we only want to include non-core team contributors */}}
      {{- if not (has $authorLower $maintainers)}}
        {{- $changeList := splitList " " $change.Custom.Author }}
        {{- $IssueList := list }}
        {{- $changeLink := $change.Kind }}
        {{- if or (eq $change.Kind "Dependencies") (eq $change.Kind "Security") }}
          {{- $changes := splitList " " $change.Custom.PR }}
          {{- range $issueNbr := $changes }}
            {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $issueNbr }}
            {{- $IssueList = append $IssueList $changeLink }}
          {{- end -}}
        {{- else }}
          {{- $changes := splitList " " $change.Custom.Issue }}
          {{- range $issueNbr := $changes }}
            {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/issues/nbr)" | replace "nbr" $issueNbr }}
            {{- $IssueList = append $IssueList $changeLink }}
          {{- end -}}
        {{- end }}
        {{- /* check if this contributor has other changes associated with them already */}}
        {{- if hasKey $contributorDict $author }}
          {{- $contributionList := get $contributorDict $author }}
          {{- $contributionList = concat $contributionList $IssueList }}
          {{- $contributorDict := set $contributorDict $author $contributionList }}
        {{- else }}
          {{- $contributionList := $IssueList }}
          {{- $contributorDict := set $contributorDict $author $contributionList }}
        {{- end }}
      {{- end}}
    {{- end}}
  {{- end }}
  {{- /* no indentation here for formatting so the final markdown doesn't have unneeded indentations */}}
  {{- if $contributorDict}}
  ### Contributors
  {{- range $k,$v := $contributorDict }}
  - [@{{$k}}](https://github.com/{{$k}}) ({{ range $index, $element := $v }}{{if $index}}, {{end}}{{$element}}{{end}})
  {{- end }}
  {{- end }}
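# For illustration (this comment is not part of the original file): for outside
# contributor anjutiwari with Issue "7428 8223", the footerFormat above renders:
#   - [@anjutiwari](https://github.com/anjutiwari) ([#7428](https://github.com/dbt-labs/dbt-core/issues/7428), [#8223](https://github.com/dbt-labs/dbt-core/issues/8223))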
12 .flake8
@@ -1,12 +0,0 @@
[flake8]
select =
    E
    W
    F
ignore =
    W503 # makes Flake8 work like black
    W504
    E203 # makes Flake8 work like black
    E741
    E501 # long line checking is done in black
exclude = test/
@@ -1,2 +0,0 @@
# Reformatting dbt-core via black, flake8, mypy, and assorted pre-commit hooks.
43e3fc22c4eae4d3d901faba05e33c40f1f1dc5a
6 .gitattributes vendored
@@ -1,6 +0,0 @@
core/dbt/include/index.html binary
tests/functional/artifacts/data/state/*/manifest.json binary
core/dbt/docs/build/html/searchindex.js binary
core/dbt/docs/build/html/index.html binary
performance/runner/Cargo.lock binary
core/dbt/events/types_pb2.py binary
60 .github/CODEOWNERS vendored
@@ -1,60 +0,0 @@
# This file contains the code owners for the dbt-core repo.
# PRs will be automatically assigned for review to the associated
# team(s) or person(s) that touches any files that are mapped to them.
#
# A statement takes precedence over the statements above it so more general
# assignments are found at the top with specific assignments being lower in
# the ordering (i.e. catch all assignment should be the first item)
#
# Consult GitHub documentation for formatting guidelines:
# https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners#example-of-a-codeowners-file

# As a default for areas with no assignment,
# the core team as a whole will be assigned
* @dbt-labs/core-team

### OSS Tooling Guild

/.github/ @dbt-labs/guild-oss-tooling
.bumpversion.cfg @dbt-labs/guild-oss-tooling

.changie.yaml @dbt-labs/guild-oss-tooling

pre-commit-config.yaml @dbt-labs/guild-oss-tooling
pytest.ini @dbt-labs/guild-oss-tooling
tox.ini @dbt-labs/guild-oss-tooling

pyproject.toml @dbt-labs/guild-oss-tooling
requirements.txt @dbt-labs/guild-oss-tooling
dev_requirements.txt @dbt-labs/guild-oss-tooling
/core/setup.py @dbt-labs/guild-oss-tooling
/core/MANIFEST.in @dbt-labs/guild-oss-tooling

### ADAPTERS

# Adapter interface ("base" + "sql" adapter defaults, cache)
/core/dbt/adapters @dbt-labs/core-adapters

# Global project (default macros + materializations), starter project
/core/dbt/include @dbt-labs/core-adapters

# Postgres plugin
/plugins/ @dbt-labs/core-adapters
/plugins/postgres/setup.py @dbt-labs/core-adapters @dbt-labs/guild-oss-tooling

# Functional tests for adapter plugins
/tests/adapter @dbt-labs/core-adapters

### TESTS

# Overlapping ownership for vast majority of unit + functional tests

# Perf regression testing framework
# This excludes the test project files itself since those aren't specific
# framework changes (excluded by not setting an owner next to it- no owner)
/performance @nathaniel-may
/performance/projects

### ARTIFACTS

/schemas/dbt @dbt-labs/cloud-artifacts
27 .github/ISSUE_TEMPLATE/beta-minor-version-release.md vendored Normal file
@@ -0,0 +1,27 @@
---
name: Beta minor version release
about: Creates a tracking checklist of items for a Beta minor version release
title: "[Tracking] v#.##.#B# release "
labels: 'release'
assignees: ''

---

### Release Core
- [ ] [Engineering] Follow [dbt-release workflow](https://www.notion.so/dbtlabs/Releasing-b97c5ea9a02949e79e81db3566bbc8ef#03ff37da697d4d8ba63d24fae1bfa817)
- [ ] [Engineering] Verify new release branch is created in the repo
- [ ] [Product] Finalize migration guide (next.docs.getdbt.com)

### Release Cloud
- [ ] [Engineering] Create a platform issue to update dbt Cloud and verify it is completed. [Example issue](https://github.com/dbt-labs/dbt-cloud/issues/3481)
- [ ] [Engineering] Determine if schemas have changed. If so, generate new schemas and push to schemas.getdbt.com

### Announce
- [ ] [Product] Announce in dbt Slack

### Post-release
- [ ] [Engineering] [Bump plugin versions](https://www.notion.so/dbtlabs/Releasing-b97c5ea9a02949e79e81db3566bbc8ef#f01854e8da3641179fbcbe505bdf515c) (dbt-spark + dbt-presto), add compatibility as needed
  - [ ] [Spark](https://github.com/dbt-labs/dbt-spark)
  - [ ] [Presto](https://github.com/dbt-labs/dbt-presto)
- [ ] [Engineering] Create a platform issue to update dbt-spark versions to dbt Cloud. [Example issue](https://github.com/dbt-labs/dbt-cloud/issues/3481)
- [ ] [Engineering] Create an epic for the RC release
97 .github/ISSUE_TEMPLATE/bug-report.yml vendored
@@ -1,97 +0,0 @@
name: 🐞 Bug
description: Report a bug or an issue you've found with dbt
title: "[Bug] <title>"
labels: ["bug", "triage"]
body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to fill out this bug report!
  - type: checkboxes
    attributes:
      label: Is this a new bug in dbt-core?
      description: >
        In other words, is this an error, flaw, failure or fault in our software?

        If this is a bug that broke existing functionality that used to work, please open a regression issue.
        If this is a bug in an adapter plugin, please open an issue in the adapter's repository.
        If this is a bug experienced while using dbt Cloud, please report to [support](mailto:support@getdbt.com).
        If this is a request for help or troubleshooting code in your own dbt project, please join our [dbt Community Slack](https://www.getdbt.com/community/join-the-community/) or open a [Discussion question](https://github.com/dbt-labs/docs.getdbt.com/discussions).

        Please search to see if an issue already exists for the bug you encountered.
      options:
        - label: I believe this is a new bug in dbt-core
          required: true
        - label: I have searched the existing issues, and I could not find an existing issue for this bug
          required: true
  - type: textarea
    attributes:
      label: Current Behavior
      description: A concise description of what you're experiencing.
    validations:
      required: true
  - type: textarea
    attributes:
      label: Expected Behavior
      description: A concise description of what you expected to happen.
    validations:
      required: true
  - type: textarea
    attributes:
      label: Steps To Reproduce
      description: Steps to reproduce the behavior.
      placeholder: |
        1. In this environment...
        2. With this config...
        3. Run '...'
        4. See error...
    validations:
      required: true
  - type: textarea
    id: logs
    attributes:
      label: Relevant log output
      description: |
        If applicable, log output to help explain your problem.
      render: shell
    validations:
      required: false
  - type: textarea
    attributes:
      label: Environment
      description: |
        examples:
        - **OS**: Ubuntu 20.04
        - **Python**: 3.9.12 (`python3 --version`)
        - **dbt-core**: 1.1.1 (`dbt --version`)
      value: |
        - OS:
        - Python:
        - dbt:
      render: markdown
    validations:
      required: false
  - type: dropdown
    id: database
    attributes:
      label: Which database adapter are you using with dbt?
      description: If the bug is specific to the database or adapter, please open the issue in that adapter's repository instead
      multiple: true
      options:
        - postgres
        - redshift
        - snowflake
        - bigquery
        - spark
        - other (mention it in "Additional Context")
    validations:
      required: false
  - type: textarea
    attributes:
      label: Additional Context
      description: |
        Links? References? Anything that will give us more context about the issue you are encountering!

        Tip: You can attach images or log files by clicking this area to highlight it and then dragging files in.
    validations:
      required: false
41 .github/ISSUE_TEMPLATE/bug_report.md vendored Normal file
@@ -0,0 +1,41 @@
---
name: Bug report
about: Report a bug or an issue you've found with dbt
title: ''
labels: bug, triage
assignees: ''

---

### Describe the bug
A clear and concise description of what the bug is. What command did you run? What happened?

### Steps To Reproduce
In as much detail as possible, please provide steps to reproduce the issue. Sample data that triggers the issue, example model code, etc is all very helpful here.

### Expected behavior
A clear and concise description of what you expected to happen.

### Screenshots and log output
If applicable, add screenshots or log output to help explain your problem.

### System information
**Which database are you using dbt with?**
- [ ] postgres
- [ ] redshift
- [ ] bigquery
- [ ] snowflake
- [ ] other (specify: ____________)

**The output of `dbt --version`:**
```
<output goes here>
```

**The operating system you're using:**

**The output of `python --version`:**

### Additional context
Add any other context about the problem here.
23 .github/ISSUE_TEMPLATE/config.yml vendored
@@ -1,23 +0,0 @@
blank_issues_enabled: false
contact_links:
  - name: Ask the community for help
    url: https://github.com/dbt-labs/docs.getdbt.com/discussions
    about: Need help troubleshooting? Check out our guide on how to ask
  - name: Contact dbt Cloud support
    url: mailto:support@getdbt.com
    about: Are you using dbt Cloud? Contact our support team for help!
  - name: Participate in Discussions
    url: https://github.com/dbt-labs/dbt-core/discussions
    about: Do you have a Big Idea for dbt? Read open discussions, or start a new one
  - name: Create an issue for dbt-redshift
    url: https://github.com/dbt-labs/dbt-redshift/issues/new/choose
    about: Report a bug or request a feature for dbt-redshift
  - name: Create an issue for dbt-bigquery
    url: https://github.com/dbt-labs/dbt-bigquery/issues/new/choose
    about: Report a bug or request a feature for dbt-bigquery
  - name: Create an issue for dbt-snowflake
    url: https://github.com/dbt-labs/dbt-snowflake/issues/new/choose
    about: Report a bug or request a feature for dbt-snowflake
  - name: Create an issue for dbt-spark
    url: https://github.com/dbt-labs/dbt-spark/issues/new/choose
    about: Report a bug or request a feature for dbt-spark
59 .github/ISSUE_TEMPLATE/feature-request.yml vendored
@@ -1,59 +0,0 @@
name: ✨ Feature
description: Propose a straightforward extension of dbt functionality
title: "[Feature] <title>"
labels: ["enhancement", "triage"]
body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to fill out this feature request!
  - type: checkboxes
    attributes:
      label: Is this your first time submitting a feature request?
      description: >
        We want to make sure that features are distinct and discoverable,
        so that other members of the community can find them and offer their thoughts.

        Issues are the right place to request straightforward extensions of existing dbt functionality.
        For "big ideas" about future capabilities of dbt, we ask that you open a
        [discussion](https://github.com/dbt-labs/dbt-core/discussions) in the "Ideas" category instead.
      options:
        - label: I have read the [expectations for open source contributors](https://docs.getdbt.com/docs/contributing/oss-expectations)
          required: true
        - label: I have searched the existing issues, and I could not find an existing issue for this feature
          required: true
        - label: I am requesting a straightforward extension of existing dbt functionality, rather than a Big Idea better suited to a discussion
          required: true
  - type: textarea
    attributes:
      label: Describe the feature
      description: A clear and concise description of what you want to happen.
    validations:
      required: true
  - type: textarea
    attributes:
      label: Describe alternatives you've considered
      description: |
        A clear and concise description of any alternative solutions or features you've considered.
    validations:
      required: false
  - type: textarea
    attributes:
      label: Who will this benefit?
      description: |
        What kind of use case will this feature be useful for? Please be specific and provide examples, this will help us prioritize properly.
    validations:
      required: false
  - type: input
    attributes:
      label: Are you interested in contributing this feature?
      description: Let us know if you want to write some code, and how we can help.
    validations:
      required: false
  - type: textarea
    attributes:
      label: Anything else?
      description: |
        Links? References? Anything that will give us more context about the feature you are suggesting!
    validations:
      required: false
23 .github/ISSUE_TEMPLATE/feature_request.md vendored Normal file
@@ -0,0 +1,23 @@
---
name: Feature request
about: Suggest an idea for dbt
title: ''
labels: enhancement, triage
assignees: ''

---

### Describe the feature
A clear and concise description of what you want to happen.

### Describe alternatives you've considered
A clear and concise description of any alternative solutions or features you've considered.

### Additional context
Is this feature database-specific? Which database(s) is/are relevant? Please include any other relevant context here.

### Who will this benefit?
What kind of use case will this feature be useful for? Please be specific and provide examples, this will help us prioritize properly.

### Are you interested in contributing this feature?
Let us know if you want to write some code, and how we can help.
28 .github/ISSUE_TEMPLATE/final-minor-version-release.md vendored Normal file
@@ -0,0 +1,28 @@
---
name: Final minor version release
about: Creates a tracking checklist of items for a final minor version release
title: "[Tracking] v#.##.# final release "
labels: 'release'
assignees: ''

---

### Release Core
- [ ] [Engineering] Verify all necessary changes exist on the release branch
- [ ] [Engineering] Follow [dbt-release workflow](https://www.notion.so/dbtlabs/Releasing-b97c5ea9a02949e79e81db3566bbc8ef#03ff37da697d4d8ba63d24fae1bfa817)
- [ ] [Product] Merge `next` into `current` for docs.getdbt.com

### Release Cloud
- [ ] [Engineering] Create a platform issue to update dbt Cloud and verify it is completed. [Example issue](https://github.com/dbt-labs/dbt-cloud/issues/3481)
- [ ] [Engineering] Determine if schemas have changed. If so, generate new schemas and push to schemas.getdbt.com

### Announce
- [ ] [Product] Update discourse
- [ ] [Product] Announce in dbt Slack

### Post-release
- [ ] [Engineering] [Bump plugin versions](https://www.notion.so/dbtlabs/Releasing-b97c5ea9a02949e79e81db3566bbc8ef#f01854e8da3641179fbcbe505bdf515c) (dbt-spark + dbt-presto), add compatibility as needed
  - [ ] [Spark](https://github.com/dbt-labs/dbt-spark)
  - [ ] [Presto](https://github.com/dbt-labs/dbt-presto)
- [ ] [Engineering] Create a platform issue to update dbt-spark versions to dbt Cloud. [Example issue](https://github.com/dbt-labs/dbt-cloud/issues/3481)
- [ ] [Product] Release new version of dbt-utils with new dbt version compatibility. If there are breaking changes requiring a minor version, plan upgrades of other packages that depend on dbt-utils.
58 .github/ISSUE_TEMPLATE/implementation-ticket.yml vendored
@@ -1,58 +0,0 @@
name: 🛠️ Implementation
description: This is an implementation ticket intended for use by the maintainers of dbt-core
title: "[<project>] <title>"
labels: ["user docs"]
body:
  - type: markdown
    attributes:
      value: This is an implementation ticket intended for use by the maintainers of dbt-core
  - type: checkboxes
    attributes:
      label: Housekeeping
      description: >
        A couple friendly reminders:
        1. Remove the `user docs` label if the scope of this work does not require changes to https://docs.getdbt.com/docs: no end-user interface (e.g. yml spec, CLI, error messages, etc) or functional changes
        2. Link any blocking issues in the "Blocked on" field under the "Core devs & maintainers" project.
      options:
        - label: I am a maintainer of dbt-core
          required: true
  - type: textarea
    attributes:
      label: Short description
      description: |
        Describe the scope of the ticket, a high-level implementation approach and any tradeoffs to consider
    validations:
      required: true
  - type: textarea
    attributes:
      label: Acceptance criteria
      description: |
        What is the definition of done for this ticket? Include any relevant edge cases and/or test cases
    validations:
      required: true
  - type: textarea
    attributes:
      label: Impact to Other Teams
      description: |
        Will this change impact other teams? Include details of the kinds of changes required (new tests, code changes, related tickets) and _add the relevant `Impact:[team]` label_.
      placeholder: |
        Example: This change impacts `dbt-redshift` because the tests will need to be modified. The `Impact:[Adapter]` label has been added.
    validations:
      required: true
  - type: textarea
    attributes:
      label: Will backports be required?
      description: |
        Will this change need to be backported to previous versions? Add details, possible blockers to backporting and _add the relevant backport labels `backport 1.x.latest`_
      placeholder: |
        Example: Backport to 1.6.latest, 1.5.latest and 1.4.latest. Since 1.4 isn't using click, the backport may be complicated. The `backport 1.6.latest`, `backport 1.5.latest` and `backport 1.4.latest` labels have been added.
    validations:
      required: true
  - type: textarea
    attributes:
      label: Context
      description: |
        Provide the "why", motivation, and alternative approaches considered -- linking to previous refinement issues, spikes, Notion docs as appropriate
    validations:
      required: false
29 .github/ISSUE_TEMPLATE/rc-minor-version-release copy.md vendored Normal file
@@ -0,0 +1,29 @@
---
name: RC minor version release
about: Creates a tracking checklist of items for a RC minor version release
title: "[Tracking] v#.##.#RC# release "
labels: 'release'
assignees: ''

---

### Release Core
- [ ] [Engineering] Verify all necessary changes exist on the release branch
- [ ] [Engineering] Follow [dbt-release workflow](https://www.notion.so/dbtlabs/Releasing-b97c5ea9a02949e79e81db3566bbc8ef#03ff37da697d4d8ba63d24fae1bfa817)
- [ ] [Product] Update migration guide (next.docs.getdbt.com)

### Release Cloud
- [ ] [Engineering] Create a platform issue to update dbt Cloud and verify it is completed. [Example issue](https://github.com/dbt-labs/dbt-cloud/issues/3481)
- [ ] [Engineering] Determine if schemas have changed. If so, generate new schemas and push to schemas.getdbt.com

### Announce
- [ ] [Product] Publish discourse
- [ ] [Product] Announce in dbt Slack

### Post-release
- [ ] [Engineering] [Bump plugin versions](https://www.notion.so/dbtlabs/Releasing-b97c5ea9a02949e79e81db3566bbc8ef#f01854e8da3641179fbcbe505bdf515c) (dbt-spark + dbt-presto), add compatibility as needed
  - [ ] [Spark](https://github.com/dbt-labs/dbt-spark)
  - [ ] [Presto](https://github.com/dbt-labs/dbt-presto)
- [ ] [Engineering] Create a platform issue to update dbt-spark versions to dbt Cloud. [Example issue](https://github.com/dbt-labs/dbt-cloud/issues/3481)
- [ ] [Product] Release new version of dbt-utils with new dbt version compatibility. If there are breaking changes requiring a minor version, plan upgrades of other packages that depend on dbt-utils.
- [ ] [Engineering] Create an epic for the final release
93 .github/ISSUE_TEMPLATE/regression-report.yml vendored
@@ -1,93 +0,0 @@
name: ☣️ Regression
description: Report a regression you've observed in a newer version of dbt
title: "[Regression] <title>"
labels: ["bug", "regression", "triage"]
body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to fill out this regression report!
  - type: checkboxes
    attributes:
      label: Is this a regression in a recent version of dbt-core?
      description: >
        A regression is when documented functionality works as expected in an older version of dbt-core,
        and no longer works after upgrading to a newer version of dbt-core
      options:
        - label: I believe this is a regression in dbt-core functionality
          required: true
        - label: I have searched the existing issues, and I could not find an existing issue for this regression
          required: true
  - type: textarea
    attributes:
      label: Current Behavior
      description: A concise description of what you're experiencing.
    validations:
      required: true
  - type: textarea
    attributes:
      label: Expected/Previous Behavior
      description: A concise description of what you expected to happen.
    validations:
      required: true
  - type: textarea
    attributes:
      label: Steps To Reproduce
      description: Steps to reproduce the behavior.
      placeholder: |
        1. In this environment...
        2. With this config...
        3. Run '...'
        4. See error...
    validations:
      required: true
  - type: textarea
    id: logs
    attributes:
      label: Relevant log output
      description: |
        If applicable, log output to help explain your problem.
      render: shell
    validations:
      required: false
  - type: textarea
    attributes:
      label: Environment
      description: |
        examples:
        - **OS**: Ubuntu 20.04
        - **Python**: 3.9.12 (`python3 --version`)
        - **dbt-core (working version)**: 1.1.1 (`dbt --version`)
        - **dbt-core (regression version)**: 1.2.0 (`dbt --version`)
      value: |
        - OS:
        - Python:
        - dbt (working version):
        - dbt (regression version):
      render: markdown
    validations:
      required: true
  - type: dropdown
    id: database
    attributes:
      label: Which database adapter are you using with dbt?
      description: If the regression is specific to the database or adapter, please open the issue in that adapter's repository instead
      multiple: true
      options:
        - postgres
        - redshift
        - snowflake
        - bigquery
        - spark
        - other (mention it in "Additional Context")
    validations:
      required: false
  - type: textarea
    attributes:
      label: Additional Context
      description: |
        Links? References? Anything that will give us more context about the issue you are encountering!

        Tip: You can attach images or log files by clicking this area to highlight it and then dragging files in.
    validations:
      required: false
.github/_README.md (216 lines, vendored)

@@ -1,216 +0,0 @@
<!-- GitHub will publish this readme on the main repo page if the name is `README.md` so we've added the leading underscore to prevent this -->
<!-- Do not rename this file `README.md` -->
<!-- See https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-readmes -->

## What are GitHub Actions?

GitHub Actions are used for many different purposes. We use them to run tests in CI, validate PRs are in an expected state, and automate processes.

- [Overview of GitHub Actions](https://docs.github.com/en/actions/learn-github-actions/understanding-github-actions)
- [What's a workflow?](https://docs.github.com/en/actions/using-workflows/about-workflows)
- [GitHub Actions guides](https://docs.github.com/en/actions/guides)

___

## Where do actions and workflows live

We try to maintain actions that are shared across repositories in a single place so that necessary changes can be made once.

[dbt-labs/actions](https://github.com/dbt-labs/actions/) is the central repository of actions and workflows we use across repositories.

GitHub Actions also live locally within a repository. The workflows can be found at `.github/workflows` from the root of the repository. These should be specific to that code base.

Note: We are actively moving actions into the central Actions repository, so there is currently some duplication across repositories.

___

## Basics of Using Actions

### Viewing Output

- View the detailed action output for your PR in the **Checks** tab of the PR. This only shows the most recent run. You can also view high-level **Checks** output at the bottom of the PR.

- View _all_ action output for a repository from the [**Actions**](https://github.com/dbt-labs/dbt-core/actions) tab. Workflow results last 1 year. Artifacts last 90 days, unless specified otherwise in individual workflows.

  This view often shows what seem like duplicates of the same workflow. This occurs when files are renamed but the workflow name has not changed. These are in fact _not_ duplicates.

  You can see the branch the workflow runs from in this view. It is listed in the table between the workflow name and the time/duration of the run. When blank, the workflow is running in the context of the `main` branch.

### How to view what workflow file is being referenced from a run

- When viewing the output of a specific workflow run, click the 3 dots at the top right of the display. There will be an option to `View workflow file`.

### How to manually run a workflow

- If a workflow has the `on: workflow_dispatch` trigger, it can be manually triggered
- From the [**Actions**](https://github.com/dbt-labs/dbt-core/actions) tab, find the workflow you want to run, select it and fill in any inputs required. That's it! The same thing can also be done from the command line, as sketched below.
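
For instance, a minimal sketch using the [GitHub CLI](https://cli.github.com/); the workflow file name, ref, and input name here are only illustrative, not an actual workflow in this repo:

```shell
# trigger a workflow that declares `on: workflow_dispatch` with a version_number input
gh workflow run release.yml --ref main -f version_number=1.0.1

# then check on the run it kicked off
gh run list --workflow=release.yml
```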
### How to re-run jobs

- Some actions cannot be rerun in the GitHub UI, namely the snyk checks and the cla check. Snyk checks are rerun by closing and reopening the PR. You can retrigger the cla check by commenting on the PR with `@cla-bot check`

___

## General Standards

### Permissions
- By default, workflows have read permissions in the repository for the contents scope only when no permissions are explicitly set.
- It is best practice to always define the permissions explicitly. This will allow actions to continue to work when the default permissions on the repository are changed. It also allows explicit grants of the least permissions possible.
- There are a lot of permissions available. [Read up on them](https://docs.github.com/en/actions/using-jobs/assigning-permissions-to-jobs) if you're unsure what to use.

  ```yaml
  permissions:
    contents: read
    pull-requests: write
  ```

### Secrets
- When to use a [Personal Access Token (PAT)](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token) vs the [GITHUB_TOKEN](https://docs.github.com/en/actions/security-guides/automatic-token-authentication) generated for the action?

  The `GITHUB_TOKEN` is used by default. In most cases it is sufficient for what you need.

  If you expect the workflow to result in a commit that should retrigger workflows, you will need to use a Personal Access Token for the bot to commit the file. When using the GITHUB_TOKEN, the resulting commit will not trigger another GitHub Actions workflow run. This is due to limitations set by GitHub. See [the docs](https://docs.github.com/en/actions/security-guides/automatic-token-authentication#using-the-github_token-in-a-workflow) for a more detailed explanation.

  For example, we must use a PAT in our workflow to commit a new changelog yaml file for bot PRs. Once the file has been committed to the branch, it should retrigger the check that validates a changelog exists on the PR. Otherwise, it would stay in a failed state since the check would never retrigger.
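
A minimal sketch of that pattern, assuming the PAT is stored in the `FISHTOWN_BOT_PAT` secret (the name used by other workflows in this repo) and that an earlier step generated the changelog file:

```yaml
steps:
  - uses: actions/checkout@v3
    with:
      # a PAT instead of the default GITHUB_TOKEN, so the pushed commit
      # can retrigger the changelog check on this PR
      token: ${{ secrets.FISHTOWN_BOT_PAT }}

  - name: Commit generated changelog entry
    run: |
      git config user.name "Github Build Bot"
      git config user.email "buildbot@fishtownanalytics.com"
      git add .changes/unreleased/
      git commit -m "Add automated changelog yaml from template for bot PR"
      git push
```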
### Triggers
You can configure your workflows to run when specific activity on GitHub happens, at a scheduled time, or when an event outside of GitHub occurs. Read more details in the [GitHub docs](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows).

These triggers are under the `on` key of the workflow and more than one can be listed.

```yaml
on:
  push:
    branches:
      - "main"
      - "*.latest"
      - "releases/*"
  pull_request:
    # catch when the PR is opened with the label or when the label is added
    types: [opened, labeled]
  workflow_dispatch:
```

Some triggers of note that we use:

- `push` - Runs your workflow when you push a commit or tag.
- `pull_request` - Runs your workflow when activity on a pull request in the workflow's repository occurs. Takes in a list of activity types (opened, labeled, etc.) if appropriate.
- `pull_request_target` - Same as `pull_request` but runs in the context of the PR target branch.
- `workflow_call` - Used with reusable workflows. Triggered by another workflow calling it.
- `workflow_dispatch` - Gives the ability to manually trigger a workflow from the GitHub API, GitHub CLI, or GitHub browser interface.

### Basic Formatting
- Add a description of what your workflow does at the top in this format

  ```
  # **what?**
  # Describe what the action does.

  # **why?**
  # Why does this action exist?

  # **when?**
  # How/when will it be triggered?
  ```

- Leave blank lines between steps and jobs

  ```yaml
  jobs:
    dependency_changelog:
      runs-on: ubuntu-latest

      steps:
      - name: Get File Name Timestamp
        id: filename_time
        uses: nanzm/get-time-action@v1.1
        with:
          format: 'YYYYMMDD-HHmmss'

      - name: Get File Content Timestamp
        id: file_content_time
        uses: nanzm/get-time-action@v1.1
        with:
          format: 'YYYY-MM-DDTHH:mm:ss.000000-05:00'

      - name: Generate Filepath
        id: fp
        run: |
          FILEPATH=.changes/unreleased/Dependencies-${{ steps.filename_time.outputs.time }}.yaml
          echo "FILEPATH=$FILEPATH" >> $GITHUB_OUTPUT
  ```

- Print out all variables you will reference as the first step of a job. This allows for easier debugging. The first job should log all inputs. Subsequent jobs should reference outputs of other jobs, if present.

  When possible, generate variables at the top of your workflow in a single place to reference later. This is not always strictly possible since you may generate a value to be used later mid-workflow.

  Be sure to use quotes around these logs so special characters are not interpreted.

  ```yaml
  job1:
    - name: "[DEBUG] Print Variables"
      run: |
        echo "all variables defined as inputs"
        echo "The last commit sha in the release: ${{ inputs.sha }}"
        echo "The release version number: ${{ inputs.version_number }}"
        echo "The changelog_path: ${{ inputs.changelog_path }}"
        echo "The build_script_path: ${{ inputs.build_script_path }}"
        echo "The s3_bucket_name: ${{ inputs.s3_bucket_name }}"
        echo "The package_test_command: ${{ inputs.package_test_command }}"

    # collect all the variables that need to be used in subsequent jobs
    - name: Set Variables
      id: variables
      run: |
        echo "important_path='performance/runner/Cargo.toml'" >> $GITHUB_OUTPUT
        echo "release_id=${{github.event.inputs.release_id}}" >> $GITHUB_OUTPUT
        echo "open_prs=${{github.event.inputs.open_prs}}" >> $GITHUB_OUTPUT

  job2:
    needs: [job1]
    - name: "[DEBUG] Print Variables"
      run: |
        echo "all variables defined in job1 > Set Variables > outputs"
        echo "important_path: ${{ needs.job1.outputs.important_path }}"
        echo "release_id: ${{ needs.job1.outputs.release_id }}"
        echo "open_prs: ${{ needs.job1.outputs.open_prs }}"
  ```

- When it's not obvious what something does, add a comment!

___

## Tips

### Context
- The [GitHub CLI](https://cli.github.com/) is available in the default runners
- Actions run in your context, i.e. using an action from the marketplace that uses the GITHUB_TOKEN uses the GITHUB_TOKEN generated by your workflow run.

### Actions from the Marketplace
- Don't use external actions for things that can easily be accomplished manually.
- Always read through what an external action does before using it! Often an action in the GitHub Actions Marketplace can be replaced with a few lines in bash. This is much more maintainable (and won't change under us), and clear as to what's actually happening. It also prevents unexpected upstream changes to the action from affecting us.
- Pin actions _we don't control_ to tags, as in the sketch below.
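
For illustration, pinning to a reviewed release tag rather than a moving ref such as `@main` (the action and version here are taken from the backport workflow later in this diff):

```yaml
# pinned: a tagged release we have read through
- uses: tibdex/backport@v2.0.3

# avoid: a branch that can change out from under us
# - uses: tibdex/backport@main
```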
### Connecting to AWS
- Authenticate with the AWS managed workflow

  ```yaml
  - name: Configure AWS credentials from Test account
    uses: aws-actions/configure-aws-credentials@v2
    with:
      aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
      aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      aws-region: us-east-1
  ```

- Then access with the aws command that comes installed on the action runner machines

  ```yaml
  - name: Copy Artifacts from S3 via CLI
    run: aws s3 cp ${{ env.s3_bucket }} . --recursive
  ```

### Testing

- Depending on what your action does, you may be able to use [`act`](https://github.com/nektos/act) to test the action locally. Some features of GitHub Actions do not work with `act`, among those are reusable workflows. If you can't use `act`, you'll have to push your changes up before being able to test. This can be slow. A local run with `act` is sketched below.
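
A minimal local invocation, assuming Docker is running and using this repo's `main.yml` as the target (both flags are standard `act` options):

```shell
# list the jobs act would run for a pull_request event, without executing them
act pull_request -W .github/workflows/main.yml --list

# actually run them locally
act pull_request -W .github/workflows/main.yml
```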
.github/actions/latest-wrangler/Dockerfile (14 lines, vendored)

@@ -1,14 +0,0 @@
FROM python:3-slim AS builder
ADD . /app
WORKDIR /app

# We are installing a dependency here directly into our app source dir
RUN pip install --target=/app requests packaging

# A distroless container image with Python and some basics like SSL certificates
# https://github.com/GoogleContainerTools/distroless
FROM gcr.io/distroless/python3-debian10
COPY --from=builder /app /app
WORKDIR /app
ENV PYTHONPATH /app
CMD ["/app/main.py"]

.github/actions/latest-wrangler/README.md (50 lines, vendored)

@@ -1,50 +0,0 @@
# Github package 'latest' tag wrangler for containers

## Usage

Plug in the necessary inputs to determine if the container being built should be tagged 'latest' at the package level, for example `dbt-redshift:latest`.

## Inputs
| Input | Description |
| - | - |
| `package` | Name of the GH package to check against |
| `new_version` | Semver of new container |
| `gh_token` | GH token with package read scope |
| `halt_on_missing` | Return non-zero exit code if requested package does not exist (defaults to false) |

## Outputs
| Output | Description |
| - | - |
| `latest` | Whether or not the new container should be tagged 'latest' |
| `minor_latest` | Whether or not the new container should be tagged major.minor.latest |

## Example workflow
```yaml
name: Ship it!
on:
  workflow_dispatch:
    inputs:
      package:
        description: The package to publish
        required: true
      version_number:
        description: The version number
        required: true

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Wrangle latest tag
        id: is_latest
        uses: ./.github/actions/latest-wrangler
        with:
          package: ${{ github.event.inputs.package }}
          new_version: ${{ github.event.inputs.new_version }}
          gh_token: ${{ secrets.GITHUB_TOKEN }}
      - name: Print the results
        run: |
          echo "Is it latest? Survey says: ${{ steps.is_latest.outputs.latest }} !"
          echo "Is it minor.latest? Survey says: ${{ steps.is_latest.outputs.minor_latest }} !"
```

.github/actions/latest-wrangler/action.yml (20 lines, vendored)

@@ -1,20 +0,0 @@
name: "Github package 'latest' tag wrangler for containers"
description: "Determines whether or not a given dbt container should be given a bare 'latest' tag (i.e. dbt-core:latest)"
inputs:
  package_name:
    description: "Package to check (i.e. dbt-core, dbt-redshift, etc)"
    required: true
  new_version:
    description: "Semver of the container being built (i.e. 1.0.4)"
    required: true
  gh_token:
    description: "Auth token for github (must have view packages scope)"
    required: true
outputs:
  latest:
    description: "Whether or not built container should be tagged latest (bool)"
  minor_latest:
    description: "Whether or not built container should be tagged minor.latest (bool)"
runs:
  using: "docker"
  image: "Dockerfile"

@@ -1,26 +0,0 @@
name: Ship it!
on:
  workflow_dispatch:
    inputs:
      package:
        description: The package to publish
        required: true
      version_number:
        description: The version number
        required: true

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Wrangle latest tag
        id: is_latest
        uses: ./.github/actions/latest-wrangler
        with:
          package: ${{ github.event.inputs.package }}
          new_version: ${{ github.event.inputs.new_version }}
          gh_token: ${{ secrets.GITHUB_TOKEN }}
      - name: Print the results
        run: |
          echo "Is it latest? Survey says: ${{ steps.is_latest.outputs.latest }} !"

@@ -1,6 +0,0 @@
{
  "inputs": {
    "version_number": "1.0.1",
    "package": "dbt-redshift"
  }
}

.github/actions/latest-wrangler/main.py (98 lines, vendored)

@@ -1,98 +0,0 @@
import os
import sys
import requests
from distutils.util import strtobool
from typing import Union
from packaging.version import parse, Version


if __name__ == "__main__":

    # get inputs
    package = os.environ["INPUT_PACKAGE"]
    new_version = parse(os.environ["INPUT_NEW_VERSION"])
    gh_token = os.environ["INPUT_GH_TOKEN"]
    halt_on_missing = strtobool(os.environ.get("INPUT_HALT_ON_MISSING", "False"))

    # get package metadata from github
    package_request = requests.get(
        f"https://api.github.com/orgs/dbt-labs/packages/container/{package}/versions",
        auth=("", gh_token),
    )
    package_meta = package_request.json()

    # Log info if we don't get a 200
    if package_request.status_code != 200:
        print(f"Call to GH API failed: {package_request.status_code} {package_meta['message']}")

    # Make an early exit if there is no matching package in github
    if package_request.status_code == 404:
        if halt_on_missing:
            sys.exit(1)
        # everything is the latest if the package doesn't exist
        github_output = os.environ.get("GITHUB_OUTPUT")
        with open(github_output, "at", encoding="utf-8") as gh_output:
            # trailing newlines are needed so $GITHUB_OUTPUT parses the two keys as separate lines
            gh_output.write("latest=True\n")
            gh_output.write("minor_latest=True\n")
        sys.exit(0)

    # TODO: verify package meta is "correct"
    # https://github.com/dbt-labs/dbt-core/issues/4640

    # map versions and tags
    version_tag_map = {
        version["id"]: version["metadata"]["container"]["tags"] for version in package_meta
    }

    # is pre-release
    pre_rel = True if any(x in str(new_version) for x in ["a", "b", "rc"]) else False

    # semver of current latest
    for version, tags in version_tag_map.items():
        if "latest" in tags:
            # N.B. This seems counterintuitive, but we expect any version tagged
            # 'latest' to have exactly three associated tags:
            # latest, major.minor.latest, and major.minor.patch.
            # Subtracting everything that contains the string 'latest' gets us
            # the major.minor.patch which is what's needed for comparison.
            current_latest = parse([tag for tag in tags if "latest" not in tag][0])
        else:
            current_latest = False

    # semver of current_minor_latest
    for version, tags in version_tag_map.items():
        if f"{new_version.major}.{new_version.minor}.latest" in tags:
            # Similar to above, only now we expect exactly two tags:
            # major.minor.patch and major.minor.latest
            current_minor_latest = parse([tag for tag in tags if "latest" not in tag][0])
        else:
            current_minor_latest = False

    def is_latest(
        pre_rel: bool, new_version: Version, remote_latest: Union[bool, Version]
    ) -> bool:
        """Determine if a given container should be tagged 'latest' based on:
        - its pre-release status
        - its version
        - the version of a previously identified container tagged 'latest'

        :param pre_rel: Whether or not the version of the new container is a pre-release
        :param new_version: The version of the new container
        :param remote_latest: The version of the previously identified container that's
            already tagged latest, or False
        """
        # is a pre-release = not latest
        if pre_rel:
            return False
        # + no latest tag found = is latest
        if not remote_latest:
            return True
        # + if remote version is lower than current = is latest, else not latest
        return True if remote_latest <= new_version else False

    latest = is_latest(pre_rel, new_version, current_latest)
    minor_latest = is_latest(pre_rel, new_version, current_minor_latest)

    github_output = os.environ.get("GITHUB_OUTPUT")
    with open(github_output, "at", encoding="utf-8") as gh_output:
        # newline between entries so $GITHUB_OUTPUT parses both keys
        gh_output.write(f"latest={latest}\n")
        gh_output.write(f"minor_latest={minor_latest}\n")

.github/dependabot.yml (15 lines changed, vendored)

@@ -11,11 +11,26 @@ updates:
    schedule:
      interval: "daily"
    rebase-strategy: "disabled"
  - package-ecosystem: "pip"
    directory: "/plugins/bigquery"
    schedule:
      interval: "daily"
    rebase-strategy: "disabled"
  - package-ecosystem: "pip"
    directory: "/plugins/postgres"
    schedule:
      interval: "daily"
    rebase-strategy: "disabled"
  - package-ecosystem: "pip"
    directory: "/plugins/redshift"
    schedule:
      interval: "daily"
    rebase-strategy: "disabled"
  - package-ecosystem: "pip"
    directory: "/plugins/snowflake"
    schedule:
      interval: "daily"
    rebase-strategy: "disabled"

  # docker dependencies
  - package-ecosystem: "docker"

.github/pull_request_template.md (30 lines changed, vendored)

@@ -1,35 +1,21 @@
resolves #
[docs](https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose) dbt-labs/docs.getdbt.com/#
resolves #

<!---
Include the number of the issue addressed by this PR above if applicable.
PRs for code changes without an associated issue *will not be merged*.
See CONTRIBUTING.md for more information.

Include the number of the docs issue that was opened for this PR. If
this change has no user-facing implications, "N/A" suffices instead. New
docs tickets can be created by clicking the link above or by going to
https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose.
Example:
resolves #1234
-->

### Problem
### Description

<!---
Describe the problem this PR is solving. What is the application state
before this PR is merged?
-->

### Solution

<!---
Describe the way this PR solves the above problem. Add as much detail as you
can to help reviewers understand your changes. Include any alternatives and
tradeoffs you considered.
-->
<!--- Describe the Pull Request here -->

### Checklist

- [ ] I have read [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md) and understand what's expected of me
- [ ] I have run this code in development and it appears to resolve the stated issue
- [ ] I have signed the [CLA](https://docs.getdbt.com/docs/contributor-license-agreements)
- [ ] I have run this code in development and it appears to resolve the stated issue
- [ ] This PR includes tests, or tests are not required/relevant for this PR
- [ ] This PR has no interface changes (e.g. macros, cli, logs, json artifacts, config files, adapter interface, etc) or this PR has already received feedback and approval from Product or DX
- [ ] I have updated the `CHANGELOG.md` and added information about my change to the "dbt next" section.

.github/scripts/integration-test-matrix.js (95 lines, vendored, new file)

@@ -0,0 +1,95 @@
module.exports = ({ context }) => {
  const defaultPythonVersion = "3.8";
  const supportedPythonVersions = ["3.6", "3.7", "3.8", "3.9"];
  const supportedAdapters = ["snowflake", "postgres", "bigquery", "redshift"];

  // if PR, generate matrix based on files changed and PR labels
  if (context.eventName.includes("pull_request")) {
    // `changes` is a list of adapter names that have related
    // file changes in the PR
    // ex: ['postgres', 'snowflake']
    const changes = JSON.parse(process.env.CHANGES);
    const labels = context.payload.pull_request.labels.map(({ name }) => name);
    console.log("labels", labels);
    console.log("changes", changes);
    const testAllLabel = labels.includes("test all");
    const include = [];

    for (const adapter of supportedAdapters) {
      if (
        changes.includes(adapter) ||
        testAllLabel ||
        labels.includes(`test ${adapter}`)
      ) {
        for (const pythonVersion of supportedPythonVersions) {
          if (
            pythonVersion === defaultPythonVersion ||
            labels.includes(`test python${pythonVersion}`) ||
            testAllLabel
          ) {
            // always run tests on ubuntu by default
            include.push({
              os: "ubuntu-latest",
              adapter,
              "python-version": pythonVersion,
            });

            if (labels.includes("test windows") || testAllLabel) {
              include.push({
                os: "windows-latest",
                adapter,
                "python-version": pythonVersion,
              });
            }

            if (labels.includes("test macos") || testAllLabel) {
              include.push({
                os: "macos-latest",
                adapter,
                "python-version": pythonVersion,
              });
            }
          }
        }
      }
    }

    console.log("matrix", { include });

    return {
      include,
    };
  }
  // if not PR, generate matrix of python version, adapter, and operating
  // system to run integration tests on

  const include = [];
  // run for all adapters and python versions on ubuntu
  for (const adapter of supportedAdapters) {
    for (const pythonVersion of supportedPythonVersions) {
      include.push({
        os: 'ubuntu-latest',
        adapter: adapter,
        "python-version": pythonVersion,
      });
    }
  }

  // additionally include runs for all adapters, on macos and windows,
  // but only for the default python version
  for (const adapter of supportedAdapters) {
    for (const operatingSystem of ["windows-latest", "macos-latest"]) {
      include.push({
        os: operatingSystem,
        adapter: adapter,
        "python-version": defaultPythonVersion,
      });
    }
  }

  console.log("matrix", { include });

  return {
    include,
  };
};

.github/workflows/backport.yml (40 lines, vendored)

@@ -1,40 +0,0 @@
# **what?**
# When a PR is merged, if it has the backport label, it will create
# a new PR to backport those changes to the given branch. If it can't
# cleanly do a backport, it will comment on the merged PR of the failure.
#
# Label naming convention: "backport <branch name to backport to>"
# Example: backport 1.0.latest
#
# You MUST "Squash and merge" the original PR or this won't work.

# **why?**
# Changes sometimes need to be backported to release branches.
# This automates the backporting process

# **when?**
# Once a PR is "Squash and merge"'d, by adding a backport label, this is triggered

name: Backport
on:
  pull_request:
    types:
      - labeled

permissions:
  contents: write
  pull-requests: write

jobs:
  backport:
    name: Backport
    runs-on: ubuntu-latest
    # Only react to merged PRs for security reasons.
    # See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target.
    if: >
      github.event.pull_request.merged
      && contains(github.event.label.name, 'backport')
    steps:
      - uses: tibdex/backport@v2.0.3
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}

.github/workflows/bot-changelog.yml (61 lines, vendored)

@@ -1,61 +0,0 @@
# **what?**
# When bots create a PR, this action will add a corresponding changie yaml file to that
# PR when a specific label is added.
#
# The file is created off a template:
#
# kind: <per action matrix>
# body: <PR title>
# time: <current timestamp>
# custom:
#   Author: <PR User Login (generally the bot)>
#   Issue: 4904
#   PR: <PR number>
#
# **why?**
# Automate changelog generation for more visibility with automated bot PRs.
#
# **when?**
# Once a PR is created, label should be added to PR before or after creation. You can also
# manually trigger this by adding the appropriate label at any time.
#
# **how to add another bot?**
# Add the label and changie kind to the include matrix. That's it!
#

name: Bot Changelog

on:
  pull_request:
    # catch when the PR is opened with the label or when the label is added
    types: [labeled]

permissions:
  contents: write
  pull-requests: read

jobs:
  generate_changelog:
    strategy:
      matrix:
        include:
          - label: "dependencies"
            changie_kind: "Dependencies"
          - label: "snyk"
            changie_kind: "Security"
    runs-on: ubuntu-latest

    steps:

    - name: Create and commit changelog on bot PR
      if: ${{ contains(github.event.pull_request.labels.*.name, matrix.label) }}
      id: bot_changelog
      uses: emmyoop/changie_bot@v1.1.0
      with:
        GITHUB_TOKEN: ${{ secrets.FISHTOWN_BOT_PAT }}
        commit_author_name: "Github Build Bot"
        commit_author_email: "<buildbot@fishtownanalytics.com>"
        commit_message: "Add automated changelog yaml from template for bot PR"
        changie_kind: ${{ matrix.changie_kind }}
        label: ${{ matrix.label }}
        custom_changelog_string: "custom:\n  Author: ${{ github.event.pull_request.user.login }}\n  PR: ${{ github.event.pull_request.number }}"

.github/workflows/changelog-existence.yml (38 lines, vendored)

@@ -1,38 +0,0 @@
# **what?**
# Checks that a file has been committed under the /.changes directory
# as a new CHANGELOG entry. Cannot check for a specific filename as
# it is dynamically generated by change type and timestamp.
# This workflow runs on pull_request_target because it requires
# secrets to post comments.

# **why?**
# Ensure code change gets reflected in the CHANGELOG.

# **when?**
# This will run for all PRs going into main and *.latest. It will
# run when they are opened, reopened, when any label is added or removed
# and when new code is pushed to the branch. The action will then get
# skipped if the 'Skip Changelog' label is present in any of the labels.

name: Check Changelog Entry

on:
  pull_request_target:
    types: [opened, reopened, labeled, unlabeled, synchronize]
  workflow_dispatch:

defaults:
  run:
    shell: bash

permissions:
  contents: read
  pull-requests: write

jobs:
  changelog:
    uses: dbt-labs/actions/.github/workflows/changelog-existence.yml@main
    with:
      changelog_comment: 'Thank you for your pull request! We could not find a changelog entry for this change. For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry).'
      skip_label: 'Skip Changelog'
    secrets: inherit

.github/workflows/cut-release-branch.yml (41 lines, vendored)

@@ -1,41 +0,0 @@
# **what?**
# Cuts a new `*.latest` branch.
# Also cleans up all files in `.changes/unreleased` and `.changes/<previous version>` on
# `main` and bumps `main` to the input version.

# **why?**
# Generally reduces the workload of engineers and reduces error. Allows automation.

# **when?**
# This will run when called manually.

name: Cut new release branch

on:
  workflow_dispatch:
    inputs:
      version_to_bump_main:
        description: 'The alpha version main should bump to (ex. 1.6.0a1)'
        required: true
      new_branch_name:
        description: 'The full name of the new branch (ex. 1.5.latest)'
        required: true

defaults:
  run:
    shell: bash

permissions:
  contents: write

jobs:
  cut_branch:
    name: "Cut branch and clean up main for dbt-core"
    uses: dbt-labs/actions/.github/workflows/cut-release-branch.yml@main
    with:
      version_to_bump_main: ${{ inputs.version_to_bump_main }}
      new_branch_name: ${{ inputs.new_branch_name }}
      PR_title: "Cleanup main after cutting new ${{ inputs.new_branch_name }} branch"
      PR_body: "All adapter PRs will fail CI until the dbt-core PR has been merged due to release version conflicts."
    secrets:
      FISHTOWN_BOT_PAT: ${{ secrets.FISHTOWN_BOT_PAT }}

.github/workflows/docs-issue.yml (37 lines, vendored)

@@ -1,37 +0,0 @@
# **what?**
# Open an issue in docs.getdbt.com when a PR is labeled `user docs`

# **why?**
# To reduce barriers for keeping docs up to date

# **when?**
# When a PR is labeled `user docs` and is merged. Runs on pull_request_target to run off the workflow already merged,
# not the workflow that existed on the PR branch. This allows old PRs to get comments.

name: Open issues in docs.getdbt.com repo when a PR is labeled
run-name: "Open an issue in docs.getdbt.com for PR #${{ github.event.pull_request.number }}"

on:
  pull_request_target:
    types: [labeled, closed]

defaults:
  run:
    shell: bash

permissions:
  issues: write # opens new issues
  pull-requests: write # comments on PRs

jobs:
  open_issues:
    if: contains( github.event.pull_request.labels.*.name, 'user docs') && github.event.pull_request.merged == true
    uses: dbt-labs/actions/.github/workflows/open-issue-in-repo.yml@main
    with:
      issue_repository: "dbt-labs/docs.getdbt.com"
      issue_title: "Docs Changes Needed from ${{ github.event.repository.name }} PR #${{ github.event.pull_request.number }}"
      issue_labels: "content,improvement,dbt Core"
      issue_body: "At a minimum, update body to include a link to the page on docs.getdbt.com requiring updates and what part(s) of the page you would like to see updated."
    secrets: inherit

.github/workflows/integration.yml (266 lines, vendored, new file)

@@ -0,0 +1,266 @@
# **what?**
# This workflow runs all integration tests for supported OS
# and python versions and core adapters. If triggered by PR,
# the workflow will only run tests for adapters related
# to code changes. Use the `test all` and `test ${adapter}`
# label to run all or additional tests. Use `ok to test`
# label to mark PRs from forked repositories that are safe
# to run integration tests for. Requires secrets to run
# against different warehouses.

# **why?**
# This checks the functionality of dbt from a user's perspective
# and attempts to catch functional regressions.

# **when?**
# This workflow will run on every push to a protected branch
# and when manually triggered. It will also run for all PRs, including
# PRs from forks. The workflow will be skipped until there is a label
# to mark the PR as safe to run.

name: Adapter Integration Tests

on:
  # pushes to release branches
  push:
    branches:
      - "main"
      - "develop"
      - "*.latest"
      - "releases/*"
  # all PRs, important to note that `pull_request_target` workflows
  # will run in the context of the target branch of a PR
  pull_request_target:
  # manual trigger
  workflow_dispatch:

# explicitly turn off permissions for `GITHUB_TOKEN`
permissions: read-all

# will cancel previous workflows triggered by the same event and for the same ref for PRs or same SHA otherwise
concurrency:
  group: ${{ github.workflow }}-${{ github.event_name }}-${{ contains(github.event_name, 'pull_request') && github.event.pull_request.head.ref || github.sha }}
  cancel-in-progress: true

# sets default shell to bash, for all operating systems
defaults:
  run:
    shell: bash

jobs:
  # generate test metadata about what files changed and the testing matrix to use
  test-metadata:
    # run if not a PR from a forked repository or has a label to mark as safe to test
    if: >-
      github.event_name != 'pull_request_target' ||
      github.event.pull_request.head.repo.full_name == github.repository ||
      contains(github.event.pull_request.labels.*.name, 'ok to test')

    runs-on: ubuntu-latest

    outputs:
      matrix: ${{ steps.generate-matrix.outputs.result }}

    steps:
      - name: Check out the repository (non-PR)
        if: github.event_name != 'pull_request_target'
        uses: actions/checkout@v2
        with:
          persist-credentials: false

      - name: Check out the repository (PR)
        if: github.event_name == 'pull_request_target'
        uses: actions/checkout@v2
        with:
          persist-credentials: false
          ref: ${{ github.event.pull_request.head.sha }}

      - name: Check if relevant files changed
        # https://github.com/marketplace/actions/paths-changes-filter
        # For each filter, it sets output variable named by the filter to the text:
        #   'true' - if any of changed files matches any of filter rules
        #   'false' - if none of changed files matches any of filter rules
        # also, returns:
        #   `changes` - JSON array with names of all filters matching any of the changed files
        uses: dorny/paths-filter@v2
        id: get-changes
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          filters: |
            postgres:
              - 'core/**'
              - 'plugins/postgres/**'
              - 'dev-requirements.txt'
            snowflake:
              - 'core/**'
              - 'plugins/snowflake/**'
            bigquery:
              - 'core/**'
              - 'plugins/bigquery/**'
            redshift:
              - 'core/**'
              - 'plugins/redshift/**'
              - 'plugins/postgres/**'

      - name: Generate integration test matrix
        id: generate-matrix
        uses: actions/github-script@v4
        env:
          CHANGES: ${{ steps.get-changes.outputs.changes }}
        with:
          script: |
            const script = require('./.github/scripts/integration-test-matrix.js')
            const matrix = script({ context })
            console.log(matrix)
            return matrix

  test:
    name: ${{ matrix.adapter }} / python ${{ matrix.python-version }} / ${{ matrix.os }}

    # run if not a PR from a forked repository or has a label to mark as safe to test
    # also checks that the matrix generated is not empty
    if: >-
      needs.test-metadata.outputs.matrix &&
      fromJSON( needs.test-metadata.outputs.matrix ).include[0] &&
      (
        github.event_name != 'pull_request_target' ||
        github.event.pull_request.head.repo.full_name == github.repository ||
        contains(github.event.pull_request.labels.*.name, 'ok to test')
      )

    runs-on: ${{ matrix.os }}

    needs: test-metadata

    strategy:
      fail-fast: false
      matrix: ${{ fromJSON(needs.test-metadata.outputs.matrix) }}

    env:
      TOXENV: integration-${{ matrix.adapter }}
      PYTEST_ADDOPTS: "-v --color=yes -n4 --csv integration_results.csv"
      DBT_INVOCATION_ENV: github-actions

    steps:
      - name: Check out the repository
        if: github.event_name != 'pull_request_target'
        uses: actions/checkout@v2
        with:
          persist-credentials: false

      # explicitly checkout the branch for the PR,
      # this is necessary for the `pull_request_target` event
      - name: Check out the repository (PR)
        if: github.event_name == 'pull_request_target'
        uses: actions/checkout@v2
        with:
          persist-credentials: false
          ref: ${{ github.event.pull_request.head.sha }}

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}

      - name: Set up postgres (linux)
        if: |
          matrix.adapter == 'postgres' &&
          runner.os == 'Linux'
        uses: ./.github/actions/setup-postgres-linux

      - name: Set up postgres (macos)
        if: |
          matrix.adapter == 'postgres' &&
          runner.os == 'macOS'
        uses: ./.github/actions/setup-postgres-macos

      - name: Set up postgres (windows)
        if: |
          matrix.adapter == 'postgres' &&
          runner.os == 'Windows'
        uses: ./.github/actions/setup-postgres-windows

      - name: Install python dependencies
        run: |
          pip install --upgrade pip
          pip install tox
          pip --version
          tox --version

      - name: Run tox (postgres)
        if: matrix.adapter == 'postgres'
        run: tox

      - name: Run tox (redshift)
        if: matrix.adapter == 'redshift'
        env:
          REDSHIFT_TEST_DBNAME: ${{ secrets.REDSHIFT_TEST_DBNAME }}
          REDSHIFT_TEST_PASS: ${{ secrets.REDSHIFT_TEST_PASS }}
          REDSHIFT_TEST_USER: ${{ secrets.REDSHIFT_TEST_USER }}
          REDSHIFT_TEST_PORT: ${{ secrets.REDSHIFT_TEST_PORT }}
          REDSHIFT_TEST_HOST: ${{ secrets.REDSHIFT_TEST_HOST }}
        run: tox

      - name: Run tox (snowflake)
        if: matrix.adapter == 'snowflake'
        env:
          SNOWFLAKE_TEST_ACCOUNT: ${{ secrets.SNOWFLAKE_TEST_ACCOUNT }}
          SNOWFLAKE_TEST_PASSWORD: ${{ secrets.SNOWFLAKE_TEST_PASSWORD }}
          SNOWFLAKE_TEST_USER: ${{ secrets.SNOWFLAKE_TEST_USER }}
          SNOWFLAKE_TEST_WAREHOUSE: ${{ secrets.SNOWFLAKE_TEST_WAREHOUSE }}
          SNOWFLAKE_TEST_OAUTH_REFRESH_TOKEN: ${{ secrets.SNOWFLAKE_TEST_OAUTH_REFRESH_TOKEN }}
          SNOWFLAKE_TEST_OAUTH_CLIENT_ID: ${{ secrets.SNOWFLAKE_TEST_OAUTH_CLIENT_ID }}
          SNOWFLAKE_TEST_OAUTH_CLIENT_SECRET: ${{ secrets.SNOWFLAKE_TEST_OAUTH_CLIENT_SECRET }}
          SNOWFLAKE_TEST_ALT_DATABASE: ${{ secrets.SNOWFLAKE_TEST_ALT_DATABASE }}
          SNOWFLAKE_TEST_ALT_WAREHOUSE: ${{ secrets.SNOWFLAKE_TEST_ALT_WAREHOUSE }}
          SNOWFLAKE_TEST_DATABASE: ${{ secrets.SNOWFLAKE_TEST_DATABASE }}
          SNOWFLAKE_TEST_QUOTED_DATABASE: ${{ secrets.SNOWFLAKE_TEST_QUOTED_DATABASE }}
          SNOWFLAKE_TEST_ROLE: ${{ secrets.SNOWFLAKE_TEST_ROLE }}
        run: tox

      - name: Run tox (bigquery)
        if: matrix.adapter == 'bigquery'
        env:
          BIGQUERY_TEST_SERVICE_ACCOUNT_JSON: ${{ secrets.BIGQUERY_TEST_SERVICE_ACCOUNT_JSON }}
          BIGQUERY_TEST_ALT_DATABASE: ${{ secrets.BIGQUERY_TEST_ALT_DATABASE }}
        run: tox

      - uses: actions/upload-artifact@v2
        if: always()
        with:
          name: logs
          path: ./logs

      - name: Get current date
        if: always()
        id: date
        run: echo "::set-output name=date::$(date +'%Y-%m-%dT%H_%M_%S')" # no colons allowed for artifacts

      - uses: actions/upload-artifact@v2
        if: always()
        with:
          name: integration_results_${{ matrix.python-version }}_${{ matrix.os }}_${{ matrix.adapter }}-${{ steps.date.outputs.date }}.csv
          path: integration_results.csv

  require-label-comment:
    runs-on: ubuntu-latest

    needs: test

    permissions:
      pull-requests: write

    steps:
      - name: Needs permission PR comment
        if: >-
          needs.test.result == 'skipped' &&
          github.event_name == 'pull_request_target' &&
          github.event.pull_request.head.repo.full_name != github.repository
        uses: unsplash/comment-on-pr@master
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          msg: |
            "You do not have permissions to run integration tests, @dbt-labs/core "\
            "needs to label this PR with `ok to test` in order to run integration tests!"
          check_for_duplicate_msg: true

.github/workflows/jira-creation.yml (26 lines, vendored)

@@ -1,26 +0,0 @@
# **what?**
# Mirrors issues into Jira. Includes the information: title,
# GitHub Issue ID and URL

# **why?**
# Jira is our tool for tracking and we need to see these issues in there

# **when?**
# On issue creation or when an issue is labeled `Jira`

name: Jira Issue Creation

on:
  issues:
    types: [opened, labeled]

permissions:
  issues: write

jobs:
  call-creation-action:
    uses: dbt-labs/actions/.github/workflows/jira-creation-actions.yml@main
    secrets:
      JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
      JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
      JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }}

.github/workflows/jira-label.yml (26 lines, vendored)

@@ -1,26 +0,0 @@
# **what?**
# Calls mirroring Jira label Action. Includes adding a new label
# to an existing issue or removing a label as well

# **why?**
# Jira is our tool for tracking and we need to see these labels in there

# **when?**
# On labels being added or removed from issues

name: Jira Label Mirroring

on:
  issues:
    types: [labeled, unlabeled]

permissions:
  issues: read

jobs:
  call-label-action:
    uses: dbt-labs/actions/.github/workflows/jira-label-actions.yml@main
    secrets:
      JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
      JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
      JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }}

.github/workflows/jira-transition.yml (27 lines, vendored)

@@ -1,27 +0,0 @@
# **what?**
# Transition a Jira issue to a new state
# Only supports these GitHub Issue transitions:
#   closed, deleted, reopened

# **why?**
# Jira needs to be kept up-to-date

# **when?**
# On issue closing, deletion, reopened

name: Jira Issue Transition

on:
  issues:
    types: [closed, deleted, reopened]

# no special access is needed
permissions: read-all

jobs:
  call-transition-action:
    uses: dbt-labs/actions/.github/workflows/jira-transition-actions.yml@main
    secrets:
      JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
      JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
      JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }}

263
.github/workflows/main.yml
vendored
263
.github/workflows/main.yml
vendored
@@ -1,8 +1,9 @@
|
||||
# **what?**
|
||||
# Runs code quality checks, unit tests, integration tests and
|
||||
# verifies python build on all code commited to the repository. This workflow
|
||||
# should not require any secrets since it runs for PRs from forked repos. By
|
||||
# default, secrets are not passed to workflows running from a forked repos.
|
||||
# Runs code quality checks, unit tests, and verifies python build on
|
||||
# all code commited to the repository. This workflow should not
|
||||
# require any secrets since it runs for PRs from forked repos.
|
||||
# By default, secrets are not passed to workflows running from
|
||||
# a forked repo.
|
||||
|
||||
# **why?**
|
||||
# Ensure code for dbt meets a certain quality standard.
|
||||
@@ -17,6 +18,7 @@ on:
|
||||
push:
|
||||
branches:
|
||||
- "main"
|
||||
- "develop"
|
||||
- "*.latest"
|
||||
- "releases/*"
|
||||
pull_request:
|
||||
@@ -33,66 +35,70 @@ defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
# top-level adjustments can be made here
|
||||
env:
|
||||
# number of parallel processes to spawn for python integration testing
|
||||
PYTHON_INTEGRATION_TEST_WORKERS: 5
|
||||
|
||||
jobs:
|
||||
code-quality:
|
||||
name: code-quality
|
||||
name: ${{ matrix.toxenv }}
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
toxenv: [flake8, mypy]
|
||||
|
||||
env:
|
||||
TOXENV: ${{ matrix.toxenv }}
|
||||
PYTEST_ADDOPTS: "-v --color=yes"
|
||||
|
||||
steps:
|
||||
- name: Check out the repository
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.8'
|
||||
uses: actions/setup-python@v2
|
||||
|
||||
- name: Install python dependencies
|
||||
run: |
|
||||
python -m pip install --user --upgrade pip
|
||||
python -m pip --version
|
||||
make dev
|
||||
mypy --version
|
||||
dbt --version
|
||||
pip install --upgrade pip
|
||||
pip install tox
|
||||
pip --version
|
||||
tox --version
|
||||
|
||||
- name: Run pre-commit hooks
|
||||
run: pre-commit run --all-files --show-diff-on-failure
|
||||
- name: Run tox
|
||||
run: tox
|
||||
|
||||
unit:
|
||||
name: unit test / python ${{ matrix.python-version }}
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
python-version: ["3.8", "3.9", "3.10", "3.11"]
|
||||
python-version: [3.6, 3.7, 3.8] # TODO: support unit testing for python 3.9 (https://github.com/dbt-labs/dbt/issues/3689)
|
||||
|
||||
env:
|
||||
TOXENV: "unit"
|
||||
PYTEST_ADDOPTS: "-v --color=yes --csv unit_results.csv"
|
||||
|
||||
steps:
|
||||
- name: Check out the repository
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v4
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
|
||||
- name: Install python dependencies
|
||||
run: |
|
||||
python -m pip install --user --upgrade pip
|
||||
python -m pip --version
|
||||
python -m pip install tox
|
||||
pip install --upgrade pip
|
||||
pip install tox
|
||||
pip --version
|
||||
tox --version
|
||||
|
||||
- name: Run tox
|
||||
@@ -101,147 +107,13 @@ jobs:
|
||||
- name: Get current date
|
||||
if: always()
|
||||
id: date
|
||||
run: |
|
||||
CURRENT_DATE=$(date +'%Y-%m-%dT%H_%M_%S') # no colons allowed for artifacts
|
||||
echo "date=$CURRENT_DATE" >> $GITHUB_OUTPUT
|
||||
run: echo "::set-output name=date::$(date +'%Y-%m-%dT%H_%M_%S')" #no colons allowed for artifacts
|
||||
|
||||
- name: Upload Unit Test Coverage to Codecov
|
||||
if: ${{ matrix.python-version == '3.11' }}
|
||||
uses: codecov/codecov-action@v3
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
flags: unit
|
||||
|
||||
integration-metadata:
|
||||
name: integration test metadata generation
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
split-groups: ${{ steps.generate-split-groups.outputs.split-groups }}
|
||||
include: ${{ steps.generate-include.outputs.include }}
|
||||
|
||||
steps:
|
||||
- name: generate split-groups
|
||||
id: generate-split-groups
|
||||
run: |
|
||||
MATRIX_JSON="["
|
||||
for B in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
|
||||
MATRIX_JSON+=$(sed 's/^/"/;s/$/"/' <<< "${B}")
|
||||
done
|
||||
MATRIX_JSON="${MATRIX_JSON//\"\"/\", \"}"
|
||||
MATRIX_JSON+="]"
|
||||
echo "split-groups=${MATRIX_JSON}"
|
||||
echo "split-groups=${MATRIX_JSON}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: generate include
|
||||
id: generate-include
|
||||
run: |
|
||||
INCLUDE=('"python-version":"3.8","os":"windows-latest"' '"python-version":"3.8","os":"macos-latest"' )
|
||||
INCLUDE_GROUPS="["
|
||||
for include in ${INCLUDE[@]}; do
|
||||
for group in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
|
||||
INCLUDE_GROUPS+=$(sed 's/$/, /' <<< "{\"split-group\":\"${group}\",${include}}")
|
||||
done
|
||||
done
|
||||
INCLUDE_GROUPS=$(echo $INCLUDE_GROUPS | sed 's/,*$//g')
|
||||
INCLUDE_GROUPS+="]"
|
||||
echo "include=${INCLUDE_GROUPS}"
|
||||
echo "include=${INCLUDE_GROUPS}" >> $GITHUB_OUTPUT
|
||||
|
||||
integration:
|
||||
name: (${{ matrix.split-group }}) integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
timeout-minutes: 30
|
||||
needs:
|
||||
- integration-metadata
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
python-version: ["3.8", "3.9", "3.10", "3.11"]
|
||||
os: [ubuntu-20.04]
|
||||
split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
|
||||
include: ${{ fromJson(needs.integration-metadata.outputs.include) }}
|
||||
env:
|
||||
TOXENV: integration
|
||||
DBT_INVOCATION_ENV: github-actions
|
||||
DBT_TEST_USER_1: dbt_test_user_1
|
||||
DBT_TEST_USER_2: dbt_test_user_2
|
||||
DBT_TEST_USER_3: dbt_test_user_3
|
||||
DD_CIVISIBILITY_AGENTLESS_ENABLED: true
|
||||
DD_API_KEY: ${{ secrets.DATADOG_API_KEY }}
|
||||
DD_SITE: datadoghq.com
|
||||
DD_ENV: ci
|
||||
DD_SERVICE: ${{ github.event.repository.name }}
|
||||
|
||||
steps:
|
||||
- name: Check out the repository
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
|
||||
- name: Set up postgres (linux)
|
||||
if: runner.os == 'Linux'
|
||||
uses: ./.github/actions/setup-postgres-linux
|
||||
|
||||
- name: Set up postgres (macos)
|
||||
if: runner.os == 'macOS'
|
||||
uses: ./.github/actions/setup-postgres-macos
|
||||
|
||||
- name: Set up postgres (windows)
|
||||
if: runner.os == 'Windows'
|
||||
uses: ./.github/actions/setup-postgres-windows
|
||||
|
||||
- name: Install python tools
|
||||
run: |
|
||||
python -m pip install --user --upgrade pip
|
||||
python -m pip --version
|
||||
python -m pip install tox
|
||||
tox --version
|
||||
|
||||
- name: Run tests
|
||||
run: tox -- --ddtrace
|
||||
env:
|
||||
PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}

      - name: Get current date
        if: always()
        id: date
        run: |
          CURRENT_DATE=$(date +'%Y-%m-%dT%H_%M_%S') # no colons allowed for artifacts
          echo "date=$CURRENT_DATE" >> $GITHUB_OUTPUT

      - uses: actions/upload-artifact@v3
      - uses: actions/upload-artifact@v2
        if: always()
        with:
          name: logs_${{ matrix.python-version }}_${{ matrix.os }}_${{ steps.date.outputs.date }}
          path: ./logs

      - name: Upload Integration Test Coverage to Codecov
        if: ${{ matrix.python-version == '3.11' }}
        uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          flags: integration

  integration-report:
    if: ${{ always() }}
    name: Integration Test Suite
    runs-on: ubuntu-latest
    needs: integration
    steps:
      - name: "Integration Tests Failed"
        if: ${{ contains(needs.integration.result, 'failure') || contains(needs.integration.result, 'cancelled') }}
        # when this is true the next step won't execute
        run: |
          echo "::notice title='Integration test suite failed'"
          exit 1

      - name: "Integration Tests Passed"
        run: |
          echo "::notice title='Integration test suite passed'"
        name: unit_results_${{ matrix.python-version }}-${{ steps.date.outputs.date }}.csv
        path: unit_results.csv

  build:
    name: build packages

@@ -250,18 +122,20 @@ jobs:

    steps:
      - name: Check out the repository
        uses: actions/checkout@v3
        uses: actions/checkout@v2
        with:
          persist-credentials: false

      - name: Set up Python
        uses: actions/setup-python@v4
        uses: actions/setup-python@v2
        with:
          python-version: '3.8'
          python-version: 3.8

      - name: Install python dependencies
        run: |
          python -m pip install --user --upgrade pip
          python -m pip install --upgrade setuptools wheel twine check-wheel-contents
          python -m pip --version
          pip install --upgrade pip
          pip install --upgrade setuptools wheel twine check-wheel-contents
          pip --version

      - name: Build distributions
        run: ./scripts/build-dist.sh

@@ -277,18 +151,55 @@ jobs:

        run: |
          check-wheel-contents dist/*.whl --ignore W007,W008

      - uses: actions/upload-artifact@v2
        with:
          name: dist
          path: dist/

  test-build:
    name: verify packages / python ${{ matrix.python-version }} / ${{ matrix.os }}

    needs: build

    runs-on: ${{ matrix.os }}

    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]
        python-version: [3.6, 3.7, 3.8, 3.9]

    steps:
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}

      - name: Install python dependencies
        run: |
          pip install --upgrade pip
          pip install --upgrade wheel
          pip --version

      - uses: actions/download-artifact@v2
        with:
          name: dist
          path: dist/

      - name: Show distributions
        run: ls -lh dist/

      - name: Install wheel distributions
        run: |
          find ./dist/*.whl -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/
          find ./dist/*.whl -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/

      - name: Check wheel distributions
        run: |
          dbt --version

      - name: Install source distributions
        # ignore dbt-1.0.0, which intentionally raises an error when installed from source
        run: |
          find ./dist/dbt-[a-z]*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/
          find ./dist/*.gz -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/
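          # Note (illustration only): the `dbt-[a-z]*.gz` glob requires a letter
          # right after `dbt-`, so it matches adapter sdists such as
          # dbt-postgres-*.tar.gz while skipping dbt-1.0.0.tar.gz, whose name has
          # a digit there and which intentionally errors when installed from source.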

      - name: Check source distributions
        run: |

.github/workflows/model_performance.yml
@@ -1,265 +0,0 @@

# **what?**
# This workflow models the performance characteristics of a point in time in dbt.
# It runs specific dbt commands on committed projects multiple times to create and
# commit information about the distribution to the current branch. For more information
# see the readme in the performance module at /performance/README.md.
#
# **why?**
# When developing new features, we can take quick performance samples and compare
# them against the committed baseline measurements produced by this workflow to detect
# some performance regressions at development time before they reach users.
#
# **when?**
# This is only run once directly after each release (for non-prereleases). If for some
# reason the results of a run are not satisfactory, it can also be triggered manually.

name: Model Performance Characteristics

on:
  # runs after non-prereleases are published.
  release:
    types: [released]
  # run manually from the actions tab
  workflow_dispatch:
    inputs:
      release_id:
        description: 'dbt version to model (must be non-prerelease in Pypi)'
        type: string
        required: true

env:
  RUNNER_CACHE_PATH: performance/runner/target/release/runner

# both jobs need to write
permissions:
  contents: write
  pull-requests: write

jobs:
  set-variables:
    name: Setting Variables
    runs-on: ubuntu-latest
    outputs:
      cache_key: ${{ steps.variables.outputs.cache_key }}
      release_id: ${{ steps.semver.outputs.base-version }}
      release_branch: ${{ steps.variables.outputs.release_branch }}
    steps:

      # explicitly checkout the performance runner from main regardless of which
      # version we are modeling.
      - name: Checkout
        uses: actions/checkout@v3
        with:
          ref: main

      - name: Parse version into parts
        id: semver
        uses: dbt-labs/actions/parse-semver@v1
        with:
          version: ${{ github.event.inputs.release_id || github.event.release.tag_name }}

      # collect all the variables that need to be used in subsequent jobs
      - name: Set variables
        id: variables
        run: |
          # create a cache key that will be used in the next job. without this the
          # next job would have to checkout from main and hash the files itself.
          echo "cache_key=${{ runner.os }}-${{ hashFiles('performance/runner/Cargo.toml')}}-${{ hashFiles('performance/runner/src/*') }}" >> $GITHUB_OUTPUT

          branch_name="${{steps.semver.outputs.major}}.${{steps.semver.outputs.minor}}.latest"
          echo "release_branch=$branch_name" >> $GITHUB_OUTPUT
          echo "release branch is inferred to be ${branch_name}"

  latest-runner:
    name: Build or Fetch Runner
    runs-on: ubuntu-latest
    needs: [set-variables]
    env:
      RUSTFLAGS: "-D warnings"
    steps:
      - name: '[DEBUG] print variables'
        run: |
          echo "all variables defined in set-variables"
          echo "cache_key: ${{ needs.set-variables.outputs.cache_key }}"
          echo "release_id: ${{ needs.set-variables.outputs.release_id }}"
          echo "release_branch: ${{ needs.set-variables.outputs.release_branch }}"

      # explicitly checkout the performance runner from main regardless of which
      # version we are modeling.
      - name: Checkout
        uses: actions/checkout@v3
        with:
          ref: main

      # attempts to access a previously cached runner
      - uses: actions/cache@v3
        id: cache
        with:
          path: ${{ env.RUNNER_CACHE_PATH }}
          key: ${{ needs.set-variables.outputs.cache_key }}

      - name: Fetch Rust Toolchain
        if: steps.cache.outputs.cache-hit != 'true'
        uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: stable
          override: true

      - name: Add fmt
        if: steps.cache.outputs.cache-hit != 'true'
        run: rustup component add rustfmt

      - name: Cargo fmt
        if: steps.cache.outputs.cache-hit != 'true'
        uses: actions-rs/cargo@v1
        with:
          command: fmt
          args: --manifest-path performance/runner/Cargo.toml --all -- --check

      - name: Test
        if: steps.cache.outputs.cache-hit != 'true'
        uses: actions-rs/cargo@v1
        with:
          command: test
          args: --manifest-path performance/runner/Cargo.toml

      - name: Build (optimized)
        if: steps.cache.outputs.cache-hit != 'true'
        uses: actions-rs/cargo@v1
        with:
          command: build
          args: --release --manifest-path performance/runner/Cargo.toml
      # the cache action automatically caches this binary at the end of the job

  model:
    # depends on `latest-runner` as a separate job so that failures in this job do not prevent
    # a successfully tested and built binary from being cached.
    needs: [set-variables, latest-runner]
    name: Model a release
    runs-on: ubuntu-latest
    steps:

      - name: '[DEBUG] print variables'
        run: |
          echo "all variables defined in set-variables"
          echo "cache_key: ${{ needs.set-variables.outputs.cache_key }}"
          echo "release_id: ${{ needs.set-variables.outputs.release_id }}"
          echo "release_branch: ${{ needs.set-variables.outputs.release_branch }}"

      - name: Setup Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.8"

      - name: Install dbt
        run: pip install dbt-postgres==${{ needs.set-variables.outputs.release_id }}

      - name: Install Hyperfine
        run: wget https://github.com/sharkdp/hyperfine/releases/download/v1.11.0/hyperfine_1.11.0_amd64.deb && sudo dpkg -i hyperfine_1.11.0_amd64.deb

      # explicitly checkout main to get the latest project definitions
      - name: Checkout
        uses: actions/checkout@v3
        with:
          ref: main

      # this was built in the previous job so it will be there.
      - name: Fetch Runner
        uses: actions/cache@v3
        id: cache
        with:
          path: ${{ env.RUNNER_CACHE_PATH }}
          key: ${{ needs.set-variables.outputs.cache_key }}

      - name: Move Runner
        run: mv performance/runner/target/release/runner performance/app

      - name: Change Runner Permissions
        run: chmod +x ./performance/app

      - name: '[DEBUG] ls baseline directory before run'
        run: ls -R performance/baselines/

      # `${{ github.workspace }}` is used to pass the absolute path
      - name: Create directories
        run: |
          mkdir ${{ github.workspace }}/performance/tmp/
          mkdir -p performance/baselines/${{ needs.set-variables.outputs.release_id }}/

      # Run modeling, taking 20 samples
      - name: Run Measurement
        run: |
          performance/app model -v ${{ needs.set-variables.outputs.release_id }} -b ${{ github.workspace }}/performance/baselines/ -p ${{ github.workspace }}/performance/projects/ -t ${{ github.workspace }}/performance/tmp/ -n 20

      - name: '[DEBUG] ls baseline directory after run'
        run: ls -R performance/baselines/

      - uses: actions/upload-artifact@v3
        with:
          name: baseline
          path: performance/baselines/${{ needs.set-variables.outputs.release_id }}/

  create-pr:
    name: Open PR for ${{ matrix.base-branch }}

    # depends on `model` as a separate job so that the baseline can be committed to more than one branch
    # i.e. release branch and main
    needs: [set-variables, latest-runner, model]
    runs-on: ubuntu-latest

    strategy:
      matrix:
        include:
          - base-branch: refs/heads/main
            target-branch: performance-bot/main_${{ needs.set-variables.outputs.release_id }}_${{GITHUB.RUN_ID}}
          - base-branch: refs/heads/${{ needs.set-variables.outputs.release_branch }}
            target-branch: performance-bot/release_${{ needs.set-variables.outputs.release_id }}_${{GITHUB.RUN_ID}}

    steps:
      - name: '[DEBUG] print variables'
        run: |
          echo "all variables defined in set-variables"
          echo "cache_key: ${{ needs.set-variables.outputs.cache_key }}"
          echo "release_id: ${{ needs.set-variables.outputs.release_id }}"
          echo "release_branch: ${{ needs.set-variables.outputs.release_branch }}"

      - name: Checkout
        uses: actions/checkout@v3
        with:
          ref: ${{ matrix.base-branch }}

      - name: Create PR branch
        run: |
          git checkout -b ${{ matrix.target-branch }}
          git push origin ${{ matrix.target-branch }}
          git branch --set-upstream-to=origin/${{ matrix.target-branch }} ${{ matrix.target-branch }}

      - uses: actions/download-artifact@v3
        with:
          name: baseline
          path: performance/baselines/${{ needs.set-variables.outputs.release_id }}

      - name: '[DEBUG] ls baselines after artifact download'
        run: ls -R performance/baselines/

      - name: Commit baseline
        uses: EndBug/add-and-commit@v9
        with:
          add: 'performance/baselines/*'
          author_name: 'Github Build Bot'
          author_email: 'buildbot@fishtownanalytics.com'
          message: 'adding performance baseline for ${{ needs.set-variables.outputs.release_id }}'
          push: 'origin origin/${{ matrix.target-branch }}'

      - name: Create Pull Request
        uses: peter-evans/create-pull-request@v5
        with:
          author: 'Github Build Bot <buildbot@fishtownanalytics.com>'
          base: ${{ matrix.base-branch }}
          branch: '${{ matrix.target-branch }}'
          title: 'Adding performance modeling for ${{needs.set-variables.outputs.release_id}} to ${{ matrix.base-branch }}'
          body: 'Committing perf results for tracking for the ${{needs.set-variables.outputs.release_id}} release'
          labels: |
            Skip Changelog
            Performance

.github/workflows/nightly-release.yml
@@ -1,109 +0,0 @@

# **what?**
# Nightly releases to GitHub and PyPI. This workflow produces the following outcomes:
# - generate and validate data for the nightly release (commit SHA, version number, release branch);
# - pass data to the release workflow;
# - the nightly release will be pushed to GitHub as a draft release;
# - the nightly build will be pushed to test PyPI;
#
# **why?**
# Ensure an automated and tested release process for nightly builds
#
# **when?**
# This workflow runs on schedule or can be run manually on demand.

name: Nightly Test Release to GitHub and PyPI

on:
  workflow_dispatch: # for manual triggering
  schedule:
    - cron: 0 9 * * *

permissions:
  contents: write # this is the permission that allows creating a new release

defaults:
  run:
    shell: bash

env:
  RELEASE_BRANCH: "main"

jobs:
  aggregate-release-data:
    runs-on: ubuntu-latest

    outputs:
      commit_sha: ${{ steps.resolve-commit-sha.outputs.release_commit }}
      version_number: ${{ steps.nightly-release-version.outputs.number }}
      release_branch: ${{ steps.release-branch.outputs.name }}

    steps:
      - name: "Checkout ${{ github.repository }} Branch ${{ env.RELEASE_BRANCH }}"
        uses: actions/checkout@v3
        with:
          ref: ${{ env.RELEASE_BRANCH }}

      - name: "Resolve Commit To Release"
        id: resolve-commit-sha
        run: |
          commit_sha=$(git rev-parse HEAD)
          echo "release_commit=$commit_sha" >> $GITHUB_OUTPUT

      - name: "Get Current Version Number"
        id: version-number-sources
        run: |
          current_version=`awk -F"current_version = " '{print $2}' .bumpversion.cfg | tr '\n' ' '`
          echo "current_version=$current_version" >> $GITHUB_OUTPUT

      - name: "Audit Version And Parse Into Parts"
        id: semver
        uses: dbt-labs/actions/parse-semver@v1.1.0
        with:
          version: ${{ steps.version-number-sources.outputs.current_version }}

      - name: "Get Current Date"
        id: current-date
        run: echo "date=$(date +'%m%d%Y')" >> $GITHUB_OUTPUT

      - name: "Generate Nightly Release Version Number"
        id: nightly-release-version
        run: |
          number="${{ steps.semver.outputs.version }}.dev${{ steps.current-date.outputs.date }}"
          echo "number=$number" >> $GITHUB_OUTPUT

      - name: "Audit Nightly Release Version And Parse Into Parts"
        uses: dbt-labs/actions/parse-semver@v1.1.0
        with:
          version: ${{ steps.nightly-release-version.outputs.number }}

      - name: "Set Release Branch"
        id: release-branch
        run: |
          echo "name=${{ env.RELEASE_BRANCH }}" >> $GITHUB_OUTPUT

  log-outputs-aggregate-release-data:
    runs-on: ubuntu-latest
    needs: [aggregate-release-data]

    steps:
      - name: "[DEBUG] Log Outputs"
        run: |
          echo commit_sha : ${{ needs.aggregate-release-data.outputs.commit_sha }}
          echo version_number: ${{ needs.aggregate-release-data.outputs.version_number }}
          echo release_branch: ${{ needs.aggregate-release-data.outputs.release_branch }}

  release-github-pypi:
    needs: [aggregate-release-data]

    uses: ./.github/workflows/release.yml
    with:
      sha: ${{ needs.aggregate-release-data.outputs.commit_sha }}
      target_branch: ${{ needs.aggregate-release-data.outputs.release_branch }}
      version_number: ${{ needs.aggregate-release-data.outputs.version_number }}
      build_script_path: "scripts/build-dist.sh"
      env_setup_script_path: "scripts/env-setup.sh"
      s3_bucket_name: "core-team-artifacts"
      package_test_command: "dbt --version"
      test_run: true
      nightly_release: true
    secrets: inherit

.github/workflows/performance.yml
@@ -0,0 +1,174 @@

name: Performance Regression Tests
# Schedule triggers
on:
  # runs twice a day at 10:05am and 10:05pm
  schedule:
    - cron: "5 10,22 * * *"
  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

jobs:
  # checks fmt of runner code
  # purposefully not a dependency of any other job
  # will block merging, but not prevent developing
  fmt:
    name: Cargo fmt
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: stable
          override: true
      - run: rustup component add rustfmt
      - uses: actions-rs/cargo@v1
        with:
          command: fmt
          args: --manifest-path performance/runner/Cargo.toml --all -- --check

  # runs any tests associated with the runner
  # these tests make sure the runner logic is correct
  test-runner:
    name: Test Runner
    runs-on: ubuntu-latest
    env:
      # turns warnings into errors
      RUSTFLAGS: "-D warnings"
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: stable
          override: true
      - uses: actions-rs/cargo@v1
        with:
          command: test
          args: --manifest-path performance/runner/Cargo.toml

  # build an optimized binary to be used as the runner in later steps
  build-runner:
    needs: [test-runner]
    name: Build Runner
    runs-on: ubuntu-latest
    env:
      RUSTFLAGS: "-D warnings"
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: stable
          override: true
      - uses: actions-rs/cargo@v1
        with:
          command: build
          args: --release --manifest-path performance/runner/Cargo.toml
      - uses: actions/upload-artifact@v2
        with:
          name: runner
          path: performance/runner/target/release/runner

  # run the performance measurements on the current or default branch
  measure-dev:
    needs: [build-runner]
    name: Measure Dev Branch
    runs-on: ubuntu-latest
    steps:
      - name: checkout dev
        uses: actions/checkout@v2
      - name: Setup Python
        uses: actions/setup-python@v2.2.2
        with:
          python-version: "3.8"
      - name: install dbt
        run: pip install -r dev-requirements.txt -r editable-requirements.txt
      - name: install hyperfine
        run: wget https://github.com/sharkdp/hyperfine/releases/download/v1.11.0/hyperfine_1.11.0_amd64.deb && sudo dpkg -i hyperfine_1.11.0_amd64.deb
      - uses: actions/download-artifact@v2
        with:
          name: runner
      - name: change permissions
        run: chmod +x ./runner
      - name: run
        run: ./runner measure -b dev -p ${{ github.workspace }}/performance/projects/
      - uses: actions/upload-artifact@v2
        with:
          name: dev-results
          path: performance/results/

  # run the performance measurements on the release branch which we use
  # as a performance baseline. This part takes by far the longest, so
  # we do everything we can first so the job fails fast.
  # -----
  # we need to checkout dbt twice in this job: once for the baseline dbt
  # version, and once to get the latest regression testing projects,
  # metrics, and runner code from the develop or current branch so that
  # the calculations match for both versions of dbt we are comparing.
  measure-baseline:
    needs: [build-runner]
    name: Measure Baseline Branch
    runs-on: ubuntu-latest
    steps:
      - name: checkout latest
        uses: actions/checkout@v2
        with:
          ref: "0.20.latest"
      - name: Setup Python
        uses: actions/setup-python@v2.2.2
        with:
          python-version: "3.8"
      - name: move repo up a level
        run: mkdir ${{ github.workspace }}/../baseline/ && cp -r ${{ github.workspace }} ${{ github.workspace }}/../baseline
      - name: "[debug] ls new dbt location"
        run: ls ${{ github.workspace }}/../baseline/dbt/
      # installation creates egg-links so we have to preserve source
      - name: install dbt from new location
        run: cd ${{ github.workspace }}/../baseline/dbt/ && pip install -r dev-requirements.txt -r editable-requirements.txt
      # checkout the current branch to get all the target projects
      # this deletes the old checked out code which is why we had to copy before
      - name: checkout dev
        uses: actions/checkout@v2
      - name: install hyperfine
        run: wget https://github.com/sharkdp/hyperfine/releases/download/v1.11.0/hyperfine_1.11.0_amd64.deb && sudo dpkg -i hyperfine_1.11.0_amd64.deb
      - uses: actions/download-artifact@v2
        with:
          name: runner
      - name: change permissions
        run: chmod +x ./runner
      - name: run runner
        run: ./runner measure -b baseline -p ${{ github.workspace }}/performance/projects/
      - uses: actions/upload-artifact@v2
        with:
          name: baseline-results
          path: performance/results/

  # detect regressions on the output generated from measuring
  # the two branches. Exits with non-zero code if a regression is detected.
  calculate-regressions:
    needs: [measure-dev, measure-baseline]
    name: Compare Results
    runs-on: ubuntu-latest
    steps:
      - uses: actions/download-artifact@v2
        with:
          name: dev-results
      - uses: actions/download-artifact@v2
        with:
          name: baseline-results
      - name: "[debug] ls result files"
        run: ls
      - uses: actions/download-artifact@v2
        with:
          name: runner
      - name: change permissions
        run: chmod +x ./runner
      - name: run calculation
        run: ./runner calculate -r ./
      # always attempt to upload the results even if there were regressions found
      - uses: actions/upload-artifact@v2
        if: ${{ always() }}
        with:
          name: final-calculations
          path: ./final_calculations.json

.github/workflows/release-branch-tests.yml
@@ -1,31 +0,0 @@

# **what?**
# The purpose of this workflow is to trigger CI to run for each
# release branch and main branch on a regular cadence. If the CI workflow
# fails for a branch, it will post to #dev-core-alerts to raise awareness.

# **why?**
# Ensures release branches and main are always shippable and not broken.
# Also, can catch any dependencies shifting beneath us that might
# introduce breaking changes (could also impact Cloud).

# **when?**
# Mainly on a schedule of 9:00, 13:00, 18:00 UTC every day.
# Manual trigger can also test on demand

name: Release branch scheduled testing

on:
  schedule:
    - cron: '0 9,13,18 * * *' # 9:00, 13:00, 18:00 UTC

  workflow_dispatch: # for manual triggering

# no special access is needed
permissions: read-all

jobs:
  run_tests:
    uses: dbt-labs/actions/.github/workflows/release-branch-tests.yml@main
    with:
      workflows_to_run: '["main.yml"]'
    secrets: inherit

.github/workflows/release-docker.yml
@@ -1,118 +0,0 @@

# **what?**
# This workflow will generate a series of docker images for dbt and push them to the GitHub Container Registry

# **why?**
# Docker images for dbt are used in a number of important places throughout the dbt ecosystem. This is how we keep those images up-to-date.

# **when?**
# This is triggered manually

# **next steps**
# - build this into the release workflow (or conversely, break out the different release methods into their own workflow files)

name: Docker release

permissions:
  packages: write

on:
  workflow_dispatch:
    inputs:
      package:
        description: The package to release. _One_ of [dbt-core, dbt-redshift, dbt-bigquery, dbt-snowflake, dbt-spark, dbt-postgres]
        required: true
      version_number:
        description: The release version number (i.e. 1.0.0b1). Do not include `latest` tags or a leading `v`!
        required: true

jobs:
  get_version_meta:
    name: Get version meta
    runs-on: ubuntu-latest
    outputs:
      major: ${{ steps.version.outputs.major }}
      minor: ${{ steps.version.outputs.minor }}
      patch: ${{ steps.version.outputs.patch }}
      latest: ${{ steps.latest.outputs.latest }}
      minor_latest: ${{ steps.latest.outputs.minor_latest }}
    steps:
      - uses: actions/checkout@v3
      - name: Split version
        id: version
        run: |
          IFS="." read -r MAJOR MINOR PATCH <<< ${{ github.event.inputs.version_number }}
          echo "major=$MAJOR" >> $GITHUB_OUTPUT
          echo "minor=$MINOR" >> $GITHUB_OUTPUT
          echo "patch=$PATCH" >> $GITHUB_OUTPUT

      - name: Is pkg 'latest'
        id: latest
        uses: ./.github/actions/latest-wrangler
        with:
          package: ${{ github.event.inputs.package }}
          new_version: ${{ github.event.inputs.version_number }}
          gh_token: ${{ secrets.GITHUB_TOKEN }}
          halt_on_missing: False

  setup_image_builder:
    name: Set up docker image builder
    runs-on: ubuntu-latest
    needs: [get_version_meta]
    steps:
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2

  build_and_push:
    name: Build images and push to GHCR
    runs-on: ubuntu-latest
    needs: [setup_image_builder, get_version_meta]
    steps:
      - name: Get docker build arg
        id: build_arg
        run: |
          BUILD_ARG_NAME=$(echo ${{ github.event.inputs.package }} | sed 's/\-/_/g')
          BUILD_ARG_VALUE=$(echo ${{ github.event.inputs.package }} | sed 's/postgres/core/g')
          echo "build_arg_name=$BUILD_ARG_NAME" >> $GITHUB_OUTPUT
          echo "build_arg_value=$BUILD_ARG_VALUE" >> $GITHUB_OUTPUT

      - name: Log in to the GHCR
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build and push MAJOR.MINOR.PATCH tag
        uses: docker/build-push-action@v4
        with:
          file: docker/Dockerfile
          push: True
          target: ${{ github.event.inputs.package }}
          build-args: |
            ${{ steps.build_arg.outputs.build_arg_name }}_ref=${{ steps.build_arg.outputs.build_arg_value }}@v${{ github.event.inputs.version_number }}
          tags: |
            ghcr.io/dbt-labs/${{ github.event.inputs.package }}:${{ github.event.inputs.version_number }}

      - name: Build and push MINOR.latest tag
        uses: docker/build-push-action@v4
        if: ${{ needs.get_version_meta.outputs.minor_latest == 'True' }}
        with:
          file: docker/Dockerfile
          push: True
          target: ${{ github.event.inputs.package }}
          build-args: |
            ${{ steps.build_arg.outputs.build_arg_name }}_ref=${{ steps.build_arg.outputs.build_arg_value }}@v${{ github.event.inputs.version_number }}
          tags: |
            ghcr.io/dbt-labs/${{ github.event.inputs.package }}:${{ needs.get_version_meta.outputs.major }}.${{ needs.get_version_meta.outputs.minor }}.latest

      - name: Build and push latest tag
        uses: docker/build-push-action@v4
        if: ${{ needs.get_version_meta.outputs.latest == 'True' }}
        with:
          file: docker/Dockerfile
          push: True
          target: ${{ github.event.inputs.package }}
          build-args: |
            ${{ steps.build_arg.outputs.build_arg_name }}_ref=${{ steps.build_arg.outputs.build_arg_value }}@v${{ github.event.inputs.version_number }}
          tags: |
            ghcr.io/dbt-labs/${{ github.event.inputs.package }}:latest

.github/workflows/release.yml
@@ -1,229 +0,0 @@

# **what?**
# Release workflow provides the following steps:
# - checkout the given commit;
# - validate version in sources and changelog file for given version;
# - bump the version and generate a changelog if needed;
# - merge all changes to the target branch if needed;
# - run unit and integration tests against given commit;
# - build and package that SHA;
# - release it to GitHub and PyPI with that specific build;
#
# **why?**
# Ensure an automated and tested release process
#
# **when?**
# This workflow can be run manually on demand or can be called by other workflows

name: Release to GitHub and PyPI

on:
  workflow_dispatch:
    inputs:
      sha:
        description: "The last commit sha in the release"
        type: string
        required: true
      target_branch:
        description: "The branch to release from"
        type: string
        required: true
      version_number:
        description: "The release version number (i.e. 1.0.0b1)"
        type: string
        required: true
      build_script_path:
        description: "Build script path"
        type: string
        default: "scripts/build-dist.sh"
        required: true
      env_setup_script_path:
        description: "Environment setup script path"
        type: string
        default: "scripts/env-setup.sh"
        required: false
      s3_bucket_name:
        description: "AWS S3 bucket name"
        type: string
        default: "core-team-artifacts"
        required: true
      package_test_command:
        description: "Package test command"
        type: string
        default: "dbt --version"
        required: true
      test_run:
        description: "Test run (Publish release as draft)"
        type: boolean
        default: true
        required: false
      nightly_release:
        description: "Nightly release to dev environment"
        type: boolean
        default: false
        required: false
  workflow_call:
    inputs:
      sha:
        description: "The last commit sha in the release"
        type: string
        required: true
      target_branch:
        description: "The branch to release from"
        type: string
        required: true
      version_number:
        description: "The release version number (i.e. 1.0.0b1)"
        type: string
        required: true
      build_script_path:
        description: "Build script path"
        type: string
        default: "scripts/build-dist.sh"
        required: true
      env_setup_script_path:
        description: "Environment setup script path"
        type: string
        default: "scripts/env-setup.sh"
        required: false
      s3_bucket_name:
        description: "AWS S3 bucket name"
        type: string
        default: "core-team-artifacts"
        required: true
      package_test_command:
        description: "Package test command"
        type: string
        default: "dbt --version"
        required: true
      test_run:
        description: "Test run (Publish release as draft)"
        type: boolean
        default: true
        required: false
      nightly_release:
        description: "Nightly release to dev environment"
        type: boolean
        default: false
        required: false

permissions:
  contents: write # this is the permission that allows creating a new release

defaults:
  run:
    shell: bash

jobs:
  log-inputs:
    name: Log Inputs
    runs-on: ubuntu-latest
    steps:
      - name: "[DEBUG] Print Variables"
        run: |
          echo The last commit sha in the release: ${{ inputs.sha }}
          echo The branch to release from: ${{ inputs.target_branch }}
          echo The release version number: ${{ inputs.version_number }}
          echo Build script path: ${{ inputs.build_script_path }}
          echo Environment setup script path: ${{ inputs.env_setup_script_path }}
          echo AWS S3 bucket name: ${{ inputs.s3_bucket_name }}
          echo Package test command: ${{ inputs.package_test_command }}
          echo Test run: ${{ inputs.test_run }}
          echo Nightly release: ${{ inputs.nightly_release }}

  bump-version-generate-changelog:
    name: Bump package version, Generate changelog

    uses: dbt-labs/dbt-release/.github/workflows/release-prep.yml@main

    with:
      sha: ${{ inputs.sha }}
      version_number: ${{ inputs.version_number }}
      target_branch: ${{ inputs.target_branch }}
      env_setup_script_path: ${{ inputs.env_setup_script_path }}
      test_run: ${{ inputs.test_run }}
      nightly_release: ${{ inputs.nightly_release }}

    secrets: inherit

  log-outputs-bump-version-generate-changelog:
    name: "[Log output] Bump package version, Generate changelog"
    if: ${{ !failure() && !cancelled() }}

    needs: [bump-version-generate-changelog]

    runs-on: ubuntu-latest

    steps:
      - name: Print variables
        run: |
          echo Final SHA : ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
          echo Changelog path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}

  build-test-package:
    name: Build, Test, Package
    if: ${{ !failure() && !cancelled() }}
    needs: [bump-version-generate-changelog]

    uses: dbt-labs/dbt-release/.github/workflows/build.yml@main

    with:
      sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
      version_number: ${{ inputs.version_number }}
      changelog_path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}
      build_script_path: ${{ inputs.build_script_path }}
      s3_bucket_name: ${{ inputs.s3_bucket_name }}
      package_test_command: ${{ inputs.package_test_command }}
      test_run: ${{ inputs.test_run }}
      nightly_release: ${{ inputs.nightly_release }}

    secrets:
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

  github-release:
    name: GitHub Release
    if: ${{ !failure() && !cancelled() }}

    needs: [bump-version-generate-changelog, build-test-package]

    uses: dbt-labs/dbt-release/.github/workflows/github-release.yml@main

    with:
      sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
      version_number: ${{ inputs.version_number }}
      changelog_path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}
      test_run: ${{ inputs.test_run }}

  pypi-release:
    name: PyPI Release

    needs: [github-release]

    uses: dbt-labs/dbt-release/.github/workflows/pypi-release.yml@main

    with:
      version_number: ${{ inputs.version_number }}
      test_run: ${{ inputs.test_run }}

    secrets:
      PYPI_API_TOKEN: ${{ secrets.PYPI_API_TOKEN }}
      TEST_PYPI_API_TOKEN: ${{ secrets.TEST_PYPI_API_TOKEN }}

  slack-notification:
    name: Slack Notification
    if: ${{ failure() && (!inputs.test_run || inputs.nightly_release) }}

    needs:
      [
        bump-version-generate-changelog,
        build-test-package,
        github-release,
        pypi-release,
      ]

    uses: dbt-labs/dbt-release/.github/workflows/slack-post-notification.yml@main
    with:
      status: "failure"

    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_DEV_CORE_ALERTS }}

.github/workflows/schema-check.yml
@@ -1,90 +0,0 @@

# **what?**
# Compares the schema of the dbt version of the given ref vs
# the latest official schema releases found in schemas.getdbt.com.
# If there are differences, the workflow will fail and upload the
# diff as an artifact. The metadata team should be alerted to the change.
#
# **why?**
# Reaction work may need to be done if artifact schema changes
# occur so we want to proactively alert to it.
#
# **when?**
# On pushes to `develop` and release branches. Manual runs are also enabled.
name: Artifact Schema Check

on:
  workflow_dispatch:
  pull_request: #TODO: remove before merging
  push:
    branches:
      - "develop"
      - "*.latest"
      - "releases/*"

# no special access is needed
permissions: read-all

env:
  LATEST_SCHEMA_PATH: ${{ github.workspace }}/new_schemas
  SCHEMA_DIFF_ARTIFACT: ${{ github.workspace }}/schema_changes.txt
  DBT_REPO_DIRECTORY: ${{ github.workspace }}/dbt
  SCHEMA_REPO_DIRECTORY: ${{ github.workspace }}/schemas.getdbt.com

jobs:
  checking-schemas:
    name: "Checking schemas"
    runs-on: ubuntu-latest

    steps:
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: 3.8

      - name: Checkout dbt repo
        uses: actions/checkout@v3
        with:
          path: ${{ env.DBT_REPO_DIRECTORY }}

      - name: Checkout schemas.getdbt.com repo
        uses: actions/checkout@v3
        with:
          repository: dbt-labs/schemas.getdbt.com
          ref: 'main'
          ssh-key: ${{ secrets.SCHEMA_SSH_PRIVATE_KEY }}
          path: ${{ env.SCHEMA_REPO_DIRECTORY }}

      - name: Generate current schema
        run: |
          cd ${{ env.DBT_REPO_DIRECTORY }}
          python3 -m venv env
          source env/bin/activate
          pip install --upgrade pip
          pip install -r dev-requirements.txt -r editable-requirements.txt
          python scripts/collect-artifact-schema.py --path ${{ env.LATEST_SCHEMA_PATH }}

      # Copy generated schema files into the schemas.getdbt.com repo
      # Do a git diff to find any changes
      # Ignore any date or version changes though
      - name: Compare schemas
        run: |
          cp -r ${{ env.LATEST_SCHEMA_PATH }}/dbt ${{ env.SCHEMA_REPO_DIRECTORY }}
          cd ${{ env.SCHEMA_REPO_DIRECTORY }}
          diff_results=$(git diff -I='*[0-9]{4}-(0[1-9]|1[0-2])-(0[1-9]|[1-2][0-9]|3[0-1])T' \
          -I='*[0-9]{1}.[0-9]{2}.[0-9]{1}(rc[0-9]|b[0-9]| )' --compact-summary)
          if [[ -n "$diff_results" ]]; then
            echo $diff_results
            echo "Schema changes detected!"
            git diff -I='*[0-9]{4}-(0[1-9]|1[0-2])-(0[1-9]|[1-2][0-9]|3[0-1])T' \
            -I='*[0-9]{1}.[0-9]{2}.[0-9]{1}(rc[0-9]|b[0-9]| )' > ${{ env.SCHEMA_DIFF_ARTIFACT }}
            exit 1
          else
            echo "No schema changes detected"
          fi
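          # A minimal illustration (explanatory note only): `git diff -I=<regex>`
          # drops hunks whose changed lines all match the regex, so lines that
          # differ only in a timestamp (e.g. 2023-01-15T...) or a version string
          # (e.g. 1.23.0rc1) are not counted as schema changes.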

      - name: Upload schema diff
        uses: actions/upload-artifact@v3
        if: ${{ failure() }}
        with:
          name: 'schema_changes.txt'
          path: '${{ env.SCHEMA_DIFF_ARTIFACT }}'

.github/workflows/stale.yml
@@ -1,12 +0,0 @@

name: "Close stale issues and PRs"
on:
  schedule:
    - cron: "30 1 * * *"

permissions:
  issues: write
  pull-requests: write

jobs:
  stale:
    uses: dbt-labs/actions/.github/workflows/stale-bot-matrix.yml@main

@@ -1,108 +0,0 @@

# This Action makes a dbt run to sample json structured logs
# and checks that they conform to the currently documented schema.
#
# If this action fails it either means we have unintentionally deviated
# from our documented structured logging schema, or we need to bump the
# version of our structured logging and add new documentation to
# communicate these changes.

name: Structured Logging Schema Check
on:
  push:
    branches:
      - "main"
      - "*.latest"
      - "releases/*"
  pull_request:
  workflow_dispatch:

permissions: read-all

# top-level adjustments can be made here
env:
  # number of parallel processes to spawn for python testing
  PYTHON_INTEGRATION_TEST_WORKERS: 5

jobs:
  integration-metadata:
    name: integration test metadata generation
    runs-on: ubuntu-latest
    outputs:
      split-groups: ${{ steps.generate-split-groups.outputs.split-groups }}

    steps:
      - name: generate split-groups
        id: generate-split-groups
        run: |
          MATRIX_JSON="["
          for B in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
            MATRIX_JSON+=$(sed 's/^/"/;s/$/"/' <<< "${B}")
          done
          MATRIX_JSON="${MATRIX_JSON//\"\"/\", \"}"
          MATRIX_JSON+="]"
          echo "split-groups=${MATRIX_JSON}" >> $GITHUB_OUTPUT

  # run the structured-logging schema tests on the current branch
  test-schema:
    name: Test Log Schema
    runs-on: ubuntu-20.04
    timeout-minutes: 30
    needs:
      - integration-metadata
    strategy:
      fail-fast: false
      matrix:
        split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
    env:
      # turns warnings into errors
      RUSTFLAGS: "-D warnings"
      # points tests to the log file
      LOG_DIR: "/home/runner/work/dbt-core/dbt-core/logs"
      # tells integration tests to output into json format
      DBT_LOG_FORMAT: "json"
      # tell eventmgr to convert logging events into bytes
      DBT_TEST_BINARY_SERIALIZATION: "true"
      # Additional test users
      DBT_TEST_USER_1: dbt_test_user_1
      DBT_TEST_USER_2: dbt_test_user_2
      DBT_TEST_USER_3: dbt_test_user_3

    steps:
      - name: checkout dev
        uses: actions/checkout@v3
        with:
          persist-credentials: false

      - name: Setup Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.8"

      - name: Install python dependencies
        run: |
          pip install --user --upgrade pip
          pip --version
          pip install tox
          tox --version

      - name: Set up postgres
        uses: ./.github/actions/setup-postgres-linux

      - name: ls
        run: ls

      # integration tests generate a ton of logs in different files. the next step will find them all.
      # we actually care if these pass, because the normal test run doesn't usually include many json log outputs
      - name: Run integration tests
        run: tox -e integration -- -nauto
        env:
          PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}

  test-schema-report:
    name: Log Schema Test Suite
    runs-on: ubuntu-latest
    needs: test-schema
    steps:
      - name: "[Notification] Log test suite passes"
        run: |
          echo "::notice title='Log test suite passes'"

.github/workflows/test-repeater.yml
@@ -1,155 +0,0 @@

# **what?**
# This workflow runs the test(s) at the input path a given number of times to determine whether they are flaky. You can test with any supported OS/Python combination.
# Runs are batched into 10 batches so more test iterations complete faster.

# **why?**
# Checks whether a test is flaky and whether a previously flaky test has been fixed. This allows easy testing on supported python versions and OS combinations.

# **when?**
# This is triggered manually from dbt-core.

name: Flaky Tester

on:
  workflow_dispatch:
    inputs:
      branch:
        description: 'Branch to check out'
        type: string
        required: true
        default: 'main'
      test_path:
        description: 'Path to single test to run (ex: tests/functional/retry/test_retry.py::TestRetry::test_fail_fast)'
        type: string
        required: true
        default: 'tests/functional/...'
      python_version:
        description: 'Version of Python to Test Against'
        type: choice
        options:
          - '3.8'
          - '3.9'
          - '3.10'
          - '3.11'
      os:
        description: 'OS to run test in'
        type: choice
        options:
          - 'ubuntu-latest'
          - 'macos-latest'
          - 'windows-latest'
      num_runs_per_batch:
        description: 'Max number of times to run the test per batch. We always run 10 batches.'
        type: number
        required: true
        default: '50'

permissions: read-all

defaults:
  run:
    shell: bash

jobs:
  debug:
    runs-on: ubuntu-latest
    steps:
      - name: "[DEBUG] Output Inputs"
        run: |
          echo "Branch: ${{ inputs.branch }}"
          echo "test_path: ${{ inputs.test_path }}"
          echo "python_version: ${{ inputs.python_version }}"
          echo "os: ${{ inputs.os }}"
          echo "num_runs_per_batch: ${{ inputs.num_runs_per_batch }}"

  pytest:
    runs-on: ${{ inputs.os }}
    strategy:
      # run all batches, even if one fails. This informs how flaky the test may be.
      fail-fast: false
      # using a matrix to speed up the jobs since the matrix will run in parallel when runners are available
      matrix:
        batch: ["1", "2", "3", "4", "5", "6", "7", "8", "9", "10"]
    env:
      PYTEST_ADDOPTS: "-v --color=yes -n4 --csv integration_results.csv"
      DBT_TEST_USER_1: dbt_test_user_1
      DBT_TEST_USER_2: dbt_test_user_2
      DBT_TEST_USER_3: dbt_test_user_3
      DD_CIVISIBILITY_AGENTLESS_ENABLED: true
      DD_API_KEY: ${{ secrets.DATADOG_API_KEY }}
      DD_SITE: datadoghq.com
      DD_ENV: ci
      DD_SERVICE: ${{ github.event.repository.name }}

    steps:
      - name: "Checkout code"
        uses: actions/checkout@v3
        with:
          ref: ${{ inputs.branch }}

      - name: "Setup Python"
        uses: actions/setup-python@v4
        with:
          python-version: "${{ inputs.python_version }}"

      - name: "Setup Dev Environment"
        run: make dev

      - name: "Set up postgres (linux)"
        if: inputs.os == 'ubuntu-latest'
        run: make setup-db

      # mac and windows don't use make due to limitations with docker with those runners in GitHub
      - name: "Set up postgres (macos)"
        if: inputs.os == 'macos-latest'
        uses: ./.github/actions/setup-postgres-macos

      - name: "Set up postgres (windows)"
        if: inputs.os == 'windows-latest'
        uses: ./.github/actions/setup-postgres-windows

      - name: "Test Command"
        id: command
        run: |
          test_command="python -m pytest ${{ inputs.test_path }}"
          echo "test_command=$test_command" >> $GITHUB_OUTPUT

      - name: "Run test ${{ inputs.num_runs_per_batch }} times"
        id: pytest
        run: |
          set +e
          for ((i=1; i<=${{ inputs.num_runs_per_batch }}; i++))
          do
            echo "Running pytest iteration $i..."
            python -m pytest --ddtrace ${{ inputs.test_path }}
            exit_code=$?

            if [[ $exit_code -eq 0 ]]; then
              success=$((success + 1))
              echo "Iteration $i: Success"
            else
              failure=$((failure + 1))
              echo "Iteration $i: Failure"
            fi

            echo
            echo "==========================="
            echo "Successful runs: $success"
            echo "Failed runs: $failure"
            echo "==========================="
            echo
          done

          # also export the success count so the summary step below can read it
          echo "success=$success" >> $GITHUB_OUTPUT
          echo "failure=$failure" >> $GITHUB_OUTPUT

      - name: "Success and Failure Summary: ${{ inputs.os }}/Python ${{ inputs.python_version }}"
        run: |
          echo "Batch: ${{ matrix.batch }}"
          echo "Successful runs: ${{ steps.pytest.outputs.success }}"
          echo "Failed runs: ${{ steps.pytest.outputs.failure }}"

      - name: "Error for Failures"
        if: ${{ steps.pytest.outputs.failure }}
        run: |
          echo "Batch ${{ matrix.batch }} failed ${{ steps.pytest.outputs.failure }} of ${{ inputs.num_runs_per_batch }} tests"
          exit 1

.github/workflows/test/.actrc
@@ -1 +0,0 @@

-P ubuntu-latest=ghcr.io/catthehacker/ubuntu:act-latest

.github/workflows/test/.gitignore
@@ -1 +0,0 @@

.secrets