mirror of
https://github.com/dbt-labs/dbt-core
synced 2025-12-20 12:21:27 +00:00
Compare commits
70 Commits
enable-pos
...
v1.0.5rc2
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
4a1d8a2986 | ||
|
|
64ff87d7e4 | ||
|
|
5d0ebd502b | ||
|
|
7aa7259b1a | ||
|
|
7d1410acc9 | ||
|
|
88fc45b156 | ||
|
|
c6cde6ee2d | ||
|
|
c8f3f22e15 | ||
|
|
2748e4b822 | ||
|
|
7fca9ec2c9 | ||
|
|
ad3063a612 | ||
|
|
5218438704 | ||
|
|
33d08f8faa | ||
|
|
9ff2c8024c | ||
|
|
75696a1797 | ||
|
|
5b41b12779 | ||
|
|
27ed2f961b | ||
|
|
f2dcb6f23c | ||
|
|
77afe63c7c | ||
|
|
ca7c4c147a | ||
|
|
4145834c5b | ||
|
|
aaeb94d683 | ||
|
|
a2662b2f83 | ||
|
|
056db408cf | ||
|
|
bec6becd18 | ||
|
|
3be057b6a4 | ||
|
|
e2a6c25a6d | ||
|
|
92b3fc470d | ||
|
|
1e9fe67393 | ||
|
|
d9361259f4 | ||
|
|
7990974bd8 | ||
|
|
544d3e7a3a | ||
|
|
31962beb14 | ||
|
|
f6a0853901 | ||
|
|
336a3d4987 | ||
|
|
74dc5c49ae | ||
|
|
29fa687349 | ||
|
|
39d4e729c9 | ||
|
|
406bdcc89c | ||
|
|
9702aa733f | ||
|
|
44265716f9 | ||
|
|
20b27fd3b6 | ||
|
|
76c2e182ba | ||
|
|
791625ddf5 | ||
|
|
1baa05a764 | ||
|
|
1b47b53aff | ||
|
|
ec1f609f3e | ||
|
|
b4ea003559 | ||
|
|
23e1a9aa4f | ||
|
|
9882d08a24 | ||
|
|
79cc811a68 | ||
|
|
c82572f745 | ||
|
|
42a38e4deb | ||
|
|
ecf0ffe68c | ||
|
|
e9f26ef494 | ||
|
|
c77dc59af8 | ||
|
|
a5ebe4ff59 | ||
|
|
5c01f9006c | ||
|
|
c92e1ed9f2 | ||
|
|
85dee41a9f | ||
|
|
a4456feff0 | ||
|
|
8d27764b0f | ||
|
|
e56256d968 | ||
|
|
86cb3ba6fa | ||
|
|
4d0d2d0d6f | ||
|
|
f8a3c27fb8 | ||
|
|
30f05b0213 | ||
|
|
f1bebb3629 | ||
|
|
e7a40345ad | ||
|
|
ba94b8212c |
39
.bumpversion.cfg
Normal file
39
.bumpversion.cfg
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
[bumpversion]
|
||||||
|
current_version = 1.0.5rc2
|
||||||
|
parse = (?P<major>\d+)
|
||||||
|
\.(?P<minor>\d+)
|
||||||
|
\.(?P<patch>\d+)
|
||||||
|
((?P<prekind>a|b|rc)
|
||||||
|
(?P<pre>\d+) # pre-release version num
|
||||||
|
)?
|
||||||
|
serialize =
|
||||||
|
{major}.{minor}.{patch}{prekind}{pre}
|
||||||
|
{major}.{minor}.{patch}
|
||||||
|
commit = False
|
||||||
|
tag = False
|
||||||
|
|
||||||
|
[bumpversion:part:prekind]
|
||||||
|
first_value = a
|
||||||
|
optional_value = final
|
||||||
|
values =
|
||||||
|
a
|
||||||
|
b
|
||||||
|
rc
|
||||||
|
final
|
||||||
|
|
||||||
|
[bumpversion:part:pre]
|
||||||
|
first_value = 1
|
||||||
|
|
||||||
|
[bumpversion:file:setup.py]
|
||||||
|
|
||||||
|
[bumpversion:file:core/setup.py]
|
||||||
|
|
||||||
|
[bumpversion:file:core/dbt/version.py]
|
||||||
|
|
||||||
|
[bumpversion:file:core/scripts/create_adapter_plugins.py]
|
||||||
|
|
||||||
|
[bumpversion:file:plugins/postgres/setup.py]
|
||||||
|
|
||||||
|
[bumpversion:file:plugins/postgres/dbt/adapters/postgres/__version__.py]
|
||||||
|
|
||||||
|
[bumpversion:file:docker/requirements/requirements.txt]
|
||||||
@@ -2,18 +2,6 @@
|
|||||||
|
|
||||||
For information on prior major and minor releases, see their changelogs:
|
For information on prior major and minor releases, see their changelogs:
|
||||||
|
|
||||||
|
|
||||||
* [1.10](https://github.com/dbt-labs/dbt-core/blob/1.10.latest/CHANGELOG.md)
|
|
||||||
* [1.9](https://github.com/dbt-labs/dbt-core/blob/1.9.latest/CHANGELOG.md)
|
|
||||||
* [1.8](https://github.com/dbt-labs/dbt-core/blob/1.8.latest/CHANGELOG.md)
|
|
||||||
* [1.7](https://github.com/dbt-labs/dbt-core/blob/1.7.latest/CHANGELOG.md)
|
|
||||||
* [1.6](https://github.com/dbt-labs/dbt-core/blob/1.6.latest/CHANGELOG.md)
|
|
||||||
* [1.5](https://github.com/dbt-labs/dbt-core/blob/1.5.latest/CHANGELOG.md)
|
|
||||||
* [1.4](https://github.com/dbt-labs/dbt-core/blob/1.4.latest/CHANGELOG.md)
|
|
||||||
* [1.3](https://github.com/dbt-labs/dbt-core/blob/1.3.latest/CHANGELOG.md)
|
|
||||||
* [1.2](https://github.com/dbt-labs/dbt-core/blob/1.2.latest/CHANGELOG.md)
|
|
||||||
* [1.1](https://github.com/dbt-labs/dbt-core/blob/1.1.latest/CHANGELOG.md)
|
|
||||||
* [1.0](https://github.com/dbt-labs/dbt-core/blob/1.0.latest/CHANGELOG.md)
|
|
||||||
* [0.21](https://github.com/dbt-labs/dbt-core/blob/0.21.latest/CHANGELOG.md)
|
* [0.21](https://github.com/dbt-labs/dbt-core/blob/0.21.latest/CHANGELOG.md)
|
||||||
* [0.20](https://github.com/dbt-labs/dbt-core/blob/0.20.latest/CHANGELOG.md)
|
* [0.20](https://github.com/dbt-labs/dbt-core/blob/0.20.latest/CHANGELOG.md)
|
||||||
* [0.19](https://github.com/dbt-labs/dbt-core/blob/0.19.latest/CHANGELOG.md)
|
* [0.19](https://github.com/dbt-labs/dbt-core/blob/0.19.latest/CHANGELOG.md)
|
||||||
|
|||||||
250
.changes/1.0.3.md
Normal file
250
.changes/1.0.3.md
Normal file
@@ -0,0 +1,250 @@
|
|||||||
|
## dbt-core 1.0.3 (February 21, 2022)
|
||||||
|
|
||||||
|
### Fixes
|
||||||
|
- Fix bug accessing target fields in deps and clean commands ([#4752](https://github.com/dbt-labs/dbt-core/issues/4752), [#4758](https://github.com/dbt-labs/dbt-core/issues/4758))
|
||||||
|
|
||||||
|
## dbt-core 1.0.2 (February 18, 2022)
|
||||||
|
|
||||||
|
### Dependencies
|
||||||
|
- Pin `MarkupSafe==2.0.1`. Deprecation of `soft_unicode` in `MarkupSafe==2.1.0` is not supported by `Jinja2==2.11`
|
||||||
|
|
||||||
|
## dbt-core 1.0.2rc1 (February 4, 2022)
|
||||||
|
|
||||||
|
### Fixes
|
||||||
|
- Projects created using `dbt init` now have the correct `seeds` directory created (instead of `data`) ([#4588](https://github.com/dbt-labs/dbt-core/issues/4588), [#4599](https://github.com/dbt-labs/dbt-core/pull/4589))
|
||||||
|
- Don't require a profile for dbt deps and clean commands ([#4554](https://github.com/dbt-labs/dbt-core/issues/4554), [#4610](https://github.com/dbt-labs/dbt-core/pull/4610))
|
||||||
|
- Select modified.body works correctly when new model added ([#4570](https://github.com/dbt-labs/dbt-core/issues/4570), [#4631](https://github.com/dbt-labs/dbt-core/pull/4631))
|
||||||
|
- Fix bug in retry logic for bad response from hub and when there is a bad git tarball download. ([#4577](https://github.com/dbt-labs/dbt-core/issues/4577), [#4579](https://github.com/dbt-labs/dbt-core/issues/4579), [#4609](https://github.com/dbt-labs/dbt-core/pull/4609))
|
||||||
|
- Restore previous log level (DEBUG) when a test depends on a disabled resource. Still WARN if the resource is missing ([#4594](https://github.com/dbt-labs/dbt-core/issues/4594), [#4647](https://github.com/dbt-labs/dbt-core/pull/4647))
|
||||||
|
- User wasn't asked for permission to overwrite a profile entry when running init inside an existing project ([#4375](https://github.com/dbt-labs/dbt-core/issues/4375), [#4447](https://github.com/dbt-labs/dbt-core/pull/4447))
|
||||||
|
- A change in secret environment variables won't trigger a full reparse [#4650](https://github.com/dbt-labs/dbt-core/issues/4650) [4665](https://github.com/dbt-labs/dbt-core/pull/4665)
|
||||||
|
- adapter compatibility messaging added ([#4438](https://github.com/dbt-labs/dbt-core/pull/4438), [#4565](https://github.com/dbt-labs/dbt-core/pull/4565))
|
||||||
|
- Add project name validation to `dbt init` ([#4490](https://github.com/dbt-labs/dbt-core/issues/4490),[#4536](https://github.com/dbt-labs/dbt-core/pull/4536))
|
||||||
|
|
||||||
|
Contributors:
|
||||||
|
- [@NiallRees](https://github.com/NiallRees) ([#4447](https://github.com/dbt-labs/dbt-core/pull/4447))
|
||||||
|
- [@amirkdv](https://github.com/amirkdv) ([#4536](https://github.com/dbt-labs/dbt-core/pull/4536))
|
||||||
|
- [@nkyuray](https://github.com/nkyuray) ([#4565](https://github.com/dbt-labs/dbt-core/pull/4565))
|
||||||
|
|
||||||
|
## dbt-core 1.0.1 (January 03, 2022)
|
||||||
|
|
||||||
|
|
||||||
|
## dbt-core 1.0.1rc1 (December 20, 2021)
|
||||||
|
|
||||||
|
### Fixes
|
||||||
|
- Fix wrong url in the dbt docs overview homepage ([#4442](https://github.com/dbt-labs/dbt-core/pull/4442))
|
||||||
|
- Fix redefined status param of SQLQueryStatus to typecheck the string which passes on `._message` value of `AdapterResponse` or the `str` value sent by adapter plugin. ([#4463](https://github.com/dbt-labs/dbt-core/pull/4463#issuecomment-990174166))
|
||||||
|
- Fix `DepsStartPackageInstall` event to use package name instead of version number. ([#4482](https://github.com/dbt-labs/dbt-core/pull/4482))
|
||||||
|
- Reimplement log message to use adapter name instead of the object method. ([#4501](https://github.com/dbt-labs/dbt-core/pull/4501))
|
||||||
|
- Issue better error message for incompatible schemas ([#4470](https://github.com/dbt-labs/dbt-core/pull/4442), [#4497](https://github.com/dbt-labs/dbt-core/pull/4497))
|
||||||
|
- Remove secrets from error related to packages. ([#4507](https://github.com/dbt-labs/dbt-core/pull/4507))
|
||||||
|
- Prevent coercion of boolean values (`True`, `False`) to numeric values (`0`, `1`) in query results ([#4511](https://github.com/dbt-labs/dbt-core/issues/4511), [#4512](https://github.com/dbt-labs/dbt-core/pull/4512))
|
||||||
|
- Fix error with an env_var in a project hook ([#4523](https://github.com/dbt-labs/dbt-core/issues/4523), [#4524](https://github.com/dbt-labs/dbt-core/pull/4524))
|
||||||
|
|
||||||
|
### Docs
|
||||||
|
- Fix missing data on exposures in docs ([#4467](https://github.com/dbt-labs/dbt-core/issues/4467))
|
||||||
|
|
||||||
|
Contributors:
|
||||||
|
- [remoyson](https://github.com/remoyson) ([#4442](https://github.com/dbt-labs/dbt-core/pull/4442))
|
||||||
|
|
||||||
|
## dbt-core 1.0.0 (December 3, 2021)
|
||||||
|
|
||||||
|
### Fixes
|
||||||
|
- Configure the CLI logger destination to use stdout instead of stderr ([#4368](https://github.com/dbt-labs/dbt-core/pull/4368))
|
||||||
|
- Make the size of `EVENT_HISTORY` configurable, via `EVENT_BUFFER_SIZE` global config ([#4411](https://github.com/dbt-labs/dbt-core/pull/4411), [#4416](https://github.com/dbt-labs/dbt-core/pull/4416))
|
||||||
|
- Change type of `log_format` in `profiles.yml` user config to be string, not boolean ([#4394](https://github.com/dbt-labs/dbt-core/pull/4394))
|
||||||
|
|
||||||
|
### Under the hood
|
||||||
|
- Only log cache events if `LOG_CACHE_EVENTS` is enabled, and disable by default. This restores previous behavior ([#4369](https://github.com/dbt-labs/dbt-core/pull/4369))
|
||||||
|
- Move event codes to be a top-level attribute of JSON-formatted logs, rather than nested in `data` ([#4381](https://github.com/dbt-labs/dbt-core/pull/4381))
|
||||||
|
- Fix failing integration test on Windows ([#4380](https://github.com/dbt-labs/dbt-core/pull/4380))
|
||||||
|
- Clean up warning messages for `clean` + `deps` ([#4366](https://github.com/dbt-labs/dbt-core/pull/4366))
|
||||||
|
- Use RFC3339 timestamps for log messages ([#4384](https://github.com/dbt-labs/dbt-core/pull/4384))
|
||||||
|
- Different text output for console (info) and file (debug) logs ([#4379](https://github.com/dbt-labs/dbt-core/pull/4379), [#4418](https://github.com/dbt-labs/dbt-core/pull/4418))
|
||||||
|
- Remove unused events. More structured `ConcurrencyLine`. Replace `\n` message starts/ends with `EmptyLine` events, and exclude `EmptyLine` from JSON-formatted output ([#4388](https://github.com/dbt-labs/dbt-core/pull/4388))
|
||||||
|
- Update `events` module README ([#4395](https://github.com/dbt-labs/dbt-core/pull/4395))
|
||||||
|
- Rework approach to JSON serialization for events with non-standard properties ([#4396](https://github.com/dbt-labs/dbt-core/pull/4396))
|
||||||
|
- Update legacy logger file name to `dbt.log.legacy` ([#4402](https://github.com/dbt-labs/dbt-core/pull/4402))
|
||||||
|
- Rollover `dbt.log` at 10 MB, and keep up to 5 backups, restoring previous behavior ([#4405](https://github.com/dbt-labs/dbt-core/pull/4405))
|
||||||
|
- Use reference keys instead of full relation objects in cache events ([#4410](https://github.com/dbt-labs/dbt-core/pull/4410))
|
||||||
|
- Add `node_type` contextual info to more events ([#4378](https://github.com/dbt-labs/dbt-core/pull/4378))
|
||||||
|
- Make `materialized` config optional in `node_type` ([#4417](https://github.com/dbt-labs/dbt-core/pull/4417))
|
||||||
|
- Stringify exception in `GenericExceptionOnRun` to support JSON serialization ([#4424](https://github.com/dbt-labs/dbt-core/pull/4424))
|
||||||
|
- Add "interop" tests for machine consumption of structured log output ([#4327](https://github.com/dbt-labs/dbt-core/pull/4327))
|
||||||
|
- Relax version specifier for `dbt-extractor` to `~=0.4.0`, to support compiled wheels for additional architectures when available ([#4427](https://github.com/dbt-labs/dbt-core/pull/4427))
|
||||||
|
|
||||||
|
## dbt-core 1.0.0rc3 (November 30, 2021)
|
||||||
|
|
||||||
|
### Fixes
|
||||||
|
- Support partial parsing of env_vars in metrics ([#4253](https://github.com/dbt-labs/dbt-core/issues/4293), [#4322](https://github.com/dbt-labs/dbt-core/pull/4322))
|
||||||
|
- Fix typo in `UnparsedSourceDefinition.__post_serialize__` ([#3545](https://github.com/dbt-labs/dbt-core/issues/3545), [#4349](https://github.com/dbt-labs/dbt-core/pull/4349))
|
||||||
|
|
||||||
|
### Under the hood
|
||||||
|
- Change some CompilationExceptions to ParsingExceptions ([#4254](https://github.com/dbt-labs/dbt-core/issues/4254), [#4328](https://github.com/dbt-labs/dbt-core/pull/4328))
|
||||||
|
- Reorder logic for static parser sampling to speed up model parsing ([#4332](https://github.com/dbt-labs/dbt-core/pull/4332))
|
||||||
|
- Use more augmented assignment statements ([#4315](https://github.com/dbt-labs/dbt-core/issues/4315)), ([#4311](https://github.com/dbt-labs/dbt-core/pull/4331))
|
||||||
|
- Adjust logic when finding approximate matches for models and tests ([#3835](https://github.com/dbt-labs/dbt-core/issues/3835)), [#4076](https://github.com/dbt-labs/dbt-core/pull/4076))
|
||||||
|
- Restore small previous behaviors for logging: JSON formatting for first few events; `WARN`-level stdout for `list` task; include tracking events in `dbt.log` ([#4341](https://github.com/dbt-labs/dbt-core/pull/4341))
|
||||||
|
|
||||||
|
Contributors:
|
||||||
|
- [@sarah-weatherbee](https://github.com/sarah-weatherbee) ([#4331](https://github.com/dbt-labs/dbt-core/pull/4331))
|
||||||
|
- [@emilieschario](https://github.com/emilieschario) ([#4076](https://github.com/dbt-labs/dbt-core/pull/4076))
|
||||||
|
- [@sneznaj](https://github.com/sneznaj) ([#4349](https://github.com/dbt-labs/dbt-core/pull/4349))
|
||||||
|
|
||||||
|
## dbt-core 1.0.0rc2 (November 22, 2021)
|
||||||
|
|
||||||
|
### Breaking changes
|
||||||
|
- Restrict secret env vars (prefixed `DBT_ENV_SECRET_`) to `profiles.yml` + `packages.yml` _only_. Raise an exception if a secret env var is used elsewhere ([#4310](https://github.com/dbt-labs/dbt-core/issues/4310), [#4311](https://github.com/dbt-labs/dbt-core/pull/4311))
|
||||||
|
- Reorder arguments to `config.get()` so that `default` is second ([#4273](https://github.com/dbt-labs/dbt-core/issues/4273), [#4297](https://github.com/dbt-labs/dbt-core/pull/4297))
|
||||||
|
|
||||||
|
### Features
|
||||||
|
- Avoid error when missing column in YAML description ([#4151](https://github.com/dbt-labs/dbt-core/issues/4151), [#4285](https://github.com/dbt-labs/dbt-core/pull/4285))
|
||||||
|
- Allow `--defer` flag to `dbt snapshot` ([#4110](https://github.com/dbt-labs/dbt-core/issues/4110), [#4296](https://github.com/dbt-labs/dbt-core/pull/4296))
|
||||||
|
- Install prerelease packages when `version` explicitly references a prerelease version, regardless of `install-prerelease` status ([#4243](https://github.com/dbt-labs/dbt-core/issues/4243), [#4295](https://github.com/dbt-labs/dbt-core/pull/4295))
|
||||||
|
- Add data attributes to json log messages ([#4301](https://github.com/dbt-labs/dbt-core/pull/4301))
|
||||||
|
- Add event codes to all log events ([#4319](https://github.com/dbt-labs/dbt-core/pull/4319))
|
||||||
|
|
||||||
|
### Fixes
|
||||||
|
- Fix serialization error with missing quotes in metrics model ref ([#4252](https://github.com/dbt-labs/dbt-core/issues/4252), [#4287](https://github.com/dbt-labs/dbt-core/pull/4289))
|
||||||
|
- Correct definition of 'created_at' in ParsedMetric nodes ([#4298](http://github.com/dbt-labs/dbt-core/issues/4298), [#4299](https://github.com/dbt-labs/dbt-core/pull/4299))
|
||||||
|
|
||||||
|
### Fixes
|
||||||
|
- Allow specifying default in Jinja config.get with default keyword ([#4273](https://github.com/dbt-labs/dbt-core/issues/4273), [#4297](https://github.com/dbt-labs/dbt-core/pull/4297))
|
||||||
|
- Fix serialization error with missing quotes in metrics model ref ([#4252](https://github.com/dbt-labs/dbt-core/issues/4252), [#4287](https://github.com/dbt-labs/dbt-core/pull/4289))
|
||||||
|
- Correct definition of 'created_at' in ParsedMetric nodes ([#4298](https://github.com/dbt-labs/dbt-core/issues/4298), [#4299](https://github.com/dbt-labs/dbt-core/pull/4299))
|
||||||
|
|
||||||
|
### Under the hood
|
||||||
|
- Add --indirect-selection parameter to profiles.yml and builtin DBT_ env vars; stringified parameter to enable multi-modal use ([#3997](https://github.com/dbt-labs/dbt-core/issues/3997), [#4270](https://github.com/dbt-labs/dbt-core/pull/4270))
|
||||||
|
- Fix filesystem searcher test failure on Python 3.9 ([#3689](https://github.com/dbt-labs/dbt-core/issues/3689), [#4271](https://github.com/dbt-labs/dbt-core/pull/4271))
|
||||||
|
- Clean up deprecation warnings shown for `dbt_project.yml` config renames ([#4276](https://github.com/dbt-labs/dbt-core/issues/4276), [#4291](https://github.com/dbt-labs/dbt-core/pull/4291))
|
||||||
|
- Fix metrics count in compiled project stats ([#4290](https://github.com/dbt-labs/dbt-core/issues/4290), [#4292](https://github.com/dbt-labs/dbt-core/pull/4292))
|
||||||
|
- First pass at supporting more dbt tasks via python lib ([#4200](https://github.com/dbt-labs/dbt-core/pull/4200))
|
||||||
|
|
||||||
|
Contributors:
|
||||||
|
- [@kadero](https://github.com/kadero) ([#4285](https://github.com/dbt-labs/dbt-core/pull/4285), [#4296](https://github.com/dbt-labs/dbt-core/pull/4296))
|
||||||
|
- [@joellabes](https://github.com/joellabes) ([#4295](https://github.com/dbt-labs/dbt-core/pull/4295))
|
||||||
|
|
||||||
|
## dbt-core 1.0.0rc1 (November 10, 2021)
|
||||||
|
|
||||||
|
### Breaking changes
|
||||||
|
- Replace `greedy` flag/property for test selection with `indirect_selection: eager/cautious` flag/property. Set to `eager` by default. **Note:** This reverts test selection to its pre-v0.20 behavior by default. `dbt test -s my_model` _will_ select multi-parent tests, such as `relationships`, that depend on unselected resources. To achieve the behavior change in v0.20 + v0.21, set `--indirect-selection=cautious` on the CLI or `indirect_selection: cautious` in yaml selectors. ([#4082](https://github.com/dbt-labs/dbt-core/issues/4082), [#4104](https://github.com/dbt-labs/dbt-core/pull/4104))
|
||||||
|
- In v1.0.0, **`pip install dbt` will raise an explicit error.** Instead, please use `pip install dbt-<adapter>` (to use dbt with that database adapter), or `pip install dbt-core` (for core functionality). For parity with the previous behavior of `pip install dbt`, you can use: `pip install dbt-core dbt-postgres dbt-redshift dbt-snowflake dbt-bigquery` ([#4100](https://github.com/dbt-labs/dbt-core/issues/4100), [#4133](https://github.com/dbt-labs/dbt-core/pull/4133))
|
||||||
|
- Reorganize the `global_project` (macros) into smaller files with clearer names. Remove unused global macros: `column_list`, `column_list_for_create_table`, `incremental_upsert` ([#4154](https://github.com/dbt-labs/dbt-core/pull/4154))
|
||||||
|
- Introduce structured event interface, and begin conversion of all legacy logging ([#3359](https://github.com/dbt-labs/dbt-core/issues/3359), [#4055](https://github.com/dbt-labs/dbt-core/pull/4055))
|
||||||
|
- **This is a breaking change for adapter plugins, requiring a very simple migration.** See [`events` module README](core/dbt/events/README.md#adapter-maintainers) for details.
|
||||||
|
- If you maintain another kind of dbt-core plugin that makes heavy use of legacy logging, and you need time to cut over to the new event interface, you can re-enable the legacy logger via an environment variable shim, `DBT_ENABLE_LEGACY_LOGGER=True`. Be advised that we will remove this capability in a future version of dbt-core.
|
||||||
|
|
||||||
|
### Features
|
||||||
|
- Allow nullable `error_after` in source freshness ([#3874](https://github.com/dbt-labs/dbt-core/issues/3874), [#3955](https://github.com/dbt-labs/dbt-core/pull/3955))
|
||||||
|
- Add `metrics` nodes ([#4071](https://github.com/dbt-labs/dbt-core/issues/4071), [#4235](https://github.com/dbt-labs/dbt-core/pull/4235))
|
||||||
|
- Add support for `dbt init <project_name>`, and support for `skip_profile_setup` argument (`dbt init -s`) ([#4156](https://github.com/dbt-labs/dbt-core/issues/4156), [#4249](https://github.com/dbt-labs/dbt-core/pull/4249))
|
||||||
|
|
||||||
|
### Fixes
|
||||||
|
- Changes unit tests using `assertRaisesRegexp` to `assertRaisesRegex` ([#4136](https://github.com/dbt-labs/dbt-core/issues/4132), [#4136](https://github.com/dbt-labs/dbt-core/pull/4136))
|
||||||
|
- Allow retries when the answer from a `dbt deps` is `None` ([#4178](https://github.com/dbt-labs/dbt-core/issues/4178), [#4225](https://github.com/dbt-labs/dbt-core/pull/4225))
|
||||||
|
|
||||||
|
### Docs
|
||||||
|
|
||||||
|
- Fix non-alphabetical sort of Source Tables in source overview page ([docs#81](https://github.com/dbt-labs/dbt-docs/issues/81), [docs#218](https://github.com/dbt-labs/dbt-docs/pull/218))
|
||||||
|
- Add title tag to node elements in tree ([docs#202](https://github.com/dbt-labs/dbt-docs/issues/202), [docs#203](https://github.com/dbt-labs/dbt-docs/pull/203))
|
||||||
|
- Account for test rename: `schema` → `generic`, `data` →` singular`. Use `test_metadata` instead of `schema`/`data` tags to differentiate ([docs#216](https://github.com/dbt-labs/dbt-docs/issues/216), [docs#222](https://github.com/dbt-labs/dbt-docs/pull/222))
|
||||||
|
- Add `metrics` ([core#216](https://github.com/dbt-labs/dbt-core/issues/4235), [docs#223](https://github.com/dbt-labs/dbt-docs/pull/223))
|
||||||
|
|
||||||
|
### Under the hood
|
||||||
|
- Bump artifact schema versions for 1.0.0: manifest v4, run results v4, sources v3. Notable changes: added `metrics` nodes; schema test + data test nodes are renamed to generic test + singular test nodes; freshness threshold default values ([#4191](https://github.com/dbt-labs/dbt-core/pull/4191))
|
||||||
|
- Speed up node selection by skipping `incorporate_indirect_nodes` if not needed ([#4213](https://github.com/dbt-labs/dbt-core/issues/4213), [#4214](https://github.com/dbt-labs/dbt-core/issues/4214))
|
||||||
|
- When `on_schema_change` is set, pass common columns as `dest_columns` in incremental merge macros ([#4144](https://github.com/dbt-labs/dbt-core/issues/4144), [#4170](https://github.com/dbt-labs/dbt-core/pull/4170))
|
||||||
|
- Clear adapters before registering in `lib` module config generation ([#4218](https://github.com/dbt-labs/dbt-core/pull/4218))
|
||||||
|
- Remove official support for python 3.6, which is reaching end of life on December 23, 2021 ([#4134](https://github.com/dbt-labs/dbt-core/issues/4134), [#4223](https://github.com/dbt-labs/dbt-core/pull/4223))
|
||||||
|
|
||||||
|
Contributors:
|
||||||
|
- [@kadero](https://github.com/kadero) ([#3955](https://github.com/dbt-labs/dbt-core/pull/3955), [#4249](https://github.com/dbt-labs/dbt-core/pull/4249))
|
||||||
|
- [@frankcash](https://github.com/frankcash) ([#4136](https://github.com/dbt-labs/dbt-core/pull/4136))
|
||||||
|
- [@Kayrnt](https://github.com/Kayrnt) ([#4136](https://github.com/dbt-labs/dbt-core/pull/4170))
|
||||||
|
- [@VersusFacit](https://github.com/VersusFacit) ([#4104](https://github.com/dbt-labs/dbt-core/pull/4104))
|
||||||
|
- [@joellabes](https://github.com/joellabes) ([#4104](https://github.com/dbt-labs/dbt-core/pull/4104))
|
||||||
|
- [@b-per](https://github.com/b-per) ([#4225](https://github.com/dbt-labs/dbt-core/pull/4225))
|
||||||
|
- [@salmonsd](https://github.com/salmonsd) ([docs#218](https://github.com/dbt-labs/dbt-docs/pull/218))
|
||||||
|
- [@miike](https://github.com/miike) ([docs#203](https://github.com/dbt-labs/dbt-docs/pull/203))
|
||||||
|
|
||||||
|
|
||||||
|
## dbt-core 1.0.0b2 (October 25, 2021)
|
||||||
|
|
||||||
|
### Breaking changes
|
||||||
|
|
||||||
|
- Enable `on-run-start` and `on-run-end` hooks for `dbt test`. Add `flags.WHICH` to execution context, representing current task ([#3463](https://github.com/dbt-labs/dbt-core/issues/3463), [#4004](https://github.com/dbt-labs/dbt-core/pull/4004))
|
||||||
|
|
||||||
|
### Features
|
||||||
|
- Normalize global CLI arguments/flags ([#2990](https://github.com/dbt-labs/dbt/issues/2990), [#3839](https://github.com/dbt-labs/dbt/pull/3839))
|
||||||
|
- Turns on the static parser by default and adds the flag `--no-static-parser` to disable it. ([#3377](https://github.com/dbt-labs/dbt/issues/3377), [#3939](https://github.com/dbt-labs/dbt/pull/3939))
|
||||||
|
- Generic test FQNs have changed to include the relative path, resource, and column (if applicable) where they are defined. This makes it easier to configure them from the `tests` block in `dbt_project.yml` ([#3259](https://github.com/dbt-labs/dbt/pull/3259), [#3880](https://github.com/dbt-labs/dbt/pull/3880)
|
||||||
|
- Turn on partial parsing by default ([#3867](https://github.com/dbt-labs/dbt/issues/3867), [#3989](https://github.com/dbt-labs/dbt/issues/3989))
|
||||||
|
- Add `result:<status>` selectors to automatically rerun failed tests and erroneous models. This makes it easier to rerun failed dbt jobs with a simple selector flag instead of restarting from the beginning or manually running the dbt models in scope. ([#3859](https://github.com/dbt-labs/dbt/issues/3891), [#4017](https://github.com/dbt-labs/dbt/pull/4017))
|
||||||
|
- `dbt init` is now interactive, generating profiles.yml when run inside existing project ([#3625](https://github.com/dbt-labs/dbt/pull/3625))
|
||||||
|
|
||||||
|
### Under the hood
|
||||||
|
|
||||||
|
- Fix intermittent errors in partial parsing tests ([#4060](https://github.com/dbt-labs/dbt-core/issues/4060), [#4068](https://github.com/dbt-labs/dbt-core/pull/4068))
|
||||||
|
- Make finding disabled nodes more consistent ([#4069](https://github.com/dbt-labs/dbt-core/issues/4069), [#4073](https://github.com/dbt-labs/dbt-core/pull/4073))
|
||||||
|
- Remove connection from `render_with_context` during parsing, thereby removing misleading log message ([#3137](https://github.com/dbt-labs/dbt-core/issues/3137), [#4062](https://github.com/dbt-labs/dbt-core/pull/4062))
|
||||||
|
- Wait for postgres docker container to be ready in `setup_db.sh`. ([#3876](https://github.com/dbt-labs/dbt-core/issues/3876), [#3908](https://github.com/dbt-labs/dbt-core/pull/3908))
|
||||||
|
- Prefer macros defined in the project over the ones in a package by default ([#4106](https://github.com/dbt-labs/dbt-core/issues/4106), [#4114](https://github.com/dbt-labs/dbt-core/pull/4114))
|
||||||
|
- Dependency updates ([#4079](https://github.com/dbt-labs/dbt-core/pull/4079)), ([#3532](https://github.com/dbt-labs/dbt-core/pull/3532)
|
||||||
|
- Schedule partial parsing for SQL files with env_var changes ([#3885](https://github.com/dbt-labs/dbt-core/issues/3885), [#4101](https://github.com/dbt-labs/dbt-core/pull/4101))
|
||||||
|
- Schedule partial parsing for schema files with env_var changes ([#3885](https://github.com/dbt-labs/dbt-core/issues/3885), [#4162](https://github.com/dbt-labs/dbt-core/pull/4162))
|
||||||
|
- Skip partial parsing when env_vars change in dbt_project or profile ([#3885](https://github.com/dbt-labs/dbt-core/issues/3885), [#4212](https://github.com/dbt-labs/dbt-core/pull/4212))
|
||||||
|
|
||||||
|
Contributors:
|
||||||
|
- [@sungchun12](https://github.com/sungchun12) ([#4017](https://github.com/dbt-labs/dbt/pull/4017))
|
||||||
|
- [@matt-winkler](https://github.com/matt-winkler) ([#4017](https://github.com/dbt-labs/dbt/pull/4017))
|
||||||
|
- [@NiallRees](https://github.com/NiallRees) ([#3625](https://github.com/dbt-labs/dbt/pull/3625))
|
||||||
|
- [@rvacaru](https://github.com/rvacaru) ([#3908](https://github.com/dbt-labs/dbt/pull/3908))
|
||||||
|
- [@JCZuurmond](https://github.com/jczuurmond) ([#4114](https://github.com/dbt-labs/dbt-core/pull/4114))
|
||||||
|
- [@ljhopkins2](https://github.com/dbt-labs/dbt-core/pull/4079)
|
||||||
|
|
||||||
|
## dbt-core 1.0.0b1 (October 11, 2021)
|
||||||
|
|
||||||
|
### Breaking changes
|
||||||
|
|
||||||
|
- The two type of test definitions are now "singular" and "generic" (instead of "data" and "schema", respectively). The `test_type:` selection method accepts `test_type:singular` and `test_type:generic`. (It will also accept `test_type:schema` and `test_type:data` for backwards compatibility) ([#3234](https://github.com/dbt-labs/dbt-core/issues/3234), [#3880](https://github.com/dbt-labs/dbt-core/pull/3880)). **Not backwards compatible:** The `--data` and `--schema` flags to `dbt test` are no longer supported, and tests no longer have the tags `'data'` and `'schema'` automatically applied.
|
||||||
|
- Deprecated the use of the `packages` arg `adapter.dispatch` in favor of the `macro_namespace` arg. ([#3895](https://github.com/dbt-labs/dbt-core/issues/3895))
|
||||||
|
|
||||||
|
### Features
|
||||||
|
- Normalize global CLI arguments/flags ([#2990](https://github.com/dbt-labs/dbt-core/issues/2990), [#3839](https://github.com/dbt-labs/dbt-core/pull/3839))
|
||||||
|
- Turns on the static parser by default and adds the flag `--no-static-parser` to disable it. ([#3377](https://github.com/dbt-labs/dbt-core/issues/3377), [#3939](https://github.com/dbt-labs/dbt-core/pull/3939))
|
||||||
|
- Generic test FQNs have changed to include the relative path, resource, and column (if applicable) where they are defined. This makes it easier to configure them from the `tests` block in `dbt_project.yml` ([#3259](https://github.com/dbt-labs/dbt-core/pull/3259), [#3880](https://github.com/dbt-labs/dbt-core/pull/3880)
|
||||||
|
- Turn on partial parsing by default ([#3867](https://github.com/dbt-labs/dbt-core/issues/3867), [#3989](https://github.com/dbt-labs/dbt-core/issues/3989))
|
||||||
|
- Generic test can now be added under a `generic` subfolder in the `test-paths` directory. ([#4052](https://github.com/dbt-labs/dbt-core/pull/4052))
|
||||||
|
|
||||||
|
### Fixes
|
||||||
|
- Add generic tests defined on sources to the manifest once, not twice ([#3347](https://github.com/dbt-labs/dbt/issues/3347), [#3880](https://github.com/dbt-labs/dbt/pull/3880))
|
||||||
|
- Skip partial parsing if certain macros have changed ([#3810](https://github.com/dbt-labs/dbt/issues/3810), [#3982](https://github.com/dbt-labs/dbt/pull/3892))
|
||||||
|
- Enable cataloging of unlogged Postgres tables ([3961](https://github.com/dbt-labs/dbt/issues/3961), [#3993](https://github.com/dbt-labs/dbt/pull/3993))
|
||||||
|
- Fix multiple disabled nodes ([#4013](https://github.com/dbt-labs/dbt/issues/4013), [#4018](https://github.com/dbt-labs/dbt/pull/4018))
|
||||||
|
- Fix multiple partial parsing errors ([#3996](https://github.com/dbt-labs/dbt/issues/3006), [#4020](https://github.com/dbt-labs/dbt/pull/4018))
|
||||||
|
- Return an error instead of a warning when running with `--warn-error` and no models are selected ([#4006](https://github.com/dbt-labs/dbt/issues/4006), [#4019](https://github.com/dbt-labs/dbt/pull/4019))
|
||||||
|
- Fixed bug with `error_if` test option ([#4070](https://github.com/dbt-labs/dbt-core/pull/4070))
|
||||||
|
|
||||||
|
### Under the hood
|
||||||
|
- Enact deprecation for `materialization-return` and replace deprecation warning with an exception. ([#3896](https://github.com/dbt-labs/dbt-core/issues/3896))
|
||||||
|
- Build catalog for only relational, non-ephemeral nodes in the graph ([#3920](https://github.com/dbt-labs/dbt-core/issues/3920))
|
||||||
|
- Enact deprecation to remove the `release` arg from the `execute_macro` method. ([#3900](https://github.com/dbt-labs/dbt-core/issues/3900))
|
||||||
|
- Enact deprecation for default quoting to be True. Override for the `dbt-snowflake` adapter so it stays `False`. ([#3898](https://github.com/dbt-labs/dbt-core/issues/3898))
|
||||||
|
- Enact deprecation for object used as dictionaries when they should be dataclasses. Replace deprecation warning with an exception for the dunder methods of `__iter__` and `__len__` for all superclasses of FakeAPIObject. ([#3897](https://github.com/dbt-labs/dbt-core/issues/3897))
|
||||||
|
- Enact deprecation for `adapter-macro` and replace deprecation warning with an exception. ([#3901](https://github.com/dbt-labs/dbt-core/issues/3901))
|
||||||
|
- Add warning when trying to put a node under the wrong key. ie. A seed under models in a `schema.yml` file. ([#3899](https://github.com/dbt-labs/dbt-core/issues/3899))
|
||||||
|
- Plugins for `redshift`, `snowflake`, and `bigquery` have moved to separate repos: [`dbt-redshift`](https://github.com/dbt-labs/dbt-redshift), [`dbt-snowflake`](https://github.com/dbt-labs/dbt-snowflake), [`dbt-bigquery`](https://github.com/dbt-labs/dbt-bigquery)
|
||||||
|
- Change the default dbt packages installation directory to `dbt_packages` from `dbt_modules`. Also rename `module-path` to `packages-install-path` to allow default overrides of package install directory. Deprecation warning added for projects using the old `dbt_modules` name without specifying a `packages-install-path`. ([#3523](https://github.com/dbt-labs/dbt-core/issues/3523))
|
||||||
|
- Update the default project paths to be `analysis-paths = ['analyses']` and `test-paths = ['tests']`. Also have starter project set `analysis-paths: ['analyses']` from now on. ([#2659](https://github.com/dbt-labs/dbt-core/issues/2659))
|
||||||
|
- Define the data type of `sources` as an array of arrays of string in the manifest artifacts. ([#3966](https://github.com/dbt-labs/dbt-core/issues/3966), [#3967](https://github.com/dbt-labs/dbt-core/pull/3967))
|
||||||
|
- Marked `source-paths` and `data-paths` as deprecated keys in `dbt_project.yml` in favor of `model-paths` and `seed-paths` respectively.([#1607](https://github.com/dbt-labs/dbt-core/issues/1607))
|
||||||
|
- Surface git errors to `stdout` when cloning dbt packages from Github. ([#3167](https://github.com/dbt-labs/dbt-core/issues/3167))
|
||||||
|
|
||||||
|
Contributors:
|
||||||
|
|
||||||
|
- [@dave-connors-3](https://github.com/dave-connors-3) ([#3920](https://github.com/dbt-labs/dbt-core/pull/3922))
|
||||||
|
- [@kadero](https://github.com/kadero) ([#3952](https://github.com/dbt-labs/dbt-core/pull/3953))
|
||||||
|
- [@samlader](https://github.com/samlader) ([#3993](https://github.com/dbt-labs/dbt-core/pull/3993))
|
||||||
|
- [@yu-iskw](https://github.com/yu-iskw) ([#3967](https://github.com/dbt-labs/dbt-core/pull/3967))
|
||||||
|
- [@laxjesse](https://github.com/laxjesse) ([#4019](https://github.com/dbt-labs/dbt-core/pull/4019))
|
||||||
|
- [@gitznik](https://github.com/Gitznik) ([#4124](https://github.com/dbt-labs/dbt-core/pull/4124))
|
||||||
3
.changes/1.0.4.md
Normal file
3
.changes/1.0.4.md
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
## dbt-core 1.0.4 - March 18, 2022
|
||||||
|
### Fixes
|
||||||
|
- Depend on new dbt-extractor version with fixed GitHub links to resolve Homebrew installation issues ([#4891](https://github.com/dbt-labs/dbt-core/issues/4891), [#4890](https://github.com/dbt-labs/dbt-core/pull/4890))
|
||||||
16
.changes/1.0.5-rc1.md
Normal file
16
.changes/1.0.5-rc1.md
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
## dbt-core 1.0.5-rc1 - March 21, 2022
|
||||||
|
### Fixes
|
||||||
|
- Fix bug causing empty node level meta, snapshot config errors ([#4459](https://github.com/dbt-labs/dbt-core/issues/4459), [#4726](https://github.com/dbt-labs/dbt-core/pull/4726))
|
||||||
|
- Support click versions in the v7.x series ([#4566](https://github.com/dbt-labs/dbt-core/issues/4566), [#4681](https://github.com/dbt-labs/dbt-core/pull/4681))
|
||||||
|
- Fixed a bug where nodes that depend on multiple macros couldn't be selected using `-s state:modified` ([#4678](https://github.com/dbt-labs/dbt-core/issues/4678), [#4820](https://github.com/dbt-labs/dbt-core/pull/4820))
|
||||||
|
- Catch all Requests Exceptions on deps install to attempt retries. Also log the exceptions hit. ([#4849](https://github.com/dbt-labs/dbt-core/issues/4849), [#4865](https://github.com/dbt-labs/dbt-core/pull/4865))
|
||||||
|
- Fix partial parsing bug with multiple snapshot blocks ([#4771](https://github.com/dbt-labs/dbt-core/issues/4771), [#4773](https://github.com/dbt-labs/dbt-core/pull/4773))
|
||||||
|
- Use cli_vars instead of context to create package and selector renderers ([#4876](https://github.com/dbt-labs/dbt-core/issues/4876), [#4878](https://github.com/dbt-labs/dbt-core/pull/4878))
|
||||||
|
### Under the Hood
|
||||||
|
- Automate changelog generation with changie ([#4652](https://github.com/dbt-labs/dbt-core/issues/4652), [#4743](https://github.com/dbt-labs/dbt-core/pull/4743))
|
||||||
|
- Fix broken links for changelog generation and tweak GHA to only post a comment once when changelog entry is missing. ([#4848](https://github.com/dbt-labs/dbt-core/issues/4848), [#4857](https://github.com/dbt-labs/dbt-core/pull/4857))
|
||||||
|
### Docs
|
||||||
|
- Resolve errors related to operations preventing DAG from generating in the docs. Also patch a spark issue to allow search to filter accurately past the missing columns. ([#4578](https://github.com/dbt-labs/dbt-core/issues/4578), [#4763](https://github.com/dbt-labs/dbt-core/pull/4763))
|
||||||
|
|
||||||
|
Contributors:
|
||||||
|
- [twilly](https://github.com/twilly) ([#4681](https://github.com/dbt-labs/dbt-core/pull/4681))
|
||||||
4
.changes/1.0.5-rc2.md
Normal file
4
.changes/1.0.5-rc2.md
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
## dbt-core 1.0.5-rc2 - April 08, 2022
|
||||||
|
### Fixes
|
||||||
|
- Catch more cases to retry package retrieval for deps pointing to the hub. Also start to cache the package requests. ([#4849](https://github.com/dbt-labs/dbt-core/issues/4849), [#4982](https://github.com/dbt-labs/dbt-core/pull/4982))
|
||||||
|
Contributors:
|
||||||
8
.changes/1.0.5/Docs-20220307-203105.yaml
Normal file
8
.changes/1.0.5/Docs-20220307-203105.yaml
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
kind: Docs
|
||||||
|
body: Resolve errors related to operations preventing DAG from generating in the docs. Also
|
||||||
|
patch a spark issue to allow search to filter accurately past the missing columns.
|
||||||
|
time: 2022-03-07T20:31:05.557064-06:00
|
||||||
|
custom:
|
||||||
|
Author: emmyoop
|
||||||
|
Issue: "4578"
|
||||||
|
PR: "4763"
|
||||||
7
.changes/1.0.5/Fixes-20220307-203022.yaml
Normal file
7
.changes/1.0.5/Fixes-20220307-203022.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: Fix bug causing empty node level meta, snapshot config errors
|
||||||
|
time: 2022-03-07T20:30:22.624709-06:00
|
||||||
|
custom:
|
||||||
|
Author: gshank
|
||||||
|
Issue: "4459"
|
||||||
|
PR: "4726"
|
||||||
7
.changes/1.0.5/Fixes-20220309-100530.yaml
Normal file
7
.changes/1.0.5/Fixes-20220309-100530.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: Support click versions in the v7.x series
|
||||||
|
time: 2022-03-09T10:05:30.796158-06:00
|
||||||
|
custom:
|
||||||
|
Author: twilly
|
||||||
|
Issue: "4566"
|
||||||
|
PR: "4681"
|
||||||
8
.changes/1.0.5/Fixes-20220309-100751.yaml
Normal file
8
.changes/1.0.5/Fixes-20220309-100751.yaml
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: Fixed a bug where nodes that depend on multiple macros couldn't be selected
|
||||||
|
using `-s state:modified`
|
||||||
|
time: 2022-03-09T10:07:51.735463-06:00
|
||||||
|
custom:
|
||||||
|
Author: stu-k
|
||||||
|
Issue: "4678"
|
||||||
|
PR: "4820"
|
||||||
8
.changes/1.0.5/Fixes-20220315-105331.yaml
Normal file
8
.changes/1.0.5/Fixes-20220315-105331.yaml
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: Catch all Requests Exceptions on deps install to attempt retries. Also log
|
||||||
|
the exceptions hit.
|
||||||
|
time: 2022-03-15T10:53:31.637963-05:00
|
||||||
|
custom:
|
||||||
|
Author: emmyoop
|
||||||
|
Issue: "4849"
|
||||||
|
PR: "4865"
|
||||||
7
.changes/1.0.5/Fixes-20220316-143959.yaml
Normal file
7
.changes/1.0.5/Fixes-20220316-143959.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: Fix partial parsing bug with multiple snapshot blocks
|
||||||
|
time: 2022-03-16T14:39:59.16756-04:00
|
||||||
|
custom:
|
||||||
|
Author: gshank
|
||||||
|
Issue: "4771"
|
||||||
|
PR: "4773"
|
||||||
7
.changes/1.0.5/Fixes-20220316-155420.yaml
Normal file
7
.changes/1.0.5/Fixes-20220316-155420.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: Use cli_vars instead of context to create package and selector renderers
|
||||||
|
time: 2022-03-16T15:54:20.608384-04:00
|
||||||
|
custom:
|
||||||
|
Author: gshank
|
||||||
|
Issue: "4876"
|
||||||
|
PR: "4878"
|
||||||
7
.changes/1.0.5/Fixes-20220331-143923.yaml
Normal file
7
.changes/1.0.5/Fixes-20220331-143923.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: Catch more cases to retry package retrieval for deps pointing to the hub. Also start to cache the package requests.
|
||||||
|
time: 2022-03-31T14:39:23.952705-05:00
|
||||||
|
custom:
|
||||||
|
Author: emmyoop
|
||||||
|
Issue: "4849"
|
||||||
|
PR: "4982"
|
||||||
7
.changes/1.0.5/Under the Hood-20220218-161319.yaml
Normal file
7
.changes/1.0.5/Under the Hood-20220218-161319.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Under the Hood
|
||||||
|
body: Automate changelog generation with changie
|
||||||
|
time: 2022-02-18T16:13:19.882436-06:00
|
||||||
|
custom:
|
||||||
|
Author: emmyoop
|
||||||
|
Issue: "4652"
|
||||||
|
PR: "4743"
|
||||||
8
.changes/1.0.5/Under the Hood-20220311-101851.yaml
Normal file
8
.changes/1.0.5/Under the Hood-20220311-101851.yaml
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
kind: Under the Hood
|
||||||
|
body: Fix broken links for changelog generation and tweak GHA to only post a comment
|
||||||
|
once when changelog entry is missing.
|
||||||
|
time: 2022-03-11T10:18:51.404524-06:00
|
||||||
|
custom:
|
||||||
|
Author: emmyoop
|
||||||
|
Issue: "4848"
|
||||||
|
PR: "4857"
|
||||||
@@ -26,12 +26,6 @@ changie batch <version> --move-dir '<version>' --prerelease 'rc1'
|
|||||||
changie merge
|
changie merge
|
||||||
```
|
```
|
||||||
|
|
||||||
Example
|
|
||||||
```
|
|
||||||
changie batch 1.0.5 --move-dir '1.0.5' --prerelease 'rc1'
|
|
||||||
changie merge
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Final Release Workflow
|
#### Final Release Workflow
|
||||||
These commands batch up changes in `/.changes/unreleased` as well as `/.changes/<version>` to be included in this final release and delete all prereleases. This rolls all prereleases up into a single final release. All `yaml` files in `/unreleased` and `<version>` will be deleted at this point.
|
These commands batch up changes in `/.changes/unreleased` as well as `/.changes/<version>` to be included in this final release and delete all prereleases. This rolls all prereleases up into a single final release. All `yaml` files in `/unreleased` and `<version>` will be deleted at this point.
|
||||||
|
|
||||||
@@ -40,14 +34,7 @@ changie batch <version> --include '<version>' --remove-prereleases
|
|||||||
changie merge
|
changie merge
|
||||||
```
|
```
|
||||||
|
|
||||||
Example
|
|
||||||
```
|
|
||||||
changie batch 1.0.5 --include '1.0.5' --remove-prereleases
|
|
||||||
changie merge
|
|
||||||
```
|
|
||||||
|
|
||||||
### A Note on Manual Edits & Gotchas
|
### A Note on Manual Edits & Gotchas
|
||||||
- Changie generates markdown files in the `.changes` directory that are parsed together with the `changie merge` command. Every time `changie merge` is run, it regenerates the entire file. For this reason, any changes made directly to `CHANGELOG.md` will be overwritten on the next run of `changie merge`.
|
- Changie generates markdown files in the `.changes` directory that are parsed together with the `changie merge` command. Every time `changie merge` is run, it regenerates the entire file. For this reason, any changes made directly to `CHANGELOG.md` will be overwritten on the next run of `changie merge`.
|
||||||
- If changes need to be made to the `CHANGELOG.md`, make the changes to the relevant `<version>.md` file located in the `/.changes` directory. You will then run `changie merge` to regenerate the `CHANGELOG.MD`.
|
- If changes need to be made to the `CHANGELOG.md`, make the changes to the relevant `<version>.md` file located in the `/.changes` directory. You will then run `changie merge` to regenerate the `CHANGELOG.MD`.
|
||||||
- Do not run `changie batch` again on released versions. Our final release workflow deletes all of the yaml files associated with individual changes. If for some reason modifications to the `CHANGELOG.md` are required after we've generated the final release `CHANGELOG.md`, the modifications need to be done manually to the `<version>.md` file in the `/.changes` directory.
|
- Do not run `changie batch` again on released versions. Our final release workflow deletes all of the yaml files associated with individual changes. If for some reason modifications to the `CHANGELOG.md` are required after we've generated the final release `CHANGELOG.md`, the modifications need to be done manually to the `<version>.md` file in the `/.changes` directory.
|
||||||
- changie can modify, create and delete files depending on the command you run. This is expected. Be sure to commit everything that has been modified and deleted.
|
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
# dbt Core Changelog
|
# dbt Core Changelog
|
||||||
|
|
||||||
- This file provides a full account of all changes to `dbt-core`
|
- This file provides a full account of all changes to `dbt-core` and `dbt-postgres`
|
||||||
- Changes are listed under the (pre)release in which they first appear. Subsequent releases include changes from previous releases.
|
- Changes are listed under the (pre)release in which they first appear. Subsequent releases include changes from previous releases.
|
||||||
- "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
|
- "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
|
||||||
- Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)
|
- Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)
|
||||||
|
|||||||
@@ -1,6 +0,0 @@
|
|||||||
kind: Dependencies
|
|
||||||
body: Use EventCatcher from dbt-common instead of maintaining a local copy
|
|
||||||
time: 2025-11-18T15:53:54.284561+05:30
|
|
||||||
custom:
|
|
||||||
Author: 3loka
|
|
||||||
Issue: "12124"
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
kind: Features
|
|
||||||
body: Support partial parsing for function nodes
|
|
||||||
time: 2025-10-06T14:03:52.258104-05:00
|
|
||||||
custom:
|
|
||||||
Author: QMalcolm
|
|
||||||
Issue: "12072"
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
kind: Features
|
|
||||||
body: Allow for defining funciton arguments with default values
|
|
||||||
time: 2025-11-17T14:10:53.860178-06:00
|
|
||||||
custom:
|
|
||||||
Author: QMalcolm
|
|
||||||
Issue: "12044"
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
kind: Features
|
|
||||||
body: Raise jsonschema-based deprecation warnings by default
|
|
||||||
time: 2025-12-01T16:52:09.354436-05:00
|
|
||||||
custom:
|
|
||||||
Author: michelleark
|
|
||||||
Issue: 12240
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
kind: Features
|
|
||||||
body: ':bug: :snowman: Disable unit tests whose model is disabled'
|
|
||||||
time: 2025-12-03T12:29:26.209248-05:00
|
|
||||||
custom:
|
|
||||||
Author: michelleark
|
|
||||||
Issue: "10540"
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
kind: Features
|
|
||||||
body: Implement config.meta_get and config.meta_require
|
|
||||||
time: 2025-12-10T20:20:01.354288-05:00
|
|
||||||
custom:
|
|
||||||
Author: gshank
|
|
||||||
Issue: "12012"
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
kind: Fixes
|
|
||||||
body: Address Click 8.2+ deprecation warning
|
|
||||||
time: 2025-09-22T15:17:26.983151-06:00
|
|
||||||
custom:
|
|
||||||
Author: edgarrmondragon
|
|
||||||
Issue: "12038"
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
kind: Fixes
|
|
||||||
body: Include macros in unit test parsing
|
|
||||||
time: 2025-11-17T14:06:49.518566-05:00
|
|
||||||
custom:
|
|
||||||
Author: michelleark nathanskone
|
|
||||||
Issue: "10157"
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
kind: Fixes
|
|
||||||
body: Allow dbt deps to run when vars lack defaults in dbt_project.yml
|
|
||||||
time: 2025-11-17T18:50:25.759091+05:30
|
|
||||||
custom:
|
|
||||||
Author: 3loka
|
|
||||||
Issue: "8913"
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
kind: Fixes
|
|
||||||
body: Restore DuplicateResourceNameError for intra-project node name duplication, behind behavior flag `require_unique_project_resource_names`
|
|
||||||
time: 2025-11-18T17:11:06.454784-05:00
|
|
||||||
custom:
|
|
||||||
Author: michelleark
|
|
||||||
Issue: "12152"
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
kind: Fixes
|
|
||||||
body: Allow the usage of `function` with `--exclude-resource-type` flag
|
|
||||||
time: 2025-11-19T19:50:34.703236-06:00
|
|
||||||
custom:
|
|
||||||
Author: QMalcolm
|
|
||||||
Issue: "12143"
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
kind: Fixes
|
|
||||||
body: Fix bug where schemas of functions weren't guaranteed to exist
|
|
||||||
time: 2025-11-24T15:56:29.467004-06:00
|
|
||||||
custom:
|
|
||||||
Author: QMalcolm
|
|
||||||
Issue: "12142"
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
kind: Fixes
|
|
||||||
body: Fix generation of deprecations summary
|
|
||||||
time: 2025-11-24T15:57:56.544123-08:00
|
|
||||||
custom:
|
|
||||||
Author: asiunov
|
|
||||||
Issue: "12146"
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
kind: Fixes
|
|
||||||
body: ':bug: :snowman: Correctly reference foreign key references when --defer and --state provided'
|
|
||||||
time: 2025-11-24T17:08:55.387946-05:00
|
|
||||||
custom:
|
|
||||||
Author: michellark
|
|
||||||
Issue: "11885"
|
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
kind: Fixes
|
|
||||||
body: ':bug: :snowman: Add exception when using --state and referring to a removed
|
|
||||||
test'
|
|
||||||
time: 2025-11-25T12:02:46.635026-05:00
|
|
||||||
custom:
|
|
||||||
Author: emmyoop
|
|
||||||
Issue: "10630"
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
kind: Fixes
|
|
||||||
body: ':bug: :snowman: Stop emitting `NoNodesForSelectionCriteria` three times during `build` command'
|
|
||||||
time: 2025-11-25T12:20:20.132379-06:00
|
|
||||||
custom:
|
|
||||||
Author: QMalcolm
|
|
||||||
Issue: "11627"
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
kind: Fixes
|
|
||||||
body: ":bug: :snowman: Fix long Python stack traces appearing when package dependencies have incompatible version requirements"
|
|
||||||
time: 2025-11-27T14:13:08.082542-05:00
|
|
||||||
custom:
|
|
||||||
Author: emmyoop
|
|
||||||
Issue: "12049"
|
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
kind: Fixes
|
|
||||||
body: ':bug: :snowman: Fixed issue where changing data type size/precision/scale (e.g.,
|
|
||||||
varchar(3) to varchar(10)) incorrectly triggered a breaking change error fo'
|
|
||||||
time: 2025-11-27T14:59:29.256274-05:00
|
|
||||||
custom:
|
|
||||||
Author: emmyoop
|
|
||||||
Issue: "11186"
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
kind: Fixes
|
|
||||||
body: ':bug: :snowman: Support unit testing models that depend on sources with the same name'
|
|
||||||
time: 2025-11-27T17:01:24.193516-05:00
|
|
||||||
custom:
|
|
||||||
Author: michelleark
|
|
||||||
Issue: 11975 10433
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
kind: Fixes
|
|
||||||
body: Fix bug in partial parsing when updating a model with a schema file that is referenced by a singular test
|
|
||||||
time: 2025-11-28T10:21:29.911147Z
|
|
||||||
custom:
|
|
||||||
Author: mattogburke
|
|
||||||
Issue: "12223"
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
kind: Fixes
|
|
||||||
body: ':bug: :snowman: Avoid retrying successful run-operation commands'
|
|
||||||
time: 2025-11-28T12:28:38.546261-05:00
|
|
||||||
custom:
|
|
||||||
Author: michelleark
|
|
||||||
Issue: "11850"
|
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
kind: Fixes
|
|
||||||
body: ':bug: :snowman: Fix `dbt deps --add-package` crash when packages.yml contains `warn-unpinned:
|
|
||||||
false`'
|
|
||||||
time: 2025-11-28T16:19:37.608722-05:00
|
|
||||||
custom:
|
|
||||||
Author: emmyoop
|
|
||||||
Issue: "9104"
|
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
kind: Fixes
|
|
||||||
body: ':bug: :snowman: Improve `dbt deps --add-package` duplicate detection with better
|
|
||||||
cross-source matching and word boundaries'
|
|
||||||
time: 2025-11-28T16:31:44.344099-05:00
|
|
||||||
custom:
|
|
||||||
Author: emmyoop
|
|
||||||
Issue: "12239"
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
kind: Fixes
|
|
||||||
body: ':bug: :snowman: Fix false positive deprecation warning of pre/post-hook SQL configs'
|
|
||||||
time: 2025-12-02T13:37:05.012112-05:00
|
|
||||||
custom:
|
|
||||||
Author: michelleark
|
|
||||||
Issue: "12244"
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
kind: Fixes
|
|
||||||
body: Ensure recent deprecation warnings include event name in message
|
|
||||||
time: 2025-12-09T17:50:31.334618-06:00
|
|
||||||
custom:
|
|
||||||
Author: QMalcolm
|
|
||||||
Issue: "12264"
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
kind: Fixes
|
|
||||||
body: Improve error message clarity when detecting nodes with space in name
|
|
||||||
time: 2025-12-10T14:39:35.107841-08:00
|
|
||||||
custom:
|
|
||||||
Author: michelleark
|
|
||||||
Issue: "11835"
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
kind: Under the Hood
|
|
||||||
body: Update jsonschemas for schema.yml and dbt_project.yml deprecations
|
|
||||||
time: 2025-11-19T11:01:10.616676-05:00
|
|
||||||
custom:
|
|
||||||
Author: michelleark
|
|
||||||
Issue: "12180"
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
kind: Under the Hood
|
|
||||||
body: Replace setuptools and tox with hatch for build, test, and environment management.
|
|
||||||
time: 2025-11-21T14:05:15.838252-05:00
|
|
||||||
custom:
|
|
||||||
Author: emmyoop
|
|
||||||
Issue: "12151"
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
kind: Under the Hood
|
|
||||||
body: Add add_catalog_integration call even if we have a pre-existing manifest
|
|
||||||
time: 2025-12-09T13:18:57.043254-08:00
|
|
||||||
custom:
|
|
||||||
Author: colin-rogers-dbt
|
|
||||||
Issue: "12262"
|
|
||||||
106
.changie.yaml
Normal file → Executable file
106
.changie.yaml
Normal file → Executable file
@@ -4,87 +4,47 @@ headerPath: header.tpl.md
|
|||||||
versionHeaderPath: ""
|
versionHeaderPath: ""
|
||||||
changelogPath: CHANGELOG.md
|
changelogPath: CHANGELOG.md
|
||||||
versionExt: md
|
versionExt: md
|
||||||
envPrefix: "CHANGIE_"
|
|
||||||
versionFormat: '## dbt-core {{.Version}} - {{.Time.Format "January 02, 2006"}}'
|
versionFormat: '## dbt-core {{.Version}} - {{.Time.Format "January 02, 2006"}}'
|
||||||
kindFormat: '### {{.Kind}}'
|
kindFormat: '### {{.Kind}}'
|
||||||
changeFormat: |-
|
changeFormat: '- {{.Body}} ([#{{.Custom.Issue}}](https://github.com/dbt-labs/dbt-core/issues/{{.Custom.Issue}}), [#{{.Custom.PR}}](https://github.com/dbt-labs/dbt-core/pull/{{.Custom.PR}}))'
|
||||||
{{- $IssueList := list }}
|
|
||||||
{{- $changes := splitList " " $.Custom.Issue }}
|
|
||||||
{{- range $issueNbr := $changes }}
|
|
||||||
{{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/issues/nbr)" | replace "nbr" $issueNbr }}
|
|
||||||
{{- $IssueList = append $IssueList $changeLink }}
|
|
||||||
{{- end -}}
|
|
||||||
- {{.Body}} ({{ range $index, $element := $IssueList }}{{if $index}}, {{end}}{{$element}}{{end}})
|
|
||||||
|
|
||||||
kinds:
|
kinds:
|
||||||
- label: Breaking Changes
|
- label: Fixes
|
||||||
- label: Features
|
- label: Features
|
||||||
- label: Fixes
|
- label: Under the Hood
|
||||||
- label: Docs
|
- label: Breaking Changes
|
||||||
changeFormat: |-
|
- label: Docs
|
||||||
{{- $IssueList := list }}
|
- label: Dependencies
|
||||||
{{- $changes := splitList " " $.Custom.Issue }}
|
|
||||||
{{- range $issueNbr := $changes }}
|
|
||||||
{{- $changeLink := "[dbt-docs/#nbr](https://github.com/dbt-labs/dbt-docs/issues/nbr)" | replace "nbr" $issueNbr }}
|
|
||||||
{{- $IssueList = append $IssueList $changeLink }}
|
|
||||||
{{- end -}}
|
|
||||||
- {{.Body}} ({{ range $index, $element := $IssueList }}{{if $index}}, {{end}}{{$element}}{{end}})
|
|
||||||
- label: Under the Hood
|
|
||||||
- label: Dependencies
|
|
||||||
- label: Security
|
|
||||||
|
|
||||||
newlines:
|
|
||||||
afterChangelogHeader: 1
|
|
||||||
afterKind: 1
|
|
||||||
afterChangelogVersion: 1
|
|
||||||
beforeKind: 1
|
|
||||||
endOfVersion: 1
|
|
||||||
|
|
||||||
custom:
|
custom:
|
||||||
- key: Author
|
- key: Author
|
||||||
label: GitHub Username(s) (separated by a single space if multiple)
|
label: GitHub Name
|
||||||
type: string
|
type: string
|
||||||
minLength: 3
|
minLength: 3
|
||||||
- key: Issue
|
- key: Issue
|
||||||
label: GitHub Issue Number (separated by a single space if multiple)
|
label: GitHub Issue Number
|
||||||
type: string
|
type: int
|
||||||
minLength: 1
|
minLength: 4
|
||||||
|
- key: PR
|
||||||
|
label: GitHub Pull Request Number
|
||||||
|
type: int
|
||||||
|
minLength: 4
|
||||||
footerFormat: |
|
footerFormat: |
|
||||||
|
Contributors:
|
||||||
{{- $contributorDict := dict }}
|
{{- $contributorDict := dict }}
|
||||||
{{- /* ensure we always skip snyk and dependabot */}}
|
{{- $core_team := list "emmyoop" "nathaniel-may" "gshank" "leahwicz" "ChenyuLInx" "stu-k" "iknox-fa" "VersusFacit" "McKnight-42" "jtcohen6" }}
|
||||||
{{- $bots := list "dependabot[bot]" "snyk-bot"}}
|
|
||||||
{{- range $change := .Changes }}
|
{{- range $change := .Changes }}
|
||||||
{{- $authorList := splitList " " $change.Custom.Author }}
|
{{- $author := $change.Custom.Author }}
|
||||||
{{- /* loop through all authors for a single changelog */}}
|
{{- if not (has $author $core_team)}}
|
||||||
{{- range $author := $authorList }}
|
{{- $pr := $change.Custom.PR }}
|
||||||
{{- $authorLower := lower $author }}
|
{{- if hasKey $contributorDict $author }}
|
||||||
{{- /* we only want to include non-bot contributors */}}
|
{{- $prList := get $contributorDict $author }}
|
||||||
{{- if not (has $authorLower $bots)}}
|
{{- $prList = append $prList $pr }}
|
||||||
{{- $changeList := splitList " " $change.Custom.Author }}
|
{{- $contributorDict := set $contributorDict $author $prList }}
|
||||||
{{- $IssueList := list }}
|
{{- else }}
|
||||||
{{- $changeLink := $change.Kind }}
|
{{- $prList := list $change.Custom.PR }}
|
||||||
{{- $changes := splitList " " $change.Custom.Issue }}
|
{{- $contributorDict := set $contributorDict $author $prList }}
|
||||||
{{- range $issueNbr := $changes }}
|
{{- end }}
|
||||||
{{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/issues/nbr)" | replace "nbr" $issueNbr }}
|
|
||||||
{{- $IssueList = append $IssueList $changeLink }}
|
|
||||||
{{- end }}
|
|
||||||
{{- /* check if this contributor has other changes associated with them already */}}
|
|
||||||
{{- if hasKey $contributorDict $author }}
|
|
||||||
{{- $contributionList := get $contributorDict $author }}
|
|
||||||
{{- $contributionList = concat $contributionList $IssueList }}
|
|
||||||
{{- $contributorDict := set $contributorDict $author $contributionList }}
|
|
||||||
{{- else }}
|
|
||||||
{{- $contributionList := $IssueList }}
|
|
||||||
{{- $contributorDict := set $contributorDict $author $contributionList }}
|
|
||||||
{{- end }}
|
|
||||||
{{- end}}
|
|
||||||
{{- end}}
|
{{- end}}
|
||||||
{{- end }}
|
{{- end }}
|
||||||
{{- /* no indentation here for formatting so the final markdown doesn't have unneeded indentations */}}
|
|
||||||
{{- if $contributorDict}}
|
|
||||||
### Contributors
|
|
||||||
{{- range $k,$v := $contributorDict }}
|
{{- range $k,$v := $contributorDict }}
|
||||||
- [@{{$k}}](https://github.com/{{$k}}) ({{ range $index, $element := $v }}{{if $index}}, {{end}}{{$element}}{{end}})
|
- [{{$k}}](https://github.com/{{$k}}) ({{ range $index, $element := $v }}{{if $index}}, {{end}}[#{{$element}}](https://github.com/dbt-labs/dbt-core/pull/{{$element}}){{end}})
|
||||||
{{- end }}
|
|
||||||
{{- end }}
|
{{- end }}
|
||||||
|
|||||||
14
.flake8
14
.flake8
@@ -1,14 +0,0 @@
|
|||||||
[flake8]
|
|
||||||
select =
|
|
||||||
E
|
|
||||||
W
|
|
||||||
F
|
|
||||||
ignore =
|
|
||||||
W503 # makes Flake8 work like black
|
|
||||||
W504
|
|
||||||
E203 # makes Flake8 work like black
|
|
||||||
E704 # makes Flake8 work like black
|
|
||||||
E741
|
|
||||||
E501 # long line checking is done in black
|
|
||||||
per-file-ignores =
|
|
||||||
*/__init__.py: F401
|
|
||||||
@@ -1,2 +0,0 @@
|
|||||||
# Reformatting dbt-core via black, flake8, mypy, and assorted pre-commit hooks.
|
|
||||||
43e3fc22c4eae4d3d901faba05e33c40f1f1dc5a
|
|
||||||
6
.gitattributes
vendored
6
.gitattributes
vendored
@@ -1,6 +0,0 @@
|
|||||||
core/dbt/task/docs/index.html binary
|
|
||||||
tests/functional/artifacts/data/state/*/manifest.json binary
|
|
||||||
core/dbt/docs/build/html/searchindex.js binary
|
|
||||||
core/dbt/docs/build/html/index.html binary
|
|
||||||
performance/runner/Cargo.lock binary
|
|
||||||
core/dbt/events/types_pb2.py binary
|
|
||||||
18
.github/CODEOWNERS
vendored
18
.github/CODEOWNERS
vendored
@@ -1,18 +0,0 @@
|
|||||||
# This file contains the code owners for the dbt-core repo.
|
|
||||||
# PRs will be automatically assigned for review to the associated
|
|
||||||
# team(s) or person(s) that touches any files that are mapped to them.
|
|
||||||
#
|
|
||||||
# A statement takes precedence over the statements above it so more general
|
|
||||||
# assignments are found at the top with specific assignments being lower in
|
|
||||||
# the ordering (i.e. catch all assignment should be the first item)
|
|
||||||
#
|
|
||||||
# Consult GitHub documentation for formatting guidelines:
|
|
||||||
# https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners#example-of-a-codeowners-file
|
|
||||||
|
|
||||||
# As a default for areas with no assignment,
|
|
||||||
# the core team as a whole will be assigned
|
|
||||||
* @dbt-labs/core-team
|
|
||||||
|
|
||||||
### ARTIFACTS
|
|
||||||
|
|
||||||
/schemas/dbt @dbt-labs/cloud-artifacts
|
|
||||||
32
.github/ISSUE_TEMPLATE/bug-report.yml
vendored
32
.github/ISSUE_TEMPLATE/bug-report.yml
vendored
@@ -9,33 +9,23 @@ body:
|
|||||||
Thanks for taking the time to fill out this bug report!
|
Thanks for taking the time to fill out this bug report!
|
||||||
- type: checkboxes
|
- type: checkboxes
|
||||||
attributes:
|
attributes:
|
||||||
label: Is this a new bug in dbt-core?
|
label: Is there an existing issue for this?
|
||||||
description: >
|
description: Please search to see if an issue already exists for the bug you encountered.
|
||||||
In other words, is this an error, flaw, failure or fault in our software?
|
|
||||||
|
|
||||||
If this is a bug that broke existing functionality that used to work, please open a regression issue.
|
|
||||||
If this is a bug in an adapter plugin, please open an issue in the adapter's repository.
|
|
||||||
If this is a bug experienced while using dbt Cloud, please report to [support](mailto:support@getdbt.com).
|
|
||||||
If this is a request for help or troubleshooting code in your own dbt project, please join our [dbt Community Slack](https://www.getdbt.com/community/join-the-community/) or open a [Discussion question](https://github.com/dbt-labs/docs.getdbt.com/discussions).
|
|
||||||
|
|
||||||
Please search to see if an issue already exists for the bug you encountered.
|
|
||||||
options:
|
options:
|
||||||
- label: I believe this is a new bug in dbt-core
|
- label: I have searched the existing issues
|
||||||
required: true
|
|
||||||
- label: I have searched the existing issues, and I could not find an existing issue for this bug
|
|
||||||
required: true
|
required: true
|
||||||
- type: textarea
|
- type: textarea
|
||||||
attributes:
|
attributes:
|
||||||
label: Current Behavior
|
label: Current Behavior
|
||||||
description: A concise description of what you're experiencing.
|
description: A concise description of what you're experiencing.
|
||||||
validations:
|
validations:
|
||||||
required: true
|
required: false
|
||||||
- type: textarea
|
- type: textarea
|
||||||
attributes:
|
attributes:
|
||||||
label: Expected Behavior
|
label: Expected Behavior
|
||||||
description: A concise description of what you expected to happen.
|
description: A concise description of what you expected to happen.
|
||||||
validations:
|
validations:
|
||||||
required: true
|
required: false
|
||||||
- type: textarea
|
- type: textarea
|
||||||
attributes:
|
attributes:
|
||||||
label: Steps To Reproduce
|
label: Steps To Reproduce
|
||||||
@@ -46,7 +36,7 @@ body:
|
|||||||
3. Run '...'
|
3. Run '...'
|
||||||
4. See error...
|
4. See error...
|
||||||
validations:
|
validations:
|
||||||
required: true
|
required: false
|
||||||
- type: textarea
|
- type: textarea
|
||||||
id: logs
|
id: logs
|
||||||
attributes:
|
attributes:
|
||||||
@@ -61,9 +51,9 @@ body:
|
|||||||
label: Environment
|
label: Environment
|
||||||
description: |
|
description: |
|
||||||
examples:
|
examples:
|
||||||
- **OS**: Ubuntu 24.04
|
- **OS**: Ubuntu 20.04
|
||||||
- **Python**: 3.10.12 (`python3 --version`)
|
- **Python**: 3.7.2 (`python --version`)
|
||||||
- **dbt-core**: 1.1.1 (`dbt --version`)
|
- **dbt**: 0.21.0 (`dbt --version`)
|
||||||
value: |
|
value: |
|
||||||
- OS:
|
- OS:
|
||||||
- Python:
|
- Python:
|
||||||
@@ -74,15 +64,13 @@ body:
|
|||||||
- type: dropdown
|
- type: dropdown
|
||||||
id: database
|
id: database
|
||||||
attributes:
|
attributes:
|
||||||
label: Which database adapter are you using with dbt?
|
label: What database are you using dbt with?
|
||||||
description: If the bug is specific to the database or adapter, please open the issue in that adapter's repository instead
|
|
||||||
multiple: true
|
multiple: true
|
||||||
options:
|
options:
|
||||||
- postgres
|
- postgres
|
||||||
- redshift
|
- redshift
|
||||||
- snowflake
|
- snowflake
|
||||||
- bigquery
|
- bigquery
|
||||||
- spark
|
|
||||||
- other (mention it in "Additional Context")
|
- other (mention it in "Additional Context")
|
||||||
validations:
|
validations:
|
||||||
required: false
|
required: false
|
||||||
|
|||||||
18
.github/ISSUE_TEMPLATE/code-docs.yml
vendored
18
.github/ISSUE_TEMPLATE/code-docs.yml
vendored
@@ -1,18 +0,0 @@
|
|||||||
name: 📄 Code docs
|
|
||||||
description: Report an issue for markdown files within this repo, such as README, ARCHITECTURE, etc.
|
|
||||||
title: "[Code docs] <title>"
|
|
||||||
labels: ["triage"]
|
|
||||||
body:
|
|
||||||
- type: markdown
|
|
||||||
attributes:
|
|
||||||
value: |
|
|
||||||
Thanks for taking the time to fill out this code docs issue!
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Please describe the issue and your proposals.
|
|
||||||
description: |
|
|
||||||
Links? References? Anything that will give us more context about the issue you are encountering!
|
|
||||||
|
|
||||||
Tip: You can attach images by clicking this area to highlight it and then dragging files in.
|
|
||||||
validations:
|
|
||||||
required: false
|
|
||||||
31
.github/ISSUE_TEMPLATE/config.yml
vendored
31
.github/ISSUE_TEMPLATE/config.yml
vendored
@@ -1,17 +1,16 @@
|
|||||||
blank_issues_enabled: false
|
|
||||||
contact_links:
|
contact_links:
|
||||||
- name: Documentation
|
- name: Create an issue for dbt-redshift
|
||||||
url: https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose
|
url: https://github.com/dbt-labs/dbt-redshift/issues/new/choose
|
||||||
about: Problems and issues with dbt product documentation hosted on docs.getdbt.com. Issues for markdown files within this repo, such as README, should be opened using the "Code docs" template.
|
about: Report a bug or request a feature for dbt-redshift
|
||||||
- name: Ask the community for help
|
- name: Create an issue for dbt-bigquery
|
||||||
url: https://github.com/dbt-labs/docs.getdbt.com/discussions
|
url: https://github.com/dbt-labs/dbt-bigquery/issues/new/choose
|
||||||
about: Need help troubleshooting? Check out our guide on how to ask
|
about: Report a bug or request a feature for dbt-bigquery
|
||||||
- name: Contact dbt Cloud support
|
- name: Create an issue for dbt-snowflake
|
||||||
url: mailto:support@getdbt.com
|
url: https://github.com/dbt-labs/dbt-snowflake/issues/new/choose
|
||||||
about: Are you using dbt Cloud? Contact our support team for help!
|
about: Report a bug or request a feature for dbt-snowflake
|
||||||
- name: Participate in Discussions
|
- name: Ask a question or get support
|
||||||
url: https://github.com/dbt-labs/dbt-core/discussions
|
url: https://docs.getdbt.com/docs/guides/getting-help
|
||||||
about: Do you have a Big Idea for dbt? Read open discussions, or start a new one
|
about: Ask a question or request support
|
||||||
- name: Create an issue for adapters
|
- name: Questions on Stack Overflow
|
||||||
url: https://github.com/dbt-labs/dbt-adapters/issues/new/choose
|
url: https://stackoverflow.com/questions/tagged/dbt
|
||||||
about: Report a bug or request a feature for an adapter
|
about: Look at questions/answers at Stack Overflow
|
||||||
|
|||||||
22
.github/ISSUE_TEMPLATE/feature-request.yml
vendored
22
.github/ISSUE_TEMPLATE/feature-request.yml
vendored
@@ -1,32 +1,22 @@
|
|||||||
name: ✨ Feature
|
name: ✨ Feature
|
||||||
description: Propose a straightforward extension of dbt functionality
|
description: Suggest an idea for dbt
|
||||||
title: "[Feature] <title>"
|
title: "[Feature] <title>"
|
||||||
labels: ["enhancement", "triage"]
|
labels: ["enhancement", "triage"]
|
||||||
body:
|
body:
|
||||||
- type: markdown
|
- type: markdown
|
||||||
attributes:
|
attributes:
|
||||||
value: |
|
value: |
|
||||||
Thanks for taking the time to fill out this feature request!
|
Thanks for taking the time to fill out this feature requests!
|
||||||
- type: checkboxes
|
- type: checkboxes
|
||||||
attributes:
|
attributes:
|
||||||
label: Is this your first time submitting a feature request?
|
label: Is there an existing feature request for this?
|
||||||
description: >
|
description: Please search to see if an issue already exists for the feature you would like.
|
||||||
We want to make sure that features are distinct and discoverable,
|
|
||||||
so that other members of the community can find them and offer their thoughts.
|
|
||||||
|
|
||||||
Issues are the right place to request straightforward extensions of existing dbt functionality.
|
|
||||||
For "big ideas" about future capabilities of dbt, we ask that you open a
|
|
||||||
[discussion](https://github.com/dbt-labs/dbt-core/discussions) in the "Ideas" category instead.
|
|
||||||
options:
|
options:
|
||||||
- label: I have read the [expectations for open source contributors](https://docs.getdbt.com/docs/contributing/oss-expectations)
|
- label: I have searched the existing issues
|
||||||
required: true
|
|
||||||
- label: I have searched the existing issues, and I could not find an existing issue for this feature
|
|
||||||
required: true
|
|
||||||
- label: I am requesting a straightforward extension of existing dbt functionality, rather than a Big Idea better suited to a discussion
|
|
||||||
required: true
|
required: true
|
||||||
- type: textarea
|
- type: textarea
|
||||||
attributes:
|
attributes:
|
||||||
label: Describe the feature
|
label: Describe the Feature
|
||||||
description: A clear and concise description of what you want to happen.
|
description: A clear and concise description of what you want to happen.
|
||||||
validations:
|
validations:
|
||||||
required: true
|
required: true
|
||||||
|
|||||||
67
.github/ISSUE_TEMPLATE/implementation-ticket.yml
vendored
67
.github/ISSUE_TEMPLATE/implementation-ticket.yml
vendored
@@ -1,67 +0,0 @@
|
|||||||
name: 🛠️ Implementation
|
|
||||||
description: This is an implementation ticket intended for use by the maintainers of dbt-core
|
|
||||||
title: "[<project>] <title>"
|
|
||||||
labels: ["user docs"]
|
|
||||||
body:
|
|
||||||
- type: markdown
|
|
||||||
attributes:
|
|
||||||
value: This is an implementation ticket intended for use by the maintainers of dbt-core
|
|
||||||
- type: checkboxes
|
|
||||||
attributes:
|
|
||||||
label: Housekeeping
|
|
||||||
description: >
|
|
||||||
A couple friendly reminders:
|
|
||||||
1. Remove the `user docs` label if the scope of this work does not require changes to https://docs.getdbt.com/docs: no end-user interface (e.g. yml spec, CLI, error messages, etc) or functional changes
|
|
||||||
2. Link any blocking issues in the "Blocked on" field under the "Core devs & maintainers" project.
|
|
||||||
options:
|
|
||||||
- label: I am a maintainer of dbt-core
|
|
||||||
required: true
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Short description
|
|
||||||
description: |
|
|
||||||
Describe the scope of the ticket, a high-level implementation approach and any tradeoffs to consider
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Acceptance criteria
|
|
||||||
description: |
|
|
||||||
What is the definition of done for this ticket? Include any relevant edge cases and/or test cases
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Suggested Tests
|
|
||||||
description: |
|
|
||||||
Provide scenarios to test. Link to existing similar tests if appropriate.
|
|
||||||
placeholder: |
|
|
||||||
1. Test with no version specified in the schema file and use selection logic on a versioned model for a specific version. Expect pass.
|
|
||||||
2. Test with a version specified in the schema file that is no valid. Expect ParsingError.
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Impact to Other Teams
|
|
||||||
description: |
|
|
||||||
Will this change impact other teams? Include details of the kinds of changes required (new tests, code changes, related tickets) and _add the relevant `Impact:[team]` label_.
|
|
||||||
placeholder: |
|
|
||||||
Example: This change impacts `dbt-redshift` because the tests will need to be modified. The `Impact:[Adapter]` label has been added.
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Will backports be required?
|
|
||||||
description: |
|
|
||||||
Will this change need to be backported to previous versions? Add details, possible blockers to backporting and _add the relevant backport labels `backport 1.x.latest`_
|
|
||||||
placeholder: |
|
|
||||||
Example: Backport to 1.6.latest, 1.5.latest and 1.4.latest. Since 1.4 isn't using click, the backport may be complicated. The `backport 1.6.latest`, `backport 1.5.latest` and `backport 1.4.latest` labels have been added.
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Context
|
|
||||||
description: |
|
|
||||||
Provide the "why", motivation, and alternative approaches considered -- linking to previous refinement issues, spikes and documentation as appropriate
|
|
||||||
validations:
|
|
||||||
required: false
|
|
||||||
93
.github/ISSUE_TEMPLATE/regression-report.yml
vendored
93
.github/ISSUE_TEMPLATE/regression-report.yml
vendored
@@ -1,93 +0,0 @@
|
|||||||
name: ☣️ Regression
|
|
||||||
description: Report a regression you've observed in a newer version of dbt
|
|
||||||
title: "[Regression] <title>"
|
|
||||||
labels: ["bug", "regression", "triage"]
|
|
||||||
body:
|
|
||||||
- type: markdown
|
|
||||||
attributes:
|
|
||||||
value: |
|
|
||||||
Thanks for taking the time to fill out this regression report!
|
|
||||||
- type: checkboxes
|
|
||||||
attributes:
|
|
||||||
label: Is this a regression in a recent version of dbt-core?
|
|
||||||
description: >
|
|
||||||
A regression is when documented functionality works as expected in an older version of dbt-core,
|
|
||||||
and no longer works after upgrading to a newer version of dbt-core
|
|
||||||
options:
|
|
||||||
- label: I believe this is a regression in dbt-core functionality
|
|
||||||
required: true
|
|
||||||
- label: I have searched the existing issues, and I could not find an existing issue for this regression
|
|
||||||
required: true
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Current Behavior
|
|
||||||
description: A concise description of what you're experiencing.
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Expected/Previous Behavior
|
|
||||||
description: A concise description of what you expected to happen.
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Steps To Reproduce
|
|
||||||
description: Steps to reproduce the behavior.
|
|
||||||
placeholder: |
|
|
||||||
1. In this environment...
|
|
||||||
2. With this config...
|
|
||||||
3. Run '...'
|
|
||||||
4. See error...
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: textarea
|
|
||||||
id: logs
|
|
||||||
attributes:
|
|
||||||
label: Relevant log output
|
|
||||||
description: |
|
|
||||||
If applicable, log output to help explain your problem.
|
|
||||||
render: shell
|
|
||||||
validations:
|
|
||||||
required: false
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Environment
|
|
||||||
description: |
|
|
||||||
examples:
|
|
||||||
- **OS**: Ubuntu 24.04
|
|
||||||
- **Python**: 3.10.12 (`python3 --version`)
|
|
||||||
- **dbt-core (working version)**: 1.1.1 (`dbt --version`)
|
|
||||||
- **dbt-core (regression version)**: 1.2.0 (`dbt --version`)
|
|
||||||
value: |
|
|
||||||
- OS:
|
|
||||||
- Python:
|
|
||||||
- dbt (working version):
|
|
||||||
- dbt (regression version):
|
|
||||||
render: markdown
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: dropdown
|
|
||||||
id: database
|
|
||||||
attributes:
|
|
||||||
label: Which database adapter are you using with dbt?
|
|
||||||
description: If the regression is specific to the database or adapter, please open the issue in that adapter's repository instead
|
|
||||||
multiple: true
|
|
||||||
options:
|
|
||||||
- postgres
|
|
||||||
- redshift
|
|
||||||
- snowflake
|
|
||||||
- bigquery
|
|
||||||
- spark
|
|
||||||
- other (mention it in "Additional Context")
|
|
||||||
validations:
|
|
||||||
required: false
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Additional Context
|
|
||||||
description: |
|
|
||||||
Links? References? Anything that will give us more context about the issue you are encountering!
|
|
||||||
|
|
||||||
Tip: You can attach images or log files by clicking this area to highlight it and then dragging files in.
|
|
||||||
validations:
|
|
||||||
required: false
|
|
||||||
223
.github/_README.md
vendored
223
.github/_README.md
vendored
@@ -1,223 +0,0 @@
|
|||||||
<!-- GitHub will publish this readme on the main repo page if the name is `README.md` so we've added the leading underscore to prevent this -->
|
|
||||||
<!-- Do not rename this file `README.md` -->
|
|
||||||
<!-- See https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-readmes -->
|
|
||||||
|
|
||||||
## What are GitHub Actions?
|
|
||||||
|
|
||||||
GitHub Actions are used for many different purposes. We use them to run tests in CI, validate PRs are in an expected state, and automate processes.
|
|
||||||
|
|
||||||
- [Overview of GitHub Actions](https://docs.github.com/en/actions/learn-github-actions/understanding-github-actions)
|
|
||||||
- [What's a workflow?](https://docs.github.com/en/actions/using-workflows/about-workflows)
|
|
||||||
- [GitHub Actions guides](https://docs.github.com/en/actions/guides)
|
|
||||||
|
|
||||||
___
|
|
||||||
|
|
||||||
## Where do actions and workflows live
|
|
||||||
|
|
||||||
We try to maintain actions that are shared across repositories in a single place so that necesary changes can be made in a single place.
|
|
||||||
|
|
||||||
[dbt-labs/actions](https://github.com/dbt-labs/actions/) is the central repository of actions and workflows we use across repositories.
|
|
||||||
|
|
||||||
GitHub Actions also live locally within a repository. The workflows can be found at `.github/workflows` from the root of the repository. These should be specific to that code base.
|
|
||||||
|
|
||||||
Note: We are actively moving actions into the central Action repository so there is currently some duplication across repositories.
|
|
||||||
|
|
||||||
___
|
|
||||||
|
|
||||||
## Basics of Using Actions
|
|
||||||
|
|
||||||
### Viewing Output
|
|
||||||
|
|
||||||
- View the detailed action output for your PR in the **Checks** tab of the PR. This only shows the most recent run. You can also view high level **Checks** output at the bottom on the PR.
|
|
||||||
|
|
||||||
- View _all_ action output for a repository from the [**Actions**](https://github.com/dbt-labs/dbt-core/actions) tab. Workflow results last 1 year. Artifacts last 90 days, unless specified otherwise in individual workflows.
|
|
||||||
|
|
||||||
This view often shows what seem like duplicates of the same workflow. This occurs when files are renamed but the workflow name has not changed. These are in fact _not_ duplicates.
|
|
||||||
|
|
||||||
You can see the branch the workflow runs from in this view. It is listed in the table between the workflow name and the time/duration of the run. When blank, the workflow is running in the context of the `main` branch.
|
|
||||||
|
|
||||||
### How to view what workflow file is being referenced from a run
|
|
||||||
|
|
||||||
- When viewing the output of a specific workflow run, click the 3 dots at the top right of the display. There will be an option to `View workflow file`.
|
|
||||||
|
|
||||||
### How to manually run a workflow
|
|
||||||
|
|
||||||
- If a workflow has the `on: workflow_dispatch` trigger, it can be manually triggered
|
|
||||||
- From the [**Actions**](https://github.com/dbt-labs/dbt-core/actions) tab, find the workflow you want to run, select it and fill in any inputs requied. That's it!
|
|
||||||
|
|
||||||
### How to re-run jobs
|
|
||||||
|
|
||||||
- From the UI you can rerun from failure
|
|
||||||
- You can retrigger the cla check by commenting on the PR with `@cla-bot check`
|
|
||||||
|
|
||||||
___
|
|
||||||
|
|
||||||
## General Standards
|
|
||||||
|
|
||||||
### Permissions
|
|
||||||
- By default, workflows have read permissions in the repository for the contents scope only when no permissions are explicitly set.
|
|
||||||
- It is best practice to always define the permissions explicitly. This will allow actions to continue to work when the default permissions on the repository are changed. It also allows explicit grants of the least permissions possible.
|
|
||||||
- There are a lot of permissions available. [Read up on them](https://docs.github.com/en/actions/using-jobs/assigning-permissions-to-jobs) if you're unsure what to use.
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
pull-requests: write
|
|
||||||
```
|
|
||||||
|
|
||||||
### Secrets
|
|
||||||
- When to use a [Personal Access Token (PAT)](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token) vs the [GITHUB_TOKEN](https://docs.github.com/en/actions/security-guides/automatic-token-authentication) generated for the action?
|
|
||||||
|
|
||||||
The `GITHUB_TOKEN` is used by default. In most cases it is sufficient for what you need.
|
|
||||||
|
|
||||||
If you expect the workflow to result in a commit to that should retrigger workflows, you will need to use a Personal Access Token for the bot to commit the file. When using the GITHUB_TOKEN, the resulting commit will not trigger another GitHub Actions Workflow run. This is due to limitations set by GitHub. See [the docs](https://docs.github.com/en/actions/security-guides/automatic-token-authentication#using-the-github_token-in-a-workflow) for a more detailed explanation.
|
|
||||||
|
|
||||||
For example, we must use a PAT in our workflow to commit a new changelog yaml file for bot PRs. Once the file has been committed to the branch, it should retrigger the check to validate that a changelog exists on the PR. Otherwise, it would stay in a failed state since the check would never retrigger.
|
|
||||||
|
|
||||||
### Triggers
|
|
||||||
You can configure your workflows to run when specific activity on GitHub happens, at a scheduled time, or when an event outside of GitHub occurs. Read more details in the [GitHub docs](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows).
|
|
||||||
|
|
||||||
These triggers are under the `on` key of the workflow and more than one can be listed.
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- "main"
|
|
||||||
- "*.latest"
|
|
||||||
- "releases/*"
|
|
||||||
pull_request:
|
|
||||||
# catch when the PR is opened with the label or when the label is added
|
|
||||||
types: [opened, labeled]
|
|
||||||
workflow_dispatch:
|
|
||||||
```
|
|
||||||
|
|
||||||
Some triggers of note that we use:
|
|
||||||
|
|
||||||
- `push` - Runs your workflow when you push a commit or tag.
|
|
||||||
- `pull_request` - Runs your workflow when activity on a pull request in the workflow's repository occurs. Takes in a list of activity types (opened, labeled, etc) if appropriate.
|
|
||||||
- `pull_request_target` - Same as `pull_request` but runs in the context of the PR target branch.
|
|
||||||
- `workflow_call` - used with reusable workflows. Triggered by another workflow calling it.
|
|
||||||
- `workflow_dispatch` - Gives the ability to manually trigger a workflow from the GitHub API, GitHub CLI, or GitHub browser interface.
|
|
||||||
|
|
||||||
|
|
||||||
### Basic Formatting
|
|
||||||
- Add a description of what your workflow does at the top in this format
|
|
||||||
|
|
||||||
```
|
|
||||||
# **what?**
|
|
||||||
# Describe what the action does.
|
|
||||||
|
|
||||||
# **why?**
|
|
||||||
# Why does this action exist?
|
|
||||||
|
|
||||||
# **when?**
|
|
||||||
# How/when will it be triggered?
|
|
||||||
```
|
|
||||||
|
|
||||||
- Leave blank lines between steps and jobs
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
jobs:
|
|
||||||
dependency_changelog:
|
|
||||||
runs-on: ${{ vars.UBUNTU_LATEST }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Get File Name Timestamp
|
|
||||||
id: filename_time
|
|
||||||
uses: nanzm/get-time-action@v1.1
|
|
||||||
with:
|
|
||||||
format: 'YYYYMMDD-HHmmss'
|
|
||||||
|
|
||||||
- name: Get File Content Timestamp
|
|
||||||
id: file_content_time
|
|
||||||
uses: nanzm/get-time-action@v1.1
|
|
||||||
with:
|
|
||||||
format: 'YYYY-MM-DDTHH:mm:ss.000000-05:00'
|
|
||||||
|
|
||||||
- name: Generate Filepath
|
|
||||||
id: fp
|
|
||||||
run: |
|
|
||||||
FILEPATH=.changes/unreleased/Dependencies-${{ steps.filename_time.outputs.time }}.yaml
|
|
||||||
echo "FILEPATH=$FILEPATH" >> $GITHUB_OUTPUT
|
|
||||||
```
|
|
||||||
|
|
||||||
- Print out all variables you will reference as the first step of a job. This allows for easier debugging. The first job should log all inputs. Subsequent jobs should reference outputs of other jobs, if present.
|
|
||||||
|
|
||||||
When possible, generate variables at the top of your workflow in a single place to reference later. This is not always strictly possible since you may generate a value to be used later mid-workflow.
|
|
||||||
|
|
||||||
Be sure to use quotes around these logs so special characters are not interpreted.
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
job1:
|
|
||||||
- name: "[DEBUG] Print Variables"
|
|
||||||
run: |
|
|
||||||
echo "all variables defined as inputs"
|
|
||||||
echo "The last commit sha in the release: ${{ inputs.sha }}"
|
|
||||||
echo "The release version number: ${{ inputs.version_number }}"
|
|
||||||
echo "The changelog_path: ${{ inputs.changelog_path }}"
|
|
||||||
echo "The build_script_path: ${{ inputs.build_script_path }}"
|
|
||||||
echo "The s3_bucket_name: ${{ inputs.s3_bucket_name }}"
|
|
||||||
echo "The package_test_command: ${{ inputs.package_test_command }}"
|
|
||||||
|
|
||||||
# collect all the variables that need to be used in subsequent jobs
|
|
||||||
- name: Set Variables
|
|
||||||
id: variables
|
|
||||||
run: |
|
|
||||||
echo "important_path='performance/runner/Cargo.toml'" >> $GITHUB_OUTPUT
|
|
||||||
echo "release_id=${{github.event.inputs.release_id}}" >> $GITHUB_OUTPUT
|
|
||||||
echo "open_prs=${{github.event.inputs.open_prs}}" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
job2:
|
|
||||||
needs: [job1]
|
|
||||||
- name: "[DEBUG] Print Variables"
|
|
||||||
run: |
|
|
||||||
echo "all variables defined in job1 > Set Variables > outputs"
|
|
||||||
echo "important_path: ${{ needs.job1.outputs.important_path }}"
|
|
||||||
echo "release_id: ${{ needs.job1.outputs.release_id }}"
|
|
||||||
echo "open_prs: ${{ needs.job1.outputs.open_prs }}"
|
|
||||||
```
|
|
||||||
|
|
||||||
- When it's not obvious what something does, add a comment!
|
|
||||||
|
|
||||||
___
|
|
||||||
|
|
||||||
## Tips
|
|
||||||
|
|
||||||
### Context
|
|
||||||
- The [GitHub CLI](https://cli.github.com/) is available in the default runners
|
|
||||||
- Actions run in your context. ie, using an action from the marketplace that uses the GITHUB_TOKEN uses the GITHUB_TOKEN generated by your workflow run.
|
|
||||||
|
|
||||||
### Runners
|
|
||||||
- We dynamically set runners based on repository vars. Admins can view repository vars and reset them. Current values are the following but are subject to change:
|
|
||||||
- `vars.UBUNTU_LATEST` -> `ubuntu-latest`
|
|
||||||
- `vars.WINDOWS_LATEST` -> `windows-latest`
|
|
||||||
- `vars.MACOS_LATEST` -> `macos-14`
|
|
||||||
|
|
||||||
### Actions from the Marketplace
|
|
||||||
- Don’t use external actions for things that can easily be accomplished manually.
|
|
||||||
- Always read through what an external action does before using it! Often an action in the GitHub Actions Marketplace can be replaced with a few lines in bash. This is much more maintainable (and won’t change under us) and clear as to what’s actually happening. It also prevents any
|
|
||||||
- Pin actions _we don't control_ to tags.
|
|
||||||
|
|
||||||
### Connecting to AWS
|
|
||||||
- Authenticate with the aws managed workflow
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
- name: Configure AWS credentials from Test account
|
|
||||||
uses: aws-actions/configure-aws-credentials@v2
|
|
||||||
with:
|
|
||||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
|
||||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
|
||||||
aws-region: us-east-1
|
|
||||||
```
|
|
||||||
|
|
||||||
- Then access with the aws command that comes installed on the action runner machines
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
- name: Copy Artifacts from S3 via CLI
|
|
||||||
run: aws s3 cp ${{ env.s3_bucket }} . --recursive
|
|
||||||
```
|
|
||||||
|
|
||||||
### Testing
|
|
||||||
|
|
||||||
- Depending on what your action does, you may be able to use [`act`](https://github.com/nektos/act) to test the action locally. Some features of GitHub Actions do not work with `act`, among those are reusable workflows. If you can't use `act`, you'll have to push your changes up before being able to test. This can be slow.
|
|
||||||
14
.github/actions/latest-wrangler/Dockerfile
vendored
14
.github/actions/latest-wrangler/Dockerfile
vendored
@@ -1,14 +0,0 @@
|
|||||||
FROM python:3-slim AS builder
|
|
||||||
ADD . /app
|
|
||||||
WORKDIR /app
|
|
||||||
|
|
||||||
# We are installing a dependency here directly into our app source dir
|
|
||||||
RUN pip install --target=/app requests packaging
|
|
||||||
|
|
||||||
# A distroless container image with Python and some basics like SSL certificates
|
|
||||||
# https://github.com/GoogleContainerTools/distroless
|
|
||||||
FROM gcr.io/distroless/python3-debian10
|
|
||||||
COPY --from=builder /app /app
|
|
||||||
WORKDIR /app
|
|
||||||
ENV PYTHONPATH /app
|
|
||||||
CMD ["/app/main.py"]
|
|
||||||
50
.github/actions/latest-wrangler/README.md
vendored
50
.github/actions/latest-wrangler/README.md
vendored
@@ -1,50 +0,0 @@
|
|||||||
# Github package 'latest' tag wrangler for containers
|
|
||||||
## Usage
|
|
||||||
|
|
||||||
Plug in the necessary inputs to determine if the container being built should be tagged 'latest; at the package level, for example `dbt-redshift:latest`.
|
|
||||||
|
|
||||||
## Inputs
|
|
||||||
| Input | Description |
|
|
||||||
| - | - |
|
|
||||||
| `package` | Name of the GH package to check against |
|
|
||||||
| `new_version` | Semver of new container |
|
|
||||||
| `gh_token` | GH token with package read scope|
|
|
||||||
| `halt_on_missing` | Return non-zero exit code if requested package does not exist. (defaults to false)|
|
|
||||||
|
|
||||||
|
|
||||||
## Outputs
|
|
||||||
| Output | Description |
|
|
||||||
| - | - |
|
|
||||||
| `latest` | Wether or not the new container should be tagged 'latest'|
|
|
||||||
| `minor_latest` | Wether or not the new container should be tagged major.minor.latest |
|
|
||||||
|
|
||||||
## Example workflow
|
|
||||||
```yaml
|
|
||||||
name: Ship it!
|
|
||||||
on:
|
|
||||||
workflow_dispatch:
|
|
||||||
inputs:
|
|
||||||
package:
|
|
||||||
description: The package to publish
|
|
||||||
required: true
|
|
||||||
version_number:
|
|
||||||
description: The version number
|
|
||||||
required: true
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build:
|
|
||||||
runs-on: ${{ vars.UBUNTU_LATEST }}
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
- name: Wrangle latest tag
|
|
||||||
id: is_latest
|
|
||||||
uses: ./.github/actions/latest-wrangler
|
|
||||||
with:
|
|
||||||
package: ${{ github.event.inputs.package }}
|
|
||||||
new_version: ${{ github.event.inputs.new_version }}
|
|
||||||
gh_token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
- name: Print the results
|
|
||||||
run: |
|
|
||||||
echo "Is it latest? Survey says: ${{ steps.is_latest.outputs.latest }} !"
|
|
||||||
echo "Is it minor.latest? Survey says: ${{ steps.is_latest.outputs.minor_latest }} !"
|
|
||||||
```
|
|
||||||
21
.github/actions/latest-wrangler/action.yml
vendored
21
.github/actions/latest-wrangler/action.yml
vendored
@@ -1,21 +0,0 @@
|
|||||||
name: "GitHub package `latest` tag wrangler for containers"
|
|
||||||
description: "Determines if the published image should include `latest` tags"
|
|
||||||
|
|
||||||
inputs:
|
|
||||||
package_name:
|
|
||||||
description: "Package being published (i.e. `dbt-core`, `dbt-redshift`, etc.)"
|
|
||||||
required: true
|
|
||||||
new_version:
|
|
||||||
description: "SemVer of the package being published (i.e. 1.7.2, 1.8.0a1, etc.)"
|
|
||||||
required: true
|
|
||||||
github_token:
|
|
||||||
description: "Auth token for GitHub (must have view packages scope)"
|
|
||||||
required: true
|
|
||||||
|
|
||||||
outputs:
|
|
||||||
tags:
|
|
||||||
description: "A list of tags to associate with this version"
|
|
||||||
|
|
||||||
runs:
|
|
||||||
using: "docker"
|
|
||||||
image: "Dockerfile"
|
|
||||||
@@ -1,26 +0,0 @@
|
|||||||
name: Ship it!
|
|
||||||
on:
|
|
||||||
workflow_dispatch:
|
|
||||||
inputs:
|
|
||||||
package:
|
|
||||||
description: The package to publish
|
|
||||||
required: true
|
|
||||||
version_number:
|
|
||||||
description: The version number
|
|
||||||
required: true
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build:
|
|
||||||
runs-on: ${{ vars.UBUNTU_LATEST }}
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
- name: Wrangle latest tag
|
|
||||||
id: is_latest
|
|
||||||
uses: ./.github/actions/latest-wrangler
|
|
||||||
with:
|
|
||||||
package: ${{ github.event.inputs.package }}
|
|
||||||
new_version: ${{ github.event.inputs.new_version }}
|
|
||||||
gh_token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
- name: Print the results
|
|
||||||
run: |
|
|
||||||
echo "Is it latest? Survey says: ${{ steps.is_latest.outputs.latest }} !"
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
{
|
|
||||||
"inputs": {
|
|
||||||
"version_number": "1.0.1",
|
|
||||||
"package": "dbt-redshift"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
72
.github/actions/latest-wrangler/main.py
vendored
72
.github/actions/latest-wrangler/main.py
vendored
@@ -1,72 +0,0 @@
|
|||||||
import os
|
|
||||||
import sys
|
|
||||||
from typing import List
|
|
||||||
|
|
||||||
import requests
|
|
||||||
from packaging.version import Version, parse
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
package_name: str = os.environ["INPUT_PACKAGE_NAME"]
|
|
||||||
new_version: Version = parse(os.environ["INPUT_NEW_VERSION"])
|
|
||||||
github_token: str = os.environ["INPUT_GITHUB_TOKEN"]
|
|
||||||
|
|
||||||
response = _package_metadata(package_name, github_token)
|
|
||||||
published_versions = _published_versions(response)
|
|
||||||
new_version_tags = _new_version_tags(new_version, published_versions)
|
|
||||||
_register_tags(new_version_tags, package_name)
|
|
||||||
|
|
||||||
|
|
||||||
def _package_metadata(package_name: str, github_token: str) -> requests.Response:
|
|
||||||
url = f"https://api.github.com/orgs/dbt-labs/packages/container/{package_name}/versions"
|
|
||||||
return requests.get(url, auth=("", github_token))
|
|
||||||
|
|
||||||
|
|
||||||
def _published_versions(response: requests.Response) -> List[Version]:
|
|
||||||
package_metadata = response.json()
|
|
||||||
return [
|
|
||||||
parse(tag)
|
|
||||||
for version in package_metadata
|
|
||||||
for tag in version["metadata"]["container"]["tags"]
|
|
||||||
if "latest" not in tag
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
def _new_version_tags(new_version: Version, published_versions: List[Version]) -> List[str]:
|
|
||||||
# the package version is always a tag
|
|
||||||
tags = [str(new_version)]
|
|
||||||
|
|
||||||
# pre-releases don't get tagged with `latest`
|
|
||||||
if new_version.is_prerelease:
|
|
||||||
return tags
|
|
||||||
|
|
||||||
if new_version > max(published_versions):
|
|
||||||
tags.append("latest")
|
|
||||||
|
|
||||||
published_patches = [
|
|
||||||
version
|
|
||||||
for version in published_versions
|
|
||||||
if version.major == new_version.major and version.minor == new_version.minor
|
|
||||||
]
|
|
||||||
if new_version > max(published_patches):
|
|
||||||
tags.append(f"{new_version.major}.{new_version.minor}.latest")
|
|
||||||
|
|
||||||
return tags
|
|
||||||
|
|
||||||
|
|
||||||
def _register_tags(tags: List[str], package_name: str) -> None:
|
|
||||||
fully_qualified_tags = ",".join([f"ghcr.io/dbt-labs/{package_name}:{tag}" for tag in tags])
|
|
||||||
github_output = os.environ.get("GITHUB_OUTPUT")
|
|
||||||
with open(github_output, "at", encoding="utf-8") as gh_output:
|
|
||||||
gh_output.write(f"fully_qualified_tags={fully_qualified_tags}")
|
|
||||||
|
|
||||||
|
|
||||||
def _validate_response(response: requests.Response) -> None:
|
|
||||||
message = response["message"]
|
|
||||||
if response.status_code != 200:
|
|
||||||
print(f"Call to GitHub API failed: {response.status_code} - {message}")
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
|
|
||||||
10
.github/actions/setup-postgres-linux/action.yml
vendored
Normal file
10
.github/actions/setup-postgres-linux/action.yml
vendored
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
name: "Set up postgres (linux)"
|
||||||
|
description: "Set up postgres service on linux vm for dbt integration tests"
|
||||||
|
runs:
|
||||||
|
using: "composite"
|
||||||
|
steps:
|
||||||
|
- shell: bash
|
||||||
|
run: |
|
||||||
|
sudo systemctl start postgresql.service
|
||||||
|
pg_isready
|
||||||
|
sudo -u postgres bash ${{ github.action_path }}/setup_db.sh
|
||||||
1
.github/actions/setup-postgres-linux/setup_db.sh
vendored
Symbolic link
1
.github/actions/setup-postgres-linux/setup_db.sh
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../../../test/setup_db.sh
|
||||||
24
.github/actions/setup-postgres-macos/action.yml
vendored
Normal file
24
.github/actions/setup-postgres-macos/action.yml
vendored
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
name: "Set up postgres (macos)"
|
||||||
|
description: "Set up postgres service on macos vm for dbt integration tests"
|
||||||
|
runs:
|
||||||
|
using: "composite"
|
||||||
|
steps:
|
||||||
|
- shell: bash
|
||||||
|
run: |
|
||||||
|
brew services start postgresql
|
||||||
|
echo "Check PostgreSQL service is running"
|
||||||
|
i=10
|
||||||
|
COMMAND='pg_isready'
|
||||||
|
while [ $i -gt -1 ]; do
|
||||||
|
if [ $i == 0 ]; then
|
||||||
|
echo "PostgreSQL service not ready, all attempts exhausted"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
echo "Check PostgreSQL service status"
|
||||||
|
eval $COMMAND && break
|
||||||
|
echo "PostgreSQL service not ready, wait 10 more sec, attempts left: $i"
|
||||||
|
sleep 10
|
||||||
|
((i--))
|
||||||
|
done
|
||||||
|
createuser -s postgres
|
||||||
|
bash ${{ github.action_path }}/setup_db.sh
|
||||||
1
.github/actions/setup-postgres-macos/setup_db.sh
vendored
Symbolic link
1
.github/actions/setup-postgres-macos/setup_db.sh
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../../../test/setup_db.sh
|
||||||
@@ -5,22 +5,8 @@ runs:
|
|||||||
steps:
|
steps:
|
||||||
- shell: pwsh
|
- shell: pwsh
|
||||||
run: |
|
run: |
|
||||||
Write-Host -Object "Installing PostgreSQL 16 as windows service..."
|
$pgService = Get-Service -Name postgresql*
|
||||||
$installerArgs = @("--install_runtimes 0", "--superpassword root", "--enable_acledit 1", "--unattendedmodeui none", "--mode unattended")
|
|
||||||
$filePath = Invoke-DownloadWithRetry -Url "https://get.enterprisedb.com/postgresql/postgresql-16.1-1-windows-x64.exe" -Path "$env:PGROOT/postgresql-16.1-1-windows-x64.exe"
|
|
||||||
Start-Process -FilePath $filePath -ArgumentList $installerArgs -Wait -PassThru
|
|
||||||
|
|
||||||
Write-Host -Object "Validating PostgreSQL 16 Install..."
|
|
||||||
Get-Service -Name postgresql*
|
|
||||||
$pgReady = Start-Process -FilePath "$env:PGBIN\pg_isready" -Wait -PassThru
|
|
||||||
$exitCode = $pgReady.ExitCode
|
|
||||||
if ($exitCode -ne 0) {
|
|
||||||
Write-Host -Object "PostgreSQL is not ready. Exitcode: $exitCode"
|
|
||||||
exit $exitCode
|
|
||||||
}
|
|
||||||
|
|
||||||
Write-Host -Object "Starting PostgreSQL 16 Service..."
|
|
||||||
$pgService = Get-Service -Name postgresql-x64-16
|
|
||||||
Set-Service -InputObject $pgService -Status running -StartupType automatic
|
Set-Service -InputObject $pgService -Status running -StartupType automatic
|
||||||
|
Start-Process -FilePath "$env:PGBIN\pg_isready" -Wait -PassThru
|
||||||
$env:Path += ";$env:PGBIN"
|
$env:Path += ";$env:PGBIN"
|
||||||
bash ${{ github.action_path }}/setup_db.sh
|
bash ${{ github.action_path }}/setup_db.sh
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
../../../scripts/setup_db.sh
|
../../../test/setup_db.sh
|
||||||
169
.github/dbt-postgres-testing.yml
vendored
169
.github/dbt-postgres-testing.yml
vendored
@@ -1,169 +0,0 @@
|
|||||||
# **what?**
|
|
||||||
# Runs all tests in dbt-postgres with this branch of dbt-core to ensure nothing is broken
|
|
||||||
|
|
||||||
# **why?**
|
|
||||||
# Ensure dbt-core changes do not break dbt-postgres, as a basic proxy for other adapters
|
|
||||||
|
|
||||||
# **when?**
|
|
||||||
# This will run when trying to merge a PR into main.
|
|
||||||
# It can also be manually triggered.
|
|
||||||
|
|
||||||
# This workflow can be skipped by adding the "Skip Postgres Testing" label to the PR. This is
|
|
||||||
# useful when making a change in both `dbt-postgres` and `dbt-core` where the changes are dependant
|
|
||||||
# and cause the other repository to break.
|
|
||||||
|
|
||||||
name: "dbt-postgres Tests"
|
|
||||||
run-name: >-
|
|
||||||
${{ (github.event_name == 'workflow_dispatch' || github.event_name == 'workflow_call')
|
|
||||||
&& format('dbt-postgres@{0} with dbt-core@{1}', inputs.dbt-postgres-ref, inputs.dbt-core-ref)
|
|
||||||
|| 'dbt-postgres@main with dbt-core branch' }}
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- "main"
|
|
||||||
- "*.latest"
|
|
||||||
- "releases/*"
|
|
||||||
pull_request:
|
|
||||||
merge_group:
|
|
||||||
types: [checks_requested]
|
|
||||||
workflow_dispatch:
|
|
||||||
inputs:
|
|
||||||
dbt-postgres-ref:
|
|
||||||
description: "The branch of dbt-postgres to test against"
|
|
||||||
default: "main"
|
|
||||||
dbt-core-ref:
|
|
||||||
description: "The branch of dbt-core to test against"
|
|
||||||
default: "main"
|
|
||||||
workflow_call:
|
|
||||||
inputs:
|
|
||||||
dbt-postgres-ref:
|
|
||||||
description: "The branch of dbt-postgres to test against"
|
|
||||||
type: string
|
|
||||||
required: true
|
|
||||||
default: "main"
|
|
||||||
dbt-core-ref:
|
|
||||||
description: "The branch of dbt-core to test against"
|
|
||||||
type: string
|
|
||||||
required: true
|
|
||||||
default: "main"
|
|
||||||
|
|
||||||
permissions: read-all
|
|
||||||
|
|
||||||
# will cancel previous workflows triggered by the same event
|
|
||||||
# and for the same ref for PRs/merges or same SHA otherwise
|
|
||||||
# and for the same inputs on workflow_dispatch or workflow_call
|
|
||||||
concurrency:
|
|
||||||
group: ${{ github.workflow }}-${{ github.event_name }}-${{ contains(fromJson('["pull_request", "merge_group"]'), github.event_name) && github.event.pull_request.head.ref || github.sha }}-${{ contains(fromJson('["workflow_call", "workflow_dispatch"]'), github.event_name) && github.event.inputs.dbt-postgres-ref && github.event.inputs.dbt-core-ref || github.sha }}
|
|
||||||
cancel-in-progress: true
|
|
||||||
|
|
||||||
defaults:
|
|
||||||
run:
|
|
||||||
shell: bash
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
job-prep:
|
|
||||||
# This allow us to run the workflow on pull_requests as well so we can always run unit tests
|
|
||||||
# and only run integration tests on merge for time purposes
|
|
||||||
name: Setup Repo Refs
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
outputs:
|
|
||||||
dbt-postgres-ref: ${{ steps.core-ref.outputs.ref }}
|
|
||||||
dbt-core-ref: ${{ steps.common-ref.outputs.ref }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: "Input Refs"
|
|
||||||
id: job-inputs
|
|
||||||
run: |
|
|
||||||
echo "inputs.dbt-postgres-ref=${{ inputs.dbt-postgres-ref }}"
|
|
||||||
echo "inputs.dbt-core-ref=${{ inputs.dbt-core-ref }}"
|
|
||||||
|
|
||||||
- name: "Determine dbt-postgres ref"
|
|
||||||
id: core-ref
|
|
||||||
run: |
|
|
||||||
if [[ -z "${{ inputs.dbt-postgres-ref }}" ]]; then
|
|
||||||
REF="main"
|
|
||||||
else
|
|
||||||
REF=${{ inputs.dbt-postgres-ref }}
|
|
||||||
fi
|
|
||||||
echo "ref=$REF" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- name: "Determine dbt-core ref"
|
|
||||||
id: common-ref
|
|
||||||
run: |
|
|
||||||
if [[ -z "${{ inputs.dbt-core-ref }}" ]]; then
|
|
||||||
# these will be commits instead of branches
|
|
||||||
if [[ "${{ github.event_name }}" == "merge_group" ]]; then
|
|
||||||
REF=${{ github.event.merge_group.head_sha }}
|
|
||||||
else
|
|
||||||
REF=${{ github.event.pull_request.base.sha }}
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
REF=${{ inputs.dbt-core-ref }}
|
|
||||||
fi
|
|
||||||
echo "ref=$REF" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- name: "Final Refs"
|
|
||||||
run: |
|
|
||||||
echo "dbt-postgres-ref=${{ steps.core-ref.outputs.ref }}"
|
|
||||||
echo "dbt-core-ref=${{ steps.common-ref.outputs.ref }}"
|
|
||||||
|
|
||||||
integration-tests-postgres:
|
|
||||||
name: "dbt-postgres integration tests"
|
|
||||||
needs: [job-prep]
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
defaults:
|
|
||||||
run:
|
|
||||||
working-directory: "./dbt-postgres"
|
|
||||||
environment:
|
|
||||||
name: "dbt-postgres"
|
|
||||||
env:
|
|
||||||
POSTGRES_TEST_HOST: ${{ vars.POSTGRES_TEST_HOST }}
|
|
||||||
POSTGRES_TEST_PORT: ${{ vars.POSTGRES_TEST_PORT }}
|
|
||||||
POSTGRES_TEST_USER: ${{ vars.POSTGRES_TEST_USER }}
|
|
||||||
POSTGRES_TEST_PASS: ${{ secrets.POSTGRES_TEST_PASS }}
|
|
||||||
POSTGRES_TEST_DATABASE: ${{ vars.POSTGRES_TEST_DATABASE }}
|
|
||||||
POSTGRES_TEST_THREADS: ${{ vars.POSTGRES_TEST_THREADS }}
|
|
||||||
services:
|
|
||||||
postgres:
|
|
||||||
image: postgres
|
|
||||||
env:
|
|
||||||
POSTGRES_PASSWORD: postgres
|
|
||||||
options: >-
|
|
||||||
--health-cmd pg_isready
|
|
||||||
--health-interval 10s
|
|
||||||
--health-timeout 5s
|
|
||||||
--health-retries 5
|
|
||||||
ports:
|
|
||||||
- ${{ vars.POSTGRES_TEST_PORT }}:5432
|
|
||||||
steps:
|
|
||||||
- name: "Check out dbt-adapters@${{ needs.job-prep.outputs.dbt-postgres-ref }}"
|
|
||||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
|
||||||
with:
|
|
||||||
repository: dbt-labs/dbt-adapters
|
|
||||||
ref: ${{ needs.job-prep.outputs.dbt-postgres-ref }}
|
|
||||||
|
|
||||||
- name: "Set up Python"
|
|
||||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # actions/setup-python@v5
|
|
||||||
with:
|
|
||||||
python-version: ${{ inputs.python-version }}
|
|
||||||
|
|
||||||
- name: "Set environment variables"
|
|
||||||
run: |
|
|
||||||
echo "HATCH_PYTHON=${{ inputs.python-version }}" >> $GITHUB_ENV
|
|
||||||
echo "PIP_ONLY_BINARY=psycopg2-binary" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
- name: "Setup test database"
|
|
||||||
run: psql -f ./scripts/setup_test_database.sql
|
|
||||||
env:
|
|
||||||
PGHOST: ${{ vars.POSTGRES_TEST_HOST }}
|
|
||||||
PGPORT: ${{ vars.POSTGRES_TEST_PORT }}
|
|
||||||
PGUSER: postgres
|
|
||||||
PGPASSWORD: postgres
|
|
||||||
PGDATABASE: postgres
|
|
||||||
|
|
||||||
- name: "Install hatch"
|
|
||||||
uses: pypa/hatch@257e27e51a6a5616ed08a39a408a21c35c9931bc # pypa/hatch@install
|
|
||||||
|
|
||||||
- name: "Run integration tests"
|
|
||||||
run: hatch run ${{ inputs.hatch-env }}:integration-tests
|
|
||||||
12
.github/dependabot.yml
vendored
12
.github/dependabot.yml
vendored
@@ -11,6 +11,11 @@ updates:
|
|||||||
schedule:
|
schedule:
|
||||||
interval: "daily"
|
interval: "daily"
|
||||||
rebase-strategy: "disabled"
|
rebase-strategy: "disabled"
|
||||||
|
- package-ecosystem: "pip"
|
||||||
|
directory: "/plugins/postgres"
|
||||||
|
schedule:
|
||||||
|
interval: "daily"
|
||||||
|
rebase-strategy: "disabled"
|
||||||
|
|
||||||
# docker dependencies
|
# docker dependencies
|
||||||
- package-ecosystem: "docker"
|
- package-ecosystem: "docker"
|
||||||
@@ -23,10 +28,3 @@ updates:
|
|||||||
schedule:
|
schedule:
|
||||||
interval: "weekly"
|
interval: "weekly"
|
||||||
rebase-strategy: "disabled"
|
rebase-strategy: "disabled"
|
||||||
|
|
||||||
# github dependencies
|
|
||||||
- package-ecosystem: "github-actions"
|
|
||||||
directory: "/"
|
|
||||||
schedule:
|
|
||||||
interval: "weekly"
|
|
||||||
rebase-strategy: "disabled"
|
|
||||||
|
|||||||
30
.github/pull_request_template.md
vendored
30
.github/pull_request_template.md
vendored
@@ -1,33 +1,21 @@
|
|||||||
Resolves #
|
resolves #
|
||||||
|
|
||||||
<!---
|
<!---
|
||||||
Include the number of the issue addressed by this PR above, if applicable.
|
Include the number of the issue addressed by this PR above if applicable.
|
||||||
PRs for code changes without an associated issue *will not be merged*.
|
PRs for code changes without an associated issue *will not be merged*.
|
||||||
See CONTRIBUTING.md for more information.
|
See CONTRIBUTING.md for more information.
|
||||||
|
|
||||||
Add the `user docs` label to this PR if it will need docs changes. An
|
|
||||||
issue will get opened in docs.getdbt.com upon successful merge of this PR.
|
|
||||||
-->
|
-->
|
||||||
|
|
||||||
### Problem
|
### Description
|
||||||
|
|
||||||
<!---
|
<!---
|
||||||
Describe the problem this PR is solving. What is the application state
|
Describe the Pull Request here. Add any references and info to help reviewers
|
||||||
before this PR is merged?
|
understand your changes. Include any tradeoffs you considered.
|
||||||
-->
|
|
||||||
|
|
||||||
### Solution
|
|
||||||
|
|
||||||
<!---
|
|
||||||
Describe the way this PR solves the above problem. Add as much detail as you
|
|
||||||
can to help reviewers understand your changes. Include any alternatives and
|
|
||||||
tradeoffs you considered.
|
|
||||||
-->
|
-->
|
||||||
|
|
||||||
### Checklist
|
### Checklist
|
||||||
|
|
||||||
- [ ] I have read [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md) and understand what's expected of me.
|
- [ ] I have signed the [CLA](https://docs.getdbt.com/docs/contributor-license-agreements)
|
||||||
- [ ] I have run this code in development, and it appears to resolve the stated issue.
|
- [ ] I have run this code in development and it appears to resolve the stated issue
|
||||||
- [ ] This PR includes tests, or tests are not required or relevant for this PR.
|
- [ ] This PR includes tests, or tests are not required/relevant for this PR
|
||||||
- [ ] This PR has no interface changes (e.g., macros, CLI, logs, JSON artifacts, config files, adapter interface, etc.) or this PR has already received feedback and approval from Product or DX.
|
- [ ] I have added information about my change to be included in the [CHANGELOG](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#Adding-CHANGELOG-Entry).
|
||||||
- [ ] This PR includes [type annotations](https://docs.python.org/3/library/typing.html) for new and modified functions.
|
|
||||||
|
|||||||
95
.github/scripts/integration-test-matrix.js
vendored
Normal file
95
.github/scripts/integration-test-matrix.js
vendored
Normal file
@@ -0,0 +1,95 @@
|
|||||||
|
module.exports = ({ context }) => {
|
||||||
|
const defaultPythonVersion = "3.8";
|
||||||
|
const supportedPythonVersions = ["3.7", "3.8", "3.9"];
|
||||||
|
const supportedAdapters = ["postgres"];
|
||||||
|
|
||||||
|
// if PR, generate matrix based on files changed and PR labels
|
||||||
|
if (context.eventName.includes("pull_request")) {
|
||||||
|
// `changes` is a list of adapter names that have related
|
||||||
|
// file changes in the PR
|
||||||
|
// ex: ['postgres', 'snowflake']
|
||||||
|
const changes = JSON.parse(process.env.CHANGES);
|
||||||
|
const labels = context.payload.pull_request.labels.map(({ name }) => name);
|
||||||
|
console.log("labels", labels);
|
||||||
|
console.log("changes", changes);
|
||||||
|
const testAllLabel = labels.includes("test all");
|
||||||
|
const include = [];
|
||||||
|
|
||||||
|
for (const adapter of supportedAdapters) {
|
||||||
|
if (
|
||||||
|
changes.includes(adapter) ||
|
||||||
|
testAllLabel ||
|
||||||
|
labels.includes(`test ${adapter}`)
|
||||||
|
) {
|
||||||
|
for (const pythonVersion of supportedPythonVersions) {
|
||||||
|
if (
|
||||||
|
pythonVersion === defaultPythonVersion ||
|
||||||
|
labels.includes(`test python${pythonVersion}`) ||
|
||||||
|
testAllLabel
|
||||||
|
) {
|
||||||
|
// always run tests on ubuntu by default
|
||||||
|
include.push({
|
||||||
|
os: "ubuntu-latest",
|
||||||
|
adapter,
|
||||||
|
"python-version": pythonVersion,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (labels.includes("test windows") || testAllLabel) {
|
||||||
|
include.push({
|
||||||
|
os: "windows-latest",
|
||||||
|
adapter,
|
||||||
|
"python-version": pythonVersion,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (labels.includes("test macos") || testAllLabel) {
|
||||||
|
include.push({
|
||||||
|
os: "macos-latest",
|
||||||
|
adapter,
|
||||||
|
"python-version": pythonVersion,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log("matrix", { include });
|
||||||
|
|
||||||
|
return {
|
||||||
|
include,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
// if not PR, generate matrix of python version, adapter, and operating
|
||||||
|
// system to run integration tests on
|
||||||
|
|
||||||
|
const include = [];
|
||||||
|
// run for all adapters and python versions on ubuntu
|
||||||
|
for (const adapter of supportedAdapters) {
|
||||||
|
for (const pythonVersion of supportedPythonVersions) {
|
||||||
|
include.push({
|
||||||
|
os: 'ubuntu-latest',
|
||||||
|
adapter: adapter,
|
||||||
|
"python-version": pythonVersion,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// additionally include runs for all adapters, on macos and windows,
|
||||||
|
// but only for the default python version
|
||||||
|
for (const adapter of supportedAdapters) {
|
||||||
|
for (const operatingSystem of ["windows-latest", "macos-latest"]) {
|
||||||
|
include.push({
|
||||||
|
os: operatingSystem,
|
||||||
|
adapter: adapter,
|
||||||
|
"python-version": defaultPythonVersion,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log("matrix", { include });
|
||||||
|
|
||||||
|
return {
|
||||||
|
include,
|
||||||
|
};
|
||||||
|
};
|
||||||
186
.github/workflows/artifact-reviews.yml
vendored
186
.github/workflows/artifact-reviews.yml
vendored
@@ -1,186 +0,0 @@
|
|||||||
# **what?**
|
|
||||||
# Enforces 2 reviews when artifact or validation files are modified.
|
|
||||||
|
|
||||||
# **why?**
|
|
||||||
# Ensure artifact changes receive proper review from designated team members. GitHub doesn't support
|
|
||||||
# multiple reviews on a single PR based on files changed, so we need to enforce this manually.
|
|
||||||
|
|
||||||
# **when?**
|
|
||||||
# This will run when reviews are submitted and dismissed.
|
|
||||||
|
|
||||||
name: "Enforce Additional Reviews on Artifact and Validations Changes"
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
checks: write
|
|
||||||
pull-requests: write
|
|
||||||
contents: read
|
|
||||||
|
|
||||||
on:
|
|
||||||
# trigger check on review events. use pull_request_target for forks.
|
|
||||||
pull_request_target:
|
|
||||||
types: [opened, reopened, ready_for_review, synchronize, review_requested]
|
|
||||||
pull_request_review:
|
|
||||||
types: [submitted, edited, dismissed]
|
|
||||||
|
|
||||||
# only run this once per PR at a time
|
|
||||||
concurrency:
|
|
||||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
|
|
||||||
cancel-in-progress: true
|
|
||||||
|
|
||||||
env:
|
|
||||||
required_approvals: 2
|
|
||||||
team: "core-group"
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
check-reviews:
|
|
||||||
name: "Validate Additional Reviews"
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: "Get list of changed files"
|
|
||||||
id: changed_files
|
|
||||||
run: |
|
|
||||||
# Fetch files as JSON and process with jq to sanitize output
|
|
||||||
gh api repos/${{ github.repository }}/pulls/${{ github.event.pull_request.number }}/files \
|
|
||||||
| jq -r '.[].filename' \
|
|
||||||
| while IFS= read -r file; do
|
|
||||||
# Sanitize the filename by removing any special characters and command injection attempts
|
|
||||||
clean_file=$(echo "$file" | sed 's/[^a-zA-Z0-9\.\/\-_]//g')
|
|
||||||
echo "$clean_file"
|
|
||||||
done > changed_files.txt
|
|
||||||
echo "CHANGED_FILES<<EOF" >> $GITHUB_OUTPUT
|
|
||||||
cat changed_files.txt >> $GITHUB_OUTPUT
|
|
||||||
echo "EOF" >> $GITHUB_OUTPUT
|
|
||||||
env:
|
|
||||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: "Check if any artifact files changed"
|
|
||||||
id: artifact_files_changed
|
|
||||||
run: |
|
|
||||||
artifact_changes=false
|
|
||||||
while IFS= read -r file; do
|
|
||||||
# Only process if file path looks legitimate
|
|
||||||
if [[ "$file" =~ ^[a-zA-Z0-9\.\/\-_]+$ ]]; then
|
|
||||||
if [[ "$file" == "core/dbt/artifacts/"* ]] ; then
|
|
||||||
artifact_changes=true
|
|
||||||
break
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
done < changed_files.txt
|
|
||||||
echo "artifact_changes=$artifact_changes" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- name: "Get Core Team Members"
|
|
||||||
if: steps.artifact_files_changed.outputs.artifact_changes == 'true'
|
|
||||||
id: core_members
|
|
||||||
run: |
|
|
||||||
gh api -H "Accept: application/vnd.github+json" \
|
|
||||||
/orgs/dbt-labs/teams/${{ env.team }}/members > core_members.json
|
|
||||||
|
|
||||||
# Extract usernames and set as multiline output
|
|
||||||
echo "membership<<EOF" >> $GITHUB_OUTPUT
|
|
||||||
jq -r '.[].login' core_members.json >> $GITHUB_OUTPUT
|
|
||||||
echo "EOF" >> $GITHUB_OUTPUT
|
|
||||||
env:
|
|
||||||
GH_TOKEN: ${{ secrets.IT_TEAM_MEMBERSHIP }}
|
|
||||||
|
|
||||||
- name: "Verify ${{ env.required_approvals }} core team approvals"
|
|
||||||
if: steps.artifact_files_changed.outputs.artifact_changes == 'true'
|
|
||||||
id: check_approvals
|
|
||||||
run: |
|
|
||||||
|
|
||||||
# Get all reviews
|
|
||||||
REVIEWS=$(gh api repos/${{ github.repository }}/pulls/${{ github.event.pull_request.number }}/reviews)
|
|
||||||
echo "All reviews:"
|
|
||||||
echo "$REVIEWS"
|
|
||||||
# Count approved reviews from core team members (only most recent review per user)
|
|
||||||
CORE_APPROVALS=0
|
|
||||||
while IFS= read -r member; do
|
|
||||||
echo "Checking member: $member"
|
|
||||||
APPROVED=$(echo "$REVIEWS" | jq --arg user "$member" '
|
|
||||||
group_by(.user.login) |
|
|
||||||
map(select(.[0].user.login == $user) |
|
|
||||||
sort_by(.submitted_at) |
|
|
||||||
last) |
|
|
||||||
map(select(.state == "APPROVED" and (.state != "DISMISSED"))) |
|
|
||||||
length')
|
|
||||||
echo "Latest review state for $member: $APPROVED"
|
|
||||||
CORE_APPROVALS=$((CORE_APPROVALS + APPROVED))
|
|
||||||
echo "Running total: $CORE_APPROVALS"
|
|
||||||
done <<< "${{ steps.core_members.outputs.membership }}"
|
|
||||||
|
|
||||||
echo "CORE_APPROVALS=$CORE_APPROVALS" >> $GITHUB_OUTPUT
|
|
||||||
echo "CORE_APPROVALS=$CORE_APPROVALS"
|
|
||||||
env:
|
|
||||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: "Find Comment"
|
|
||||||
if: steps.artifact_files_changed.outputs.artifact_changes == 'true' && steps.check_approvals.outputs.CORE_APPROVALS < env.required_approvals
|
|
||||||
uses: peter-evans/find-comment@a54c31d7fa095754bfef525c0c8e5e5674c4b4b1 # peter-evans/find-comment@v2
|
|
||||||
id: find-comment
|
|
||||||
with:
|
|
||||||
issue-number: ${{ github.event.pull_request.number }}
|
|
||||||
comment-author: 'github-actions[bot]'
|
|
||||||
body-includes: "### Additional Artifact Review Required"
|
|
||||||
|
|
||||||
- name: "Create Comment"
|
|
||||||
if: steps.artifact_files_changed.outputs.artifact_changes == 'true' && steps.find-comment.outputs.comment-id == '' && steps.check_approvals.outputs.CORE_APPROVALS < env.required_approvals
|
|
||||||
uses: peter-evans/create-or-update-comment@23ff15729ef2fc348714a3bb66d2f655ca9066f2 # peter-evans/create-or-update-comment@v3
|
|
||||||
with:
|
|
||||||
issue-number: ${{ github.event.pull_request.number }}
|
|
||||||
body: |
|
|
||||||
### Additional Artifact Review Required
|
|
||||||
|
|
||||||
Changes to artifact directory files requires at least ${{ env.required_approvals }} approvals from core team members.
|
|
||||||
|
|
||||||
- name: "Notify if not enough approvals"
|
|
||||||
if: steps.artifact_files_changed.outputs.artifact_changes == 'true'
|
|
||||||
run: |
|
|
||||||
if [[ "${{ steps.check_approvals.outputs.CORE_APPROVALS }}" -ge "${{ env.required_approvals }}" ]]; then
|
|
||||||
title="Extra requirements met"
|
|
||||||
message="Changes to artifact directory files requires at least ${{ env.required_approvals }} approvals from core team members. Current number of core team approvals: ${{ steps.check_approvals.outputs.CORE_APPROVALS }} "
|
|
||||||
echo "::notice title=$title::$message"
|
|
||||||
echo "REVIEW_STATUS=success" >> $GITHUB_OUTPUT
|
|
||||||
else
|
|
||||||
title="PR Approval Requirements Not Met"
|
|
||||||
message="Changes to artifact directory files requires at least ${{ env.required_approvals }} approvals from core team members. Current number of core team approvals: ${{ steps.check_approvals.outputs.CORE_APPROVALS }} "
|
|
||||||
echo "::notice title=$title::$message"
|
|
||||||
echo "REVIEW_STATUS=neutral" >> $GITHUB_OUTPUT
|
|
||||||
fi
|
|
||||||
id: review_check
|
|
||||||
|
|
||||||
- name: "Set check status"
|
|
||||||
id: status_check
|
|
||||||
run: |
|
|
||||||
if [[ "${{ steps.artifact_files_changed.outputs.artifact_changes }}" == 'false' ]]; then
|
|
||||||
# no extra review required
|
|
||||||
echo "current_status=success" >> $GITHUB_OUTPUT
|
|
||||||
elif [[ "${{ steps.review_check.outputs.REVIEW_STATUS }}" == "success" ]]; then
|
|
||||||
# we have all the required reviews
|
|
||||||
echo "current_status=success" >> $GITHUB_OUTPUT
|
|
||||||
else
|
|
||||||
# neutral exit - neither success nor failure
|
|
||||||
# we can't fail here because we use multiple triggers for this workflow and they won't reset the check
|
|
||||||
# workaround is to use a neutral exit to skip the check run until it's actually successful
|
|
||||||
echo "current_status=neutral" >> $GITHUB_OUTPUT
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: "Post Event"
|
|
||||||
# This step posts the status of the check because the workflow is triggered by multiple events
|
|
||||||
# and we need to ensure the check is always updated. Otherwise we would end up with duplicate
|
|
||||||
# checks in the GitHub UI.
|
|
||||||
run: |
|
|
||||||
if [[ "${{ steps.status_check.outputs.current_status }}" == "success" ]]; then
|
|
||||||
state="success"
|
|
||||||
else
|
|
||||||
state="failure"
|
|
||||||
fi
|
|
||||||
|
|
||||||
gh api \
|
|
||||||
--method POST \
|
|
||||||
-H "Accept: application/vnd.github+json" \
|
|
||||||
/repos/${{ github.repository }}/statuses/${{ github.event.pull_request.base.sha }} \
|
|
||||||
-f state="$state" \
|
|
||||||
-f description="Artifact Review Check" \
|
|
||||||
-f context="Artifact Review Check" \
|
|
||||||
-f target_url="${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
|
|
||||||
env:
|
|
||||||
GH_TOKEN: ${{ secrets.FISHTOWN_BOT_PAT }}
|
|
||||||
50
.github/workflows/auto-respond-bug-reports.yml
vendored
50
.github/workflows/auto-respond-bug-reports.yml
vendored
@@ -1,50 +0,0 @@
|
|||||||
# **what?**
|
|
||||||
# Check if the an issue is opened near or during an extended holiday period.
|
|
||||||
# If so, post an automatically-generated comment about the holiday for bug reports.
|
|
||||||
# Also provide specific information to customers of dbt Cloud.
|
|
||||||
|
|
||||||
# **why?**
|
|
||||||
# Explain why responses will be delayed during our holiday period.
|
|
||||||
|
|
||||||
# **when?**
|
|
||||||
# This will run when new issues are opened.
|
|
||||||
|
|
||||||
name: Auto-Respond to Bug Reports During Holiday Period
|
|
||||||
|
|
||||||
on:
|
|
||||||
issues:
|
|
||||||
types:
|
|
||||||
- opened
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
issues: write
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
auto-response:
|
|
||||||
runs-on: ${{ vars.UBUNTU_LATEST }}
|
|
||||||
steps:
|
|
||||||
- name: Check if current date is within holiday period
|
|
||||||
id: date-check
|
|
||||||
run: |
|
|
||||||
current_date=$(date -u +"%Y-%m-%d")
|
|
||||||
start_date="2024-12-23"
|
|
||||||
end_date="2025-01-05"
|
|
||||||
|
|
||||||
if [[ "$current_date" < "$start_date" || "$current_date" > "$end_date" ]]; then
|
|
||||||
echo "outside_holiday=true" >> $GITHUB_ENV
|
|
||||||
else
|
|
||||||
echo "outside_holiday=false" >> $GITHUB_ENV
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: Post comment
|
|
||||||
if: ${{ env.outside_holiday == 'false' && contains(github.event.issue.labels.*.name, 'bug') }}
|
|
||||||
run: |
|
|
||||||
gh issue comment ${{ github.event.issue.number }} --repo ${{ github.repository }} --body "Thank you for your bug report! Our team is will be out of the office for [Christmas and our Global Week of Rest](https://handbook.getdbt.com/docs/time_off#2024-us-holidays), from December 25, 2024, through January 3, 2025.
|
|
||||||
|
|
||||||
We will review your issue as soon as possible after returning.
|
|
||||||
Thank you for your understanding, and happy holidays! 🎄🎉
|
|
||||||
|
|
||||||
If you are a customer of dbt Cloud, please contact our Customer Support team via the dbt Cloud web interface or email **support@dbtlabs.com**."
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
40
.github/workflows/backport.yml
vendored
40
.github/workflows/backport.yml
vendored
@@ -1,40 +0,0 @@
|
|||||||
# **what?**
|
|
||||||
# When a PR is merged, if it has the backport label, it will create
|
|
||||||
# a new PR to backport those changes to the given branch. If it can't
|
|
||||||
# cleanly do a backport, it will comment on the merged PR of the failure.
|
|
||||||
#
|
|
||||||
# Label naming convention: "backport <branch name to backport to>"
|
|
||||||
# Example: backport 1.0.latest
|
|
||||||
#
|
|
||||||
# You MUST "Squash and merge" the original PR or this won't work.
|
|
||||||
|
|
||||||
# **why?**
|
|
||||||
# Changes sometimes need to be backported to release branches.
|
|
||||||
# This automates the backporting process
|
|
||||||
|
|
||||||
# **when?**
|
|
||||||
# Once a PR is "Squash and merge"'d, by adding a backport label, this is triggered
|
|
||||||
|
|
||||||
name: Backport
|
|
||||||
on:
|
|
||||||
pull_request:
|
|
||||||
types:
|
|
||||||
- labeled
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: write
|
|
||||||
pull-requests: write
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
backport:
|
|
||||||
name: Backport
|
|
||||||
runs-on: ${{ vars.UBUNTU_LATEST }}
|
|
||||||
# Only react to merged PRs for security reasons.
|
|
||||||
# See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target.
|
|
||||||
if: >
|
|
||||||
github.event.pull_request.merged
|
|
||||||
&& contains(github.event.label.name, 'backport')
|
|
||||||
steps:
|
|
||||||
- uses: tibdex/backport@9565281eda0731b1d20c4025c43339fb0a23812e # tibdex/backport@v2.0.4
|
|
||||||
with:
|
|
||||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
59
.github/workflows/bot-changelog.yml
vendored
59
.github/workflows/bot-changelog.yml
vendored
@@ -1,59 +0,0 @@
|
|||||||
# **what?**
|
|
||||||
# When bots create a PR, this action will add a corresponding changie yaml file to that
|
|
||||||
# PR when a specific label is added.
|
|
||||||
#
|
|
||||||
# The file is created off a template:
|
|
||||||
#
|
|
||||||
# kind: <per action matrix>
|
|
||||||
# body: <PR title>
|
|
||||||
# time: <current timestamp>
|
|
||||||
# custom:
|
|
||||||
# Author: <PR User Login (generally the bot)>
|
|
||||||
# Issue: 4904
|
|
||||||
# PR: <PR number>
|
|
||||||
#
|
|
||||||
# **why?**
|
|
||||||
# Automate changelog generation for more visibility with automated bot PRs.
|
|
||||||
#
|
|
||||||
# **when?**
|
|
||||||
# Once a PR is created, label should be added to PR before or after creation. You can also
|
|
||||||
# manually trigger this by adding the appropriate label at any time.
|
|
||||||
#
|
|
||||||
# **how to add another bot?**
|
|
||||||
# Add the label and changie kind to the include matrix. That's it!
|
|
||||||
#
|
|
||||||
|
|
||||||
name: Bot Changelog
|
|
||||||
|
|
||||||
on:
|
|
||||||
pull_request:
|
|
||||||
# catch when the PR is opened with the label or when the label is added
|
|
||||||
types: [labeled]
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: write
|
|
||||||
pull-requests: read
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
generate_changelog:
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
include:
|
|
||||||
- label: "dependencies"
|
|
||||||
changie_kind: "Dependencies"
|
|
||||||
runs-on: ${{ vars.UBUNTU_LATEST }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
|
|
||||||
- name: Create and commit changelog on bot PR
|
|
||||||
if: ${{ contains(github.event.pull_request.labels.*.name, matrix.label) }}
|
|
||||||
id: bot_changelog
|
|
||||||
uses: emmyoop/changie_bot@22b70618b13d0d1c64ea95212bafca2d2bf6b764 # emmyoop/changie_bot@v1.1.0
|
|
||||||
with:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.FISHTOWN_BOT_PAT }}
|
|
||||||
commit_author_name: "Github Build Bot"
|
|
||||||
commit_author_email: "<buildbot@fishtownanalytics.com>"
|
|
||||||
commit_message: "Add automated changelog yaml from template for bot PR"
|
|
||||||
changie_kind: ${{ matrix.changie_kind }}
|
|
||||||
label: ${{ matrix.label }}
|
|
||||||
custom_changelog_string: "custom:\n Author: ${{ github.event.pull_request.user.login }}\n Issue: ${{ github.event.pull_request.number }}"
|
|
||||||
76
.github/workflows/changelog-check.yml
vendored
Normal file
76
.github/workflows/changelog-check.yml
vendored
Normal file
@@ -0,0 +1,76 @@
|
|||||||
|
# **what?**
|
||||||
|
# Checks that a file has been committed under the /.changes directory
|
||||||
|
# as a new CHANGELOG entry. Cannot check for a specific filename as
|
||||||
|
# it is dynamically generated by change type and timestamp.
|
||||||
|
# This workflow should not require any secrets since it runs for PRs
|
||||||
|
# from forked repos.
|
||||||
|
# By default, secrets are not passed to workflows running from
|
||||||
|
# a forked repo.
|
||||||
|
|
||||||
|
# **why?**
|
||||||
|
# Ensure code change gets reflected in the CHANGELOG.
|
||||||
|
|
||||||
|
# **when?**
|
||||||
|
# This will run for all PRs going into main and *.latest.
|
||||||
|
|
||||||
|
name: Check Changelog Entry
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
pull-requests: write
|
||||||
|
|
||||||
|
env:
|
||||||
|
changelog_comment: 'Thank you for your pull request! We could not find a changelog entry for this change. For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry).'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
changelog:
|
||||||
|
name: changelog
|
||||||
|
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Check if changelog file was added
|
||||||
|
# https://github.com/marketplace/actions/paths-changes-filter
|
||||||
|
# For each filter, it sets output variable named by the filter to the text:
|
||||||
|
# 'true' - if any of changed files matches any of filter rules
|
||||||
|
# 'false' - if none of changed files matches any of filter rules
|
||||||
|
# also, returns:
|
||||||
|
# `changes` - JSON array with names of all filters matching any of the changed files
|
||||||
|
uses: dorny/paths-filter@v2
|
||||||
|
id: filter
|
||||||
|
with:
|
||||||
|
token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
filters: |
|
||||||
|
changelog:
|
||||||
|
- added: '.changes/unreleased/**.yaml'
|
||||||
|
- name: Check if comment already exists
|
||||||
|
uses: peter-evans/find-comment@v1
|
||||||
|
id: changelog_comment
|
||||||
|
with:
|
||||||
|
issue-number: ${{ github.event.pull_request.number }}
|
||||||
|
comment-author: 'github-actions[bot]'
|
||||||
|
body-includes: ${{ env.changelog_comment }}
|
||||||
|
- name: Create PR comment if changelog entry is missing, required, and does not exist
|
||||||
|
if: |
|
||||||
|
steps.filter.outputs.changelog == 'false' &&
|
||||||
|
!contains( github.event.pull_request.labels.*.name, 'Skip Changelog') &&
|
||||||
|
steps.changelog_comment.outputs.comment-body == ''
|
||||||
|
uses: peter-evans/create-or-update-comment@v1
|
||||||
|
with:
|
||||||
|
issue-number: ${{ github.event.pull_request.number }}
|
||||||
|
body: ${{ env.changelog_comment }}
|
||||||
|
- name: Fail job if changelog entry is missing and required
|
||||||
|
if: |
|
||||||
|
steps.filter.outputs.changelog == 'false' &&
|
||||||
|
!contains( github.event.pull_request.labels.*.name, 'Skip Changelog')
|
||||||
|
uses: actions/github-script@v6
|
||||||
|
with:
|
||||||
|
script: core.setFailed('Changelog entry required to merge.')
|
||||||
40
.github/workflows/changelog-existence.yml
vendored
40
.github/workflows/changelog-existence.yml
vendored
@@ -1,40 +0,0 @@
|
|||||||
# **what?**
|
|
||||||
# Checks that a file has been committed under the /.changes directory
|
|
||||||
# as a new CHANGELOG entry. Cannot check for a specific filename as
|
|
||||||
# it is dynamically generated by change type and timestamp.
|
|
||||||
# This workflow runs on pull_request_target because it requires
|
|
||||||
# secrets to post comments.
|
|
||||||
|
|
||||||
# **why?**
|
|
||||||
# Ensure code change gets reflected in the CHANGELOG.
|
|
||||||
|
|
||||||
# **when?**
|
|
||||||
# This will run for all PRs going into main and *.latest. It will
|
|
||||||
# run when they are opened, reopened, when any label is added or removed
|
|
||||||
# and when new code is pushed to the branch. The action will then get
|
|
||||||
# skipped if the 'Skip Changelog' label is present is any of the labels.
|
|
||||||
|
|
||||||
name: Check Changelog Entry
|
|
||||||
|
|
||||||
on:
|
|
||||||
pull_request_target:
|
|
||||||
types: [opened, reopened, labeled, unlabeled, synchronize]
|
|
||||||
paths-ignore: ['.changes/**', '.github/**', 'tests/**', '**.md', '**.yml']
|
|
||||||
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
defaults:
|
|
||||||
run:
|
|
||||||
shell: bash
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
pull-requests: write
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
changelog:
|
|
||||||
uses: dbt-labs/actions/.github/workflows/changelog-existence.yml@main
|
|
||||||
with:
|
|
||||||
changelog_comment: 'Thank you for your pull request! We could not find a changelog entry for this change. For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry).'
|
|
||||||
skip_label: 'Skip Changelog'
|
|
||||||
secrets: inherit
|
|
||||||
45
.github/workflows/check-artifact-changes.yml
vendored
45
.github/workflows/check-artifact-changes.yml
vendored
@@ -1,45 +0,0 @@
|
|||||||
name: Check Artifact Changes
|
|
||||||
|
|
||||||
on:
|
|
||||||
pull_request:
|
|
||||||
types: [ opened, reopened, labeled, unlabeled, synchronize ]
|
|
||||||
paths-ignore: [ '.changes/**', '.github/**', 'tests/**', '**.md', '**.yml' ]
|
|
||||||
merge_group:
|
|
||||||
types: [checks_requested]
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
check-artifact-changes:
|
|
||||||
runs-on: ${{ vars.UBUNTU_LATEST }}
|
|
||||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'artifact_minor_upgrade') }}
|
|
||||||
steps:
|
|
||||||
- name: Checkout code
|
|
||||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
|
||||||
with:
|
|
||||||
fetch-depth: 0
|
|
||||||
|
|
||||||
- name: Check for changes in core/dbt/artifacts
|
|
||||||
# https://github.com/marketplace/actions/paths-changes-filter
|
|
||||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # dorny/paths-filter@v3
|
|
||||||
id: check_artifact_changes
|
|
||||||
with:
|
|
||||||
filters: |
|
|
||||||
artifacts_changed:
|
|
||||||
- 'core/dbt/artifacts/**'
|
|
||||||
list-files: shell
|
|
||||||
|
|
||||||
- name: Fail CI if artifacts have changed
|
|
||||||
if: steps.check_artifact_changes.outputs.artifacts_changed == 'true'
|
|
||||||
run: |
|
|
||||||
echo "CI failure: Artifact changes checked in core/dbt/artifacts directory."
|
|
||||||
echo "Files changed: ${{ steps.check_artifact_changes.outputs.artifacts_changed_files }}"
|
|
||||||
echo "To bypass this check, confirm that the change is not breaking (https://github.com/dbt-labs/dbt-core/blob/main/core/dbt/artifacts/README.md#breaking-changes) and add the 'artifact_minor_upgrade' label to the PR. Modifications and additions to all fields require updates to https://github.com/dbt-labs/dbt-jsonschema."
|
|
||||||
exit 1
|
|
||||||
|
|
||||||
- name: CI check passed
|
|
||||||
if: steps.check_artifact_changes.outputs.artifacts_changed == 'false'
|
|
||||||
run: |
|
|
||||||
echo "No prohibited artifact changes found in core/dbt/artifacts. CI check passed."
|
|
||||||
44
.github/workflows/community-label.yml
vendored
44
.github/workflows/community-label.yml
vendored
@@ -1,44 +0,0 @@
|
|||||||
# **what?**
|
|
||||||
# Label a PR with a `community` label when a PR is opened by a user outside core/adapters
|
|
||||||
|
|
||||||
# **why?**
|
|
||||||
# To streamline triage and ensure that community contributions are recognized and prioritized
|
|
||||||
|
|
||||||
# **when?**
|
|
||||||
# When a PR is opened, not in draft or moved from draft to ready for review
|
|
||||||
|
|
||||||
name: Label community PRs
|
|
||||||
|
|
||||||
on:
|
|
||||||
# have to use pull_request_target since community PRs come from forks
|
|
||||||
pull_request_target:
|
|
||||||
types: [opened, ready_for_review]
|
|
||||||
|
|
||||||
defaults:
|
|
||||||
run:
|
|
||||||
shell: bash
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
pull-requests: write # labels PRs
|
|
||||||
contents: read # reads team membership
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
open_issues:
|
|
||||||
# If this PR already has the community label, no need to relabel it
|
|
||||||
# If this PR is opened and not draft, determine if it needs to be labeled
|
|
||||||
# if the PR is converted out of draft, determine if it needs to be labeled
|
|
||||||
if: |
|
|
||||||
(
|
|
||||||
!contains(github.event.pull_request.labels.*.name, 'community')
|
|
||||||
&& (
|
|
||||||
(github.event.action == 'opened' && github.event.pull_request.draft == false)
|
|
||||||
|| github.event.action == 'ready_for_review'
|
|
||||||
)
|
|
||||||
&& github.event.pull_request.user.type != 'Bot'
|
|
||||||
&& github.event.pull_request.user.login != 'dependabot[bot]'
|
|
||||||
)
|
|
||||||
uses: dbt-labs/actions/.github/workflows/label-community.yml@main
|
|
||||||
with:
|
|
||||||
github_team: 'core-group'
|
|
||||||
label: 'community'
|
|
||||||
secrets: inherit
|
|
||||||
391
.github/workflows/cut-release-branch.yml
vendored
391
.github/workflows/cut-release-branch.yml
vendored
@@ -1,391 +0,0 @@
|
|||||||
# **what?**
|
|
||||||
# Cuts the `*.latest` branch, bumps dependencies on it, cleans up all files in `.changes/unreleased`
|
|
||||||
# and `.changes/previous version` on main and bumps main to the input version.
|
|
||||||
|
|
||||||
# **why?**
|
|
||||||
# Clean up the main branch after a release branch is cut and automate cutting the release branch.
|
|
||||||
# Generally reduces the workload of engineers and reduces errors.
|
|
||||||
|
|
||||||
# **when?**
|
|
||||||
# This will run when called manually or when triggered in another workflow.
|
|
||||||
|
|
||||||
# Example Usage including required permissions: TODO: update once finalized
|
|
||||||
|
|
||||||
# permissions:
|
|
||||||
# contents: read
|
|
||||||
# pull-requests: write
|
|
||||||
#
|
|
||||||
# name: Cut Release Branch
|
|
||||||
# jobs:
|
|
||||||
# changelog:
|
|
||||||
# uses: dbt-labs/actions/.github/workflows/cut-release-branch.yml@main
|
|
||||||
# with:
|
|
||||||
# new_branch_name: 1.7.latest
|
|
||||||
# PR_title: "Cleanup main after cutting new 1.7.latest branch"
|
|
||||||
# PR_body: "All adapter PRs will fail CI until the dbt-core PR has been merged due to release version conflicts."
|
|
||||||
# secrets:
|
|
||||||
# FISHTOWN_BOT_PAT: ${{ secrets.FISHTOWN_BOT_PAT }}
|
|
||||||
|
|
||||||
# TODOs
|
|
||||||
# add note to eventually commit changes directly and bypass checks - same as release - when we move to this model run test action after merge
|
|
||||||
|
|
||||||
name: Cut new release branch
|
|
||||||
run-name: "Cutting New Branch: ${{ inputs.new_branch_name }}"
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_dispatch:
|
|
||||||
inputs:
|
|
||||||
new_branch_name:
|
|
||||||
description: "The full name of the new branch (ex. 1.5.latest)"
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
|
|
||||||
defaults:
|
|
||||||
run:
|
|
||||||
shell: bash
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: write
|
|
||||||
pull-requests: write
|
|
||||||
|
|
||||||
env:
|
|
||||||
PYTHON_TARGET_VERSION: "3.10"
|
|
||||||
PR_TITLE: "Cleanup main after cutting new ${{ inputs.new_branch_name }} branch"
|
|
||||||
PR_BODY: "All adapter PRs will fail CI until the dbt-core PR has been merged due to release version conflicts."
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
prep_work:
|
|
||||||
name: "Prep Work"
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: "[DEBUG] Print Inputs"
|
|
||||||
run: |
|
|
||||||
echo "new_branch_name: ${{ inputs.new_branch_name }}"
|
|
||||||
echo "PR_title: ${{ env.PR_TITLE }}"
|
|
||||||
echo "PR_body: ${{ env.PR_BODY }}"
|
|
||||||
|
|
||||||
create_temp_branch:
|
|
||||||
name: "Create Temp branch off main"
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
outputs:
|
|
||||||
temp_branch_name: ${{ steps.variables.outputs.BRANCH_NAME }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: "Set Branch Value"
|
|
||||||
id: variables
|
|
||||||
run: |
|
|
||||||
echo "BRANCH_NAME=cutting_release_branch/main_cleanup_$GITHUB_RUN_ID" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- name: "Checkout ${{ github.repository }}"
|
|
||||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
|
||||||
with:
|
|
||||||
ref: "main"
|
|
||||||
token: ${{ secrets.FISHTOWN_BOT_PAT }}
|
|
||||||
|
|
||||||
- name: "Create PR Branch"
|
|
||||||
run: |
|
|
||||||
user="Github Build Bot"
|
|
||||||
email="buildbot@fishtownanalytics.com"
|
|
||||||
git config user.name "$user"
|
|
||||||
git config user.email "$email"
|
|
||||||
git checkout -b ${{ steps.variables.outputs.BRANCH_NAME }}
|
|
||||||
git push --set-upstream origin ${{ steps.variables.outputs.BRANCH_NAME }}
|
|
||||||
|
|
||||||
- name: "[Notification] Temp branch created"
|
|
||||||
run: |
|
|
||||||
message="Temp branch ${{ steps.variables.outputs.BRANCH_NAME }} created"
|
|
||||||
echo "::notice title="Temporary branch created": $title::$message"
|
|
||||||
|
|
||||||
cleanup_changelog:
|
|
||||||
name: "Clean Up Changelog"
|
|
||||||
needs: ["create_temp_branch"]
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
outputs:
|
|
||||||
next-version: ${{ steps.semver-current.outputs.next-minor-alpha-version }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: "Checkout ${{ github.repository }}"
|
|
||||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
|
||||||
with:
|
|
||||||
ref: ${{ needs.create_temp_branch.outputs.temp_branch_name }}
|
|
||||||
token: ${{ secrets.FISHTOWN_BOT_PAT }}
|
|
||||||
|
|
||||||
- name: "Add Homebrew To PATH"
|
|
||||||
run: |
|
|
||||||
echo "/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin" >> $GITHUB_PATH
|
|
||||||
|
|
||||||
- name: "Install Homebrew Packages"
|
|
||||||
run: |
|
|
||||||
brew install pre-commit
|
|
||||||
brew tap miniscruff/changie https://github.com/miniscruff/changie
|
|
||||||
brew install changie
|
|
||||||
|
|
||||||
- name: "Check Current Version In Code"
|
|
||||||
id: determine_version
|
|
||||||
run: |
|
|
||||||
current_version=$(grep '^version = ' core/pyproject.toml | sed 's/version = "\(.*\)"/\1/')
|
|
||||||
echo "current_version=$current_version" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- name: "[Notification] Check Current Version In Code"
|
|
||||||
run: |
|
|
||||||
message="The current version is ${{ steps.determine_version.outputs.current_version }}"
|
|
||||||
echo "::notice title="Version Bump Check": $title::$message"
|
|
||||||
|
|
||||||
- name: "Parse Current Version Into Parts for Changelog Directories"
|
|
||||||
id: semver-current
|
|
||||||
uses: dbt-labs/actions/parse-semver@main
|
|
||||||
with:
|
|
||||||
version: ${{ steps.determine_version.outputs.current_version }}
|
|
||||||
|
|
||||||
- name: "[Notification] Next Alpha Version"
|
|
||||||
run: |
|
|
||||||
message="The next alpha version is ${{ steps.semver-current.outputs.next-minor-alpha-version }}"
|
|
||||||
echo "::notice title="Version Bump Check": $title::$message"
|
|
||||||
|
|
||||||
- name: "Delete Unreleased Changelog YAMLs"
|
|
||||||
# removal fails if no files exist. OK to continue since we're just cleaning up the files.
|
|
||||||
continue-on-error: true
|
|
||||||
run: |
|
|
||||||
rm .changes/unreleased/*.yaml || true
|
|
||||||
|
|
||||||
- name: "Delete Pre Release Changelogs and YAMLs"
|
|
||||||
# removal fails if no files exist. OK to continue since we're just cleaning up the files.
|
|
||||||
continue-on-error: true
|
|
||||||
run: |
|
|
||||||
rm .changes/${{ steps.semver-current.outputs.base-version }}/*.yaml || true
|
|
||||||
rm .changes/${{ steps.semver-current.outputs.major }}.${{ steps.semver-current.outputs.minor }}.*.md || true
|
|
||||||
|
|
||||||
- name: "Cleanup CHANGELOG.md"
|
|
||||||
run: |
|
|
||||||
changie merge
|
|
||||||
|
|
||||||
- name: "Commit Changelog Cleanup to Branch"
|
|
||||||
run: |
|
|
||||||
user="Github Build Bot"
|
|
||||||
email="buildbot@fishtownanalytics.com"
|
|
||||||
git config user.name "$user"
|
|
||||||
git config user.email "$email"
|
|
||||||
git status
|
|
||||||
git add .
|
|
||||||
git commit -m "Clean up changelog on main"
|
|
||||||
git push
|
|
||||||
|
|
||||||
- name: "[Notification] Changelog cleaned up"
|
|
||||||
run: |
|
|
||||||
message="Changelog on ${{ needs.create_temp_branch.outputs.temp_branch_name }} cleaned up"
|
|
||||||
echo "::notice title="Changelog cleaned up": $title::$message"
|
|
||||||
|
|
||||||
bump_version:
|
|
||||||
name: "Bump to next minor version"
|
|
||||||
needs: ["cleanup_changelog", "create_temp_branch"]
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: "Checkout ${{ github.repository }}"
|
|
||||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
|
||||||
with:
|
|
||||||
ref: ${{ needs.create_temp_branch.outputs.temp_branch_name }}
|
|
||||||
token: ${{ secrets.FISHTOWN_BOT_PAT }}
|
|
||||||
|
|
||||||
- name: "Set up Python - ${{ env.PYTHON_TARGET_VERSION }}"
|
|
||||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # actions/setup-python@v5
|
|
||||||
with:
|
|
||||||
python-version: "${{ env.PYTHON_TARGET_VERSION }}"
|
|
||||||
|
|
||||||
- name: "Install Spark Dependencies"
|
|
||||||
if: ${{ contains(github.repository, 'dbt-labs/dbt-spark') }}
|
|
||||||
run: |
|
|
||||||
sudo apt-get update
|
|
||||||
sudo apt-get install libsasl2-dev
|
|
||||||
|
|
||||||
- name: "Install Python Dependencies"
|
|
||||||
run: |
|
|
||||||
python -m pip install --upgrade pip
|
|
||||||
python -m pip install hatch
|
|
||||||
|
|
||||||
- name: "Bump Version To ${{ needs.cleanup_changelog.outputs.next-version }}"
|
|
||||||
run: |
|
|
||||||
cd core
|
|
||||||
hatch version ${{ needs.cleanup_changelog.outputs.next-version }}
|
|
||||||
hatch run dev-req
|
|
||||||
dbt --version
|
|
||||||
|
|
||||||
- name: "Commit Version Bump to Branch"
|
|
||||||
run: |
|
|
||||||
user="Github Build Bot"
|
|
||||||
email="buildbot@fishtownanalytics.com"
|
|
||||||
git config user.name "$user"
|
|
||||||
git config user.email "$email"
|
|
||||||
git status
|
|
||||||
git add .
|
|
||||||
git commit -m "Bumping version to ${{ needs.cleanup_changelog.outputs.next-version }}"
|
|
||||||
git push
|
|
||||||
|
|
||||||
- name: "[Notification] Version Bump completed"
|
|
||||||
run: |
|
|
||||||
message="Version on ${{ needs.create_temp_branch.outputs.temp_branch_name }} bumped to ${{ needs.cleanup_changelog.outputs.next-version }}"
|
|
||||||
echo "::notice title="Version Bump Completed": $title::$message"
|
|
||||||
|
|
||||||
cleanup:
|
|
||||||
name: "Cleanup Code Quality"
|
|
||||||
needs: ["create_temp_branch", "bump_version"]
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: "Checkout ${{ github.repository }}"
|
|
||||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
|
||||||
with:
|
|
||||||
ref: ${{ needs.create_temp_branch.outputs.temp_branch_name }}
|
|
||||||
token: ${{ secrets.FISHTOWN_BOT_PAT }}
|
|
||||||
|
|
||||||
- name: "Add Homebrew To PATH"
|
|
||||||
run: |
|
|
||||||
echo "/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin" >> $GITHUB_PATH
|
|
||||||
|
|
||||||
- name: "brew install pre-commit"
|
|
||||||
run: |
|
|
||||||
brew install pre-commit
|
|
||||||
|
|
||||||
# this step will fail on whitespace errors but also correct them
|
|
||||||
- name: "Cleanup - Remove Trailing Whitespace Via Pre-commit"
|
|
||||||
continue-on-error: true
|
|
||||||
run: |
|
|
||||||
pre-commit run trailing-whitespace --files CHANGELOG.md .changes/* || true
|
|
||||||
|
|
||||||
# this step will fail on newline errors but also correct them
|
|
||||||
- name: "Cleanup - Remove Extra Newlines Via Pre-commit"
|
|
||||||
continue-on-error: true
|
|
||||||
run: |
|
|
||||||
pre-commit run end-of-file-fixer --files CHANGELOG.md .changes/* || true
|
|
||||||
|
|
||||||
- name: "Commit Version Bump to Branch"
|
|
||||||
run: |
|
|
||||||
user="Github Build Bot"
|
|
||||||
email="buildbot@fishtownanalytics.com"
|
|
||||||
git config user.name "$user"
|
|
||||||
git config user.email "$email"
|
|
||||||
git status
|
|
||||||
git add .
|
|
||||||
git commit -m "Code quality cleanup"
|
|
||||||
git push
|
|
||||||
|
|
||||||
open_pr:
|
|
||||||
name: "Open PR Against main"
|
|
||||||
needs: ["cleanup_changelog", "create_temp_branch", "cleanup"]
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
outputs:
|
|
||||||
pr_number: ${{ steps.create_pr.outputs.pull-request-number }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: "Checkout ${{ github.repository }}"
|
|
||||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
|
||||||
with:
|
|
||||||
ref: ${{ needs.create_temp_branch.outputs.temp_branch_name }}
|
|
||||||
token: ${{ secrets.FISHTOWN_BOT_PAT }}
|
|
||||||
|
|
||||||
- name: "Determine PR Title"
|
|
||||||
id: pr_title
|
|
||||||
run: |
|
|
||||||
echo "pr_title=${{ env.PR_TITLE }}" >> $GITHUB_OUTPUT
|
|
||||||
if [${{ env.PR_TITLE }} == ""]; then
|
|
||||||
echo "pr_title='Clean up changelogs and bump to version ${{ needs.cleanup_changelog.outputs.next-version }}'" >> $GITHUB_OUTPUT
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: "Determine PR Body"
|
|
||||||
id: pr_body
|
|
||||||
run: |
|
|
||||||
echo "pr_body=${{ env.PR_BODY }}" >> $GITHUB_OUTPUT
|
|
||||||
if [${{ env.PR_BODY }} == ""]; then
|
|
||||||
echo "pr_body='Clean up changelogs and bump to version ${{ needs.cleanup_changelog.outputs.next-version }}'" >> $GITHUB_OUTPUT
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: "Add Branch Details"
|
|
||||||
id: pr_body_branch
|
|
||||||
run: |
|
|
||||||
branch_details="The workflow that generated this PR also created a new branch: ${{ inputs.new_branch_name }}"
|
|
||||||
full_body="${{ steps.pr_body.outputs.pr_body }} $branch_details"
|
|
||||||
echo "pr_full_body=$full_body" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- name: "Open Pull Request"
|
|
||||||
id: create_pr
|
|
||||||
run: |
|
|
||||||
pr_url=$(gh pr create -B main -H ${{ needs.create_temp_branch.outputs.temp_branch_name }} -l "Skip Changelog" -t "${{ steps.pr_title.outputs.pr_title }}" -b "${{ steps.pr_body_branch.outputs.pr_full_body }}")
|
|
||||||
echo "pr_url=$pr_url" >> $GITHUB_OUTPUT
|
|
||||||
env:
|
|
||||||
GH_TOKEN: ${{ secrets.FISHTOWN_BOT_PAT }}
|
|
||||||
|
|
||||||
- name: "[Notification] Pull Request Opened"
|
|
||||||
run: |
|
|
||||||
message="PR opened at ${{ steps.create_pr.outputs.pr_url }}"
|
|
||||||
echo "::notice title="Pull Request Opened": $title::$message"
|
|
||||||
|
|
||||||
cut_new_branch:
|
|
||||||
# don't cut the new branch until we're done opening the PR against main
|
|
||||||
name: "Cut New Branch ${{ inputs.new_branch_name }}"
|
|
||||||
needs: [open_pr]
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: "Checkout ${{ github.repository }}"
|
|
||||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
|
||||||
with:
|
|
||||||
token: ${{ secrets.FISHTOWN_BOT_PAT }}
|
|
||||||
fetch-depth: 0
|
|
||||||
|
|
||||||
- name: "Ensure New Branch Does Not Exist"
|
|
||||||
id: check_new_branch
|
|
||||||
run: |
|
|
||||||
title="Check New Branch Existence"
|
|
||||||
if git show-ref --quiet ${{ inputs.new_branch_name }}; then
|
|
||||||
message="Branch ${{ inputs.new_branch_name }} already exists. Exiting."
|
|
||||||
echo "::error $title::$message"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: "Create New Release Branch"
|
|
||||||
run: |
|
|
||||||
git checkout -b ${{ inputs.new_branch_name }}
|
|
||||||
|
|
||||||
- name: "Push up New Branch"
|
|
||||||
run: |
|
|
||||||
#Data for commit
|
|
||||||
user="Github Build Bot"
|
|
||||||
email="buildbot@fishtownanalytics.com"
|
|
||||||
git config user.name "$user"
|
|
||||||
git config user.email "$email"
|
|
||||||
git push --set-upstream origin ${{ inputs.new_branch_name }}
|
|
||||||
|
|
||||||
- name: "[Notification] New branch created"
|
|
||||||
run: |
|
|
||||||
message="New branch ${{ inputs.new_branch_name }} created"
|
|
||||||
echo "::notice title="New branch created": $title::$message"
|
|
||||||
|
|
||||||
- name: "Bump dependencies via script"
|
|
||||||
# This bumps the dependency on dbt-core in the adapters
|
|
||||||
if: ${{ !contains(github.repository, 'dbt-core') }}
|
|
||||||
run: |
|
|
||||||
echo ${{ github.repository }}
|
|
||||||
echo "running update_dependencies script"
|
|
||||||
bash ${GITHUB_WORKSPACE}/.github/scripts/update_dependencies.sh ${{ inputs.new_branch_name }}
|
|
||||||
commit_message="bumping .latest branch variable in update_dependencies.sh to ${{ inputs.new_branch_name }}"
|
|
||||||
git status
|
|
||||||
git add .
|
|
||||||
git commit -m "$commit_message"
|
|
||||||
git push
|
|
||||||
|
|
||||||
- name: "Bump env variable via script"
|
|
||||||
# bumps the RELEASE_BRANCH variable in nightly-release.yml in adapters
|
|
||||||
if: ${{ !contains(github.repository, 'dbt-core') }}
|
|
||||||
run: |
|
|
||||||
file="./.github/scripts/update_release_branch.sh"
|
|
||||||
if test -f "$file"; then
|
|
||||||
echo ${{ github.repository }}
|
|
||||||
echo "running some script yet to be written now"
|
|
||||||
bash $file ${{ inputs.new_branch_name }}
|
|
||||||
commit_message="updating env variable to ${{ inputs.new_branch_name }} in nightly-release.yml"
|
|
||||||
git status
|
|
||||||
git add .
|
|
||||||
git commit -m "$commit_message"
|
|
||||||
git push
|
|
||||||
else
|
|
||||||
echo "no $file seen skipping step"
|
|
||||||
fi
|
|
||||||
41
.github/workflows/docs-issue.yml
vendored
41
.github/workflows/docs-issue.yml
vendored
@@ -1,41 +0,0 @@
|
|||||||
# **what?**
|
|
||||||
# Open an issue in docs.getdbt.com when an issue is labeled `user docs` and closed as completed
|
|
||||||
|
|
||||||
# **why?**
|
|
||||||
# To reduce barriers for keeping docs up to date
|
|
||||||
|
|
||||||
# **when?**
|
|
||||||
# When an issue is labeled `user docs` and is closed as completed. Can be labeled before or after the issue is closed.
|
|
||||||
|
|
||||||
|
|
||||||
name: Open issues in docs.getdbt.com repo when an issue is labeled
|
|
||||||
run-name: "Open an issue in docs.getdbt.com for issue #${{ github.event.issue.number }}"
|
|
||||||
|
|
||||||
on:
|
|
||||||
issues:
|
|
||||||
types: [labeled, closed]
|
|
||||||
|
|
||||||
defaults:
|
|
||||||
run:
|
|
||||||
shell: bash
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
issues: write # comments on issues
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
open_issues:
|
|
||||||
# we only want to run this when the issue is closed as completed and the label `user docs` has been assigned.
|
|
||||||
# If this logic does not exist in this workflow, it runs the
|
|
||||||
# risk of duplication of issues being created due to merge and label both triggering this workflow to run and neither having
|
|
||||||
# generating the comment before the other runs. This lives here instead of the shared workflow because this is where we
|
|
||||||
# decide if it should run or not.
|
|
||||||
if: |
|
|
||||||
(github.event.issue.state == 'closed' &&
|
|
||||||
github.event.issue.state_reason == 'completed' &&
|
|
||||||
contains( github.event.issue.labels.*.name, 'user docs'))
|
|
||||||
uses: dbt-labs/actions/.github/workflows/open-issue-in-repo.yml@main
|
|
||||||
with:
|
|
||||||
issue_repository: "dbt-labs/docs.getdbt.com"
|
|
||||||
issue_title: "[Core] Docs Changes Needed from ${{ github.event.repository.name }} Issue #${{ github.event.issue.number }}"
|
|
||||||
issue_body: "At a minimum, update body to include a link to the page on docs.getdbt.com requiring updates and what part(s) of the page you would like to see updated.\n Originating from this issue: https://github.com/dbt-labs/dbt-core/issues/${{ github.event.issue.number }}"
|
|
||||||
secrets: inherit
|
|
||||||
222
.github/workflows/integration.yml
vendored
Normal file
222
.github/workflows/integration.yml
vendored
Normal file
@@ -0,0 +1,222 @@
|
|||||||
|
# **what?**
|
||||||
|
# This workflow runs all integration tests for supported OS
|
||||||
|
# and python versions and core adapters. If triggered by PR,
|
||||||
|
# the workflow will only run tests for adapters related
|
||||||
|
# to code changes. Use the `test all` and `test ${adapter}`
|
||||||
|
# label to run all or additional tests. Use `ok to test`
|
||||||
|
# label to mark PRs from forked repositories that are safe
|
||||||
|
# to run integration tests for. Requires secrets to run
|
||||||
|
# against different warehouses.
|
||||||
|
|
||||||
|
# **why?**
|
||||||
|
# This checks the functionality of dbt from a user's perspective
|
||||||
|
# and attempts to catch functional regressions.
|
||||||
|
|
||||||
|
# **when?**
|
||||||
|
# This workflow will run on every push to a protected branch
|
||||||
|
# and when manually triggered. It will also run for all PRs, including
|
||||||
|
# PRs from forks. The workflow will be skipped until there is a label
|
||||||
|
# to mark the PR as safe to run.
|
||||||
|
|
||||||
|
name: Adapter Integration Tests
|
||||||
|
|
||||||
|
on:
|
||||||
|
# pushes to release branches
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- "main"
|
||||||
|
- "develop"
|
||||||
|
- "*.latest"
|
||||||
|
- "releases/*"
|
||||||
|
# all PRs, important to note that `pull_request_target` workflows
|
||||||
|
# will run in the context of the target branch of a PR
|
||||||
|
pull_request_target:
|
||||||
|
# manual tigger
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
# explicitly turn off permissions for `GITHUB_TOKEN`
|
||||||
|
permissions: read-all
|
||||||
|
|
||||||
|
# will cancel previous workflows triggered by the same event and for the same ref for PRs or same SHA otherwise
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.event_name }}-${{ contains(github.event_name, 'pull_request') && github.event.pull_request.head.ref || github.sha }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
# sets default shell to bash, for all operating systems
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
# generate test metadata about what files changed and the testing matrix to use
|
||||||
|
test-metadata:
|
||||||
|
# run if not a PR from a forked repository or has a label to mark as safe to test
|
||||||
|
if: >-
|
||||||
|
github.event_name != 'pull_request_target' ||
|
||||||
|
github.event.pull_request.head.repo.full_name == github.repository ||
|
||||||
|
contains(github.event.pull_request.labels.*.name, 'ok to test')
|
||||||
|
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
outputs:
|
||||||
|
matrix: ${{ steps.generate-matrix.outputs.result }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Check out the repository (non-PR)
|
||||||
|
if: github.event_name != 'pull_request_target'
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
with:
|
||||||
|
persist-credentials: false
|
||||||
|
|
||||||
|
- name: Check out the repository (PR)
|
||||||
|
if: github.event_name == 'pull_request_target'
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
with:
|
||||||
|
persist-credentials: false
|
||||||
|
ref: ${{ github.event.pull_request.head.sha }}
|
||||||
|
|
||||||
|
- name: Check if relevant files changed
|
||||||
|
# https://github.com/marketplace/actions/paths-changes-filter
|
||||||
|
# For each filter, it sets output variable named by the filter to the text:
|
||||||
|
# 'true' - if any of changed files matches any of filter rules
|
||||||
|
# 'false' - if none of changed files matches any of filter rules
|
||||||
|
# also, returns:
|
||||||
|
# `changes` - JSON array with names of all filters matching any of the changed files
|
||||||
|
uses: dorny/paths-filter@v2
|
||||||
|
id: get-changes
|
||||||
|
with:
|
||||||
|
token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
filters: |
|
||||||
|
postgres:
|
||||||
|
- 'core/**'
|
||||||
|
- 'plugins/postgres/**'
|
||||||
|
- 'dev-requirements.txt'
|
||||||
|
|
||||||
|
- name: Generate integration test matrix
|
||||||
|
id: generate-matrix
|
||||||
|
uses: actions/github-script@v4
|
||||||
|
env:
|
||||||
|
CHANGES: ${{ steps.get-changes.outputs.changes }}
|
||||||
|
with:
|
||||||
|
script: |
|
||||||
|
const script = require('./.github/scripts/integration-test-matrix.js')
|
||||||
|
const matrix = script({ context })
|
||||||
|
console.log(matrix)
|
||||||
|
return matrix
|
||||||
|
|
||||||
|
test:
|
||||||
|
name: ${{ matrix.adapter }} / python ${{ matrix.python-version }} / ${{ matrix.os }}
|
||||||
|
|
||||||
|
# run if not a PR from a forked repository or has a label to mark as safe to test
|
||||||
|
# also checks that the matrix generated is not empty
|
||||||
|
if: >-
|
||||||
|
needs.test-metadata.outputs.matrix &&
|
||||||
|
fromJSON( needs.test-metadata.outputs.matrix ).include[0] &&
|
||||||
|
(
|
||||||
|
github.event_name != 'pull_request_target' ||
|
||||||
|
github.event.pull_request.head.repo.full_name == github.repository ||
|
||||||
|
contains(github.event.pull_request.labels.*.name, 'ok to test')
|
||||||
|
)
|
||||||
|
|
||||||
|
runs-on: ${{ matrix.os }}
|
||||||
|
|
||||||
|
needs: test-metadata
|
||||||
|
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix: ${{ fromJSON(needs.test-metadata.outputs.matrix) }}
|
||||||
|
|
||||||
|
env:
|
||||||
|
TOXENV: integration-${{ matrix.adapter }}
|
||||||
|
PYTEST_ADDOPTS: "-v --color=yes -n4 --csv integration_results.csv"
|
||||||
|
DBT_INVOCATION_ENV: github-actions
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Check out the repository
|
||||||
|
if: github.event_name != 'pull_request_target'
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
with:
|
||||||
|
persist-credentials: false
|
||||||
|
|
||||||
|
# explicity checkout the branch for the PR,
|
||||||
|
# this is necessary for the `pull_request_target` event
|
||||||
|
- name: Check out the repository (PR)
|
||||||
|
if: github.event_name == 'pull_request_target'
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
with:
|
||||||
|
persist-credentials: false
|
||||||
|
ref: ${{ github.event.pull_request.head.sha }}
|
||||||
|
|
||||||
|
- name: Set up Python ${{ matrix.python-version }}
|
||||||
|
uses: actions/setup-python@v2
|
||||||
|
with:
|
||||||
|
python-version: ${{ matrix.python-version }}
|
||||||
|
|
||||||
|
- name: Set up postgres (linux)
|
||||||
|
if: |
|
||||||
|
matrix.adapter == 'postgres' &&
|
||||||
|
runner.os == 'Linux'
|
||||||
|
uses: ./.github/actions/setup-postgres-linux
|
||||||
|
|
||||||
|
- name: Set up postgres (macos)
|
||||||
|
if: |
|
||||||
|
matrix.adapter == 'postgres' &&
|
||||||
|
runner.os == 'macOS'
|
||||||
|
uses: ./.github/actions/setup-postgres-macos
|
||||||
|
|
||||||
|
- name: Set up postgres (windows)
|
||||||
|
if: |
|
||||||
|
matrix.adapter == 'postgres' &&
|
||||||
|
runner.os == 'Windows'
|
||||||
|
uses: ./.github/actions/setup-postgres-windows
|
||||||
|
|
||||||
|
- name: Install python dependencies
|
||||||
|
run: |
|
||||||
|
pip install --user --upgrade pip
|
||||||
|
pip install tox
|
||||||
|
pip --version
|
||||||
|
tox --version
|
||||||
|
|
||||||
|
- name: Run tox (postgres)
|
||||||
|
if: matrix.adapter == 'postgres'
|
||||||
|
run: tox
|
||||||
|
|
||||||
|
- uses: actions/upload-artifact@v2
|
||||||
|
if: always()
|
||||||
|
with:
|
||||||
|
name: logs
|
||||||
|
path: ./logs
|
||||||
|
|
||||||
|
- name: Get current date
|
||||||
|
if: always()
|
||||||
|
id: date
|
||||||
|
run: echo "::set-output name=date::$(date +'%Y-%m-%dT%H_%M_%S')" #no colons allowed for artifacts
|
||||||
|
|
||||||
|
- uses: actions/upload-artifact@v2
|
||||||
|
if: always()
|
||||||
|
with:
|
||||||
|
name: integration_results_${{ matrix.python-version }}_${{ matrix.os }}_${{ matrix.adapter }}-${{ steps.date.outputs.date }}.csv
|
||||||
|
path: integration_results.csv
|
||||||
|
|
||||||
|
require-label-comment:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
needs: test
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
pull-requests: write
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Needs permission PR comment
|
||||||
|
if: >-
|
||||||
|
needs.test.result == 'skipped' &&
|
||||||
|
github.event_name == 'pull_request_target' &&
|
||||||
|
github.event.pull_request.head.repo.full_name != github.repository
|
||||||
|
uses: unsplash/comment-on-pr@master
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
with:
|
||||||
|
msg: |
|
||||||
|
"You do not have permissions to run integration tests, @dbt-labs/core "\
|
||||||
|
"needs to label this PR with `ok to test` in order to run integration tests!"
|
||||||
|
check_for_duplicate_msg: true
|
||||||
406
.github/workflows/main.yml
vendored
406
.github/workflows/main.yml
vendored
@@ -1,8 +1,9 @@
|
|||||||
# **what?**
|
# **what?**
|
||||||
# Runs code quality checks, unit tests, integration tests and
|
# Runs code quality checks, unit tests, and verifies python build on
|
||||||
# verifies python build on all code commited to the repository. This workflow
|
# all code commited to the repository. This workflow should not
|
||||||
# should not require any secrets since it runs for PRs from forked repos. By
|
# require any secrets since it runs for PRs from forked repos.
|
||||||
# default, secrets are not passed to workflows running from a forked repos.
|
# By default, secrets are not passed to workflows running from
|
||||||
|
# a forked repo.
|
||||||
|
|
||||||
# **why?**
|
# **why?**
|
||||||
# Ensure code for dbt meets a certain quality standard.
|
# Ensure code for dbt meets a certain quality standard.
|
||||||
@@ -17,11 +18,10 @@ on:
|
|||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- "main"
|
- "main"
|
||||||
|
- "develop"
|
||||||
- "*.latest"
|
- "*.latest"
|
||||||
- "releases/*"
|
- "releases/*"
|
||||||
pull_request:
|
pull_request:
|
||||||
merge_group:
|
|
||||||
types: [checks_requested]
|
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
permissions: read-all
|
permissions: read-all
|
||||||
@@ -35,321 +35,85 @@ defaults:
|
|||||||
run:
|
run:
|
||||||
shell: bash
|
shell: bash
|
||||||
|
|
||||||
# top-level adjustments can be made here
|
|
||||||
env:
|
|
||||||
# number of parallel processes to spawn for python integration testing
|
|
||||||
PYTHON_INTEGRATION_TEST_WORKERS: 5
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
code-quality:
|
code-quality:
|
||||||
name: code-quality
|
name: ${{ matrix.toxenv }}
|
||||||
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 10
|
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
toxenv: [flake8, mypy]
|
||||||
|
|
||||||
|
env:
|
||||||
|
TOXENV: ${{ matrix.toxenv }}
|
||||||
|
PYTEST_ADDOPTS: "-v --color=yes"
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Check out the repository
|
- name: Check out the repository
|
||||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
uses: actions/checkout@v2
|
||||||
|
with:
|
||||||
|
persist-credentials: false
|
||||||
|
|
||||||
- name: Set up Python
|
- name: Set up Python
|
||||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # actions/setup-python@v6
|
uses: actions/setup-python@v2
|
||||||
with:
|
|
||||||
python-version: "3.10"
|
|
||||||
|
|
||||||
- name: Install python dependencies
|
- name: Install python dependencies
|
||||||
run: |
|
run: |
|
||||||
python -m pip install --user --upgrade pip
|
pip install --user --upgrade pip
|
||||||
python -m pip --version
|
pip install tox
|
||||||
python -m pip install hatch
|
pip --version
|
||||||
cd core
|
tox --version
|
||||||
hatch run setup
|
|
||||||
|
|
||||||
- name: Verify dbt installation
|
- name: Run tox
|
||||||
run: |
|
run: tox
|
||||||
cd core
|
|
||||||
hatch run dbt --version
|
|
||||||
|
|
||||||
- name: Run pre-commit hooks
|
|
||||||
run: |
|
|
||||||
cd core
|
|
||||||
hatch run code-quality
|
|
||||||
|
|
||||||
unit:
|
unit:
|
||||||
name: "unit test / python ${{ matrix.python-version }}"
|
name: unit test / python ${{ matrix.python-version }}
|
||||||
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 10
|
|
||||||
|
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
python-version: ["3.10", "3.11", "3.12", "3.13"]
|
python-version: [3.7, 3.8, 3.9]
|
||||||
|
|
||||||
|
env:
|
||||||
|
TOXENV: "unit"
|
||||||
|
PYTEST_ADDOPTS: "-v --color=yes --csv unit_results.csv"
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Check out the repository
|
- name: Check out the repository
|
||||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
uses: actions/checkout@v2
|
||||||
|
with:
|
||||||
|
persist-credentials: false
|
||||||
|
|
||||||
- name: Set up Python ${{ matrix.python-version }}
|
- name: Set up Python ${{ matrix.python-version }}
|
||||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # actions/setup-python@v6
|
uses: actions/setup-python@v2
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.python-version }}
|
python-version: ${{ matrix.python-version }}
|
||||||
|
|
||||||
- name: Install python dependencies
|
- name: Install python dependencies
|
||||||
run: |
|
run: |
|
||||||
python -m pip install --user --upgrade pip
|
pip install --user --upgrade pip
|
||||||
python -m pip --version
|
pip install tox
|
||||||
python -m pip install hatch
|
pip --version
|
||||||
hatch --version
|
tox --version
|
||||||
|
|
||||||
- name: Run unit tests
|
- name: Run tox
|
||||||
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # nick-fields/retry@v3
|
run: tox
|
||||||
with:
|
|
||||||
timeout_minutes: 10
|
|
||||||
max_attempts: 3
|
|
||||||
command: cd core && hatch run ci:unit-tests
|
|
||||||
|
|
||||||
- name: Get current date
|
- name: Get current date
|
||||||
if: always()
|
if: always()
|
||||||
id: date
|
id: date
|
||||||
run: |
|
run: echo "::set-output name=date::$(date +'%Y-%m-%dT%H_%M_%S')" #no colons allowed for artifacts
|
||||||
CURRENT_DATE=$(date +'%Y-%m-%dT%H_%M_%S') # no colons allowed for artifacts
|
|
||||||
echo "date=$CURRENT_DATE" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- name: Upload Unit Test Coverage to Codecov
|
- uses: actions/upload-artifact@v2
|
||||||
if: ${{ matrix.python-version == '3.11' }}
|
|
||||||
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # codecov/codecov-action@v5
|
|
||||||
with:
|
|
||||||
token: ${{ secrets.CODECOV_TOKEN }}
|
|
||||||
flags: unit
|
|
||||||
fail_ci_if_error: false
|
|
||||||
|
|
||||||
integration-metadata:
|
|
||||||
name: integration test metadata generation
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
outputs:
|
|
||||||
split-groups: ${{ steps.generate-split-groups.outputs.split-groups }}
|
|
||||||
include: ${{ steps.generate-include.outputs.include }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: generate split-groups
|
|
||||||
id: generate-split-groups
|
|
||||||
run: |
|
|
||||||
MATRIX_JSON="["
|
|
||||||
for B in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
|
|
||||||
MATRIX_JSON+=$(sed 's/^/"/;s/$/"/' <<< "${B}")
|
|
||||||
done
|
|
||||||
MATRIX_JSON="${MATRIX_JSON//\"\"/\", \"}"
|
|
||||||
MATRIX_JSON+="]"
|
|
||||||
echo "split-groups=${MATRIX_JSON}"
|
|
||||||
echo "split-groups=${MATRIX_JSON}" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- name: generate include
|
|
||||||
id: generate-include
|
|
||||||
run: |
|
|
||||||
INCLUDE=('"python-version":"3.10","os":"windows-latest"' '"python-version":"3.10","os":"macos-14"' )
|
|
||||||
INCLUDE_GROUPS="["
|
|
||||||
for include in ${INCLUDE[@]}; do
|
|
||||||
for group in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
|
|
||||||
INCLUDE_GROUPS+=$(sed 's/$/, /' <<< "{\"split-group\":\"${group}\",${include}}")
|
|
||||||
done
|
|
||||||
done
|
|
||||||
INCLUDE_GROUPS=$(echo $INCLUDE_GROUPS | sed 's/,*$//g')
|
|
||||||
INCLUDE_GROUPS+="]"
|
|
||||||
echo "include=${INCLUDE_GROUPS}"
|
|
||||||
echo "include=${INCLUDE_GROUPS}" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
integration-postgres:
|
|
||||||
name: "(${{ matrix.split-group }}) integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}"
|
|
||||||
|
|
||||||
runs-on: ${{ matrix.os }}
|
|
||||||
timeout-minutes: 30
|
|
||||||
needs:
|
|
||||||
- integration-metadata
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
python-version: ["3.10", "3.11", "3.12", "3.13"]
|
|
||||||
os: ["ubuntu-latest"]
|
|
||||||
split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
|
|
||||||
env:
|
|
||||||
DBT_INVOCATION_ENV: github-actions
|
|
||||||
DBT_TEST_USER_1: dbt_test_user_1
|
|
||||||
DBT_TEST_USER_2: dbt_test_user_2
|
|
||||||
DBT_TEST_USER_3: dbt_test_user_3
|
|
||||||
DD_CIVISIBILITY_AGENTLESS_ENABLED: true
|
|
||||||
DD_API_KEY: ${{ secrets.DATADOG_API_KEY }}
|
|
||||||
DD_SITE: datadoghq.com
|
|
||||||
DD_ENV: ci
|
|
||||||
DD_SERVICE: ${{ github.event.repository.name }}
|
|
||||||
|
|
||||||
services:
|
|
||||||
# Label used to access the service container
|
|
||||||
postgres:
|
|
||||||
# Docker Hub image
|
|
||||||
image: postgres
|
|
||||||
# Provide the password for postgres
|
|
||||||
env:
|
|
||||||
POSTGRES_PASSWORD: password
|
|
||||||
POSTGRES_USER: postgres
|
|
||||||
# Set health checks to wait until postgres has started
|
|
||||||
options: >-
|
|
||||||
--health-cmd pg_isready
|
|
||||||
--health-interval 10s
|
|
||||||
--health-timeout 5s
|
|
||||||
--health-retries 5
|
|
||||||
ports:
|
|
||||||
- 5432:5432
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Check out the repository
|
|
||||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Set up Python ${{ matrix.python-version }}
|
|
||||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # actions/setup-python@v6
|
|
||||||
with:
|
|
||||||
python-version: ${{ matrix.python-version }}
|
|
||||||
|
|
||||||
- name: Run postgres setup script
|
|
||||||
run: |
|
|
||||||
./scripts/setup_db.sh
|
|
||||||
env:
|
|
||||||
PGHOST: localhost
|
|
||||||
PGPORT: 5432
|
|
||||||
PGPASSWORD: password
|
|
||||||
|
|
||||||
- name: Install python tools
|
|
||||||
run: |
|
|
||||||
python -m pip install --user --upgrade pip
|
|
||||||
python -m pip --version
|
|
||||||
python -m pip install hatch
|
|
||||||
hatch --version
|
|
||||||
|
|
||||||
- name: Run integration tests
|
|
||||||
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # nick-fields/retry@v3
|
|
||||||
with:
|
|
||||||
timeout_minutes: 30
|
|
||||||
max_attempts: 3
|
|
||||||
shell: bash
|
|
||||||
command: cd core && hatch run ci:integration-tests -- --ddtrace --splits ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }} --group ${{ matrix.split-group }}
|
|
||||||
|
|
||||||
- name: Get current date
|
|
||||||
if: always()
|
|
||||||
id: date
|
|
||||||
run: |
|
|
||||||
CURRENT_DATE=$(date +'%Y-%m-%dT%H_%M_%S') # no colons allowed for artifacts
|
|
||||||
echo "date=$CURRENT_DATE" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # actions/upload-artifact@v4
|
|
||||||
if: always()
|
if: always()
|
||||||
with:
|
with:
|
||||||
name: logs_${{ matrix.python-version }}_${{ matrix.os }}_${{ matrix.split-group }}_${{ steps.date.outputs.date }}
|
name: unit_results_${{ matrix.python-version }}-${{ steps.date.outputs.date }}.csv
|
||||||
path: ./logs
|
path: unit_results.csv
|
||||||
|
|
||||||
- name: Upload Integration Test Coverage to Codecov
|
|
||||||
if: ${{ matrix.python-version == '3.11' }}
|
|
||||||
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # codecov/codecov-action@v5
|
|
||||||
with:
|
|
||||||
token: ${{ secrets.CODECOV_TOKEN }}
|
|
||||||
flags: integration
|
|
||||||
fail_ci_if_error: false
|
|
||||||
|
|
||||||
integration-mac-windows:
|
|
||||||
name: (${{ matrix.split-group }}) integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}
|
|
||||||
|
|
||||||
runs-on: ${{ matrix.os }}
|
|
||||||
timeout-minutes: 30
|
|
||||||
needs:
|
|
||||||
- integration-metadata
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
# already includes split group and runs mac + windows
|
|
||||||
include: ${{ fromJson(needs.integration-metadata.outputs.include) }}
|
|
||||||
env:
|
|
||||||
DBT_INVOCATION_ENV: github-actions
|
|
||||||
DBT_TEST_USER_1: dbt_test_user_1
|
|
||||||
DBT_TEST_USER_2: dbt_test_user_2
|
|
||||||
DBT_TEST_USER_3: dbt_test_user_3
|
|
||||||
DD_CIVISIBILITY_AGENTLESS_ENABLED: true
|
|
||||||
DD_API_KEY: ${{ secrets.DATADOG_API_KEY }}
|
|
||||||
DD_SITE: datadoghq.com
|
|
||||||
DD_ENV: ci
|
|
||||||
DD_SERVICE: ${{ github.event.repository.name }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Check out the repository
|
|
||||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Set up Python ${{ matrix.python-version }}
|
|
||||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # actions/setup-python@v6
|
|
||||||
with:
|
|
||||||
python-version: ${{ matrix.python-version }}
|
|
||||||
|
|
||||||
- name: Set up postgres (macos)
|
|
||||||
if: runner.os == 'macOS'
|
|
||||||
|
|
||||||
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # nick-fields/retry@v3
|
|
||||||
with:
|
|
||||||
timeout_minutes: 10
|
|
||||||
max_attempts: 3
|
|
||||||
command: ./scripts/setup_db.sh
|
|
||||||
|
|
||||||
- name: Set up postgres (windows)
|
|
||||||
if: runner.os == 'Windows'
|
|
||||||
uses: ./.github/actions/setup-postgres-windows
|
|
||||||
|
|
||||||
- name: Install python tools
|
|
||||||
run: |
|
|
||||||
python -m pip install --user --upgrade pip
|
|
||||||
python -m pip --version
|
|
||||||
python -m pip install hatch
|
|
||||||
hatch --version
|
|
||||||
|
|
||||||
- name: Run integration tests
|
|
||||||
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # nick-fields/retry@v3
|
|
||||||
with:
|
|
||||||
timeout_minutes: 30
|
|
||||||
max_attempts: 3
|
|
||||||
shell: bash
|
|
||||||
command: cd core && hatch run ci:integration-tests -- --ddtrace --splits ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }} --group ${{ matrix.split-group }}
|
|
||||||
|
|
||||||
- name: Get current date
|
|
||||||
if: always()
|
|
||||||
id: date
|
|
||||||
run: |
|
|
||||||
CURRENT_DATE=$(date +'%Y-%m-%dT%H_%M_%S') # no colons allowed for artifacts
|
|
||||||
echo "date=$CURRENT_DATE" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # actions/upload-artifact@v4
|
|
||||||
if: always()
|
|
||||||
with:
|
|
||||||
name: logs_${{ matrix.python-version }}_${{ matrix.os }}_${{ matrix.split-group }}_${{ steps.date.outputs.date }}
|
|
||||||
path: ./logs
|
|
||||||
|
|
||||||
- name: Upload Integration Test Coverage to Codecov
|
|
||||||
if: ${{ matrix.python-version == '3.11' }}
|
|
||||||
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # codecov/codecov-action@v5
|
|
||||||
with:
|
|
||||||
token: ${{ secrets.CODECOV_TOKEN }}
|
|
||||||
flags: integration
|
|
||||||
fail_ci_if_error: false
|
|
||||||
|
|
||||||
integration-report:
|
|
||||||
if: ${{ always() }}
|
|
||||||
name: Integration Test Suite
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: [integration-mac-windows, integration-postgres]
|
|
||||||
steps:
|
|
||||||
- name: "Integration Tests Failed"
|
|
||||||
if: ${{ contains(needs.integration-mac-windows.result, 'failure') || contains(needs.integration-mac-windows.result, 'cancelled') || contains(needs.integration-postgres.result, 'failure') || contains(needs.integration-postgres.result, 'cancelled') }}
|
|
||||||
# when this is true the next step won't execute
|
|
||||||
run: |
|
|
||||||
echo "::notice title='Integration test suite failed'"
|
|
||||||
exit 1
|
|
||||||
|
|
||||||
- name: "Integration Tests Passed"
|
|
||||||
run: |
|
|
||||||
echo "::notice title='Integration test suite passed'"
|
|
||||||
|
|
||||||
build:
|
build:
|
||||||
name: build packages
|
name: build packages
|
||||||
@@ -358,18 +122,20 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Check out the repository
|
- name: Check out the repository
|
||||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
uses: actions/checkout@v2
|
||||||
|
with:
|
||||||
|
persist-credentials: false
|
||||||
|
|
||||||
- name: Set up Python
|
- name: Set up Python
|
||||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # actions/setup-python@v6
|
uses: actions/setup-python@v2
|
||||||
with:
|
with:
|
||||||
python-version: "3.10"
|
python-version: 3.8
|
||||||
|
|
||||||
- name: Install python dependencies
|
- name: Install python dependencies
|
||||||
run: |
|
run: |
|
||||||
python -m pip install --user --upgrade pip
|
pip install --user --upgrade pip
|
||||||
python -m pip install --upgrade hatch twine check-wheel-contents
|
pip install --upgrade setuptools wheel twine check-wheel-contents
|
||||||
python -m pip --version
|
pip --version
|
||||||
|
|
||||||
- name: Build distributions
|
- name: Build distributions
|
||||||
run: ./scripts/build-dist.sh
|
run: ./scripts/build-dist.sh
|
||||||
@@ -377,7 +143,65 @@ jobs:
|
|||||||
- name: Show distributions
|
- name: Show distributions
|
||||||
run: ls -lh dist/
|
run: ls -lh dist/
|
||||||
|
|
||||||
- name: Check and verify distributions
|
- name: Check distribution descriptions
|
||||||
run: |
|
run: |
|
||||||
cd core
|
twine check dist/*
|
||||||
hatch run build:check-all
|
|
||||||
|
- name: Check wheel contents
|
||||||
|
run: |
|
||||||
|
check-wheel-contents dist/*.whl --ignore W007,W008
|
||||||
|
|
||||||
|
- uses: actions/upload-artifact@v2
|
||||||
|
with:
|
||||||
|
name: dist
|
||||||
|
path: dist/
|
||||||
|
|
||||||
|
test-build:
|
||||||
|
name: verify packages / python ${{ matrix.python-version }} / ${{ matrix.os }}
|
||||||
|
|
||||||
|
needs: build
|
||||||
|
|
||||||
|
runs-on: ${{ matrix.os }}
|
||||||
|
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
os: [ubuntu-latest, macos-latest, windows-latest]
|
||||||
|
python-version: [3.7, 3.8, 3.9]
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Set up Python ${{ matrix.python-version }}
|
||||||
|
uses: actions/setup-python@v2
|
||||||
|
with:
|
||||||
|
python-version: ${{ matrix.python-version }}
|
||||||
|
|
||||||
|
- name: Install python dependencies
|
||||||
|
run: |
|
||||||
|
pip install --user --upgrade pip
|
||||||
|
pip install --upgrade wheel
|
||||||
|
pip --version
|
||||||
|
|
||||||
|
- uses: actions/download-artifact@v2
|
||||||
|
with:
|
||||||
|
name: dist
|
||||||
|
path: dist/
|
||||||
|
|
||||||
|
- name: Show distributions
|
||||||
|
run: ls -lh dist/
|
||||||
|
|
||||||
|
- name: Install wheel distributions
|
||||||
|
run: |
|
||||||
|
find ./dist/*.whl -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/
|
||||||
|
|
||||||
|
- name: Check wheel distributions
|
||||||
|
run: |
|
||||||
|
dbt --version
|
||||||
|
|
||||||
|
- name: Install source distributions
|
||||||
|
# ignore dbt-1.0.0, which intentionally raises an error when installed from source
|
||||||
|
run: |
|
||||||
|
find ./dist/dbt-[a-z]*.gz -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/
|
||||||
|
|
||||||
|
- name: Check source distributions
|
||||||
|
run: |
|
||||||
|
dbt --version
|
||||||
|
|||||||
97
.github/workflows/nightly-release.yml
vendored
97
.github/workflows/nightly-release.yml
vendored
@@ -1,97 +0,0 @@
|
|||||||
# **what?**
|
|
||||||
# Nightly releases to GitHub and PyPI. This workflow produces the following outcome:
|
|
||||||
# - generate and validate data for night release (commit SHA, version number, release branch);
|
|
||||||
# - pass data to release workflow;
|
|
||||||
# - night release will be pushed to GitHub as a draft release;
|
|
||||||
# - night build will be pushed to test PyPI;
|
|
||||||
#
|
|
||||||
# **why?**
|
|
||||||
# Ensure an automated and tested release process for nightly builds
|
|
||||||
#
|
|
||||||
# **when?**
|
|
||||||
# This workflow runs on schedule or can be run manually on demand.
|
|
||||||
|
|
||||||
name: Nightly Test Release to GitHub and PyPI
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_dispatch: # for manual triggering
|
|
||||||
schedule:
|
|
||||||
- cron: 0 9 * * *
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: write # this is the permission that allows creating a new release
|
|
||||||
packages: write # this is the permission that allows Docker release
|
|
||||||
|
|
||||||
defaults:
|
|
||||||
run:
|
|
||||||
shell: bash
|
|
||||||
|
|
||||||
env:
|
|
||||||
RELEASE_BRANCH: "main"
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
aggregate-release-data:
|
|
||||||
runs-on: ${{ vars.UBUNTU_LATEST }}
|
|
||||||
|
|
||||||
outputs:
|
|
||||||
version_number: ${{ steps.nightly-release-version.outputs.number }}
|
|
||||||
release_branch: ${{ steps.release-branch.outputs.name }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: "Checkout ${{ github.repository }} Branch ${{ env.RELEASE_BRANCH }}"
|
|
||||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
|
||||||
with:
|
|
||||||
ref: ${{ env.RELEASE_BRANCH }}
|
|
||||||
|
|
||||||
- name: "Get Current Version Number"
|
|
||||||
id: version-number-sources
|
|
||||||
run: |
|
|
||||||
current_version=$(grep '^version = ' core/dbt/__version__.py | sed 's/version = "\(.*\)"/\1/')
|
|
||||||
echo "current_version=$current_version" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- name: "Audit Version And Parse Into Parts"
|
|
||||||
id: semver
|
|
||||||
uses: dbt-labs/actions/parse-semver@v1.1.0
|
|
||||||
with:
|
|
||||||
version: ${{ steps.version-number-sources.outputs.current_version }}
|
|
||||||
|
|
||||||
- name: "Get Current Date"
|
|
||||||
id: current-date
|
|
||||||
run: echo "date=$(date +'%m%d%Y')" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- name: "Generate Nightly Release Version Number"
|
|
||||||
id: nightly-release-version
|
|
||||||
run: |
|
|
||||||
number="${{ steps.semver.outputs.version }}.dev${{ steps.current-date.outputs.date }}"
|
|
||||||
echo "number=$number" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- name: "Audit Nightly Release Version And Parse Into Parts"
|
|
||||||
uses: dbt-labs/actions/parse-semver@v1.1.0
|
|
||||||
with:
|
|
||||||
version: ${{ steps.nightly-release-version.outputs.number }}
|
|
||||||
|
|
||||||
- name: "Set Release Branch"
|
|
||||||
id: release-branch
|
|
||||||
run: |
|
|
||||||
echo "name=${{ env.RELEASE_BRANCH }}" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
log-outputs-aggregate-release-data:
|
|
||||||
runs-on: ${{ vars.UBUNTU_LATEST }}
|
|
||||||
needs: [aggregate-release-data]
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: "[DEBUG] Log Outputs"
|
|
||||||
run: |
|
|
||||||
echo version_number: ${{ needs.aggregate-release-data.outputs.version_number }}
|
|
||||||
echo release_branch: ${{ needs.aggregate-release-data.outputs.release_branch }}
|
|
||||||
|
|
||||||
release-github-pypi:
|
|
||||||
needs: [aggregate-release-data]
|
|
||||||
|
|
||||||
uses: ./.github/workflows/release.yml
|
|
||||||
with:
|
|
||||||
target_branch: ${{ needs.aggregate-release-data.outputs.release_branch }}
|
|
||||||
version_number: ${{ needs.aggregate-release-data.outputs.version_number }}
|
|
||||||
test_run: true
|
|
||||||
nightly_release: true
|
|
||||||
secrets: inherit
|
|
||||||
176
.github/workflows/performance.yml
vendored
Normal file
176
.github/workflows/performance.yml
vendored
Normal file
@@ -0,0 +1,176 @@
|
|||||||
|
name: Performance Regression Tests
|
||||||
|
# Schedule triggers
|
||||||
|
on:
|
||||||
|
# runs twice a day at 10:05am and 10:05pm
|
||||||
|
schedule:
|
||||||
|
- cron: "5 10,22 * * *"
|
||||||
|
# Allows you to run this workflow manually from the Actions tab
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
# checks fmt of runner code
|
||||||
|
# purposefully not a dependency of any other job
|
||||||
|
# will block merging, but not prevent developing
|
||||||
|
fmt:
|
||||||
|
name: Cargo fmt
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: actions-rs/toolchain@v1
|
||||||
|
with:
|
||||||
|
profile: minimal
|
||||||
|
toolchain: stable
|
||||||
|
override: true
|
||||||
|
- run: rustup component add rustfmt
|
||||||
|
- uses: actions-rs/cargo@v1
|
||||||
|
with:
|
||||||
|
command: fmt
|
||||||
|
args: --manifest-path performance/runner/Cargo.toml --all -- --check
|
||||||
|
|
||||||
|
# runs any tests associated with the runner
|
||||||
|
# these tests make sure the runner logic is correct
|
||||||
|
test-runner:
|
||||||
|
name: Test Runner
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
env:
|
||||||
|
# turns errors into warnings
|
||||||
|
RUSTFLAGS: "-D warnings"
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: actions-rs/toolchain@v1
|
||||||
|
with:
|
||||||
|
profile: minimal
|
||||||
|
toolchain: stable
|
||||||
|
override: true
|
||||||
|
- uses: actions-rs/cargo@v1
|
||||||
|
with:
|
||||||
|
command: test
|
||||||
|
args: --manifest-path performance/runner/Cargo.toml
|
||||||
|
|
||||||
|
# build an optimized binary to be used as the runner in later steps
|
||||||
|
build-runner:
|
||||||
|
needs: [test-runner]
|
||||||
|
name: Build Runner
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
env:
|
||||||
|
RUSTFLAGS: "-D warnings"
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: actions-rs/toolchain@v1
|
||||||
|
with:
|
||||||
|
profile: minimal
|
||||||
|
toolchain: stable
|
||||||
|
override: true
|
||||||
|
- uses: actions-rs/cargo@v1
|
||||||
|
with:
|
||||||
|
command: build
|
||||||
|
args: --release --manifest-path performance/runner/Cargo.toml
|
||||||
|
- uses: actions/upload-artifact@v2
|
||||||
|
with:
|
||||||
|
name: runner
|
||||||
|
path: performance/runner/target/release/runner
|
||||||
|
|
||||||
|
# run the performance measurements on the current or default branch
|
||||||
|
measure-dev:
|
||||||
|
needs: [build-runner]
|
||||||
|
name: Measure Dev Branch
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: checkout dev
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Setup Python
|
||||||
|
uses: actions/setup-python@v2.2.2
|
||||||
|
with:
|
||||||
|
python-version: "3.8"
|
||||||
|
- name: install dbt
|
||||||
|
run: pip install -r dev-requirements.txt -r editable-requirements.txt
|
||||||
|
- name: install hyperfine
|
||||||
|
run: wget https://github.com/sharkdp/hyperfine/releases/download/v1.11.0/hyperfine_1.11.0_amd64.deb && sudo dpkg -i hyperfine_1.11.0_amd64.deb
|
||||||
|
- uses: actions/download-artifact@v2
|
||||||
|
with:
|
||||||
|
name: runner
|
||||||
|
- name: change permissions
|
||||||
|
run: chmod +x ./runner
|
||||||
|
- name: run
|
||||||
|
run: ./runner measure -b dev -p ${{ github.workspace }}/performance/projects/
|
||||||
|
- uses: actions/upload-artifact@v2
|
||||||
|
with:
|
||||||
|
name: dev-results
|
||||||
|
path: performance/results/
|
||||||
|
|
||||||
|
# run the performance measurements on the release branch which we use
|
||||||
|
# as a performance baseline. This part takes by far the longest, so
|
||||||
|
# we do everything we can first so the job fails fast.
|
||||||
|
# -----
|
||||||
|
# we need to checkout dbt twice in this job: once for the baseline dbt
|
||||||
|
# version, and once to get the latest regression testing projects,
|
||||||
|
# metrics, and runner code from the develop or current branch so that
|
||||||
|
# the calculations match for both versions of dbt we are comparing.
|
||||||
|
measure-baseline:
|
||||||
|
needs: [build-runner]
|
||||||
|
name: Measure Baseline Branch
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: checkout latest
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
with:
|
||||||
|
ref: "0.20.latest"
|
||||||
|
- name: Setup Python
|
||||||
|
uses: actions/setup-python@v2.2.2
|
||||||
|
with:
|
||||||
|
python-version: "3.8"
|
||||||
|
- name: move repo up a level
|
||||||
|
run: mkdir ${{ github.workspace }}/../baseline/ && cp -r ${{ github.workspace }} ${{ github.workspace }}/../baseline
|
||||||
|
- name: "[debug] ls new dbt location"
|
||||||
|
run: ls ${{ github.workspace }}/../baseline/dbt/
|
||||||
|
# installation creates egg-links so we have to preserve source
|
||||||
|
- name: install dbt from new location
|
||||||
|
run: cd ${{ github.workspace }}/../baseline/dbt/ && pip install -r dev-requirements.txt -r editable-requirements.txt
|
||||||
|
# checkout the current branch to get all the target projects
|
||||||
|
# this deletes the old checked out code which is why we had to copy before
|
||||||
|
- name: checkout dev
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: install hyperfine
|
||||||
|
run: wget https://github.com/sharkdp/hyperfine/releases/download/v1.11.0/hyperfine_1.11.0_amd64.deb && sudo dpkg -i hyperfine_1.11.0_amd64.deb
|
||||||
|
- uses: actions/download-artifact@v2
|
||||||
|
with:
|
||||||
|
name: runner
|
||||||
|
- name: change permissions
|
||||||
|
run: chmod +x ./runner
|
||||||
|
- name: run runner
|
||||||
|
run: ./runner measure -b baseline -p ${{ github.workspace }}/performance/projects/
|
||||||
|
- uses: actions/upload-artifact@v2
|
||||||
|
with:
|
||||||
|
name: baseline-results
|
||||||
|
path: performance/results/
|
||||||
|
|
||||||
|
# detect regressions on the output generated from measuring
|
||||||
|
# the two branches. Exits with non-zero code if a regression is detected.
|
||||||
|
calculate-regressions:
|
||||||
|
needs: [measure-dev, measure-baseline]
|
||||||
|
name: Compare Results
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/download-artifact@v2
|
||||||
|
with:
|
||||||
|
name: dev-results
|
||||||
|
- uses: actions/download-artifact@v2
|
||||||
|
with:
|
||||||
|
name: baseline-results
|
||||||
|
- name: "[debug] ls result files"
|
||||||
|
run: ls
|
||||||
|
- uses: actions/download-artifact@v2
|
||||||
|
with:
|
||||||
|
name: runner
|
||||||
|
- name: change permissions
|
||||||
|
run: chmod +x ./runner
|
||||||
|
- name: make results directory
|
||||||
|
run: mkdir ./final-output/
|
||||||
|
- name: run calculation
|
||||||
|
run: ./runner calculate -r ./ -o ./final-output/
|
||||||
|
# always attempt to upload the results even if there were regressions found
|
||||||
|
- uses: actions/upload-artifact@v2
|
||||||
|
if: ${{ always() }}
|
||||||
|
with:
|
||||||
|
name: final-calculations
|
||||||
|
path: ./final-output/*
|
||||||
31
.github/workflows/release-branch-tests.yml
vendored
31
.github/workflows/release-branch-tests.yml
vendored
@@ -1,31 +0,0 @@
|
|||||||
# **what?**
|
|
||||||
# The purpose of this workflow is to trigger CI to run for each
|
|
||||||
# release branch and main branch on a regular cadence. If the CI workflow
|
|
||||||
# fails for a branch, it will post to #dev-core-alerts to raise awareness.
|
|
||||||
|
|
||||||
# **why?**
|
|
||||||
# Ensures release branches and main are always shippable and not broken.
|
|
||||||
# Also, can catch any dependencies shifting beneath us that might
|
|
||||||
# introduce breaking changes (could also impact Cloud).
|
|
||||||
|
|
||||||
# **when?**
|
|
||||||
# Mainly on a schedule of 9:00, 13:00, 18:00 UTC everyday.
|
|
||||||
# Manual trigger can also test on demand
|
|
||||||
|
|
||||||
name: Release branch scheduled testing
|
|
||||||
|
|
||||||
on:
|
|
||||||
schedule:
|
|
||||||
- cron: '0 9,13,18 * * *' # 9:00, 13:00, 18:00 UTC
|
|
||||||
|
|
||||||
workflow_dispatch: # for manual triggering
|
|
||||||
|
|
||||||
# no special access is needed
|
|
||||||
permissions: read-all
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
run_tests:
|
|
||||||
uses: dbt-labs/actions/.github/workflows/release-branch-tests.yml@main
|
|
||||||
with:
|
|
||||||
workflows_to_run: '["main.yml"]'
|
|
||||||
secrets: inherit
|
|
||||||
411
.github/workflows/release.yml
vendored
411
.github/workflows/release.yml
vendored
@@ -1,281 +1,200 @@
|
|||||||
# **what?**
|
# **what?**
|
||||||
# Release workflow provides the following steps:
|
# Take the given commit, run unit tests specifically on that sha, build and
|
||||||
# - checkout the given commit;
|
# package it, and then release to GitHub and PyPi with that specific build
|
||||||
# - validate version in sources and changelog file for given version;
|
|
||||||
# - bump the version and generate a changelog if needed;
|
|
||||||
# - merge all changes to the target branch if needed;
|
|
||||||
# - run unit and integration tests against given commit;
|
|
||||||
# - build and package that SHA;
|
|
||||||
# - release it to GitHub and PyPI with that specific build;
|
|
||||||
# - release it to Docker
|
|
||||||
#
|
|
||||||
# **why?**
|
# **why?**
|
||||||
# Ensure an automated and tested release process
|
# Ensure an automated and tested release process
|
||||||
#
|
|
||||||
# **when?**
|
|
||||||
# This workflow can be run manually on demand or can be called by other workflows
|
|
||||||
|
|
||||||
name: "Release to GitHub, PyPI & Docker"
|
# **when?**
|
||||||
run-name: "Release ${{ inputs.version_number }} to GitHub, PyPI & Docker"
|
# This will only run manually with a given sha and version
|
||||||
|
|
||||||
|
name: Release to GitHub and PyPi
|
||||||
|
|
||||||
on:
|
on:
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
inputs:
|
inputs:
|
||||||
target_branch:
|
sha:
|
||||||
description: "The branch to release from"
|
description: 'The last commit sha in the release'
|
||||||
type: string
|
required: true
|
||||||
required: true
|
|
||||||
version_number:
|
version_number:
|
||||||
description: "The release version number (i.e. 1.0.0b1)"
|
description: 'The release version number (i.e. 1.0.0b1)'
|
||||||
type: string
|
required: true
|
||||||
required: true
|
|
||||||
test_run:
|
|
||||||
description: "Test run (Publish release as draft)"
|
|
||||||
type: boolean
|
|
||||||
default: true
|
|
||||||
required: false
|
|
||||||
nightly_release:
|
|
||||||
description: "Nightly release to dev environment"
|
|
||||||
type: boolean
|
|
||||||
default: false
|
|
||||||
required: false
|
|
||||||
only_docker:
|
|
||||||
description: "Only release Docker image, skip GitHub & PyPI"
|
|
||||||
type: boolean
|
|
||||||
default: false
|
|
||||||
required: false
|
|
||||||
workflow_call:
|
|
||||||
inputs:
|
|
||||||
target_branch:
|
|
||||||
description: "The branch to release from"
|
|
||||||
type: string
|
|
||||||
required: true
|
|
||||||
version_number:
|
|
||||||
description: "The release version number (i.e. 1.0.0b1)"
|
|
||||||
type: string
|
|
||||||
required: true
|
|
||||||
test_run:
|
|
||||||
description: "Test run (Publish release as draft)"
|
|
||||||
type: boolean
|
|
||||||
default: true
|
|
||||||
required: false
|
|
||||||
nightly_release:
|
|
||||||
description: "Nightly release to dev environment"
|
|
||||||
type: boolean
|
|
||||||
default: false
|
|
||||||
required: false
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: write # this is the permission that allows creating a new release
|
|
||||||
|
|
||||||
defaults:
|
defaults:
|
||||||
run:
|
run:
|
||||||
shell: bash
|
shell: bash
|
||||||
|
|
||||||
env:
|
|
||||||
MIN_HATCH_VERSION: "1.11.0"
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
job-setup:
|
unit:
|
||||||
name: Log Inputs
|
name: Unit test
|
||||||
runs-on: ${{ vars.UBUNTU_LATEST }}
|
|
||||||
outputs:
|
|
||||||
use_hatch: ${{ steps.use_hatch.outputs.use_hatch }}
|
|
||||||
steps:
|
|
||||||
- name: "[DEBUG] Print Variables"
|
|
||||||
run: |
|
|
||||||
echo Inputs
|
|
||||||
echo The branch to release from: ${{ inputs.target_branch }}
|
|
||||||
echo The release version number: ${{ inputs.version_number }}
|
|
||||||
echo Test run: ${{ inputs.test_run }}
|
|
||||||
echo Nightly release: ${{ inputs.nightly_release }}
|
|
||||||
echo Only Docker: ${{ inputs.only_docker }}
|
|
||||||
|
|
||||||
# In version env.HATCH_VERSION we started to use hatch for build tooling. Before that we used setuptools.
|
runs-on: ubuntu-latest
|
||||||
# This needs to check if we're using hatch or setuptools based on the version being released. We should
|
|
||||||
# check if the version is greater than or equal to env.HATCH_VERSION. If it is, we use hatch, otherwise we use setuptools.
|
|
||||||
- name: "Check if using hatch"
|
|
||||||
id: use_hatch
|
|
||||||
run: |
|
|
||||||
# Extract major.minor from versions like 1.11.0a1 -> 1.11
|
|
||||||
INPUT_MAJ_MIN=$(echo "${{ inputs.version_number }}" | sed -E 's/^([0-9]+\.[0-9]+).*/\1/')
|
|
||||||
HATCH_MAJ_MIN=$(echo "${{ env.MIN_HATCH_VERSION }}" | sed -E 's/^([0-9]+\.[0-9]+).*/\1/')
|
|
||||||
|
|
||||||
if [ $(echo "$INPUT_MAJ_MIN >= $HATCH_MAJ_MIN" | bc) -eq 1 ]; then
|
env:
|
||||||
echo "use_hatch=true" >> $GITHUB_OUTPUT
|
TOXENV: "unit"
|
||||||
else
|
|
||||||
echo "use_hatch=false" >> $GITHUB_OUTPUT
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: "Notify if using hatch"
|
|
||||||
run: |
|
|
||||||
if [ ${{ steps.use_hatch.outputs.use_hatch }} = "true" ]; then
|
|
||||||
echo "::notice title="Using Hatch": $title::Using Hatch for release"
|
|
||||||
else
|
|
||||||
echo "::notice title="Using Setuptools": $title::Using Setuptools for release"
|
|
||||||
fi
|
|
||||||
|
|
||||||
bump-version-generate-changelog:
|
|
||||||
name: Bump package version, Generate changelog
|
|
||||||
needs: [job-setup]
|
|
||||||
if: ${{ !inputs.only_docker }}
|
|
||||||
|
|
||||||
uses: dbt-labs/dbt-release/.github/workflows/release-prep.yml@main
|
|
||||||
|
|
||||||
with:
|
|
||||||
version_number: ${{ inputs.version_number }}
|
|
||||||
hatch_directory: "core"
|
|
||||||
target_branch: ${{ inputs.target_branch }}
|
|
||||||
env_setup_script_path: "scripts/env-setup.sh"
|
|
||||||
test_run: ${{ inputs.test_run }}
|
|
||||||
nightly_release: ${{ inputs.nightly_release }}
|
|
||||||
use_hatch: ${{ needs.job-setup.outputs.use_hatch == 'true' }} # workflow outputs are strings...
|
|
||||||
|
|
||||||
secrets: inherit
|
|
||||||
|
|
||||||
log-outputs-bump-version-generate-changelog:
|
|
||||||
name: "[Log output] Bump package version, Generate changelog"
|
|
||||||
if: ${{ !failure() && !cancelled() && !inputs.only_docker }}
|
|
||||||
|
|
||||||
needs: [bump-version-generate-changelog]
|
|
||||||
|
|
||||||
runs-on: ${{ vars.UBUNTU_LATEST }}
|
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Print variables
|
- name: Check out the repository
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
with:
|
||||||
|
persist-credentials: false
|
||||||
|
ref: ${{ github.event.inputs.sha }}
|
||||||
|
|
||||||
|
- name: Set up Python
|
||||||
|
uses: actions/setup-python@v2
|
||||||
|
with:
|
||||||
|
python-version: 3.8
|
||||||
|
|
||||||
|
- name: Install python dependencies
|
||||||
run: |
|
run: |
|
||||||
echo Final SHA : ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
|
pip install --user --upgrade pip
|
||||||
echo Changelog path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}
|
pip install tox
|
||||||
|
pip --version
|
||||||
|
tox --version
|
||||||
|
|
||||||
build-test-package:
|
- name: Run tox
|
||||||
name: Build, Test, Package
|
run: tox
|
||||||
if: ${{ !failure() && !cancelled() && !inputs.only_docker }}
|
|
||||||
needs: [job-setup, bump-version-generate-changelog]
|
|
||||||
|
|
||||||
uses: dbt-labs/dbt-release/.github/workflows/build.yml@main
|
build:
|
||||||
|
name: build packages
|
||||||
|
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Check out the repository
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
with:
|
||||||
|
persist-credentials: false
|
||||||
|
ref: ${{ github.event.inputs.sha }}
|
||||||
|
|
||||||
|
- name: Set up Python
|
||||||
|
uses: actions/setup-python@v2
|
||||||
|
with:
|
||||||
|
python-version: 3.8
|
||||||
|
|
||||||
|
- name: Install python dependencies
|
||||||
|
run: |
|
||||||
|
pip install --user --upgrade pip
|
||||||
|
pip install --upgrade setuptools wheel twine check-wheel-contents
|
||||||
|
pip --version
|
||||||
|
|
||||||
|
- name: Build distributions
|
||||||
|
run: ./scripts/build-dist.sh
|
||||||
|
|
||||||
|
- name: Show distributions
|
||||||
|
run: ls -lh dist/
|
||||||
|
|
||||||
|
- name: Check distribution descriptions
|
||||||
|
run: |
|
||||||
|
twine check dist/*
|
||||||
|
|
||||||
|
- name: Check wheel contents
|
||||||
|
run: |
|
||||||
|
check-wheel-contents dist/*.whl --ignore W007,W008
|
||||||
|
|
||||||
|
- uses: actions/upload-artifact@v2
|
||||||
|
with:
|
||||||
|
name: dist
|
||||||
|
path: |
|
||||||
|
dist/
|
||||||
|
!dist/dbt-${{github.event.inputs.version_number}}.tar.gz
|
||||||
|
|
||||||
|
test-build:
|
||||||
|
name: verify packages
|
||||||
|
|
||||||
|
needs: [build, unit]
|
||||||
|
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Set up Python
|
||||||
|
uses: actions/setup-python@v2
|
||||||
|
with:
|
||||||
|
python-version: 3.8
|
||||||
|
|
||||||
|
- name: Install python dependencies
|
||||||
|
run: |
|
||||||
|
pip install --user --upgrade pip
|
||||||
|
pip install --upgrade wheel
|
||||||
|
pip --version
|
||||||
|
|
||||||
|
- uses: actions/download-artifact@v2
|
||||||
|
with:
|
||||||
|
name: dist
|
||||||
|
path: dist/
|
||||||
|
|
||||||
|
- name: Show distributions
|
||||||
|
run: ls -lh dist/
|
||||||
|
|
||||||
|
- name: Install wheel distributions
|
||||||
|
run: |
|
||||||
|
find ./dist/*.whl -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/
|
||||||
|
|
||||||
|
- name: Check wheel distributions
|
||||||
|
run: |
|
||||||
|
dbt --version
|
||||||
|
|
||||||
|
- name: Install source distributions
|
||||||
|
run: |
|
||||||
|
find ./dist/*.gz -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/
|
||||||
|
|
||||||
|
- name: Check source distributions
|
||||||
|
run: |
|
||||||
|
dbt --version
|
||||||
|
|
||||||
with:
|
|
||||||
sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
|
|
||||||
version_number: ${{ inputs.version_number }}
|
|
||||||
hatch_directory: "core"
|
|
||||||
changelog_path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}
|
|
||||||
build_script_path: "scripts/build-dist.sh"
|
|
||||||
package_test_command: "dbt --version"
|
|
||||||
test_run: ${{ inputs.test_run }}
|
|
||||||
nightly_release: ${{ inputs.nightly_release }}
|
|
||||||
use_hatch: ${{ needs.job-setup.outputs.use_hatch == 'true' }} # workflow outputs are strings...
|
|
||||||
|
|
||||||
github-release:
|
github-release:
|
||||||
name: GitHub Release
|
name: GitHub Release
|
||||||
if: ${{ !failure() && !cancelled() && !inputs.only_docker }}
|
|
||||||
|
|
||||||
needs: [bump-version-generate-changelog, build-test-package]
|
needs: test-build
|
||||||
|
|
||||||
uses: dbt-labs/dbt-release/.github/workflows/github-release.yml@main
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
with:
|
|
||||||
sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
|
|
||||||
version_number: ${{ inputs.version_number }}
|
|
||||||
changelog_path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}
|
|
||||||
test_run: ${{ inputs.test_run }}
|
|
||||||
|
|
||||||
pypi-release:
|
|
||||||
name: PyPI Release
|
|
||||||
|
|
||||||
needs: [github-release]
|
|
||||||
|
|
||||||
uses: dbt-labs/dbt-release/.github/workflows/pypi-release.yml@main
|
|
||||||
|
|
||||||
with:
|
|
||||||
version_number: ${{ inputs.version_number }}
|
|
||||||
test_run: ${{ inputs.test_run }}
|
|
||||||
|
|
||||||
secrets:
|
|
||||||
PYPI_API_TOKEN: ${{ secrets.PYPI_API_TOKEN }}
|
|
||||||
TEST_PYPI_API_TOKEN: ${{ secrets.TEST_PYPI_API_TOKEN }}
|
|
||||||
|
|
||||||
determine-docker-package:
|
|
||||||
# dbt-postgres exists within dbt-core for versions 1.7 and earlier but is a separate package for 1.8 and later.
|
|
||||||
# determine if we need to release dbt-core or both dbt-core and dbt-postgres
|
|
||||||
name: Determine Docker Package
|
|
||||||
if: ${{ !failure() && !cancelled() }}
|
|
||||||
runs-on: ${{ vars.UBUNTU_LATEST }}
|
|
||||||
needs: [pypi-release]
|
|
||||||
outputs:
|
|
||||||
matrix: ${{ steps.determine-docker-package.outputs.matrix }}
|
|
||||||
steps:
|
steps:
|
||||||
- name: "Audit Version And Parse Into Parts"
|
- uses: actions/download-artifact@v2
|
||||||
id: semver
|
|
||||||
uses: dbt-labs/actions/parse-semver@v1.1.0
|
|
||||||
with:
|
with:
|
||||||
version: ${{ inputs.version_number }}
|
name: dist
|
||||||
|
path: '.'
|
||||||
- name: "Determine Packages to Release"
|
|
||||||
id: determine-docker-package
|
# Need to set an output variable because env variables can't be taken as input
|
||||||
|
# This is needed for the next step with releasing to GitHub
|
||||||
|
- name: Find release type
|
||||||
|
id: release_type
|
||||||
|
env:
|
||||||
|
IS_PRERELEASE: ${{ contains(github.event.inputs.version_number, 'rc') || contains(github.event.inputs.version_number, 'b') }}
|
||||||
run: |
|
run: |
|
||||||
if [ ${{ steps.semver.outputs.minor }} -ge 8 ]; then
|
echo ::set-output name=isPrerelease::$IS_PRERELEASE
|
||||||
json_output={\"package\":[\"dbt-core\"]}
|
|
||||||
else
|
|
||||||
json_output={\"package\":[\"dbt-core\",\"dbt-postgres\"]}
|
|
||||||
fi
|
|
||||||
echo "matrix=$json_output" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
docker-release:
|
- name: Creating GitHub Release
|
||||||
name: "Docker Release for ${{ matrix.package }}"
|
uses: softprops/action-gh-release@v1
|
||||||
needs: [determine-docker-package]
|
with:
|
||||||
# We cannot release to docker on a test run because it uses the tag in GitHub as
|
name: dbt-core v${{github.event.inputs.version_number}}
|
||||||
# what we need to release but draft releases don't actually tag the commit so it
|
tag_name: v${{github.event.inputs.version_number}}
|
||||||
# finds nothing to release
|
prerelease: ${{ steps.release_type.outputs.isPrerelease }}
|
||||||
if: ${{ !failure() && !cancelled() && (!inputs.test_run || inputs.only_docker) }}
|
target_commitish: ${{github.event.inputs.sha}}
|
||||||
strategy:
|
body: |
|
||||||
matrix: ${{fromJson(needs.determine-docker-package.outputs.matrix)}}
|
[Release notes](https://github.com/dbt-labs/dbt-core/blob/main/CHANGELOG.md)
|
||||||
|
files: |
|
||||||
|
dbt_postgres-${{github.event.inputs.version_number}}-py3-none-any.whl
|
||||||
|
dbt_core-${{github.event.inputs.version_number}}-py3-none-any.whl
|
||||||
|
dbt-postgres-${{github.event.inputs.version_number}}.tar.gz
|
||||||
|
dbt-core-${{github.event.inputs.version_number}}.tar.gz
|
||||||
|
|
||||||
|
pypi-release:
|
||||||
|
name: Pypi release
|
||||||
|
|
||||||
permissions:
|
runs-on: ubuntu-latest
|
||||||
packages: write
|
|
||||||
|
|
||||||
uses: dbt-labs/dbt-release/.github/workflows/release-docker.yml@main
|
needs: github-release
|
||||||
with:
|
|
||||||
package: ${{ matrix.package }}
|
|
||||||
version_number: ${{ inputs.version_number }}
|
|
||||||
test_run: ${{ inputs.test_run }}
|
|
||||||
|
|
||||||
slack-notification:
|
environment: PypiProd
|
||||||
name: Slack Notification
|
steps:
|
||||||
if: ${{ failure() && (!inputs.test_run || inputs.nightly_release) }}
|
- uses: actions/download-artifact@v2
|
||||||
|
with:
|
||||||
needs:
|
name: dist
|
||||||
[
|
path: 'dist'
|
||||||
bump-version-generate-changelog,
|
|
||||||
build-test-package,
|
- name: Publish distribution to PyPI
|
||||||
github-release,
|
uses: pypa/gh-action-pypi-publish@v1.4.2
|
||||||
pypi-release,
|
with:
|
||||||
docker-release,
|
password: ${{ secrets.PYPI_API_TOKEN }}
|
||||||
]
|
|
||||||
|
|
||||||
uses: dbt-labs/dbt-release/.github/workflows/slack-post-notification.yml@main
|
|
||||||
with:
|
|
||||||
status: "failure"
|
|
||||||
|
|
||||||
secrets:
|
|
||||||
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_DEV_CORE_ALERTS }}
|
|
||||||
|
|
||||||
testing-slack-notification:
|
|
||||||
# sends notifications to #slackbot-test
|
|
||||||
name: Testing - Slack Notification
|
|
||||||
if: ${{ failure() && inputs.test_run && !inputs.nightly_release }}
|
|
||||||
|
|
||||||
needs:
|
|
||||||
[
|
|
||||||
bump-version-generate-changelog,
|
|
||||||
build-test-package,
|
|
||||||
github-release,
|
|
||||||
pypi-release,
|
|
||||||
docker-release,
|
|
||||||
]
|
|
||||||
|
|
||||||
uses: dbt-labs/dbt-release/.github/workflows/slack-post-notification.yml@main
|
|
||||||
with:
|
|
||||||
status: "failure"
|
|
||||||
|
|
||||||
secrets:
|
|
||||||
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_TESTING_WEBHOOK_URL }}
|
|
||||||
|
|||||||
30
.github/workflows/repository-cleanup.yml
vendored
30
.github/workflows/repository-cleanup.yml
vendored
@@ -1,30 +0,0 @@
|
|||||||
# **what?**
|
|
||||||
# Cleanup branches left over from automation and testing. Also cleanup
|
|
||||||
# draft releases from release testing.
|
|
||||||
|
|
||||||
# **why?**
|
|
||||||
# The automations are leaving behind branches and releases that clutter
|
|
||||||
# the repository. Sometimes we need them to debug processes so we don't
|
|
||||||
# want them immediately deleted. Running on Saturday to avoid running
|
|
||||||
# at the same time as an actual release to prevent breaking a release
|
|
||||||
# mid-release.
|
|
||||||
|
|
||||||
# **when?**
|
|
||||||
# Mainly on a schedule of 12:00 Saturday.
|
|
||||||
# Manual trigger can also run on demand
|
|
||||||
|
|
||||||
name: Repository Cleanup
|
|
||||||
|
|
||||||
on:
|
|
||||||
schedule:
|
|
||||||
- cron: '0 12 * * SAT' # At 12:00 on Saturday - details in `why` above
|
|
||||||
|
|
||||||
workflow_dispatch: # for manual triggering
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: write
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
cleanup-repo:
|
|
||||||
uses: dbt-labs/actions/.github/workflows/repository-cleanup.yml@main
|
|
||||||
secrets: inherit
|
|
||||||
104
.github/workflows/schema-check.yml
vendored
104
.github/workflows/schema-check.yml
vendored
@@ -1,5 +1,5 @@
|
|||||||
# **what?**
|
# **what?**
|
||||||
# Compares the schema of the dbt version of the given ref vs
|
# Compares the schema of the dbt version of the given ref vs
|
||||||
# the latest official schema releases found in schemas.getdbt.com.
|
# the latest official schema releases found in schemas.getdbt.com.
|
||||||
# If there are differences, the workflow will fail and upload the
|
# If there are differences, the workflow will fail and upload the
|
||||||
# diff as an artifact. The metadata team should be alerted to the change.
|
# diff as an artifact. The metadata team should be alerted to the change.
|
||||||
@@ -9,93 +9,79 @@
|
|||||||
# occur so we want to proactively alert to it.
|
# occur so we want to proactively alert to it.
|
||||||
#
|
#
|
||||||
# **when?**
|
# **when?**
|
||||||
# Only can be run manually
|
# On pushes to `develop` and release branches. Manual runs are also enabled.
|
||||||
name: Artifact Schema Check
|
name: Artifact Schema Check
|
||||||
|
|
||||||
on:
|
on:
|
||||||
# pull_request:
|
|
||||||
# types: [ opened, reopened, labeled, unlabeled, synchronize ]
|
|
||||||
# paths-ignore: [ '.changes/**', '.github/**', 'tests/**', '**.md', '**.yml' ]
|
|
||||||
|
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
inputs:
|
pull_request: #TODO: remove before merging
|
||||||
target_branch:
|
push:
|
||||||
description: "The branch to check against"
|
branches:
|
||||||
type: string
|
- "develop"
|
||||||
default: "main"
|
- "*.latest"
|
||||||
required: true
|
- "releases/*"
|
||||||
|
|
||||||
# no special access is needed
|
|
||||||
permissions: read-all
|
|
||||||
|
|
||||||
env:
|
env:
|
||||||
LATEST_SCHEMA_PATH: ${{ github.workspace }}/new_schemas
|
LATEST_SCHEMA_PATH: ${{ github.workspace }}/new_schemas
|
||||||
SCHEMA_DIFF_ARTIFACT: ${{ github.workspace }}/schema_changes.txt
|
SCHEMA_DIFF_ARTIFACT: ${{ github.workspace }}//schema_schanges.txt
|
||||||
DBT_REPO_DIRECTORY: ${{ github.workspace }}/dbt
|
DBT_REPO_DIRECTORY: ${{ github.workspace }}/dbt
|
||||||
SCHEMA_REPO_DIRECTORY: ${{ github.workspace }}/schemas.getdbt.com
|
SCHEMA_REPO_DIRECTORY: ${{ github.workspace }}/schemas.getdbt.com
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
checking-schemas:
|
checking-schemas:
|
||||||
name: "Post-merge schema changes required"
|
name: "Checking schemas"
|
||||||
runs-on: ${{ vars.UBUNTU_LATEST }}
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Set up Python
|
- name: Set up Python
|
||||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # actions/setup-python@v6
|
uses: actions/setup-python@v2
|
||||||
with:
|
with:
|
||||||
python-version: "3.10"
|
python-version: 3.8
|
||||||
|
|
||||||
- name: Checkout dbt repo
|
- name: Checkout dbt repo
|
||||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
uses: actions/checkout@v2.3.4
|
||||||
with:
|
with:
|
||||||
path: ${{ env.DBT_REPO_DIRECTORY }}
|
path: ${{ env.DBT_REPO_DIRECTORY }}
|
||||||
ref: ${{ inputs.target_branch }}
|
|
||||||
|
|
||||||
- name: Check for changes in core/dbt/artifacts
|
|
||||||
# https://github.com/marketplace/actions/paths-changes-filter
|
|
||||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # dorny/paths-filter@v3
|
|
||||||
id: check_artifact_changes
|
|
||||||
with:
|
|
||||||
filters: |
|
|
||||||
artifacts_changed:
|
|
||||||
- 'core/dbt/artifacts/**'
|
|
||||||
list-files: shell
|
|
||||||
working-directory: ${{ env.DBT_REPO_DIRECTORY }}
|
|
||||||
|
|
||||||
- name: Succeed if no artifacts have changed
|
|
||||||
if: steps.check_artifact_changes.outputs.artifacts_changed == 'false'
|
|
||||||
run: |
|
|
||||||
echo "No artifact changes found in core/dbt/artifacts. CI check passed."
|
|
||||||
|
|
||||||
- name: Checkout schemas.getdbt.com repo
|
- name: Checkout schemas.getdbt.com repo
|
||||||
if: steps.check_artifact_changes.outputs.artifacts_changed == 'true'
|
uses: actions/checkout@v2.3.4
|
||||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
with:
|
||||||
with:
|
|
||||||
repository: dbt-labs/schemas.getdbt.com
|
repository: dbt-labs/schemas.getdbt.com
|
||||||
ref: "main"
|
ref: 'main'
|
||||||
|
ssh-key: ${{ secrets.SCHEMA_SSH_PRIVATE_KEY }}
|
||||||
path: ${{ env.SCHEMA_REPO_DIRECTORY }}
|
path: ${{ env.SCHEMA_REPO_DIRECTORY }}
|
||||||
|
|
||||||
- name: Generate current schema
|
- name: Generate current schema
|
||||||
if: steps.check_artifact_changes.outputs.artifacts_changed == 'true'
|
|
||||||
run: |
|
run: |
|
||||||
cd ${{ env.DBT_REPO_DIRECTORY }}/core
|
cd ${{ env.DBT_REPO_DIRECTORY }}
|
||||||
pip install --upgrade pip hatch
|
python3 -m venv env
|
||||||
hatch run setup
|
source env/bin/activate
|
||||||
hatch run json-schema -- --path ${{ env.LATEST_SCHEMA_PATH }}
|
pip install --upgrade pip
|
||||||
|
pip install -r dev-requirements.txt -r editable-requirements.txt
|
||||||
|
python scripts/collect-artifact-schema.py --path ${{ env.LATEST_SCHEMA_PATH }}
|
||||||
|
|
||||||
# Copy generated schema files into the schemas.getdbt.com repo
|
# Copy generated schema files into the schemas.getdbt.com repo
|
||||||
# Do a git diff to find any changes
|
# Do a git diff to find any changes
|
||||||
# Ignore any lines with date-like (yyyy-mm-dd) or version-like (x.y.z) changes
|
# Ignore any date or version changes though
|
||||||
- name: Compare schemas
|
- name: Compare schemas
|
||||||
if: steps.check_artifact_changes.outputs.artifacts_changed == 'true'
|
|
||||||
run: |
|
run: |
|
||||||
cp -r ${{ env.LATEST_SCHEMA_PATH }}/dbt ${{ env.SCHEMA_REPO_DIRECTORY }}
|
cp -r ${{ env.LATEST_SCHEMA_PATH }}/dbt ${{ env.SCHEMA_REPO_DIRECTORY }}
|
||||||
cd ${{ env.SCHEMA_REPO_DIRECTORY }}
|
cd ${{ env.SCHEMA_REPO_DIRECTORY }}
|
||||||
git diff -I='*[0-9]{4}-[0-9]{2}-[0-9]{2}' -I='*[0-9]+\.[0-9]+\.[0-9]+' --exit-code > ${{ env.SCHEMA_DIFF_ARTIFACT }}
|
diff_results=$(git diff -I='*[0-9]{4}-(0[1-9]|1[0-2])-(0[1-9]|[1-2][0-9]|3[0-1])T' \
|
||||||
|
-I='*[0-9]{1}.[0-9]{2}.[0-9]{1}(rc[0-9]|b[0-9]| )' --compact-summary)
|
||||||
|
if [[ $(echo diff_results) ]]; then
|
||||||
|
echo $diff_results
|
||||||
|
echo "Schema changes detected!"
|
||||||
|
git diff -I='*[0-9]{4}-(0[1-9]|1[0-2])-(0[1-9]|[1-2][0-9]|3[0-1])T' \
|
||||||
|
-I='*[0-9]{1}.[0-9]{2}.[0-9]{1}(rc[0-9]|b[0-9]| )' > ${{ env.SCHEMA_DIFF_ARTIFACT }}
|
||||||
|
exit 1
|
||||||
|
else
|
||||||
|
echo "No schema changes detected"
|
||||||
|
fi
|
||||||
|
|
||||||
- name: Upload schema diff
|
- name: Upload schema diff
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # actions/upload-artifact@v4
|
uses: actions/upload-artifact@v2.2.4
|
||||||
if: ${{ failure() && steps.check_artifact_changes.outputs.artifacts_changed == 'true' }}
|
if: ${{ failure() }}
|
||||||
with:
|
with:
|
||||||
name: "schema_changes.txt"
|
name: 'schema_schanges.txt'
|
||||||
path: "${{ env.SCHEMA_DIFF_ARTIFACT }}"
|
path: '${{ env.SCHEMA_DIFF_ARTIFACT }}'
|
||||||
|
|||||||
16
.github/workflows/stale.yml
vendored
16
.github/workflows/stale.yml
vendored
@@ -3,10 +3,16 @@ on:
|
|||||||
schedule:
|
schedule:
|
||||||
- cron: "30 1 * * *"
|
- cron: "30 1 * * *"
|
||||||
|
|
||||||
permissions:
|
|
||||||
issues: write
|
|
||||||
pull-requests: write
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
stale:
|
stale:
|
||||||
uses: dbt-labs/actions/.github/workflows/stale-bot-matrix.yml@main
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
# pinned at v4 (https://github.com/actions/stale/releases/tag/v4.0.0)
|
||||||
|
- uses: actions/stale@cdf15f641adb27a71842045a94023bef6945e3aa
|
||||||
|
with:
|
||||||
|
stale-issue-message: "This issue has been marked as Stale because it has been open for 180 days with no activity. If you would like the issue to remain open, please remove the stale label or comment on the issue, or it will be closed in 7 days."
|
||||||
|
stale-pr-message: "This PR has been marked as Stale because it has been open for 180 days with no activity. If you would like the PR to remain open, please remove the stale label or comment on the PR, or it will be closed in 7 days."
|
||||||
|
# mark issues/PRs stale when they haven't seen activity in 180 days
|
||||||
|
days-before-stale: 180
|
||||||
|
# ignore checking issues with the following labels
|
||||||
|
exempt-issue-labels: "epic,discussion"
|
||||||
|
|||||||
@@ -1,11 +1,12 @@
|
|||||||
# This Action checks makes a dbt run to sample json structured logs
|
# This Action checks makes a dbt run to sample json structured logs
|
||||||
# and checks that they conform to the currently documented schema.
|
# and checks that they conform to the currently documented schema.
|
||||||
#
|
#
|
||||||
# If this action fails it either means we have unintentionally deviated
|
# If this action fails it either means we have unintentionally deviated
|
||||||
# from our documented structured logging schema, or we need to bump the
|
# from our documented structured logging schema, or we need to bump the
|
||||||
# version of our structured logging and add new documentation to
|
# version of our structured logging and add new documentation to
|
||||||
# communicate these changes.
|
# communicate these changes.
|
||||||
|
|
||||||
|
|
||||||
name: Structured Logging Schema Check
|
name: Structured Logging Schema Check
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
@@ -14,104 +15,45 @@ on:
|
|||||||
- "*.latest"
|
- "*.latest"
|
||||||
- "releases/*"
|
- "releases/*"
|
||||||
pull_request:
|
pull_request:
|
||||||
merge_group:
|
|
||||||
types: [checks_requested]
|
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
permissions: read-all
|
permissions: read-all
|
||||||
|
|
||||||
# top-level adjustments can be made here
|
|
||||||
env:
|
|
||||||
# number of parallel processes to spawn for python testing
|
|
||||||
PYTHON_INTEGRATION_TEST_WORKERS: 5
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
integration-metadata:
|
|
||||||
name: integration test metadata generation
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
outputs:
|
|
||||||
split-groups: ${{ steps.generate-split-groups.outputs.split-groups }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: generate split-groups
|
|
||||||
id: generate-split-groups
|
|
||||||
run: |
|
|
||||||
MATRIX_JSON="["
|
|
||||||
for B in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
|
|
||||||
MATRIX_JSON+=$(sed 's/^/"/;s/$/"/' <<< "${B}")
|
|
||||||
done
|
|
||||||
MATRIX_JSON="${MATRIX_JSON//\"\"/\", \"}"
|
|
||||||
MATRIX_JSON+="]"
|
|
||||||
echo "split-groups=${MATRIX_JSON}" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
# run the performance measurements on the current or default branch
|
# run the performance measurements on the current or default branch
|
||||||
test-schema:
|
test-schema:
|
||||||
name: Test Log Schema
|
name: Test Log Schema
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 30
|
|
||||||
needs:
|
|
||||||
- integration-metadata
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
|
|
||||||
env:
|
env:
|
||||||
# turns warnings into errors
|
# turns warnings into errors
|
||||||
RUSTFLAGS: "-D warnings"
|
RUSTFLAGS: "-D warnings"
|
||||||
# points tests to the log file
|
# points tests to the log file
|
||||||
LOG_DIR: "/home/runner/work/dbt-core/dbt-core/logs"
|
LOG_DIR: "/home/runner/work/dbt-core/dbt-core/logs"
|
||||||
# tells integration tests to output into json format
|
# tells integration tests to output into json format
|
||||||
DBT_LOG_FORMAT: "json"
|
DBT_LOG_FORMAT: 'json'
|
||||||
# tell eventmgr to convert logging events into bytes
|
|
||||||
DBT_TEST_BINARY_SERIALIZATION: "true"
|
|
||||||
# Additional test users
|
|
||||||
DBT_TEST_USER_1: dbt_test_user_1
|
|
||||||
DBT_TEST_USER_2: dbt_test_user_2
|
|
||||||
DBT_TEST_USER_3: dbt_test_user_3
|
|
||||||
|
|
||||||
services:
|
|
||||||
# Label used to access the service container
|
|
||||||
postgres:
|
|
||||||
# Docker Hub image
|
|
||||||
image: postgres
|
|
||||||
# Provide the password for postgres
|
|
||||||
env:
|
|
||||||
POSTGRES_PASSWORD: password
|
|
||||||
POSTGRES_USER: postgres
|
|
||||||
# Set health checks to wait until postgres has started
|
|
||||||
options: >-
|
|
||||||
--health-cmd pg_isready
|
|
||||||
--health-interval 10s
|
|
||||||
--health-timeout 5s
|
|
||||||
--health-retries 5
|
|
||||||
ports:
|
|
||||||
- 5432:5432
|
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
|
|
||||||
- name: checkout dev
|
- name: checkout dev
|
||||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
uses: actions/checkout@v2
|
||||||
with:
|
with:
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
|
|
||||||
- name: Setup Python
|
- name: Setup Python
|
||||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # actions/setup-python@v6
|
uses: actions/setup-python@v2.2.2
|
||||||
with:
|
with:
|
||||||
python-version: "3.10"
|
python-version: "3.8"
|
||||||
|
|
||||||
- name: Install python dependencies
|
- uses: actions-rs/toolchain@v1
|
||||||
run: |
|
with:
|
||||||
pip install --user --upgrade pip
|
profile: minimal
|
||||||
pip --version
|
toolchain: stable
|
||||||
pip install hatch
|
override: true
|
||||||
hatch --version
|
|
||||||
|
|
||||||
- name: Run postgres setup script
|
- name: install dbt
|
||||||
run: |
|
run: pip install -r dev-requirements.txt -r editable-requirements.txt
|
||||||
./scripts/setup_db.sh
|
|
||||||
env:
|
- name: Set up postgres
|
||||||
PGHOST: localhost
|
uses: ./.github/actions/setup-postgres-linux
|
||||||
PGPORT: 5432
|
|
||||||
PGPASSWORD: password
|
|
||||||
|
|
||||||
- name: ls
|
- name: ls
|
||||||
run: ls
|
run: ls
|
||||||
@@ -119,19 +61,11 @@ jobs:
|
|||||||
# integration tests generate a ton of logs in different files. the next step will find them all.
|
# integration tests generate a ton of logs in different files. the next step will find them all.
|
||||||
# we actually care if these pass, because the normal test run doesn't usually include many json log outputs
|
# we actually care if these pass, because the normal test run doesn't usually include many json log outputs
|
||||||
- name: Run integration tests
|
- name: Run integration tests
|
||||||
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # nick-fields/retry@v3
|
run: tox -e py38-postgres -- -nauto
|
||||||
with:
|
|
||||||
timeout_minutes: 30
|
|
||||||
max_attempts: 3
|
|
||||||
command: cd core && hatch run ci:integration-tests -- -nauto
|
|
||||||
env:
|
|
||||||
PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}
|
|
||||||
|
|
||||||
test-schema-report:
|
# apply our schema tests to every log event from the previous step
|
||||||
name: Log Schema Test Suite
|
# skips any output that isn't valid json
|
||||||
runs-on: ubuntu-latest
|
- uses: actions-rs/cargo@v1
|
||||||
needs: test-schema
|
with:
|
||||||
steps:
|
command: run
|
||||||
- name: "[Notification] Log test suite passes"
|
args: --manifest-path test/interop/log_parsing/Cargo.toml
|
||||||
run: |
|
|
||||||
echo "::notice title="Log test suite passes""
|
|
||||||
|
|||||||
164
.github/workflows/test-repeater.yml
vendored
164
.github/workflows/test-repeater.yml
vendored
@@ -1,164 +0,0 @@
|
|||||||
# **what?**
|
|
||||||
# This workflow will test all test(s) at the input path given number of times to determine if it's flaky or not. You can test with any supported OS/Python combination.
|
|
||||||
# This is batched in 10 to allow more test iterations faster.
|
|
||||||
|
|
||||||
# **why?**
|
|
||||||
# Testing if a test is flaky and if a previously flaky test has been fixed. This allows easy testing on supported python versions and OS combinations.
|
|
||||||
|
|
||||||
# **when?**
|
|
||||||
# This is triggered manually from dbt-core.
|
|
||||||
|
|
||||||
name: Flaky Tester
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_dispatch:
|
|
||||||
inputs:
|
|
||||||
branch:
|
|
||||||
description: "Branch to check out"
|
|
||||||
type: string
|
|
||||||
required: true
|
|
||||||
default: "main"
|
|
||||||
test_path:
|
|
||||||
description: "Path to single test to run (ex: tests/functional/retry/test_retry.py::TestRetry::test_fail_fast)"
|
|
||||||
type: string
|
|
||||||
required: true
|
|
||||||
default: "tests/functional/..."
|
|
||||||
python_version:
|
|
||||||
description: "Version of Python to Test Against"
|
|
||||||
type: choice
|
|
||||||
options:
|
|
||||||
- "3.10"
|
|
||||||
- "3.11"
|
|
||||||
os:
|
|
||||||
description: "OS to run test in"
|
|
||||||
type: choice
|
|
||||||
options:
|
|
||||||
- "ubuntu-latest"
|
|
||||||
- "macos-14"
|
|
||||||
- "windows-latest"
|
|
||||||
num_runs_per_batch:
|
|
||||||
description: "Max number of times to run the test per batch. We always run 10 batches."
|
|
||||||
type: number
|
|
||||||
required: true
|
|
||||||
default: "50"
|
|
||||||
|
|
||||||
permissions: read-all
|
|
||||||
|
|
||||||
defaults:
|
|
||||||
run:
|
|
||||||
shell: bash
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
debug:
|
|
||||||
runs-on: ${{ vars.UBUNTU_LATEST }}
|
|
||||||
steps:
|
|
||||||
- name: "[DEBUG] Output Inputs"
|
|
||||||
run: |
|
|
||||||
echo "Branch: ${{ inputs.branch }}"
|
|
||||||
echo "test_path: ${{ inputs.test_path }}"
|
|
||||||
echo "python_version: ${{ inputs.python_version }}"
|
|
||||||
echo "os: ${{ inputs.os }}"
|
|
||||||
echo "num_runs_per_batch: ${{ inputs.num_runs_per_batch }}"
|
|
||||||
|
|
||||||
pytest:
|
|
||||||
runs-on: ${{ inputs.os }}
|
|
||||||
strategy:
|
|
||||||
# run all batches, even if one fails. This informs how flaky the test may be.
|
|
||||||
fail-fast: false
|
|
||||||
# using a matrix to speed up the jobs since the matrix will run in parallel when runners are available
|
|
||||||
matrix:
|
|
||||||
batch: ["1", "2", "3", "4", "5", "6", "7", "8", "9", "10"]
|
|
||||||
env:
|
|
||||||
PYTEST_ADDOPTS: "-v --color=yes -n4 --csv integration_results.csv"
|
|
||||||
DBT_TEST_USER_1: dbt_test_user_1
|
|
||||||
DBT_TEST_USER_2: dbt_test_user_2
|
|
||||||
DBT_TEST_USER_3: dbt_test_user_3
|
|
||||||
DD_CIVISIBILITY_AGENTLESS_ENABLED: true
|
|
||||||
DD_API_KEY: ${{ secrets.DATADOG_API_KEY }}
|
|
||||||
DD_SITE: datadoghq.com
|
|
||||||
DD_ENV: ci
|
|
||||||
DD_SERVICE: ${{ github.event.repository.name }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: "Checkout code"
|
|
||||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # actions/checkout@v4
|
|
||||||
with:
|
|
||||||
ref: ${{ inputs.branch }}
|
|
||||||
|
|
||||||
- name: "Setup Python"
|
|
||||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # actions/setup-python@v6
|
|
||||||
with:
|
|
||||||
python-version: "${{ inputs.python_version }}"
|
|
||||||
|
|
||||||
- name: "Install hatch"
|
|
||||||
run: python -m pip install --user --upgrade pip hatch
|
|
||||||
|
|
||||||
- name: "Setup Dev Environment"
|
|
||||||
run: |
|
|
||||||
cd core
|
|
||||||
hatch run setup
|
|
||||||
|
|
||||||
- name: "Set up postgres (linux)"
|
|
||||||
if: inputs.os == '${{ vars.UBUNTU_LATEST }}'
|
|
||||||
run: |
|
|
||||||
cd core
|
|
||||||
hatch run setup-db
|
|
||||||
|
|
||||||
# mac and windows don't use make due to limitations with docker with those runners in GitHub
|
|
||||||
- name: Set up postgres (macos)
|
|
||||||
if: runner.os == 'macOS'
|
|
||||||
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # nick-fields/retry@v3
|
|
||||||
with:
|
|
||||||
timeout_minutes: 10
|
|
||||||
max_attempts: 3
|
|
||||||
command: ./scripts/setup_db.sh
|
|
||||||
|
|
||||||
- name: "Set up postgres (windows)"
|
|
||||||
if: inputs.os == 'windows-latest'
|
|
||||||
uses: ./.github/actions/setup-postgres-windows
|
|
||||||
|
|
||||||
- name: "Test Command"
|
|
||||||
id: command
|
|
||||||
run: |
|
|
||||||
test_command="python -m pytest ${{ inputs.test_path }}"
|
|
||||||
echo "test_command=$test_command" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- name: "Run test ${{ inputs.num_runs_per_batch }} times"
|
|
||||||
id: pytest
|
|
||||||
run: |
|
|
||||||
set +e
|
|
||||||
for ((i=1; i<=${{ inputs.num_runs_per_batch }}; i++))
|
|
||||||
do
|
|
||||||
echo "Running pytest iteration $i..."
|
|
||||||
python -m pytest --ddtrace ${{ inputs.test_path }}
|
|
||||||
exit_code=$?
|
|
||||||
|
|
||||||
if [[ $exit_code -eq 0 ]]; then
|
|
||||||
success=$((success + 1))
|
|
||||||
echo "Iteration $i: Success"
|
|
||||||
else
|
|
||||||
failure=$((failure + 1))
|
|
||||||
echo "Iteration $i: Failure"
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo
|
|
||||||
echo "==========================="
|
|
||||||
echo "Successful runs: $success"
|
|
||||||
echo "Failed runs: $failure"
|
|
||||||
echo "==========================="
|
|
||||||
echo
|
|
||||||
done
|
|
||||||
|
|
||||||
echo "failure=$failure" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- name: "Success and Failure Summary: ${{ inputs.os }}/Python ${{ inputs.python_version }}"
|
|
||||||
run: |
|
|
||||||
echo "Batch: ${{ matrix.batch }}"
|
|
||||||
echo "Successful runs: ${{ steps.pytest.outputs.success }}"
|
|
||||||
echo "Failed runs: ${{ steps.pytest.outputs.failure }}"
|
|
||||||
|
|
||||||
- name: "Error for Failures"
|
|
||||||
if: ${{ steps.pytest.outputs.failure }}
|
|
||||||
run: |
|
|
||||||
echo "Batch ${{ matrix.batch }} failed ${{ steps.pytest.outputs.failure }} of ${{ inputs.num_runs_per_batch }} tests"
|
|
||||||
exit 1
|
|
||||||
31
.github/workflows/triage-labels.yml
vendored
31
.github/workflows/triage-labels.yml
vendored
@@ -1,31 +0,0 @@
|
|||||||
# **what?**
|
|
||||||
# When the core team triages, we sometimes need more information from the issue creator. In
|
|
||||||
# those cases we remove the `triage` label and add the `awaiting_response` label. Once we
|
|
||||||
# recieve a response in the form of a comment, we want the `awaiting_response` label removed
|
|
||||||
# in favor of the `triage` label so we are aware that the issue needs action.
|
|
||||||
|
|
||||||
# **why?**
|
|
||||||
# To help with out team triage issue tracking
|
|
||||||
|
|
||||||
# **when?**
|
|
||||||
# This will run when a comment is added to an issue and that issue has to `awaiting_response` label.
|
|
||||||
|
|
||||||
name: Update Triage Label
|
|
||||||
|
|
||||||
on: issue_comment
|
|
||||||
|
|
||||||
defaults:
|
|
||||||
run:
|
|
||||||
shell: bash
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
issues: write
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
triage_label:
|
|
||||||
if: contains(github.event.issue.labels.*.name, 'awaiting_response')
|
|
||||||
uses: dbt-labs/actions/.github/workflows/swap-labels.yml@main
|
|
||||||
with:
|
|
||||||
add_label: "triage"
|
|
||||||
remove_label: "awaiting_response"
|
|
||||||
secrets: inherit
|
|
||||||
109
.github/workflows/version-bump.yml
vendored
Normal file
109
.github/workflows/version-bump.yml
vendored
Normal file
@@ -0,0 +1,109 @@
|
|||||||
|
# **what?**
|
||||||
|
# This workflow will take a version number and a dry run flag. With that
|
||||||
|
# it will run versionbump to update the version number everywhere in the
|
||||||
|
# code base and then generate an update Docker requirements file. If this
|
||||||
|
# is a dry run, a draft PR will open with the changes. If this isn't a dry
|
||||||
|
# run, the changes will be committed to the branch this is run on.
|
||||||
|
|
||||||
|
# **why?**
|
||||||
|
# This is to aid in releasing dbt and making sure we have updated
|
||||||
|
# the versions and Docker requirements in all places.
|
||||||
|
|
||||||
|
# **when?**
|
||||||
|
# This is triggered either manually OR
|
||||||
|
# from the repository_dispatch event "version-bump" which is sent from
|
||||||
|
# the dbt-release repo Action
|
||||||
|
|
||||||
|
name: Version Bump
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
version_number:
|
||||||
|
description: 'The version number to bump to'
|
||||||
|
required: true
|
||||||
|
is_dry_run:
|
||||||
|
description: 'Creates a draft PR to allow testing instead of committing to a branch'
|
||||||
|
required: true
|
||||||
|
default: 'true'
|
||||||
|
repository_dispatch:
|
||||||
|
types: [version-bump]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
bump:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Check out the repository
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
|
||||||
|
- name: Set version and dry run values
|
||||||
|
id: variables
|
||||||
|
env:
|
||||||
|
VERSION_NUMBER: "${{ github.event.client_payload.version_number == '' && github.event.inputs.version_number || github.event.client_payload.version_number }}"
|
||||||
|
IS_DRY_RUN: "${{ github.event.client_payload.is_dry_run == '' && github.event.inputs.is_dry_run || github.event.client_payload.is_dry_run }}"
|
||||||
|
run: |
|
||||||
|
echo Repository dispatch event version: ${{ github.event.client_payload.version_number }}
|
||||||
|
echo Repository dispatch event dry run: ${{ github.event.client_payload.is_dry_run }}
|
||||||
|
echo Workflow dispatch event version: ${{ github.event.inputs.version_number }}
|
||||||
|
echo Workflow dispatch event dry run: ${{ github.event.inputs.is_dry_run }}
|
||||||
|
echo ::set-output name=VERSION_NUMBER::$VERSION_NUMBER
|
||||||
|
echo ::set-output name=IS_DRY_RUN::$IS_DRY_RUN
|
||||||
|
|
||||||
|
- uses: actions/setup-python@v2
|
||||||
|
with:
|
||||||
|
python-version: "3.8"
|
||||||
|
|
||||||
|
- name: Install python dependencies
|
||||||
|
run: |
|
||||||
|
python3 -m venv env
|
||||||
|
source env/bin/activate
|
||||||
|
pip install --upgrade pip
|
||||||
|
|
||||||
|
- name: Create PR branch
|
||||||
|
if: ${{ steps.variables.outputs.IS_DRY_RUN == 'true' }}
|
||||||
|
run: |
|
||||||
|
git checkout -b bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_$GITHUB_RUN_ID
|
||||||
|
git push origin bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_$GITHUB_RUN_ID
|
||||||
|
git branch --set-upstream-to=origin/bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_$GITHUB_RUN_ID bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_$GITHUB_RUN_ID
|
||||||
|
|
||||||
|
- name: Generate Docker requirements
|
||||||
|
run: |
|
||||||
|
source env/bin/activate
|
||||||
|
pip install -r requirements.txt
|
||||||
|
pip freeze -l > docker/requirements/requirements.txt
|
||||||
|
git status
|
||||||
|
|
||||||
|
- name: Bump version
|
||||||
|
run: |
|
||||||
|
source env/bin/activate
|
||||||
|
pip install -r dev-requirements.txt
|
||||||
|
env/bin/bumpversion --allow-dirty --new-version ${{steps.variables.outputs.VERSION_NUMBER}} major
|
||||||
|
git status
|
||||||
|
|
||||||
|
- name: Commit version bump directly
|
||||||
|
uses: EndBug/add-and-commit@v7
|
||||||
|
if: ${{ steps.variables.outputs.IS_DRY_RUN == 'false' }}
|
||||||
|
with:
|
||||||
|
author_name: 'Github Build Bot'
|
||||||
|
author_email: 'buildbot@fishtownanalytics.com'
|
||||||
|
message: 'Bumping version to ${{steps.variables.outputs.VERSION_NUMBER}}'
|
||||||
|
|
||||||
|
- name: Commit version bump to branch
|
||||||
|
uses: EndBug/add-and-commit@v7
|
||||||
|
if: ${{ steps.variables.outputs.IS_DRY_RUN == 'true' }}
|
||||||
|
with:
|
||||||
|
author_name: 'Github Build Bot'
|
||||||
|
author_email: 'buildbot@fishtownanalytics.com'
|
||||||
|
message: 'Bumping version to ${{steps.variables.outputs.VERSION_NUMBER}}'
|
||||||
|
branch: 'bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_${{GITHUB.RUN_ID}}'
|
||||||
|
push: 'origin origin/bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_${{GITHUB.RUN_ID}}'
|
||||||
|
|
||||||
|
- name: Create Pull Request
|
||||||
|
uses: peter-evans/create-pull-request@v3
|
||||||
|
if: ${{ steps.variables.outputs.IS_DRY_RUN == 'true' }}
|
||||||
|
with:
|
||||||
|
author: 'Github Build Bot <buildbot@fishtownanalytics.com>'
|
||||||
|
draft: true
|
||||||
|
base: ${{github.ref}}
|
||||||
|
title: 'Bumping version to ${{steps.variables.outputs.VERSION_NUMBER}}'
|
||||||
|
branch: 'bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_${{GITHUB.RUN_ID}}'
|
||||||
29
.gitignore
vendored
29
.gitignore
vendored
@@ -11,11 +11,8 @@ __pycache__/
|
|||||||
env*/
|
env*/
|
||||||
dbt_env/
|
dbt_env/
|
||||||
build/
|
build/
|
||||||
!tests/functional/build
|
|
||||||
!core/dbt/docs/build
|
|
||||||
develop-eggs/
|
develop-eggs/
|
||||||
dist/
|
dist/
|
||||||
dist-*/
|
|
||||||
downloads/
|
downloads/
|
||||||
eggs/
|
eggs/
|
||||||
.eggs/
|
.eggs/
|
||||||
@@ -27,11 +24,8 @@ var/
|
|||||||
*.egg-info/
|
*.egg-info/
|
||||||
.installed.cfg
|
.installed.cfg
|
||||||
*.egg
|
*.egg
|
||||||
.mypy_cache/
|
*.mypy_cache/
|
||||||
.dmypy.json
|
|
||||||
logs/
|
logs/
|
||||||
.user.yml
|
|
||||||
profiles.yml
|
|
||||||
|
|
||||||
# PyInstaller
|
# PyInstaller
|
||||||
# Usually these files are written by a python script from a template
|
# Usually these files are written by a python script from a template
|
||||||
@@ -55,12 +49,9 @@ coverage.xml
|
|||||||
*,cover
|
*,cover
|
||||||
.hypothesis/
|
.hypothesis/
|
||||||
test.env
|
test.env
|
||||||
makefile.test.env
|
|
||||||
*.pytest_cache/
|
|
||||||
|
|
||||||
# Unit test artifacts
|
|
||||||
index.html
|
|
||||||
|
|
||||||
|
# Mypy
|
||||||
|
.mypy_cache/
|
||||||
|
|
||||||
# Translations
|
# Translations
|
||||||
*.mo
|
*.mo
|
||||||
@@ -75,10 +66,10 @@ docs/_build/
|
|||||||
# PyBuilder
|
# PyBuilder
|
||||||
target/
|
target/
|
||||||
|
|
||||||
# Ipython Notebook
|
#Ipython Notebook
|
||||||
.ipynb_checkpoints
|
.ipynb_checkpoints
|
||||||
|
|
||||||
# Emacs
|
#Emacs
|
||||||
*~
|
*~
|
||||||
|
|
||||||
# Sublime Text
|
# Sublime Text
|
||||||
@@ -87,7 +78,6 @@ target/
|
|||||||
# Vim
|
# Vim
|
||||||
*.sw*
|
*.sw*
|
||||||
|
|
||||||
# Pyenv
|
|
||||||
.python-version
|
.python-version
|
||||||
|
|
||||||
# Vim
|
# Vim
|
||||||
@@ -96,20 +86,11 @@ target/
|
|||||||
# pycharm
|
# pycharm
|
||||||
.idea/
|
.idea/
|
||||||
venv/
|
venv/
|
||||||
.venv*/
|
|
||||||
|
|
||||||
# AWS credentials
|
# AWS credentials
|
||||||
.aws/
|
.aws/
|
||||||
|
|
||||||
# MacOS
|
|
||||||
.DS_Store
|
.DS_Store
|
||||||
|
|
||||||
# vscode
|
# vscode
|
||||||
.vscode/
|
.vscode/
|
||||||
*.code-workspace
|
|
||||||
|
|
||||||
# poetry
|
|
||||||
poetry.lock
|
|
||||||
|
|
||||||
# asdf
|
|
||||||
.tool-versions
|
|
||||||
|
|||||||
@@ -1,4 +0,0 @@
|
|||||||
[settings]
|
|
||||||
profile=black
|
|
||||||
extend_skip_glob=.github/*,third-party-stubs/*,scripts/*
|
|
||||||
known_first_party=dbt,dbt_adapters,dbt_common,dbt_extractor,dbt_semantic_interfaces
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user