Mirror of https://github.com/dbt-labs/dbt-core, synced 2025-12-19 19:41:28 +00:00
Compare commits: adding-sem...v1.4.0b1 (23 commits)

| SHA1 |
|---|
| df93858b4b |
| e8da84fb9e |
| 7e90e067af |
| 5e4e917de5 |
| 05dc0212e7 |
| c00052cbfb |
| 3d54a83822 |
| fafd5edbda |
| 8478262580 |
| 83b1fee062 |
| 0fbbc896b2 |
| 0544b08543 |
| bef6edb942 |
| 99f27de934 |
| 9c91f3a7bd |
| 1b6fed2ffd |
| 0721f2c1b7 |
| b9a35da118 |
| 60f80056b1 |
| 540c3b79aa |
| 16f529e1d4 |
| ebfcf2a9ef |
| 67a8138b65 |
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 1.4.0a1
+current_version = 1.4.0b1
 parse = (?P<major>\d+)
 	\.(?P<minor>\d+)
 	\.(?P<patch>\d+)
.changes/1.4.0-b1.md (new file, 92 lines)
@@ -0,0 +1,92 @@
## dbt-core 1.4.0-b1 - December 15, 2022

### Features

- Added favor-state flag to optionally favor state nodes even if unselected node exists ([#2968](https://github.com/dbt-labs/dbt-core/issues/2968))
- Update structured logging. Convert to using protobuf messages. Ensure events are enriched with node_info. ([#5610](https://github.com/dbt-labs/dbt-core/issues/5610))
- Friendlier error messages when packages.yml is malformed ([#5486](https://github.com/dbt-labs/dbt-core/issues/5486))
- Migrate dbt-utils current_timestamp macros into core + adapters ([#5521](https://github.com/dbt-labs/dbt-core/issues/5521))
- Allow partitions in external tables to be supplied as a list ([#5929](https://github.com/dbt-labs/dbt-core/issues/5929))
- extend -f flag shorthand for seed command ([#5990](https://github.com/dbt-labs/dbt-core/issues/5990))
- This pulls the profile name from args when constructing a RuntimeConfig in lib.py, enabling the dbt-server to override the value that's in the dbt_project.yml ([#6201](https://github.com/dbt-labs/dbt-core/issues/6201))
- Adding tarball install method for packages. Allowing package tarball to be specified via url in the packages.yaml. ([#4205](https://github.com/dbt-labs/dbt-core/issues/4205))
- Added an md5 function to the base context ([#6246](https://github.com/dbt-labs/dbt-core/issues/6246))
- Exposures support metrics in lineage ([#6057](https://github.com/dbt-labs/dbt-core/issues/6057))
- Add support for Python 3.11 ([#6147](https://github.com/dbt-labs/dbt-core/issues/6147))
- incremental predicates ([#5680](https://github.com/dbt-labs/dbt-core/issues/5680))

### Fixes

- Account for disabled flags on models in schema files more completely ([#3992](https://github.com/dbt-labs/dbt-core/issues/3992))
- Add validation of enabled config for metrics, exposures and sources ([#6030](https://github.com/dbt-labs/dbt-core/issues/6030))
- check length of args of python model function before accessing it ([#6041](https://github.com/dbt-labs/dbt-core/issues/6041))
- Add functors to ensure event types with str-type attributes are initialized to spec, even when provided non-str type params. ([#5436](https://github.com/dbt-labs/dbt-core/issues/5436))
- Allow hooks to fail without halting execution flow ([#5625](https://github.com/dbt-labs/dbt-core/issues/5625))
- Clarify Error Message for how many models are allowed in a Python file ([#6245](https://github.com/dbt-labs/dbt-core/issues/6245))
- After this, will be possible to use default values for dbt.config.get ([#6309](https://github.com/dbt-labs/dbt-core/issues/6309))
- Use full path for writing manifest ([#6055](https://github.com/dbt-labs/dbt-core/issues/6055))
- [CT-1284] Change Python model default materialization to table ([#6345](https://github.com/dbt-labs/dbt-core/issues/6345))
- Repair a regression which prevented basic logging before the logging subsystem is completely configured. ([#6434](https://github.com/dbt-labs/dbt-core/issues/6434))

### Docs

- minor doc correction ([dbt-docs/#5791](https://github.com/dbt-labs/dbt-docs/issues/5791))
- Generate API docs for new CLI interface ([dbt-docs/#5528](https://github.com/dbt-labs/dbt-docs/issues/5528))
- ([dbt-docs/#5880](https://github.com/dbt-labs/dbt-docs/issues/5880))
- Fix rendering of sample code for metrics ([dbt-docs/#323](https://github.com/dbt-labs/dbt-docs/issues/323))
- Alphabetize `core/dbt/README.md` ([dbt-docs/#6368](https://github.com/dbt-labs/dbt-docs/issues/6368))

### Under the Hood

- Put black config in explicit config ([#5946](https://github.com/dbt-labs/dbt-core/issues/5946))
- Added flat_graph attribute the Manifest class's deepcopy() coverage ([#5809](https://github.com/dbt-labs/dbt-core/issues/5809))
- Add mypy configs so `mypy` passes from CLI ([#5983](https://github.com/dbt-labs/dbt-core/issues/5983))
- Exception message cleanup. ([#6023](https://github.com/dbt-labs/dbt-core/issues/6023))
- Add dmypy cache to gitignore ([#6028](https://github.com/dbt-labs/dbt-core/issues/6028))
- Provide useful errors when the value of 'materialized' is invalid ([#5229](https://github.com/dbt-labs/dbt-core/issues/5229))
- Clean up string formatting ([#6068](https://github.com/dbt-labs/dbt-core/issues/6068))
- Fixed extra whitespace in strings introduced by black. ([#1350](https://github.com/dbt-labs/dbt-core/issues/1350))
- Remove the 'root_path' field from most nodes ([#6171](https://github.com/dbt-labs/dbt-core/issues/6171))
- Combine certain logging events with different levels ([#6173](https://github.com/dbt-labs/dbt-core/issues/6173))
- Convert threading tests to pytest ([#5942](https://github.com/dbt-labs/dbt-core/issues/5942))
- Convert postgres index tests to pytest ([#5770](https://github.com/dbt-labs/dbt-core/issues/5770))
- Convert use color tests to pytest ([#5771](https://github.com/dbt-labs/dbt-core/issues/5771))
- Add github actions workflow to generate high level CLI API docs ([#5942](https://github.com/dbt-labs/dbt-core/issues/5942))
- Functionality-neutral refactor of event logging system to improve encapsulation and modularity. ([#6139](https://github.com/dbt-labs/dbt-core/issues/6139))
- Consolidate ParsedNode and CompiledNode classes ([#6383](https://github.com/dbt-labs/dbt-core/issues/6383))
- Prevent doc gen workflow from running on forks ([#6386](https://github.com/dbt-labs/dbt-core/issues/6386))
- Fix intermittent database connection failure in Windows CI test ([#6394](https://github.com/dbt-labs/dbt-core/issues/6394))
- Refactor and clean up manifest nodes ([#6426](https://github.com/dbt-labs/dbt-core/issues/6426))
- Restore important legacy logging behaviors, following refactor which removed them ([#6437](https://github.com/dbt-labs/dbt-core/issues/6437))

### Dependencies

- Update pathspec requirement from ~=0.9.0 to >=0.9,<0.11 in /core ([#5917](https://github.com/dbt-labs/dbt-core/pull/5917))
- Bump black from 22.8.0 to 22.10.0 ([#6019](https://github.com/dbt-labs/dbt-core/pull/6019))
- Bump mashumaro[msgpack] from 3.0.4 to 3.1.1 in /core ([#6108](https://github.com/dbt-labs/dbt-core/pull/6108))
- Update colorama requirement from <0.4.6,>=0.3.9 to >=0.3.9,<0.4.7 in /core ([#6144](https://github.com/dbt-labs/dbt-core/pull/6144))

### Dependency

- Bump mashumaro[msgpack] from 3.1.1 to 3.2 in /core ([#4904](https://github.com/dbt-labs/dbt-core/issues/4904))

### Contributors
- [@andy-clapson](https://github.com/andy-clapson) ([dbt-docs/#5791](https://github.com/dbt-labs/dbt-docs/issues/5791))
- [@chamini2](https://github.com/chamini2) ([#6041](https://github.com/dbt-labs/dbt-core/issues/6041))
- [@daniel-murray](https://github.com/daniel-murray) ([#2968](https://github.com/dbt-labs/dbt-core/issues/2968))
- [@dave-connors-3](https://github.com/dave-connors-3) ([#5990](https://github.com/dbt-labs/dbt-core/issues/5990))
- [@dbeatty10](https://github.com/dbeatty10) ([dbt-docs/#6368](https://github.com/dbt-labs/dbt-docs/issues/6368), [#6394](https://github.com/dbt-labs/dbt-core/issues/6394))
- [@devmessias](https://github.com/devmessias) ([#6309](https://github.com/dbt-labs/dbt-core/issues/6309))
- [@eve-johns](https://github.com/eve-johns) ([#6068](https://github.com/dbt-labs/dbt-core/issues/6068))
- [@haritamar](https://github.com/haritamar) ([#6246](https://github.com/dbt-labs/dbt-core/issues/6246))
- [@jared-rimmer](https://github.com/jared-rimmer) ([#5486](https://github.com/dbt-labs/dbt-core/issues/5486))
- [@josephberni](https://github.com/josephberni) ([#2968](https://github.com/dbt-labs/dbt-core/issues/2968))
- [@joshuataylor](https://github.com/joshuataylor) ([#6147](https://github.com/dbt-labs/dbt-core/issues/6147))
- [@justbldwn](https://github.com/justbldwn) ([#6245](https://github.com/dbt-labs/dbt-core/issues/6245))
- [@luke-bassett](https://github.com/luke-bassett) ([#1350](https://github.com/dbt-labs/dbt-core/issues/1350))
- [@max-sixty](https://github.com/max-sixty) ([#5946](https://github.com/dbt-labs/dbt-core/issues/5946), [#5983](https://github.com/dbt-labs/dbt-core/issues/5983), [#6028](https://github.com/dbt-labs/dbt-core/issues/6028))
- [@paulbenschmidt](https://github.com/paulbenschmidt) ([dbt-docs/#5880](https://github.com/dbt-labs/dbt-docs/issues/5880))
- [@pgoslatara](https://github.com/pgoslatara) ([#5929](https://github.com/dbt-labs/dbt-core/issues/5929))
- [@racheldaniel](https://github.com/racheldaniel) ([#6201](https://github.com/dbt-labs/dbt-core/issues/6201))
- [@timle2](https://github.com/timle2) ([#4205](https://github.com/dbt-labs/dbt-core/issues/4205))
- [@dave-connors-3](https://github.com/dave-connors-3) ([#5680](https://github.com/dbt-labs/dbt-core/issues/5680))
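Of the entries above, the new `md5` function in the base context (#6246) is easy to picture: it exposes an MD5 hex-digest helper to the Jinja rendering context. A minimal sketch of such a helper, assuming it simply wraps `hashlib` (the actual dbt-core implementation may differ):

```python
import hashlib

def md5(value: str) -> str:
    # Hex MD5 digest of a UTF-8 string, the way a Jinja context helper
    # would expose it, e.g. {{ md5(this.name) }} in a model or macro.
    return hashlib.md5(value.encode("utf-8")).hexdigest()

assert len(md5("dbt")) == 32  # always a 32-character hex string
```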
@@ -1,7 +1,6 @@
-kind: "Dependency"
+kind: "Dependencies"
 body: "Update pathspec requirement from ~=0.9.0 to >=0.9,<0.11 in /core"
 time: 2022-09-23T00:06:46.00000Z
 custom:
   Author: dependabot[bot]
-  Issue: 4904
-  PR: 5917
+  PR: "5917"

@@ -1,7 +1,6 @@
-kind: "Dependency"
+kind: "Dependencies"
 body: "Bump black from 22.8.0 to 22.10.0"
 time: 2022-10-07T00:08:48.00000Z
 custom:
   Author: dependabot[bot]
-  Issue: 4904
-  PR: 6019
+  PR: "6019"

@@ -1,7 +1,6 @@
-kind: "Dependency"
+kind: "Dependencies"
 body: "Bump mashumaro[msgpack] from 3.0.4 to 3.1.1 in /core"
 time: 2022-10-20T00:07:53.00000Z
 custom:
   Author: dependabot[bot]
-  Issue: 4904
-  PR: 6108
+  PR: "6108"

@@ -1,7 +1,6 @@
-kind: "Dependency"
+kind: "Dependencies"
 body: "Update colorama requirement from <0.4.6,>=0.3.9 to >=0.3.9,<0.4.7 in /core"
 time: 2022-10-26T00:09:10.00000Z
 custom:
   Author: dependabot[bot]
-  Issue: 4904
-  PR: 6144
+  PR: "6144"
.changes/1.4.0/Dependency-20221205-002118.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: "Dependency"
body: "Bump mashumaro[msgpack] from 3.1.1 to 3.2 in /core"
time: 2022-12-05T00:21:18.00000Z
custom:
  Author: dependabot[bot]
  Issue: 4904
  PR: 6375
@@ -4,4 +4,3 @@ time: 2022-09-08T15:41:57.689162-04:00
 custom:
   Author: andy-clapson
   Issue: "5791"
-  PR: "5684"

@@ -4,4 +4,3 @@ time: 2022-10-07T09:06:56.446078-05:00
 custom:
   Author: stu-k
   Issue: "5528"
-  PR: "6022"

@@ -3,4 +3,3 @@ time: 2022-10-17T17:14:11.715348-05:00
 custom:
   Author: paulbenschmidt
   Issue: "5880"
-  PR: "324"

@@ -4,4 +4,3 @@ time: 2022-11-16T15:57:43.204201+01:00
 custom:
   Author: jtcohen6
   Issue: "323"
-  PR: "346"
.changes/1.4.0/Docs-20221202-150523.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Docs
body: Alphabetize `core/dbt/README.md`
time: 2022-12-02T15:05:23.695333-07:00
custom:
  Author: dbeatty10
  Issue: "6368"
@@ -5,4 +5,3 @@ time: 2022-04-08T16:54:59.696564+01:00
 custom:
   Author: daniel-murray josephberni
   Issue: "2968"
-  PR: "5859"
.changes/1.4.0/Features-20220817-154857.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Features
body: Update structured logging. Convert to using protobuf messages. Ensure events are enriched with node_info.
time: 2022-08-17T15:48:57.225267-04:00
custom:
  Author: gshank
  Issue: "5610"

.changes/1.4.0/Features-20220823-085727.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: Features
body: incremental predicates
time: 2022-08-23T08:57:27.640804-05:00
custom:
  Author: dave-connors-3
  Issue: "5680"
  PR: "5702"
@@ -4,4 +4,3 @@ time: 2022-09-12T12:59:35.121188+01:00
 custom:
   Author: jared-rimmer
   Issue: "5486"
-  PR: "5812"

@@ -4,4 +4,3 @@ time: 2022-09-14T09:56:25.97818-07:00
 custom:
   Author: colin-rogers-dbt
   Issue: "5521"
-  PR: "5838"

@@ -4,4 +4,3 @@ time: 2022-09-25T21:16:51.051239654+02:00
 custom:
   Author: pgoslatara
   Issue: "5929"
-  PR: "5930"

@@ -4,4 +4,3 @@ time: 2022-10-03T11:07:05.381632-05:00
 custom:
   Author: dave-connors-3
   Issue: "5990"
-  PR: "5991"

@@ -5,4 +5,3 @@ time: 2022-11-02T15:00:03.000805-05:00
 custom:
   Author: racheldaniel
   Issue: "6201"
-  PR: "6202"
.changes/1.4.0/Features-20221107-105018.yaml (new file, 8 lines)
@@ -0,0 +1,8 @@
kind: Features
body: Adding tarball install method for packages. Allowing package tarball to be specified
  via url in the packages.yaml.
time: 2022-11-07T10:50:18.464545-05:00
custom:
  Author: timle2
  Issue: "4205"
  PR: "4689"
@@ -4,4 +4,3 @@ time: 2022-11-14T18:52:07.788593+02:00
 custom:
   Author: haritamar
   Issue: "6246"
-  PR: "6247"

@@ -4,4 +4,3 @@ time: 2022-11-30T11:29:13.256034-05:00
 custom:
   Author: michelleark
   Issue: "6057"
-  PR: "6342"
.changes/1.4.0/Features-20221206-150704.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: Features
body: Add support for Python 3.11
time: 2022-12-06T15:07:04.753127+01:00
custom:
  Author: joshuataylor MichelleArk jtcohen6
  Issue: "6147"
  PR: "6326"
@@ -4,4 +4,3 @@ time: 2022-09-16T10:48:54.162273-05:00
 custom:
   Author: emmyoop
   Issue: "3992"
-  PR: "5868"

@@ -4,4 +4,3 @@ time: 2022-10-10T11:32:18.752322-05:00
 custom:
   Author: emmyoop
   Issue: "6030"
-  PR: "6038"

@@ -4,4 +4,3 @@ time: 2022-10-11T16:07:15.464093-04:00
 custom:
   Author: chamini2
   Issue: "6041"
-  PR: "6042"

@@ -5,4 +5,3 @@ time: 2022-10-16T17:37:42.846683-07:00
 custom:
   Author: versusfacit
   Issue: "5436"
-  PR: "5874"

@@ -4,4 +4,3 @@ time: 2022-11-07T09:53:14.340257-06:00
 custom:
   Author: ChenyuLInx
   Issue: "5625"
-  PR: "6059"

@@ -4,4 +4,3 @@ time: 2022-11-15T08:10:21.527884-05:00
 custom:
   Author: justbldwn
   Issue: "6245"
-  PR: "6251"
.changes/1.4.0/Fixes-20221124-163419.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: Fixes
body: After this, will be possible to use default values for dbt.config.get
time: 2022-11-24T16:34:19.039512764-03:00
custom:
  Author: devmessias
  Issue: "6309"
  PR: "6317"

.changes/1.4.0/Fixes-20221202-164859.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Fixes
body: Use full path for writing manifest
time: 2022-12-02T16:48:59.029519-05:00
custom:
  Author: gshank
  Issue: "6055"

.changes/1.4.0/Fixes-20221213-112620.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Fixes
body: '[CT-1284] Change Python model default materialization to table'
time: 2022-12-13T11:26:20.550017-08:00
custom:
  Author: aranke
  Issue: "6345"

.changes/1.4.0/Fixes-20221214-155307.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: Fixes
body: Repair a regression which prevented basic logging before the logging subsystem
  is completely configured.
time: 2022-12-14T15:53:07.396512-05:00
custom:
  Author: peterallenwebb
  Issue: "6434"
@@ -4,4 +4,3 @@ time: 2022-09-27T19:42:59.241433-07:00
 custom:
   Author: max-sixty
   Issue: "5946"
-  PR: "5947"

@@ -4,4 +4,3 @@ time: 2022-09-29T13:44:06.275941-04:00
 custom:
   Author: peterallenwebb
   Issue: "5809"
-  PR: "5975"

@@ -4,4 +4,3 @@ time: 2022-10-05T12:03:10.061263-07:00
 custom:
   Author: max-sixty
   Issue: "5983"
-  PR: "5983"

@@ -4,4 +4,3 @@ time: 2022-10-07T09:46:27.682872-05:00
 custom:
   Author: emmyoop
   Issue: "6023"
-  PR: "6024"

@@ -4,4 +4,3 @@ time: 2022-10-07T14:00:44.227644-07:00
 custom:
   Author: max-sixty
   Issue: "6028"
-  PR: "5978"

@@ -4,4 +4,3 @@ time: 2022-10-13T18:19:12.167548-04:00
 custom:
   Author: peterallenwebb
   Issue: "5229"
-  PR: "6025"

@@ -4,4 +4,3 @@ time: 2022-10-17T15:15:11.499246-05:00
 custom:
   Author: luke-bassett
   Issue: "1350"
-  PR: "6086"

@@ -4,4 +4,3 @@ time: 2022-10-17T15:58:44.676549-04:00
 custom:
   Author: eve-johns
   Issue: "6068"
-  PR: "6082"

@@ -4,4 +4,3 @@ time: 2022-10-28T10:48:37.687886-04:00
 custom:
   Author: gshank
   Issue: "6171"
-  PR: "6172"

@@ -4,4 +4,3 @@ time: 2022-10-28T11:03:44.887836-04:00
 custom:
   Author: gshank
   Issue: "6173"
-  PR: "6174"

@@ -4,4 +4,3 @@ time: 2022-11-08T07:45:50.589147-06:00
 custom:
   Author: stu-k
   Issue: "5942"
-  PR: "6226"

@@ -4,4 +4,3 @@ time: 2022-11-08T11:56:33.743042-06:00
 custom:
   Author: stu-k
   Issue: "5770"
-  PR: "6228"

@@ -4,4 +4,3 @@ time: 2022-11-08T13:31:04.788547-06:00
 custom:
   Author: stu-k
   Issue: "5771"
-  PR: "6230"

@@ -4,4 +4,3 @@ time: 2022-11-16T13:00:37.916202-06:00
 custom:
   Author: stu-k
   Issue: "5942"
-  PR: "6187"
.changes/1.4.0/Under the Hood-20221118-145717.yaml (new file, 8 lines)
@@ -0,0 +1,8 @@
kind: Under the Hood
body: Functionality-neutral refactor of event logging system to improve encapsulation
  and modularity.
time: 2022-11-18T14:57:17.792622-05:00
custom:
  Author: peterallenwebb
  Issue: "6139"
  PR: "6291"

.changes/1.4.0/Under the Hood-20221205-164948.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: Under the Hood
body: Consolidate ParsedNode and CompiledNode classes
time: 2022-12-05T16:49:48.563583-05:00
custom:
  Author: gshank
  Issue: "6383"
  PR: "6384"

.changes/1.4.0/Under the Hood-20221206-094015.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: Under the Hood
body: Prevent doc gen workflow from running on forks
time: 2022-12-06T09:40:15.301984-06:00
custom:
  Author: stu-k
  Issue: "6386"
  PR: "6390"

.changes/1.4.0/Under the Hood-20221206-113053.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: Under the Hood
body: Fix intermittent database connection failure in Windows CI test
time: 2022-12-06T11:30:53.166009-07:00
custom:
  Author: MichelleArk dbeatty10
  Issue: "6394"
  PR: "6395"

.changes/1.4.0/Under the Hood-20221211-214240.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: Under the Hood
body: Refactor and clean up manifest nodes
time: 2022-12-11T21:42:40.560074-05:00
custom:
  Author: gshank
  Issue: "6426"
  PR: "6427"

.changes/1.4.0/Under the Hood-20221213-214106.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: Under the Hood
body: Restore important legacy logging behaviors, following refactor which removed
  them
time: 2022-12-13T21:41:06.815133-05:00
custom:
  Author: peterallenwebb
  Issue: "6437"
@@ -1,7 +0,0 @@
-kind: Features
-body: Proto logging messages
-time: 2022-08-17T15:48:57.225267-04:00
-custom:
-  Author: gshank
-  Issue: "5610"
-  PR: "5643"
.changie.yaml (107 changes)
@@ -6,19 +6,67 @@ changelogPath: CHANGELOG.md
 versionExt: md
 versionFormat: '## dbt-core {{.Version}} - {{.Time.Format "January 02, 2006"}}'
 kindFormat: '### {{.Kind}}'
-changeFormat: '- {{.Body}} ([#{{.Custom.Issue}}](https://github.com/dbt-labs/dbt-core/issues/{{.Custom.Issue}}), [#{{.Custom.PR}}](https://github.com/dbt-labs/dbt-core/pull/{{.Custom.PR}}))'
+changeFormat: |-
+  {{- $IssueList := list }}
+  {{- $changes := splitList " " $.Custom.Issue }}
+  {{- range $issueNbr := $changes }}
+  {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/issues/nbr)" | replace "nbr" $issueNbr }}
+  {{- $IssueList = append $IssueList $changeLink }}
+  {{- end -}}
+  - {{.Body}} ({{ range $index, $element := $IssueList }}{{if $index}}, {{end}}{{$element}}{{end}})

 kinds:
 - label: Breaking Changes
 - label: Features
 - label: Fixes
 - label: Docs
-  changeFormat: '- {{.Body}} ([dbt-docs/#{{.Custom.Issue}}](https://github.com/dbt-labs/dbt-docs/issues/{{.Custom.Issue}}), [dbt-docs/#{{.Custom.PR}}](https://github.com/dbt-labs/dbt-docs/pull/{{.Custom.PR}}))'
+  changeFormat: |-
+    {{- $IssueList := list }}
+    {{- $changes := splitList " " $.Custom.Issue }}
+    {{- range $issueNbr := $changes }}
+    {{- $changeLink := "[dbt-docs/#nbr](https://github.com/dbt-labs/dbt-docs/issues/nbr)" | replace "nbr" $issueNbr }}
+    {{- $IssueList = append $IssueList $changeLink }}
+    {{- end -}}
+    - {{.Body}} ({{ range $index, $element := $IssueList }}{{if $index}}, {{end}}{{$element}}{{end}})
 - label: Under the Hood
 - label: Dependencies
-  changeFormat: '- {{.Body}} ({{if ne .Custom.Issue ""}}[#{{.Custom.Issue}}](https://github.com/dbt-labs/dbt-core/issues/{{.Custom.Issue}}), {{end}}[#{{.Custom.PR}}](https://github.com/dbt-labs/dbt-core/pull/{{.Custom.PR}}))'
+  changeFormat: |-
+    {{- $PRList := list }}
+    {{- $changes := splitList " " $.Custom.PR }}
+    {{- range $pullrequest := $changes }}
+    {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $pullrequest }}
+    {{- $PRList = append $PRList $changeLink }}
+    {{- end -}}
+    - {{.Body}} ({{ range $index, $element := $PRList }}{{if $index}}, {{end}}{{$element}}{{end}})
+  skipGlobalChoices: true
+  additionalChoices:
+  - key: Author
+    label: GitHub Username(s) (separated by a single space if multiple)
+    type: string
+    minLength: 3
+  - key: PR
+    label: GitHub Pull Request Number (separated by a single space if multiple)
+    type: string
+    minLength: 1
 - label: Security
-  changeFormat: '- {{.Body}} ({{if ne .Custom.Issue ""}}[#{{.Custom.Issue}}](https://github.com/dbt-labs/dbt-core/issues/{{.Custom.Issue}}), {{end}}[#{{.Custom.PR}}](https://github.com/dbt-labs/dbt-core/pull/{{.Custom.PR}}))'
+  changeFormat: |-
+    {{- $PRList := list }}
+    {{- $changes := splitList " " $.Custom.PR }}
+    {{- range $pullrequest := $changes }}
+    {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $pullrequest }}
+    {{- $PRList = append $PRList $changeLink }}
+    {{- end -}}
+    - {{.Body}} ({{ range $index, $element := $PRList }}{{if $index}}, {{end}}{{$element}}{{end}})
+  skipGlobalChoices: true
+  additionalChoices:
+  - key: Author
+    label: GitHub Username(s) (separated by a single space if multiple)
+    type: string
+    minLength: 3
+  - key: PR
+    label: GitHub Pull Request Number (separated by a single space if multiple)
+    type: string
+    minLength: 1

 newlines:
   afterChangelogHeader: 1
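The rewritten `changeFormat` templates above all follow one pattern: `Custom.Issue` (or `Custom.PR` for the Dependencies and Security kinds) may now hold several space-separated numbers, and the template emits one markdown link per number. A small Python re-implementation of that transformation, for illustration only (the two issue numbers are hypothetical):

```python
def format_change(body: str, issues: str) -> str:
    # Mirror the new changie template: split space-separated issue numbers
    # and render a markdown link for each one.
    links = [
        f"[#{n}](https://github.com/dbt-labs/dbt-core/issues/{n})"
        for n in issues.split(" ")
    ]
    return f"- {body} ({', '.join(links)})"

# An entry crediting two issues renders both links:
print(format_change("Added favor-state flag", "2968 5859"))
```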
@@ -33,42 +81,41 @@ custom:
   type: string
   minLength: 3
 - key: Issue
-  label: GitHub Issue Number
-  type: int
-  minInt: 1
-- key: PR
-  label: GitHub Pull Request Number
-  type: int
-  minInt: 1
+  label: GitHub Issue Number (separated by a single space if multiple)
+  type: string
+  minLength: 1

 footerFormat: |
   {{- $contributorDict := dict }}
   {{- /* any names added to this list should be all lowercase for later matching purposes */}}
-  {{- $core_team := list "michelleark" "peterallenwebb" "emmyoop" "nathaniel-may" "gshank" "leahwicz" "chenyulinx" "stu-k" "iknox-fa" "versusfacit" "mcknight-42" "jtcohen6" "dependabot[bot]" "snyk-bot" "colin-rogers-dbt" }}
+  {{- $core_team := list "michelleark" "peterallenwebb" "emmyoop" "nathaniel-may" "gshank" "leahwicz" "chenyulinx" "stu-k" "iknox-fa" "versusfacit" "mcknight-42" "jtcohen6" "aranke" "dependabot[bot]" "snyk-bot" "colin-rogers-dbt" }}
   {{- range $change := .Changes }}
   {{- $authorList := splitList " " $change.Custom.Author }}
-  {{- /* loop through all authors for a PR */}}
+  {{- /* loop through all authors for a single changelog */}}
   {{- range $author := $authorList }}
   {{- $authorLower := lower $author }}
   {{- /* we only want to include non-core team contributors */}}
   {{- if not (has $authorLower $core_team)}}
-  {{- /* Docs kind link back to dbt-docs instead of dbt-core PRs */}}
-  {{- $prLink := $change.Kind }}
-  {{- if eq $change.Kind "Docs" }}
-  {{- $prLink = "[dbt-docs/#pr](https://github.com/dbt-labs/dbt-docs/pull/pr)" | replace "pr" $change.Custom.PR }}
-  {{- else }}
-  {{- $prLink = "[#pr](https://github.com/dbt-labs/dbt-core/pull/pr)" | replace "pr" $change.Custom.PR }}
-  {{- end }}
-  {{- /* check if this contributor has other PRs associated with them already */}}
-  {{- if hasKey $contributorDict $author }}
-  {{- $prList := get $contributorDict $author }}
-  {{- $prList = append $prList $prLink }}
-  {{- $contributorDict := set $contributorDict $author $prList }}
-  {{- else }}
-  {{- $prList := list $prLink }}
-  {{- $contributorDict := set $contributorDict $author $prList }}
-  {{- end }}
-  {{- end}}
+  {{- $changeList := splitList " " $change.Custom.Author }}
+  {{- /* Docs kind link back to dbt-docs instead of dbt-core issues */}}
+  {{- $changeLink := $change.Kind }}
+  {{- if or (eq $change.Kind "Dependencies") (eq $change.Kind "Security") }}
+  {{- $changeLink = "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $change.Custom.PR }}
+  {{- else if eq $change.Kind "Docs"}}
+  {{- $changeLink = "[dbt-docs/#nbr](https://github.com/dbt-labs/dbt-docs/issues/nbr)" | replace "nbr" $change.Custom.Issue }}
+  {{- else }}
+  {{- $changeLink = "[#nbr](https://github.com/dbt-labs/dbt-core/issues/nbr)" | replace "nbr" $change.Custom.Issue }}
+  {{- end }}
+  {{- /* check if this contributor has other changes associated with them already */}}
+  {{- if hasKey $contributorDict $author }}
+  {{- $contributionList := get $contributorDict $author }}
+  {{- $contributionList = append $contributionList $changeLink }}
+  {{- $contributorDict := set $contributorDict $author $contributionList }}
+  {{- else }}
+  {{- $contributionList := list $changeLink }}
+  {{- $contributorDict := set $contributorDict $author $contributionList }}
+  {{- end }}
+  {{- end}}
   {{- end}}
   {{- end }}
   {{- /* no indentation here for formatting so the final markdown doesn't have unneeded indentations */}}
.github/workflows/bot-changelog.yml (4 changes)
@@ -40,7 +40,7 @@ jobs:
       matrix:
         include:
           - label: "dependencies"
-            changie_kind: "Dependency"
+            changie_kind: "Dependencies"
           - label: "snyk"
             changie_kind: "Security"
     runs-on: ubuntu-latest

@@ -58,4 +58,4 @@ jobs:
       commit_message: "Add automated changelog yaml from template for bot PR"
       changie_kind: ${{ matrix.changie_kind }}
       label: ${{ matrix.label }}
-      custom_changelog_string: "custom:\n Author: ${{ github.event.pull_request.user.login }}\n Issue: 4904\n PR: ${{ github.event.pull_request.number }}"
+      custom_changelog_string: "custom:\n Author: ${{ github.event.pull_request.user.login }}\n PR: ${{ github.event.pull_request.number }}"
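In the second hunk, `custom_changelog_string` is a single line of YAML with embedded `\n` escapes; because the Dependencies kind now sets `skipGlobalChoices`, the hardcoded `Issue: 4904` placeholder is dropped and only the author and PR number are recorded. A quick Python illustration of what the new string expands to (the author and PR number here are hypothetical):

```python
# The workflow emits this single-line string; expanding the \n escapes
# yields the YAML fragment appended to the bot's changelog entry.
template = "custom:\n Author: {author}\n PR: {pr}"
print(template.format(author="dependabot[bot]", pr=6375))
# custom:
#  Author: dependabot[bot]
#  PR: 6375
```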
.github/workflows/generate-cli-api-docs.yml (3 changes)
@@ -34,6 +34,7 @@ jobs:
   check_gen:
     name: check if generation needed
     runs-on: ubuntu-latest
+    if: ${{ github.event.pull_request.head.repo.fork == false }}
     outputs:
       cli_dir_changed: ${{ steps.check_cli.outputs.cli_dir_changed }}
       docs_dir_changed: ${{ steps.check_docs.outputs.docs_dir_changed }}

@@ -44,8 +45,6 @@ jobs:
           echo "env.CLI_DIR: ${{ env.CLI_DIR }}"
           echo "env.DOCS_BUILD_DIR: ${{ env.DOCS_BUILD_DIR }}"
           echo "env.DOCS_DIR: ${{ env.DOCS_DIR }}"
-          echo ">>>>> git log"
-          git log --pretty=oneline | head -5

       - name: git checkout
         uses: actions/checkout@v3
.github/workflows/main.yml (4 changes)
@@ -73,7 +73,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.7", "3.8", "3.9", "3.10"]
+        python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"]

     env:
       TOXENV: "unit"

@@ -118,7 +118,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.7", "3.8", "3.9", "3.10"]
+        python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"]
         os: [ubuntu-20.04]
         include:
           - python-version: 3.8
CHANGELOG.md (93 changes)
@@ -5,6 +5,99 @@
- "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
- Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)

## dbt-core 1.4.0-b1 - December 15, 2022

### Features

- Added favor-state flag to optionally favor state nodes even if unselected node exists ([#2968](https://github.com/dbt-labs/dbt-core/issues/2968))
- Update structured logging. Convert to using protobuf messages. Ensure events are enriched with node_info. ([#5610](https://github.com/dbt-labs/dbt-core/issues/5610))
- Friendlier error messages when packages.yml is malformed ([#5486](https://github.com/dbt-labs/dbt-core/issues/5486))
- Migrate dbt-utils current_timestamp macros into core + adapters ([#5521](https://github.com/dbt-labs/dbt-core/issues/5521))
- Allow partitions in external tables to be supplied as a list ([#5929](https://github.com/dbt-labs/dbt-core/issues/5929))
- extend -f flag shorthand for seed command ([#5990](https://github.com/dbt-labs/dbt-core/issues/5990))
- This pulls the profile name from args when constructing a RuntimeConfig in lib.py, enabling the dbt-server to override the value that's in the dbt_project.yml ([#6201](https://github.com/dbt-labs/dbt-core/issues/6201))
- Adding tarball install method for packages. Allowing package tarball to be specified via url in the packages.yaml. ([#4205](https://github.com/dbt-labs/dbt-core/issues/4205))
- Added an md5 function to the base context ([#6246](https://github.com/dbt-labs/dbt-core/issues/6246))
- Exposures support metrics in lineage ([#6057](https://github.com/dbt-labs/dbt-core/issues/6057))
- Add support for Python 3.11 ([#6147](https://github.com/dbt-labs/dbt-core/issues/6147))
- incremental predicates ([#5680](https://github.com/dbt-labs/dbt-core/issues/5680))

### Fixes

- Account for disabled flags on models in schema files more completely ([#3992](https://github.com/dbt-labs/dbt-core/issues/3992))
- Add validation of enabled config for metrics, exposures and sources ([#6030](https://github.com/dbt-labs/dbt-core/issues/6030))
- check length of args of python model function before accessing it ([#6041](https://github.com/dbt-labs/dbt-core/issues/6041))
- Add functors to ensure event types with str-type attributes are initialized to spec, even when provided non-str type params. ([#5436](https://github.com/dbt-labs/dbt-core/issues/5436))
- Allow hooks to fail without halting execution flow ([#5625](https://github.com/dbt-labs/dbt-core/issues/5625))
- Clarify Error Message for how many models are allowed in a Python file ([#6245](https://github.com/dbt-labs/dbt-core/issues/6245))
- After this, will be possible to use default values for dbt.config.get ([#6309](https://github.com/dbt-labs/dbt-core/issues/6309))
- Use full path for writing manifest ([#6055](https://github.com/dbt-labs/dbt-core/issues/6055))
- [CT-1284] Change Python model default materialization to table ([#6345](https://github.com/dbt-labs/dbt-core/issues/6345))
- Repair a regression which prevented basic logging before the logging subsystem is completely configured. ([#6434](https://github.com/dbt-labs/dbt-core/issues/6434))

### Docs

- minor doc correction ([dbt-docs/#5791](https://github.com/dbt-labs/dbt-docs/issues/5791))
- Generate API docs for new CLI interface ([dbt-docs/#5528](https://github.com/dbt-labs/dbt-docs/issues/5528))
- ([dbt-docs/#5880](https://github.com/dbt-labs/dbt-docs/issues/5880))
- Fix rendering of sample code for metrics ([dbt-docs/#323](https://github.com/dbt-labs/dbt-docs/issues/323))
- Alphabetize `core/dbt/README.md` ([dbt-docs/#6368](https://github.com/dbt-labs/dbt-docs/issues/6368))

### Under the Hood

- Put black config in explicit config ([#5946](https://github.com/dbt-labs/dbt-core/issues/5946))
- Added flat_graph attribute the Manifest class's deepcopy() coverage ([#5809](https://github.com/dbt-labs/dbt-core/issues/5809))
- Add mypy configs so `mypy` passes from CLI ([#5983](https://github.com/dbt-labs/dbt-core/issues/5983))
- Exception message cleanup. ([#6023](https://github.com/dbt-labs/dbt-core/issues/6023))
- Add dmypy cache to gitignore ([#6028](https://github.com/dbt-labs/dbt-core/issues/6028))
- Provide useful errors when the value of 'materialized' is invalid ([#5229](https://github.com/dbt-labs/dbt-core/issues/5229))
- Clean up string formatting ([#6068](https://github.com/dbt-labs/dbt-core/issues/6068))
- Fixed extra whitespace in strings introduced by black. ([#1350](https://github.com/dbt-labs/dbt-core/issues/1350))
- Remove the 'root_path' field from most nodes ([#6171](https://github.com/dbt-labs/dbt-core/issues/6171))
- Combine certain logging events with different levels ([#6173](https://github.com/dbt-labs/dbt-core/issues/6173))
- Convert threading tests to pytest ([#5942](https://github.com/dbt-labs/dbt-core/issues/5942))
- Convert postgres index tests to pytest ([#5770](https://github.com/dbt-labs/dbt-core/issues/5770))
- Convert use color tests to pytest ([#5771](https://github.com/dbt-labs/dbt-core/issues/5771))
- Add github actions workflow to generate high level CLI API docs ([#5942](https://github.com/dbt-labs/dbt-core/issues/5942))
- Functionality-neutral refactor of event logging system to improve encapsulation and modularity. ([#6139](https://github.com/dbt-labs/dbt-core/issues/6139))
- Consolidate ParsedNode and CompiledNode classes ([#6383](https://github.com/dbt-labs/dbt-core/issues/6383))
- Prevent doc gen workflow from running on forks ([#6386](https://github.com/dbt-labs/dbt-core/issues/6386))
- Fix intermittent database connection failure in Windows CI test ([#6394](https://github.com/dbt-labs/dbt-core/issues/6394))
- Refactor and clean up manifest nodes ([#6426](https://github.com/dbt-labs/dbt-core/issues/6426))
- Restore important legacy logging behaviors, following refactor which removed them ([#6437](https://github.com/dbt-labs/dbt-core/issues/6437))

### Dependencies

- Update pathspec requirement from ~=0.9.0 to >=0.9,<0.11 in /core ([#5917](https://github.com/dbt-labs/dbt-core/pull/5917))
- Bump black from 22.8.0 to 22.10.0 ([#6019](https://github.com/dbt-labs/dbt-core/pull/6019))
- Bump mashumaro[msgpack] from 3.0.4 to 3.1.1 in /core ([#6108](https://github.com/dbt-labs/dbt-core/pull/6108))
- Update colorama requirement from <0.4.6,>=0.3.9 to >=0.3.9,<0.4.7 in /core ([#6144](https://github.com/dbt-labs/dbt-core/pull/6144))

### Dependency

- Bump mashumaro[msgpack] from 3.1.1 to 3.2 in /core ([#4904](https://github.com/dbt-labs/dbt-core/issues/4904))

### Contributors
- [@andy-clapson](https://github.com/andy-clapson) ([dbt-docs/#5791](https://github.com/dbt-labs/dbt-docs/issues/5791))
- [@chamini2](https://github.com/chamini2) ([#6041](https://github.com/dbt-labs/dbt-core/issues/6041))
- [@daniel-murray](https://github.com/daniel-murray) ([#2968](https://github.com/dbt-labs/dbt-core/issues/2968))
- [@dave-connors-3](https://github.com/dave-connors-3) ([#5990](https://github.com/dbt-labs/dbt-core/issues/5990))
- [@dbeatty10](https://github.com/dbeatty10) ([dbt-docs/#6368](https://github.com/dbt-labs/dbt-docs/issues/6368), [#6394](https://github.com/dbt-labs/dbt-core/issues/6394))
- [@devmessias](https://github.com/devmessias) ([#6309](https://github.com/dbt-labs/dbt-core/issues/6309))
- [@eve-johns](https://github.com/eve-johns) ([#6068](https://github.com/dbt-labs/dbt-core/issues/6068))
- [@haritamar](https://github.com/haritamar) ([#6246](https://github.com/dbt-labs/dbt-core/issues/6246))
- [@jared-rimmer](https://github.com/jared-rimmer) ([#5486](https://github.com/dbt-labs/dbt-core/issues/5486))
- [@josephberni](https://github.com/josephberni) ([#2968](https://github.com/dbt-labs/dbt-core/issues/2968))
- [@joshuataylor](https://github.com/joshuataylor) ([#6147](https://github.com/dbt-labs/dbt-core/issues/6147))
- [@justbldwn](https://github.com/justbldwn) ([#6245](https://github.com/dbt-labs/dbt-core/issues/6245))
- [@luke-bassett](https://github.com/luke-bassett) ([#1350](https://github.com/dbt-labs/dbt-core/issues/1350))
- [@max-sixty](https://github.com/max-sixty) ([#5946](https://github.com/dbt-labs/dbt-core/issues/5946), [#5983](https://github.com/dbt-labs/dbt-core/issues/5983), [#6028](https://github.com/dbt-labs/dbt-core/issues/6028))
- [@paulbenschmidt](https://github.com/paulbenschmidt) ([dbt-docs/#5880](https://github.com/dbt-labs/dbt-docs/issues/5880))
- [@pgoslatara](https://github.com/pgoslatara) ([#5929](https://github.com/dbt-labs/dbt-core/issues/5929))
- [@racheldaniel](https://github.com/racheldaniel) ([#6201](https://github.com/dbt-labs/dbt-core/issues/6201))
- [@timle2](https://github.com/timle2) ([#4205](https://github.com/dbt-labs/dbt-core/issues/4205))
- [@dave-connors-3](https://github.com/dave-connors-3) ([#5680](https://github.com/dbt-labs/dbt-core/issues/5680))

## Previous Releases
@@ -56,7 +56,7 @@ There are some tools that will be helpful to you in developing locally. While th

 These are the tools used in `dbt-core` development and testing:

-- [`tox`](https://tox.readthedocs.io/en/latest/) to manage virtualenvs across python versions. We currently target the latest patch releases for Python 3.7, 3.8, 3.9, and 3.10
+- [`tox`](https://tox.readthedocs.io/en/latest/) to manage virtualenvs across python versions. We currently target the latest patch releases for Python 3.7, 3.8, 3.9, 3.10 and 3.11
 - [`pytest`](https://docs.pytest.org/en/latest/) to define, discover, and run tests
 - [`flake8`](https://flake8.pycqa.org/en/latest/) for code linting
 - [`black`](https://github.com/psf/black) for code formatting

@@ -160,7 +160,7 @@ suites.

 #### `tox`

-[`tox`](https://tox.readthedocs.io/en/latest/) takes care of managing virtualenvs and install dependencies in order to run tests. You can also run tests in parallel, for example, you can run unit tests for Python 3.7, Python 3.8, Python 3.9, and Python 3.10 checks in parallel with `tox -p`. Also, you can run unit tests for specific python versions with `tox -e py37`. The configuration for these tests in located in `tox.ini`.
+[`tox`](https://tox.readthedocs.io/en/latest/) takes care of managing virtualenvs and install dependencies in order to run tests. You can also run tests in parallel, for example, you can run unit tests for Python 3.7, Python 3.8, Python 3.9, Python 3.10 and Python 3.11 checks in parallel with `tox -p`. Also, you can run unit tests for specific python versions with `tox -e py37`. The configuration for these tests in located in `tox.ini`.

 #### `pytest`

@@ -201,13 +201,21 @@ Here are some general rules for adding tests:
 * Sometimes flake8 complains about lines that are actually fine, in which case you can put a comment on the line such as: # noqa or # noqa: ANNN, where ANNN is the error code that flake8 issues.
 * To collect output for `CProfile`, run dbt with the `-r` option and the name of an output file, i.e. `dbt -r dbt.cprof run`. If you just want to profile parsing, you can do: `dbt -r dbt.cprof parse`. `pip` install `snakeviz` to view the output. Run `snakeviz dbt.cprof` and output will be rendered in a browser window.

-## Adding a CHANGELOG Entry
+## Adding or modifying a CHANGELOG Entry

 We use [changie](https://changie.dev) to generate `CHANGELOG` entries. **Note:** Do not edit the `CHANGELOG.md` directly. Your modifications will be lost.

 Follow the steps to [install `changie`](https://changie.dev/guide/installation/) for your system.

-Once changie is installed and your PR is created, simply run `changie new` and changie will walk you through the process of creating a changelog entry. Commit the file that's created and your changelog entry is complete!
+Once changie is installed and your PR is created for a new feature, simply run the following command and changie will walk you through the process of creating a changelog entry:
+
+```shell
+changie new
+```
+
+Commit the file that's created and your changelog entry is complete!
+
+If you are contributing to a feature already in progress, you will modify the changie yaml file in dbt/.changes/unreleased/ related to your change. If you need help finding this file, please ask within the discussion for the pull request!

 You don't need to worry about which `dbt-core` version your change will go into. Just create the changelog entry with `changie`, and open your PR against the `main` branch. All merged changes will be included in the next minor version of `dbt-core`. The Core maintainers _may_ choose to "backport" specific changes in order to patch older minor versions. In that case, a maintainer will take care of that backport after merging your PR, before releasing the new version of `dbt-core`.
@@ -49,6 +49,9 @@ RUN apt-get update \
     python3.10 \
     python3.10-dev \
     python3.10-venv \
+    python3.11 \
+    python3.11-dev \
+    python3.11-venv \
   && apt-get clean \
   && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
@@ -2,50 +2,59 @@

 ## The following are individual files in this directory.

-### deprecations.py
-### flags.py
-### main.py
-### tracking.py
-### version.py
-### lib.py
-### node_types.py
-### helper_types.py
-### links.py
-### semver.py
-### ui.py
+### compilation.py
+### constants.py
+### dataclass_schema.py
+### deprecations.py
+### exceptions.py
+### flags.py
+### helper_types.py
+### hooks.py
+### lib.py
+### links.py
+### logger.py
+### main.py
+### node_types.py
+### profiler.py
+### selected_resources.py
+### semver.py
+### tracking.py
+### ui.py
+### utils.py
+### version.py

 ## The subdirectories will be documented in a README in the subdirectory
-* config
-* include
-* adapters
-* context
-* deps
-* graph
-* task
+* adapters
+* cli
+* clients
+* config
+* context
+* contracts
+* deps
+* docs
+* events
+* graph
+* include
+* parser
+* task
+* tests
@@ -48,6 +48,7 @@ from dbt.events.types import (
     Rollback,
     RollbackFailed,
 )
+from dbt.events.contextvars import get_node_info
 from dbt import flags
 from dbt.utils import cast_to_str

@@ -169,7 +170,9 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
             if conn.name == conn_name and conn.state == "open":
                 return conn

-        fire_event(NewConnection(conn_name=conn_name, conn_type=self.TYPE))
+        fire_event(
+            NewConnection(conn_name=conn_name, conn_type=self.TYPE, node_info=get_node_info())
+        )

         if conn.state == "open":
             fire_event(ConnectionReused(conn_name=conn_name))

@@ -336,7 +339,9 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
         except Exception:
             fire_event(
                 RollbackFailed(
-                    conn_name=cast_to_str(connection.name), exc_info=traceback.format_exc()
+                    conn_name=cast_to_str(connection.name),
+                    exc_info=traceback.format_exc(),
+                    node_info=get_node_info(),
                 )
             )

@@ -345,10 +350,16 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
         """Perform the actual close operation."""
         # On windows, sometimes connection handles don't have a close() attr.
         if hasattr(connection.handle, "close"):
-            fire_event(ConnectionClosed(conn_name=cast_to_str(connection.name)))
+            fire_event(
+                ConnectionClosed(conn_name=cast_to_str(connection.name), node_info=get_node_info())
+            )
             connection.handle.close()
         else:
-            fire_event(ConnectionLeftOpen(conn_name=cast_to_str(connection.name)))
+            fire_event(
+                ConnectionLeftOpen(
+                    conn_name=cast_to_str(connection.name), node_info=get_node_info()
+                )
+            )

     @classmethod
     def _rollback(cls, connection: Connection) -> None:

@@ -359,7 +370,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
                 f'"{connection.name}", but it does not have one open!'
             )

-        fire_event(Rollback(conn_name=cast_to_str(connection.name)))
+        fire_event(Rollback(conn_name=cast_to_str(connection.name), node_info=get_node_info()))
         cls._rollback_handle(connection)

         connection.transaction_open = False

@@ -371,7 +382,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
             return connection

         if connection.transaction_open and connection.handle:
-            fire_event(Rollback(conn_name=cast_to_str(connection.name)))
+            fire_event(Rollback(conn_name=cast_to_str(connection.name), node_info=get_node_info()))
             cls._rollback_handle(connection)
         connection.transaction_open = False
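The common thread in these hunks is that connection lifecycle events (`NewConnection`, `Rollback`, `ConnectionClosed`, and so on) now carry `node_info` read from `dbt.events.contextvars.get_node_info()` instead of having node metadata threaded through every call site. A minimal sketch of that contextvar pattern, with a hypothetical `node_context` setter (illustrative, not the actual dbt-core module):

```python
from contextlib import contextmanager
from contextvars import ContextVar
from typing import Any, Dict, Iterator, Optional

# Metadata for whichever node the current thread/task is executing.
_NODE_INFO: ContextVar[Optional[Dict[str, Any]]] = ContextVar("node_info", default=None)

def get_node_info() -> Dict[str, Any]:
    # Empty dict outside node execution, so events can always attach it.
    return _NODE_INFO.get() or {}

@contextmanager
def node_context(node_info: Dict[str, Any]) -> Iterator[None]:
    # Set node metadata for the duration of one node's execution.
    token = _NODE_INFO.set(node_info)
    try:
        yield
    finally:
        _NODE_INFO.reset(token)

# Any event fired inside the context picks up node_info without the
# metadata being passed down the call stack:
with node_context({"unique_id": "model.my_project.my_model"}):
    print(get_node_info())  # {'unique_id': 'model.my_project.my_model'}
```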
@@ -15,7 +15,6 @@ from typing import (
     List,
     Mapping,
     Iterator,
-    Union,
     Set,
 )

@@ -38,9 +37,8 @@ from dbt.adapters.protocol import (
 )
 from dbt.clients.agate_helper import empty_table, merge_tables, table_from_rows
 from dbt.clients.jinja import MacroGenerator
-from dbt.contracts.graph.compiled import CompileResultNode, CompiledSeedNode
 from dbt.contracts.graph.manifest import Manifest, MacroManifest
-from dbt.contracts.graph.parsed import ParsedSeedNode
+from dbt.contracts.graph.nodes import ResultNode
 from dbt.events.functions import fire_event, warn_or_error
 from dbt.events.types import (
     CacheMiss,

@@ -64,9 +62,6 @@ from dbt.adapters.base import Credentials
 from dbt.adapters.cache import RelationsCache, _make_ref_key_msg

-SeedModel = Union[ParsedSeedNode, CompiledSeedNode]

 GET_CATALOG_MACRO_NAME = "get_catalog"
 FRESHNESS_MACRO_NAME = "collect_freshness"

@@ -243,9 +238,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         return conn.name

     @contextmanager
-    def connection_named(
-        self, name: str, node: Optional[CompileResultNode] = None
-    ) -> Iterator[None]:
+    def connection_named(self, name: str, node: Optional[ResultNode] = None) -> Iterator[None]:
         try:
             if self.connections.query_header is not None:
                 self.connections.query_header.set(name, node)

@@ -257,7 +250,7 @@ class BaseAdapter(metaclass=AdapterMeta):
             self.connections.query_header.reset()

     @contextmanager
-    def connection_for(self, node: CompileResultNode) -> Iterator[None]:
+    def connection_for(self, node: ResultNode) -> Iterator[None]:
         with self.connection_named(node.unique_id, node):
             yield

@@ -372,7 +365,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         lowercase strings.
         """
         info_schema_name_map = SchemaSearchMap()
-        nodes: Iterator[CompileResultNode] = chain(
+        nodes: Iterator[ResultNode] = chain(
            [
                node
                for node in manifest.nodes.values()
@@ -5,7 +5,7 @@ from dbt.clients.jinja import QueryStringGenerator

 from dbt.context.manifest import generate_query_header_context
 from dbt.contracts.connection import AdapterRequiredConfig, QueryComment
-from dbt.contracts.graph.compiled import CompileResultNode
+from dbt.contracts.graph.nodes import ResultNode
 from dbt.contracts.graph.manifest import Manifest
 from dbt.exceptions import RuntimeException

@@ -90,7 +90,7 @@ class MacroQueryStringSetter:
     def reset(self):
         self.set("master", None)

-    def set(self, name: str, node: Optional[CompileResultNode]):
+    def set(self, name: str, node: Optional[ResultNode]):
         wrapped: Optional[NodeWrapper] = None
         if node is not None:
             wrapped = NodeWrapper(node)
@@ -1,9 +1,8 @@
 from collections.abc import Hashable
-from dataclasses import dataclass
-from typing import Optional, TypeVar, Any, Type, Dict, Union, Iterator, Tuple, Set
+from dataclasses import dataclass, field
+from typing import Optional, TypeVar, Any, Type, Dict, Iterator, Tuple, Set

-from dbt.contracts.graph.compiled import CompiledNode
-from dbt.contracts.graph.parsed import ParsedSourceDefinition, ParsedNode
+from dbt.contracts.graph.nodes import SourceDefinition, ManifestNode, ResultNode, ParsedNode
 from dbt.contracts.relation import (
     RelationType,
     ComponentName,

@@ -27,8 +26,10 @@ class BaseRelation(FakeAPIObject, Hashable):
     path: Path
     type: Optional[RelationType] = None
     quote_character: str = '"'
-    include_policy: Policy = Policy()
-    quote_policy: Policy = Policy()
+    # Python 3.11 requires that these use default_factory instead of simple default
+    # ValueError: mutable default <class 'dbt.contracts.relation.Policy'> for field include_policy is not allowed: use default_factory
+    include_policy: Policy = field(default_factory=lambda: Policy())
+    quote_policy: Policy = field(default_factory=lambda: Policy())
     dbt_created: bool = False

     def _is_exactish_match(self, field: ComponentName, value: str) -> bool:

@@ -39,9 +40,9 @@ class BaseRelation(FakeAPIObject, Hashable):

     @classmethod
     def _get_field_named(cls, field_name):
-        for field, _ in cls._get_fields():
-            if field.name == field_name:
-                return field
+        for f, _ in cls._get_fields():
+            if f.name == field_name:
+                return f
         # this should be unreachable
         raise ValueError(f"BaseRelation has no {field_name} field!")

@@ -52,11 +53,11 @@ class BaseRelation(FakeAPIObject, Hashable):

     @classmethod
     def get_default_quote_policy(cls) -> Policy:
-        return cls._get_field_named("quote_policy").default
+        return cls._get_field_named("quote_policy").default_factory()

     @classmethod
     def get_default_include_policy(cls) -> Policy:
-        return cls._get_field_named("include_policy").default
+        return cls._get_field_named("include_policy").default_factory()

     def get(self, key, default=None):
         """Override `.get` to return a metadata object so we don't break
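The `default_factory` switch here is forced by a stricter dataclass check in Python 3.11: a class-instance default whose type is unhashable is now rejected as a mutable default, whereas 3.10 only rejected `list`, `dict`, and `set`. A self-contained sketch of the failure and the fix (the `Policy` fields below are illustrative):

```python
from dataclasses import dataclass, field

@dataclass  # eq=True and not frozen, so instances are unhashable
class Policy:
    database: bool = True
    schema: bool = True
    identifier: bool = True

# On Python 3.11 the commented-out class below fails at definition time:
#   ValueError: mutable default <class 'Policy'> for field quote_policy
#   is not allowed: use default_factory
# (Python <= 3.10 only rejected list/dict/set defaults.)
#
# @dataclass
# class BrokenRelation:
#     quote_policy: Policy = Policy()

@dataclass
class Relation:
    # default_factory builds a fresh Policy per instance, which both
    # 3.10 and 3.11 accept.
    quote_policy: Policy = field(default_factory=lambda: Policy())

print(Relation().quote_policy)  # Policy(database=True, schema=True, identifier=True)
```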
@@ -184,7 +185,7 @@ class BaseRelation(FakeAPIObject, Hashable):
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def create_from_source(cls: Type[Self], source: ParsedSourceDefinition, **kwargs: Any) -> Self:
|
||||
def create_from_source(cls: Type[Self], source: SourceDefinition, **kwargs: Any) -> Self:
|
||||
source_quoting = source.quoting.to_dict(omit_none=True)
|
||||
source_quoting.pop("column", None)
|
||||
quote_policy = deep_merge(
|
||||
@@ -209,7 +210,7 @@ class BaseRelation(FakeAPIObject, Hashable):
|
||||
def create_ephemeral_from_node(
|
||||
cls: Type[Self],
|
||||
config: HasQuoting,
|
||||
node: Union[ParsedNode, CompiledNode],
|
||||
node: ManifestNode,
|
||||
) -> Self:
|
||||
# Note that ephemeral models are based on the name.
|
||||
identifier = cls.add_ephemeral_prefix(node.name)
|
||||
@@ -222,7 +223,7 @@ class BaseRelation(FakeAPIObject, Hashable):
|
||||
def create_from_node(
|
||||
cls: Type[Self],
|
||||
config: HasQuoting,
|
||||
node: Union[ParsedNode, CompiledNode],
|
||||
node: ManifestNode,
|
||||
quote_policy: Optional[Dict[str, bool]] = None,
|
||||
**kwargs: Any,
|
||||
) -> Self:
|
||||
@@ -243,20 +244,20 @@ class BaseRelation(FakeAPIObject, Hashable):
|
||||
def create_from(
|
||||
cls: Type[Self],
|
||||
config: HasQuoting,
|
||||
node: Union[CompiledNode, ParsedNode, ParsedSourceDefinition],
|
||||
node: ResultNode,
|
||||
**kwargs: Any,
|
||||
) -> Self:
|
||||
if node.resource_type == NodeType.Source:
|
||||
if not isinstance(node, ParsedSourceDefinition):
|
||||
if not isinstance(node, SourceDefinition):
|
||||
raise InternalException(
|
||||
"type mismatch, expected ParsedSourceDefinition but got {}".format(type(node))
|
||||
"type mismatch, expected SourceDefinition but got {}".format(type(node))
|
||||
)
|
||||
return cls.create_from_source(node, **kwargs)
|
||||
else:
|
||||
if not isinstance(node, (ParsedNode, CompiledNode)):
|
||||
# Can't use ManifestNode here because of parameterized generics
|
||||
if not isinstance(node, (ParsedNode)):
|
||||
raise InternalException(
|
||||
"type mismatch, expected ParsedNode or CompiledNode but "
|
||||
"got {}".format(type(node))
|
||||
f"type mismatch, expected ManifestNode but got {type(node)}"
|
||||
)
|
||||
return cls.create_from_node(config, node, **kwargs)
|
||||
|
||||
|
||||
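A note on the `default_factory` hunk above: Python 3.11 widened the dataclass check for mutable field defaults from a hardcoded list/dict/set check to any unhashable value, so a dataclass instance such as `Policy()` is now rejected as a field default. A minimal standalone sketch of the failure and the fix (not dbt code; the `Policy` fields here are assumed for illustration):

```python
from dataclasses import dataclass, field, fields


@dataclass
class Policy:
    # illustrative fields; the real Policy lives in dbt.contracts.relation
    database: bool = True
    schema: bool = True
    identifier: bool = True


@dataclass
class Relation:
    # On Python 3.11, `include_policy: Policy = Policy()` raises:
    #   ValueError: mutable default <class 'Policy'> for field
    #   include_policy is not allowed: use default_factory
    include_policy: Policy = field(default_factory=lambda: Policy())

    @classmethod
    def get_default_include_policy(cls) -> Policy:
        # With default_factory set, the field no longer carries a `.default`
        # (it is dataclasses.MISSING); the default is produced by calling the
        # factory, which is why the accessors above switched to
        # `.default_factory()`.
        (f,) = [f for f in fields(cls) if f.name == "include_policy"]
        return f.default_factory()
```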
@@ -8,7 +8,6 @@ from typing import (
     Generic,
     TypeVar,
     Tuple,
-    Union,
     Dict,
     Any,
 )
@@ -17,8 +16,7 @@ from typing_extensions import Protocol
 import agate

 from dbt.contracts.connection import Connection, AdapterRequiredConfig, AdapterResponse
-from dbt.contracts.graph.compiled import CompiledNode, ManifestNode, NonSourceCompiledNode
-from dbt.contracts.graph.parsed import ParsedNode, ParsedSourceDefinition
+from dbt.contracts.graph.nodes import ResultNode, ManifestNode
 from dbt.contracts.graph.model_config import BaseConfig
 from dbt.contracts.graph.manifest import Manifest
 from dbt.contracts.relation import Policy, HasQuoting
@@ -48,11 +46,7 @@ class RelationProtocol(Protocol):
         ...

     @classmethod
-    def create_from(
-        cls: Type[Self],
-        config: HasQuoting,
-        node: Union[CompiledNode, ParsedNode, ParsedSourceDefinition],
-    ) -> Self:
+    def create_from(cls: Type[Self], config: HasQuoting, node: ResultNode) -> Self:
         ...

@@ -65,7 +59,7 @@ class CompilerProtocol(Protocol):
         node: ManifestNode,
         manifest: Manifest,
         extra_context: Optional[Dict[str, Any]] = None,
-    ) -> NonSourceCompiledNode:
+    ) -> ManifestNode:
         ...
@@ -10,6 +10,7 @@ from dbt.adapters.base import BaseConnectionManager
 from dbt.contracts.connection import Connection, ConnectionState, AdapterResponse
 from dbt.events.functions import fire_event
 from dbt.events.types import ConnectionUsed, SQLQuery, SQLCommit, SQLQueryStatus
+from dbt.events.contextvars import get_node_info
 from dbt.utils import cast_to_str

@@ -56,7 +57,13 @@ class SQLConnectionManager(BaseConnectionManager):
         connection = self.get_thread_connection()
         if auto_begin and connection.transaction_open is False:
             self.begin()
-        fire_event(ConnectionUsed(conn_type=self.TYPE, conn_name=cast_to_str(connection.name)))
+        fire_event(
+            ConnectionUsed(
+                conn_type=self.TYPE,
+                conn_name=cast_to_str(connection.name),
+                node_info=get_node_info(),
+            )
+        )

         with self.exception_handler(sql):
             if abridge_sql_log:
@@ -64,7 +71,11 @@ class SQLConnectionManager(BaseConnectionManager):
             else:
                 log_sql = sql

-            fire_event(SQLQuery(conn_name=cast_to_str(connection.name), sql=log_sql))
+            fire_event(
+                SQLQuery(
+                    conn_name=cast_to_str(connection.name), sql=log_sql, node_info=get_node_info()
+                )
+            )
             pre = time.time()

             cursor = connection.handle.cursor()
@@ -72,7 +83,9 @@ class SQLConnectionManager(BaseConnectionManager):

             fire_event(
                 SQLQueryStatus(
-                    status=str(self.get_response(cursor)), elapsed=round((time.time() - pre), 2)
+                    status=str(self.get_response(cursor)),
+                    elapsed=round((time.time() - pre)),
+                    node_info=get_node_info(),
                 )
             )

@@ -156,7 +169,7 @@ class SQLConnectionManager(BaseConnectionManager):
                 "it does not have one open!".format(connection.name)
             )

-        fire_event(SQLCommit(conn_name=connection.name))
+        fire_event(SQLCommit(conn_name=connection.name, node_info=get_node_info()))
         self.add_commit_query()

         connection.transaction_open = False
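The `node_info=get_node_info()` arguments threaded through these events come from `dbt.events.contextvars`. The general mechanism is a `contextvars.ContextVar` that the node runner sets for the duration of a node's execution, so events fired anywhere further down the stack (connection use, SQL, commit) can be enriched without passing the node through every call site. A rough standalone sketch of that pattern, with illustrative names and shapes rather than the actual dbt implementation:

```python
from contextvars import ContextVar
from typing import Any, Dict

# One value per thread/task context; an empty dict means
# "no node is currently running" (illustrative default).
cv_node_info: ContextVar[Dict[str, Any]] = ContextVar("node_info", default={})


def get_node_info() -> Dict[str, Any]:
    return cv_node_info.get()


def run_node(unique_id: str) -> None:
    token = cv_node_info.set({"unique_id": unique_id})
    try:
        # Anything executed here -- opening connections, firing SQLQuery
        # events, committing -- can call get_node_info() and see this node.
        ...
    finally:
        # Restore the previous value so nested or parallel work is unaffected.
        cv_node_info.reset(token)
```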
@@ -31,7 +31,6 @@ def cli_runner():
 @p.cache_selected_only
 @p.debug
 @p.enable_legacy_logger
-@p.event_buffer_size
 @p.fail_fast
 @p.log_cache_events
 @p.log_format
@@ -80,14 +80,6 @@ enable_legacy_logger = click.option(
     hidden=True,
 )

-event_buffer_size = click.option(
-    "--event-buffer-size",
-    envvar="DBT_EVENT_BUFFER_SIZE",
-    help="Sets the max number of events to buffer in EVENT_HISTORY.",
-    default=100000,
-    type=click.INT,
-)
-
 exclude = click.option("--exclude", envvar=None, help="Specify the nodes to exclude.")

 fail_fast = click.option(
@@ -25,8 +25,7 @@ from dbt.utils import (
 )

 from dbt.clients._jinja_blocks import BlockIterator, BlockData, BlockTag
-from dbt.contracts.graph.compiled import CompiledGenericTestNode
-from dbt.contracts.graph.parsed import ParsedGenericTestNode
+from dbt.contracts.graph.nodes import GenericTestNode

 from dbt.exceptions import (
     InternalException,
@@ -620,7 +619,7 @@ GENERIC_TEST_KWARGS_NAME = "_dbt_generic_test_kwargs"

 def add_rendered_test_kwargs(
     context: Dict[str, Any],
-    node: Union[ParsedGenericTestNode, CompiledGenericTestNode],
+    node: GenericTestNode,
     capture_macros: bool = False,
 ) -> None:
     """Render each of the test kwargs in the given context using the native
@@ -1,6 +1,6 @@
 import os
 from collections import defaultdict
-from typing import List, Dict, Any, Tuple, cast, Optional
+from typing import List, Dict, Any, Tuple, Optional

 import networkx as nx  # type: ignore
 import pickle
@@ -12,15 +12,14 @@ from dbt.clients import jinja
 from dbt.clients.system import make_directory
 from dbt.context.providers import generate_runtime_model_context
 from dbt.contracts.graph.manifest import Manifest, UniqueID
-from dbt.contracts.graph.compiled import (
-    COMPILED_TYPES,
-    CompiledGenericTestNode,
+from dbt.contracts.graph.nodes import (
+    ManifestNode,
+    ManifestSQLNode,
+    GenericTestNode,
     GraphMemberNode,
     InjectedCTE,
-    ManifestNode,
-    NonSourceCompiledNode,
+    SeedNode,
 )
-from dbt.contracts.graph.parsed import ParsedNode
 from dbt.exceptions import (
     dependency_not_found,
     InternalException,
@@ -28,7 +27,8 @@ from dbt.exceptions import (
 )
 from dbt.graph import Graph
 from dbt.events.functions import fire_event
-from dbt.events.types import FoundStats, CompilingNode, WritingInjectedSQLForNode
+from dbt.events.types import FoundStats, WritingInjectedSQLForNode
+from dbt.events.contextvars import get_node_info
 from dbt.node_types import NodeType, ModelLanguage
 from dbt.events.format import pluralize
 import dbt.tracking
@@ -36,14 +36,6 @@ import dbt.tracking
 graph_file_name = "graph.gpickle"


-def _compiled_type_for(model: ParsedNode):
-    if type(model) not in COMPILED_TYPES:
-        raise InternalException(
-            f"Asked to compile {type(model)} node, but it has no compiled form"
-        )
-    return COMPILED_TYPES[type(model)]
-
-
 def print_compile_stats(stats):
     names = {
         NodeType.Model: "model",
@@ -176,7 +168,7 @@ class Compiler:
     # a dict for jinja rendering of SQL
     def _create_node_context(
         self,
-        node: NonSourceCompiledNode,
+        node: ManifestSQLNode,
         manifest: Manifest,
         extra_context: Dict[str, Any],
     ) -> Dict[str, Any]:
@@ -184,7 +176,7 @@ class Compiler:
         context = generate_runtime_model_context(node, self.config, manifest)
         context.update(extra_context)

-        if isinstance(node, CompiledGenericTestNode):
+        if isinstance(node, GenericTestNode):
             # for test nodes, add a special keyword args value to the context
             jinja.add_rendered_test_kwargs(context, node)

@@ -195,14 +187,6 @@ class Compiler:
         relation_cls = adapter.Relation
         return relation_cls.add_ephemeral_prefix(name)

-    def _get_relation_name(self, node: ParsedNode):
-        relation_name = None
-        if node.is_relational and not node.is_ephemeral_model:
-            adapter = get_adapter(self.config)
-            relation_cls = adapter.Relation
-            relation_name = str(relation_cls.create_from(self.config, node))
-        return relation_name
-
     def _inject_ctes_into_sql(self, sql: str, ctes: List[InjectedCTE]) -> str:
         """
         `ctes` is a list of InjectedCTEs like:
@@ -261,10 +245,10 @@ class Compiler:

     def _recursively_prepend_ctes(
         self,
-        model: NonSourceCompiledNode,
+        model: ManifestSQLNode,
         manifest: Manifest,
         extra_context: Optional[Dict[str, Any]],
-    ) -> Tuple[NonSourceCompiledNode, List[InjectedCTE]]:
+    ) -> Tuple[ManifestSQLNode, List[InjectedCTE]]:
         """This method is called by the 'compile_node' method. Starting
         from the node that it is passed in, it will recursively call
         itself using the 'extra_ctes'. The 'ephemeral' models do
@@ -279,7 +263,8 @@ class Compiler:

         # Just to make it plain that nothing is actually injected for this case
         if not model.extra_ctes:
-            model.extra_ctes_injected = True
+            if not isinstance(model, SeedNode):
+                model.extra_ctes_injected = True
             manifest.update_node(model)
             return (model, model.extra_ctes)

@@ -298,6 +283,7 @@ class Compiler:
                     f"could not be resolved: {cte.id}"
                 )
             cte_model = manifest.nodes[cte.id]
+            assert not isinstance(cte_model, SeedNode)

             if not cte_model.is_ephemeral_model:
                 raise InternalException(f"{cte.id} is not ephemeral")
@@ -305,8 +291,6 @@ class Compiler:
             # This model has already been compiled, so it's been
             # through here before
             if getattr(cte_model, "compiled", False):
-                assert isinstance(cte_model, tuple(COMPILED_TYPES.values()))
-                cte_model = cast(NonSourceCompiledNode, cte_model)
                 new_prepended_ctes = cte_model.extra_ctes

             # if the cte_model isn't compiled, i.e. first time here
@@ -343,21 +327,19 @@ class Compiler:

         return model, prepended_ctes

-    # creates a compiled_node from the ManifestNode passed in,
+    # Sets compiled fields in the ManifestSQLNode passed in,
     # creates a "context" dictionary for jinja rendering,
     # and then renders the "compiled_code" using the node, the
     # raw_code and the context.
     def _compile_node(
         self,
-        node: ManifestNode,
+        node: ManifestSQLNode,
         manifest: Manifest,
         extra_context: Optional[Dict[str, Any]] = None,
-    ) -> NonSourceCompiledNode:
+    ) -> ManifestSQLNode:
         if extra_context is None:
             extra_context = {}

-        fire_event(CompilingNode(unique_id=node.unique_id))
-
         data = node.to_dict(omit_none=True)
         data.update(
             {
@@ -367,9 +349,8 @@ class Compiler:
                 "extra_ctes": [],
             }
         )
-        compiled_node = _compiled_type_for(node).from_dict(data)

-        if compiled_node.language == ModelLanguage.python:
+        if node.language == ModelLanguage.python:
             # TODO could we also 'minify' this code at all? just aesthetic, not functional

             # quoating seems like something very specific to sql so far
@@ -377,7 +358,7 @@ class Compiler:
             # TODO try to find better way to do this, given that
             original_quoting = self.config.quoting
             self.config.quoting = {key: False for key in original_quoting.keys()}
-            context = self._create_node_context(compiled_node, manifest, extra_context)
+            context = self._create_node_context(node, manifest, extra_context)

             postfix = jinja.get_rendered(
                 "{{ py_script_postfix(model) }}",
@@ -385,23 +366,21 @@ class Compiler:
                 node,
             )
             # we should NOT jinja render the python model's 'raw code'
-            compiled_node.compiled_code = f"{node.raw_code}\n\n{postfix}"
+            node.compiled_code = f"{node.raw_code}\n\n{postfix}"
             # restore quoting settings in the end since context is lazy evaluated
             self.config.quoting = original_quoting

         else:
-            context = self._create_node_context(compiled_node, manifest, extra_context)
-            compiled_node.compiled_code = jinja.get_rendered(
+            context = self._create_node_context(node, manifest, extra_context)
+            node.compiled_code = jinja.get_rendered(
                 node.raw_code,
                 context,
                 node,
            )

-        compiled_node.relation_name = self._get_relation_name(node)
-
-        compiled_node.compiled = True
+        node.compiled = True

-        return compiled_node
+        return node

     def write_graph_file(self, linker: Linker, manifest: Manifest):
         filename = graph_file_name
@@ -508,10 +487,13 @@ class Compiler:
         return Graph(linker.graph)

     # writes the "compiled_code" into the target/compiled directory
-    def _write_node(self, node: NonSourceCompiledNode) -> ManifestNode:
-        if not node.extra_ctes_injected or node.resource_type == NodeType.Snapshot:
+    def _write_node(self, node: ManifestSQLNode) -> ManifestSQLNode:
+        if not node.extra_ctes_injected or node.resource_type in (
+            NodeType.Snapshot,
+            NodeType.Seed,
+        ):
             return node
-        fire_event(WritingInjectedSQLForNode(unique_id=node.unique_id))
+        fire_event(WritingInjectedSQLForNode(node_info=get_node_info()))

         if node.compiled_code:
             node.compiled_path = node.write_node(
@@ -521,11 +503,11 @@ class Compiler:

     def compile_node(
         self,
-        node: ManifestNode,
+        node: ManifestSQLNode,
         manifest: Manifest,
         extra_context: Optional[Dict[str, Any]] = None,
         write: bool = True,
-    ) -> NonSourceCompiledNode:
+    ) -> ManifestSQLNode:
         """This is the main entry point into this code. It's called by
         CompileRunner.compile, GenericRPCRunner.compile, and
         RunTask.get_hook_sql. It calls '_compile_node' to convert
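For readers following the `_recursively_prepend_ctes` / `_inject_ctes_into_sql` changes above: ephemeral models are compiled into CTEs that get prepended to the SQL of the model that `ref()`s them. A simplified sketch of the injection step, with assumed shapes (the real method also merges with an existing `with` clause in the model's SQL):

```python
from dataclasses import dataclass
from typing import List


@dataclass
class InjectedCTE:
    id: str   # unique_id of the ephemeral model, e.g. "model.my_project.clean_orders"
    sql: str  # that model's compiled SQL


def inject_ctes_into_sql(sql: str, ctes: List[InjectedCTE]) -> str:
    if not ctes:
        return sql
    # dbt names ephemeral CTEs with the __dbt__cte__ prefix
    # (see add_ephemeral_prefix above).
    rendered = ",\n".join(
        f"__dbt__cte__{cte.id.split('.')[-1]} as (\n{cte.sql}\n)" for cte in ctes
    )
    return f"with {rendered}\n{sql}"
```

With that sketch, `select * from __dbt__cte__clean_orders` in the consuming model resolves against the prepended CTE rather than a real relation.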
@@ -8,7 +8,7 @@ from dbt import utils
 from dbt.clients.jinja import get_rendered
 from dbt.clients.yaml_helper import yaml, safe_load, SafeLoader, Loader, Dumper  # noqa: F401
 from dbt.constants import SECRET_ENV_PREFIX, DEFAULT_ENV_PLACEHOLDER
-from dbt.contracts.graph.compiled import CompiledResource
+from dbt.contracts.graph.nodes import Resource
 from dbt.exceptions import (
     CompilationException,
     MacroReturn,
@@ -18,6 +18,7 @@ from dbt.exceptions import (
 )
 from dbt.events.functions import fire_event, get_invocation_id
 from dbt.events.types import JinjaLogInfo, JinjaLogDebug
+from dbt.events.contextvars import get_node_info
 from dbt.version import __version__ as dbt_version

 # These modules are added to the context. Consider alternative
@@ -134,11 +135,11 @@ class Var:
         self,
         context: Mapping[str, Any],
         cli_vars: Mapping[str, Any],
-        node: Optional[CompiledResource] = None,
+        node: Optional[Resource] = None,
     ) -> None:
         self._context: Mapping[str, Any] = context
         self._cli_vars: Mapping[str, Any] = cli_vars
-        self._node: Optional[CompiledResource] = node
+        self._node: Optional[Resource] = node
         self._merged: Mapping[str, Any] = self._generate_merged()

     def _generate_merged(self) -> Mapping[str, Any]:
@@ -558,9 +559,9 @@ class BaseContext(metaclass=ContextMeta):
         {% endmacro %}"
         """
         if info:
-            fire_event(JinjaLogInfo(msg=msg))
+            fire_event(JinjaLogInfo(msg=msg, node_info=get_node_info()))
         else:
-            fire_event(JinjaLogDebug(msg=msg))
+            fire_event(JinjaLogDebug(msg=msg, node_info=get_node_info()))
         return ""

     @contextproperty
@@ -5,9 +5,8 @@ from dbt.exceptions import (
     doc_target_not_found,
 )
 from dbt.config.runtime import RuntimeConfig
-from dbt.contracts.graph.compiled import CompileResultNode
 from dbt.contracts.graph.manifest import Manifest
-from dbt.contracts.graph.parsed import ParsedMacro
+from dbt.contracts.graph.nodes import Macro, ResultNode

 from dbt.context.base import contextmember
 from dbt.context.configured import SchemaYamlContext
@@ -17,7 +16,7 @@ class DocsRuntimeContext(SchemaYamlContext):
     def __init__(
         self,
         config: RuntimeConfig,
-        node: Union[ParsedMacro, CompileResultNode],
+        node: Union[Macro, ResultNode],
         manifest: Manifest,
         current_project: str,
     ) -> None:
@@ -55,7 +54,7 @@ class DocsRuntimeContext(SchemaYamlContext):
         else:
             doc_invalid_args(self.node, args)

-        # ParsedDocumentation
+        # Documentation
         target_doc = self.manifest.resolve_doc(
             doc_name,
             doc_package_name,
@@ -1,10 +1,10 @@
 from typing import Dict, MutableMapping, Optional
-from dbt.contracts.graph.parsed import ParsedMacro
+from dbt.contracts.graph.nodes import Macro
 from dbt.exceptions import raise_duplicate_macro_name, raise_compiler_error
 from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME
 from dbt.clients.jinja import MacroGenerator

-MacroNamespace = Dict[str, ParsedMacro]
+MacroNamespace = Dict[str, Macro]


 # This class builds the MacroResolver by adding macros
@@ -21,7 +21,7 @@ MacroNamespace = Dict[str, Macro]
 class MacroResolver:
     def __init__(
         self,
-        macros: MutableMapping[str, ParsedMacro],
+        macros: MutableMapping[str, Macro],
         root_project_name: str,
         internal_package_names,
     ) -> None:
@@ -77,7 +77,7 @@ class MacroResolver:
     def _add_macro_to(
         self,
         package_namespaces: Dict[str, MacroNamespace],
-        macro: ParsedMacro,
+        macro: Macro,
     ):
         if macro.package_name in package_namespaces:
             namespace = package_namespaces[macro.package_name]
@@ -89,7 +89,7 @@ class MacroResolver:
             raise_duplicate_macro_name(macro, macro, macro.package_name)
         package_namespaces[macro.package_name][macro.name] = macro

-    def add_macro(self, macro: ParsedMacro):
+    def add_macro(self, macro: Macro):
         macro_name: str = macro.name

         # internal macros (from plugins) will be processed separately from
@@ -1,7 +1,7 @@
 from typing import Any, Dict, Iterable, Union, Optional, List, Iterator, Mapping, Set

 from dbt.clients.jinja import MacroGenerator, MacroStack
-from dbt.contracts.graph.parsed import ParsedMacro
+from dbt.contracts.graph.nodes import Macro
 from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME
 from dbt.exceptions import raise_duplicate_macro_name, raise_compiler_error

@@ -112,7 +112,7 @@ class MacroNamespaceBuilder:
     def _add_macro_to(
         self,
         hierarchy: Dict[str, FlatNamespace],
-        macro: ParsedMacro,
+        macro: Macro,
         macro_func: MacroGenerator,
     ):
         if macro.package_name in hierarchy:
@@ -125,7 +125,7 @@ class MacroNamespaceBuilder:
             raise_duplicate_macro_name(macro_func.macro, macro, macro.package_name)
         hierarchy[macro.package_name][macro.name] = macro_func

-    def add_macro(self, macro: ParsedMacro, ctx: Dict[str, Any]):
+    def add_macro(self, macro: Macro, ctx: Dict[str, Any]):
         macro_name: str = macro.name

         # MacroGenerator is in clients/jinja.py
@@ -147,13 +147,11 @@ class MacroNamespaceBuilder:
         elif macro.package_name == self.root_package:
             self.globals[macro_name] = macro_func

-    def add_macros(self, macros: Iterable[ParsedMacro], ctx: Dict[str, Any]):
+    def add_macros(self, macros: Iterable[Macro], ctx: Dict[str, Any]):
         for macro in macros:
             self.add_macro(macro, ctx)

-    def build_namespace(
-        self, macros: Iterable[ParsedMacro], ctx: Dict[str, Any]
-    ) -> MacroNamespace:
+    def build_namespace(self, macros: Iterable[Macro], ctx: Dict[str, Any]) -> MacroNamespace:
        self.add_macros(macros, ctx)

         # Iterate in reverse-order and overwrite: the packages that are first
@@ -28,18 +28,15 @@ from .macros import MacroNamespaceBuilder, MacroNamespace
 from .manifest import ManifestContext
 from dbt.contracts.connection import AdapterResponse
 from dbt.contracts.graph.manifest import Manifest, Disabled
-from dbt.contracts.graph.compiled import (
-    CompiledResource,
-    CompiledSeedNode,
+from dbt.contracts.graph.nodes import (
+    Macro,
+    Exposure,
+    Metric,
+    SeedNode,
+    SourceDefinition,
+    Resource,
     ManifestNode,
 )
-from dbt.contracts.graph.parsed import (
-    ParsedMacro,
-    ParsedExposure,
-    ParsedMetric,
-    ParsedSeedNode,
-    ParsedSourceDefinition,
-)
 from dbt.contracts.graph.metrics import MetricReference, ResolvedMetricReference
 from dbt.events.functions import get_metadata_vars
 from dbt.exceptions import (
@@ -512,7 +509,7 @@ class OperationRefResolver(RuntimeRefResolver):
     def create_relation(self, target_model: ManifestNode, name: str) -> RelationProxy:
         if target_model.is_ephemeral_model:
             # In operations, we can't ref() ephemeral nodes, because
-            # ParsedMacros do not support set_cte
+            # Macros do not support set_cte
             raise_compiler_error(
                 "Operations can not ref() ephemeral nodes, but {} is ephemeral".format(
                     target_model.name
@@ -584,9 +581,9 @@ class ModelConfiguredVar(Var):
         self,
         context: Dict[str, Any],
         config: RuntimeConfig,
-        node: CompiledResource,
+        node: Resource,
     ) -> None:
-        self._node: CompiledResource
+        self._node: Resource
         self._config: RuntimeConfig = config
         super().__init__(context, config.cli_vars, node=node)

@@ -690,7 +687,7 @@ class ProviderContext(ManifestContext):
             raise InternalException(f"Invalid provider given to context: {provider}")
         # mypy appeasement - we know it'll be a RuntimeConfig
         self.config: RuntimeConfig
-        self.model: Union[ParsedMacro, ManifestNode] = model
+        self.model: Union[Macro, ManifestNode] = model
         super().__init__(config, manifest, model.package_name)
         self.sql_results: Dict[str, AttrDict] = {}
         self.context_config: Optional[ContextConfig] = context_config
@@ -779,7 +776,7 @@ class ProviderContext(ManifestContext):
     @contextmember
     def write(self, payload: str) -> str:
         # macros/source defs aren't 'writeable'.
-        if isinstance(self.model, (ParsedMacro, ParsedSourceDefinition)):
+        if isinstance(self.model, (Macro, SourceDefinition)):
             raise_compiler_error('cannot "write" macros or sources')
         self.model.build_path = self.model.write_node(self.config.target_path, "run", payload)
         return ""
@@ -799,7 +796,7 @@ class ProviderContext(ManifestContext):

     @contextmember
     def load_agate_table(self) -> agate.Table:
-        if not isinstance(self.model, (ParsedSeedNode, CompiledSeedNode)):
+        if not isinstance(self.model, SeedNode):
             raise_compiler_error(
                 "can only load_agate_table for seeds (got a {})".format(self.model.resource_type)
             )
@@ -1220,7 +1217,13 @@ class ProviderContext(ManifestContext):
         if return_value is not None:
             # Save the env_var value in the manifest and the var name in the source_file.
             # If this is compiling, do not save because it's irrelevant to parsing.
-            if self.model and not hasattr(self.model, "compiled"):
+            compiling = (
+                True
+                if hasattr(self.model, "compiled")
+                and getattr(self.model, "compiled", False) is True
+                else False
+            )
+            if self.model and not compiling:
                 # If the environment variable is set from a default, store a string indicating
                 # that so we can skip partial parsing. Otherwise the file will be scheduled for
                 # reparsing. If the default changes, the file will have been updated and therefore
@@ -1275,7 +1278,7 @@ class MacroContext(ProviderContext):

     def __init__(
         self,
-        model: ParsedMacro,
+        model: Macro,
         config: RuntimeConfig,
         manifest: Manifest,
         provider: Provider,
@@ -1390,7 +1393,7 @@ def generate_parser_model_context(


 def generate_generate_name_macro_context(
-    macro: ParsedMacro,
+    macro: Macro,
     config: RuntimeConfig,
     manifest: Manifest,
 ) -> Dict[str, Any]:
@@ -1408,7 +1411,7 @@ def generate_runtime_model_context(


 def generate_runtime_macro_context(
-    macro: ParsedMacro,
+    macro: Macro,
     config: RuntimeConfig,
     manifest: Manifest,
     package_name: Optional[str],
@@ -1444,7 +1447,7 @@ class ExposureMetricResolver(BaseResolver):


 def generate_parse_exposure(
-    exposure: ParsedExposure,
+    exposure: Exposure,
     config: RuntimeConfig,
     manifest: Manifest,
     package_name: str,
@@ -1494,7 +1497,7 @@ class MetricRefResolver(BaseResolver):


 def generate_parse_metrics(
-    metric: ParsedMetric,
+    metric: Metric,
     config: RuntimeConfig,
     manifest: Manifest,
     package_name: str,
@@ -16,6 +16,7 @@ from dbt.exceptions import InternalException
 from dbt.utils import translate_aliases
 from dbt.events.functions import fire_event
 from dbt.events.types import NewConnectionOpening
+from dbt.events.contextvars import get_node_info
 from typing_extensions import Protocol
 from dbt.dataclass_schema import (
     dbtClassMixin,
@@ -112,7 +113,9 @@ class LazyHandle:
         self.opener = opener

     def resolve(self, connection: Connection) -> Connection:
-        fire_event(NewConnectionOpening(connection_state=connection.state))
+        fire_event(
+            NewConnectionOpening(connection_state=connection.state, node_info=get_node_info())
+        )
         return self.opener(connection)
@@ -1,236 +0,0 @@
-from dbt.contracts.graph.parsed import (
-    HasTestMetadata,
-    ParsedNode,
-    ParsedAnalysisNode,
-    ParsedSingularTestNode,
-    ParsedHookNode,
-    ParsedModelNode,
-    ParsedExposure,
-    ParsedMetric,
-    ParsedResource,
-    ParsedRPCNode,
-    ParsedSqlNode,
-    ParsedGenericTestNode,
-    ParsedSeedNode,
-    ParsedSnapshotNode,
-    ParsedSourceDefinition,
-    SeedConfig,
-    TestConfig,
-    same_seeds,
-)
-from dbt.node_types import NodeType
-from dbt.contracts.util import Replaceable
-
-from dbt.dataclass_schema import dbtClassMixin
-from dataclasses import dataclass, field
-from typing import Optional, List, Union, Dict, Type
-
-
-@dataclass
-class InjectedCTE(dbtClassMixin, Replaceable):
-    id: str
-    sql: str
-
-
-@dataclass
-class CompiledNodeMixin(dbtClassMixin):
-    # this is a special mixin class to provide a required argument. If a node
-    # is missing a `compiled` flag entirely, it must not be a CompiledNode.
-    compiled: bool
-
-
-@dataclass
-class CompiledNode(ParsedNode, CompiledNodeMixin):
-    compiled_code: Optional[str] = None
-    extra_ctes_injected: bool = False
-    extra_ctes: List[InjectedCTE] = field(default_factory=list)
-    relation_name: Optional[str] = None
-    _pre_injected_sql: Optional[str] = None
-
-    def set_cte(self, cte_id: str, sql: str):
-        """This is the equivalent of what self.extra_ctes[cte_id] = sql would
-        do if extra_ctes were an OrderedDict
-        """
-        for cte in self.extra_ctes:
-            if cte.id == cte_id:
-                cte.sql = sql
-                break
-        else:
-            self.extra_ctes.append(InjectedCTE(id=cte_id, sql=sql))
-
-    def __post_serialize__(self, dct):
-        dct = super().__post_serialize__(dct)
-        if "_pre_injected_sql" in dct:
-            del dct["_pre_injected_sql"]
-        return dct
-
-
-@dataclass
-class CompiledAnalysisNode(CompiledNode):
-    resource_type: NodeType = field(metadata={"restrict": [NodeType.Analysis]})
-
-
-@dataclass
-class CompiledHookNode(CompiledNode):
-    resource_type: NodeType = field(metadata={"restrict": [NodeType.Operation]})
-    index: Optional[int] = None
-
-
-@dataclass
-class CompiledModelNode(CompiledNode):
-    resource_type: NodeType = field(metadata={"restrict": [NodeType.Model]})
-
-
-# TODO: rm?
-@dataclass
-class CompiledRPCNode(CompiledNode):
-    resource_type: NodeType = field(metadata={"restrict": [NodeType.RPCCall]})
-
-
-@dataclass
-class CompiledSqlNode(CompiledNode):
-    resource_type: NodeType = field(metadata={"restrict": [NodeType.SqlOperation]})
-
-
-@dataclass
-class CompiledSeedNode(CompiledNode):
-    # keep this in sync with ParsedSeedNode!
-    resource_type: NodeType = field(metadata={"restrict": [NodeType.Seed]})
-    config: SeedConfig = field(default_factory=SeedConfig)
-    root_path: Optional[str] = None
-
-    @property
-    def empty(self):
-        """Seeds are never empty"""
-        return False
-
-    def same_body(self, other) -> bool:
-        return same_seeds(self, other)
-
-
-@dataclass
-class CompiledSnapshotNode(CompiledNode):
-    resource_type: NodeType = field(metadata={"restrict": [NodeType.Snapshot]})
-
-
-@dataclass
-class CompiledSingularTestNode(CompiledNode):
-    resource_type: NodeType = field(metadata={"restrict": [NodeType.Test]})
-    # Was not able to make mypy happy and keep the code working. We need to
-    # refactor the various configs.
-    config: TestConfig = field(default_factory=TestConfig)  # type:ignore
-
-
-@dataclass
-class CompiledGenericTestNode(CompiledNode, HasTestMetadata):
-    # keep this in sync with ParsedGenericTestNode!
-    resource_type: NodeType = field(metadata={"restrict": [NodeType.Test]})
-    column_name: Optional[str] = None
-    file_key_name: Optional[str] = None
-    # Was not able to make mypy happy and keep the code working. We need to
-    # refactor the various configs.
-    config: TestConfig = field(default_factory=TestConfig)  # type:ignore
-
-    def same_contents(self, other) -> bool:
-        if other is None:
-            return False
-
-        return self.same_config(other) and self.same_fqn(other) and True
-
-
-CompiledTestNode = Union[CompiledSingularTestNode, CompiledGenericTestNode]
-
-
-PARSED_TYPES: Dict[Type[CompiledNode], Type[ParsedResource]] = {
-    CompiledAnalysisNode: ParsedAnalysisNode,
-    CompiledModelNode: ParsedModelNode,
-    CompiledHookNode: ParsedHookNode,
-    CompiledRPCNode: ParsedRPCNode,
-    CompiledSqlNode: ParsedSqlNode,
-    CompiledSeedNode: ParsedSeedNode,
-    CompiledSnapshotNode: ParsedSnapshotNode,
-    CompiledSingularTestNode: ParsedSingularTestNode,
-    CompiledGenericTestNode: ParsedGenericTestNode,
-}
-
-
-COMPILED_TYPES: Dict[Type[ParsedResource], Type[CompiledNode]] = {
-    ParsedAnalysisNode: CompiledAnalysisNode,
-    ParsedModelNode: CompiledModelNode,
-    ParsedHookNode: CompiledHookNode,
-    ParsedRPCNode: CompiledRPCNode,
-    ParsedSqlNode: CompiledSqlNode,
-    ParsedSeedNode: CompiledSeedNode,
-    ParsedSnapshotNode: CompiledSnapshotNode,
-    ParsedSingularTestNode: CompiledSingularTestNode,
-    ParsedGenericTestNode: CompiledGenericTestNode,
-}
-
-
-# for some types, the compiled type is the parsed type, so make this easy
-CompiledType = Union[Type[CompiledNode], Type[ParsedResource]]
-CompiledResource = Union[ParsedResource, CompiledNode]
-
-
-def compiled_type_for(parsed: ParsedNode) -> CompiledType:
-    if type(parsed) in COMPILED_TYPES:
-        return COMPILED_TYPES[type(parsed)]
-    else:
-        return type(parsed)
-
-
-def parsed_instance_for(compiled: CompiledNode) -> ParsedResource:
-    cls = PARSED_TYPES.get(type(compiled))
-    if cls is None:
-        # how???
-        raise ValueError("invalid resource_type: {}".format(compiled.resource_type))
-
-    return cls.from_dict(compiled.to_dict(omit_none=True))
-
-
-NonSourceCompiledNode = Union[
-    CompiledAnalysisNode,
-    CompiledSingularTestNode,
-    CompiledModelNode,
-    CompiledHookNode,
-    CompiledRPCNode,
-    CompiledSqlNode,
-    CompiledGenericTestNode,
-    CompiledSeedNode,
-    CompiledSnapshotNode,
-]
-
-NonSourceParsedNode = Union[
-    ParsedAnalysisNode,
-    ParsedSingularTestNode,
-    ParsedHookNode,
-    ParsedModelNode,
-    ParsedRPCNode,
-    ParsedSqlNode,
-    ParsedGenericTestNode,
-    ParsedSeedNode,
-    ParsedSnapshotNode,
-]
-
-
-# This is anything that can be in manifest.nodes.
-ManifestNode = Union[
-    NonSourceCompiledNode,
-    NonSourceParsedNode,
-]
-
-# We allow either parsed or compiled nodes, or parsed sources, as some
-# 'compile()' calls in the runner actually just return the original parsed
-# node they were given.
-CompileResultNode = Union[
-    ManifestNode,
-    ParsedSourceDefinition,
-]
-
-# anything that participates in the graph: sources, exposures, metrics,
-# or manifest nodes
-GraphMemberNode = Union[
-    CompileResultNode,
-    ParsedExposure,
-    ParsedMetric,
-]
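The deletion above removes the parallel `Compiled*` class hierarchy: previously every parsed node class had a compiled twin, and compilation round-tripped a node through `to_dict()` / `from_dict()` into its `COMPILED_TYPES` counterpart. After this refactor, the single node classes in `dbt.contracts.graph.nodes` carry the compilation fields themselves, and `_compile_node` mutates the node in place, as the compilation.py hunks earlier show. A schematic before/after, as a sketch only (field lists abbreviated, names illustrative):

```python
from dataclasses import dataclass, field
from typing import Callable, List, Optional


@dataclass
class InjectedCTE:
    id: str
    sql: str


@dataclass
class ModelNode:
    # Consolidated shape: compilation fields live on the one node class,
    # defaulted for freshly parsed nodes, instead of requiring a separate
    # CompiledModelNode created via _compiled_type_for(node).from_dict(data).
    name: str
    raw_code: str
    compiled: bool = False
    compiled_code: Optional[str] = None
    extra_ctes_injected: bool = False
    extra_ctes: List[InjectedCTE] = field(default_factory=list)


def compile_node(node: ModelNode, render: Callable[[str], str]) -> ModelNode:
    # Mutate in place, mirroring the new _compile_node above.
    node.compiled_code = render(node.raw_code)
    node.compiled = True
    return node
```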
@@ -16,29 +16,24 @@ from typing import (
     TypeVar,
     Callable,
     Generic,
-    cast,
     AbstractSet,
     ClassVar,
 )
 from typing_extensions import Protocol
 from uuid import UUID

-from dbt.contracts.graph.compiled import (
-    CompileResultNode,
-    ManifestNode,
-    NonSourceCompiledNode,
-    GraphMemberNode,
-)
-from dbt.contracts.graph.parsed import (
-    ParsedMacro,
-    ParsedDocumentation,
-    ParsedSourceDefinition,
-    ParsedGenericTestNode,
-    ParsedExposure,
-    ParsedMetric,
-    HasUniqueID,
+from dbt.contracts.graph.nodes import (
+    Macro,
+    Documentation,
+    SourceDefinition,
+    GenericTestNode,
+    Exposure,
+    Metric,
+    UnpatchedSourceDefinition,
+    ManifestNodes,
+    ManifestNode,
+    GraphMemberNode,
+    ResultNode,
+    BaseNode,
 )
 from dbt.contracts.graph.unparsed import SourcePatch
 from dbt.contracts.files import SourceFile, SchemaSourceFile, FileHash, AnySourceFile
@@ -96,7 +91,7 @@ class DocLookup(dbtClassMixin):
             return self.perform_lookup(unique_id, manifest)
         return None

-    def add_doc(self, doc: ParsedDocumentation):
+    def add_doc(self, doc: Documentation):
         if doc.name not in self.storage:
             self.storage[doc.name] = {}
         self.storage[doc.name][doc.package_name] = doc.unique_id
@@ -105,7 +100,7 @@ class DocLookup(dbtClassMixin):
         for doc in manifest.docs.values():
             self.add_doc(doc)

-    def perform_lookup(self, unique_id: UniqueID, manifest) -> ParsedDocumentation:
+    def perform_lookup(self, unique_id: UniqueID, manifest) -> Documentation:
         if unique_id not in manifest.docs:
             raise dbt.exceptions.InternalException(
                 f"Doc {unique_id} found in cache but not found in manifest"
@@ -127,7 +122,7 @@ class SourceLookup(dbtClassMixin):
             return self.perform_lookup(unique_id, manifest)
         return None

-    def add_source(self, source: ParsedSourceDefinition):
+    def add_source(self, source: SourceDefinition):
         if source.search_name not in self.storage:
             self.storage[source.search_name] = {}

@@ -138,7 +133,7 @@ class SourceLookup(dbtClassMixin):
         if hasattr(source, "source_name"):
             self.add_source(source)

-    def perform_lookup(self, unique_id: UniqueID, manifest: "Manifest") -> ParsedSourceDefinition:
+    def perform_lookup(self, unique_id: UniqueID, manifest: "Manifest") -> SourceDefinition:
         if unique_id not in manifest.sources:
             raise dbt.exceptions.InternalException(
                 f"Source {unique_id} found in cache but not found in manifest"
@@ -198,7 +193,7 @@ class MetricLookup(dbtClassMixin):
             return self.perform_lookup(unique_id, manifest)
         return None

-    def add_metric(self, metric: ParsedMetric):
+    def add_metric(self, metric: Metric):
         if metric.search_name not in self.storage:
             self.storage[metric.search_name] = {}

@@ -209,7 +204,7 @@ class MetricLookup(dbtClassMixin):
         if hasattr(metric, "name"):
             self.add_metric(metric)

-    def perform_lookup(self, unique_id: UniqueID, manifest: "Manifest") -> ParsedMetric:
+    def perform_lookup(self, unique_id: UniqueID, manifest: "Manifest") -> Metric:
         if unique_id not in manifest.metrics:
             raise dbt.exceptions.InternalException(
                 f"Metric {unique_id} found in cache but not found in manifest"
@@ -325,7 +320,7 @@ def _sort_values(dct):


 def build_node_edges(nodes: List[ManifestNode]):
-    """Build the forward and backward edges on the given list of ParsedNodes
+    """Build the forward and backward edges on the given list of ManifestNodes
     and return them as two separate dictionaries, each mapping unique IDs to
     lists of edges.
     """
@@ -343,10 +338,10 @@ def build_node_edges(nodes: List[ManifestNode]):
 # Build a map of children of macros and generic tests
 def build_macro_edges(nodes: List[Any]):
     forward_edges: Dict[str, List[str]] = {
-        n.unique_id: [] for n in nodes if n.unique_id.startswith("macro") or n.depends_on.macros
+        n.unique_id: [] for n in nodes if n.unique_id.startswith("macro") or n.depends_on_macros
     }
     for node in nodes:
-        for unique_id in node.depends_on.macros:
+        for unique_id in node.depends_on_macros:
             if unique_id in forward_edges.keys():
                 forward_edges[unique_id].append(node.unique_id)
     return _sort_values(forward_edges)
@@ -365,7 +360,7 @@ class Locality(enum.IntEnum):
 @dataclass
 class MacroCandidate:
     locality: Locality
-    macro: ParsedMacro
+    macro: Macro

     def __eq__(self, other: object) -> bool:
         if not isinstance(other, MacroCandidate):
@@ -430,16 +425,14 @@ M = TypeVar("M", bound=MacroCandidate)


 class CandidateList(List[M]):
-    def last(self) -> Optional[ParsedMacro]:
+    def last(self) -> Optional[Macro]:
         if not self:
             return None
         self.sort()
         return self[-1].macro


-def _get_locality(
-    macro: ParsedMacro, root_project_name: str, internal_packages: Set[str]
-) -> Locality:
+def _get_locality(macro: Macro, root_project_name: str, internal_packages: Set[str]) -> Locality:
     if macro.package_name == root_project_name:
         return Locality.Root
     elif macro.package_name in internal_packages:
@@ -465,16 +458,16 @@ class Disabled(Generic[D]):
     target: D


-MaybeMetricNode = Optional[Union[ParsedMetric, Disabled[ParsedMetric]]]
+MaybeMetricNode = Optional[Union[Metric, Disabled[Metric]]]


-MaybeDocumentation = Optional[ParsedDocumentation]
+MaybeDocumentation = Optional[Documentation]


 MaybeParsedSource = Optional[
     Union[
-        ParsedSourceDefinition,
-        Disabled[ParsedSourceDefinition],
+        SourceDefinition,
+        Disabled[SourceDefinition],
     ]
 ]

@@ -514,7 +507,7 @@ class MacroMethods:

     def find_macro_by_name(
         self, name: str, root_project_name: str, package: Optional[str]
-    ) -> Optional[ParsedMacro]:
+    ) -> Optional[Macro]:
         """Find a macro in the graph by its name and package name, or None for
         any package. The root project name is used to determine priority:
          - locally defined macros come first
@@ -537,7 +530,7 @@ class MacroMethods:

     def find_generate_macro_by_name(
         self, component: str, root_project_name: str
-    ) -> Optional[ParsedMacro]:
+    ) -> Optional[Macro]:
         """
         The `generate_X_name` macros are similar to regular ones, but ignore
         imported packages.
@@ -606,11 +599,11 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
     # is added it must all be added in the __reduce_ex__ method in the
     # args tuple in the right position.
     nodes: MutableMapping[str, ManifestNode] = field(default_factory=dict)
-    sources: MutableMapping[str, ParsedSourceDefinition] = field(default_factory=dict)
-    macros: MutableMapping[str, ParsedMacro] = field(default_factory=dict)
-    docs: MutableMapping[str, ParsedDocumentation] = field(default_factory=dict)
-    exposures: MutableMapping[str, ParsedExposure] = field(default_factory=dict)
-    metrics: MutableMapping[str, ParsedMetric] = field(default_factory=dict)
+    sources: MutableMapping[str, SourceDefinition] = field(default_factory=dict)
+    macros: MutableMapping[str, Macro] = field(default_factory=dict)
+    docs: MutableMapping[str, Documentation] = field(default_factory=dict)
+    exposures: MutableMapping[str, Exposure] = field(default_factory=dict)
+    metrics: MutableMapping[str, Metric] = field(default_factory=dict)
     selectors: MutableMapping[str, Any] = field(default_factory=dict)
     files: MutableMapping[str, AnySourceFile] = field(default_factory=dict)
     metadata: ManifestMetadata = field(default_factory=ManifestMetadata)
@@ -658,7 +651,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
         obj._lock = flags.MP_CONTEXT.Lock()
         return obj

-    def sync_update_node(self, new_node: NonSourceCompiledNode) -> NonSourceCompiledNode:
+    def sync_update_node(self, new_node: ManifestNode) -> ManifestNode:
         """update the node with a lock. The only time we should want to lock is
         when compiling an ephemeral ancestor of a node at runtime, because
         multiple threads could be just-in-time compiling the same ephemeral
@@ -671,21 +664,21 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
         with self._lock:
             existing = self.nodes[new_node.unique_id]
             if getattr(existing, "compiled", False):
-                # already compiled -> must be a NonSourceCompiledNode
-                return cast(NonSourceCompiledNode, existing)
+                # already compiled
+                return existing
             _update_into(self.nodes, new_node)
             return new_node

-    def update_exposure(self, new_exposure: ParsedExposure):
+    def update_exposure(self, new_exposure: Exposure):
         _update_into(self.exposures, new_exposure)

-    def update_metric(self, new_metric: ParsedMetric):
+    def update_metric(self, new_metric: Metric):
         _update_into(self.metrics, new_metric)

     def update_node(self, new_node: ManifestNode):
         _update_into(self.nodes, new_node)

-    def update_source(self, new_source: ParsedSourceDefinition):
+    def update_source(self, new_source: SourceDefinition):
         _update_into(self.sources, new_source)

     def build_flat_graph(self):
@@ -738,7 +731,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):

     def find_materialization_macro_by_name(
         self, project_name: str, materialization_name: str, adapter_type: str
-    ) -> Optional[ParsedMacro]:
+    ) -> Optional[Macro]:
         candidates: CandidateList = CandidateList(
             chain.from_iterable(
                 self._materialization_candidates_for(
@@ -943,8 +936,8 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
         search_name = f"{target_source_name}.{target_table_name}"
         candidates = _search_packages(current_project, node_package)

-        source: Optional[ParsedSourceDefinition] = None
-        disabled: Optional[List[ParsedSourceDefinition]] = None
+        source: Optional[SourceDefinition] = None
+        disabled: Optional[List[SourceDefinition]] = None

         for pkg in candidates:
             source = self.source_lookup.find(search_name, pkg, self)
@@ -968,8 +961,8 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
         node_package: str,
     ) -> MaybeMetricNode:

-        metric: Optional[ParsedMetric] = None
-        disabled: Optional[List[ParsedMetric]] = None
+        metric: Optional[Metric] = None
+        disabled: Optional[List[Metric]] = None

         candidates = _search_packages(current_project, node_package, target_metric_package)
         for pkg in candidates:
@@ -992,7 +985,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
         package: Optional[str],
         current_project: str,
         node_package: str,
-    ) -> Optional[ParsedDocumentation]:
+    ) -> Optional[Documentation]:
         """Resolve the given documentation. This follows the same algorithm as
         resolve_ref except the is_enabled checks are unnecessary as docs are
         always enabled.
@@ -1044,7 +1037,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):

     # Methods that were formerly in ParseResult

-    def add_macro(self, source_file: SourceFile, macro: ParsedMacro):
+    def add_macro(self, source_file: SourceFile, macro: Macro):
         if macro.unique_id in self.macros:
             # detect that the macro exists and emit an error
             other_path = self.macros[macro.unique_id].original_file_path
@@ -1086,30 +1079,30 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
         self.sources[source.unique_id] = source  # type: ignore
         source_file.sources.append(source.unique_id)

-    def add_node_nofile(self, node: ManifestNodes):
+    def add_node_nofile(self, node: ManifestNode):
         # nodes can't be overwritten!
         _check_duplicates(node, self.nodes)
         self.nodes[node.unique_id] = node

-    def add_node(self, source_file: AnySourceFile, node: ManifestNodes, test_from=None):
+    def add_node(self, source_file: AnySourceFile, node: ManifestNode, test_from=None):
         self.add_node_nofile(node)
         if isinstance(source_file, SchemaSourceFile):
-            if isinstance(node, ParsedGenericTestNode):
+            if isinstance(node, GenericTestNode):
                 assert test_from
                 source_file.add_test(node.unique_id, test_from)
-            if isinstance(node, ParsedMetric):
+            if isinstance(node, Metric):
                 source_file.metrics.append(node.unique_id)
-            if isinstance(node, ParsedExposure):
+            if isinstance(node, Exposure):
                 source_file.exposures.append(node.unique_id)
         else:
             source_file.nodes.append(node.unique_id)

-    def add_exposure(self, source_file: SchemaSourceFile, exposure: ParsedExposure):
+    def add_exposure(self, source_file: SchemaSourceFile, exposure: Exposure):
         _check_duplicates(exposure, self.exposures)
         self.exposures[exposure.unique_id] = exposure
         source_file.exposures.append(exposure.unique_id)

-    def add_metric(self, source_file: SchemaSourceFile, metric: ParsedMetric):
+    def add_metric(self, source_file: SchemaSourceFile, metric: Metric):
         _check_duplicates(metric, self.metrics)
         self.metrics[metric.unique_id] = metric
         source_file.metrics.append(metric.unique_id)
@@ -1121,20 +1114,20 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
         else:
             self.disabled[node.unique_id] = [node]

-    def add_disabled(self, source_file: AnySourceFile, node: CompileResultNode, test_from=None):
+    def add_disabled(self, source_file: AnySourceFile, node: ResultNode, test_from=None):
         self.add_disabled_nofile(node)
         if isinstance(source_file, SchemaSourceFile):
-            if isinstance(node, ParsedGenericTestNode):
+            if isinstance(node, GenericTestNode):
                 assert test_from
                 source_file.add_test(node.unique_id, test_from)
-            if isinstance(node, ParsedMetric):
+            if isinstance(node, Metric):
                 source_file.metrics.append(node.unique_id)
-            if isinstance(node, ParsedExposure):
+            if isinstance(node, Exposure):
                 source_file.exposures.append(node.unique_id)
         else:
             source_file.nodes.append(node.unique_id)

-    def add_doc(self, source_file: SourceFile, doc: ParsedDocumentation):
+    def add_doc(self, source_file: SourceFile, doc: Documentation):
         _check_duplicates(doc, self.docs)
         self.docs[doc.unique_id] = doc
         source_file.docs.append(doc.unique_id)
@@ -1192,27 +1185,27 @@ class WritableManifest(ArtifactMixin):
     nodes: Mapping[UniqueID, ManifestNode] = field(
         metadata=dict(description=("The nodes defined in the dbt project and its dependencies"))
     )
-    sources: Mapping[UniqueID, ParsedSourceDefinition] = field(
+    sources: Mapping[UniqueID, SourceDefinition] = field(
         metadata=dict(description=("The sources defined in the dbt project and its dependencies"))
     )
-    macros: Mapping[UniqueID, ParsedMacro] = field(
+    macros: Mapping[UniqueID, Macro] = field(
         metadata=dict(description=("The macros defined in the dbt project and its dependencies"))
     )
-    docs: Mapping[UniqueID, ParsedDocumentation] = field(
+    docs: Mapping[UniqueID, Documentation] = field(
         metadata=dict(description=("The docs defined in the dbt project and its dependencies"))
     )
-    exposures: Mapping[UniqueID, ParsedExposure] = field(
+    exposures: Mapping[UniqueID, Exposure] = field(
         metadata=dict(
             description=("The exposures defined in the dbt project and its dependencies")
         )
     )
-    metrics: Mapping[UniqueID, ParsedMetric] = field(
+    metrics: Mapping[UniqueID, Metric] = field(
         metadata=dict(description=("The metrics defined in the dbt project and its dependencies"))
     )
     selectors: Mapping[UniqueID, Any] = field(
         metadata=dict(description=("The selectors defined in selectors.yml"))
     )
-    disabled: Optional[Mapping[UniqueID, List[CompileResultNode]]] = field(
+    disabled: Optional[Mapping[UniqueID, List[ResultNode]]] = field(
         metadata=dict(description="A mapping of the disabled nodes in the target")
     )
     parent_map: Optional[NodeEdgeMap] = field(
@@ -1242,7 +1235,7 @@ class WritableManifest(ArtifactMixin):
         return dct


-def _check_duplicates(value: HasUniqueID, src: Mapping[str, HasUniqueID]):
+def _check_duplicates(value: BaseNode, src: Mapping[str, BaseNode]):
     if value.unique_id in src:
         raise_duplicate_resource_name(value, src[value.unique_id])

@@ -12,7 +12,7 @@ class MetricReference(object):

 class ResolvedMetricReference(MetricReference):
     """
-    Simple proxy over a ParsedMetric which delegates property
+    Simple proxy over a Metric which delegates property
     lookups to the underlying node. Also adds helper functions
     for working with metrics (ie. __str__ and templating functions)
     """
File diff suppressed because it is too large
@@ -55,6 +55,12 @@ class LocalPackage(Package):
 RawVersion = Union[str, float]


+@dataclass
+class TarballPackage(Package):
+    tarball: str
+    name: str
+
+
 @dataclass
 class GitPackage(Package):
     git: str
@@ -82,7 +88,7 @@ class RegistryPackage(Package):
         return [str(self.version)]


-PackageSpec = Union[LocalPackage, GitPackage, RegistryPackage]
+PackageSpec = Union[LocalPackage, TarballPackage, GitPackage, RegistryPackage]


 @dataclass
@@ -216,7 +222,7 @@ class Project(HyphenatedDbtClassMixin, Replaceable):
         ),
     )
     packages: List[PackageSpec] = field(default_factory=list)
-    query_comment: Optional[Union[QueryComment, NoValue, str]] = NoValue()
+    query_comment: Optional[Union[QueryComment, NoValue, str]] = field(default_factory=NoValue)

     @classmethod
     def validate(cls, data):
@@ -251,7 +257,6 @@ class UserConfig(ExtensibleDbtClassMixin, Replaceable, UserConfigContract):
     static_parser: Optional[bool] = None
     indirect_selection: Optional[str] = None
     cache_selected_only: Optional[bool] = None
-    event_buffer_size: Optional[int] = None


 @dataclass
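The new `TarballPackage` dataclass corresponds to the 1.4.0-b1 feature that lets a package be installed from a tarball URL declared in `packages.yml`. A sketch of how the new spec slots into the union (dataclasses abbreviated; the YAML keys in the comment are assumed from the field names, not confirmed by this diff):

```python
from dataclasses import dataclass
from typing import Union


@dataclass
class LocalPackage:
    local: str


@dataclass
class TarballPackage:
    tarball: str  # URL of the package tarball
    name: str     # name the package is installed under


# Roughly the packages.yml entry this models (illustrative):
#   packages:
#     - tarball: https://example.com/my_package.tar.gz
#       name: my_package
PackageSpec = Union[LocalPackage, TarballPackage]

spec: PackageSpec = TarballPackage(
    tarball="https://example.com/my_package.tar.gz", name="my_package"
)
```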
@@ -1,6 +1,5 @@
|
||||
from dbt.contracts.graph.manifest import CompileResultNode
|
||||
from dbt.contracts.graph.unparsed import FreshnessThreshold
|
||||
from dbt.contracts.graph.parsed import ParsedSourceDefinition
|
||||
from dbt.contracts.graph.nodes import SourceDefinition, ResultNode
|
||||
from dbt.contracts.util import (
|
||||
BaseArtifactMetadata,
|
||||
ArtifactMixin,
|
||||
@@ -11,11 +10,9 @@ from dbt.contracts.util import (
|
||||
from dbt.exceptions import InternalException
|
||||
from dbt.events.functions import fire_event
|
||||
from dbt.events.types import TimingInfoCollected
|
||||
from dbt.events.proto_types import RunResultMsg
|
||||
from dbt.logger import (
|
||||
TimingProcessor,
|
||||
JsonOnly,
|
||||
)
|
||||
from dbt.events.proto_types import RunResultMsg, TimingInfoMsg
|
||||
from dbt.events.contextvars import get_node_info
|
||||
from dbt.logger import TimingProcessor
|
||||
from dbt.utils import lowercase, cast_to_str, cast_to_int
|
||||
from dbt.dataclass_schema import dbtClassMixin, StrEnum
|
||||
|
||||
@@ -48,7 +45,14 @@ class TimingInfo(dbtClassMixin):
|
||||
def end(self):
|
||||
self.completed_at = datetime.utcnow()
|
||||
|
||||
def to_msg(self):
|
||||
timsg = TimingInfoMsg(
|
||||
name=self.name, started_at=self.started_at, completed_at=self.completed_at
|
||||
)
|
||||
return timsg
|
||||
|
||||
|
||||
# This is a context manager
|
||||
class collect_timing_info:
|
||||
def __init__(self, name: str):
|
||||
self.timing_info = TimingInfo(name=name)
|
||||
@@ -59,8 +63,13 @@ class collect_timing_info:
|
||||
|
||||
def __exit__(self, exc_type, exc_value, traceback):
|
||||
self.timing_info.end()
|
||||
with JsonOnly(), TimingProcessor(self.timing_info):
|
||||
fire_event(TimingInfoCollected())
|
||||
# Note: when legacy logger is removed, we can remove the following line
|
||||
with TimingProcessor(self.timing_info):
|
||||
fire_event(
|
||||
TimingInfoCollected(
|
||||
timing_info=self.timing_info.to_msg(), node_info=get_node_info()
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
class RunningStatus(StrEnum):
|
||||
@@ -128,13 +137,14 @@ class BaseResult(dbtClassMixin):
        msg.thread = self.thread_id
        msg.execution_time = self.execution_time
        msg.num_failures = cast_to_int(self.failures)
-        # timing_info, adapter_response, message
+        msg.timing_info = [ti.to_msg() for ti in self.timing]
+        # adapter_response
        return msg


@dataclass
class NodeResult(BaseResult):
-    node: CompileResultNode
+    node: ResultNode


@dataclass
@@ -273,7 +283,7 @@ class RunOperationResultsArtifact(RunOperationResult, ArtifactMixin):

@dataclass
class SourceFreshnessResult(NodeResult):
-    node: ParsedSourceDefinition
+    node: SourceDefinition
    status: FreshnessStatus
    max_loaded_at: datetime
    snapshotted_at: datetime
@@ -5,7 +5,7 @@ from typing import Optional, List, Any, Dict, Sequence

from dbt.dataclass_schema import dbtClassMixin

-from dbt.contracts.graph.compiled import CompileResultNode
+from dbt.contracts.graph.nodes import ResultNode
from dbt.contracts.results import (
    RunResult,
    RunResultsArtifact,
@@ -32,7 +32,7 @@ class RemoteResult(VersionedSchema):
class RemoteCompileResultMixin(RemoteResult):
    raw_code: str
    compiled_code: str
-    node: CompileResultNode
+    node: ResultNode
    timing: List[TimingInfo]
@@ -237,18 +237,43 @@ def rename_sql_attr(node_content: dict) -> dict:
    return node_content


+def upgrade_node_content(node_content):
+    rename_sql_attr(node_content)
+    if node_content["resource_type"] != "seed" and "root_path" in node_content:
+        del node_content["root_path"]
+
+
+def upgrade_seed_content(node_content):
+    # Remove compilation related attributes
+    for attr_name in (
+        "language",
+        "refs",
+        "sources",
+        "metrics",
+        "depends_on",
+        "compiled_path",
+        "compiled",
+        "compiled_code",
+        "extra_ctes_injected",
+        "extra_ctes",
+        "relation_name",
+    ):
+        if attr_name in node_content:
+            del node_content[attr_name]
+
+
def upgrade_manifest_json(manifest: dict) -> dict:
    for node_content in manifest.get("nodes", {}).values():
-        node_content = rename_sql_attr(node_content)
-        if node_content["resource_type"] != "seed" and "root_path" in node_content:
-            del node_content["root_path"]
+        upgrade_node_content(node_content)
+        if node_content["resource_type"] == "seed":
+            upgrade_seed_content(node_content)
    for disabled in manifest.get("disabled", {}).values():
        # There can be multiple disabled nodes for the same unique_id
        # so make sure all the nodes get the attr renamed
        for node_content in disabled:
-            rename_sql_attr(node_content)
-            if node_content["resource_type"] != "seed" and "root_path" in node_content:
-                del node_content["root_path"]
+            upgrade_node_content(node_content)
+            if node_content["resource_type"] == "seed":
+                upgrade_seed_content(node_content)
    for metric_content in manifest.get("metrics", {}).values():
        # handle attr renames + value translation ("expression" -> "derived")
        metric_content = rename_metric_attr(metric_content)
@@ -266,6 +291,7 @@ def upgrade_manifest_json(manifest: dict) -> dict:
    for doc_content in manifest.get("docs", {}).values():
        if "root_path" in doc_content:
            del doc_content["root_path"]
+        doc_content["resource_type"] = "doc"
    return manifest
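A quick illustration (assumed example, not from this diff) of what `upgrade_seed_content` does to an old manifest entry: compilation-related attributes are stripped from seeds, since seeds are data files and carry no compiled SQL.

```python
# A pared-down version of the attribute stripping above on a made-up node dict.
node = {
    "resource_type": "seed",
    "root_path": "/projects/jaffle_shop",  # hypothetical path
    "compiled": True,
    "compiled_code": "select 1",
    "depends_on": {"nodes": []},
}
for attr in ("compiled", "compiled_code", "depends_on"):
    node.pop(attr, None)  # remove if present, ignore if already absent
assert "compiled_code" not in node
```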
@@ -16,6 +16,8 @@ Defines the base classes of `PinnedPackage` and `UnpinnedPackage`.

`downloads_directory` sets the directory packages will be downloaded to.

+`_install` has retry logic if the download or untarring process hit exceptions (see `dbt.utils._connection_exception_retry`).
+
## `git.py`

Extends `PinnedPackage` and `UnpinnedPackage` specific to dbt packages defined with git urls.
@@ -28,8 +30,10 @@ Extends `PinnedPackage` and `UnpinnedPackage` specific to dbt packages defined l

Extends `PinnedPackage` and `UnpinnedPackage` specific to dbt packages defined on the dbt Hub registry.

-`install` has retry logic if the download or untarring process hit exceptions (see `dbt.utils._connection_exception_retry`).
-
## `resolver.py`

Resolves the package definition into package objects to download.
+
+## `tarball.py`
+
+Extends `PinnedPackage` and `UnpinnedPackage` specific to dbt packages defined by a URL to a tarball hosted on an HTTP server.
@@ -1,13 +1,16 @@
import abc
import os
+import functools
import tempfile
from contextlib import contextmanager
+from pathlib import Path
from typing import List, Optional, Generic, TypeVar

from dbt.clients import system
from dbt.contracts.project import ProjectPackageMetadata
from dbt.events.functions import fire_event
from dbt.events.types import DepsSetDownloadDirectory
+from dbt.utils import _connection_exception_retry as connection_exception_retry

DOWNLOADS_PATH = None
@@ -97,6 +100,34 @@ class PinnedPackage(BasePackage):
    def get_subdirectory(self):
        return None

+    def _install(self, project, renderer):
+        metadata = self.fetch_metadata(project, renderer)
+
+        tar_name = f"{self.package}.{self.version}.tar.gz"
+        tar_path = (Path(get_downloads_path()) / tar_name).resolve(strict=False)
+        system.make_directory(str(tar_path.parent))
+
+        download_url = metadata.downloads.tarball
+        deps_path = project.packages_install_path
+        package_name = self.get_project_name(project, renderer)
+
+        download_untar_fn = functools.partial(
+            self.download_and_untar, download_url, str(tar_path), deps_path, package_name
+        )
+        connection_exception_retry(download_untar_fn, 5)
+
+    def download_and_untar(self, download_url, tar_path, deps_path, package_name):
+        """
+        Sometimes the download of the files fails and we want to retry. Sometimes the
+        download appears successful but the file did not make it through as expected
+        (generally due to a github incident). Either way we want to retry downloading
+        and untarring to see if we can get a success. Call this within
+        `_connection_exception_retry`
+        """
+        system.download(download_url, tar_path)
+        system.untar_package(tar_path, deps_path, package_name)
+

SomePinned = TypeVar("SomePinned", bound=PinnedPackage)
SomeUnpinned = TypeVar("SomeUnpinned", bound="UnpinnedPackage")
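The `_install` hoisted into `PinnedPackage` binds its arguments with `functools.partial` and retries the whole download-and-untar unit. A generic, self-contained sketch (assumed example, not dbt's implementation) of that pattern:

```python
# Bind arguments up front, then retry the whole callable a fixed number of
# times on connection errors, mirroring connection_exception_retry(fn, 5).
import functools
import time

def retry(fn, attempts):
    for i in range(attempts):
        try:
            return fn()
        except ConnectionError:
            if i == attempts - 1:
                raise
            time.sleep(1)  # simple fixed backoff

def download_and_untar(url, tar_path):
    print(f"fetching {url} -> {tar_path}")  # stand-in for download + untar

retry(functools.partial(download_and_untar, "https://example.com/pkg.tgz", "/tmp/pkg.tgz"), 5)
```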
@@ -1,23 +1,20 @@
-import os
-import functools
from typing import List

from dbt import semver
from dbt import flags
from dbt.version import get_installed_version
-from dbt.clients import registry, system
+from dbt.clients import registry
from dbt.contracts.project import (
    RegistryPackageMetadata,
    RegistryPackage,
)
-from dbt.deps.base import PinnedPackage, UnpinnedPackage, get_downloads_path
+from dbt.deps.base import PinnedPackage, UnpinnedPackage
from dbt.exceptions import (
    package_version_not_found,
    VersionsNotCompatibleException,
    DependencyException,
    package_not_found,
)
-from dbt.utils import _connection_exception_retry as connection_exception_retry


class RegistryPackageMixin:
@@ -60,32 +57,7 @@ class RegistryPinnedPackage(RegistryPackageMixin, PinnedPackage):
        return RegistryPackageMetadata.from_dict(dct)

    def install(self, project, renderer):
-        metadata = self.fetch_metadata(project, renderer)
-
-        tar_name = "{}.{}.tar.gz".format(self.package, self.version)
-        tar_path = os.path.realpath(os.path.join(get_downloads_path(), tar_name))
-        system.make_directory(os.path.dirname(tar_path))
-
-        download_url = metadata.downloads.tarball
-        deps_path = project.packages_install_path
-        package_name = self.get_project_name(project, renderer)
-
-        download_untar_fn = functools.partial(
-            self.download_and_untar, download_url, tar_path, deps_path, package_name
-        )
-        connection_exception_retry(download_untar_fn, 5)
-
-    def download_and_untar(self, download_url, tar_path, deps_path, package_name):
-        """
-        Sometimes the download of the files fails and we want to retry. Sometimes the
-        download appears successful but the file did not make it through as expected
-        (generally due to a github incident). Either way we want to retry downloading
-        and untarring to see if we can get a success. Call this within
-        `_connection_exception_retry`
-        """
-
-        system.download(download_url, tar_path)
-        system.untar_package(tar_path, deps_path, package_name)
+        self._install(project, renderer)


class RegistryUnpinnedPackage(RegistryPackageMixin, UnpinnedPackage[RegistryPinnedPackage]):
@@ -7,16 +7,18 @@ from dbt.config import Project, RuntimeConfig
from dbt.config.renderer import DbtProjectYamlRenderer
from dbt.deps.base import BasePackage, PinnedPackage, UnpinnedPackage
from dbt.deps.local import LocalUnpinnedPackage
+from dbt.deps.tarball import TarballUnpinnedPackage
from dbt.deps.git import GitUnpinnedPackage
from dbt.deps.registry import RegistryUnpinnedPackage

from dbt.contracts.project import (
    LocalPackage,
+    TarballPackage,
    GitPackage,
    RegistryPackage,
)

-PackageContract = Union[LocalPackage, GitPackage, RegistryPackage]
+PackageContract = Union[LocalPackage, TarballPackage, GitPackage, RegistryPackage]


@dataclass
@@ -69,6 +71,8 @@ class PackageListing:
        for contract in src:
            if isinstance(contract, LocalPackage):
                pkg = LocalUnpinnedPackage.from_contract(contract)
+            elif isinstance(contract, TarballPackage):
+                pkg = TarballUnpinnedPackage.from_contract(contract)
            elif isinstance(contract, GitPackage):
                pkg = GitUnpinnedPackage.from_contract(contract)
            elif isinstance(contract, RegistryPackage):
core/dbt/deps/tarball.py (new file, 74 lines)
@@ -0,0 +1,74 @@
from dbt.contracts.project import RegistryPackageMetadata, TarballPackage
from dbt.deps.base import PinnedPackage, UnpinnedPackage


class TarballPackageMixin:
    def __init__(self, tarball: str) -> None:
        super().__init__()
        self.tarball = tarball

    @property
    def name(self):
        return self.tarball

    def source_type(self) -> str:
        return "tarball"


class TarballPinnedPackage(TarballPackageMixin, PinnedPackage):
    def __init__(self, tarball: str, package: str) -> None:
        super().__init__(tarball)
        # setup to recycle RegistryPinnedPackage fns
        self.package = package
        self.version = "tarball"

    @property
    def name(self):
        return self.package

    def get_version(self):
        return self.version

    def nice_version_name(self):
        return f"tarball (url: {self.tarball})"

    def _fetch_metadata(self, project, renderer):
        """
        recycle RegistryPackageMetadata so that we can use the install and
        download_and_untar from RegistryPinnedPackage next.
        build RegistryPackageMetadata from info passed via packages.yml since no
        'metadata' service exists in this case.
        """
        dct = {
            "name": self.package,
            "packages": [],  # note: required by RegistryPackageMetadata
            "downloads": {"tarball": self.tarball},
        }
        return RegistryPackageMetadata.from_dict(dct)

    def install(self, project, renderer):
        self._install(project, renderer)


class TarballUnpinnedPackage(TarballPackageMixin, UnpinnedPackage[TarballPinnedPackage]):
    def __init__(
        self,
        tarball: str,
        package: str,
    ) -> None:
        super().__init__(tarball)
        # setup to recycle RegistryPinnedPackage fns
        self.package = package
        self.version = "tarball"

    @classmethod
    def from_contract(cls, contract: TarballPackage) -> "TarballUnpinnedPackage":
        return cls(tarball=contract.tarball, package=contract.name)

    def incorporate(self, other: "TarballUnpinnedPackage") -> "TarballUnpinnedPackage":
        return TarballUnpinnedPackage(tarball=self.tarball, package=self.package)

    def resolved(self) -> TarballPinnedPackage:
        return TarballPinnedPackage(tarball=self.tarball, package=self.package)
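A usage sketch of the flow through the new module (assumed example; the contract values are made up and `install` would need a real project and renderer):

```python
# contract -> unpinned -> pinned -> install, using the classes defined above.
contract = TarballPackage(tarball="https://example.com/pkg.tar.gz", name="my_pkg")
unpinned = TarballUnpinnedPackage.from_contract(contract)
pinned = unpinned.resolved()
print(pinned.nice_version_name())  # "tarball (url: https://example.com/pkg.tar.gz)"
# pinned.install(project, renderer) would then download and untar via PinnedPackage._install
```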
BIN core/dbt/docs/build/doctrees/environment.pickle (vendored; binary file not shown)
@@ -1,6 +1,7 @@
import traceback
from dataclasses import dataclass
from dbt.events.functions import fire_event
+from dbt.events.contextvars import get_node_info
from dbt.events.types import (
    AdapterEventDebug,
    AdapterEventInfo,
@@ -15,27 +16,39 @@ class AdapterLogger:
    name: str

    def debug(self, msg, *args):
-        event = AdapterEventDebug(name=self.name, base_msg=msg, args=args)
+        event = AdapterEventDebug(
+            name=self.name, base_msg=msg, args=args, node_info=get_node_info()
+        )
        fire_event(event)

    def info(self, msg, *args):
-        event = AdapterEventInfo(name=self.name, base_msg=msg, args=args)
+        event = AdapterEventInfo(
+            name=self.name, base_msg=msg, args=args, node_info=get_node_info()
+        )
        fire_event(event)

    def warning(self, msg, *args):
-        event = AdapterEventWarning(name=self.name, base_msg=msg, args=args)
+        event = AdapterEventWarning(
+            name=self.name, base_msg=msg, args=args, node_info=get_node_info()
+        )
        fire_event(event)

    def error(self, msg, *args):
-        event = AdapterEventError(name=self.name, base_msg=msg, args=args)
+        event = AdapterEventError(
+            name=self.name, base_msg=msg, args=args, node_info=get_node_info()
+        )
        fire_event(event)

    # The default exc_info=True is what makes this method different
    def exception(self, msg, *args):
-        event = AdapterEventError(name=self.name, base_msg=msg, args=args)
+        event = AdapterEventError(
+            name=self.name, base_msg=msg, args=args, node_info=get_node_info()
+        )
        event.exc_info = traceback.format_exc()
        fire_event(event)

    def critical(self, msg, *args):
-        event = AdapterEventError(name=self.name, base_msg=msg, args=args)
+        event = AdapterEventError(
+            name=self.name, base_msg=msg, args=args, node_info=get_node_info()
+        )
        fire_event(event)
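Every `AdapterLogger` method now pulls ambient node context from `get_node_info()` at call time, so adapters don't have to thread node metadata through each log call. A self-contained sketch (assumed example) of that enrichment pattern:

```python
# A logger reads per-task context from a ContextVar when the event is built.
import contextvars

node_info: contextvars.ContextVar[dict] = contextvars.ContextVar("node_info", default={})

def log(msg: str) -> None:
    print({"msg": msg, "node_info": node_info.get()})

node_info.set({"node_name": "my_model"})  # set once per unit of work
log("compiling")                          # enriched automatically
```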
@@ -1,4 +1,5 @@
from dataclasses import dataclass
+from enum import Enum
import os
import threading
from datetime import datetime
@@ -43,6 +44,16 @@ def get_thread_name() -> str:
    return threading.current_thread().name


+# EventLevel is an Enum, but mixing in the 'str' type is suggested in the Python
+# documentation, and provides support for json conversion, which fails otherwise.
+class EventLevel(str, Enum):
+    DEBUG = "debug"
+    TEST = "test"
+    INFO = "info"
+    WARN = "warn"
+    ERROR = "error"
+
+
@dataclass
class BaseEvent:
    """BaseEvent for proto message generated python events"""
@@ -62,15 +73,15 @@ class BaseEvent:
        self.info.code = self.code()
        self.info.name = type(self).__name__

-    def level_tag(self) -> str:
-        return "debug"
-
    # This is here because although we know that info should always
    # exist, mypy doesn't.
-    def log_level(self) -> str:
+    def log_level(self) -> EventLevel:
        return self.info.level  # type: ignore

-    def message(self):
+    def level_tag(self) -> EventLevel:
+        return EventLevel.DEBUG
+
+    def message(self) -> str:
        raise Exception("message() not implemented for event")


@@ -85,32 +96,32 @@ class DynamicLevel(BaseEvent):
class TestLevel(BaseEvent):
    __test__ = False

-    def level_tag(self) -> str:
-        return "test"
+    def level_tag(self) -> EventLevel:
+        return EventLevel.TEST


@dataclass  # type: ignore[misc]
class DebugLevel(BaseEvent):
-    def level_tag(self) -> str:
-        return "debug"
+    def level_tag(self) -> EventLevel:
+        return EventLevel.DEBUG


@dataclass  # type: ignore[misc]
class InfoLevel(BaseEvent):
-    def level_tag(self) -> str:
-        return "info"
+    def level_tag(self) -> EventLevel:
+        return EventLevel.INFO


@dataclass  # type: ignore[misc]
class WarnLevel(BaseEvent):
-    def level_tag(self) -> str:
-        return "warn"
+    def level_tag(self) -> EventLevel:
+        return EventLevel.WARN


@dataclass  # type: ignore[misc]
class ErrorLevel(BaseEvent):
-    def level_tag(self) -> str:
-        return "error"
+    def level_tag(self) -> EventLevel:
+        return EventLevel.ERROR


# Included to ensure classes with str-type message members are initialized correctly.
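The comment on `EventLevel` explains the `str` mixin. A quick, runnable illustration (assumed example) of the JSON behavior it refers to:

```python
# A plain Enum member is not JSON serializable; a str subclass serializes as
# its value with no custom encoder.
import json
from enum import Enum

class Plain(Enum):
    INFO = "info"

class Str(str, Enum):
    INFO = "info"

print(json.dumps({"level": Str.INFO}))  # {"level": "info"}
try:
    json.dumps({"level": Plain.INFO})
except TypeError as e:
    print(e)  # Object of type Plain is not JSON serializable
```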
core/dbt/events/contextvars.py (new file, 84 lines)
@@ -0,0 +1,84 @@
import contextlib
import contextvars

from typing import Any, Generator, Mapping, Dict
from dbt.events.proto_types import NodeInfo


LOG_PREFIX = "log_"
LOG_PREFIX_LEN = len(LOG_PREFIX)

_log_context_vars: Dict[str, contextvars.ContextVar] = {}


def get_contextvars() -> Dict[str, Any]:
    rv = {}
    ctx = contextvars.copy_context()

    for k in ctx:
        if k.name.startswith(LOG_PREFIX) and ctx[k] is not Ellipsis:
            rv[k.name[LOG_PREFIX_LEN:]] = ctx[k]

    return rv


def get_node_info():
    cvars = get_contextvars()
    if "node_info" in cvars:
        return cvars["node_info"]
    else:
        return NodeInfo()


def clear_contextvars() -> None:
    ctx = contextvars.copy_context()
    for k in ctx:
        if k.name.startswith(LOG_PREFIX):
            k.set(Ellipsis)


# put keys and values into context. Returns the contextvar.Token mapping
# Save and pass to reset_contextvars
def set_contextvars(**kwargs: Any) -> Mapping[str, contextvars.Token]:
    cvar_tokens = {}
    for k, v in kwargs.items():
        log_key = f"{LOG_PREFIX}{k}"
        try:
            var = _log_context_vars[log_key]
        except KeyError:
            var = contextvars.ContextVar(log_key, default=Ellipsis)
            _log_context_vars[log_key] = var

        cvar_tokens[k] = var.set(v)

    return cvar_tokens


# reset by Tokens
def reset_contextvars(**kwargs: contextvars.Token) -> None:
    for k, v in kwargs.items():
        log_key = f"{LOG_PREFIX}{k}"
        var = _log_context_vars[log_key]
        var.reset(v)


# remove from contextvars
def unset_contextvars(*keys: str) -> None:
    for k in keys:
        if k in _log_context_vars:
            log_key = f"{LOG_PREFIX}{k}"
            _log_context_vars[log_key].set(Ellipsis)


# Context manager or decorator to set and unset the context vars
@contextlib.contextmanager
def log_contextvars(**kwargs: Any) -> Generator[None, None, None]:
    context = get_contextvars()
    saved = {k: context[k] for k in context.keys() & kwargs.keys()}

    set_contextvars(**kwargs)
    try:
        yield
    finally:
        unset_contextvars(*kwargs.keys())
        set_contextvars(**saved)
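A usage sketch of the new module (assumed example; the payload shape is hypothetical): `log_contextvars` scopes values to a block, so every event fired inside it can be enriched, and prior values are restored on exit.

```python
# Scoped enrichment with the context manager defined above.
from dbt.events.contextvars import log_contextvars, get_node_info

with log_contextvars(node_info={"node_name": "my_model"}):  # hypothetical payload
    get_node_info()  # -> {"node_name": "my_model"}
get_node_info()      # -> an empty NodeInfo() again
```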
core/dbt/events/eventmgr.py (new file, 212 lines)
@@ -0,0 +1,212 @@
from colorama import Style
from dataclasses import dataclass
from datetime import datetime
from enum import Enum
import json
import logging
from logging.handlers import RotatingFileHandler
import threading
from typing import Any, Callable, List, Optional, TextIO
from uuid import uuid4

from dbt.events.base_types import BaseEvent, EventLevel


# A Filter is a function which takes a BaseEvent and returns True if the event
# should be logged, False otherwise.
Filter = Callable[[BaseEvent], bool]


# Default filter which logs every event
def NoFilter(_: BaseEvent) -> bool:
    return True


# A Scrubber removes secrets from an input string, returning a sanitized string.
Scrubber = Callable[[str], str]


# Provide a pass-through scrubber implementation, also used as a default
def NoScrubber(s: str) -> str:
    return s


class LineFormat(Enum):
    PlainText = 1
    DebugText = 2
    Json = 3


# Map from dbt event levels to python log levels
_log_level_map = {
    EventLevel.DEBUG: 10,
    EventLevel.TEST: 10,
    EventLevel.INFO: 20,
    EventLevel.WARN: 30,
    EventLevel.ERROR: 40,
}


# We should consider fixing the problem, but log_level() can return a string for
# DynamicLevel events, even though it is supposed to return an EventLevel. This
# function gets a string for the level, no matter what.
def _get_level_str(e: BaseEvent) -> str:
    return e.log_level().value if isinstance(e.log_level(), EventLevel) else str(e.log_level())


# We need this function for now because the numeric log severity levels in
# Python do not match those for logbook, so we have to explicitly call the
# correct function by name.
def send_to_logger(l, level: str, log_line: str):
    if level == "test":
        l.debug(log_line)
    elif level == "debug":
        l.debug(log_line)
    elif level == "info":
        l.info(log_line)
    elif level == "warn":
        l.warning(log_line)
    elif level == "error":
        l.error(log_line)
    else:
        raise AssertionError(
            f"While attempting to log {log_line}, encountered the unhandled level: {level}"
        )


@dataclass
class LoggerConfig:
    name: str
    filter: Filter = NoFilter
    scrubber: Scrubber = NoScrubber
    line_format: LineFormat = LineFormat.PlainText
    level: EventLevel = EventLevel.WARN
    use_colors: bool = False
    output_stream: Optional[TextIO] = None
    output_file_name: Optional[str] = None
    logger: Optional[Any] = None


class _Logger:
    def __init__(self, event_manager: "EventManager", config: LoggerConfig) -> None:
        self.name: str = config.name
        self.filter: Filter = config.filter
        self.scrubber: Scrubber = config.scrubber
        self.level: EventLevel = config.level
        self.event_manager: EventManager = event_manager
        self._python_logger: Optional[logging.Logger] = config.logger
        self._stream: Optional[TextIO] = config.output_stream

        if config.output_file_name:
            log = logging.getLogger(config.name)
            log.setLevel(_log_level_map[config.level])
            handler = RotatingFileHandler(
                filename=str(config.output_file_name),
                encoding="utf8",
                maxBytes=10 * 1024 * 1024,  # 10 mb
                backupCount=5,
            )

            handler.setFormatter(logging.Formatter(fmt="%(message)s"))
            log.handlers.clear()
            log.addHandler(handler)

            self._python_logger = log

    def create_line(self, e: BaseEvent) -> str:
        raise NotImplementedError()

    def write_line(self, e: BaseEvent):
        line = self.create_line(e)
        python_level = _log_level_map[e.log_level()]
        if self._python_logger is not None:
            send_to_logger(self._python_logger, _get_level_str(e), line)
        elif self._stream is not None and _log_level_map[self.level] <= python_level:
            self._stream.write(line + "\n")

    def flush(self):
        if self._python_logger is not None:
            for handler in self._python_logger.handlers:
                handler.flush()
        elif self._stream is not None:
            self._stream.flush()


class _TextLogger(_Logger):
    def __init__(self, event_manager: "EventManager", config: LoggerConfig) -> None:
        super().__init__(event_manager, config)
        self.use_colors = config.use_colors
        self.use_debug_format = config.line_format == LineFormat.DebugText

    def create_line(self, e: BaseEvent) -> str:
        return self.create_debug_line(e) if self.use_debug_format else self.create_info_line(e)

    def create_info_line(self, e: BaseEvent) -> str:
        ts: str = datetime.utcnow().strftime("%H:%M:%S")
        scrubbed_msg: str = self.scrubber(e.message())  # type: ignore
        return f"{self._get_color_tag()}{ts} {scrubbed_msg}"

    def create_debug_line(self, e: BaseEvent) -> str:
        log_line: str = ""
        # Create a separator if this is the beginning of an invocation
        # TODO: This is an ugly hack, get rid of it if we can
        if type(e).__name__ == "MainReportVersion":
            separator = 30 * "="
            log_line = f"\n\n{separator} {datetime.utcnow()} | {self.event_manager.invocation_id} {separator}\n"
        ts: str = datetime.utcnow().strftime("%H:%M:%S.%f")
        scrubbed_msg: str = self.scrubber(e.message())  # type: ignore
        level = _get_level_str(e)
        log_line += (
            f"{self._get_color_tag()}{ts} [{level:<5}]{self._get_thread_name()} {scrubbed_msg}"
        )
        return log_line

    def _get_color_tag(self) -> str:
        return "" if not self.use_colors else Style.RESET_ALL

    def _get_thread_name(self) -> str:
        thread_name = ""
        if threading.current_thread().name:
            thread_name = threading.current_thread().name
            thread_name = thread_name[:10]
            thread_name = thread_name.ljust(10, " ")
            thread_name = f" [{thread_name}]:"
        return thread_name


class _JsonLogger(_Logger):
    def create_line(self, e: BaseEvent) -> str:
        from dbt.events.functions import event_to_dict

        event_dict = event_to_dict(e)
        raw_log_line = json.dumps(event_dict, sort_keys=True)
        line = self.scrubber(raw_log_line)  # type: ignore
        return line


class EventManager:
    def __init__(self) -> None:
        self.loggers: List[_Logger] = []
        self.callbacks: List[Callable[[BaseEvent], None]] = []
        self.invocation_id: str = str(uuid4())

    def fire_event(self, e: BaseEvent) -> None:
        for logger in self.loggers:
            if logger.filter(e):  # type: ignore
                logger.write_line(e)

        for callback in self.callbacks:
            callback(e)

    def add_logger(self, config: LoggerConfig):
        logger = (
            _JsonLogger(self, config)
            if config.line_format == LineFormat.Json
            else _TextLogger(self, config)
        )
        logger.event_manager = self
        self.loggers.append(logger)

    def flush(self):
        for logger in self.loggers:
            logger.flush()
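A usage sketch of the new manager (assumed example, not from this diff): `fire_event` fans each event out to every logger whose filter accepts it, then to every callback, which is the hook a host application could use to collect events.

```python
# Wire up an EventManager with a JSON stdout logger and a collecting callback.
import sys
from dbt.events.base_types import EventLevel
from dbt.events.eventmgr import EventManager, LoggerConfig, LineFormat

mgr = EventManager()
seen = []
mgr.callbacks.append(seen.append)  # collect every fired event
mgr.add_logger(
    LoggerConfig(
        name="stdout",
        level=EventLevel.DEBUG,
        line_format=LineFormat.Json,
        output_stream=sys.stdout,
    )
)
# mgr.fire_event(some_event) would JSON-log it to stdout and append it to `seen`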
@@ -1,123 +1,144 @@
import betterproto
-from colorama import Style
-
-from dbt.events.base_types import NoStdOut, BaseEvent, NoFile, Cache
-from dbt.events.types import EventBufferFull, MainReportVersion, EmptyLine
-from dbt.events.proto_types import EventInfo
-from dbt.events.helpers import env_secrets, scrub_secrets
-import dbt.flags as flags
-
from dbt.constants import METADATA_ENV_PREFIX
-
-from dbt.logger import make_log_dir_if_missing, GLOBAL_LOGGER
-from datetime import datetime
+from dbt.events.base_types import BaseEvent, Cache, EventLevel, NoFile, NoStdOut
+from dbt.events.eventmgr import EventManager, LoggerConfig, LineFormat, NoFilter
+from dbt.events.helpers import env_secrets, scrub_secrets
+from dbt.events.proto_types import EventInfo
+from dbt.events.types import EmptyLine
+import dbt.flags as flags
+from dbt.logger import GLOBAL_LOGGER, make_log_dir_if_missing
+from functools import partial
import json
-import io
-from io import StringIO, TextIOWrapper
-import logbook
-import logging
-from logging import Logger
-import sys
-from logging.handlers import RotatingFileHandler
import os
+import sys
+from typing import Callable, Dict, Optional, TextIO
import uuid
-import threading
-from typing import Optional, Union, Callable, Dict
-
-from collections import deque

LOG_VERSION = 3
-EVENT_HISTORY = None
-
-# create the global file logger with no configuration
-FILE_LOG = logging.getLogger("default_file")
-null_handler = logging.NullHandler()
-FILE_LOG.addHandler(null_handler)
-
-# set up logger to go to stdout with defaults
-# setup_event_logger will be called once args have been parsed
-STDOUT_LOG = logging.getLogger("default_stdout")
-STDOUT_LOG.setLevel(logging.INFO)
-stdout_handler = logging.StreamHandler(sys.stdout)
-stdout_handler.setLevel(logging.INFO)
-STDOUT_LOG.addHandler(stdout_handler)
-
-format_color = True
-format_json = False
-invocation_id: Optional[str] = None
metadata_vars: Optional[Dict[str, str]] = None


-def setup_event_logger(log_path, level_override=None):
-    global format_json, format_color, STDOUT_LOG, FILE_LOG
+def setup_event_logger(log_path: str, level_override: Optional[EventLevel] = None):
+    cleanup_event_logger()
    make_log_dir_if_missing(log_path)
+    if flags.ENABLE_LEGACY_LOGGER:
+        EVENT_MANAGER.add_logger(_get_logbook_log_config(level_override))
+    else:
+        EVENT_MANAGER.add_logger(_get_stdout_config(level_override))

-    format_json = flags.LOG_FORMAT == "json"
-    # USE_COLORS can be None if the app just started and the cli flags
-    # havent been applied yet
-    format_color = True if flags.USE_COLORS else False
-    # TODO this default should live somewhere better
-    log_dest = os.path.join(log_path, "dbt.log")
-    level = level_override or (logging.DEBUG if flags.DEBUG else logging.INFO)
+        if _CAPTURE_STREAM:
+            # Create second stdout logger to support tests which want to know what's
+            # being sent to stdout.
+            capture_config = _get_stdout_config(level_override)
+            capture_config.output_stream = _CAPTURE_STREAM
+            EVENT_MANAGER.add_logger(capture_config)

-    # overwrite the STDOUT_LOG logger with the configured one
-    STDOUT_LOG = logging.getLogger("configured_std_out")
-    STDOUT_LOG.setLevel(level)
+    # create and add the file logger to the event manager
+    EVENT_MANAGER.add_logger(_get_logfile_config(os.path.join(log_path, "dbt.log")))

-    FORMAT = "%(message)s"
-    stdout_passthrough_formatter = logging.Formatter(fmt=FORMAT)
-
-    stdout_handler = logging.StreamHandler(sys.stdout)
-    stdout_handler.setFormatter(stdout_passthrough_formatter)
-    stdout_handler.setLevel(level)
-    # clear existing stdout TextIOWrapper stream handlers
-    STDOUT_LOG.handlers = [
-        h
-        for h in STDOUT_LOG.handlers
-        if not (hasattr(h, "stream") and isinstance(h.stream, TextIOWrapper))  # type: ignore
-    ]
-    STDOUT_LOG.addHandler(stdout_handler)

-    # overwrite the FILE_LOG logger with the configured one
-    FILE_LOG = logging.getLogger("configured_file")
-    FILE_LOG.setLevel(logging.DEBUG)  # always debug regardless of user input
-
-    file_passthrough_formatter = logging.Formatter(fmt=FORMAT)
-
-    file_handler = RotatingFileHandler(
-        filename=log_dest, encoding="utf8", maxBytes=10 * 1024 * 1024, backupCount=5  # 10 mb
-    )
-    file_handler.setFormatter(file_passthrough_formatter)
-    file_handler.setLevel(logging.DEBUG)  # always debug regardless of user input
-    FILE_LOG.handlers.clear()
-    FILE_LOG.addHandler(file_handler)
+def _get_stdout_config(level: Optional[EventLevel] = None) -> LoggerConfig:
+    fmt = LineFormat.PlainText
+    if flags.LOG_FORMAT == "json":
+        fmt = LineFormat.Json
+    elif flags.DEBUG:
+        fmt = LineFormat.DebugText
+
+    return LoggerConfig(
+        name="stdout_log",
+        level=level or (EventLevel.DEBUG if flags.DEBUG else EventLevel.INFO),
+        use_colors=bool(flags.USE_COLORS),
+        line_format=fmt,
+        scrubber=env_scrubber,
+        filter=partial(
+            _stdout_filter, bool(flags.LOG_CACHE_EVENTS), bool(flags.DEBUG), bool(flags.QUIET)
+        ),
+        output_stream=sys.stdout,
+    )


+def _stdout_filter(
+    log_cache_events: bool, debug_mode: bool, quiet_mode: bool, evt: BaseEvent
+) -> bool:
+    return (
+        not isinstance(evt, NoStdOut)
+        and (not isinstance(evt, Cache) or log_cache_events)
+        and (evt.log_level() != EventLevel.DEBUG or debug_mode)
+        and (evt.log_level() == EventLevel.ERROR or not quiet_mode)
+        and not (flags.LOG_FORMAT == "json" and type(evt) == EmptyLine)
+    )


+def _get_logfile_config(log_path: str) -> LoggerConfig:
+    return LoggerConfig(
+        name="file_log",
+        line_format=LineFormat.Json if flags.LOG_FORMAT == "json" else LineFormat.DebugText,
+        use_colors=bool(flags.USE_COLORS),
+        level=EventLevel.DEBUG,  # File log is *always* debug level
+        scrubber=env_scrubber,
+        filter=partial(_logfile_filter, bool(flags.LOG_CACHE_EVENTS)),
+        output_file_name=log_path,
+    )


+def _logfile_filter(log_cache_events: bool, evt: BaseEvent) -> bool:
+    return (
+        not isinstance(evt, NoFile)
+        and not (isinstance(evt, Cache) and not log_cache_events)
+        and not (flags.LOG_FORMAT == "json" and type(evt) == EmptyLine)
+    )


+def _get_logbook_log_config(level: Optional[EventLevel] = None) -> LoggerConfig:
+    config = _get_stdout_config(level)
+    config.name = "logbook_log"
+    config.filter = NoFilter if flags.LOG_CACHE_EVENTS else lambda e: not isinstance(e, Cache)
+    config.logger = GLOBAL_LOGGER
+    return config


+def env_scrubber(msg: str) -> str:
+    return scrub_secrets(msg, env_secrets())


+def cleanup_event_logger():
+    # Reset to a no-op manager to release streams associated with logs. This is
+    # especially important for tests, since pytest replaces the stdout stream
+    # during test runs, and closes the stream after the test is over.
+    EVENT_MANAGER.loggers.clear()
+    EVENT_MANAGER.callbacks.clear()


+# Since dbt-rpc does not do its own log setup, and since some events can
+# currently fire before logs can be configured by setup_event_logger(), we
+# create a default configuration with default settings and no file output.
+EVENT_MANAGER: EventManager = EventManager()
+EVENT_MANAGER.add_logger(
+    _get_logbook_log_config() if flags.ENABLE_LEGACY_LOGGER else _get_stdout_config()
+)


+# This global, and the following two functions for capturing stdout logs are
+# an unpleasant hack we intend to remove as part of API-ification. The GitHub
+# issue #6350 was opened for that work.
+_CAPTURE_STREAM: Optional[TextIO] = None


# used for integration tests
-def capture_stdout_logs() -> StringIO:
-    global STDOUT_LOG
-    capture_buf = io.StringIO()
-    stdout_capture_handler = logging.StreamHandler(capture_buf)
-    stdout_handler.setLevel(logging.DEBUG)
-    STDOUT_LOG.addHandler(stdout_capture_handler)
-    return capture_buf
+def capture_stdout_logs(stream: TextIO):
+    global _CAPTURE_STREAM
+    _CAPTURE_STREAM = stream


# used for integration tests
-def stop_capture_stdout_logs() -> None:
-    global STDOUT_LOG
-    STDOUT_LOG.handlers = [
-        h
-        for h in STDOUT_LOG.handlers
-        if not (hasattr(h, "stream") and isinstance(h.stream, StringIO))  # type: ignore
-    ]
+def stop_capture_stdout_logs():
+    global _CAPTURE_STREAM
+    _CAPTURE_STREAM = None


# returns a dictionary representation of the event fields.
# the message may contain secrets which must be scrubbed at the usage site.
-def event_to_json(
-    event: BaseEvent,
-) -> str:
+def event_to_json(event: BaseEvent) -> str:
    event_dict = event_to_dict(event)
    raw_log_line = json.dumps(event_dict, sort_keys=True)
    return raw_log_line
@@ -126,92 +147,16 @@ def event_to_json(
def event_to_dict(event: BaseEvent) -> dict:
    event_dict = dict()
    try:
        # We could use to_json here, but it wouldn't sort the keys.
        # The 'to_json' method just does json.dumps on the dict anyway.
        event_dict = event.to_dict(casing=betterproto.Casing.SNAKE, include_default_values=True)  # type: ignore
    except AttributeError as exc:
        event_type = type(event).__name__
        raise Exception(f"type {event_type} is not serializable. {str(exc)}")
+    # We don't want an empty NodeInfo in output
+    if "node_info" in event_dict and event_dict["node_info"]["node_name"] == "":
+        del event_dict["node_info"]
    return event_dict


-# translates an Event to a completely formatted text-based log line
-# type hinting everything as strings so we don't get any unintentional string conversions via str()
-def reset_color() -> str:
-    global format_color
-    return "" if not format_color else Style.RESET_ALL
-
-
-def create_info_text_log_line(e: BaseEvent) -> str:
-    color_tag: str = reset_color()
-    ts: str = get_ts().strftime("%H:%M:%S")  # TODO: get this from the event.ts?
-    scrubbed_msg: str = scrub_secrets(e.message(), env_secrets())
-    log_line: str = f"{color_tag}{ts} {scrubbed_msg}"
-    return log_line
-
-
-def create_debug_text_log_line(e: BaseEvent) -> str:
-    log_line: str = ""
-    # Create a separator if this is the beginning of an invocation
-    if type(e) == MainReportVersion:
-        separator = 30 * "="
-        log_line = f"\n\n{separator} {get_ts()} | {get_invocation_id()} {separator}\n"
-    color_tag: str = reset_color()
-    ts: str = get_ts().strftime("%H:%M:%S.%f")
-    scrubbed_msg: str = scrub_secrets(e.message(), env_secrets())
-    # Make the levels all 5 characters so they line up
-    level: str = f"{e.log_level():<5}"
-    thread = ""
-    if threading.current_thread().name:
-        thread_name = threading.current_thread().name
-        thread_name = thread_name[:10]
-        thread_name = thread_name.ljust(10, " ")
-        thread = f" [{thread_name}]:"
-    log_line = log_line + f"{color_tag}{ts} [{level}]{thread} {scrubbed_msg}"
-    return log_line
-
-
-# translates an Event to a completely formatted json log line
-def create_json_log_line(e: BaseEvent) -> Optional[str]:
-    if type(e) == EmptyLine:
-        return None  # will not be sent to logger
-    raw_log_line = event_to_json(e)
-    return scrub_secrets(raw_log_line, env_secrets())
-
-
-# calls create_stdout_text_log_line() or create_json_log_line() according to logger config
-def create_log_line(e: BaseEvent, file_output=False) -> Optional[str]:
-    global format_json
-    if format_json:
-        return create_json_log_line(e)  # json output, both console and file
-    elif file_output is True or flags.DEBUG:
-        return create_debug_text_log_line(e)  # default file output
-    else:
-        return create_info_text_log_line(e)  # console output
-
-
-# allows for reuse of this obnoxious if else tree.
-# do not use for exceptions, it doesn't pass along exc_info, stack_info, or extra
-def send_to_logger(l: Union[Logger, logbook.Logger], level: str, log_line: str):
-    if not log_line:
-        return
-    if level == "test":
-        # TODO after implmenting #3977 send to new test level
-        l.debug(log_line)
-    elif level == "debug":
-        l.debug(log_line)
-    elif level == "info":
-        l.info(log_line)
-    elif level == "warn":
-        l.warning(log_line)
-    elif level == "error":
-        l.error(log_line)
-    else:
-        raise AssertionError(
-            f"While attempting to log {log_line}, encountered the unhandled level: {level}"
-        )


def warn_or_error(event, node=None):
    if flags.WARN_ERROR:
        from dbt.exceptions import raise_compiler_error
@@ -233,39 +178,7 @@ def fire_event_if(conditional: bool, lazy_e: Callable[[], BaseEvent]) -> None:
# (i.e. - mutating the event history, printing to stdout, logging
# to files, etc.)
def fire_event(e: BaseEvent) -> None:
-    # skip logs when `--log-cache-events` is not passed
-    if isinstance(e, Cache) and not flags.LOG_CACHE_EVENTS:
-        return
-
-    add_to_event_history(e)
-
-    # backwards compatibility for plugins that require old logger (dbt-rpc)
-    if flags.ENABLE_LEGACY_LOGGER:
-        # using Event::message because the legacy logger didn't differentiate messages by
-        # destination
-        log_line = create_log_line(e)
-        if log_line:
-            send_to_logger(GLOBAL_LOGGER, level=e.log_level(), log_line=log_line)
-        return  # exit the function to avoid using the current logger as well
-
-    # always logs debug level regardless of user input
-    if not isinstance(e, NoFile):
-        log_line = create_log_line(e, file_output=True)
-        # doesn't send exceptions to exception logger
-        if log_line:
-            send_to_logger(FILE_LOG, level=e.log_level(), log_line=log_line)
-
-    if not isinstance(e, NoStdOut):
-        # explicitly checking the debug flag here so that potentially expensive-to-construct
-        # log messages are not constructed if debug messages are never shown.
-        if e.log_level() == "debug" and not flags.DEBUG:
-            return  # eat the message in case it was one of the expensive ones
-        if e.log_level() != "error" and flags.QUIET:
-            return  # eat all non-exception messages in quiet mode
-
-        log_line = create_log_line(e)
-        if log_line:
-            send_to_logger(STDOUT_LOG, level=e.log_level(), log_line=log_line)
+    EVENT_MANAGER.fire_event(e)


def get_metadata_vars() -> Dict[str, str]:
@@ -285,47 +198,13 @@ def reset_metadata_vars() -> None:


def get_invocation_id() -> str:
-    global invocation_id
-    if invocation_id is None:
-        invocation_id = str(uuid.uuid4())
-    return invocation_id
+    return EVENT_MANAGER.invocation_id


def set_invocation_id() -> None:
    # This is primarily for setting the invocation_id for separate
    # commands in the dbt servers. It shouldn't be necessary for the CLI.
-    global invocation_id
-    invocation_id = str(uuid.uuid4())
-
-
-# exactly one time stamp per concrete event
-def get_ts() -> datetime:
-    ts = datetime.utcnow()
-    return ts
-
-
-# preformatted time stamp
-def get_ts_rfc3339() -> str:
-    ts = get_ts()
-    ts_rfc3339 = ts.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
-    return ts_rfc3339
-
-
-def add_to_event_history(event):
-    if flags.EVENT_BUFFER_SIZE == 0:
-        return
-    global EVENT_HISTORY
-    if EVENT_HISTORY is None:
-        reset_event_history()
-    EVENT_HISTORY.append(event)
-    # We only set the EventBufferFull message for event buffers >= 10,000
-    if flags.EVENT_BUFFER_SIZE >= 10000 and len(EVENT_HISTORY) == (flags.EVENT_BUFFER_SIZE - 1):
-        fire_event(EventBufferFull())
-
-
-def reset_event_history():
-    global EVENT_HISTORY
-    EVENT_HISTORY = deque(maxlen=flags.EVENT_BUFFER_SIZE)
+    EVENT_MANAGER.invocation_id = str(uuid.uuid4())


# Currently used to set the level in EventInfo, so logging events can
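The stdout-capture API for tests inverts in this change: the caller now owns the buffer and passes it in, instead of receiving one back. A test-side sketch (assumed example, not from this diff):

```python
# Register a caller-owned stream, run the invocation, then deregister.
import io
from dbt.events.functions import capture_stdout_logs, stop_capture_stdout_logs

buf = io.StringIO()
capture_stdout_logs(buf)  # setup_event_logger adds a capture logger for this stream
try:
    pass  # ... run the dbt invocation under test ...
finally:
    stop_capture_stdout_logs()
print(buf.getvalue())
```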
@@ -345,9 +345,10 @@ class AdapterEventDebug(betterproto.Message):
    """E001"""

    info: "EventInfo" = betterproto.message_field(1)
-    name: str = betterproto.string_field(2)
-    base_msg: str = betterproto.string_field(3)
-    args: List[str] = betterproto.string_field(4)
+    node_info: "NodeInfo" = betterproto.message_field(2)
+    name: str = betterproto.string_field(3)
+    base_msg: str = betterproto.string_field(4)
+    args: List[str] = betterproto.string_field(5)


@dataclass
@@ -355,9 +356,10 @@ class AdapterEventInfo(betterproto.Message):
    """E002"""

    info: "EventInfo" = betterproto.message_field(1)
-    name: str = betterproto.string_field(2)
-    base_msg: str = betterproto.string_field(3)
-    args: List[str] = betterproto.string_field(4)
+    node_info: "NodeInfo" = betterproto.message_field(2)
+    name: str = betterproto.string_field(3)
+    base_msg: str = betterproto.string_field(4)
+    args: List[str] = betterproto.string_field(5)


@dataclass
@@ -365,9 +367,10 @@ class AdapterEventWarning(betterproto.Message):
    """E003"""

    info: "EventInfo" = betterproto.message_field(1)
-    name: str = betterproto.string_field(2)
-    base_msg: str = betterproto.string_field(3)
-    args: List[str] = betterproto.string_field(4)
+    node_info: "NodeInfo" = betterproto.message_field(2)
+    name: str = betterproto.string_field(3)
+    base_msg: str = betterproto.string_field(4)
+    args: List[str] = betterproto.string_field(5)


@dataclass
@@ -375,10 +378,11 @@ class AdapterEventError(betterproto.Message):
    """E004"""

    info: "EventInfo" = betterproto.message_field(1)
-    name: str = betterproto.string_field(2)
-    base_msg: str = betterproto.string_field(3)
-    args: List[str] = betterproto.string_field(4)
-    exc_info: str = betterproto.string_field(5)
+    node_info: "NodeInfo" = betterproto.message_field(2)
+    name: str = betterproto.string_field(3)
+    base_msg: str = betterproto.string_field(4)
+    args: List[str] = betterproto.string_field(5)
+    exc_info: str = betterproto.string_field(6)


@dataclass
@@ -386,8 +390,9 @@ class NewConnection(betterproto.Message):
    """E005"""

    info: "EventInfo" = betterproto.message_field(1)
-    conn_type: str = betterproto.string_field(2)
-    conn_name: str = betterproto.string_field(3)
+    node_info: "NodeInfo" = betterproto.message_field(2)
+    conn_type: str = betterproto.string_field(3)
+    conn_name: str = betterproto.string_field(4)


@dataclass
@@ -419,8 +424,9 @@ class RollbackFailed(betterproto.Message):
    """E009"""

    info: "EventInfo" = betterproto.message_field(1)
-    conn_name: str = betterproto.string_field(2)
-    exc_info: str = betterproto.string_field(3)
+    node_info: "NodeInfo" = betterproto.message_field(2)
+    conn_name: str = betterproto.string_field(3)
+    exc_info: str = betterproto.string_field(4)


@dataclass
@@ -428,7 +434,8 @@ class ConnectionClosed(betterproto.Message):
    """E010"""

    info: "EventInfo" = betterproto.message_field(1)
-    conn_name: str = betterproto.string_field(2)
+    node_info: "NodeInfo" = betterproto.message_field(2)
+    conn_name: str = betterproto.string_field(3)


@dataclass
@@ -436,7 +443,8 @@ class ConnectionLeftOpen(betterproto.Message):
    """E011"""

    info: "EventInfo" = betterproto.message_field(1)
-    conn_name: str = betterproto.string_field(2)
+    node_info: "NodeInfo" = betterproto.message_field(2)
+    conn_name: str = betterproto.string_field(3)


@dataclass
@@ -444,7 +452,8 @@ class Rollback(betterproto.Message):
    """E012"""

    info: "EventInfo" = betterproto.message_field(1)
-    conn_name: str = betterproto.string_field(2)
+    node_info: "NodeInfo" = betterproto.message_field(2)
+    conn_name: str = betterproto.string_field(3)


@dataclass
@@ -472,8 +481,9 @@ class ConnectionUsed(betterproto.Message):
    """E015"""

    info: "EventInfo" = betterproto.message_field(1)
-    conn_type: str = betterproto.string_field(2)
-    conn_name: str = betterproto.string_field(3)
+    node_info: "NodeInfo" = betterproto.message_field(2)
+    conn_type: str = betterproto.string_field(3)
+    conn_name: str = betterproto.string_field(4)


@dataclass
@@ -481,8 +491,9 @@ class SQLQuery(betterproto.Message):
    """E016"""

    info: "EventInfo" = betterproto.message_field(1)
-    conn_name: str = betterproto.string_field(2)
-    sql: str = betterproto.string_field(3)
+    node_info: "NodeInfo" = betterproto.message_field(2)
+    conn_name: str = betterproto.string_field(3)
+    sql: str = betterproto.string_field(4)


@dataclass
@@ -490,8 +501,9 @@ class SQLQueryStatus(betterproto.Message):
    """E017"""

    info: "EventInfo" = betterproto.message_field(1)
-    status: str = betterproto.string_field(2)
-    elapsed: float = betterproto.float_field(3)
+    node_info: "NodeInfo" = betterproto.message_field(2)
+    status: str = betterproto.string_field(3)
+    elapsed: float = betterproto.float_field(4)


@dataclass
@@ -499,7 +511,8 @@ class SQLCommit(betterproto.Message):
    """E018"""

    info: "EventInfo" = betterproto.message_field(1)
-    conn_name: str = betterproto.string_field(2)
+    node_info: "NodeInfo" = betterproto.message_field(2)
+    conn_name: str = betterproto.string_field(3)


@dataclass
@@ -667,7 +680,8 @@ class NewConnectionOpening(betterproto.Message):
    """E037"""

    info: "EventInfo" = betterproto.message_field(1)
-    connection_state: str = betterproto.string_field(2)
+    node_info: "NodeInfo" = betterproto.message_field(2)
+    connection_state: str = betterproto.string_field(3)


@dataclass
@@ -1247,7 +1261,8 @@ class JinjaLogWarning(betterproto.Message):
    """I061"""

    info: "EventInfo" = betterproto.message_field(1)
-    msg: str = betterproto.string_field(2)
+    node_info: "NodeInfo" = betterproto.message_field(2)
+    msg: str = betterproto.string_field(3)


@dataclass
@@ -1339,7 +1354,8 @@ class JinjaLogInfo(betterproto.Message):
    """M011"""

    info: "EventInfo" = betterproto.message_field(1)
-    msg: str = betterproto.string_field(2)
+    node_info: "NodeInfo" = betterproto.message_field(2)
+    msg: str = betterproto.string_field(3)


@dataclass
@@ -1347,7 +1363,8 @@ class JinjaLogDebug(betterproto.Message):
    """M012"""

    info: "EventInfo" = betterproto.message_field(1)
-    msg: str = betterproto.string_field(2)
+    node_info: "NodeInfo" = betterproto.message_field(2)
+    msg: str = betterproto.string_field(3)


@dataclass
@@ -1647,7 +1664,6 @@ class NodeStart(betterproto.Message):

    info: "EventInfo" = betterproto.message_field(1)
    node_info: "NodeInfo" = betterproto.message_field(2)
    unique_id: str = betterproto.string_field(3)


@dataclass
@@ -1656,7 +1672,6 @@ class NodeFinished(betterproto.Message):

    info: "EventInfo" = betterproto.message_field(1)
    node_info: "NodeInfo" = betterproto.message_field(2)
    unique_id: str = betterproto.string_field(3)
    run_result: "RunResultMsg" = betterproto.message_field(4)


@@ -1675,14 +1690,7 @@ class ConcurrencyLine(betterproto.Message):
    info: "EventInfo" = betterproto.message_field(1)
    num_threads: int = betterproto.int32_field(2)
    target_name: str = betterproto.string_field(3)
+    node_count: int = betterproto.int32_field(4)
-
-
-@dataclass
-class CompilingNode(betterproto.Message):
-    """Q028"""
-
-    info: "EventInfo" = betterproto.message_field(1)
-    unique_id: str = betterproto.string_field(2)


@dataclass
@@ -1690,7 +1698,7 @@ class WritingInjectedSQLForNode(betterproto.Message):
    """Q029"""

    info: "EventInfo" = betterproto.message_field(1)
-    unique_id: str = betterproto.string_field(2)
+    node_info: "NodeInfo" = betterproto.message_field(2)


@dataclass
@@ -1699,7 +1707,6 @@ class NodeCompiling(betterproto.Message):

    info: "EventInfo" = betterproto.message_field(1)
    node_info: "NodeInfo" = betterproto.message_field(2)
    unique_id: str = betterproto.string_field(3)


@dataclass
@@ -1708,7 +1715,6 @@ class NodeExecuting(betterproto.Message):

    info: "EventInfo" = betterproto.message_field(1)
    node_info: "NodeInfo" = betterproto.message_field(2)
    unique_id: str = betterproto.string_field(3)


@dataclass
@@ -1786,8 +1792,9 @@ class CatchableExceptionOnRun(betterproto.Message):
    """W002"""

    info: "EventInfo" = betterproto.message_field(1)
-    exc: str = betterproto.string_field(2)
-    exc_info: str = betterproto.string_field(3)
+    node_info: "NodeInfo" = betterproto.message_field(2)
+    exc: str = betterproto.string_field(3)
+    exc_info: str = betterproto.string_field(4)


@dataclass
@@ -1905,6 +1912,8 @@ class TimingInfoCollected(betterproto.Message):
    """Z010"""

    info: "EventInfo" = betterproto.message_field(1)
+    node_info: "NodeInfo" = betterproto.message_field(2)
+    timing_info: "TimingInfoMsg" = betterproto.message_field(3)


@dataclass
@@ -2150,13 +2159,6 @@ class TrackingInitializeFailure(betterproto.Message):
    exc_info: str = betterproto.string_field(2)


-@dataclass
-class EventBufferFull(betterproto.Message):
-    """Z045"""
-
-    info: "EventInfo" = betterproto.message_field(1)
-
-
@dataclass
class RunResultWarningMessage(betterproto.Message):
    """Z046"""
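A hedged sketch (assumed field shapes, not from this diff) of why the renumbering above matters: in betterproto, each dataclass field is bound to a protobuf tag number, so inserting `node_info` at tag 2 pushes every later field's tag up by one, which deliberately changes the wire layout of these messages.

```python
# Illustrative only: tag numbers, not field order, define the wire format.
import betterproto
from dataclasses import dataclass

@dataclass
class Example(betterproto.Message):
    info: str = betterproto.string_field(1)
    node_info: str = betterproto.string_field(2)  # newly inserted tag 2
    msg: str = betterproto.string_field(3)        # previously tag 2, now tag 3
```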
@@ -265,41 +265,46 @@ message ExposureNameDeprecation {

 // E001
 message AdapterEventDebug {
   EventInfo info = 1;
-  string name = 2;
-  string base_msg = 3;
-  repeated string args = 4;
+  NodeInfo node_info = 2;
+  string name = 3;
+  string base_msg = 4;
+  repeated string args = 5;
 }

 // E002
 message AdapterEventInfo {
   EventInfo info = 1;
-  string name = 2;
-  string base_msg = 3;
-  repeated string args = 4;
+  NodeInfo node_info = 2;
+  string name = 3;
+  string base_msg = 4;
+  repeated string args = 5;
 }

 // E003
 message AdapterEventWarning {
   EventInfo info = 1;
-  string name = 2;
-  string base_msg = 3;
-  repeated string args = 4;
+  NodeInfo node_info = 2;
+  string name = 3;
+  string base_msg = 4;
+  repeated string args = 5;
 }

 // E004
 message AdapterEventError {
   EventInfo info = 1;
-  string name = 2;
-  string base_msg = 3;
-  repeated string args = 4;
-  string exc_info = 5;
+  NodeInfo node_info = 2;
+  string name = 3;
+  string base_msg = 4;
+  repeated string args = 5;
+  string exc_info = 6;
 }

 // E005
 message NewConnection {
   EventInfo info = 1;
-  string conn_type = 2;
-  string conn_name = 3;
+  NodeInfo node_info = 2;
+  string conn_type = 3;
+  string conn_name = 4;
 }

 // E006
@@ -323,26 +328,30 @@ message ConnectionClosedInCleanup {

 // E009
 message RollbackFailed {
   EventInfo info = 1;
-  string conn_name = 2;
-  string exc_info = 3;
+  NodeInfo node_info = 2;
+  string conn_name = 3;
+  string exc_info = 4;
 }

 // E010
 message ConnectionClosed {
   EventInfo info = 1;
-  string conn_name = 2;
+  NodeInfo node_info = 2;
+  string conn_name = 3;
 }

 // E011
 message ConnectionLeftOpen {
   EventInfo info = 1;
-  string conn_name = 2;
+  NodeInfo node_info = 2;
+  string conn_name = 3;
 }

 // E012
 message Rollback {
   EventInfo info = 1;
-  string conn_name = 2;
+  NodeInfo node_info = 2;
+  string conn_name = 3;
 }

 // E013
@@ -364,28 +373,32 @@ message ListRelations {

 // E015
 message ConnectionUsed {
   EventInfo info = 1;
-  string conn_type = 2;
-  string conn_name = 3;
+  NodeInfo node_info = 2;
+  string conn_type = 3;
+  string conn_name = 4;
 }

 // E016
 message SQLQuery {
   EventInfo info = 1;
-  string conn_name = 2;
-  string sql = 3;
+  NodeInfo node_info = 2;
+  string conn_name = 3;
+  string sql = 4;
 }

 // E017
 message SQLQueryStatus {
   EventInfo info = 1;
-  string status = 2;
-  float elapsed = 3;
+  NodeInfo node_info = 2;
+  string status = 3;
+  float elapsed = 4;
 }

 // E018
 message SQLCommit {
   EventInfo info = 1;
-  string conn_name = 2;
+  NodeInfo node_info = 2;
+  string conn_name = 3;
 }

 // E019
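Because protobuf identifies values on the wire by field number, not name, renumbering like the hunks above is a breaking change for any consumer still decoding with the old schema. A hedged illustration with a hypothetical old/new pair modeled on SQLQuery; these are stand-in classes, not dbt's actual ones:

# Hypothetical schemas showing why renumbering breaks old-schema readers.
from dataclasses import dataclass

import betterproto


@dataclass
class NodeInfo(betterproto.Message):
    unique_id: str = betterproto.string_field(1)  # illustrative field


@dataclass
class OldSQLQuery(betterproto.Message):
    # Pre-change layout: conn_name = 2, sql = 3.
    conn_name: str = betterproto.string_field(2)
    sql: str = betterproto.string_field(3)


@dataclass
class NewSQLQuery(betterproto.Message):
    # Post-change layout: node_info = 2 shifts the string fields up by one.
    node_info: "NodeInfo" = betterproto.message_field(2)
    conn_name: str = betterproto.string_field(3)
    sql: str = betterproto.string_field(4)


new = NewSQLQuery(
    node_info=NodeInfo(unique_id="model.jaffle.orders"),
    conn_name="master",
    sql="select 1",
)
old = OldSQLQuery().parse(bytes(new))
# Field 3 now carries conn_name, so an old-schema reader sees it as sql,
# and old.conn_name holds the embedded NodeInfo bytes decoded as text.
assert old.sql == "master"

This is why the change lands in a new minor version: producers and consumers of these events must move to the new schema together.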
@@ -507,7 +520,8 @@ message PluginLoadError {

 // E037
 message NewConnectionOpening {
   EventInfo info = 1;
-  string connection_state = 2;
+  NodeInfo node_info = 2;
+  string connection_state = 3;
 }

 // E038
@@ -946,7 +960,8 @@ message NodeNotFoundOrDisabled {

 // I061
 message JinjaLogWarning {
   EventInfo info = 1;
-  string msg = 2;
+  NodeInfo node_info = 2;
+  string msg = 3;
 }

 // M - Deps generation
@@ -1018,13 +1033,15 @@ message SelectorReportInvalidSelector {

 // M011
 message JinjaLogInfo {
   EventInfo info = 1;
-  string msg = 2;
+  NodeInfo node_info = 2;
+  string msg = 3;
 }

 // M012
 message JinjaLogDebug {
   EventInfo info = 1;
-  string msg = 2;
+  NodeInfo node_info = 2;
+  string msg = 3;
 }

 // M013
@@ -1270,14 +1287,12 @@ message DefaultSelector {

 message NodeStart {
   EventInfo info = 1;
   NodeInfo node_info = 2;
-  string unique_id = 3;
 }

 // Q025
 message NodeFinished {
   EventInfo info = 1;
   NodeInfo node_info = 2;
-  string unique_id = 3;
   RunResultMsg run_result = 4;
 }
@@ -1292,32 +1307,27 @@ message ConcurrencyLine {
   EventInfo info = 1;
   int32 num_threads = 2;
   string target_name = 3;
   int32 node_count = 4;
 }

-// Q028
-message CompilingNode {
-  EventInfo info = 1;
-  string unique_id = 2;
-}
+// Skipped Q028

 // Q029
 message WritingInjectedSQLForNode {
   EventInfo info = 1;
-  string unique_id = 2;
+  NodeInfo node_info = 2;
 }

 // Q030
 message NodeCompiling {
   EventInfo info = 1;
   NodeInfo node_info = 2;
   string unique_id = 3;
 }

 // Q031
 message NodeExecuting {
   EventInfo info = 1;
   NodeInfo node_info = 2;
   string unique_id = 3;
 }

 // Q032
@@ -1383,8 +1393,9 @@ message NoNodesSelected {

 // W002
 message CatchableExceptionOnRun {
   EventInfo info = 1;
-  string exc = 2;
-  string exc_info = 3;
+  NodeInfo node_info = 2;
+  string exc = 3;
+  string exc_info = 4;
 }

 // W003
@@ -1476,6 +1487,8 @@ message SystemReportReturnCode {

 // Z010
 message TimingInfoCollected {
   EventInfo info = 1;
+  NodeInfo node_info = 2;
+  TimingInfoMsg timing_info = 3;
 }

 // Z011
@@ -1661,10 +1674,7 @@ message TrackingInitializeFailure {
   string exc_info = 2;
 }

-// Z045
-message EventBufferFull {
-  EventInfo info = 1;
-}
+// Skipped Z045

 // Z046
 message RunResultWarningMessage {
Some files were not shown because too many files have changed in this diff.