Mirror of https://github.com/dbt-labs/dbt-core, synced 2025-12-19 06:31:27 +00:00

# Compare commits

Comparing `adding-sem` ... `test-sqlpa`: 49 commits.
| Author | SHA1 | Date |
|---|---|---|
| | e603d9e2fa | |
| | 7d1c7518f8 | |
| | 30f8c6269a | |
| | 65fcbe7b79 | |
| | f745e7c823 | |
| | 14f966c24f | |
| | d0a6ea96e4 | |
| | f3f1244eb0 | |
| | 0c067350da | |
| | 57a6963913 | |
| | d2c9ee46a1 | |
| | 89fa60d86a | |
| | e39505fec6 | |
| | c82795a99f | |
| | f0f4af5c8d | |
| | 3fe1502c15 | |
| | cc2f259108 | |
| | 51d216186d | |
| | bdf7c1ff82 | |
| | f23dbb6f6b | |
| | ec17ecec86 | |
| | a3d326404d | |
| | c785f320ea | |
| | 1fe0c32890 | |
| | 11752edef8 | |
| | 16cb498b56 | |
| | 03f6655f1a | |
| | 8dc3d0a531 | |
| | c172687736 | |
| | 8a31eab181 | |
| | 4d0ee2fc47 | |
| | ddb2f0f71d | |
| | 2bb2e73df9 | |
| | 790fecab92 | |
| | f6926c0ed6 | |
| | 52be5ffaa6 | |
| | 31bfff81d9 | |
| | 3fbfa6e6f3 | |
| | 165035692f | |
| | 80f76e9e6e | |
| | a2e7249c44 | |
| | 9700ff1866 | |
| | e61a39f27f | |
| | d934e713db | |
| | ef9bb925d3 | |
| | f73359b87c | |
| | b4706c4dec | |
| | b46d35c13f | |
| | eba90863ed | |
.bumpversion.cfg

@@ -1,13 +1,15 @@
 [bumpversion]
-current_version = 1.5.0a1
-parse = (?P<major>\d+)
-	\.(?P<minor>\d+)
-	\.(?P<patch>\d+)
-	((?P<prekind>a|b|rc)
-	(?P<pre>\d+)  # pre-release version num
+current_version = 1.4.5
+parse = (?P<major>[\d]+) # major version number
+	\.(?P<minor>[\d]+) # minor version number
+	\.(?P<patch>[\d]+) # patch version number
+	(((?P<prekind>a|b|rc) # optional pre-release type
+	?(?P<num>[\d]+?)) # optional pre-release version number
+	\.?(?P<nightly>[a-z0-9]+\+[a-z]+)? # optional nightly release indicator
+	)?
 serialize =
-	{major}.{minor}.{patch}{prekind}{pre}
+	{major}.{minor}.{patch}{prekind}{num}.{nightly}
+	{major}.{minor}.{patch}{prekind}{num}
 	{major}.{minor}.{patch}
 commit = False
 tag = False
@@ -21,9 +23,11 @@ values =
 	rc
 	final

-[bumpversion:part:pre]
+[bumpversion:part:num]
 first_value = 1

+[bumpversion:part:nightly]
+
 [bumpversion:file:core/setup.py]

 [bumpversion:file:core/dbt/version.py]
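The new `parse` pattern is easiest to sanity-check outside of bumpversion. Below is a minimal Python sketch (not part of this diff) that compiles the same pattern; `re.VERBOSE` is an assumption made here so the inline `#` comments can be kept, and the three test strings correspond to the three `serialize` formats (final, pre-release, nightly).

```python
import re

# Minimal sketch: the parse pattern from the new .bumpversion.cfg, compiled
# with re.VERBOSE (assumed here) so the inline comments survive.
VERSION_RE = re.compile(
    r"""
    (?P<major>[\d]+)                    # major version number
    \.(?P<minor>[\d]+)                  # minor version number
    \.(?P<patch>[\d]+)                  # patch version number
    (((?P<prekind>a|b|rc)               # optional pre-release type
    ?(?P<num>[\d]+?))                   # optional pre-release version number
    \.?(?P<nightly>[a-z0-9]+\+[a-z]+)?  # optional nightly release indicator
    )?
    """,
    re.VERBOSE,
)

# One example per serialize format: final, pre-release, and nightly.
for version in ("1.4.5", "1.5.0a1", "1.5.0a1.dev03102023+nightly"):
    parts = VERSION_RE.fullmatch(version).groupdict()
    print(version, "->", parts)
```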
@@ -3,7 +3,6 @@
 For information on prior major and minor releases, see their changelogs:

-* [1.4](https://github.com/dbt-labs/dbt-core/blob/1.4.latest/CHANGELOG.md)
 * [1.3](https://github.com/dbt-labs/dbt-core/blob/1.3.latest/CHANGELOG.md)
 * [1.2](https://github.com/dbt-labs/dbt-core/blob/1.2.latest/CHANGELOG.md)
 * [1.1](https://github.com/dbt-labs/dbt-core/blob/1.1.latest/CHANGELOG.md)
126 .changes/1.4.0.md Normal file
@@ -0,0 +1,126 @@
## dbt-core 1.4.0 - January 25, 2023

### Breaking Changes

- Cleaned up exceptions to directly raise in code. Also updated the existing exceptions to meet PEP guidelines. Removed use of all exception functions in the code base and marked them all as deprecated, to be removed next minor release. ([#6339](https://github.com/dbt-labs/dbt-core/issues/6339), [#6393](https://github.com/dbt-labs/dbt-core/issues/6393), [#6460](https://github.com/dbt-labs/dbt-core/issues/6460))

### Features

- Added favor-state flag to optionally favor state nodes even if unselected node exists ([#5016](https://github.com/dbt-labs/dbt-core/issues/5016))
- Update structured logging. Convert to using protobuf messages. Ensure events are enriched with node_info. ([#5610](https://github.com/dbt-labs/dbt-core/issues/5610))
- incremental predicates ([#5680](https://github.com/dbt-labs/dbt-core/issues/5680))
- Friendlier error messages when packages.yml is malformed ([#5486](https://github.com/dbt-labs/dbt-core/issues/5486))
- Allow partitions in external tables to be supplied as a list ([#5929](https://github.com/dbt-labs/dbt-core/issues/5929))
- extend -f flag shorthand for seed command ([#5990](https://github.com/dbt-labs/dbt-core/issues/5990))
- This pulls the profile name from args when constructing a RuntimeConfig in lib.py, enabling the dbt-server to override the value that's in the dbt_project.yml ([#6201](https://github.com/dbt-labs/dbt-core/issues/6201))
- Adding tarball install method for packages. Allowing package tarball to be specified via url in the packages.yaml. ([#4205](https://github.com/dbt-labs/dbt-core/issues/4205))
- Added an md5 function to the base context ([#6246](https://github.com/dbt-labs/dbt-core/issues/6246))
- Exposures support metrics in lineage ([#6057](https://github.com/dbt-labs/dbt-core/issues/6057))
- Add support for Python 3.11 ([#6147](https://github.com/dbt-labs/dbt-core/issues/6147))
- Making timestamp optional for metrics ([#6398](https://github.com/dbt-labs/dbt-core/issues/6398))
- The meta configuration field is now included in the node_info property of structured logs. ([#6216](https://github.com/dbt-labs/dbt-core/issues/6216))
- Adds buildable selection mode ([#6365](https://github.com/dbt-labs/dbt-core/issues/6365))
- --warn-error-options: Treat warnings as errors for specific events, based on user configuration ([#6165](https://github.com/dbt-labs/dbt-core/issues/6165))

### Fixes

- Account for disabled flags on models in schema files more completely ([#3992](https://github.com/dbt-labs/dbt-core/issues/3992))
- Add validation of enabled config for metrics, exposures and sources ([#6030](https://github.com/dbt-labs/dbt-core/issues/6030))
- check length of args of python model function before accessing it ([#6041](https://github.com/dbt-labs/dbt-core/issues/6041))
- Add functors to ensure event types with str-type attributes are initialized to spec, even when provided non-str type params. ([#5436](https://github.com/dbt-labs/dbt-core/issues/5436))
- Allow hooks to fail without halting execution flow ([#5625](https://github.com/dbt-labs/dbt-core/issues/5625))
- fix missing f-strings, convert old .format() messages to f-strings for consistency ([#6241](https://github.com/dbt-labs/dbt-core/issues/6241))
- Clarify Error Message for how many models are allowed in a Python file ([#6245](https://github.com/dbt-labs/dbt-core/issues/6245))
- Fix typo in util.py ([#4904](https://github.com/dbt-labs/dbt-core/issues/4904))
- After this, will be possible to use default values for dbt.config.get ([#6309](https://github.com/dbt-labs/dbt-core/issues/6309))
- Use full path for writing manifest ([#6055](https://github.com/dbt-labs/dbt-core/issues/6055))
- add pre-commit install to make dev script in Makefile ([#6269](https://github.com/dbt-labs/dbt-core/issues/6269))
- Late-rendering for `pre_` and `post_hook`s in `dbt_project.yml` ([#6411](https://github.com/dbt-labs/dbt-core/issues/6411))
- [CT-1284] Change Python model default materialization to table ([#5989](https://github.com/dbt-labs/dbt-core/issues/5989))
- [CT-1591] Don't parse empty Python files ([#6345](https://github.com/dbt-labs/dbt-core/issues/6345))
- Repair a regression which prevented basic logging before the logging subsystem is completely configured. ([#6434](https://github.com/dbt-labs/dbt-core/issues/6434))
- fix docs generate --defer by adding defer_to_manifest to before_run ([#6488](https://github.com/dbt-labs/dbt-core/issues/6488))
- Bug when partial parsing with an empty schema file ([#4850](https://github.com/dbt-labs/dbt-core/issues/4850))
- Fix DBT_FAVOR_STATE env var ([#5859](https://github.com/dbt-labs/dbt-core/issues/5859))
- Restore historical behavior of certain disabled test messages, so that they are at the less obtrusive debug level, rather than the warning level. ([#6501](https://github.com/dbt-labs/dbt-core/issues/6501))
- Bump mashumaro version to get regression fix and add unit test to verify that fix. ([#6428](https://github.com/dbt-labs/dbt-core/issues/6428))
- Call update_event_status earlier for node results. Rename event 'HookFinished' -> FinishedRunningStats ([#6571](https://github.com/dbt-labs/dbt-core/issues/6571))
- Provide backward compatibility for `get_merge_sql` arguments ([#6625](https://github.com/dbt-labs/dbt-core/issues/6625))
- Fix behavior of --favor-state with --defer ([#6617](https://github.com/dbt-labs/dbt-core/issues/6617))
- Include adapter_response in NodeFinished run_result log event ([#6703](https://github.com/dbt-labs/dbt-core/issues/6703))

### Docs

- minor doc correction ([dbt-docs/#5791](https://github.com/dbt-labs/dbt-docs/issues/5791))
- Generate API docs for new CLI interface ([dbt-docs/#5528](https://github.com/dbt-labs/dbt-docs/issues/5528))
- ([dbt-docs/#5880](https://github.com/dbt-labs/dbt-docs/issues/5880))
- Fix rendering of sample code for metrics ([dbt-docs/#323](https://github.com/dbt-labs/dbt-docs/issues/323))
- Alphabetize `core/dbt/README.md` ([dbt-docs/#6368](https://github.com/dbt-labs/dbt-docs/issues/6368))
- Updated minor typos encountered when skipping profile setup ([dbt-docs/#6529](https://github.com/dbt-labs/dbt-docs/issues/6529))

### Under the Hood

- Put black config in explicit config ([#5946](https://github.com/dbt-labs/dbt-core/issues/5946))
- Added flat_graph attribute to the Manifest class's deepcopy() coverage ([#5809](https://github.com/dbt-labs/dbt-core/issues/5809))
- Add mypy configs so `mypy` passes from CLI ([#5983](https://github.com/dbt-labs/dbt-core/issues/5983))
- Exception message cleanup. ([#6023](https://github.com/dbt-labs/dbt-core/issues/6023))
- Add dmypy cache to gitignore ([#6028](https://github.com/dbt-labs/dbt-core/issues/6028))
- Provide useful errors when the value of 'materialized' is invalid ([#5229](https://github.com/dbt-labs/dbt-core/issues/5229))
- Clean up string formatting ([#6068](https://github.com/dbt-labs/dbt-core/issues/6068))
- Fixed extra whitespace in strings introduced by black. ([#1350](https://github.com/dbt-labs/dbt-core/issues/1350))
- Remove the 'root_path' field from most nodes ([#6171](https://github.com/dbt-labs/dbt-core/issues/6171))
- Combine certain logging events with different levels ([#6173](https://github.com/dbt-labs/dbt-core/issues/6173))
- Convert threading tests to pytest ([#5942](https://github.com/dbt-labs/dbt-core/issues/5942))
- Convert postgres index tests to pytest ([#5770](https://github.com/dbt-labs/dbt-core/issues/5770))
- Convert use color tests to pytest ([#5771](https://github.com/dbt-labs/dbt-core/issues/5771))
- Add github actions workflow to generate high level CLI API docs ([#5942](https://github.com/dbt-labs/dbt-core/issues/5942))
- Functionality-neutral refactor of event logging system to improve encapsulation and modularity. ([#6139](https://github.com/dbt-labs/dbt-core/issues/6139))
- Consolidate ParsedNode and CompiledNode classes ([#6383](https://github.com/dbt-labs/dbt-core/issues/6383))
- Prevent doc gen workflow from running on forks ([#6386](https://github.com/dbt-labs/dbt-core/issues/6386))
- Fix intermittent database connection failure in Windows CI test ([#6394](https://github.com/dbt-labs/dbt-core/issues/6394))
- Refactor and clean up manifest nodes ([#6426](https://github.com/dbt-labs/dbt-core/issues/6426))
- Restore important legacy logging behaviors, following refactor which removed them ([#6437](https://github.com/dbt-labs/dbt-core/issues/6437))
- Treat dense text blobs as binary for `git grep` ([#6294](https://github.com/dbt-labs/dbt-core/issues/6294))
- Prune partial parsing logging events ([#6313](https://github.com/dbt-labs/dbt-core/issues/6313))
- Updating the deprecation warning in the metric attributes renamed event ([#6507](https://github.com/dbt-labs/dbt-core/issues/6507))
- [CT-1693] Port severity test to Pytest ([#6466](https://github.com/dbt-labs/dbt-core/issues/6466))
- [CT-1694] Deprecate event tracking tests ([#6467](https://github.com/dbt-labs/dbt-core/issues/6467))
- Reorganize structured logging events to have two top keys ([#6311](https://github.com/dbt-labs/dbt-core/issues/6311))
- Combine some logging events ([#1716](https://github.com/dbt-labs/dbt-core/issues/1716), [#1717](https://github.com/dbt-labs/dbt-core/issues/1717), [#1719](https://github.com/dbt-labs/dbt-core/issues/1719))
- Check length of escaped strings in the adapter test ([#6566](https://github.com/dbt-labs/dbt-core/issues/6566))

### Dependencies

- Update pathspec requirement from ~=0.9.0 to >=0.9,<0.11 in /core ([#5917](https://github.com/dbt-labs/dbt-core/pull/5917))
- Bump black from 22.8.0 to 22.10.0 ([#6019](https://github.com/dbt-labs/dbt-core/pull/6019))
- Bump mashumaro[msgpack] from 3.0.4 to 3.1.1 in /core ([#6108](https://github.com/dbt-labs/dbt-core/pull/6108))
- Update colorama requirement from <0.4.6,>=0.3.9 to >=0.3.9,<0.4.7 in /core ([#6144](https://github.com/dbt-labs/dbt-core/pull/6144))
- Bump mashumaro[msgpack] from 3.1.1 to 3.2 in /core ([#6375](https://github.com/dbt-labs/dbt-core/pull/6375))
- Update agate requirement from <1.6.4,>=1.6 to >=1.6,<1.7.1 in /core ([#6506](https://github.com/dbt-labs/dbt-core/pull/6506))

### Contributors
- [@NiallRees](https://github.com/NiallRees) ([#5859](https://github.com/dbt-labs/dbt-core/issues/5859))
- [@agpapa](https://github.com/agpapa) ([#6365](https://github.com/dbt-labs/dbt-core/issues/6365))
- [@andy-clapson](https://github.com/andy-clapson) ([dbt-docs/#5791](https://github.com/dbt-labs/dbt-docs/issues/5791))
- [@callum-mcdata](https://github.com/callum-mcdata) ([#6398](https://github.com/dbt-labs/dbt-core/issues/6398), [#6507](https://github.com/dbt-labs/dbt-core/issues/6507))
- [@chamini2](https://github.com/chamini2) ([#6041](https://github.com/dbt-labs/dbt-core/issues/6041))
- [@daniel-murray](https://github.com/daniel-murray) ([#5016](https://github.com/dbt-labs/dbt-core/issues/5016))
- [@dave-connors-3](https://github.com/dave-connors-3) ([#5680](https://github.com/dbt-labs/dbt-core/issues/5680), [#5990](https://github.com/dbt-labs/dbt-core/issues/5990), [#6625](https://github.com/dbt-labs/dbt-core/issues/6625))
- [@dbeatty10](https://github.com/dbeatty10) ([#6411](https://github.com/dbt-labs/dbt-core/issues/6411), [dbt-docs/#6368](https://github.com/dbt-labs/dbt-docs/issues/6368), [#6394](https://github.com/dbt-labs/dbt-core/issues/6394), [#6294](https://github.com/dbt-labs/dbt-core/issues/6294), [#6566](https://github.com/dbt-labs/dbt-core/issues/6566))
- [@devmessias](https://github.com/devmessias) ([#6309](https://github.com/dbt-labs/dbt-core/issues/6309))
- [@eltociear](https://github.com/eltociear) ([#4904](https://github.com/dbt-labs/dbt-core/issues/4904))
- [@eve-johns](https://github.com/eve-johns) ([#6068](https://github.com/dbt-labs/dbt-core/issues/6068))
- [@haritamar](https://github.com/haritamar) ([#6246](https://github.com/dbt-labs/dbt-core/issues/6246))
- [@jared-rimmer](https://github.com/jared-rimmer) ([#5486](https://github.com/dbt-labs/dbt-core/issues/5486))
- [@josephberni](https://github.com/josephberni) ([#5016](https://github.com/dbt-labs/dbt-core/issues/5016))
- [@joshuataylor](https://github.com/joshuataylor) ([#6147](https://github.com/dbt-labs/dbt-core/issues/6147))
- [@justbldwn](https://github.com/justbldwn) ([#6241](https://github.com/dbt-labs/dbt-core/issues/6241), [#6245](https://github.com/dbt-labs/dbt-core/issues/6245), [#6269](https://github.com/dbt-labs/dbt-core/issues/6269))
- [@luke-bassett](https://github.com/luke-bassett) ([#1350](https://github.com/dbt-labs/dbt-core/issues/1350))
- [@max-sixty](https://github.com/max-sixty) ([#5946](https://github.com/dbt-labs/dbt-core/issues/5946), [#5983](https://github.com/dbt-labs/dbt-core/issues/5983), [#6028](https://github.com/dbt-labs/dbt-core/issues/6028))
- [@mivanicova](https://github.com/mivanicova) ([#6488](https://github.com/dbt-labs/dbt-core/issues/6488))
- [@nshuman1](https://github.com/nshuman1) ([dbt-docs/#6529](https://github.com/dbt-labs/dbt-docs/issues/6529))
- [@paulbenschmidt](https://github.com/paulbenschmidt) ([dbt-docs/#5880](https://github.com/dbt-labs/dbt-docs/issues/5880))
- [@pgoslatara](https://github.com/pgoslatara) ([#5929](https://github.com/dbt-labs/dbt-core/issues/5929))
- [@racheldaniel](https://github.com/racheldaniel) ([#6201](https://github.com/dbt-labs/dbt-core/issues/6201))
- [@timle2](https://github.com/timle2) ([#4205](https://github.com/dbt-labs/dbt-core/issues/4205))
- [@tmastny](https://github.com/tmastny) ([#6216](https://github.com/dbt-labs/dbt-core/issues/6216))
8 .changes/1.4.1.md Normal file
@@ -0,0 +1,8 @@
## dbt-core 1.4.1 - January 26, 2023

### Fixes

- [Regression] exposure_content referenced incorrectly ([#6738](https://github.com/dbt-labs/dbt-core/issues/6738))

### Contributors
- [@Mathyoub](https://github.com/Mathyoub) ([#6738](https://github.com/dbt-labs/dbt-core/issues/6738))
20 .changes/1.4.2.md Normal file
@@ -0,0 +1,20 @@
## dbt-core 1.4.2 - February 23, 2023

### Fixes

- Sort cli vars before hashing for partial parsing ([#6710](https://github.com/dbt-labs/dbt-core/issues/6710))
- Remove pin on packaging and stop using it for prerelease comparisons ([#6834](https://github.com/dbt-labs/dbt-core/issues/6834))
- Readd depends_on.macros to SeedNode, to support seeds with hooks calling macros ([#6806](https://github.com/dbt-labs/dbt-core/issues/6806))
- Fix regression of --quiet cli parameter behavior ([#6749](https://github.com/dbt-labs/dbt-core/issues/6749))
- Ensure results from hooks contain nodes when processing them ([#6796](https://github.com/dbt-labs/dbt-core/issues/6796))
- Always flush stdout after logging ([#6901](https://github.com/dbt-labs/dbt-core/issues/6901))
- Set relation_name in test nodes at compile time ([#6930](https://github.com/dbt-labs/dbt-core/issues/6930))
- Fix disabled definition in WritableManifest ([#6752](https://github.com/dbt-labs/dbt-core/issues/6752))
- Fix regression in logbook log output ([#7028](https://github.com/dbt-labs/dbt-core/issues/7028))

### Docs

- Fix JSON path to overview docs ([dbt-docs/#366](https://github.com/dbt-labs/dbt-docs/issues/366))

### Contributors
- [@halvorlu](https://github.com/halvorlu) ([#366](https://github.com/dbt-labs/dbt-core/issues/366))
5 .changes/1.4.3.md Normal file
@@ -0,0 +1,5 @@
## dbt-core 1.4.3 - February 24, 2023

### Fixes

- Fix semver comparison logic by ensuring numeric values ([#7039](https://github.com/dbt-labs/dbt-core/issues/7039))
8 .changes/1.4.4.md Normal file
@@ -0,0 +1,8 @@
## dbt-core 1.4.4 - February 28, 2023

### Fixes

- add pytz dependency ([#7077](https://github.com/dbt-labs/dbt-core/issues/7077))

### Contributors
- [@sdebruyn](https://github.com/sdebruyn) ([#7077](https://github.com/dbt-labs/dbt-core/issues/7077))
19 .changes/1.4.5.md Normal file
@@ -0,0 +1,19 @@
## dbt-core 1.4.5 - March 10, 2023

### Fixes

- Fix compilation logic for ephemeral nodes ([#6885](https://github.com/dbt-labs/dbt-core/issues/6885))
- allow adapters to change model name resolution in py models ([#7114](https://github.com/dbt-labs/dbt-core/issues/7114))

### Docs

- Fix JSON path to package overview docs ([dbt-docs/#390](https://github.com/dbt-labs/dbt-docs/issues/390))

### Under the Hood

- Moving simple_seed to adapter zone to help adapter test conversions ([#CT-1959](https://github.com/dbt-labs/dbt-core/issues/CT-1959))

### Contributors
- [@dbeatty10](https://github.com/dbeatty10) ([#390](https://github.com/dbt-labs/dbt-core/issues/390))
- [@nssalian](https://github.com/nssalian) ([#CT-1959](https://github.com/dbt-labs/dbt-core/issues/CT-1959))
- [@rlh1994](https://github.com/rlh1994) ([#390](https://github.com/dbt-labs/dbt-core/issues/390))
@@ -1,6 +0,0 @@
-kind: Features
-body: Adding the entity node
-time: 2023-01-18T13:48:04.487817-06:00
-custom:
-  Author: callum-mcdata
-  Issue: "6627"
6 .changes/unreleased/Fixes-20230215-104536.yaml Normal file
@@ -0,0 +1,6 @@
kind: Fixes
body: Make use of hashlib.md5() FIPS compliant
time: 2023-02-15T10:45:36.755797+01:00
custom:
  Author: nielspardon
  Issue: "6900"
@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Fix use of ConnectionReused logging event
-time: 2023-01-13T13:25:13.023168-05:00
-custom:
-  Author: gshank
-  Issue: "6168"
@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Update deprecated github action command
-time: 2023-01-17T11:17:37.046095-06:00
-custom:
-  Author: davidbloss
-  Issue: "6153"
6 .changes/unreleased/Under the Hood-20230324-144050.yaml Normal file
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Remove upper pin for hologram/jsonschema
time: 2023-03-24T14:40:50.574108-04:00
custom:
  Author: gshank
  Issue: "6775"
@@ -97,22 +97,28 @@ footerFormat: |
  {{- /* we only want to include non-core team contributors */}}
  {{- if not (has $authorLower $core_team)}}
  {{- $changeList := splitList " " $change.Custom.Author }}
  {{- /* Docs kind link back to dbt-docs instead of dbt-core issues */}}
  {{- $IssueList := list }}
  {{- $changeLink := $change.Kind }}
  {{- if or (eq $change.Kind "Dependencies") (eq $change.Kind "Security") }}
  {{- $changeLink = "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $change.Custom.PR }}
  {{- else if eq $change.Kind "Docs"}}
  {{- $changeLink = "[dbt-docs/#nbr](https://github.com/dbt-labs/dbt-docs/issues/nbr)" | replace "nbr" $change.Custom.Issue }}
  {{- $changes := splitList " " $change.Custom.PR }}
  {{- range $issueNbr := $changes }}
  {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $issueNbr }}
  {{- $IssueList = append $IssueList $changeLink }}
  {{- end -}}
  {{- else }}
  {{- $changeLink = "[#nbr](https://github.com/dbt-labs/dbt-core/issues/nbr)" | replace "nbr" $change.Custom.Issue }}
  {{- $changes := splitList " " $change.Custom.Issue }}
  {{- range $issueNbr := $changes }}
  {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/issues/nbr)" | replace "nbr" $issueNbr }}
  {{- $IssueList = append $IssueList $changeLink }}
  {{- end -}}
  {{- end }}
  {{- /* check if this contributor has other changes associated with them already */}}
  {{- if hasKey $contributorDict $author }}
  {{- $contributionList := get $contributorDict $author }}
  {{- $contributionList = append $contributionList $changeLink }}
  {{- $contributionList = concat $contributionList $IssueList }}
  {{- $contributorDict := set $contributorDict $author $contributionList }}
  {{- else }}
  {{- $contributionList := list $changeLink }}
  {{- $contributionList := $IssueList }}
  {{- $contributorDict := set $contributorDict $author $contributionList }}
  {{- end }}
  {{- end}}
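The updated template accumulates, per non-core contributor, a list of issue or PR links, so that one change crediting several numbers yields several links. As a rough Python analogue of that accumulation (the dict shapes and field names here are assumptions for illustration, not changie's API):

```python
# Rough sketch of the footerFormat logic above; `changes` is assumed to be a
# list of dicts with the same custom fields changie entries carry.
def contributor_links(changes, core_team):
    contributor_dict = {}
    for change in changes:
        issue_numbers = change["custom"]["Issue"].split(" ")
        links = [
            f"[#{nbr}](https://github.com/dbt-labs/dbt-core/issues/{nbr})"
            for nbr in issue_numbers
        ]
        for author in change["custom"]["Author"].split(" "):
            if author.lower() in core_team:
                continue  # only non-core contributors are credited
            contributor_dict.setdefault(author, []).extend(links)
    return contributor_dict
```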
2 .flake8
@@ -9,4 +9,4 @@ ignore =
     E203 # makes Flake8 work like black
     E741
     E501 # long line checking is done in black
-exclude = test/
+exclude = test
20 .github/_README.md vendored
@@ -63,12 +63,12 @@ permissions:
  contents: read
  pull-requests: write
```

### Secrets
- When to use a [Personal Access Token (PAT)](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token) vs the [GITHUB_TOKEN](https://docs.github.com/en/actions/security-guides/automatic-token-authentication) generated for the action?

  The `GITHUB_TOKEN` is used by default. In most cases it is sufficient for what you need.

  If you expect the workflow to result in a commit that should retrigger workflows, you will need to use a Personal Access Token for the bot to commit the file. When using the GITHUB_TOKEN, the resulting commit will not trigger another GitHub Actions Workflow run. This is due to limitations set by GitHub. See [the docs](https://docs.github.com/en/actions/security-guides/automatic-token-authentication#using-the-github_token-in-a-workflow) for a more detailed explanation.

  For example, we must use a PAT in our workflow to commit a new changelog yaml file for bot PRs. Once the file has been committed to the branch, it should retrigger the check to validate that a changelog exists on the PR. Otherwise, it would stay in a failed state since the check would never retrigger.

@@ -105,7 +105,7 @@ Some triggers of note that we use:

```
# **what?**
# Describe what the action does.

# **why?**
# Why does this action exist?

@@ -138,7 +138,7 @@ Some triggers of note that we use:
      id: fp
      run: |
        FILEPATH=.changes/unreleased/Dependencies-${{ steps.filename_time.outputs.time }}.yaml
-       echo "FILEPATH=$FILEPATH" >> $GITHUB_OUTPUT
+       echo "::set-output name=FILEPATH::$FILEPATH"
```

- Print out all variables you will reference as the first step of a job. This allows for easier debugging. The first job should log all inputs. Subsequent jobs should reference outputs of other jobs, if present.

@@ -158,14 +158,14 @@ Some triggers of note that we use:
          echo "The build_script_path: ${{ inputs.build_script_path }}"
          echo "The s3_bucket_name: ${{ inputs.s3_bucket_name }}"
          echo "The package_test_command: ${{ inputs.package_test_command }}"

      # collect all the variables that need to be used in subsequent jobs
      - name: Set Variables
        id: variables
        run: |
-         echo "important_path='performance/runner/Cargo.toml'" >> $GITHUB_OUTPUT
-         echo "release_id=${{github.event.inputs.release_id}}" >> $GITHUB_OUTPUT
-         echo "open_prs=${{github.event.inputs.open_prs}}" >> $GITHUB_OUTPUT
+         echo "::set-output name=important_path::'performance/runner/Cargo.toml'"
+         echo "::set-output name=release_id::${{github.event.inputs.release_id}}"
+         echo "::set-output name=open_prs::${{github.event.inputs.open_prs}}"

  job2:
    needs: [job1]

@@ -190,7 +190,7 @@ ___
### Actions from the Marketplace
- Don’t use external actions for things that can easily be accomplished manually.
- Always read through what an external action does before using it! Often an action in the GitHub Actions Marketplace can be replaced with a few lines in bash. This is much more maintainable (and won’t change under us) and clear as to what’s actually happening. It also prevents any
- Pin actions _we don't control_ to tags.

### Connecting to AWS
- Authenticate with the aws managed workflow

@@ -208,7 +208,7 @@ ___

```yaml
- name: Copy Artifacts from S3 via CLI
  run: aws s3 cp ${{ env.s3_bucket }} . --recursive
```

### Testing
17 .github/actions/latest-wrangler/main.py vendored
@@ -28,12 +28,11 @@ if __name__ == "__main__":
     if package_request.status_code == 404:
         if halt_on_missing:
             sys.exit(1)
-        # everything is the latest if the package doesn't exist
-        github_output = os.environ.get("GITHUB_OUTPUT")
-        with open(github_output, "at", encoding="utf-8") as gh_output:
-            gh_output.write("latest=True")
-            gh_output.write("minor_latest=True")
-        sys.exit(0)
+        else:
+            # everything is the latest if the package doesn't exist
+            print(f"::set-output name=latest::{True}")
+            print(f"::set-output name=minor_latest::{True}")
+            sys.exit(0)

     # TODO: verify package meta is "correct"
     # https://github.com/dbt-labs/dbt-core/issues/4640
@@ -92,7 +91,5 @@ if __name__ == "__main__":
     latest = is_latest(pre_rel, new_version, current_latest)
     minor_latest = is_latest(pre_rel, new_version, current_minor_latest)

-    github_output = os.environ.get("GITHUB_OUTPUT")
-    with open(github_output, "at", encoding="utf-8") as gh_output:
-        gh_output.write(f"latest={latest}")
-        gh_output.write(f"minor_latest={minor_latest}")
+    print(f"::set-output name=latest::{latest}")
+    print(f"::set-output name=minor_latest::{minor_latest}")
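Both hunks swap between the two ways a step can expose outputs: printing a `::set-output` workflow command (deprecated by GitHub) or appending `key=value` lines to the file named by the `GITHUB_OUTPUT` environment variable. A minimal sketch of the file-based form follows; the helper name is made up for illustration, and note that each pair must end with a newline so consecutive writes don't run together.

```python
import os

def set_output(name: str, value: str) -> None:
    """Append one key=value pair to the step's GITHUB_OUTPUT file.

    File-based replacement for the deprecated
    `::set-output name=...::...` workflow command seen in the diff.
    """
    with open(os.environ["GITHUB_OUTPUT"], "at", encoding="utf-8") as gh_output:
        gh_output.write(f"{name}={value}\n")  # newline separates pairs

set_output("latest", "True")
set_output("minor_latest", "True")
```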
8 .github/workflows/main.yml vendored
@@ -101,9 +101,7 @@ jobs:
       - name: Get current date
         if: always()
         id: date
-        run: |
-          CURRENT_DATE=$(date +'%Y-%m-%dT%H_%M_%S') # no colons allowed for artifacts
-          echo "date=$CURRENT_DATE" >> $GITHUB_OUTPUT
+        run: echo "::set-output name=date::$(date +'%Y-%m-%dT%H_%M_%S')" #no colons allowed for artifacts

       - uses: actions/upload-artifact@v2
         if: always()
@@ -170,9 +168,7 @@ jobs:
       - name: Get current date
         if: always()
         id: date
-        run: |
-          CURRENT_DATE=$(date +'%Y-%m-%dT%H_%M_%S') # no colons allowed for artifacts
-          echo "date=$CURRENT_DATE" >> $GITHUB_OUTPUT
+        run: echo "::set-output name=date::$(date +'%Y_%m_%dT%H_%M_%S')" #no colons allowed for artifacts

       - uses: actions/upload-artifact@v2
         if: always()
109 .github/workflows/nightly-release.yml vendored Normal file
@@ -0,0 +1,109 @@
# **what?**
# Nightly releases to GitHub and PyPI. This workflow produces the following outcome:
# - generate and validate data for night release (commit SHA, version number, release branch);
# - pass data to release workflow;
# - night release will be pushed to GitHub as a draft release;
# - night build will be pushed to test PyPI;
#
# **why?**
# Ensure an automated and tested release process for nightly builds
#
# **when?**
# This workflow runs on schedule or can be run manually on demand.

name: Nightly Test Release to GitHub and PyPI

on:
  workflow_dispatch: # for manual triggering
  schedule:
    - cron: 0 9 * * *

permissions:
  contents: write # this is the permission that allows creating a new release

defaults:
  run:
    shell: bash

env:
  RELEASE_BRANCH: "main"

jobs:
  aggregate-release-data:
    runs-on: ubuntu-latest

    outputs:
      commit_sha: ${{ steps.resolve-commit-sha.outputs.release_commit }}
      version_number: ${{ steps.nightly-release-version.outputs.number }}
      release_branch: ${{ steps.release-branch.outputs.name }}

    steps:
      - name: "Checkout ${{ github.repository }} Branch ${{ env.RELEASE_BRANCH }}"
        uses: actions/checkout@v3
        with:
          ref: ${{ env.RELEASE_BRANCH }}

      - name: "Resolve Commit To Release"
        id: resolve-commit-sha
        run: |
          commit_sha=$(git rev-parse HEAD)
          echo "release_commit=$commit_sha" >> $GITHUB_OUTPUT

      - name: "Get Current Version Number"
        id: version-number-sources
        run: |
          current_version=`awk -F"current_version = " '{print $2}' .bumpversion.cfg | tr '\n' ' '`
          echo "current_version=$current_version" >> $GITHUB_OUTPUT

      - name: "Audit Version And Parse Into Parts"
        id: semver
        uses: dbt-labs/actions/parse-semver@v1.1.0
        with:
          version: ${{ steps.version-number-sources.outputs.current_version }}

      - name: "Get Current Date"
        id: current-date
        run: echo "date=$(date +'%m%d%Y')" >> $GITHUB_OUTPUT

      - name: "Generate Nightly Release Version Number"
        id: nightly-release-version
        run: |
          number="${{ steps.semver.outputs.version }}.dev${{ steps.current-date.outputs.date }}+nightly"
          echo "number=$number" >> $GITHUB_OUTPUT

      - name: "Audit Nightly Release Version And Parse Into Parts"
        uses: dbt-labs/actions/parse-semver@v1.1.0
        with:
          version: ${{ steps.nightly-release-version.outputs.number }}

      - name: "Set Release Branch"
        id: release-branch
        run: |
          echo "name=${{ env.RELEASE_BRANCH }}" >> $GITHUB_OUTPUT

  log-outputs-aggregate-release-data:
    runs-on: ubuntu-latest
    needs: [aggregate-release-data]

    steps:
      - name: "[DEBUG] Log Outputs"
        run: |
          echo commit_sha : ${{ needs.aggregate-release-data.outputs.commit_sha }}
          echo version_number: ${{ needs.aggregate-release-data.outputs.version_number }}
          echo release_branch: ${{ needs.aggregate-release-data.outputs.release_branch }}

  release-github-pypi:
    needs: [aggregate-release-data]

    uses: ./.github/workflows/release.yml
    with:
      sha: ${{ needs.aggregate-release-data.outputs.commit_sha }}
      target_branch: ${{ needs.aggregate-release-data.outputs.release-branch }}
      version_number: ${{ needs.aggregate-release-data.outputs.version_number }}
      build_script_path: "scripts/build-dist.sh"
      env_setup_script_path: "scripts/env-setup.sh"
      s3_bucket_name: "core-team-artifacts"
      package_test_command: "dbt --version"
      test_run: true
      nightly_release: true
    secrets: inherit
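Strung together, the `aggregate-release-data` steps turn the current version and today's date into the nightly version string that the `.bumpversion.cfg` regex above must be able to parse. A small Python sketch of that computation, with an assumed example value for the version the workflow actually reads from `.bumpversion.cfg` with awk:

```python
from datetime import datetime, timezone

# Assumed example input; the workflow extracts this from .bumpversion.cfg.
current_version = "1.5.0a1"

# Mirrors `date +'%m%d%Y'` in the "Get Current Date" step (runners use UTC).
date = datetime.now(timezone.utc).strftime("%m%d%Y")

number = f"{current_version}.dev{date}+nightly"
print(number)  # e.g. 1.5.0a1.dev03102023+nightly
```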
12 .github/workflows/release-docker.yml vendored
@@ -41,9 +41,9 @@ jobs:
         id: version
         run: |
           IFS="." read -r MAJOR MINOR PATCH <<< ${{ github.event.inputs.version_number }}
-          echo "major=$MAJOR" >> $GITHUB_OUTPUT
-          echo "minor=$MINOR" >> $GITHUB_OUTPUT
-          echo "patch=$PATCH" >> $GITHUB_OUTPUT
+          echo "::set-output name=major::$MAJOR"
+          echo "::set-output name=minor::$MINOR"
+          echo "::set-output name=patch::$PATCH"

       - name: Is pkg 'latest'
         id: latest
@@ -70,10 +70,8 @@ jobs:
       - name: Get docker build arg
         id: build_arg
         run: |
-          BUILD_ARG_NAME=$(echo ${{ github.event.inputs.package }} | sed 's/\-/_/g')
-          BUILD_ARG_VALUE=$(echo ${{ github.event.inputs.package }} | sed 's/postgres/core/g')
-          echo "build_arg_name=$BUILD_ARG_NAME" >> $GITHUB_OUTPUT
-          echo "build_arg_value=$BUILD_ARG_VALUE" >> $GITHUB_OUTPUT
+          echo "::set-output name=build_arg_name::"$(echo ${{ github.event.inputs.package }} | sed 's/\-/_/g')
+          echo "::set-output name=build_arg_value::"$(echo ${{ github.event.inputs.package }} | sed 's/postgres/core/g')

       - name: Log in to the GHCR
         uses: docker/login-action@v1
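The `IFS="." read` and the two `sed` calls do simple string surgery on the workflow inputs. The same transformations in Python, with assumed example inputs, make the intent easier to see:

```python
package = "dbt-postgres"   # assumed example for github.event.inputs.package
version_number = "1.4.5"   # assumed example for ...inputs.version_number

# Equivalent of `IFS="." read -r MAJOR MINOR PATCH`
major, minor, patch = version_number.split(".")

# Equivalents of sed 's/\-/_/g' and sed 's/postgres/core/g'
build_arg_name = package.replace("-", "_")             # -> dbt_postgres
build_arg_value = package.replace("postgres", "core")  # -> dbt-core

print(major, minor, patch, build_arg_name, build_arg_value)
```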
339 .github/workflows/release.yml vendored
@@ -1,24 +1,110 @@
 # **what?**
-# Take the given commit, run unit tests specifically on that sha, build and
-# package it, and then release to GitHub and PyPi with that specific build
+# Release workflow provides the following steps:
+# - checkout the given commit;
+# - validate version in sources and changelog file for given version;
+# - bump the version and generate a changelog if needed;
+# - merge all changes to the target branch if needed;
+# - run unit and integration tests against given commit;
+# - build and package that SHA;
+# - release it to GitHub and PyPI with that specific build;
+#
 # **why?**
 # Ensure an automated and tested release process
 #
 # **when?**
-# This will only run manually with a given sha and version
+# This workflow can be run manually on demand or can be called by other workflows

-name: Release to GitHub and PyPi
+name: Release to GitHub and PyPI

 on:
   workflow_dispatch:
     inputs:
       sha:
-        description: 'The last commit sha in the release'
-        required: true
+        description: "The last commit sha in the release"
+        type: string
+        required: true
+      target_branch:
+        description: "The branch to release from"
+        type: string
+        required: true
       version_number:
-        description: 'The release version number (i.e. 1.0.0b1)'
-        required: true
+        description: "The release version number (i.e. 1.0.0b1)"
+        type: string
+        required: true
+      build_script_path:
+        description: "Build script path"
+        type: string
+        default: "scripts/build-dist.sh"
+        required: true
+      env_setup_script_path:
+        description: "Environment setup script path"
+        type: string
+        default: "scripts/env-setup.sh"
+        required: false
+      s3_bucket_name:
+        description: "AWS S3 bucket name"
+        type: string
+        default: "core-team-artifacts"
+        required: true
+      package_test_command:
+        description: "Package test command"
+        type: string
+        default: "dbt --version"
+        required: true
+      test_run:
+        description: "Test run (Publish release as draft)"
+        type: boolean
+        default: true
+        required: false
+      nightly_release:
+        description: "Nightly release to dev environment"
+        type: boolean
+        default: false
+        required: false
+  workflow_call:
+    inputs:
+      sha:
+        description: "The last commit sha in the release"
+        type: string
+        required: true
+      target_branch:
+        description: "The branch to release from"
+        type: string
+        required: true
+      version_number:
+        description: "The release version number (i.e. 1.0.0b1)"
+        type: string
+        required: true
+      build_script_path:
+        description: "Build script path"
+        type: string
+        default: "scripts/build-dist.sh"
+        required: true
+      env_setup_script_path:
+        description: "Environment setup script path"
+        type: string
+        default: "scripts/env-setup.sh"
+        required: false
+      s3_bucket_name:
+        description: "AWS S3 bucket name"
+        type: string
+        default: "core-team-artifacts"
+        required: true
+      package_test_command:
+        description: "Package test command"
+        type: string
+        default: "dbt --version"
+        required: true
+      test_run:
+        description: "Test run (Publish release as draft)"
+        type: boolean
+        default: true
+        required: false
+      nightly_release:
+        description: "Nightly release to dev environment"
+        type: boolean
+        default: false
+        required: false

 permissions:
   contents: write # this is the permission that allows creating a new release
@@ -28,175 +114,116 @@ defaults:
     shell: bash

 jobs:
-  unit:
-    name: Unit test
+  log-inputs:
+    name: Log Inputs
     runs-on: ubuntu-latest

-    env:
-      TOXENV: "unit"
-
     steps:
-      - name: Check out the repository
-        uses: actions/checkout@v2
-        with:
-          persist-credentials: false
-          ref: ${{ github.event.inputs.sha }}
-
-      - name: Set up Python
-        uses: actions/setup-python@v2
-        with:
-          python-version: 3.8
-
-      - name: Install python dependencies
+      - name: "[DEBUG] Print Variables"
         run: |
-          pip install --user --upgrade pip
-          pip install tox
-          pip --version
-          tox --version
+          echo The last commit sha in the release: ${{ inputs.sha }}
+          echo The branch to release from: ${{ inputs.target_branch }}
+          echo The release version number: ${{ inputs.version_number }}
+          echo Build script path: ${{ inputs.build_script_path }}
+          echo Environment setup script path: ${{ inputs.env_setup_script_path }}
+          echo AWS S3 bucket name: ${{ inputs.s3_bucket_name }}
+          echo Package test command: ${{ inputs.package_test_command }}
+          echo Test run: ${{ inputs.test_run }}
+          echo Nightly release: ${{ inputs.nightly_release }}

-      - name: Run tox
-        run: tox
+  bump-version-generate-changelog:
+    name: Bump package version, Generate changelog

-  build:
-    name: build packages
+    uses: dbt-labs/dbt-release/.github/workflows/release-prep.yml@main

+    with:
+      sha: ${{ inputs.sha }}
+      version_number: ${{ inputs.version_number }}
+      target_branch: ${{ inputs.target_branch }}
+      env_setup_script_path: ${{ inputs.env_setup_script_path }}
+      test_run: ${{ inputs.test_run }}
+      nightly_release: ${{ inputs.nightly_release }}

+    secrets: inherit

+  log-outputs-bump-version-generate-changelog:
+    name: "[Log output] Bump package version, Generate changelog"
+    if: ${{ !failure() && !cancelled() }}

+    needs: [bump-version-generate-changelog]

     runs-on: ubuntu-latest

     steps:
-      - name: Check out the repository
-        uses: actions/checkout@v2
-        with:
-          persist-credentials: false
-          ref: ${{ github.event.inputs.sha }}
-
-      - name: Set up Python
-        uses: actions/setup-python@v2
-        with:
-          python-version: 3.8
-
-      - name: Install python dependencies
+      - name: Print variables
         run: |
-          pip install --user --upgrade pip
-          pip install --upgrade setuptools wheel twine check-wheel-contents
-          pip --version
+          echo Final SHA : ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
+          echo Changelog path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}

-      - name: Build distributions
-        run: ./scripts/build-dist.sh
+  build-test-package:
+    name: Build, Test, Package
+    if: ${{ !failure() && !cancelled() }}
+    needs: [bump-version-generate-changelog]

-      - name: Show distributions
-        run: ls -lh dist/
+    uses: dbt-labs/dbt-release/.github/workflows/build.yml@main

-      - name: Check distribution descriptions
-        run: |
-          twine check dist/*
+    with:
+      sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
+      version_number: ${{ inputs.version_number }}
+      changelog_path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}
+      build_script_path: ${{ inputs.build_script_path }}
+      s3_bucket_name: ${{ inputs.s3_bucket_name }}
+      package_test_command: ${{ inputs.package_test_command }}
+      test_run: ${{ inputs.test_run }}
+      nightly_release: ${{ inputs.nightly_release }}

-      - name: Check wheel contents
-        run: |
-          check-wheel-contents dist/*.whl --ignore W007,W008
-
-      - uses: actions/upload-artifact@v2
-        with:
-          name: dist
-          path: |
-            dist/
-            !dist/dbt-${{github.event.inputs.version_number}}.tar.gz
-
-  test-build:
-    name: verify packages
-
-    needs: [build, unit]
-
-    runs-on: ubuntu-latest
-
-    steps:
-      - name: Set up Python
-        uses: actions/setup-python@v2
-        with:
-          python-version: 3.8
-
-      - name: Install python dependencies
-        run: |
-          pip install --user --upgrade pip
-          pip install --upgrade wheel
-          pip --version
-
-      - uses: actions/download-artifact@v2
-        with:
-          name: dist
-          path: dist/
-
-      - name: Show distributions
-        run: ls -lh dist/
-
-      - name: Install wheel distributions
-        run: |
-          find ./dist/*.whl -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/
-
-      - name: Check wheel distributions
-        run: |
-          dbt --version
-
-      - name: Install source distributions
-        run: |
-          find ./dist/*.gz -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/
-
-      - name: Check source distributions
-        run: |
-          dbt --version
+    secrets:
+      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

   github-release:
     name: GitHub Release
+    if: ${{ !failure() && !cancelled() }}

-    needs: test-build
+    needs: [bump-version-generate-changelog, build-test-package]

-    runs-on: ubuntu-latest
+    uses: dbt-labs/dbt-release/.github/workflows/github-release.yml@main

-    steps:
-      - uses: actions/download-artifact@v2
-        with:
-          name: dist
-          path: '.'
-
-      # Need to set an output variable because env variables can't be taken as input
-      # This is needed for the next step with releasing to GitHub
-      - name: Find release type
-        id: release_type
-        env:
-          IS_PRERELEASE: ${{ contains(github.event.inputs.version_number, 'rc') || contains(github.event.inputs.version_number, 'b') }}
-        run: |
-          echo "isPrerelease=$IS_PRERELEASE" >> $GITHUB_OUTPUT
-
-      - name: Creating GitHub Release
-        uses: softprops/action-gh-release@v1
-        with:
-          name: dbt-core v${{github.event.inputs.version_number}}
-          tag_name: v${{github.event.inputs.version_number}}
-          prerelease: ${{ steps.release_type.outputs.isPrerelease }}
-          target_commitish: ${{github.event.inputs.sha}}
-          body: |
-            [Release notes](https://github.com/dbt-labs/dbt-core/blob/main/CHANGELOG.md)
-          files: |
-            dbt_postgres-${{github.event.inputs.version_number}}-py3-none-any.whl
-            dbt_core-${{github.event.inputs.version_number}}-py3-none-any.whl
-            dbt-postgres-${{github.event.inputs.version_number}}.tar.gz
-            dbt-core-${{github.event.inputs.version_number}}.tar.gz
+    with:
+      sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }}
+      version_number: ${{ inputs.version_number }}
+      changelog_path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }}
+      test_run: ${{ inputs.test_run }}

   pypi-release:
-    name: Pypi release
+    name: PyPI Release

-    runs-on: ubuntu-latest
-    needs: [github-release]
+    needs: github-release
+    uses: dbt-labs/dbt-release/.github/workflows/pypi-release.yml@main

-    environment: PypiProd
-    steps:
-      - uses: actions/download-artifact@v2
-        with:
-          name: dist
-          path: 'dist'
+    with:
+      version_number: ${{ inputs.version_number }}
+      test_run: ${{ inputs.test_run }}

-      - name: Publish distribution to PyPI
-        uses: pypa/gh-action-pypi-publish@v1.4.2
-        with:
-          password: ${{ secrets.PYPI_API_TOKEN }}
+    secrets:
+      PYPI_API_TOKEN: ${{ secrets.PYPI_API_TOKEN }}
+      TEST_PYPI_API_TOKEN: ${{ secrets.TEST_PYPI_API_TOKEN }}
+
+  slack-notification:
+    name: Slack Notification
+    if: ${{ failure() && (!inputs.test_run || inputs.nightly_release) }}
+
+    needs:
+      [
+        bump-version-generate-changelog,
+        build-test-package,
+        github-release,
+        pypi-release,
+      ]
+
+    uses: dbt-labs/dbt-release/.github/workflows/slack-post-notification.yml@main
+    with:
+      status: "failure"
+
+    secrets:
+      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_DEV_CORE_ALERTS }}
2 .github/workflows/version-bump.yml vendored
@@ -65,7 +65,7 @@ jobs:
       - name: Set branch value
         id: variables
         run: |
-          echo "BRANCH_NAME=prep-release/${{ github.event.inputs.version_number }}_$GITHUB_RUN_ID" >> $GITHUB_OUTPUT
+          echo "::set-output name=BRANCH_NAME::prep-release/${{ github.event.inputs.version_number }}_$GITHUB_RUN_ID"

       - name: Create PR branch
         run: |
194
CHANGELOG.md
194
CHANGELOG.md
@@ -5,12 +5,204 @@
|
||||
- "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
|
||||
- Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)
|
||||
|
||||
## dbt-core 1.4.5 - March 10, 2023
|
||||
|
||||
### Fixes
|
||||
|
||||
- Fix compilation logic for ephemeral nodes ([#6885](https://github.com/dbt-labs/dbt-core/issues/6885))
|
||||
- allow adapters to change model name resolution in py models ([#7114](https://github.com/dbt-labs/dbt-core/issues/7114))
|
||||
|
||||
### Docs
|
||||
|
||||
- Fix JSON path to package overview docs ([dbt-docs/#390](https://github.com/dbt-labs/dbt-docs/issues/390))
|
||||
|
||||
### Under the Hood
|
||||
|
||||
- Moving simple_seed to adapter zone to help adapter test conversions ([#CT-1959](https://github.com/dbt-labs/dbt-core/issues/CT-1959))
|
||||
|
||||
### Contributors
|
||||
- [@dbeatty10](https://github.com/dbeatty10) ([#390](https://github.com/dbt-labs/dbt-core/issues/390))
|
||||
- [@nssalian](https://github.com/nssalian) ([#CT-1959](https://github.com/dbt-labs/dbt-core/issues/CT-1959))
|
||||
- [@rlh1994](https://github.com/rlh1994) ([#390](https://github.com/dbt-labs/dbt-core/issues/390))
|
||||
|
||||
|
||||
## dbt-core 1.4.4 - February 28, 2023
|
||||
|
||||
### Fixes
|
||||
|
||||
- add pytz dependency ([#7077](https://github.com/dbt-labs/dbt-core/issues/7077))
|
||||
|
||||
### Contributors
|
||||
- [@sdebruyn](https://github.com/sdebruyn) ([#7077](https://github.com/dbt-labs/dbt-core/issues/7077))
|
||||
|
||||
## dbt-core 1.4.3 - February 24, 2023
|
||||
|
||||
### Fixes
|
||||
|
||||
- Fix semver comparison logic by ensuring numeric values ([#7039](https://github.com/dbt-labs/dbt-core/issues/7039))
|
||||
|
||||
## dbt-core 1.4.2 - February 23, 2023
|
||||
|
||||
### Fixes
|
||||
|
||||
- Sort cli vars before hashing for partial parsing ([#6710](https://github.com/dbt-labs/dbt-core/issues/6710))
|
||||
- Remove pin on packaging and stop using it for prerelease comparisons ([#6834](https://github.com/dbt-labs/dbt-core/issues/6834))
|
||||
- Readd depends_on.macros to SeedNode, to support seeds with hooks calling macros ([#6806](https://github.com/dbt-labs/dbt-core/issues/6806))
|
||||
- Fix regression of --quiet cli parameter behavior ([#6749](https://github.com/dbt-labs/dbt-core/issues/6749))
|
||||
- Ensure results from hooks contain nodes when processing them ([#6796](https://github.com/dbt-labs/dbt-core/issues/6796))
|
||||
- Always flush stdout after logging ([#6901](https://github.com/dbt-labs/dbt-core/issues/6901))
|
||||
- Set relation_name in test nodes at compile time ([#6930](https://github.com/dbt-labs/dbt-core/issues/6930))
|
||||
- Fix disabled definition in WritableManifest ([#6752](https://github.com/dbt-labs/dbt-core/issues/6752))
|
||||
- Fix regression in logbook log output ([#7028](https://github.com/dbt-labs/dbt-core/issues/7028))
|
||||
|
||||
### Docs
|
||||
|
||||
- Fix JSON path to overview docs ([dbt-docs/#366](https://github.com/dbt-labs/dbt-docs/issues/366))
|
||||
|
||||
### Contributors
|
||||
- [@halvorlu](https://github.com/halvorlu) ([#366](https://github.com/dbt-labs/dbt-core/issues/366))
|
||||
|
||||
## dbt-core 1.4.1 - January 26, 2023
|
||||
|
||||
### Fixes
|
||||
|
||||
- [Regression] exposure_content referenced incorrectly ([#6738](https://github.com/dbt-labs/dbt-core/issues/6738))
|
||||
|
||||
### Contributors
|
||||
- [@Mathyoub](https://github.com/Mathyoub) ([#6738](https://github.com/dbt-labs/dbt-core/issues/6738))
|
||||
|
||||
## dbt-core 1.4.0 - January 25, 2023
|
||||
|
||||
### Breaking Changes
|
||||
|
||||
- Cleaned up exceptions to directly raise in code. Also updated the existing exception to meet PEP guidelines.Removed use of all exception functions in the code base and marked them all as deprecated to be removed next minor release. ([#6339](https://github.com/dbt-labs/dbt-core/issues/6339), [#6393](https://github.com/dbt-labs/dbt-core/issues/6393), [#6460](https://github.com/dbt-labs/dbt-core/issues/6460))
|
||||
|
||||
### Features
|
||||
|
||||
- Added favor-state flag to optionally favor state nodes even if unselected node exists ([#5016](https://github.com/dbt-labs/dbt-core/issues/5016))
|
||||
- Update structured logging. Convert to using protobuf messages. Ensure events are enriched with node_info. ([#5610](https://github.com/dbt-labs/dbt-core/issues/5610))
|
||||
- incremental predicates ([#5680](https://github.com/dbt-labs/dbt-core/issues/5680))
|
||||
- Friendlier error messages when packages.yml is malformed ([#5486](https://github.com/dbt-labs/dbt-core/issues/5486))
|
||||
- Allow partitions in external tables to be supplied as a list ([#5929](https://github.com/dbt-labs/dbt-core/issues/5929))
|
||||
- extend -f flag shorthand for seed command ([#5990](https://github.com/dbt-labs/dbt-core/issues/5990))
|
||||
- This pulls the profile name from args when constructing a RuntimeConfig in lib.py, enabling the dbt-server to override the value that's in the dbt_project.yml ([#6201](https://github.com/dbt-labs/dbt-core/issues/6201))
|
||||
- Adding tarball install method for packages. Allowing package tarball to be specified via url in the packages.yaml. ([#4205](https://github.com/dbt-labs/dbt-core/issues/4205))
|
||||
- Added an md5 function to the base context ([#6246](https://github.com/dbt-labs/dbt-core/issues/6246))
|
||||
- Exposures support metrics in lineage ([#6057](https://github.com/dbt-labs/dbt-core/issues/6057))
|
||||
- Add support for Python 3.11 ([#6147](https://github.com/dbt-labs/dbt-core/issues/6147))
|
||||
- Making timestamp optional for metrics ([#6398](https://github.com/dbt-labs/dbt-core/issues/6398))
|
||||
- The meta configuration field is now included in the node_info property of structured logs. ([#6216](https://github.com/dbt-labs/dbt-core/issues/6216))
|
||||
- Adds buildable selection mode ([#6365](https://github.com/dbt-labs/dbt-core/issues/6365))
|
||||
- --warn-error-options: Treat warnings as errors for specific events, based on user configuration ([#6165](https://github.com/dbt-labs/dbt-core/issues/6165))
|
||||
|
||||
### Fixes

- Account for disabled flags on models in schema files more completely ([#3992](https://github.com/dbt-labs/dbt-core/issues/3992))
- Add validation of the enabled config for metrics, exposures and sources ([#6030](https://github.com/dbt-labs/dbt-core/issues/6030))
- Check the length of args of the Python model function before accessing it ([#6041](https://github.com/dbt-labs/dbt-core/issues/6041))
- Add functors to ensure event types with str-type attributes are initialized to spec, even when provided non-str type params. ([#5436](https://github.com/dbt-labs/dbt-core/issues/5436))
- Allow hooks to fail without halting execution flow ([#5625](https://github.com/dbt-labs/dbt-core/issues/5625))
- Fix missing f-strings; convert old .format() messages to f-strings for consistency ([#6241](https://github.com/dbt-labs/dbt-core/issues/6241))
- Clarify the error message for how many models are allowed in a Python file ([#6245](https://github.com/dbt-labs/dbt-core/issues/6245))
- Fix typo in util.py ([#4904](https://github.com/dbt-labs/dbt-core/issues/4904))
- Make it possible to use default values with dbt.config.get ([#6309](https://github.com/dbt-labs/dbt-core/issues/6309))
- Use full path for writing manifest ([#6055](https://github.com/dbt-labs/dbt-core/issues/6055))
- Add pre-commit install to the make dev script in the Makefile ([#6269](https://github.com/dbt-labs/dbt-core/issues/6269))
- Late-rendering for `pre_` and `post_hook`s in `dbt_project.yml` ([#6411](https://github.com/dbt-labs/dbt-core/issues/6411))
- [CT-1284] Change Python model default materialization to table ([#5989](https://github.com/dbt-labs/dbt-core/issues/5989))
- [CT-1591] Don't parse empty Python files ([#6345](https://github.com/dbt-labs/dbt-core/issues/6345))
- Repair a regression which prevented basic logging before the logging subsystem is completely configured. ([#6434](https://github.com/dbt-labs/dbt-core/issues/6434))
- Fix docs generate --defer by adding defer_to_manifest to before_run ([#6488](https://github.com/dbt-labs/dbt-core/issues/6488))
- Fix a bug when partial parsing with an empty schema file ([#4850](https://github.com/dbt-labs/dbt-core/issues/4850))
- Fix DBT_FAVOR_STATE env var ([#5859](https://github.com/dbt-labs/dbt-core/issues/5859))
- Restore historical behavior of certain disabled test messages, so that they are at the less obtrusive debug level, rather than the warning level. ([#6501](https://github.com/dbt-labs/dbt-core/issues/6501))
- Bump mashumaro version to get a regression fix and add a unit test to verify that fix. ([#6428](https://github.com/dbt-labs/dbt-core/issues/6428))
- Call update_event_status earlier for node results. Rename event 'HookFinished' -> 'FinishedRunningStats' ([#6571](https://github.com/dbt-labs/dbt-core/issues/6571))
- Provide backward compatibility for `get_merge_sql` arguments ([#6625](https://github.com/dbt-labs/dbt-core/issues/6625))
- Fix behavior of --favor-state with --defer ([#6617](https://github.com/dbt-labs/dbt-core/issues/6617))
- Include adapter_response in the NodeFinished run_result log event ([#6703](https://github.com/dbt-labs/dbt-core/issues/6703))

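The dbt.config.get fix above ([#6309](https://github.com/dbt-labs/dbt-core/issues/6309)) is easiest to see in a Python model. A minimal sketch, where `lookback_days` is a hypothetical config key, not one dbt defines:

```python
def model(dbt, session):
    # "lookback_days" is a hypothetical key; the second argument is the
    # default returned when the key isn't set on the model (the behavior
    # this fix makes work)
    lookback = dbt.config.get("lookback_days", 7)
    events = dbt.ref("events")
    return events  # filtering by `lookback` left out for brevity
```
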
### Docs

- Minor doc correction ([dbt-docs/#5791](https://github.com/dbt-labs/dbt-docs/issues/5791))
- Generate API docs for new CLI interface ([dbt-docs/#5528](https://github.com/dbt-labs/dbt-docs/issues/5528))
- ([dbt-docs/#5880](https://github.com/dbt-labs/dbt-docs/issues/5880))
- Fix rendering of sample code for metrics ([dbt-docs/#323](https://github.com/dbt-labs/dbt-docs/issues/323))
- Alphabetize `core/dbt/README.md` ([dbt-docs/#6368](https://github.com/dbt-labs/dbt-docs/issues/6368))
- Updated minor typos encountered when skipping profile setup ([dbt-docs/#6529](https://github.com/dbt-labs/dbt-docs/issues/6529))

### Under the Hood

- Put black config in explicit config ([#5946](https://github.com/dbt-labs/dbt-core/issues/5946))
- Added the flat_graph attribute to the Manifest class's deepcopy() coverage ([#5809](https://github.com/dbt-labs/dbt-core/issues/5809))
- Add mypy configs so `mypy` passes from CLI ([#5983](https://github.com/dbt-labs/dbt-core/issues/5983))
- Exception message cleanup. ([#6023](https://github.com/dbt-labs/dbt-core/issues/6023))
- Add dmypy cache to gitignore ([#6028](https://github.com/dbt-labs/dbt-core/issues/6028))
- Provide useful errors when the value of 'materialized' is invalid ([#5229](https://github.com/dbt-labs/dbt-core/issues/5229))
- Clean up string formatting ([#6068](https://github.com/dbt-labs/dbt-core/issues/6068))
- Fixed extra whitespace in strings introduced by black. ([#1350](https://github.com/dbt-labs/dbt-core/issues/1350))
- Remove the 'root_path' field from most nodes ([#6171](https://github.com/dbt-labs/dbt-core/issues/6171))
- Combine certain logging events with different levels ([#6173](https://github.com/dbt-labs/dbt-core/issues/6173))
- Convert threading tests to pytest ([#5942](https://github.com/dbt-labs/dbt-core/issues/5942))
- Convert postgres index tests to pytest ([#5770](https://github.com/dbt-labs/dbt-core/issues/5770))
- Convert use color tests to pytest ([#5771](https://github.com/dbt-labs/dbt-core/issues/5771))
- Add github actions workflow to generate high level CLI API docs ([#5942](https://github.com/dbt-labs/dbt-core/issues/5942))
- Functionality-neutral refactor of event logging system to improve encapsulation and modularity. ([#6139](https://github.com/dbt-labs/dbt-core/issues/6139))
- Consolidate ParsedNode and CompiledNode classes ([#6383](https://github.com/dbt-labs/dbt-core/issues/6383))
- Prevent doc gen workflow from running on forks ([#6386](https://github.com/dbt-labs/dbt-core/issues/6386))
- Fix intermittent database connection failure in Windows CI test ([#6394](https://github.com/dbt-labs/dbt-core/issues/6394))
- Refactor and clean up manifest nodes ([#6426](https://github.com/dbt-labs/dbt-core/issues/6426))
- Restore important legacy logging behaviors, following the refactor which removed them ([#6437](https://github.com/dbt-labs/dbt-core/issues/6437))
- Treat dense text blobs as binary for `git grep` ([#6294](https://github.com/dbt-labs/dbt-core/issues/6294))
- Prune partial parsing logging events ([#6313](https://github.com/dbt-labs/dbt-core/issues/6313))
- Update the deprecation warning in the metric attributes renamed event ([#6507](https://github.com/dbt-labs/dbt-core/issues/6507))
- [CT-1693] Port severity test to Pytest ([#6466](https://github.com/dbt-labs/dbt-core/issues/6466))
- [CT-1694] Deprecate event tracking tests ([#6467](https://github.com/dbt-labs/dbt-core/issues/6467))
- Reorganize structured logging events to have two top-level keys ([#6311](https://github.com/dbt-labs/dbt-core/issues/6311))
- Combine some logging events ([#1716](https://github.com/dbt-labs/dbt-core/issues/1716), [#1717](https://github.com/dbt-labs/dbt-core/issues/1717), [#1719](https://github.com/dbt-labs/dbt-core/issues/1719))
- Check the length of escaped strings in the adapter test ([#6566](https://github.com/dbt-labs/dbt-core/issues/6566))

### Dependencies

- Update pathspec requirement from ~=0.9.0 to >=0.9,<0.11 in /core ([#5917](https://github.com/dbt-labs/dbt-core/pull/5917))
- Bump black from 22.8.0 to 22.10.0 ([#6019](https://github.com/dbt-labs/dbt-core/pull/6019))
- Bump mashumaro[msgpack] from 3.0.4 to 3.1.1 in /core ([#6108](https://github.com/dbt-labs/dbt-core/pull/6108))
- Update colorama requirement from <0.4.6,>=0.3.9 to >=0.3.9,<0.4.7 in /core ([#6144](https://github.com/dbt-labs/dbt-core/pull/6144))
- Bump mashumaro[msgpack] from 3.1.1 to 3.2 in /core ([#6375](https://github.com/dbt-labs/dbt-core/pull/6375))
- Update agate requirement from <1.6.4,>=1.6 to >=1.6,<1.7.1 in /core ([#6506](https://github.com/dbt-labs/dbt-core/pull/6506))

### Contributors
- [@NiallRees](https://github.com/NiallRees) ([#5859](https://github.com/dbt-labs/dbt-core/issues/5859))
- [@agpapa](https://github.com/agpapa) ([#6365](https://github.com/dbt-labs/dbt-core/issues/6365))
- [@andy-clapson](https://github.com/andy-clapson) ([dbt-docs/#5791](https://github.com/dbt-labs/dbt-docs/issues/5791))
- [@callum-mcdata](https://github.com/callum-mcdata) ([#6398](https://github.com/dbt-labs/dbt-core/issues/6398), [#6507](https://github.com/dbt-labs/dbt-core/issues/6507))
- [@chamini2](https://github.com/chamini2) ([#6041](https://github.com/dbt-labs/dbt-core/issues/6041))
- [@daniel-murray](https://github.com/daniel-murray) ([#5016](https://github.com/dbt-labs/dbt-core/issues/5016))
- [@dave-connors-3](https://github.com/dave-connors-3) ([#5680](https://github.com/dbt-labs/dbt-core/issues/5680), [#5990](https://github.com/dbt-labs/dbt-core/issues/5990), [#6625](https://github.com/dbt-labs/dbt-core/issues/6625))
- [@dbeatty10](https://github.com/dbeatty10) ([#6411](https://github.com/dbt-labs/dbt-core/issues/6411), [dbt-docs/#6368](https://github.com/dbt-labs/dbt-docs/issues/6368), [#6394](https://github.com/dbt-labs/dbt-core/issues/6394), [#6294](https://github.com/dbt-labs/dbt-core/issues/6294), [#6566](https://github.com/dbt-labs/dbt-core/issues/6566))
- [@devmessias](https://github.com/devmessias) ([#6309](https://github.com/dbt-labs/dbt-core/issues/6309))
- [@eltociear](https://github.com/eltociear) ([#4904](https://github.com/dbt-labs/dbt-core/issues/4904))
- [@eve-johns](https://github.com/eve-johns) ([#6068](https://github.com/dbt-labs/dbt-core/issues/6068))
- [@haritamar](https://github.com/haritamar) ([#6246](https://github.com/dbt-labs/dbt-core/issues/6246))
- [@jared-rimmer](https://github.com/jared-rimmer) ([#5486](https://github.com/dbt-labs/dbt-core/issues/5486))
- [@josephberni](https://github.com/josephberni) ([#5016](https://github.com/dbt-labs/dbt-core/issues/5016))
- [@joshuataylor](https://github.com/joshuataylor) ([#6147](https://github.com/dbt-labs/dbt-core/issues/6147))
- [@justbldwn](https://github.com/justbldwn) ([#6241](https://github.com/dbt-labs/dbt-core/issues/6241), [#6245](https://github.com/dbt-labs/dbt-core/issues/6245), [#6269](https://github.com/dbt-labs/dbt-core/issues/6269))
- [@luke-bassett](https://github.com/luke-bassett) ([#1350](https://github.com/dbt-labs/dbt-core/issues/1350))
- [@max-sixty](https://github.com/max-sixty) ([#5946](https://github.com/dbt-labs/dbt-core/issues/5946), [#5983](https://github.com/dbt-labs/dbt-core/issues/5983), [#6028](https://github.com/dbt-labs/dbt-core/issues/6028))
- [@mivanicova](https://github.com/mivanicova) ([#6488](https://github.com/dbt-labs/dbt-core/issues/6488))
- [@nshuman1](https://github.com/nshuman1) ([dbt-docs/#6529](https://github.com/dbt-labs/dbt-docs/issues/6529))
- [@paulbenschmidt](https://github.com/paulbenschmidt) ([dbt-docs/#5880](https://github.com/dbt-labs/dbt-docs/issues/5880))
- [@pgoslatara](https://github.com/pgoslatara) ([#5929](https://github.com/dbt-labs/dbt-core/issues/5929))
- [@racheldaniel](https://github.com/racheldaniel) ([#6201](https://github.com/dbt-labs/dbt-core/issues/6201))
- [@timle2](https://github.com/timle2) ([#4205](https://github.com/dbt-labs/dbt-core/issues/4205))
- [@tmastny](https://github.com/tmastny) ([#6216](https://github.com/dbt-labs/dbt-core/issues/6216))

## Previous Releases

For information on prior major and minor releases, see their changelogs:


* [1.4](https://github.com/dbt-labs/dbt-core/blob/1.4.latest/CHANGELOG.md)
* [1.3](https://github.com/dbt-labs/dbt-core/blob/1.3.latest/CHANGELOG.md)
* [1.2](https://github.com/dbt-labs/dbt-core/blob/1.2.latest/CHANGELOG.md)
* [1.1](https://github.com/dbt-labs/dbt-core/blob/1.1.latest/CHANGELOG.md)

@@ -142,44 +142,44 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
            )

    def set_connection_name(self, name: Optional[str] = None) -> Connection:
        """Called by 'acquire_connection' in BaseAdapter, which is called by
        'connection_named', called by 'connection_for(node)'.
        Creates a connection for this thread if one doesn't already
        exist, and will rename an existing connection."""
        conn_name: str
        if name is None:
            # if a name isn't specified, we'll re-use a single handle
            # named 'master'
            conn_name = "master"
        else:
            if not isinstance(name, str):
                raise dbt.exceptions.CompilerException(
                    f"For connection name, got {name} - not a string!"
                )
            assert isinstance(name, str)
            conn_name = name

        conn_name: str = "master" if name is None else name

        # Get a connection for this thread
        conn = self.get_if_exists()

        if conn and conn.name == conn_name and conn.state == "open":
            # Found a connection and nothing to do, so just return it
            return conn

        if conn is None:
            # Create a new connection
            conn = Connection(
                type=Identifier(self.TYPE),
                name=conn_name,
                name=None,
                state=ConnectionState.INIT,
                transaction_open=False,
                handle=None,
                credentials=self.profile.credentials,
            )
            conn.handle = LazyHandle(self.open)
            # Add the connection to thread_connections for this thread
            self.set_thread_connection(conn)
            fire_event(
                NewConnection(conn_name=conn_name, conn_type=self.TYPE, node_info=get_node_info())
            )
        else:  # existing connection either wasn't open or didn't have the right name
            if conn.state != "open":
                conn.handle = LazyHandle(self.open)
            if conn.name != conn_name:
                orig_conn_name: str = conn.name or ""
                conn.name = conn_name
                fire_event(ConnectionReused(orig_conn_name=orig_conn_name, conn_name=conn_name))

        if conn.name == conn_name and conn.state == "open":
            return conn

        fire_event(
            NewConnection(conn_name=conn_name, conn_type=self.TYPE, node_info=get_node_info())
        )

        if conn.state == "open":
            fire_event(ConnectionReused(conn_name=conn_name))
        else:
            conn.handle = LazyHandle(self.open)

        conn.name = conn_name
        return conn

    @classmethod

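Both sides of this hunk lean on LazyHandle so that a connection is not actually opened until its handle is first used. A minimal sketch of that pattern, hedged against dbt's exact implementation (names and signature are assumptions drawn from the calls visible above):

```python
from typing import Callable


class LazyHandle:
    """Defer opening a connection until its handle is first touched.

    `opener` takes a Connection, opens it, and returns it with a live
    handle -- typically BaseConnectionManager.open, as assigned in the
    `conn.handle = LazyHandle(self.open)` lines of the hunk above.
    """

    def __init__(self, opener: Callable):
        self.opener = opener

    def resolve(self, connection):
        # called by the Connection object on first handle access
        return self.opener(connection)
```
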
@@ -48,7 +48,6 @@ def print_compile_stats(stats):
        NodeType.Source: "source",
        NodeType.Exposure: "exposure",
        NodeType.Metric: "metric",
        NodeType.Entity: "entity",
    }

    results = {k: 0 for k in names.keys()}
@@ -84,8 +83,6 @@ def _generate_stats(manifest: Manifest):
        stats[exposure.resource_type] += 1
    for metric in manifest.metrics.values():
        stats[metric.resource_type] += 1
    for entity in manifest.entities.values():
        stats[entity.resource_type] += 1
    for macro in manifest.macros.values():
        stats[macro.resource_type] += 1
    return stats
@@ -93,10 +90,11 @@ def _generate_stats(manifest: Manifest):


def _add_prepended_cte(prepended_ctes, new_cte):
    for cte in prepended_ctes:
        if cte.id == new_cte.id:
        if cte.id == new_cte.id and new_cte.sql:
            cte.sql = new_cte.sql
            return
    prepended_ctes.append(new_cte)
    if new_cte.sql:
        prepended_ctes.append(new_cte)


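The new guard in `_add_prepended_cte` only overwrites or appends when the incoming CTE actually carries SQL, so a competing thread's empty placeholder can't clobber compiled SQL. A self-contained sketch of that behavior, with InjectedCTE stubbed as a plain dataclass:

```python
from dataclasses import dataclass
from typing import List, Optional


@dataclass
class InjectedCTE:  # stand-in for dbt's contract class
    id: str
    sql: Optional[str] = None


def _add_prepended_cte(prepended_ctes: List[InjectedCTE], new_cte: InjectedCTE) -> None:
    for cte in prepended_ctes:
        if cte.id == new_cte.id and new_cte.sql:
            cte.sql = new_cte.sql  # same id with real sql: update in place
            return
    if new_cte.sql:
        prepended_ctes.append(new_cte)  # only append CTEs that have sql


ctes = [InjectedCTE("model.proj.a", "select 1")]
_add_prepended_cte(ctes, InjectedCTE("model.proj.a", "select 2"))  # updated in place
_add_prepended_cte(ctes, InjectedCTE("model.proj.b", None))        # ignored: no sql yet
assert len(ctes) == 1 and ctes[0].sql == "select 2"
```
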
def _extend_prepended_ctes(prepended_ctes, new_prepended_ctes):
@@ -260,16 +258,18 @@ class Compiler:
        inserting CTEs into the SQL.
        """
        if model.compiled_code is None:
            raise DbtRuntimeError("Cannot inject ctes into an unparsed node", model)
            raise DbtRuntimeError("Cannot inject ctes into an uncompiled node", model)

        # extra_ctes_injected flag says that we've already recursively injected the ctes
        if model.extra_ctes_injected:
            return (model, model.extra_ctes)

        # Just to make it plain that nothing is actually injected for this case
        if not model.extra_ctes:
        if len(model.extra_ctes) == 0:
            # SeedNodes don't have compilation attributes
            if not isinstance(model, SeedNode):
                model.extra_ctes_injected = True
                manifest.update_node(model)
            return (model, model.extra_ctes)
            return (model, [])

        # This stores the ctes which will all be recursively
        # gathered and then "injected" into the model.
@@ -278,7 +278,8 @@ class Compiler:
        # extra_ctes are added to the model by
        # RuntimeRefResolver.create_relation, which adds an
        # extra_cte for every model relation which is an
        # ephemeral model.
        # ephemeral model. InjectedCTEs have a unique_id and sql.
        # extra_ctes start out with sql set to None, and the sql is set in this loop.
        for cte in model.extra_ctes:
            if cte.id not in manifest.nodes:
                raise DbtInternalError(
@@ -291,23 +292,23 @@ class Compiler:
            if not cte_model.is_ephemeral_model:
                raise DbtInternalError(f"{cte.id} is not ephemeral")

            # This model has already been compiled, so it's been
            # through here before
            if getattr(cte_model, "compiled", False):
            # This model has already been compiled and extra_ctes_injected, so it's been
            # through here before. We already checked above for extra_ctes_injected, but
            # checking again because updates maybe have happened in another thread.
            if cte_model.compiled is True and cte_model.extra_ctes_injected is True:
                new_prepended_ctes = cte_model.extra_ctes

            # if the cte_model isn't compiled, i.e. first time here
            else:
                # This is an ephemeral parsed model that we can compile.
                # Compile and update the node
                cte_model = self._compile_node(cte_model, manifest, extra_context)
                # recursively call this method
                # Render the raw_code and set compiled to True
                cte_model = self._compile_code(cte_model, manifest, extra_context)
                # recursively call this method, sets extra_ctes_injected to True
                cte_model, new_prepended_ctes = self._recursively_prepend_ctes(
                    cte_model, manifest, extra_context
                )
                # Save compiled SQL file and sync manifest
                # Write compiled SQL file
                self._write_node(cte_model)
                manifest.sync_update_node(cte_model)

            _extend_prepended_ctes(prepended_ctes, new_prepended_ctes)

@@ -321,20 +322,21 @@ class Compiler:
            model.compiled_code,
            prepended_ctes,
        )
        model._pre_injected_sql = model.compiled_code
        model.compiled_code = injected_sql
        model.extra_ctes_injected = True
        model.extra_ctes = prepended_ctes
        model.validate(model.to_dict(omit_none=True))
        manifest.update_node(model)
        # Check again before updating for multi-threading
        if not model.extra_ctes_injected:
            model._pre_injected_sql = model.compiled_code
            model.compiled_code = injected_sql
            model.extra_ctes = prepended_ctes
            model.extra_ctes_injected = True

        return model, prepended_ctes
        # if model.extra_ctes is not set to prepended ctes, something went wrong
        return model, model.extra_ctes

    # Sets compiled fields in the ManifestSQLNode passed in,
    # Sets compiled_code and compiled flag in the ManifestSQLNode passed in,
    # creates a "context" dictionary for jinja rendering,
    # and then renders the "compiled_code" using the node, the
    # raw_code and the context.
    def _compile_node(
    def _compile_code(
        self,
        node: ManifestSQLNode,
        manifest: Manifest,
@@ -343,16 +345,6 @@ class Compiler:
        if extra_context is None:
            extra_context = {}

        data = node.to_dict(omit_none=True)
        data.update(
            {
                "compiled": False,
                "compiled_code": None,
                "extra_ctes_injected": False,
                "extra_ctes": [],
            }
        )

        if node.language == ModelLanguage.python:
            # TODO could we also 'minify' this code at all? just aesthetic, not functional

@@ -383,6 +375,18 @@ class Compiler:

        node.compiled = True

        # relation_name is set at parse time, except for tests without store_failures,
        # but cli param can turn on store_failures, so we set here.
        if (
            node.resource_type == NodeType.Test
            and node.relation_name is None
            and node.is_relational
        ):
            adapter = get_adapter(self.config)
            relation_cls = adapter.Relation
            relation_name = str(relation_cls.create_from(self.config, node))
            node.relation_name = relation_name

        return node

    def write_graph_file(self, linker: Linker, manifest: Manifest):
@@ -401,8 +405,6 @@ class Compiler:
                linker.dependency(node.unique_id, (manifest.sources[dependency].unique_id))
            elif dependency in manifest.metrics:
                linker.dependency(node.unique_id, (manifest.metrics[dependency].unique_id))
            elif dependency in manifest.entities:
                linker.dependency(node.unique_id, (manifest.entities[dependency].unique_id))
            else:
                raise GraphDependencyNotFoundError(node, dependency)

@@ -415,8 +417,6 @@ class Compiler:
            self.link_node(linker, exposure, manifest)
        for metric in manifest.metrics.values():
            self.link_node(linker, metric, manifest)
        for entity in manifest.entities.values():
            self.link_node(linker, entity, manifest)

        cycle = linker.find_cycles()

@@ -517,11 +517,11 @@ class Compiler:
    ) -> ManifestSQLNode:
        """This is the main entry point into this code. It's called by
        CompileRunner.compile, GenericRPCRunner.compile, and
        RunTask.get_hook_sql. It calls '_compile_node' to convert
        the node into a compiled node, and then calls the
        RunTask.get_hook_sql. It calls '_compile_code' to render
        the node's raw_code into compiled_code, and then calls the
        recursive method to "prepend" the ctes.
        """
        node = self._compile_node(node, manifest, extra_context)
        node = self._compile_code(node, manifest, extra_context)

        node, _ = self._recursively_prepend_ctes(node, manifest, extra_context)
        if write:

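Taken together, the renames make the entry point a two-step pipeline. A condensed sketch of how it reads after this hunk (the `_write_node` call under `if write:` is an assumption; the hunk is cut off at that line):

```python
def compile_node(self, node, manifest, extra_context=None, write=True):
    # Step 1: render raw_code into compiled_code and set node.compiled
    node = self._compile_code(node, manifest, extra_context)
    # Step 2: recursively compile ephemeral ancestors and prepend their CTEs
    node, _ = self._recursively_prepend_ctes(node, manifest, extra_context)
    if write:
        self._write_node(node)  # assumed: persist compiled SQL to target/
    return node
```
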
@@ -12,7 +12,6 @@ from typing import (
)
from typing_extensions import Protocol, runtime_checkable

import hashlib
import os

from dbt import flags, deprecations
@@ -30,7 +29,7 @@ from dbt.graph import SelectionSpec
from dbt.helper_types import NoValue
from dbt.semver import VersionSpecifier, versions_compatible
from dbt.version import get_installed_version
from dbt.utils import MultiDict
from dbt.utils import MultiDict, md5
from dbt.node_types import NodeType
from dbt.config.selectors import SelectorDict
from dbt.contracts.project import (
@@ -381,7 +380,6 @@ class PartialProject(RenderComponents):
    sources: Dict[str, Any]
    tests: Dict[str, Any]
    metrics: Dict[str, Any]
    entities: Dict[str, Any]
    exposures: Dict[str, Any]
    vars_value: VarProvider

@@ -392,7 +390,6 @@ class PartialProject(RenderComponents):
        sources = cfg.sources
        tests = cfg.tests
        metrics = cfg.metrics
        entities = cfg.entities
        exposures = cfg.exposures
        if cfg.vars is None:
            vars_dict: Dict[str, Any] = {}
@@ -448,7 +445,6 @@ class PartialProject(RenderComponents):
            sources=sources,
            tests=tests,
            metrics=metrics,
            entities=entities,
            exposures=exposures,
            vars=vars_value,
            config_version=cfg.config_version,
@@ -553,7 +549,6 @@ class Project:
    sources: Dict[str, Any]
    tests: Dict[str, Any]
    metrics: Dict[str, Any]
    entities: Dict[str, Any]
    exposures: Dict[str, Any]
    vars: VarProvider
    dbt_version: List[VersionSpecifier]
@@ -628,7 +623,6 @@ class Project:
            "sources": self.sources,
            "tests": self.tests,
            "metrics": self.metrics,
            "entities": self.entities,
            "exposures": self.exposures,
            "vars": self.vars.to_dict(),
            "require-dbt-version": [v.to_version_string() for v in self.dbt_version],
@@ -668,7 +662,7 @@ class Project:
        return partial.render(renderer)

    def hashed_name(self):
        return hashlib.md5(self.project_name.encode("utf-8")).hexdigest()
        return md5(self.project_name)

    def get_selector(self, name: str) -> Union[SelectionSpec, bool]:
        if name not in self.selectors:

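Several call sites in this compare swap inline hashlib calls for a shared dbt.utils.md5 helper. The import plus the two usages, `md5(self.project_name)` here and `md5(s, "latin-1")` in deps/git.py further down, imply roughly this shape (a sketch; the charset parameter is inferred, not confirmed):

```python
import hashlib


def md5(string: str, charset: str = "utf-8") -> str:
    """Hex digest of `string`, encoded with `charset` before hashing."""
    return hashlib.md5(string.encode(charset)).hexdigest()
```
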
@@ -117,7 +117,6 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
            sources=project.sources,
            tests=project.tests,
            metrics=project.metrics,
            entities=project.entities,
            exposures=project.exposures,
            vars=project.vars,
            config_version=project.config_version,
@@ -313,7 +312,6 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
            "sources": self._get_config_paths(self.sources),
            "tests": self._get_config_paths(self.tests),
            "metrics": self._get_config_paths(self.metrics),
            "entities": self._get_config_paths(self.entities),
            "exposures": self._get_config_paths(self.exposures),
        }

@@ -502,7 +500,6 @@ class UnsetProfileConfig(RuntimeConfig):
            "sources": self.sources,
            "tests": self.tests,
            "metrics": self.metrics,
            "entities": self.entities,
            "exposures": self.exposures,
            "vars": self.vars.to_dict(),
            "require-dbt-version": [v.to_version_string() for v in self.dbt_version],
@@ -565,7 +562,6 @@ class UnsetProfileConfig(RuntimeConfig):
            sources=project.sources,
            tests=project.tests,
            metrics=project.metrics,
            entities=project.entities,
            exposures=project.exposures,
            vars=project.vars,
            config_version=project.config_version,

@@ -45,8 +45,6 @@ class UnrenderedConfig(ConfigSource):
            model_configs = unrendered.get("tests")
        elif resource_type == NodeType.Metric:
            model_configs = unrendered.get("metrics")
        elif resource_type == NodeType.Entity:
            model_configs = unrendered.get("entities")
        elif resource_type == NodeType.Exposure:
            model_configs = unrendered.get("exposures")
        else:
@@ -72,8 +70,6 @@ class RenderedConfig(ConfigSource):
            model_configs = self.project.tests
        elif resource_type == NodeType.Metric:
            model_configs = self.project.metrics
        elif resource_type == NodeType.Entity:
            model_configs = self.project.entities
        elif resource_type == NodeType.Exposure:
            model_configs = self.project.exposures
        else:

@@ -33,7 +33,6 @@ from dbt.contracts.graph.nodes import (
    Macro,
    Exposure,
    Metric,
    Entity,
    SeedNode,
    SourceDefinition,
    Resource,
@@ -1505,44 +1504,6 @@ def generate_parse_metrics(
    }


class EntityRefResolver(BaseResolver):
    def __call__(self, *args) -> str:
        package = None
        if len(args) == 1:
            name = args[0]
        elif len(args) == 2:
            package, name = args
        else:
            raise RefArgsError(node=self.model, args=args)
        self.validate_args(name, package)
        self.model.refs.append(list(args))
        return ""

    def validate_args(self, name, package):
        if not isinstance(name, str):
            raise ParsingError(
                f"In the entity associated with {self.model.original_file_path} "
                "the name argument to ref() must be a string"
            )


def generate_parse_entities(
    entity: Entity,
    config: RuntimeConfig,
    manifest: Manifest,
    package_name: str,
) -> Dict[str, Any]:
    project = config.load_dependencies()[package_name]
    return {
        "ref": EntityRefResolver(
            None,
            entity,
            project,
            manifest,
        ),
    }


# This class is currently used by the schema parser in order
# to limit the number of macros in the context by using
# the TestMacroNamespace

@@ -1,6 +1,5 @@
import abc
import itertools
import hashlib
from dataclasses import dataclass, field
from typing import (
    Any,
@@ -13,7 +12,7 @@ from typing import (
    Callable,
)
from dbt.exceptions import DbtInternalError
from dbt.utils import translate_aliases
from dbt.utils import translate_aliases, md5
from dbt.events.functions import fire_event
from dbt.events.types import NewConnectionOpening
from dbt.events.contextvars import get_node_info
@@ -142,7 +141,7 @@ class Credentials(ExtensibleDbtClassMixin, Replaceable, metaclass=abc.ABCMeta):
        raise NotImplementedError("unique_field not implemented for base credentials class")

    def hashed_unique_field(self) -> str:
        return hashlib.md5(self.unique_field.encode("utf-8")).hexdigest()
        return md5(self.unique_field)

    def connection_info(self, *, with_aliases: bool = False) -> Iterable[Tuple[str, Any]]:
        """Return an ordered iterator of key/value pairs for pretty-printing."""

@@ -227,7 +227,6 @@ class SchemaSourceFile(BaseSourceFile):
    sources: List[str] = field(default_factory=list)
    exposures: List[str] = field(default_factory=list)
    metrics: List[str] = field(default_factory=list)
    entities: List[str] = field(default_factory=list)
    # node patches contain models, seeds, snapshots, analyses
    ndp: List[str] = field(default_factory=list)
    # any macro patches in this file by macro unique_id.

@@ -29,7 +29,6 @@ from dbt.contracts.graph.nodes import (
    GenericTestNode,
    Exposure,
    Metric,
    Entity,
    UnpatchedSourceDefinition,
    ManifestNode,
    GraphMemberNode,
@@ -213,39 +212,6 @@ class MetricLookup(dbtClassMixin):
        return manifest.metrics[unique_id]


class EntityLookup(dbtClassMixin):
    def __init__(self, manifest: "Manifest"):
        self.storage: Dict[str, Dict[PackageName, UniqueID]] = {}
        self.populate(manifest)

    def get_unique_id(self, search_name, package: Optional[PackageName]):
        return find_unique_id_for_package(self.storage, search_name, package)

    def find(self, search_name, package: Optional[PackageName], manifest: "Manifest"):
        unique_id = self.get_unique_id(search_name, package)
        if unique_id is not None:
            return self.perform_lookup(unique_id, manifest)
        return None

    def add_entity(self, entity: Entity):
        if entity.search_name not in self.storage:
            self.storage[entity.search_name] = {}

        self.storage[entity.search_name][entity.package_name] = entity.unique_id

    def populate(self, manifest):
        for entity in manifest.entities.values():
            if hasattr(entity, "name"):
                self.add_entity(entity)

    def perform_lookup(self, unique_id: UniqueID, manifest: "Manifest") -> Entity:
        if unique_id not in manifest.entities:
            raise dbt.exceptions.DbtInternalError(
                f"Entity {unique_id} found in cache but not found in manifest"
            )
        return manifest.entities[unique_id]


# This handles both models/seeds/snapshots and sources/metrics/exposures
class DisabledLookup(dbtClassMixin):
    def __init__(self, manifest: "Manifest"):
@@ -490,9 +456,6 @@ class Disabled(Generic[D]):
MaybeMetricNode = Optional[Union[Metric, Disabled[Metric]]]


MaybeEntityNode = Optional[Union[Entity, Disabled[Entity]]]


MaybeDocumentation = Optional[Documentation]


@@ -636,7 +599,6 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
    docs: MutableMapping[str, Documentation] = field(default_factory=dict)
    exposures: MutableMapping[str, Exposure] = field(default_factory=dict)
    metrics: MutableMapping[str, Metric] = field(default_factory=dict)
    entities: MutableMapping[str, Entity] = field(default_factory=dict)
    selectors: MutableMapping[str, Any] = field(default_factory=dict)
    files: MutableMapping[str, AnySourceFile] = field(default_factory=dict)
    metadata: ManifestMetadata = field(default_factory=ManifestMetadata)
@@ -658,9 +620,6 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
    _metric_lookup: Optional[MetricLookup] = field(
        default=None, metadata={"serialize": lambda x: None, "deserialize": lambda x: None}
    )
    _entity_lookup: Optional[EntityLookup] = field(
        default=None, metadata={"serialize": lambda x: None, "deserialize": lambda x: None}
    )
    _disabled_lookup: Optional[DisabledLookup] = field(
        default=None, metadata={"serialize": lambda x: None, "deserialize": lambda x: None}
    )
@@ -687,33 +646,12 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
        obj._lock = flags.MP_CONTEXT.Lock()
        return obj

    def sync_update_node(self, new_node: ManifestNode) -> ManifestNode:
        """update the node with a lock. The only time we should want to lock is
        when compiling an ephemeral ancestor of a node at runtime, because
        multiple threads could be just-in-time compiling the same ephemeral
        dependency, and we want them to have a consistent view of the manifest.

        If the existing node is not compiled, update it with the new node and
        return that. If the existing node is compiled, do not update the
        manifest and return the existing node.
        """
        with self._lock:
            existing = self.nodes[new_node.unique_id]
            if getattr(existing, "compiled", False):
                # already compiled
                return existing
            _update_into(self.nodes, new_node)
            return new_node

    def update_exposure(self, new_exposure: Exposure):
        _update_into(self.exposures, new_exposure)

    def update_metric(self, new_metric: Metric):
        _update_into(self.metrics, new_metric)

    def update_entity(self, new_entity: Entity):
        _update_into(self.entities, new_entity)

    def update_node(self, new_node: ManifestNode):
        _update_into(self.nodes, new_node)

@@ -729,7 +667,6 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
        self.flat_graph = {
            "exposures": {k: v.to_dict(omit_none=False) for k, v in self.exposures.items()},
            "metrics": {k: v.to_dict(omit_none=False) for k, v in self.metrics.items()},
            "entities": {k: v.to_dict(omit_none=False) for k, v in self.entities.items()},
            "nodes": {k: v.to_dict(omit_none=False) for k, v in self.nodes.items()},
            "sources": {k: v.to_dict(omit_none=False) for k, v in self.sources.items()},
        }
@@ -792,7 +729,6 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
            self.nodes.values(),
            self.sources.values(),
            self.metrics.values(),
            self.entities.values(),
        )
        for resource in all_resources:
            resource_type_plural = resource.resource_type.pluralize()
@@ -821,7 +757,6 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
            docs={k: _deepcopy(v) for k, v in self.docs.items()},
            exposures={k: _deepcopy(v) for k, v in self.exposures.items()},
            metrics={k: _deepcopy(v) for k, v in self.metrics.items()},
            entities={k: _deepcopy(v) for k, v in self.entities.items()},
            selectors={k: _deepcopy(v) for k, v in self.selectors.items()},
            metadata=self.metadata,
            disabled={k: _deepcopy(v) for k, v in self.disabled.items()},
@@ -838,7 +773,6 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
                self.sources.values(),
                self.exposures.values(),
                self.metrics.values(),
                self.entities.values(),
            )
        )
        forward_edges, backward_edges = build_node_edges(edge_members)
@@ -864,7 +798,6 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
            docs=self.docs,
            exposures=self.exposures,
            metrics=self.metrics,
            entities=self.entities,
            selectors=self.selectors,
            metadata=self.metadata,
            disabled=self.disabled,
@@ -886,8 +819,6 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
            return self.exposures[unique_id]
        elif unique_id in self.metrics:
            return self.metrics[unique_id]
        elif unique_id in self.entities:
            return self.entities[unique_id]
        else:
            # something terrible has happened
            raise dbt.exceptions.DbtInternalError(
@@ -924,12 +855,6 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
            self._metric_lookup = MetricLookup(self)
        return self._metric_lookup

    @property
    def entity_lookup(self) -> EntityLookup:
        if self._entity_lookup is None:
            self._entity_lookup = EntityLookup(self)
        return self._entity_lookup

    def rebuild_ref_lookup(self):
        self._ref_lookup = RefableLookup(self)

@@ -1030,31 +955,6 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
            return Disabled(disabled[0])
        return None

    def resolve_entity(
        self,
        target_entity_name: str,
        target_entity_package: Optional[str],
        current_project: str,
        node_package: str,
    ) -> MaybeEntityNode:

        entity: Optional[Entity] = None
        disabled: Optional[List[Entity]] = None

        candidates = _search_packages(current_project, node_package, target_entity_package)
        for pkg in candidates:
            entity = self.entity_lookup.find(target_entity_name, pkg, self)

            if entity is not None and entity.config.enabled:
                return entity

            # it's possible that the node is disabled
            if disabled is None:
                disabled = self.disabled_lookup.find(f"{target_entity_name}", pkg)
        if disabled:
            return Disabled(disabled[0])
        return None

    # Called by DocsRuntimeContext.doc
    def resolve_doc(
        self,
@@ -1165,11 +1065,6 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
        self.metrics[metric.unique_id] = metric
        source_file.metrics.append(metric.unique_id)

    def add_entity(self, source_file: SchemaSourceFile, entity: Entity):
        _check_duplicates(entity, self.entities)
        self.entities[entity.unique_id] = entity
        source_file.entities.append(entity.unique_id)

    def add_disabled_nofile(self, node: GraphMemberNode):
        # There can be multiple disabled nodes for the same unique_id
        if node.unique_id in self.disabled:
@@ -1185,8 +1080,6 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
            source_file.add_test(node.unique_id, test_from)
        if isinstance(node, Metric):
            source_file.metrics.append(node.unique_id)
        if isinstance(node, Entity):
            source_file.entities.append(node.unique_id)
        if isinstance(node, Exposure):
            source_file.exposures.append(node.unique_id)
        else:
@@ -1214,7 +1107,6 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
            self.docs,
            self.exposures,
            self.metrics,
            self.entities,
            self.selectors,
            self.files,
            self.metadata,
@@ -1227,7 +1119,6 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
            self._source_lookup,
            self._ref_lookup,
            self._metric_lookup,
            self._entity_lookup,
            self._disabled_lookup,
            self._analysis_lookup,
        )
@@ -1269,13 +1160,10 @@ class WritableManifest(ArtifactMixin):
    metrics: Mapping[UniqueID, Metric] = field(
        metadata=dict(description=("The metrics defined in the dbt project and its dependencies"))
    )
    entities: Mapping[UniqueID, Entity] = field(
        metadata=dict(description=("The entities defined in the dbt project and its dependencies"))
    )
    selectors: Mapping[UniqueID, Any] = field(
        metadata=dict(description=("The selectors defined in selectors.yml"))
    )
    disabled: Optional[Mapping[UniqueID, List[ResultNode]]] = field(
    disabled: Optional[Mapping[UniqueID, List[GraphMemberNode]]] = field(
        metadata=dict(description="A mapping of the disabled nodes in the target")
    )
    parent_map: Optional[NodeEdgeMap] = field(
@@ -1296,8 +1184,7 @@ class WritableManifest(ArtifactMixin):

    @classmethod
    def compatible_previous_versions(self):
        # return [("manifest", 4), ("manifest", 5), ("manifest", 6), ("manifest", 7)]
        return []
        return [("manifest", 4), ("manifest", 5), ("manifest", 6), ("manifest", 7)]

    def __post_serialize__(self, dct):
        for unique_id, node in dct["nodes"].items():

@@ -368,11 +368,6 @@ class MetricConfig(BaseConfig):
    enabled: bool = True


@dataclass
class EntityConfig(BaseConfig):
    enabled: bool = True


@dataclass
class ExposureConfig(BaseConfig):
    enabled: bool = True
@@ -609,7 +604,6 @@ class SnapshotConfig(EmptySnapshotConfig):

RESOURCE_TYPES: Dict[NodeType, Type[BaseConfig]] = {
    NodeType.Metric: MetricConfig,
    NodeType.Entity: EntityConfig,
    NodeType.Exposure: ExposureConfig,
    NodeType.Source: SourceConfig,
    NodeType.Seed: SeedConfig,

@@ -37,6 +37,7 @@ from dbt.contracts.graph.unparsed import (
from dbt.contracts.util import Replaceable, AdditionalPropertiesMixin
from dbt.events.proto_types import NodeInfo
from dbt.events.functions import warn_or_error
from dbt.exceptions import ParsingError
from dbt.events.types import (
    SeedIncreased,
    SeedExceedsLimitSamePath,
@@ -55,7 +56,6 @@ from .model_config import (
    TestConfig,
    SourceConfig,
    MetricConfig,
    EntityConfig,
    ExposureConfig,
    EmptySnapshotConfig,
    SnapshotConfig,
@@ -273,7 +273,7 @@ class ParsedNode(NodeInfoMixin, ParsedNodeMandatory, SerializableType):
    @classmethod
    def _deserialize(cls, dct: Dict[str, int]):
        # The serialized ParsedNodes do not differ from each other
        # in fields that would allow 'from_dict' to distinguish
        # in fields that would allow 'from_dict' to distinguis
        # between them.
        resource_type = dct["resource_type"]
        if resource_type == "model":
@@ -393,7 +393,6 @@ class CompiledNode(ParsedNode):
    refs: List[List[str]] = field(default_factory=list)
    sources: List[List[str]] = field(default_factory=list)
    metrics: List[List[str]] = field(default_factory=list)
    entities: List[List[str]] = field(default_factory=list)
    depends_on: DependsOn = field(default_factory=DependsOn)
    compiled_path: Optional[str] = None
    compiled: bool = False
@@ -411,8 +410,10 @@ class CompiledNode(ParsedNode):
        do if extra_ctes were an OrderedDict
        """
        for cte in self.extra_ctes:
            # Because it's possible that multiple threads are compiling the
            # node at the same time, we don't want to overwrite already compiled
            # sql in the extra_ctes with empty sql.
            if cte.id == cte_id:
                cte.sql = sql
                break
        else:
            self.extra_ctes.append(InjectedCTE(id=cte_id, sql=sql))
@@ -484,6 +485,7 @@ class SeedNode(ParsedNode):  # No SQLDefaults!
    # seeds need the root_path because the contents are not loaded initially
    # and we need the root_path to load the seed later
    root_path: Optional[str] = None
    depends_on: MacroDependsOn = field(default_factory=MacroDependsOn)

    def same_seeds(self, other: "SeedNode") -> bool:
        # for seeds, we check the hashes. If the hashes are different types,
@@ -525,6 +527,39 @@ class SeedNode(ParsedNode):  # No SQLDefaults!
        """Seeds are never empty"""
        return False

    def _disallow_implicit_dependencies(self):
        """Disallow seeds to take implicit upstream dependencies via pre/post hooks"""
        # Seeds are root nodes in the DAG. They cannot depend on other nodes.
        # However, it's possible to define pre- and post-hooks on seeds, and for those
        # hooks to include {{ ref(...) }}. This worked in previous versions, but it
        # was never officially documented or supported behavior. Let's raise an explicit error,
        # which will surface during parsing if the user has written code such that we attempt
        # to capture & record a ref/source/metric call on the SeedNode.
        # For more details: https://github.com/dbt-labs/dbt-core/issues/6806
        hooks = [f'- pre_hook: "{hook.sql}"' for hook in self.config.pre_hook] + [
            f'- post_hook: "{hook.sql}"' for hook in self.config.post_hook
        ]
        hook_list = "\n".join(hooks)
        message = f"""
Seeds cannot depend on other nodes. dbt detected a seed with a pre- or post-hook
that calls 'ref', 'source', or 'metric', either directly or indirectly via other macros.

Error raised for '{self.unique_id}', which has these hooks defined: \n{hook_list}
"""
        raise ParsingError(message)

    @property
    def refs(self):
        self._disallow_implicit_dependencies()

    @property
    def sources(self):
        self._disallow_implicit_dependencies()

    @property
    def metrics(self):
        self._disallow_implicit_dependencies()

    def same_body(self, other) -> bool:
        return self.same_seeds(other)

@@ -533,8 +568,8 @@ class SeedNode(ParsedNode):  # No SQLDefaults!
        return []

    @property
    def depends_on_macros(self):
        return []
    def depends_on_macros(self) -> List[str]:
        return self.depends_on.macros

    @property
    def extra_ctes(self):
@@ -908,7 +943,6 @@ class Exposure(GraphNode):
    refs: List[List[str]] = field(default_factory=list)
    sources: List[List[str]] = field(default_factory=list)
    metrics: List[List[str]] = field(default_factory=list)
    entities: List[List[str]] = field(default_factory=list)
    created_at: float = field(default_factory=lambda: time.time())

    @property
@@ -1000,7 +1034,6 @@ class Metric(GraphNode):
    depends_on: DependsOn = field(default_factory=DependsOn)
    refs: List[List[str]] = field(default_factory=list)
    metrics: List[List[str]] = field(default_factory=list)
    entities: List[List[str]] = field(default_factory=list)
    created_at: float = field(default_factory=lambda: time.time())

    @property
@@ -1069,63 +1102,6 @@ class Metric(GraphNode):
    )


@dataclass
class Entity(GraphNode):
    name: str
    model: str
    description: str
    dimensions: List[str]
    resource_type: NodeType = field(metadata={"restrict": [NodeType.Entity]})
    model_unique_id: Optional[str] = None
    meta: Dict[str, Any] = field(default_factory=dict)
    tags: List[str] = field(default_factory=list)
    config: EntityConfig = field(default_factory=EntityConfig)
    unrendered_config: Dict[str, Any] = field(default_factory=dict)
    sources: List[List[str]] = field(default_factory=list)
    depends_on: DependsOn = field(default_factory=DependsOn)
    refs: List[List[str]] = field(default_factory=list)
    entities: List[List[str]] = field(default_factory=list)
    metrics: List[List[str]] = field(default_factory=list)
    created_at: float = field(default_factory=lambda: time.time())

    @property
    def depends_on_nodes(self):
        return self.depends_on.nodes

    @property
    def search_name(self):
        return self.name

    def same_model(self, old: "Entity") -> bool:
        return self.model == old.model

    def same_dimensions(self, old: "Entity") -> bool:
        return self.dimensions == old.dimensions

    def same_description(self, old: "Entity") -> bool:
        return self.description == old.description

    def same_config(self, old: "Entity") -> bool:
        return self.config.same_contents(
            self.unrendered_config,
            old.unrendered_config,
        )

    def same_contents(self, old: Optional["Entity"]) -> bool:
        # existing when it didn't before is a change!
        # metadata/tags changes are not "changes"
        if old is None:
            return True

        return (
            self.same_model(old)
            and self.same_dimensions(old)
            and self.same_description(old)
            and self.same_config(old)
            and True
        )


# ====================================
# Patches
# ====================================
@@ -1187,7 +1163,6 @@ GraphMemberNode = Union[
    ResultNode,
    Exposure,
    Metric,
    Entity,
]

# All "nodes" (or node-like objects) in this file

@@ -533,21 +533,3 @@ class UnparsedMetric(dbtClassMixin, Replaceable):

        if data.get("model") is not None and data.get("calculation_method") == "derived":
            raise ValidationError("Derived metrics cannot have a 'model' property")


@dataclass
class UnparsedEntity(dbtClassMixin, Replaceable):
    """This class is used for entity information"""

    name: str
    model: str
    description: str = ""
    dimensions: List[str] = field(default_factory=list)
    meta: Dict[str, Any] = field(default_factory=dict)
    tags: List[str] = field(default_factory=list)
    config: Dict[str, Any] = field(default_factory=dict)

    @classmethod
    def validate(cls, data):
        super(UnparsedEntity, cls).validate(data)
        # TODO: Add validation here around include/exclude and others

@@ -214,7 +214,6 @@ class Project(HyphenatedDbtClassMixin, Replaceable):
    sources: Dict[str, Any] = field(default_factory=dict)
    tests: Dict[str, Any] = field(default_factory=dict)
    metrics: Dict[str, Any] = field(default_factory=dict)
    entities: Dict[str, Any] = field(default_factory=dict)
    exposures: Dict[str, Any] = field(default_factory=dict)
    vars: Optional[Dict[str, Any]] = field(
        default=None,

@@ -13,7 +13,7 @@ from dbt.events.types import TimingInfoCollected
from dbt.events.proto_types import RunResultMsg, TimingInfoMsg
from dbt.events.contextvars import get_node_info
from dbt.logger import TimingProcessor
from dbt.utils import lowercase, cast_to_str, cast_to_int
from dbt.utils import lowercase, cast_to_str, cast_to_int, cast_dict_to_dict_of_strings
from dbt.dataclass_schema import dbtClassMixin, StrEnum

import agate
@@ -130,7 +130,6 @@ class BaseResult(dbtClassMixin):
        return data

    def to_msg(self):
        # TODO: add more fields
        msg = RunResultMsg()
        msg.status = str(self.status)
        msg.message = cast_to_str(self.message)
@@ -138,7 +137,7 @@ class BaseResult(dbtClassMixin):
        msg.execution_time = self.execution_time
        msg.num_failures = cast_to_int(self.failures)
        msg.timing_info = [ti.to_msg() for ti in self.timing]
        # adapter_response
        msg.adapter_response = cast_dict_to_dict_of_strings(self.adapter_response)
        return msg

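The newly imported cast_dict_to_dict_of_strings exists because a protobuf `map<string, string>` field cannot hold the arbitrary values an adapter_response dict may contain. A plausible sketch of that utility (its exact body is an assumption):

```python
from typing import Any, Dict


def cast_dict_to_dict_of_strings(dct: Dict[Any, Any]) -> Dict[str, str]:
    # protobuf map<string, string> fields need both keys and values as str
    return {str(k): str(v) for k, v in dct.items()}
```
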
@@ -250,7 +250,6 @@ def upgrade_seed_content(node_content):
        "refs",
        "sources",
        "metrics",
        "depends_on",
        "compiled_path",
        "compiled",
        "compiled_code",
@@ -260,6 +259,8 @@ def upgrade_seed_content(node_content):
    ):
        if attr_name in node_content:
            del node_content[attr_name]
    # In v1.4, we switched SeedNode.depends_on from DependsOn to MacroDependsOn
    node_content.get("depends_on", {}).pop("nodes", None)


def upgrade_manifest_json(manifest: dict) -> dict:
@@ -283,7 +284,7 @@ def upgrade_manifest_json(manifest: dict) -> dict:
        if "root_path" in exposure_content:
            del exposure_content["root_path"]
    for source_content in manifest.get("sources", {}).values():
        if "root_path" in exposure_content:
        if "root_path" in source_content:
            del source_content["root_path"]
    for macro_content in manifest.get("macros", {}).values():
        if "root_path" in macro_content:

@@ -1,5 +1,4 @@
import os
import hashlib
from typing import List, Optional

from dbt.clients import git, system
@@ -12,10 +11,11 @@ from dbt.deps.base import PinnedPackage, UnpinnedPackage, get_downloads_path
from dbt.exceptions import ExecutableError, MultipleVersionGitDepsError
from dbt.events.functions import fire_event, warn_or_error
from dbt.events.types import EnsureGitInstalled, DepsUnpinned
from dbt.utils import md5


def md5sum(s: str):
    return hashlib.md5(s.encode("latin-1")).hexdigest()
    return md5(s, "latin-1")


class GitPackageMixin:

@@ -88,41 +88,40 @@ class _Logger:
        self.level: EventLevel = config.level
        self.event_manager: EventManager = event_manager
        self._python_logger: Optional[logging.Logger] = config.logger
        self._stream: Optional[TextIO] = config.output_stream

        if config.output_stream is not None:
            stream_handler = logging.StreamHandler(config.output_stream)
            self._python_logger = self._get_python_log_for_handler(stream_handler)

        if config.output_file_name:
            log = logging.getLogger(config.name)
            log.setLevel(_log_level_map[config.level])
            handler = RotatingFileHandler(
            file_handler = RotatingFileHandler(
                filename=str(config.output_file_name),
                encoding="utf8",
                maxBytes=10 * 1024 * 1024,  # 10 mb
                backupCount=5,
            )
            self._python_logger = self._get_python_log_for_handler(file_handler)

            handler.setFormatter(logging.Formatter(fmt="%(message)s"))
            log.handlers.clear()
            log.addHandler(handler)

            self._python_logger = log
    def _get_python_log_for_handler(self, handler: logging.Handler):
        log = logging.getLogger(self.name)
        log.setLevel(_log_level_map[self.level])
        handler.setFormatter(logging.Formatter(fmt="%(message)s"))
        log.handlers.clear()
        log.addHandler(handler)
        return log

    def create_line(self, msg: EventMsg) -> str:
        raise NotImplementedError()

    def write_line(self, msg: EventMsg):
        line = self.create_line(msg)
        python_level = _log_level_map[EventLevel(msg.info.level)]
        if self._python_logger is not None:
            send_to_logger(self._python_logger, msg.info.level, line)
        elif self._stream is not None and _log_level_map[self.level] <= python_level:
            self._stream.write(line + "\n")

    def flush(self):
        if self._python_logger is not None:
            for handler in self._python_logger.handlers:
                handler.flush()
        elif self._stream is not None:
            self._stream.flush()


class _TextLogger(_Logger):

@@ -18,6 +18,14 @@ LOG_VERSION = 3
metadata_vars: Optional[Dict[str, str]] = None


# The "fallback" logger is used as a stop-gap so that console logging works before the logging
# configuration is fully loaded.
def setup_fallback_logger(use_legacy: bool, level: EventLevel) -> None:
    cleanup_event_logger()
    config = _get_logbook_log_config(level) if use_legacy else _get_stdout_config(level)
    EVENT_MANAGER.add_logger(config)


def setup_event_logger(log_path: str, level_override: Optional[EventLevel] = None):
    cleanup_event_logger()
    make_log_dir_if_missing(log_path)
@@ -94,6 +102,7 @@ def _get_logbook_log_config(level: Optional[EventLevel] = None) -> LoggerConfig:
    config.name = "logbook_log"
    config.filter = NoFilter if flags.LOG_CACHE_EVENTS else lambda e: not isinstance(e.data, Cache)
    config.logger = GLOBAL_LOGGER
    config.output_stream = None
    return config


@@ -113,9 +122,7 @@ def cleanup_event_logger():
# currently fire before logs can be configured by setup_event_logger(), we
# create a default configuration with default settings and no file output.
EVENT_MANAGER: EventManager = EventManager()
EVENT_MANAGER.add_logger(
    _get_logbook_log_config() if flags.ENABLE_LEGACY_LOGGER else _get_stdout_config()
)
setup_fallback_logger(bool(flags.ENABLE_LEGACY_LOGGER), EventLevel.INFO)


# This global, and the following two functions for capturing stdout logs are

@@ -531,7 +531,6 @@ class ConnectionReused(betterproto.Message):
    """E006"""

    conn_name: str = betterproto.string_field(1)
    orig_conn_name: str = betterproto.string_field(2)


@dataclass
@@ -1055,6 +1054,23 @@ class UnableToPartialParseMsg(betterproto.Message):
    data: "UnableToPartialParse" = betterproto.message_field(2)


@dataclass
class StateCheckVarsHash(betterproto.Message):
    """I025"""

    checksum: str = betterproto.string_field(1)
    vars: str = betterproto.string_field(2)
    profile: str = betterproto.string_field(3)
    target: str = betterproto.string_field(4)
    version: str = betterproto.string_field(5)


@dataclass
class StateCheckVarsHashMsg(betterproto.Message):
    info: "EventInfo" = betterproto.message_field(1)
    data: "StateCheckVarsHash" = betterproto.message_field(2)


@dataclass
class PartialParsingNotEnabled(betterproto.Message):
    """I028"""
@@ -2847,6 +2863,19 @@ class RunResultWarningMessageMsg(betterproto.Message):
    data: "RunResultWarningMessage" = betterproto.message_field(2)


@dataclass
class Note(betterproto.Message):
    """Z050"""

    msg: str = betterproto.string_field(1)


@dataclass
class NoteMsg(betterproto.Message):
    info: "EventInfo" = betterproto.message_field(1)
    data: "Note" = betterproto.message_field(2)


@dataclass
class IntegrationTestInfo(betterproto.Message):
    """T001"""
@@ -419,7 +419,6 @@ message NewConnectionMsg {
// E006
message ConnectionReused {
  string conn_name = 1;
  string orig_conn_name = 2;
}

message ConnectionReusedMsg {
@@ -839,7 +838,21 @@ message UnableToPartialParseMsg {
  UnableToPartialParse data = 2;
}

// Skipped I025, I026, I027
// I025
message StateCheckVarsHash {
  string checksum = 1;
  string vars = 2;
  string profile = 3;
  string target = 4;
  string version = 5;
}

message StateCheckVarsHashMsg {
  EventInfo info = 1;
  StateCheckVarsHash data = 2;
}

// Skipped I026, I027


// I028
@@ -2258,6 +2271,16 @@ message RunResultWarningMessageMsg {
  RunResultWarningMessage data = 2;
}

// Z050
message Note {
  string msg = 1;
}

message NoteMsg {
  EventInfo info = 1;
  Note data = 2;
}

// T - Integration tests

// T001
@@ -449,7 +449,7 @@ class ConnectionReused(DebugLevel, pt.ConnectionReused):
        return "E006"

    def message(self) -> str:
        return f"Re-using an available connection from the pool (formerly {self.orig_conn_name}, now {self.conn_name})"
        return f"Re-using an available connection from the pool (formerly {self.conn_name})"


@dataclass
@@ -843,6 +843,15 @@ class UnableToPartialParse(InfoLevel, pt.UnableToPartialParse):
        return f"Unable to do partial parsing because {self.reason}"


@dataclass
class StateCheckVarsHash(DebugLevel, pt.StateCheckVarsHash):
    def code(self):
        return "I025"

    def message(self) -> str:
        return f"checksum: {self.checksum}, vars: {self.vars}, profile: {self.profile}, target: {self.target}, version: {self.version}"


# Skipped I025, I026, I027
# Skipped I026, I027


@@ -2345,3 +2354,14 @@ class RunResultWarningMessage(WarnLevel, EventStringFunctor, pt.RunResultWarningMessage):
    def message(self) -> str:
        # This is the message on the result object, cannot be formatted in event
        return self.msg


# The Note event provides a way to log messages which aren't likely to be useful as more structured events.
# For console formatting text like empty lines and separator bars, use the Formatting event instead.
@dataclass
class Note(InfoLevel, pt.Note):
    def code(self):
        return "Z050"

    def message(self) -> str:
        return self.msg
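
Usage of the new Note event, mirroring how it is fired later in this diff (manifest.py); the message text is illustrative.

    from dbt.events.base_types import EventLevel
    from dbt.events.functions import fire_event
    from dbt.events.types import Note

    # Fire an unstructured message at DEBUG; without level= the event logs at
    # its own level (INFO for Note).
    fire_event(Note(msg="previous checksum: abc123, current checksum: def456"), level=EventLevel.DEBUG)
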
@@ -20,7 +20,7 @@ from .selector_spec import (

INTERSECTION_DELIMITER = ","

DEFAULT_INCLUDES: List[str] = ["fqn:*", "source:*", "exposure:*", "metric:*", "entity:*"]
DEFAULT_INCLUDES: List[str] = ["fqn:*", "source:*", "exposure:*", "metric:*"]
DEFAULT_EXCLUDES: List[str] = []


@@ -9,7 +9,6 @@ from dbt.contracts.graph.nodes import (
    SourceDefinition,
    Exposure,
    Metric,
    Entity,
    GraphMemberNode,
)
from dbt.contracts.graph.manifest import Manifest
@@ -52,8 +51,8 @@ class GraphQueue:
        node = self.manifest.expect(node_id)
        if node.resource_type != NodeType.Model:
            return False
        # must be a Model - tell mypy this won't be a Source or Exposure or Metric or Entity
        assert not isinstance(node, (SourceDefinition, Exposure, Metric, Entity))
        # must be a Model - tell mypy this won't be a Source or Exposure or Metric
        assert not isinstance(node, (SourceDefinition, Exposure, Metric))
        if node.is_ephemeral:
            return False
        return True

@@ -163,9 +163,6 @@ class NodeSelector(MethodManager):
        elif unique_id in self.manifest.metrics:
            metric = self.manifest.metrics[unique_id]
            return metric.config.enabled
        elif unique_id in self.manifest.entities:
            entity = self.manifest.entities[unique_id]
            return entity.config.enabled
        node = self.manifest.nodes[unique_id]
        return not node.empty and node.config.enabled

@@ -185,8 +182,6 @@ class NodeSelector(MethodManager):
            node = self.manifest.exposures[unique_id]
        elif unique_id in self.manifest.metrics:
            node = self.manifest.metrics[unique_id]
        elif unique_id in self.manifest.entities:
            node = self.manifest.entities[unique_id]
        else:
            raise DbtInternalError(f"Node {unique_id} not found in the manifest!")
        return self.node_is_match(node)
@@ -12,7 +12,6 @@ from dbt.contracts.graph.nodes import (
    SingularTestNode,
    Exposure,
    Metric,
    Entity,
    GenericTestNode,
    SourceDefinition,
    ResultNode,
@@ -44,7 +43,6 @@ class MethodName(StrEnum):
    State = "state"
    Exposure = "exposure"
    Metric = "metric"
    Entity = "entity"
    Result = "result"
    SourceStatus = "source_status"

@@ -73,7 +71,7 @@ def is_selected_node(fqn: List[str], node_selector: str):
    return True


SelectorTarget = Union[SourceDefinition, ManifestNode, Exposure, Metric, Entity]
SelectorTarget = Union[SourceDefinition, ManifestNode, Exposure, Metric]


class SelectorMethod(metaclass=abc.ABCMeta):
@@ -120,14 +118,6 @@ class SelectorMethod(metaclass=abc.ABCMeta):
                continue
            yield unique_id, metric

    def entity_nodes(self, included_nodes: Set[UniqueId]) -> Iterator[Tuple[UniqueId, Entity]]:

        for key, metric in self.manifest.entities.items():
            unique_id = UniqueId(key)
            if unique_id not in included_nodes:
                continue
            yield unique_id, metric

    def all_nodes(
        self, included_nodes: Set[UniqueId]
    ) -> Iterator[Tuple[UniqueId, SelectorTarget]]:
@@ -136,7 +126,6 @@ class SelectorMethod(metaclass=abc.ABCMeta):
            self.source_nodes(included_nodes),
            self.exposure_nodes(included_nodes),
            self.metric_nodes(included_nodes),
            self.entity_nodes(included_nodes),
        )

    def configurable_nodes(
@@ -147,12 +136,11 @@ class SelectorMethod(metaclass=abc.ABCMeta):
    def non_source_nodes(
        self,
        included_nodes: Set[UniqueId],
    ) -> Iterator[Tuple[UniqueId, Union[Exposure, ManifestNode, Metric, Entity]]]:
    ) -> Iterator[Tuple[UniqueId, Union[Exposure, ManifestNode, Metric]]]:
        yield from chain(
            self.parsed_nodes(included_nodes),
            self.exposure_nodes(included_nodes),
            self.metric_nodes(included_nodes),
            self.entity_nodes(included_nodes),
        )

    @abc.abstractmethod
@@ -282,33 +270,6 @@ class MetricSelectorMethod(SelectorMethod):
            yield node


class EntitySelectorMethod(SelectorMethod):
    """TODO: Add a description of what this selector method is doing"""

    def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[UniqueId]:
        parts = selector.split(".")
        target_package = SELECTOR_GLOB
        if len(parts) == 1:
            target_name = parts[0]
        elif len(parts) == 2:
            target_package, target_name = parts
        else:
            msg = (
                'Invalid entity selector value "{}". Entities must be of '
                "the form ${{entity_name}} or "
                "${{entity_package.entity_name}}"
            ).format(selector)
            raise DbtRuntimeError(msg)

        for node, real_node in self.entity_nodes(included_nodes):
            if target_package not in (real_node.package_name, SELECTOR_GLOB):
                continue
            if target_name not in (real_node.name, SELECTOR_GLOB):
                continue

            yield node


class PathSelectorMethod(SelectorMethod):
    def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[UniqueId]:
        """Yields nodes from included that match the given path."""
@@ -569,8 +530,6 @@ class StateSelectorMethod(SelectorMethod):
            previous_node = manifest.exposures[node]
        elif node in manifest.metrics:
            previous_node = manifest.metrics[node]
        elif node in manifest.entities:
            previous_node = manifest.entities[node]

        if checker(previous_node, real_node):
            yield node
@@ -657,7 +616,6 @@ class MethodManager:
        MethodName.State: StateSelectorMethod,
        MethodName.Exposure: ExposureSelectorMethod,
        MethodName.Metric: MetricSelectorMethod,
        MethodName.Entity: EntitySelectorMethod,
        MethodName.Result: ResultSelectorMethod,
        MethodName.SourceStatus: SourceStatusSelectorMethod,
    }
@@ -1,8 +1,10 @@
{% macro get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}
{% macro get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}
  -- back compat for old kwarg name
  {% set incremental_predicates = kwargs.get('predicates', incremental_predicates) %}
  {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}
{%- endmacro %}

{% macro default__get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}
{% macro default__get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}
    {%- set predicates = [] if incremental_predicates is none else [] + incremental_predicates -%}
    {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute="name")) -%}
    {%- set merge_update_columns = config.get('merge_update_columns') -%}
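
A Python analogue of the back-compat shim added above, for readers less familiar with Jinja: a caller still passing the old kwarg name ("predicates") wins over the default. Names here are illustrative.

    def get_merge_sql(target, source, unique_key, dest_columns,
                      incremental_predicates=None, **kwargs):
        # honor the legacy kwarg name if a caller still uses it
        incremental_predicates = kwargs.get("predicates", incremental_predicates)
        # normalize to a fresh list, as the macro does with [] + incremental_predicates
        predicates = [] if incremental_predicates is None else list(incremental_predicates)
        return predicates  # the real macro goes on to render the MERGE statement
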
@@ -1,14 +1,22 @@
{% macro resolve_model_name(input_model_name) %}
  {{ return(adapter.dispatch('resolve_model_name', 'dbt')(input_model_name)) }}
{% endmacro %}

{%- macro default__resolve_model_name(input_model_name) -%}
  {{ input_model_name | string | replace('"', '\"') }}
{%- endmacro -%}

{% macro build_ref_function(model) %}

  {%- set ref_dict = {} -%}
  {%- for _ref in model.refs -%}
    {%- set resolved = ref(*_ref) -%}
    {%- do ref_dict.update({_ref | join("."): resolved.quote(database=False, schema=False, identifier=False) | string}) -%}
    {%- do ref_dict.update({_ref | join('.'): resolve_model_name(resolved)}) -%}
  {%- endfor -%}

def ref(*args, dbt_load_df_function):
    refs = {{ ref_dict | tojson }}
    key = ".".join(args)
    key = '.'.join(args)
    return dbt_load_df_function(refs[key])

{% endmacro %}
@@ -18,12 +26,12 @@ def ref(*args, dbt_load_df_function):
  {%- set source_dict = {} -%}
  {%- for _source in model.sources -%}
    {%- set resolved = source(*_source) -%}
    {%- do source_dict.update({_source | join("."): resolved.quote(database=False, schema=False, identifier=False) | string}) -%}
    {%- do source_dict.update({_source | join('.'): resolve_model_name(resolved)}) -%}
  {%- endfor -%}

def source(*args, dbt_load_df_function):
    sources = {{ source_dict | tojson }}
    key = ".".join(args)
    key = '.'.join(args)
    return dbt_load_df_function(sources[key])

{% endmacro %}
@@ -33,8 +41,8 @@ def source(*args, dbt_load_df_function):
  {% set config_dbt_used = zip(model.config.config_keys_used, model.config.config_keys_defaults) | list %}
  {%- for key, default in config_dbt_used -%}
    {# weird type testing with enum, would be much easier to write this logic in Python! #}
    {%- if key == 'language' -%}
      {%- set value = 'python' -%}
    {%- if key == "language" -%}
      {%- set value = "python" -%}
    {%- endif -%}
    {%- set value = model.config.get(key, default) -%}
    {%- do config_dict.update({key: value}) -%}
@@ -62,11 +70,12 @@ class config:

class this:
    """dbt.this() or dbt.this.identifier"""
    database = '{{ this.database }}'
    schema = '{{ this.schema }}'
    identifier = '{{ this.identifier }}'
    database = "{{ this.database }}"
    schema = "{{ this.schema }}"
    identifier = "{{ this.identifier }}"
    {% set this_relation_name = resolve_model_name(this) %}
    def __repr__(self):
        return '{{ this }}'
        return '{{ this_relation_name }}'


class dbtObj:
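
For reference, this is the shape of the Python helper that build_ref_function renders into a compiled Python model; the dictionary contents are illustrative.

    def ref(*args, dbt_load_df_function):
        refs = {"my_model": "my_schema.my_model"}  # rendered from {{ ref_dict | tojson }}
        key = ".".join(args)
        return dbt_load_df_function(refs[key])

    # Inside a Python model this is reached via: df = dbt.ref("my_model")
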
File diff suppressed because one or more lines are too long
@@ -11,8 +11,9 @@ from contextlib import contextmanager
from pathlib import Path

import dbt.version
from dbt.events.functions import fire_event, setup_event_logger, LOG_VERSION
from dbt.events.functions import fire_event, setup_event_logger, setup_fallback_logger, LOG_VERSION
from dbt.events.types import (
    EventLevel,
    MainEncounteredError,
    MainKeyboardInterrupt,
    MainReportVersion,
@@ -178,6 +179,13 @@ def handle_and_check(args):
    # Set flags from args, user config, and env vars
    user_config = read_user_config(flags.PROFILES_DIR)  # This is read again later
    flags.set_from_args(parsed, user_config)

    # If the user has asked to suppress non-error logging on the cli, we want to respect that as soon as possible,
    # so that any non-error logging done before full log config is loaded and ready is filtered accordingly.
    setup_fallback_logger(
        bool(flags.ENABLE_LEGACY_LOGGER), EventLevel.ERROR if flags.QUIET else EventLevel.INFO
    )

    dbt.tracking.initialize_from_flags()
    # Set log_format from flags
    parsed.cls.set_log_format()
@@ -486,7 +494,7 @@ def _build_snapshot_subparser(subparsers, base_subparser):
    return sub


def _add_defer_argument(*subparsers):
def _add_defer_arguments(*subparsers):
    for sub in subparsers:
        sub.add_optional_argument_inverse(
            "--defer",
@@ -499,10 +507,6 @@ def _add_defer_argument(*subparsers):
            """,
            default=flags.DEFER_MODE,
        )


def _add_favor_state_argument(*subparsers):
    for sub in subparsers:
        sub.add_optional_argument_inverse(
            "--favor-state",
            enable_help="""
@@ -580,7 +584,7 @@ def _build_docs_generate_subparser(subparsers, base_subparser):
            Do not run "dbt compile" as part of docs generation
            """,
    )
    _add_defer_argument(generate_sub)
    _add_defer_arguments(generate_sub)
    return generate_sub


@@ -1192,9 +1196,7 @@ def parse_args(args, cls=DBTArgumentParser):
    # list_sub sets up its own arguments.
    _add_selection_arguments(run_sub, compile_sub, generate_sub, test_sub, snapshot_sub, seed_sub)
    # --defer
    _add_defer_argument(run_sub, test_sub, build_sub, snapshot_sub, compile_sub)
    # --favor-state
    _add_favor_state_argument(run_sub, test_sub, build_sub, snapshot_sub)
    _add_defer_arguments(run_sub, test_sub, build_sub, snapshot_sub, compile_sub)
    # --full-refresh
    _add_table_mutability_arguments(run_sub, compile_sub, build_sub)
@@ -18,7 +18,6 @@ class NodeType(StrEnum):
    Macro = "macro"
    Exposure = "exposure"
    Metric = "metric"
    Entity = "entity"

    @classmethod
    def executable(cls) -> List["NodeType"]:
@@ -53,14 +52,11 @@ class NodeType(StrEnum):
            cls.Analysis,
            cls.Exposure,
            cls.Metric,
            cls.Entity,
        ]

    def pluralize(self) -> str:
        if self is self.Analysis:
            return "analyses"
        if self is self.Entity:
            return "entities"
        return f"{self}s"
@@ -1,4 +1,3 @@
import hashlib
import re
from copy import deepcopy
from dataclasses import dataclass
@@ -35,6 +34,7 @@ from dbt.exceptions import (
    UndefinedMacroError,
)
from dbt.parser.search import FileBlock
from dbt.utils import md5


def synthesize_generic_test_names(
@@ -72,7 +72,7 @@ def synthesize_generic_test_names(

    if len(full_name) >= 64:
        test_trunc_identifier = test_identifier[:30]
        label = hashlib.md5(full_name.encode("utf-8")).hexdigest()
        label = md5(full_name)
        short_name = "{}_{}".format(test_trunc_identifier, label)
    else:
        short_name = full_name
@@ -272,7 +272,7 @@ class TestBuilder(Generic[Testable]):
            column_name=column_name,
            name=self.name,
            key=key,
            err_msg=e.msg,
            err_msg=e.msg
        )

        if value is not None:
@@ -8,6 +8,7 @@ from typing import Dict, Optional, Mapping, Callable, Any, List, Type, Union, Tuple
from itertools import chain
import time
from dbt.events.base_types import EventLevel
import pprint

import dbt.exceptions
import dbt.tracking
@@ -29,6 +30,8 @@ from dbt.events.types import (
    ParsedFileLoadFailed,
    InvalidDisabledTargetInTestNode,
    NodeNotFoundOrDisabled,
    StateCheckVarsHash,
    Note,
)
from dbt.logger import DbtProcessState
from dbt.node_types import NodeType
@@ -56,7 +59,6 @@ from dbt.contracts.graph.nodes import (
    ColumnInfo,
    Exposure,
    Metric,
    Entity,
    SeedNode,
    ManifestNode,
    ResultNode,
@@ -341,7 +343,7 @@ class ManifestLoader:
            project, project_parser_files[project.project_name], parser_types
        )

        # Now that we've loaded most of the nodes (except for schema tests, sources, metrics, entities)
        # Now that we've loaded most of the nodes (except for schema tests, sources, metrics)
        # load up the Lookup objects to resolve them by name, so the SourceFiles store
        # the unique_id instead of the name. Sources are loaded from yaml files, so
        # aren't in place yet
@@ -377,7 +379,7 @@ class ManifestLoader:
        # copy the selectors from the root_project to the manifest
        self.manifest.selectors = self.root_project.manifest_selectors

        # update the refs, sources, docs, entities and metrics depends_on.nodes
        # update the refs, sources, docs and metrics depends_on.nodes
        # These check the created_at time on the nodes to
        # determine whether they need processing.
        start_process = time.perf_counter()
@@ -385,7 +387,6 @@ class ManifestLoader:
        self.process_refs(self.root_project.project_name)
        self.process_docs(self.root_project)
        self.process_metrics(self.root_project)
        self.process_entities(self.root_project)

        # update tracking data
        self._perf_info.process_manifest_elapsed = time.perf_counter() - start_process
@@ -571,6 +572,12 @@ class ManifestLoader:
                    reason="config vars, config profile, or config target have changed"
                )
            )
            fire_event(
                Note(
                    msg=f"previous checksum: {manifest.state_check.vars_hash.checksum}, current checksum: {self.manifest.state_check.vars_hash.checksum}"
                ),
                level=EventLevel.DEBUG,
            )
            valid = False
            reparse_reason = ReparseReason.vars_changed
        if self.manifest.state_check.profile_hash != manifest.state_check.profile_hash:
@@ -704,16 +711,28 @@ class ManifestLoader:
        # arg vars, but since any changes to that file will cause state_check
        # to not pass, it doesn't matter. If we move to more granular checking
        # of env_vars, that would need to change.
        # We are using the parsed cli_vars instead of config.args.vars, in order
        # to sort them and avoid reparsing because of ordering issues.
        stringified_cli_vars = pprint.pformat(config.cli_vars)
        vars_hash = FileHash.from_contents(
            "\x00".join(
                [
                    getattr(config.args, "vars", "{}") or "{}",
                    stringified_cli_vars,
                    getattr(config.args, "profile", "") or "",
                    getattr(config.args, "target", "") or "",
                    __version__,
                ]
            )
        )
        fire_event(
            StateCheckVarsHash(
                checksum=vars_hash.checksum,
                vars=stringified_cli_vars,
                profile=config.args.profile,
                target=config.args.target,
                version=__version__,
            )
        )

        # Create a FileHash of the env_vars in the project
        key_list = list(config.project_env_vars.keys())
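
A sketch of the vars-hash input assembled above: pprint.pformat sorts dict keys, so the checksum is stable no matter how --vars were ordered on the command line. The profile, target, and version values are illustrative; FileHash is imported as elsewhere in this diff.

    import pprint
    from dbt.contracts.files import FileHash

    stringified_cli_vars = pprint.pformat({"b": 2, "a": 1})  # "{'a': 1, 'b': 2}"
    vars_hash = FileHash.from_contents(
        "\x00".join([stringified_cli_vars, "my_profile", "dev", "1.4.5"])
    )
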
@@ -840,10 +859,6 @@ class ManifestLoader:
            if metric.created_at < self.started_at:
                continue
            _process_refs_for_metric(self.manifest, current_project, metric)
        for entity in self.manifest.entities.values():
            if entity.created_at < self.started_at:
                continue
            _process_refs_for_entity(self.manifest, current_project, entity)

    # Takes references in 'metrics' array of nodes and exposures, finds the target
    # node, and updates 'depends_on.nodes' with the unique id
@@ -864,23 +879,6 @@ class ManifestLoader:
                continue
            _process_metrics_for_node(self.manifest, current_project, exposure)

    # Takes references in 'entities' array of nodes and exposures, finds the target
    # node, and updates 'depends_on.nodes' with the unique id
    def process_entities(self, config: RuntimeConfig):
        current_project = config.project_name
        for node in self.manifest.nodes.values():
            if node.created_at < self.started_at:
                continue
            _process_entities_for_node(self.manifest, current_project, node)
        for entity in self.manifest.entities.values():
            if entity.created_at < self.started_at:
                continue
            _process_entities_for_node(self.manifest, current_project, entity)
        for exposure in self.manifest.exposures.values():
            if exposure.created_at < self.started_at:
                continue
            _process_entities_for_node(self.manifest, current_project, exposure)

    # nodes: node and column descriptions
    # sources: source and table descriptions, column descriptions
    # macros: macro argument descriptions
@@ -936,16 +934,6 @@ class ManifestLoader:
                config.project_name,
            )
            _process_docs_for_metrics(ctx, metric)
        for entity in self.manifest.entities.values():
            if entity.created_at < self.started_at:
                continue
            ctx = generate_runtime_docs_context(
                config,
                entity,
                self.manifest,
                config.project_name,
            )
            _process_docs_for_entities(ctx, entity)

    # Loops through all nodes and exposures, for each element in
    # 'sources' array finds the source node and updates the
@@ -1136,10 +1124,6 @@ def _process_docs_for_metrics(context: Dict[str, Any], metric: Metric) -> None:
    metric.description = get_rendered(metric.description, context)


def _process_docs_for_entities(context: Dict[str, Any], entity: Entity) -> None:
    entity.description = get_rendered(entity.description, context)


def _process_refs_for_exposure(manifest: Manifest, current_project: str, exposure: Exposure):
    """Given a manifest and exposure in that manifest, process its refs"""
    for ref in exposure.refs:
@@ -1227,48 +1211,6 @@ def _process_refs_for_metric(manifest: Manifest, current_project: str, metric: Metric):
    manifest.update_metric(metric)


def _process_refs_for_entity(manifest: Manifest, current_project: str, entity: Entity):
    """Given a manifest and an entity in that manifest, process its refs"""
    for ref in entity.refs:
        target_model: Optional[Union[Disabled, ManifestNode]] = None
        target_model_name: str
        target_model_package: Optional[str] = None

        if len(ref) == 1:
            target_model_name = ref[0]
        elif len(ref) == 2:
            target_model_package, target_model_name = ref
        else:
            raise dbt.exceptions.DbtInternalError(
                f"Refs should always be 1 or 2 arguments - got {len(ref)}"
            )

        target_model = manifest.resolve_ref(
            target_model_name,
            target_model_package,
            current_project,
            entity.package_name,
        )

        if target_model is None or isinstance(target_model, Disabled):
            # This may raise. Even if it doesn't, we don't want to add
            # this entity to the graph b/c there is no destination entity
            entity.config.enabled = False
            invalid_target_fail_unless_test(
                node=entity,
                target_name=target_model_name,
                target_kind="node",
                target_package=target_model_package,
                disabled=(isinstance(target_model, Disabled)),
            )
            continue

        target_model_id = target_model.unique_id

        entity.depends_on.nodes.append(target_model_id)
        manifest.update_entity(entity)


def _process_metrics_for_node(
    manifest: Manifest,
    current_project: str,
@@ -1318,55 +1260,6 @@ def _process_metrics_for_node(
    node.depends_on.nodes.append(target_metric_id)


def _process_entities_for_node(
    manifest: Manifest,
    current_project: str,
    node: Union[ManifestNode, Entity, Exposure],
):
    """Given a manifest and a node in that manifest, process its entities"""

    if isinstance(node, SeedNode):
        return

    for entity in node.entities:
        target_entity: Optional[Union[Disabled, Entity]] = None
        target_entity_name: str
        target_entity_package: Optional[str] = None

        if len(entity) == 1:
            target_entity_name = entity[0]
        elif len(entity) == 2:
            target_entity_package, target_entity_name = entity
        else:
            raise dbt.exceptions.DbtInternalError(
                f"Entity references should always be 1 or 2 arguments - got {len(entity)}"
            )

        target_entity = manifest.resolve_entity(
            target_entity_name,
            target_entity_package,
            current_project,
            node.package_name,
        )

        if target_entity is None or isinstance(target_entity, Disabled):
            # This may raise. Even if it doesn't, we don't want to add
            # this node to the graph b/c there is no destination node
            node.config.enabled = False
            invalid_target_fail_unless_test(
                node=node,
                target_name=target_entity_name,
                target_kind="source",
                target_package=target_entity_package,
                disabled=(isinstance(target_entity, Disabled)),
            )
            continue

        target_entity_id = target_entity.unique_id

        node.depends_on.nodes.append(target_entity_id)


def _process_refs_for_node(manifest: Manifest, current_project: str, node: ManifestNode):
    """Given a manifest and a node in that manifest, process its refs"""

@@ -1441,7 +1334,6 @@ def _process_sources_for_exposure(manifest: Manifest, current_project: str, exposure: Exposure):
    manifest.update_exposure(exposure)


# TODO: Remove this code because metrics can't be based on sources
def _process_sources_for_metric(manifest: Manifest, current_project: str, metric: Metric):
    target_source: Optional[Union[Disabled, SourceDefinition]] = None
    for source_name, table_name in metric.sources:
@@ -8,7 +8,6 @@ from dbt.contracts.files import (
    parse_file_type_to_parser,
)
from dbt.events.functions import fire_event
from dbt.events.base_types import EventLevel
from dbt.events.types import (
    PartialParsingEnabled,
    PartialParsingFile,
@@ -156,11 +155,7 @@ class PartialParsing:
        self.macro_child_map = self.saved_manifest.build_macro_child_map()
        deleted = len(deleted) + len(deleted_schema_files)
        changed = len(changed) + len(changed_schema_files)
        event = PartialParsingEnabled(deleted=deleted, added=len(added), changed=changed)
        if os.environ.get("DBT_PP_TEST"):
            fire_event(event, level=EventLevel.INFO)
        else:
            fire_event(event)
        fire_event(PartialParsingEnabled(deleted=deleted, added=len(added), changed=changed))
        self.file_diff = file_diff

        # generate the list of files that need parsing
@@ -242,7 +237,7 @@ class PartialParsing:
            self.remove_source_override_target(source)

    def delete_disabled(self, unique_id, file_id):
        # This node/metric/entity/exposure is disabled. Find it and remove it from disabled dictionary.
        # This node/metric/exposure is disabled. Find it and remove it from disabled dictionary.
        for dis_index, dis_node in enumerate(self.saved_manifest.disabled[unique_id]):
            if dis_node.file_id == file_id:
                node = dis_node
@@ -441,18 +436,6 @@ class PartialParsing:
            if metric_element:
                self.delete_schema_metric(schema_file, metric_element)
            self.merge_patch(schema_file, "metrics", metric_element)
        elif unique_id in self.saved_manifest.entities:
            entity = self.saved_manifest.entities[unique_id]
            file_id = entity.file_id
            if file_id in self.saved_files and file_id not in self.file_diff["deleted"]:
                schema_file = self.saved_files[file_id]
                entities = []
                if "entities" in schema_file.dict_from_yaml:
                    entities = schema_file.dict_from_yaml["entities"]
                entity_element = self.get_schema_element(entities, entity.name)
                if entity_element:
                    self.delete_schema_entity(schema_file, entity_element)
                self.merge_patch(schema_file, "entities", entity_element)
        elif unique_id in self.saved_manifest.macros:
            macro = self.saved_manifest.macros[unique_id]
            file_id = macro.file_id
@@ -758,29 +741,6 @@ class PartialParsing:
                    self.delete_schema_metric(schema_file, elem)
                    self.merge_patch(schema_file, dict_key, elem)

        # entities
        dict_key = "entities"
        entity_diff = self.get_diff_for("entities", saved_yaml_dict, new_yaml_dict)
        if entity_diff["changed"]:
            for entity in entity_diff["changed"]:
                self.delete_schema_entity(schema_file, entity)
                self.merge_patch(schema_file, dict_key, entity)
        if entity_diff["deleted"]:
            for entity in entity_diff["deleted"]:
                self.delete_schema_entity(schema_file, entity)
        if entity_diff["added"]:
            for entity in entity_diff["added"]:
                self.merge_patch(schema_file, dict_key, entity)
        # Handle schema file updates due to env_var changes
        if dict_key in env_var_changes and dict_key in new_yaml_dict:
            for name in env_var_changes[dict_key]:
                if name in entity_diff["changed_or_deleted_names"]:
                    continue
                elem = self.get_schema_element(new_yaml_dict[dict_key], name)
                if elem:
                    self.delete_schema_entity(schema_file, elem)
                    self.merge_patch(schema_file, dict_key, elem)

    # Take a "section" of the schema file yaml dictionary from saved and new schema files
    # and determine which parts have changed
    def get_diff_for(self, key, saved_yaml_dict, new_yaml_dict):
@@ -956,24 +916,6 @@ class PartialParsing:
            elif unique_id in self.saved_manifest.disabled:
                self.delete_disabled(unique_id, schema_file.file_id)

    # entities are created only from schema files, but also can be referred to by other nodes
    def delete_schema_entity(self, schema_file, entity_dict):
        entity_name = entity_dict["name"]
        entities = schema_file.entities.copy()
        for unique_id in entities:
            if unique_id in self.saved_manifest.entities:
                entity = self.saved_manifest.entities[unique_id]
                if entity.name == entity_name:
                    # Need to find everything that referenced this entity and schedule for parsing
                    if unique_id in self.saved_manifest.child_map:
                        self.schedule_nodes_for_parsing(self.saved_manifest.child_map[unique_id])
                    self.deleted_manifest.entities[unique_id] = self.saved_manifest.entities.pop(
                        unique_id
                    )
                    schema_file.entities.remove(unique_id)
            elif unique_id in self.saved_manifest.disabled:
                self.delete_disabled(unique_id, schema_file.file_id)

    def get_schema_element(self, elem_list, elem_name):
        for element in elem_list:
            if "name" in element and element["name"] == elem_name:
@@ -3,7 +3,6 @@ import os
import pathlib

from abc import ABCMeta, abstractmethod
from hashlib import md5
from typing import Iterable, Dict, Any, Union, List, Optional, Generic, TypeVar, Type

from dbt.dataclass_schema import ValidationError, dbtClassMixin
@@ -22,12 +21,11 @@ from dbt.context.configured import generate_schema_yml_context, SchemaYamlVars
from dbt.context.providers import (
    generate_parse_exposure,
    generate_parse_metrics,
    generate_parse_entities,
    generate_test_context,
)
from dbt.context.macro_resolver import MacroResolver
from dbt.contracts.files import FileHash, SchemaSourceFile
from dbt.contracts.graph.model_config import MetricConfig, ExposureConfig, EntityConfig
from dbt.contracts.graph.model_config import MetricConfig, ExposureConfig
from dbt.contracts.graph.nodes import (
    ParsedNodePatch,
    ColumnInfo,
@@ -36,7 +34,6 @@ from dbt.contracts.graph.nodes import (
    UnpatchedSourceDefinition,
    Exposure,
    Metric,
    Entity,
)
from dbt.contracts.graph.unparsed import (
    HasColumnDocs,
@@ -49,7 +46,6 @@ from dbt.contracts.graph.unparsed import (
    UnparsedNodeUpdate,
    UnparsedExposure,
    UnparsedMetric,
    UnparsedEntity,
    UnparsedSourceDefinition,
)
from dbt.exceptions import (
@@ -83,7 +79,7 @@ from dbt.parser.generic_test_builders import (
    TestBlock,
    Testable,
)
from dbt.utils import get_pseudo_test_path, coerce_dict_str
from dbt.utils import get_pseudo_test_path, coerce_dict_str, md5


TestDef = Union[str, Dict[str, Any]]
@@ -97,7 +93,6 @@ schema_file_keys = (
    "analyses",
    "exposures",
    "metrics",
    "entities",
)


@@ -118,7 +113,6 @@ class ParserRef:
    def __init__(self):
        self.column_info: Dict[str, ColumnInfo] = {}

    # TODO: Mimic this for dimension information at the entity level
    def add(
        self,
        column: Union[HasDocs, UnparsedColumn],
@@ -227,8 +221,8 @@ class SchemaParser(SimpleParser[GenericTestBlock, GenericTestNode]):
            return str(data)

        hashable_metadata = repr(get_hashable_md(test_metadata))
        hash_string = "".join([name, hashable_metadata]).encode("utf-8")
        test_hash = md5(hash_string).hexdigest()[-HASH_LENGTH:]
        hash_string = "".join([name, hashable_metadata])
        test_hash = md5(hash_string)[-HASH_LENGTH:]

        dct = {
            "alias": name,
@@ -541,11 +535,6 @@ class SchemaParser(SimpleParser[GenericTestBlock, GenericTestNode]):
            metric_parser = MetricParser(self, yaml_block)
            metric_parser.parse()

        # parse entities
        if "entities" in dct:
            entity_parser = EntityParser(self, yaml_block)
            entity_parser.parse()


def check_format_version(file_path, yaml_dct) -> None:
    if "version" not in yaml_dct:
@@ -1193,107 +1182,3 @@ class MetricParser(YamlReader):
        except (ValidationError, JSONValidationError) as exc:
            raise YamlParseDictError(self.yaml.path, self.key, data, exc)
        self.parse_metric(unparsed)


class EntityParser(YamlReader):
    def __init__(self, schema_parser: SchemaParser, yaml: YamlBlock):
        super().__init__(schema_parser, yaml, NodeType.Entity.pluralize())
        self.schema_parser = schema_parser
        self.yaml = yaml

    def parse_entity(self, unparsed: UnparsedEntity):
        package_name = self.project.project_name
        unique_id = f"{NodeType.Entity}.{package_name}.{unparsed.name}"
        path = self.yaml.path.relative_path

        fqn = self.schema_parser.get_fqn_prefix(path)
        fqn.append(unparsed.name)

        config = self._generate_entity_config(
            target=unparsed,
            fqn=fqn,
            package_name=package_name,
            rendered=True,
        )

        config = config.finalize_and_validate()

        unrendered_config = self._generate_entity_config(
            target=unparsed,
            fqn=fqn,
            package_name=package_name,
            rendered=False,
        )

        if not isinstance(config, EntityConfig):
            raise DbtInternalError(
                f"Calculated a {type(config)} for an entity, but expected an EntityConfig"
            )

        parsed = Entity(
            resource_type=NodeType.Entity,
            package_name=package_name,
            path=path,
            original_file_path=self.yaml.path.original_file_path,
            unique_id=unique_id,
            fqn=fqn,
            model=unparsed.model,
            name=unparsed.name,
            description=unparsed.description,
            dimensions=unparsed.dimensions,
            meta=unparsed.meta,
            tags=unparsed.tags,
            config=config,
            unrendered_config=unrendered_config,
        )

        ctx = generate_parse_entities(
            parsed,
            self.root_project,
            self.schema_parser.manifest,
            package_name,
        )

        if parsed.model is not None:
            model_ref = "{{ " + parsed.model + " }}"
            get_rendered(model_ref, ctx, parsed)

        # if the entity is disabled we do not want it included in the manifest, only in the disabled dict
        if parsed.config.enabled:
            # self.manifest.add_metric(self.yaml.file, parsed)
            self.manifest.add_entity(self.yaml.file, parsed)
        else:
            self.manifest.add_disabled(self.yaml.file, parsed)

    def _generate_entity_config(
        self, target: UnparsedEntity, fqn: List[str], package_name: str, rendered: bool
    ):
        generator: BaseContextConfigGenerator
        if rendered:
            generator = ContextConfigGenerator(self.root_project)
        else:
            generator = UnrenderedConfigGenerator(self.root_project)

        # configs with precedence set
        precedence_configs = dict()
        # first apply entity configs
        precedence_configs.update(target.config)

        return generator.calculate_node_config(
            config_call_dict={},
            fqn=fqn,
            resource_type=NodeType.Entity,
            project_name=package_name,
            base=False,
            patch_config_dict=precedence_configs,
        )

    def parse(self):
        for data in self.get_key_dicts():
            try:
                UnparsedEntity.validate(data)
                unparsed = UnparsedEntity.from_dict(data)

            except (ValidationError, JSONValidationError) as exc:
                raise YamlParseDictError(self.yaml.path, self.key, data, exc)
            self.parse_entity(unparsed)
@@ -1,10 +1,7 @@
from dataclasses import dataclass
import re
import warnings
from typing import List

from packaging import version as packaging_version

from dbt.exceptions import VersionsNotCompatibleError
import dbt.utils

@@ -70,6 +67,11 @@ $
_VERSION_REGEX = re.compile(_VERSION_REGEX_PAT_STR, re.VERBOSE)


def _cmp(a, b):
    """Return negative if a<b, zero if a==b, positive if a>b."""
    return (a > b) - (a < b)


@dataclass
class VersionSpecifier(VersionSpecification):
    def to_version_string(self, skip_matcher=False):
@@ -142,13 +144,19 @@ class VersionSpecifier(VersionSpecification):
            return 1
        if b is None:
            return -1
        # This suppresses the LegacyVersion deprecation warning
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", category=DeprecationWarning)
            if packaging_version.parse(a) > packaging_version.parse(b):

            # Check the prerelease component only
            prcmp = self._nat_cmp(a, b)
            if prcmp != 0:  # either -1 or 1
                return prcmp
            # else is equal and will fall through

        else:  # major/minor/patch, should all be numbers
            if int(a) > int(b):
                return 1
            elif packaging_version.parse(a) < packaging_version.parse(b):
            elif int(a) < int(b):
                return -1
            # else is equal and will fall through

        equal = (
            self.matcher == Matchers.GREATER_THAN_OR_EQUAL
@@ -212,6 +220,29 @@ class VersionSpecifier(VersionSpecification):
    def is_exact(self):
        return self.matcher == Matchers.EXACT

    @classmethod
    def _nat_cmp(cls, a, b):
        def cmp_prerelease_tag(a, b):
            if isinstance(a, int) and isinstance(b, int):
                return _cmp(a, b)
            elif isinstance(a, int):
                return -1
            elif isinstance(b, int):
                return 1
            else:
                return _cmp(a, b)

        a, b = a or "", b or ""
        a_parts, b_parts = a.split("."), b.split(".")
        a_parts = [int(x) if re.match(r"^\d+$", x) else x for x in a_parts]
        b_parts = [int(x) if re.match(r"^\d+$", x) else x for x in b_parts]
        for sub_a, sub_b in zip(a_parts, b_parts):
            cmp_result = cmp_prerelease_tag(sub_a, sub_b)
            if cmp_result != 0:
                return cmp_result
        else:
            return _cmp(len(a), len(b))


@dataclass
class VersionRange:
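
A standalone re-implementation of the prerelease comparison above, runnable without dbt installed, to show the ordering rules it encodes:

    import re

    def _cmp(a, b):
        return (a > b) - (a < b)  # negative if a<b, zero if equal, positive if a>b

    def nat_cmp(a, b):
        def cmp_prerelease_tag(x, y):
            if isinstance(x, int) and isinstance(y, int):
                return _cmp(x, y)
            elif isinstance(x, int):
                return -1  # numeric identifiers sort before alphanumeric ones
            elif isinstance(y, int):
                return 1
            return _cmp(x, y)

        a, b = a or "", b or ""
        a_parts = [int(x) if re.match(r"^\d+$", x) else x for x in a.split(".")]
        b_parts = [int(x) if re.match(r"^\d+$", x) else x for x in b.split(".")]
        for sub_a, sub_b in zip(a_parts, b_parts):
            result = cmp_prerelease_tag(sub_a, sub_b)
            if result != 0:
                return result
        return _cmp(len(a), len(b))

    assert nat_cmp("rc.9", "rc.10") < 0  # numeric parts compare numerically, not lexically
    assert nat_cmp("1", "rc") < 0        # numbers sort before strings
    assert nat_cmp("b1", "b1") == 0
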
@@ -83,6 +83,7 @@ class CompileTask(GraphRunnableTask):
            adapter=adapter,
            other=deferred_manifest,
            selected=selected_uids,
            favor_state=bool(self.args.favor_state),
        )
        # TODO: is it wrong to write the manifest here? I think it's right...
        self.write_manifest()
@@ -1,6 +1,6 @@
import json

from dbt.contracts.graph.nodes import Exposure, SourceDefinition, Metric, Entity
from dbt.contracts.graph.nodes import Exposure, SourceDefinition, Metric
from dbt.graph import ResourceTypeSelector
from dbt.task.runnable import GraphRunnableTask, ManifestTask
from dbt.task.test import TestSelector
@@ -22,7 +22,6 @@ class ListTask(GraphRunnableTask):
            NodeType.Source,
            NodeType.Exposure,
            NodeType.Metric,
            NodeType.Entity,
        )
    )
    ALL_RESOURCE_VALUES = DEFAULT_RESOURCE_VALUES | frozenset((NodeType.Analysis,))
@@ -83,8 +82,6 @@ class ListTask(GraphRunnableTask):
                yield self.manifest.exposures[node]
            elif node in self.manifest.metrics:
                yield self.manifest.metrics[node]
            elif node in self.manifest.entities:
                yield self.manifest.entities[node]
            else:
                raise DbtRuntimeError(
                    f'Got an unexpected result from node selection: "{node}"'
@@ -108,11 +105,6 @@ class ListTask(GraphRunnableTask):
                # metrics are searched for by pkg.metric_name
                metric_selector = ".".join([node.package_name, node.name])
                yield f"metric:{metric_selector}"
            elif node.resource_type == NodeType.Entity:
                assert isinstance(node, Entity)
                # entities are searched for by pkg.entity_name
                entity_selector = ".".join([node.package_name, node.name])
                yield f"entity:{entity_selector}"
            else:
                # everything else is from `fqn`
                yield ".".join(node.fqn)
@@ -443,7 +443,7 @@ class RunTask(CompileTask):
        database_schema_set: Set[Tuple[Optional[str], str]] = {
            (r.node.database, r.node.schema)
            for r in results
            if r.node.is_relational
            if (hasattr(r, "node") and r.node.is_relational)
            and r.status not in (NodeStatus.Error, NodeStatus.Fail, NodeStatus.Skipped)
        }


@@ -91,7 +91,9 @@ class TestRunner(CompileRunner):
    def before_execute(self):
        self.print_start_line()

    def execute_test(self, test: TestNode, manifest: Manifest) -> TestResultData:
    def execute_test(
        self, test: TestNode, manifest: Manifest
    ) -> TestResultData:
        context = generate_runtime_model_context(test, self.config, manifest)

        materialization_macro = manifest.find_materialization_macro_by_name(
@@ -99,9 +101,7 @@ class TestRunner(CompileRunner):
        )

        if materialization_macro is None:
            raise MissingMaterializationError(
                materialization=test.get_materialization(), adapter_type=self.adapter.type()
            )
            raise MissingMaterializationError(materialization=test.get_materialization(), adapter_type=self.adapter.type())

        if "config" not in context:
            raise DbtInternalError(
core/dbt/tests/fixtures/project.py (vendored, 4 changes)
@@ -249,9 +249,7 @@ def clean_up_logging():
# otherwise this will fail. So to test errors in those areas, you need to copy the files
# into the project in the tests instead of putting them in the fixtures.
@pytest.fixture(scope="class")
def adapter(
    unique_schema, project_root, profiles_root, profiles_yml, dbt_project_yml, clean_up_logging
):
def adapter(unique_schema, project_root, profiles_root, profiles_yml, dbt_project_yml, clean_up_logging):
    # The profiles.yml and dbt_project.yml should already be written out
    args = Namespace(
        profiles_dir=str(profiles_root), project_dir=str(project_root), target=None, profile=None
@@ -12,12 +12,7 @@ from dbt.adapters.factory import Adapter
from dbt.main import handle_and_check
from dbt.logger import log_manager
from dbt.contracts.graph.manifest import Manifest
from dbt.events.functions import (
    fire_event,
    capture_stdout_logs,
    stop_capture_stdout_logs,
    reset_metadata_vars,
)
from dbt.events.functions import fire_event, capture_stdout_logs, stop_capture_stdout_logs, reset_metadata_vars
from dbt.events.test_types import IntegrationTestDebug

# =============================================================================
@@ -29,6 +24,8 @@ from dbt.events.test_types import IntegrationTestDebug
# rm_file
# write_file
# read_file
# mkdir
# rm_dir
# get_artifact
# update_config_file
# write_config_file
@@ -156,6 +153,22 @@ def read_file(*paths):
    return contents


# To create a directory
def mkdir(directory_path):
    try:
        os.makedirs(directory_path)
    except FileExistsError:
        raise FileExistsError(f"{directory_path} already exists.")


# To remove a directory
def rm_dir(directory_path):
    try:
        shutil.rmtree(directory_path)
    except FileNotFoundError:
        raise FileNotFoundError(f"{directory_path} does not exist.")


# Get an artifact (usually from the target directory) such as
# manifest.json or catalog.json to use in a test
def get_artifact(*paths):
@@ -10,6 +10,7 @@ import jinja2
import json
import os
import requests
import sys
from tarfile import ReadError
import time
from pathlib import PosixPath, WindowsPath
@@ -252,16 +253,19 @@ def get_pseudo_hook_path(hook_name):
    return os.path.join(*path_parts)


def md5(string):
    return hashlib.md5(string.encode("utf-8")).hexdigest()
def md5(string, charset="utf-8"):
    if sys.version_info >= (3, 9):
        return hashlib.md5(string.encode(charset), usedforsecurity=False).hexdigest()
    else:
        return hashlib.md5(string.encode(charset)).hexdigest()


def get_hash(model):
    return hashlib.md5(model.unique_id.encode("utf-8")).hexdigest()
    return md5(model.unique_id)


def get_hashed_contents(model):
    return hashlib.md5(model.raw_code.encode("utf-8")).hexdigest()
    return md5(model.raw_code)


def flatten_nodes(dep_list):
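
The new md5 helper in use: usedforsecurity=False (available from Python 3.9) marks the digest as non-cryptographic, which keeps it usable on FIPS-enabled hosts. The input string below is illustrative.

    import hashlib
    import sys

    def md5(string, charset="utf-8"):
        if sys.version_info >= (3, 9):
            return hashlib.md5(string.encode(charset), usedforsecurity=False).hexdigest()
        return hashlib.md5(string.encode(charset)).hexdigest()

    print(md5("model.my_project.my_model"))  # a stable 32-character hex digest
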
@@ -235,5 +235,5 @@ def _get_adapter_plugin_names() -> Iterator[str]:
    yield plugin_name


__version__ = "1.5.0a1"
__version__ = "1.4.5"
installed = get_installed_version()
@@ -25,7 +25,7 @@ with open(os.path.join(this_directory, "README.md")) as f:


package_name = "dbt-core"
package_version = "1.5.0a1"
package_version = "1.4.5"
description = """With dbt, data analysts and engineers can build analytics \
the way engineers build applications."""

@@ -51,19 +51,20 @@ setup(
        "betterproto==1.2.5",
        "click>=7.0,<9",
        "colorama>=0.3.9,<0.4.7",
        "hologram>=0.0.14,<=0.0.15",
        "hologram>=0.0.14,<=0.0.16",
        "isodate>=0.6,<0.7",
        "logbook>=1.5,<1.6",
        "mashumaro[msgpack]==3.3.1",
        "minimal-snowplow-tracker==0.0.2",
        "networkx>=2.3,<2.8.1;python_version<'3.8'",
        "networkx>=2.3,<3;python_version>='3.8'",
        "packaging>=20.9,<22.0",
        "sqlparse>=0.2.3,<0.5",
        "packaging>20.9",
        "sqlparse>=0.2.3,<0.4.4",
        "dbt-extractor~=0.4.1",
        "typing-extensions>=3.7.4",
        "werkzeug>=1,<3",
        "pathspec>=0.9,<0.11",
        "pytz>=2015.7",
        # the following are all to match snowflake-connector-python
        "requests<3.0.0",
        "idna>=2.5,<4",

@@ -17,7 +17,6 @@ pytest-dotenv
pytest-logbook
pytest-mock
pytest-xdist
pytz
sphinx
tox>=3.13
twine
@@ -14,12 +14,12 @@ FROM --platform=$build_for python:3.10.7-slim-bullseye as base
# N.B. The refs updated automagically every release via bumpversion
# N.B. dbt-postgres is currently found in the core codebase so a value of dbt-core@<some_version> is correct

ARG dbt_core_ref=dbt-core@v1.5.0a1
ARG dbt_postgres_ref=dbt-core@v1.5.0a1
ARG dbt_redshift_ref=dbt-redshift@v1.5.0a1
ARG dbt_bigquery_ref=dbt-bigquery@v1.5.0a1
ARG dbt_snowflake_ref=dbt-snowflake@v1.5.0a1
ARG dbt_spark_ref=dbt-spark@v1.5.0a1
ARG dbt_core_ref=dbt-core@v1.4.5
ARG dbt_postgres_ref=dbt-core@v1.4.5
ARG dbt_redshift_ref=dbt-redshift@v1.4.0
ARG dbt_bigquery_ref=dbt-bigquery@v1.4.0
ARG dbt_snowflake_ref=dbt-snowflake@v1.4.0
ARG dbt_spark_ref=dbt-spark@v1.4.0
# special case args
ARG dbt_spark_version=all
ARG dbt_third_party
@@ -1 +1 @@
version = "1.5.0a1"
version = "1.4.5"

@@ -41,7 +41,7 @@ def _dbt_psycopg2_name():


package_name = "dbt-postgres"
package_version = "1.5.0a1"
package_version = "1.4.5"
description = """The postgres adapter plugin for dbt (data build tool)"""

this_directory = os.path.abspath(os.path.dirname(__file__))
@@ -6,6 +6,6 @@ namespace_packages = true

[tool.black]
# TODO: remove global exclusion of tests when testing overhaul is complete
force-exclude = 'test/'
force-exclude = 'test'
line-length = 99
target-version = ['py38']
@@ -36,7 +36,7 @@
    }
  },
  "additionalProperties": false,
  "description": "CatalogArtifact(metadata: dbt.contracts.results.CatalogMetadata, nodes: Dict[str, dbt.contracts.results.CatalogTable], sources: Dict[str, dbt.contracts.results.CatalogTable], errors: Optional[List[str]] = None, _compile_results: Optional[Any] = None)",
  "description": "CatalogArtifact(metadata: dbt.contracts.results.CatalogMetadata, nodes: Dict[str, dbt.contracts.results.CatalogTable], sources: Dict[str, dbt.contracts.results.CatalogTable], errors: Union[List[str], NoneType] = None, _compile_results: Union[Any, NoneType] = None)",
  "definitions": {
    "CatalogMetadata": {
      "type": "object",
@@ -48,12 +48,12 @@
        },
        "dbt_version": {
          "type": "string",
          "default": "1.5.0a1"
          "default": "1.2.0a1"
        },
        "generated_at": {
          "type": "string",
          "format": "date-time",
          "default": "2023-01-23T21:56:17.789289Z"
          "default": "2022-04-15T20:38:22.701177Z"
        },
        "invocation_id": {
          "oneOf": [
@@ -64,7 +64,7 @@
            {
              "type": "null"
            }
          ],
          "default": "10c9c26b-6682-4d46-84d2-12f641a070e5"
          "default": "34abf75e-59d3-442f-920c-fa3843d98014"
        },
        "env": {
          "type": "object",
@@ -75,7 +75,7 @@
        }
      },
      "additionalProperties": false,
      "description": "CatalogMetadata(dbt_schema_version: str = <factory>, dbt_version: str = '1.5.0a1', generated_at: datetime.datetime = <factory>, invocation_id: Optional[str] = <factory>, env: Dict[str, str] = <factory>)"
      "description": "CatalogMetadata(dbt_schema_version: str = <factory>, dbt_version: str = '1.2.0a1', generated_at: datetime.datetime = <factory>, invocation_id: Union[str, NoneType] = <factory>, env: Dict[str, str] = <factory>)"
    },
    "CatalogTable": {
      "type": "object",
@@ -112,7 +112,7 @@
        }
      },
      "additionalProperties": false,
      "description": "CatalogTable(metadata: dbt.contracts.results.TableMetadata, columns: Dict[str, dbt.contracts.results.ColumnMetadata], stats: Dict[str, dbt.contracts.results.StatsItem], unique_id: Optional[str] = None)"
      "description": "CatalogTable(metadata: dbt.contracts.results.TableMetadata, columns: Dict[str, dbt.contracts.results.ColumnMetadata], stats: Dict[str, dbt.contracts.results.StatsItem], unique_id: Union[str, NoneType] = None)"
    },
    "TableMetadata": {
      "type": "object",
@@ -163,7 +163,7 @@
        }
      },
      "additionalProperties": false,
      "description": "TableMetadata(type: str, schema: str, name: str, database: Optional[str] = None, comment: Optional[str] = None, owner: Optional[str] = None)"
      "description": "TableMetadata(type: str, schema: str, name: str, database: Union[str, NoneType] = None, comment: Union[str, NoneType] = None, owner: Union[str, NoneType] = None)"
    },
    "ColumnMetadata": {
      "type": "object",
@@ -194,7 +194,7 @@
        }
      },
      "additionalProperties": false,
      "description": "ColumnMetadata(type: str, index: int, name: str, comment: Optional[str] = None)"
      "description": "ColumnMetadata(type: str, index: int, name: str, comment: Union[str, NoneType] = None)"
    },
    "StatsItem": {
      "type": "object",
@@ -241,7 +241,7 @@
        }
      },
      "additionalProperties": false,
      "description": "StatsItem(id: str, label: str, value: Union[bool, str, float, NoneType], include: bool, description: Optional[str] = None)"
      "description": "StatsItem(id: str, label: str, value: Union[bool, str, float, NoneType], include: bool, description: Union[str, NoneType] = None)"
    }
  },
  "$schema": "http://json-schema.org/draft-07/schema#",
schemas/dbt/manifest/v5.json (new file, 5984 lines; diff suppressed because it is too large)
schemas/dbt/manifest/v6.json (new file, 6209 lines; diff suppressed because it is too large)
schemas/dbt/manifest/v7.json (new file, 6575 lines; diff suppressed because it is too large)

@@ -8,7 +8,6 @@
"docs",
"exposures",
"metrics",
"entities",
"selectors"
],
"properties": {
@@ -86,13 +85,6 @@
},
"description": "The metrics defined in the dbt project and its dependencies"
},
"entities": {
"type": "object",
"additionalProperties": {
"$ref": "#/definitions/Entity"
},
"description": "The entities defined in the dbt project and its dependencies"
},
"selectors": {
"type": "object",
"description": "The selectors defined in selectors.yml"
@@ -181,7 +173,7 @@
}
},
"additionalProperties": false,
"description": "WritableManifest(metadata: dbt.contracts.graph.manifest.ManifestMetadata, nodes: Mapping[str, Union[dbt.contracts.graph.nodes.AnalysisNode, dbt.contracts.graph.nodes.SingularTestNode, dbt.contracts.graph.nodes.HookNode, dbt.contracts.graph.nodes.ModelNode, dbt.contracts.graph.nodes.RPCNode, dbt.contracts.graph.nodes.SqlNode, dbt.contracts.graph.nodes.GenericTestNode, dbt.contracts.graph.nodes.SnapshotNode, dbt.contracts.graph.nodes.SeedNode]], sources: Mapping[str, dbt.contracts.graph.nodes.SourceDefinition], macros: Mapping[str, dbt.contracts.graph.nodes.Macro], docs: Mapping[str, dbt.contracts.graph.nodes.Documentation], exposures: Mapping[str, dbt.contracts.graph.nodes.Exposure], metrics: Mapping[str, dbt.contracts.graph.nodes.Metric], entities: Mapping[str, dbt.contracts.graph.nodes.Entity], selectors: Mapping[str, Any], disabled: Optional[Mapping[str, List[Union[dbt.contracts.graph.nodes.AnalysisNode, dbt.contracts.graph.nodes.SingularTestNode, dbt.contracts.graph.nodes.HookNode, dbt.contracts.graph.nodes.ModelNode, dbt.contracts.graph.nodes.RPCNode, dbt.contracts.graph.nodes.SqlNode, dbt.contracts.graph.nodes.GenericTestNode, dbt.contracts.graph.nodes.SnapshotNode, dbt.contracts.graph.nodes.SeedNode, dbt.contracts.graph.nodes.SourceDefinition]]]], parent_map: Optional[Dict[str, List[str]]], child_map: Optional[Dict[str, List[str]]])",
"description": "WritableManifest(metadata: dbt.contracts.graph.manifest.ManifestMetadata, nodes: Mapping[str, Union[dbt.contracts.graph.nodes.AnalysisNode, dbt.contracts.graph.nodes.SingularTestNode, dbt.contracts.graph.nodes.HookNode, dbt.contracts.graph.nodes.ModelNode, dbt.contracts.graph.nodes.RPCNode, dbt.contracts.graph.nodes.SqlNode, dbt.contracts.graph.nodes.GenericTestNode, dbt.contracts.graph.nodes.SnapshotNode, dbt.contracts.graph.nodes.SeedNode]], sources: Mapping[str, dbt.contracts.graph.nodes.SourceDefinition], macros: Mapping[str, dbt.contracts.graph.nodes.Macro], docs: Mapping[str, dbt.contracts.graph.nodes.Documentation], exposures: Mapping[str, dbt.contracts.graph.nodes.Exposure], metrics: Mapping[str, dbt.contracts.graph.nodes.Metric], selectors: Mapping[str, Any], disabled: Optional[Mapping[str, List[Union[dbt.contracts.graph.nodes.AnalysisNode, dbt.contracts.graph.nodes.SingularTestNode, dbt.contracts.graph.nodes.HookNode, dbt.contracts.graph.nodes.ModelNode, dbt.contracts.graph.nodes.RPCNode, dbt.contracts.graph.nodes.SqlNode, dbt.contracts.graph.nodes.GenericTestNode, dbt.contracts.graph.nodes.SnapshotNode, dbt.contracts.graph.nodes.SeedNode, dbt.contracts.graph.nodes.SourceDefinition]]]], parent_map: Optional[Dict[str, List[str]]], child_map: Optional[Dict[str, List[str]]])",
"definitions": {
"ManifestMetadata": {
"type": "object",
@@ -193,12 +185,12 @@
},
"dbt_version": {
"type": "string",
"default": "1.5.0a1"
"default": "1.4.1"
},
"generated_at": {
"type": "string",
"format": "date-time",
"default": "2023-01-23T21:56:17.790304Z"
"default": "2023-02-09T10:04:47.350768Z"
},
"invocation_id": {
"oneOf": [
@@ -209,7 +201,7 @@
"type": "null"
}
],
"default": "10c9c26b-6682-4d46-84d2-12f641a070e5"
"default": "f795bc66-f417-4007-af6e-f2e513d33790"
},
"env": {
"type": "object",
@@ -414,7 +406,7 @@
},
"created_at": {
"type": "number",
"default": 1674510977.792257
"default": 1675937087.353436
},
"config_call_dict": {
"type": "object",
@@ -468,16 +460,6 @@
},
"default": []
},
"entities": {
"type": "array",
"items": {
"type": "array",
"items": {
"type": "string"
}
},
"default": []
},
"depends_on": {
"$ref": "#/definitions/DependsOn",
"default": {
@@ -522,7 +504,7 @@
}
},
"additionalProperties": false,
"description": "AnalysisNode(database: Optional[str], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.NodeConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', language: str = 'sql', refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, entities: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Optional[str] = None, compiled: bool = False, compiled_code: Optional[str] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Optional[str] = None)"
"description": "AnalysisNode(database: Optional[str], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.NodeConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', language: str = 'sql', refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Optional[str] = None, compiled: bool = False, compiled_code: Optional[str] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Optional[str] = None)"
},
"FileHash": {
"type": "object",
@@ -971,7 +953,7 @@
},
"created_at": {
"type": "number",
"default": 1674510977.79368
"default": 1675937087.355371
},
"config_call_dict": {
"type": "object",
@@ -1025,16 +1007,6 @@
},
"default": []
},
"entities": {
"type": "array",
"items": {
"type": "array",
"items": {
"type": "string"
}
},
"default": []
},
"depends_on": {
"$ref": "#/definitions/DependsOn",
"default": {
@@ -1079,7 +1051,7 @@
}
},
"additionalProperties": false,
"description": "SingularTestNode(database: Optional[str], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.TestConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', language: str = 'sql', refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, entities: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Optional[str] = None, compiled: bool = False, compiled_code: Optional[str] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Optional[str] = None)"
"description": "SingularTestNode(database: Optional[str], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.TestConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', language: str = 'sql', refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Optional[str] = None, compiled: bool = False, compiled_code: Optional[str] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Optional[str] = None)"
},
"TestConfig": {
"type": "object",
@@ -1340,7 +1312,7 @@
},
"created_at": {
"type": "number",
"default": 1674510977.795094
"default": 1675937087.356482
},
"config_call_dict": {
"type": "object",
@@ -1394,16 +1366,6 @@
},
"default": []
},
"entities": {
"type": "array",
"items": {
"type": "array",
"items": {
"type": "string"
}
},
"default": []
},
"depends_on": {
"$ref": "#/definitions/DependsOn",
"default": {
@@ -1458,7 +1420,7 @@
}
},
"additionalProperties": false,
"description": "HookNode(database: Optional[str], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.NodeConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', language: str = 'sql', refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, entities: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Optional[str] = None, compiled: bool = False, compiled_code: Optional[str] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Optional[str] = None, index: Optional[int] = None)"
"description": "HookNode(database: Optional[str], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.NodeConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', language: str = 'sql', refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Optional[str] = None, compiled: bool = False, compiled_code: Optional[str] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Optional[str] = None, index: Optional[int] = None)"
},
"ModelNode": {
"type": "object",
@@ -1607,7 +1569,7 @@
},
"created_at": {
"type": "number",
"default": 1674510977.7959611
"default": 1675937087.357701
},
"config_call_dict": {
"type": "object",
@@ -1661,16 +1623,6 @@
},
"default": []
},
"entities": {
"type": "array",
"items": {
"type": "array",
"items": {
"type": "string"
}
},
"default": []
},
"depends_on": {
"$ref": "#/definitions/DependsOn",
"default": {
@@ -1715,7 +1667,7 @@
}
},
"additionalProperties": false,
"description": "ModelNode(database: Optional[str], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.NodeConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', language: str = 'sql', refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, entities: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Optional[str] = None, compiled: bool = False, compiled_code: Optional[str] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Optional[str] = None)"
"description": "ModelNode(database: Optional[str], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.NodeConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', language: str = 'sql', refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Optional[str] = None, compiled: bool = False, compiled_code: Optional[str] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Optional[str] = None)"
},
"RPCNode": {
"type": "object",
@@ -1864,7 +1816,7 @@
},
"created_at": {
"type": "number",
"default": 1674510977.796774
"default": 1675937087.358761
},
"config_call_dict": {
"type": "object",
@@ -1918,16 +1870,6 @@
},
"default": []
},
"entities": {
"type": "array",
"items": {
"type": "array",
"items": {
"type": "string"
}
},
"default": []
},
"depends_on": {
"$ref": "#/definitions/DependsOn",
"default": {
@@ -1972,7 +1914,7 @@
}
},
"additionalProperties": false,
"description": "RPCNode(database: Optional[str], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.NodeConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', language: str = 'sql', refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, entities: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Optional[str] = None, compiled: bool = False, compiled_code: Optional[str] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Optional[str] = None)"
"description": "RPCNode(database: Optional[str], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.NodeConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', language: str = 'sql', refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Optional[str] = None, compiled: bool = False, compiled_code: Optional[str] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Optional[str] = None)"
},
"SqlNode": {
"type": "object",
@@ -2121,7 +2063,7 @@
},
"created_at": {
"type": "number",
"default": 1674510977.797567
"default": 1675937087.359803
},
"config_call_dict": {
"type": "object",
@@ -2175,16 +2117,6 @@
},
"default": []
},
"entities": {
"type": "array",
"items": {
"type": "array",
"items": {
"type": "string"
}
},
"default": []
},
"depends_on": {
"$ref": "#/definitions/DependsOn",
"default": {
@@ -2229,7 +2161,7 @@
}
},
"additionalProperties": false,
"description": "SqlNode(database: Optional[str], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.NodeConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', language: str = 'sql', refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, entities: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Optional[str] = None, compiled: bool = False, compiled_code: Optional[str] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Optional[str] = None)"
"description": "SqlNode(database: Optional[str], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.NodeConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', language: str = 'sql', refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Optional[str] = None, compiled: bool = False, compiled_code: Optional[str] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Optional[str] = None)"
},
"GenericTestNode": {
"type": "object",
@@ -2374,7 +2306,7 @@
},
"created_at": {
"type": "number",
"default": 1674510977.79852
"default": 1675937087.361009
},
"config_call_dict": {
"type": "object",
@@ -2428,16 +2360,6 @@
},
"default": []
},
"entities": {
"type": "array",
"items": {
"type": "array",
"items": {
"type": "string"
}
},
"default": []
},
"depends_on": {
"$ref": "#/definitions/DependsOn",
"default": {
@@ -2502,7 +2424,7 @@
}
},
"additionalProperties": false,
"description": "GenericTestNode(test_metadata: dbt.contracts.graph.nodes.TestMetadata, database: Optional[str], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.TestConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', language: str = 'sql', refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, entities: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Optional[str] = None, compiled: bool = False, compiled_code: Optional[str] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Optional[str] = None, column_name: Optional[str] = None, file_key_name: Optional[str] = None)"
"description": "GenericTestNode(test_metadata: dbt.contracts.graph.nodes.TestMetadata, database: Optional[str], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.TestConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', language: str = 'sql', refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Optional[str] = None, compiled: bool = False, compiled_code: Optional[str] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Optional[str] = None, column_name: Optional[str] = None, file_key_name: Optional[str] = None)"
},
"TestMetadata": {
"type": "object",
@@ -2655,7 +2577,7 @@
},
"created_at": {
"type": "number",
"default": 1674510977.79998
"default": 1675937087.364386
},
"config_call_dict": {
"type": "object",
@@ -2709,16 +2631,6 @@
},
"default": []
},
"entities": {
"type": "array",
"items": {
"type": "array",
"items": {
"type": "string"
}
},
"default": []
},
"depends_on": {
"$ref": "#/definitions/DependsOn",
"default": {
@@ -2763,7 +2675,7 @@
}
},
"additionalProperties": false,
"description": "SnapshotNode(database: Optional[str], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.SnapshotConfig, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', language: str = 'sql', refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, entities: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Optional[str] = None, compiled: bool = False, compiled_code: Optional[str] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Optional[str] = None)"
"description": "SnapshotNode(database: Optional[str], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.SnapshotConfig, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', language: str = 'sql', refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Optional[str] = None, compiled: bool = False, compiled_code: Optional[str] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Optional[str] = None)"
},
"SnapshotConfig": {
"type": "object",
@@ -3118,7 +3030,7 @@
},
"created_at": {
"type": "number",
"default": 1674510977.801306
"default": 1675937087.366245
},
"config_call_dict": {
"type": "object",
@@ -3147,10 +3059,16 @@
"type": "null"
}
]
},
"depends_on": {
"$ref": "#/definitions/MacroDependsOn",
"default": {
"macros": []
}
}
},
"additionalProperties": false,
"description": "SeedNode(database: Optional[str], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.SeedConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', root_path: Optional[str] = None)"
"description": "SeedNode(database: Optional[str], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.SeedConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, build_path: Optional[str] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Optional[str] = None, raw_code: str = '', root_path: Optional[str] = None, depends_on: dbt.contracts.graph.nodes.MacroDependsOn = <factory>)"
},
"SeedConfig": {
"type": "object",
@@ -3317,6 +3235,21 @@
"additionalProperties": true,
"description": "SeedConfig(_extra: Dict[str, Any] = <factory>, enabled: bool = True, alias: Optional[str] = None, schema: Optional[str] = None, database: Optional[str] = None, tags: Union[List[str], str] = <factory>, meta: Dict[str, Any] = <factory>, materialized: str = 'seed', incremental_strategy: Optional[str] = None, persist_docs: Dict[str, Any] = <factory>, post_hook: List[dbt.contracts.graph.model_config.Hook] = <factory>, pre_hook: List[dbt.contracts.graph.model_config.Hook] = <factory>, quoting: Dict[str, Any] = <factory>, column_types: Dict[str, Any] = <factory>, full_refresh: Optional[bool] = None, unique_key: Union[str, List[str], NoneType] = None, on_schema_change: Optional[str] = 'ignore', grants: Dict[str, Any] = <factory>, packages: List[str] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, quote_columns: Optional[bool] = None)"
},
"MacroDependsOn": {
"type": "object",
"required": [],
"properties": {
"macros": {
"type": "array",
"items": {
"type": "string"
},
"default": []
}
},
"additionalProperties": false,
"description": "Used only in the Macro class"
},
"SourceDefinition": {
"type": "object",
"required": [
@@ -3483,7 +3416,7 @@
},
"created_at": {
"type": "number",
"default": 1674510977.802621
"default": 1675937087.368067
}
},
"additionalProperties": false,
@@ -3593,12 +3526,12 @@
},
"dbt_version": {
"type": "string",
"default": "1.5.0a1"
"default": "1.4.1"
},
"generated_at": {
"type": "string",
"format": "date-time",
"default": "2023-01-23T21:56:17.787436Z"
"default": "2023-02-09T10:04:47.347023Z"
},
"invocation_id": {
"oneOf": [
@@ -3609,7 +3542,7 @@
"type": "null"
}
],
"default": "10c9c26b-6682-4d46-84d2-12f641a070e5"
"default": "f795bc66-f417-4007-af6e-f2e513d33790"
},
"env": {
"type": "object",
@@ -3620,7 +3553,7 @@
}
},
"additionalProperties": false,
"description": "FreshnessMetadata(dbt_schema_version: str = <factory>, dbt_version: str = '1.5.0a1', generated_at: datetime.datetime = <factory>, invocation_id: Optional[str] = <factory>, env: Dict[str, str] = <factory>)"
"description": "FreshnessMetadata(dbt_schema_version: str = <factory>, dbt_version: str = '1.4.1', generated_at: datetime.datetime = <factory>, invocation_id: Optional[str] = <factory>, env: Dict[str, str] = <factory>)"
},
"SourceFreshnessRuntimeError": {
"type": "object",
@@ -3962,7 +3895,7 @@
},
"created_at": {
"type": "number",
"default": 1674510977.8031092
"default": 1675937087.368656
},
"supported_languages": {
"oneOf": [
@@ -3985,21 +3918,6 @@
"additionalProperties": false,
"description": "Macro(name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, macro_sql: str, depends_on: dbt.contracts.graph.nodes.MacroDependsOn = <factory>, description: str = '', meta: Dict[str, Any] = <factory>, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Optional[str] = None, arguments: List[dbt.contracts.graph.unparsed.MacroArgument] = <factory>, created_at: float = <factory>, supported_languages: Optional[List[dbt.node_types.ModelLanguage]] = None)"
},
"MacroDependsOn": {
"type": "object",
"required": [],
"properties": {
"macros": {
"type": "array",
"items": {
"type": "string"
},
"default": []
}
},
"additionalProperties": false,
"description": "Used only in the Macro class"
},
"MacroArgument": {
"type": "object",
"required": [
@@ -4218,23 +4136,13 @@
},
"default": []
},
"entities": {
"type": "array",
"items": {
"type": "array",
"items": {
"type": "string"
}
},
"default": []
},
"created_at": {
"type": "number",
"default": 1674510977.8040562
"default": 1675937087.369866
}
},
"additionalProperties": false,
"description": "Exposure(name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], type: dbt.contracts.graph.unparsed.ExposureType, owner: dbt.contracts.graph.unparsed.ExposureOwner, description: str = '', label: Optional[str] = None, maturity: Optional[dbt.contracts.graph.unparsed.MaturityType] = None, meta: Dict[str, Any] = <factory>, tags: List[str] = <factory>, config: dbt.contracts.graph.model_config.ExposureConfig = <factory>, unrendered_config: Dict[str, Any] = <factory>, url: Optional[str] = None, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, entities: List[List[str]] = <factory>, created_at: float = <factory>)"
"description": "Exposure(name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], type: dbt.contracts.graph.unparsed.ExposureType, owner: dbt.contracts.graph.unparsed.ExposureOwner, description: str = '', label: Optional[str] = None, maturity: Optional[dbt.contracts.graph.unparsed.MaturityType] = None, meta: Dict[str, Any] = <factory>, tags: List[str] = <factory>, config: dbt.contracts.graph.model_config.ExposureConfig = <factory>, unrendered_config: Dict[str, Any] = <factory>, url: Optional[str] = None, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, refs: List[List[str]] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, created_at: float = <factory>)"
},
"ExposureOwner": {
"type": "object",
@@ -4445,23 +4353,13 @@
},
"default": []
},
"entities": {
"type": "array",
"items": {
"type": "array",
"items": {
"type": "string"
}
},
"default": []
},
"created_at": {
"type": "number",
"default": 1674510977.804972
"default": 1675937087.371092
}
},
"additionalProperties": false,
"description": "Metric(name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], description: str, label: str, calculation_method: str, expression: str, filters: List[dbt.contracts.graph.unparsed.MetricFilter], time_grains: List[str], dimensions: List[str], timestamp: Optional[str] = None, window: Optional[dbt.contracts.graph.unparsed.MetricTime] = None, model: Optional[str] = None, model_unique_id: Optional[str] = None, meta: Dict[str, Any] = <factory>, tags: List[str] = <factory>, config: dbt.contracts.graph.model_config.MetricConfig = <factory>, unrendered_config: Dict[str, Any] = <factory>, sources: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, refs: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, entities: List[List[str]] = <factory>, created_at: float = <factory>)"
"description": "Metric(name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], description: str, label: str, calculation_method: str, expression: str, filters: List[dbt.contracts.graph.unparsed.MetricFilter], time_grains: List[str], dimensions: List[str], timestamp: Optional[str] = None, window: Optional[dbt.contracts.graph.unparsed.MetricTime] = None, model: Optional[str] = None, model_unique_id: Optional[str] = None, meta: Dict[str, Any] = <factory>, tags: List[str] = <factory>, config: dbt.contracts.graph.model_config.MetricConfig = <factory>, unrendered_config: Dict[str, Any] = <factory>, sources: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, refs: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, created_at: float = <factory>)"
},
"MetricFilter": {
"type": "object",
@@ -4529,148 +4427,6 @@
},
"additionalProperties": true,
"description": "MetricConfig(_extra: Dict[str, Any] = <factory>, enabled: bool = True)"
},
"Entity": {
"type": "object",
"required": [
"name",
"resource_type",
"package_name",
"path",
"original_file_path",
"unique_id",
"fqn",
"model",
"description",
"dimensions"
],
"properties": {
"name": {
"type": "string"
},
"resource_type": {
"type": "string",
"enum": [
"entity"
]
},
"package_name": {
"type": "string"
},
"path": {
"type": "string"
},
"original_file_path": {
"type": "string"
},
"unique_id": {
"type": "string"
},
"fqn": {
"type": "array",
"items": {
"type": "string"
}
},
"model": {
"type": "string"
},
"description": {
"type": "string"
},
"dimensions": {
"type": "array",
"items": {
"type": "string"
}
},
"model_unique_id": {
"oneOf": [
{
"type": "string"
},
{
"type": "null"
}
]
},
"meta": {
"type": "object",
"default": {}
},
"tags": {
"type": "array",
"items": {
"type": "string"
},
"default": []
},
"config": {
"$ref": "#/definitions/EntityConfig",
"default": {
"enabled": true
}
},
"unrendered_config": {
"type": "object",
"default": {}
},
"sources": {
"type": "array",
"items": {
"type": "array",
"items": {
"type": "string"
}
},
"default": []
},
"depends_on": {
"$ref": "#/definitions/DependsOn",
"default": {
"macros": [],
"nodes": []
}
},
"refs": {
"type": "array",
"items": {
"type": "array",
"items": {
"type": "string"
}
},
"default": []
},
"entities": {
"type": "array",
"items": {
"type": "array",
"items": {
"type": "string"
}
},
"default": []
},
"created_at": {
"type": "number",
"default": 1674510977.805523
}
},
"additionalProperties": false,
"description": "Entity(name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], model: str, description: str, dimensions: List[str], model_unique_id: Optional[str] = None, meta: Dict[str, Any] = <factory>, tags: List[str] = <factory>, config: dbt.contracts.graph.model_config.EntityConfig = <factory>, unrendered_config: Dict[str, Any] = <factory>, sources: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, refs: List[List[str]] = <factory>, entities: List[List[str]] = <factory>, created_at: float = <factory>)"
},
"EntityConfig": {
"type": "object",
"required": [],
"properties": {
"enabled": {
"type": "boolean",
"default": true
}
},
"additionalProperties": true,
"description": "EntityConfig(_extra: Dict[str, Any] = <factory>, enabled: bool = True)"
}
},
"$schema": "http://json-schema.org/draft-07/schema#",

@@ -37,12 +37,12 @@
},
"dbt_version": {
"type": "string",
"default": "1.5.0a1"
"default": "1.2.0a1"
},
"generated_at": {
"type": "string",
"format": "date-time",
"default": "2023-01-23T21:56:17.788708Z"
"default": "2022-04-15T20:38:22.700175Z"
},
"invocation_id": {
"oneOf": [
@@ -53,7 +53,7 @@
"type": "null"
}
],
"default": "10c9c26b-6682-4d46-84d2-12f641a070e5"
"default": "34abf75e-59d3-442f-920c-fa3843d98014"
},
"env": {
"type": "object",
@@ -64,7 +64,7 @@
}
},
"additionalProperties": false,
"description": "BaseArtifactMetadata(dbt_schema_version: str, dbt_version: str = '1.5.0a1', generated_at: datetime.datetime = <factory>, invocation_id: Optional[str] = <factory>, env: Dict[str, str] = <factory>)"
"description": "BaseArtifactMetadata(dbt_schema_version: str, dbt_version: str = '1.2.0a1', generated_at: datetime.datetime = <factory>, invocation_id: Union[str, NoneType] = <factory>, env: Dict[str, str] = <factory>)"
},
"RunResultOutput": {
"type": "object",
@@ -148,7 +148,7 @@
}
},
"additionalProperties": false,
"description": "RunResultOutput(status: Union[dbt.contracts.results.RunStatus, dbt.contracts.results.TestStatus, dbt.contracts.results.FreshnessStatus], timing: List[dbt.contracts.results.TimingInfo], thread_id: str, execution_time: float, adapter_response: Dict[str, Any], message: Optional[str], failures: Optional[int], unique_id: str)"
"description": "RunResultOutput(status: Union[dbt.contracts.results.RunStatus, dbt.contracts.results.TestStatus, dbt.contracts.results.FreshnessStatus], timing: List[dbt.contracts.results.TimingInfo], thread_id: str, execution_time: float, adapter_response: Dict[str, Any], message: Union[str, NoneType], failures: Union[int, NoneType], unique_id: str)"
},
"TimingInfo": {
"type": "object",
@@ -183,7 +183,7 @@
}
},
"additionalProperties": false,
"description": "TimingInfo(name: str, started_at: Optional[datetime.datetime] = None, completed_at: Optional[datetime.datetime] = None)"
"description": "TimingInfo(name: str, started_at: Union[datetime.datetime, NoneType] = None, completed_at: Union[datetime.datetime, NoneType] = None)"
},
"FreshnessMetadata": {
"type": "object",
@@ -195,12 +195,12 @@
},
"dbt_version": {
"type": "string",
"default": "1.5.0a1"
"default": "1.2.0a1"
},
"generated_at": {
"type": "string",
"format": "date-time",
"default": "2023-01-23T21:56:17.787436Z"
"default": "2022-04-15T20:38:22.697740Z"
},
"invocation_id": {
"oneOf": [
@@ -211,7 +211,7 @@
"type": "null"
}
],
"default": "10c9c26b-6682-4d46-84d2-12f641a070e5"
"default": "34abf75e-59d3-442f-920c-fa3843d98014"
},
"env": {
"type": "object",
@@ -222,7 +222,7 @@
}
},
"additionalProperties": false,
"description": "FreshnessMetadata(dbt_schema_version: str = <factory>, dbt_version: str = '1.5.0a1', generated_at: datetime.datetime = <factory>, invocation_id: Optional[str] = <factory>, env: Dict[str, str] = <factory>)"
"description": "FreshnessMetadata(dbt_schema_version: str = <factory>, dbt_version: str = '1.2.0a1', generated_at: datetime.datetime = <factory>, invocation_id: Union[str, NoneType] = <factory>, env: Dict[str, str] = <factory>)"
},
"SourceFreshnessRuntimeError": {
"type": "object",
@@ -361,7 +361,7 @@
}
},
"additionalProperties": false,
"description": "FreshnessThreshold(warn_after: Optional[dbt.contracts.graph.unparsed.Time] = <factory>, error_after: Optional[dbt.contracts.graph.unparsed.Time] = <factory>, filter: Optional[str] = None)"
"description": "FreshnessThreshold(warn_after: Union[dbt.contracts.graph.unparsed.Time, NoneType] = <factory>, error_after: Union[dbt.contracts.graph.unparsed.Time, NoneType] = <factory>, filter: Union[str, NoneType] = None)"
},
"Time": {
"type": "object",
@@ -394,7 +394,7 @@
}
},
"additionalProperties": false,
"description": "Time(count: Optional[int] = None, period: Optional[dbt.contracts.graph.unparsed.TimePeriod] = None)"
"description": "Time(count: Union[int, NoneType] = None, period: Union[dbt.contracts.graph.unparsed.TimePeriod, NoneType] = None)"
}
},
"$schema": "http://json-schema.org/draft-07/schema#",

@@ -39,12 +39,12 @@
},
"dbt_version": {
"type": "string",
"default": "1.5.0a1"
"default": "1.2.0a1"
},
"generated_at": {
"type": "string",
"format": "date-time",
"default": "2023-01-23T21:56:17.787436Z"
"default": "2022-04-15T20:38:22.697740Z"
},
"invocation_id": {
"oneOf": [
@@ -55,7 +55,7 @@
"type": "null"
}
],
"default": "10c9c26b-6682-4d46-84d2-12f641a070e5"
"default": "34abf75e-59d3-442f-920c-fa3843d98014"
},
"env": {
"type": "object",
@@ -66,7 +66,7 @@
}
},
"additionalProperties": false,
"description": "FreshnessMetadata(dbt_schema_version: str = <factory>, dbt_version: str = '1.5.0a1', generated_at: datetime.datetime = <factory>, invocation_id: Optional[str] = <factory>, env: Dict[str, str] = <factory>)"
"description": "FreshnessMetadata(dbt_schema_version: str = <factory>, dbt_version: str = '1.2.0a1', generated_at: datetime.datetime = <factory>, invocation_id: Union[str, NoneType] = <factory>, env: Dict[str, str] = <factory>)"
},
"SourceFreshnessRuntimeError": {
"type": "object",
@@ -205,7 +205,7 @@
}
},
"additionalProperties": false,
"description": "FreshnessThreshold(warn_after: Optional[dbt.contracts.graph.unparsed.Time] = <factory>, error_after: Optional[dbt.contracts.graph.unparsed.Time] = <factory>, filter: Optional[str] = None)"
"description": "FreshnessThreshold(warn_after: Union[dbt.contracts.graph.unparsed.Time, NoneType] = <factory>, error_after: Union[dbt.contracts.graph.unparsed.Time, NoneType] = <factory>, filter: Union[str, NoneType] = None)"
},
"Time": {
"type": "object",
@@ -238,7 +238,7 @@
}
},
"additionalProperties": false,
"description": "Time(count: Optional[int] = None, period: Optional[dbt.contracts.graph.unparsed.TimePeriod] = None)"
"description": "Time(count: Union[int, NoneType] = None, period: Union[dbt.contracts.graph.unparsed.TimePeriod, NoneType] = None)"
},
"TimingInfo": {
"type": "object",
@@ -273,7 +273,7 @@
}
},
"additionalProperties": false,
"description": "TimingInfo(name: str, started_at: Optional[datetime.datetime] = None, completed_at: Optional[datetime.datetime] = None)"
"description": "TimingInfo(name: str, started_at: Union[datetime.datetime, NoneType] = None, completed_at: Union[datetime.datetime, NoneType] = None)"
}
},
"$schema": "http://json-schema.org/draft-07/schema#",

scripts/env-setup.sh (new file, 6 lines)
@@ -0,0 +1,6 @@
#!/bin/bash
# Set environment variables required for integration tests
echo "DBT_INVOCATION_ENV=github-actions" >> $GITHUB_ENV
echo "DBT_TEST_USER_1=dbt_test_user_1" >> $GITHUB_ENV
echo "DBT_TEST_USER_2=dbt_test_user_2" >> $GITHUB_ENV
echo "DBT_TEST_USER_3=dbt_test_user_3" >> $GITHUB_ENV

@@ -0,0 +1,9 @@
{{
    config(
        materialized = "table",
        sort = 'first_name',
        dist = 'first_name'
    )
}}

select * from {{ this.schema }}.seed

test/integration/018_adapter_ddl_tests/seed.sql (new file, 110 lines)
@@ -0,0 +1,110 @@
create table {schema}.seed (
    id BIGSERIAL PRIMARY KEY,
    first_name VARCHAR(50),
    last_name VARCHAR(50),
    email VARCHAR(50),
    gender VARCHAR(50),
    ip_address VARCHAR(20)
);


insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Jack', 'Hunter', 'jhunter0@pbs.org', 'Male', '59.80.20.168');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Kathryn', 'Walker', 'kwalker1@ezinearticles.com', 'Female', '194.121.179.35');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Gerald', 'Ryan', 'gryan2@com.com', 'Male', '11.3.212.243');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Bonnie', 'Spencer', 'bspencer3@ameblo.jp', 'Female', '216.32.196.175');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Harold', 'Taylor', 'htaylor4@people.com.cn', 'Male', '253.10.246.136');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Jacqueline', 'Griffin', 'jgriffin5@t.co', 'Female', '16.13.192.220');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Wanda', 'Arnold', 'warnold6@google.nl', 'Female', '232.116.150.64');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Craig', 'Ortiz', 'cortiz7@sciencedaily.com', 'Male', '199.126.106.13');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Gary', 'Day', 'gday8@nih.gov', 'Male', '35.81.68.186');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Rose', 'Wright', 'rwright9@yahoo.co.jp', 'Female', '236.82.178.100');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Raymond', 'Kelley', 'rkelleya@fc2.com', 'Male', '213.65.166.67');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Gerald', 'Robinson', 'grobinsonb@disqus.com', 'Male', '72.232.194.193');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Mildred', 'Martinez', 'mmartinezc@samsung.com', 'Female', '198.29.112.5');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Dennis', 'Arnold', 'darnoldd@google.com', 'Male', '86.96.3.250');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Judy', 'Gray', 'jgraye@opensource.org', 'Female', '79.218.162.245');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Theresa', 'Garza', 'tgarzaf@epa.gov', 'Female', '21.59.100.54');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Gerald', 'Robertson', 'grobertsong@csmonitor.com', 'Male', '131.134.82.96');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Philip', 'Hernandez', 'phernandezh@adobe.com', 'Male', '254.196.137.72');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Julia', 'Gonzalez', 'jgonzalezi@cam.ac.uk', 'Female', '84.240.227.174');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Andrew', 'Davis', 'adavisj@patch.com', 'Male', '9.255.67.25');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Kimberly', 'Harper', 'kharperk@foxnews.com', 'Female', '198.208.120.253');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Mark', 'Martin', 'mmartinl@marketwatch.com', 'Male', '233.138.182.153');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Cynthia', 'Ruiz', 'cruizm@google.fr', 'Female', '18.178.187.201');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Samuel', 'Carroll', 'scarrolln@youtu.be', 'Male', '128.113.96.122');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Jennifer', 'Larson', 'jlarsono@vinaora.com', 'Female', '98.234.85.95');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Ashley', 'Perry', 'aperryp@rakuten.co.jp', 'Female', '247.173.114.52');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Howard', 'Rodriguez', 'hrodriguezq@shutterfly.com', 'Male', '231.188.95.26');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Amy', 'Brooks', 'abrooksr@theatlantic.com', 'Female', '141.199.174.118');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Louise', 'Warren', 'lwarrens@adobe.com', 'Female', '96.105.158.28');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Tina', 'Watson', 'twatsont@myspace.com', 'Female', '251.142.118.177');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Janice', 'Kelley', 'jkelleyu@creativecommons.org', 'Female', '239.167.34.233');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Terry', 'Mccoy', 'tmccoyv@bravesites.com', 'Male', '117.201.183.203');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Jeffrey', 'Morgan', 'jmorganw@surveymonkey.com', 'Male', '78.101.78.149');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Louis', 'Harvey', 'lharveyx@sina.com.cn', 'Male', '51.50.0.167');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Philip', 'Miller', 'pmillery@samsung.com', 'Male', '103.255.222.110');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Willie', 'Marshall', 'wmarshallz@ow.ly', 'Male', '149.219.91.68');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Patrick', 'Lopez', 'plopez10@redcross.org', 'Male', '250.136.229.89');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Adam', 'Jenkins', 'ajenkins11@harvard.edu', 'Male', '7.36.112.81');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Benjamin', 'Cruz', 'bcruz12@linkedin.com', 'Male', '32.38.98.15');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Ruby', 'Hawkins', 'rhawkins13@gmpg.org', 'Female', '135.171.129.255');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Carlos', 'Barnes', 'cbarnes14@a8.net', 'Male', '240.197.85.140');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Ruby', 'Griffin', 'rgriffin15@bravesites.com', 'Female', '19.29.135.24');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Sean', 'Mason', 'smason16@icq.com', 'Male', '159.219.155.249');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Anthony', 'Payne', 'apayne17@utexas.edu', 'Male', '235.168.199.218');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Steve', 'Cruz', 'scruz18@pcworld.com', 'Male', '238.201.81.198');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Anthony', 'Garcia', 'agarcia19@flavors.me', 'Male', '25.85.10.18');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Doris', 'Lopez', 'dlopez1a@sphinn.com', 'Female', '245.218.51.238');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Susan', 'Nichols', 'snichols1b@freewebs.com', 'Female', '199.99.9.61');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Wanda', 'Ferguson', 'wferguson1c@yahoo.co.jp', 'Female', '236.241.135.21');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Andrea', 'Pierce', 'apierce1d@google.co.uk', 'Female', '132.40.10.209');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Lawrence', 'Phillips', 'lphillips1e@jugem.jp', 'Male', '72.226.82.87');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Judy', 'Gilbert', 'jgilbert1f@multiply.com', 'Female', '196.250.15.142');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Eric', 'Williams', 'ewilliams1g@joomla.org', 'Male', '222.202.73.126');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Ralph', 'Romero', 'rromero1h@sogou.com', 'Male', '123.184.125.212');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Jean', 'Wilson', 'jwilson1i@ocn.ne.jp', 'Female', '176.106.32.194');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Lori', 'Reynolds', 'lreynolds1j@illinois.edu', 'Female', '114.181.203.22');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Donald', 'Moreno', 'dmoreno1k@bbc.co.uk', 'Male', '233.249.97.60');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Steven', 'Berry', 'sberry1l@eepurl.com', 'Male', '186.193.50.50');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Theresa', 'Shaw', 'tshaw1m@people.com.cn', 'Female', '120.37.71.222');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('John', 'Stephens', 'jstephens1n@nationalgeographic.com', 'Male', '191.87.127.115');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Richard', 'Jacobs', 'rjacobs1o@state.tx.us', 'Male', '66.210.83.155');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Andrew', 'Lawson', 'alawson1p@over-blog.com', 'Male', '54.98.36.94');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Peter', 'Morgan', 'pmorgan1q@rambler.ru', 'Male', '14.77.29.106');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Nicole', 'Garrett', 'ngarrett1r@zimbio.com', 'Female', '21.127.74.68');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Joshua', 'Kim', 'jkim1s@edublogs.org', 'Male', '57.255.207.41');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Ralph', 'Roberts', 'rroberts1t@people.com.cn', 'Male', '222.143.131.109');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('George', 'Montgomery', 'gmontgomery1u@smugmug.com', 'Male', '76.75.111.77');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Gerald', 'Alvarez', 'galvarez1v@flavors.me', 'Male', '58.157.186.194');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Donald', 'Olson', 'dolson1w@whitehouse.gov', 'Male', '69.65.74.135');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Carlos', 'Morgan', 'cmorgan1x@pbs.org', 'Male', '96.20.140.87');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Aaron', 'Stanley', 'astanley1y@webnode.com', 'Male', '163.119.217.44');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Virginia', 'Long', 'vlong1z@spiegel.de', 'Female', '204.150.194.182');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Robert', 'Berry', 'rberry20@tripadvisor.com', 'Male', '104.19.48.241');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Antonio', 'Brooks', 'abrooks21@unesco.org', 'Male', '210.31.7.24');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Ruby', 'Garcia', 'rgarcia22@ovh.net', 'Female', '233.218.162.214');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Jack', 'Hanson', 'jhanson23@blogtalkradio.com', 'Male', '31.55.46.199');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Kathryn', 'Nelson', 'knelson24@walmart.com', 'Female', '14.189.146.41');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Jason', 'Reed', 'jreed25@printfriendly.com', 'Male', '141.189.89.255');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('George', 'Coleman', 'gcoleman26@people.com.cn', 'Male', '81.189.221.144');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Rose', 'King', 'rking27@ucoz.com', 'Female', '212.123.168.231');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Johnny', 'Holmes', 'jholmes28@boston.com', 'Male', '177.3.93.188');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Katherine', 'Gilbert', 'kgilbert29@altervista.org', 'Female', '199.215.169.61');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Joshua', 'Thomas', 'jthomas2a@ustream.tv', 'Male', '0.8.205.30');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Julie', 'Perry', 'jperry2b@opensource.org', 'Female', '60.116.114.192');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Richard', 'Perry', 'rperry2c@oracle.com', 'Male', '181.125.70.232');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Kenneth', 'Ruiz', 'kruiz2d@wikimedia.org', 'Male', '189.105.137.109');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Jose', 'Morgan', 'jmorgan2e@webnode.com', 'Male', '101.134.215.156');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Donald', 'Campbell', 'dcampbell2f@goo.ne.jp', 'Male', '102.120.215.84');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Debra', 'Collins', 'dcollins2g@uol.com.br', 'Female', '90.13.153.235');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Jesse', 'Johnson', 'jjohnson2h@stumbleupon.com', 'Male', '225.178.125.53');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Elizabeth', 'Stone', 'estone2i@histats.com', 'Female', '123.184.126.221');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Angela', 'Rogers', 'arogers2j@goodreads.com', 'Female', '98.104.132.187');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Emily', 'Dixon', 'edixon2k@mlb.com', 'Female', '39.190.75.57');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Albert', 'Scott', 'ascott2l@tinypic.com', 'Male', '40.209.13.189');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Barbara', 'Peterson', 'bpeterson2m@ow.ly', 'Female', '75.249.136.180');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Adam', 'Greene', 'agreene2n@fastcompany.com', 'Male', '184.173.109.144');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Earl', 'Sanders', 'esanders2o@hc360.com', 'Male', '247.34.90.117');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Angela', 'Brooks', 'abrooks2p@mtv.com', 'Female', '10.63.249.126');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Harold', 'Foster', 'hfoster2q@privacy.gov.au', 'Male', '139.214.40.244');
insert into {schema}.seed (first_name, last_name, email, gender, ip_address) values ('Carl', 'Meyer', 'cmeyer2r@disqus.com', 'Male', '204.117.7.88');
23
test/integration/018_adapter_ddl_tests/test_adapter_ddl.py
Normal file
@@ -0,0 +1,23 @@
from test.integration.base import DBTIntegrationTest, use_profile


class TestAdapterDDL(DBTIntegrationTest):

    def setUp(self):
        DBTIntegrationTest.setUp(self)

        self.run_sql_file("seed.sql")

    @property
    def schema(self):
        return "adaper_ddl_018"

    @property
    def models(self):
        return "models"

    @use_profile('postgres')
    def test_sort_and_dist_keys_are_nops_on_postgres(self):
        results = self.run_dbt(['run'])
        self.assertEqual(len(results), 1)

        self.assertTablesEqual("seed", "materialized")
10
test/integration/022_timezones_tests/models/timezones.sql
Normal file
@@ -0,0 +1,10 @@

{{
    config(
        materialized='table'
    )
}}

select
    '{{ run_started_at.astimezone(modules.pytz.timezone("America/New_York")) }}' as run_started_at_est,
    '{{ run_started_at }}' as run_started_at_utc
52
test/integration/022_timezones_tests/test_timezones.py
Normal file
@@ -0,0 +1,52 @@
from freezegun import freeze_time
from test.integration.base import DBTIntegrationTest, use_profile


class TestTimezones(DBTIntegrationTest):

    @property
    def schema(self):
        return "timezones_022"

    @property
    def models(self):
        return "models"

    @property
    def profile_config(self):
        return {
            'test': {
                'outputs': {
                    'dev': {
                        'type': 'postgres',
                        'threads': 1,
                        'host': self.database_host,
                        'port': 5432,
                        'user': "root",
                        'pass': "password",
                        'dbname': 'dbt',
                        'schema': self.unique_schema()
                    },
                },
                'target': 'dev'
            }
        }

    @property
    def query(self):
        return """
            select
                run_started_at_est,
                run_started_at_utc
            from {schema}.timezones
        """.format(schema=self.unique_schema())

    @freeze_time("2017-01-01 03:00:00", tz_offset=0)
    @use_profile('postgres')
    def test_postgres_run_started_at(self):
        results = self.run_dbt(['run'])
        self.assertEqual(len(results), 1)
        result = self.run_sql(self.query, fetch='all')[0]
        est, utc = result
        self.assertEqual(utc, '2017-01-01 03:00:00+00:00')
        self.assertEqual(est, '2016-12-31 22:00:00-05:00')
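Note: the two assertions above follow from fixed timezone arithmetic. freeze_time pins the run to 2017-01-01 03:00:00 UTC, and America/New_York is on EST (UTC-5) at that instant, so the same moment renders as 2016-12-31 22:00:00-05:00. A minimal standalone sketch of that conversion in plain Python (illustrative, not part of the diff):

from datetime import datetime

import pytz  # the library dbt exposes to Jinja as modules.pytz

# The instant the test freezes the clock to, as an aware UTC datetime.
run_started_at = datetime(2017, 1, 1, 3, 0, 0, tzinfo=pytz.UTC)
est = run_started_at.astimezone(pytz.timezone("America/New_York"))

print(run_started_at)  # 2017-01-01 03:00:00+00:00
print(est)             # 2016-12-31 22:00:00-05:00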
@@ -0,0 +1,7 @@
{{
  config(
    materialized = "view"
  )
}}

select * from "{{ this.schema + 'z' }}"."external"
@@ -0,0 +1,2 @@

select 1 as id
@@ -0,0 +1,78 @@
from test.integration.base import DBTIntegrationTest, use_profile


class TestExternalReference(DBTIntegrationTest):
    @property
    def schema(self):
        return "external_reference_037"

    @property
    def models(self):
        return "models"

    def setUp(self):
        super().setUp()
        self.use_default_project()
        self.external_schema = self.unique_schema()+'z'
        self.run_sql(
            'create schema "{}"'.format(self.external_schema)
        )
        self.run_sql(
            'create table "{}"."external" (id integer)'
            .format(self.external_schema)
        )
        self.run_sql(
            'insert into "{}"."external" values (1), (2)'
            .format(self.external_schema)
        )

    def tearDown(self):
        # This has to happen before we drop the external schema, because
        # otherwise postgres hangs forever.
        self._drop_schemas()
        with self.get_connection():
            self._drop_schema_named(self.default_database, self.external_schema)
        super().tearDown()

    @use_profile('postgres')
    def test__postgres__external_reference(self):
        self.assertEqual(len(self.run_dbt()), 1)
        # running it again should succeed
        self.assertEqual(len(self.run_dbt()), 1)


# The opposite of the test above -- check that external relations that
# depend on a dbt model do not create issues with caching
class TestExternalDependency(DBTIntegrationTest):
    @property
    def schema(self):
        return "external_dependency_037"

    @property
    def models(self):
        return "standalone_models"

    def tearDown(self):
        # This has to happen before we drop the external schema, because
        # otherwise postgres hangs forever.
        self._drop_schemas()
        with self.get_connection():
            self._drop_schema_named(self.default_database, self.external_schema)
        super().tearDown()

    @use_profile('postgres')
    def test__postgres__external_reference(self):
        self.assertEqual(len(self.run_dbt()), 1)

        # create a view outside of the dbt schema that depends on this model
        self.external_schema = self.unique_schema()+'zz'
        self.run_sql(
            'create schema "{}"'.format(self.external_schema)
        )
        self.run_sql(
            'create view "{}"."external" as (select * from {}.my_model)'
            .format(self.external_schema, self.unique_schema())
        )

        # running it again should succeed
        self.assertEqual(len(self.run_dbt()), 1)
@@ -0,0 +1,17 @@

{#-- Verify that the config['alias'] key is present #}
{% macro generate_alias_name(custom_alias_name, node) -%}
    {%- if custom_alias_name is none -%}
        {{ node.name }}
    {%- else -%}
        custom_{{ node.config['alias'] if 'alias' in node.config else '' | trim }}
    {%- endif -%}
{%- endmacro %}

{% macro string_literal(s) -%}
    {{ adapter.dispatch('string_literal', macro_namespace='test')(s) }}
{%- endmacro %}

{% macro default__string_literal(s) %}
    '{{ s }}'::text
{% endmacro %}
17
test/integration/043_custom_aliases_tests/macros/macros.sql
Normal file
@@ -0,0 +1,17 @@

{% macro generate_alias_name(custom_alias_name, node) -%}
    {%- if custom_alias_name is none -%}
        {{ node.name }}
    {%- else -%}
        custom_{{ custom_alias_name | trim }}
    {%- endif -%}
{%- endmacro %}


{% macro string_literal(s) -%}
    {{ adapter.dispatch('string_literal', macro_namespace='test')(s) }}
{%- endmacro %}

{% macro default__string_literal(s) %}
    '{{ s }}'::text
{% endmacro %}
@@ -0,0 +1,3 @@
{{ config(materialized='table', alias='alias') }}

select {{ string_literal(this.name) }} as model_name
@@ -0,0 +1,3 @@
{{ config(materialized='table') }}

select {{ string_literal(this.name) }} as model_name
15
test/integration/043_custom_aliases_tests/models/schema.yml
Normal file
@@ -0,0 +1,15 @@
version: 2

models:
  - name: model1
    columns:
      - name: model_name
        tests:
          - accepted_values:
              values: ['custom_alias']
  - name: model2
    columns:
      - name: model_name
        tests:
          - accepted_values:
              values: ['model2']
@@ -0,0 +1,39 @@
from test.integration.base import DBTIntegrationTest, use_profile


class TestAliases(DBTIntegrationTest):
    @property
    def schema(self):
        return "custom_aliases_043"

    @property
    def models(self):
        return "models"

    @property
    def project_config(self):
        return {
            'config-version': 2,
            "macro-paths": ['macros'],
        }

    @use_profile('postgres')
    def test_postgres_customer_alias_name(self):
        results = self.run_dbt(['run'])
        self.assertEqual(len(results), 2)
        self.run_dbt(['test'])


class TestAliasesWithConfig(TestAliases):
    @property
    def project_config(self):
        return {
            'config-version': 2,
            "macro-paths": ['macros-configs'],
        }

    @use_profile('postgres')
    def test_postgres_customer_alias_name(self):
        results = self.run_dbt(['run'])
        self.assertEqual(len(results), 2)
        self.run_dbt(['test'])
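Note: the accepted_values expectations in schema.yml above fall out of the generate_alias_name macro. A rough Python paraphrase of its logic (an illustrative sketch, not dbt's implementation):

def generate_alias_name(custom_alias_name, node_name):
    # No custom alias configured: use the node's own name.
    if custom_alias_name is None:
        return node_name
    # Custom alias configured: prefix it, mirroring custom_{{ alias | trim }}.
    return "custom_" + custom_alias_name.strip()

assert generate_alias_name("alias", "model1") == "custom_alias"  # model1 sets alias='alias'
assert generate_alias_name(None, "model2") == "model2"           # model2 sets no alias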
@@ -0,0 +1,4 @@
-- Macro to override ref and always return the same result
{% macro ref(modelname) %}
{% do return(builtins.ref(modelname).replace_path(identifier='seed_2')) %}
{% endmacro %}
@@ -0,0 +1,3 @@
select
    *
from {{ ref('seed_1') }}
4
test/integration/055_ref_override_tests/seeds/seed_1.csv
Normal file
@@ -0,0 +1,4 @@
a,b
1,2
2,4
3,6
4
test/integration/055_ref_override_tests/seeds/seed_2.csv
Normal file
@@ -0,0 +1,4 @@
a,b
6,2
12,4
18,6
30
test/integration/055_ref_override_tests/test_ref_override.py
Normal file
@@ -0,0 +1,30 @@
from test.integration.base import DBTIntegrationTest, use_profile


class TestRefOverride(DBTIntegrationTest):
    @property
    def schema(self):
        return "dbt_ref_override_055"

    @property
    def project_config(self):
        return {
            'config-version': 2,
            'seed-paths': ['seeds'],
            "macro-paths": ["macros"],
            'seeds': {
                'quote_columns': False,
            },
        }

    @property
    def models(self):
        return "models"

    @use_profile('postgres')
    def test_postgres_ref_override(self):
        self.run_dbt(['seed'])
        self.run_dbt(['run'])
        # We want it to equal seed_2 and not seed_1. If it's
        # still pointing at seed_1 then the override hasn't worked.
        self.assertTablesEqual('ref_override', 'seed_2')
@@ -1,8 +1,4 @@
import pytest

from dbt.tests.util import run_dbt

macros_sql = """
{% macro test_array_results() %}

{% set sql %}
@@ -18,16 +14,3 @@ macros_sql = """
{% endif %}

{% endmacro %}
"""


class TestTypes:
    @pytest.fixture(scope="class")
    def macros(self):
        return {
            "macros.sql": macros_sql,
        }

    def test_nested_types(self, project):
        result = run_dbt(["run-operation", "test_array_results"])
        assert result.success
Some files were not shown because too many files have changed in this diff