forked from repo-mirrors/dbt-core

Compare commits: jerco/upda...remove-ipd

29 Commits
83c40d7fb5
6bfba6d116
d23d414dfe
6e7e572e86
d57542307b
7f2e9f94c0
27b2e9ac54
a6890609e0
3c30f96ab2
6fff2888d4
3f17044383
e1345d87cd
6e9ff280e2
6defc86ef7
3fbb0a38ea
d71385bdb6
c1925c67c2
54d186583b
9026a0598c
2d1215ed47
7f63c3d083
19c48e285e
65f40b317e
c94c891f73
fc45a51582
6042469c71
2584465169
9a81a4dfe3
b6a82446e5
.bumpversion.cfg

@@ -1,18 +1,14 @@
[bumpversion]
current_version = 1.7.0a1
current_version = 1.3.4
parse = (?P<major>[\d]+) # major version number
    \.(?P<minor>[\d]+) # minor version number
    \.(?P<patch>[\d]+) # patch version number
    (?P<prerelease> # optional pre-release - ex: a1, b2, rc25
    (?P<prekind>a|b|rc) # pre-release type
    (?P<num>[\d]+) # pre-release version number
    (((?P<prekind>a|b|rc) # optional pre-release type
    ?(?P<num>[\d]+?)) # optional pre-release version number
    \.?(?P<nightly>[a-z0-9]+\+[a-z]+)? # optional nightly release indicator
    )?
    ( # optional nightly release indicator
    \.(?P<nightly>dev[0-9]+) # ex: .dev02142023
    )? # expected matches: `1.15.0`, `1.5.0a11`, `1.5.0a1.dev123`, `1.5.0.dev123457`, expected failures: `1`, `1.5`, `1.5.2-a1`, `text1.5.0`
serialize =
    {major}.{minor}.{patch}{prekind}{num}.{nightly}
    {major}.{minor}.{patch}.{nightly}
    {major}.{minor}.{patch}{prekind}{num}
    {major}.{minor}.{patch}
commit = False
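The expected matches and failures annotated on the nightly group above can be sanity-checked directly. A minimal sketch, assuming the single pattern below is an accurate flattening of the multi-line `parse` value; bumpversion does its own parsing, so this is only an illustration:

```python
import re

# Assumed flattening of the multi-line bumpversion `parse` value shown above
PARSE = re.compile(
    r"""(?P<major>[\d]+)            # major version number
    \.(?P<minor>[\d]+)              # minor version number
    \.(?P<patch>[\d]+)              # patch version number
    (?P<prerelease>                 # optional pre-release, ex: a1, b2, rc25
      (?P<prekind>a|b|rc)           # pre-release type
      (?P<num>[\d]+)                # pre-release version number
    )?
    (                               # optional nightly release indicator
      \.(?P<nightly>dev[0-9]+)      # ex: .dev02142023
    )?""",
    re.VERBOSE,
)

for ok in ("1.15.0", "1.5.0a11", "1.5.0a1.dev123", "1.5.0.dev123457"):
    assert PARSE.fullmatch(ok), ok        # expected matches
for bad in ("1", "1.5", "1.5.2-a1", "text1.5.0"):
    assert not PARSE.fullmatch(bad), bad  # expected failures
```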
CHANGELOG.md

@@ -3,10 +3,6 @@
For information on prior major and minor releases, see their changelogs:

* [1.6](https://github.com/dbt-labs/dbt-core/blob/1.6.latest/CHANGELOG.md)
* [1.5](https://github.com/dbt-labs/dbt-core/blob/1.5.latest/CHANGELOG.md)
* [1.4](https://github.com/dbt-labs/dbt-core/blob/1.4.latest/CHANGELOG.md)
* [1.3](https://github.com/dbt-labs/dbt-core/blob/1.3.latest/CHANGELOG.md)
* [1.2](https://github.com/dbt-labs/dbt-core/blob/1.2.latest/CHANGELOG.md)
* [1.1](https://github.com/dbt-labs/dbt-core/blob/1.1.latest/CHANGELOG.md)
* [1.0](https://github.com/dbt-labs/dbt-core/blob/1.0.latest/CHANGELOG.md)
140 .changes/1.3.0.md Normal file

@@ -0,0 +1,140 @@
## dbt-core 1.3.0 - October 12, 2022

### Breaking Changes

- Renaming Metric Spec Attributes ([#5774](https://github.com/dbt-labs/dbt-core/issues/5774), [#5775](https://github.com/dbt-labs/dbt-core/pull/5775))

### Features

- Add `--defer` flag to dbt compile & dbt docs generate ([#4110](https://github.com/dbt-labs/dbt-core/issues/4110), [#4514](https://github.com/dbt-labs/dbt-core/pull/4514))
- Python model initial version ([#5261](https://github.com/dbt-labs/dbt-core/issues/5261), [#5421](https://github.com/dbt-labs/dbt-core/pull/5421))
- allows users to include the file extension for .py models in the dbt run -m command. ([#5289](https://github.com/dbt-labs/dbt-core/issues/5289), [#5295](https://github.com/dbt-labs/dbt-core/pull/5295))
- Incremental materialization refactor and cleanup ([#5245](https://github.com/dbt-labs/dbt-core/issues/5245), [#5359](https://github.com/dbt-labs/dbt-core/pull/5359))
- Python models can support incremental logic ([#0](https://github.com/dbt-labs/dbt-core/issues/0), [#35](https://github.com/dbt-labs/dbt-core/pull/35))
- Add reusable function for retrying adapter connections. Utilize said function to add retries for Postgres (and Redshift). ([#5022](https://github.com/dbt-labs/dbt-core/issues/5022), [#5432](https://github.com/dbt-labs/dbt-core/pull/5432))
- merge_exclude_columns for incremental materialization ([#5260](https://github.com/dbt-labs/dbt-core/issues/5260), [#5457](https://github.com/dbt-labs/dbt-core/pull/5457))
- add exponential backoff to connection retries on Postgres (and Redshift) ([#5502](https://github.com/dbt-labs/dbt-core/issues/5502), [#5503](https://github.com/dbt-labs/dbt-core/pull/5503))
- use MethodName.File when value ends with .csv ([#5578](https://github.com/dbt-labs/dbt-core/issues/5578), [#5581](https://github.com/dbt-labs/dbt-core/pull/5581))
- Make `docs` configurable in `dbt_project.yml` and add a `node_color` attribute to change the color of nodes in the DAG ([#5333](https://github.com/dbt-labs/dbt-core/issues/5333), [#5397](https://github.com/dbt-labs/dbt-core/pull/5397))
- Adding ResolvedMetricReference helper functions and tests ([#5567](https://github.com/dbt-labs/dbt-core/issues/5567), [#5607](https://github.com/dbt-labs/dbt-core/pull/5607))
- Check dbt-core version requirements when installing Hub packages ([#5648](https://github.com/dbt-labs/dbt-core/issues/5648), [#5651](https://github.com/dbt-labs/dbt-core/pull/5651))
- Search current working directory for `profiles.yml` ([#5411](https://github.com/dbt-labs/dbt-core/issues/5411), [#5717](https://github.com/dbt-labs/dbt-core/pull/5717))
- Adding the `window` parameter to the metric spec. ([#5721](https://github.com/dbt-labs/dbt-core/issues/5721), [#5722](https://github.com/dbt-labs/dbt-core/pull/5722))
- Add invocation args dict to ProviderContext class ([#5524](https://github.com/dbt-labs/dbt-core/issues/5524), [#5782](https://github.com/dbt-labs/dbt-core/pull/5782))
- Adds new cli framework ([#5526](https://github.com/dbt-labs/dbt-core/issues/5526), [#5647](https://github.com/dbt-labs/dbt-core/pull/5647))
- Flags work with new Click CLI ([#5529](https://github.com/dbt-labs/dbt-core/issues/5529), [#5790](https://github.com/dbt-labs/dbt-core/pull/5790))
- Add metadata env method to ProviderContext class ([#5522](https://github.com/dbt-labs/dbt-core/issues/5522), [#5794](https://github.com/dbt-labs/dbt-core/pull/5794))
- Array macros ([#5520](https://github.com/dbt-labs/dbt-core/issues/5520), [#5823](https://github.com/dbt-labs/dbt-core/pull/5823))
- Add enabled config to exposures and metrics ([#5422](https://github.com/dbt-labs/dbt-core/issues/5422), [#5815](https://github.com/dbt-labs/dbt-core/pull/5815))
- Migrate dbt-utils current_timestamp macros into core + adapters ([#5521](https://github.com/dbt-labs/dbt-core/issues/5521), [#5838](https://github.com/dbt-labs/dbt-core/pull/5838))
- add -fr flag shorthand ([#5878](https://github.com/dbt-labs/dbt-core/issues/5878), [#5879](https://github.com/dbt-labs/dbt-core/pull/5879))
- add type_boolean as a data type macro ([#5739](https://github.com/dbt-labs/dbt-core/issues/5739), [#5875](https://github.com/dbt-labs/dbt-core/pull/5875))
- Support .dbtignore in project root to ignore certain files being read by dbt ([#5733](https://github.com/dbt-labs/dbt-core/issues/5733), [#5897](https://github.com/dbt-labs/dbt-core/pull/5897))
- This conditionally no-ops warehouse connection at compile depending on an env var, disabling introspection/queries during compilation only. This is a temporary solution to more complex permissions requirements for the semantic layer. ([#5936](https://github.com/dbt-labs/dbt-core/issues/5936), [#5926](https://github.com/dbt-labs/dbt-core/pull/5926))

### Fixes

- Remove the default 256 characters limit on postgres character varying type when no limitation is set ([#5238](https://github.com/dbt-labs/dbt-core/issues/5238), [#5292](https://github.com/dbt-labs/dbt-core/pull/5292))
- Include schema file config in unrendered_config ([#5338](https://github.com/dbt-labs/dbt-core/issues/5338), [#5344](https://github.com/dbt-labs/dbt-core/pull/5344))
- Add context to compilation errors generated while rendering generic test configuration values. ([#5294](https://github.com/dbt-labs/dbt-core/issues/5294), [#5393](https://github.com/dbt-labs/dbt-core/pull/5393))
- Resolves #5351 - Do not consider shorter varchar cols as schema changes ([#5351](https://github.com/dbt-labs/dbt-core/issues/5351), [#5395](https://github.com/dbt-labs/dbt-core/pull/5395))
- Rename try to strict for more intuitiveness ([#5475](https://github.com/dbt-labs/dbt-core/issues/5475), [#5477](https://github.com/dbt-labs/dbt-core/pull/5477))
- on_schema_change fail verbosity enhancement ([#5504](https://github.com/dbt-labs/dbt-core/issues/5504), [#5505](https://github.com/dbt-labs/dbt-core/pull/5505))
- Ignore empty strings passed in as secrets ([#5312](https://github.com/dbt-labs/dbt-core/issues/5312), [#5518](https://github.com/dbt-labs/dbt-core/pull/5518))
- Fix handling of top-level exceptions ([#5564](https://github.com/dbt-labs/dbt-core/issues/5564), [#5560](https://github.com/dbt-labs/dbt-core/pull/5560))
- Fix error rendering docs block in metrics description ([#5585](https://github.com/dbt-labs/dbt-core/issues/5585), [#5603](https://github.com/dbt-labs/dbt-core/pull/5603))
- Extended validations for the project names ([#5379](https://github.com/dbt-labs/dbt-core/issues/5379), [#5620](https://github.com/dbt-labs/dbt-core/pull/5620))
- Use sys.exit instead of exit ([#5621](https://github.com/dbt-labs/dbt-core/issues/5621), [#5627](https://github.com/dbt-labs/dbt-core/pull/5627))
- Finishing logic upgrade to Redshift for name truncation collisions. ([#5586](https://github.com/dbt-labs/dbt-core/issues/5586), [#5656](https://github.com/dbt-labs/dbt-core/pull/5656))
- multiple args for ref and source ([#5634](https://github.com/dbt-labs/dbt-core/issues/5634), [#5635](https://github.com/dbt-labs/dbt-core/pull/5635))
- Fix Unexpected behavior when chaining methods on dbt-ref'ed/sourced dataframes ([#5646](https://github.com/dbt-labs/dbt-core/issues/5646), [#5677](https://github.com/dbt-labs/dbt-core/pull/5677))
- Fix typos of comments in core/dbt/adapters/ ([#5690](https://github.com/dbt-labs/dbt-core/issues/5690), [#5693](https://github.com/dbt-labs/dbt-core/pull/5693))
- Include py.typed in MANIFEST.in. This enables packages that install dbt-core from pypi to use mypy. ([#5703](https://github.com/dbt-labs/dbt-core/issues/5703), [#5703](https://github.com/dbt-labs/dbt-core/pull/5703))
- Removal of all .coverage files when using make clean command ([#5633](https://github.com/dbt-labs/dbt-core/issues/5633), [#5759](https://github.com/dbt-labs/dbt-core/pull/5759))
- Remove temp files generated by unit tests ([#5631](https://github.com/dbt-labs/dbt-core/issues/5631), [#5749](https://github.com/dbt-labs/dbt-core/pull/5749))
- Fix warnings as errors during tests ([#5424](https://github.com/dbt-labs/dbt-core/issues/5424), [#5800](https://github.com/dbt-labs/dbt-core/pull/5800))
- Prevent event_history from holding references ([#5848](https://github.com/dbt-labs/dbt-core/issues/5848), [#5858](https://github.com/dbt-labs/dbt-core/pull/5858))
- Account for disabled flags on models in schema files more completely ([#3992](https://github.com/dbt-labs/dbt-core/issues/3992), [#5868](https://github.com/dbt-labs/dbt-core/pull/5868))
- ConfigSelectorMethod should check for bools ([#5890](https://github.com/dbt-labs/dbt-core/issues/5890), [#5889](https://github.com/dbt-labs/dbt-core/pull/5889))
- shorthand for full refresh should be one character ([#5878](https://github.com/dbt-labs/dbt-core/issues/5878), [#5908](https://github.com/dbt-labs/dbt-core/pull/5908))
- Fix macro resolution order during static analysis for custom generic tests ([#5720](https://github.com/dbt-labs/dbt-core/issues/5720), [#5907](https://github.com/dbt-labs/dbt-core/pull/5907))
- Fix race condition when invoking dbt via lib.py concurrently ([#5919](https://github.com/dbt-labs/dbt-core/issues/5919), [#5921](https://github.com/dbt-labs/dbt-core/pull/5921))
- check length of args of python model function before accessing it ([#6041](https://github.com/dbt-labs/dbt-core/issues/6041), [#6042](https://github.com/dbt-labs/dbt-core/pull/6042))

### Docs

- Update dependency inline-source from ^6.1.5 to ^7.2.0 ([dbt-docs/#299](https://github.com/dbt-labs/dbt-docs/issues/299), [dbt-docs/#291](https://github.com/dbt-labs/dbt-docs/pull/291))
- Update dependency jest from ^26.2.2 to ^28.1.3 ([dbt-docs/#299](https://github.com/dbt-labs/dbt-docs/issues/299), [dbt-docs/#291](https://github.com/dbt-labs/dbt-docs/pull/291))
- Update dependency underscore from ^1.9.0 to ^1.13.4 ([dbt-docs/#299](https://github.com/dbt-labs/dbt-docs/issues/299), [dbt-docs/#291](https://github.com/dbt-labs/dbt-docs/pull/291))
- Update dependency webpack-cli from ^3.3.12 to ^4.7.0 ([dbt-docs/#299](https://github.com/dbt-labs/dbt-docs/issues/299), [dbt-docs/#291](https://github.com/dbt-labs/dbt-docs/pull/291))
- Update dependency webpack-dev-server from ^3.1.11 to ^4.9.3 ([dbt-docs/#299](https://github.com/dbt-labs/dbt-docs/issues/299), [dbt-docs/#291](https://github.com/dbt-labs/dbt-docs/pull/291))
- Searches no longer require perfect matches, and instead consider each word individually. `my model` or `model my` will now find `my_model`, without the need for underscores ([dbt-docs/#143](https://github.com/dbt-labs/dbt-docs/issues/143), [dbt-docs/#145](https://github.com/dbt-labs/dbt-docs/pull/145))
- Support the renaming of SQL to code happening in dbt-core ([dbt-docs/#299](https://github.com/dbt-labs/dbt-docs/issues/299), [dbt-docs/#292](https://github.com/dbt-labs/dbt-docs/pull/292))
- Leverages `docs.node_color` from `dbt-core` to color nodes in the DAG ([dbt-docs/#44](https://github.com/dbt-labs/dbt-docs/issues/44), [dbt-docs/#281](https://github.com/dbt-labs/dbt-docs/pull/281))
- Refer to exposures by their label by default. ([dbt-docs/#306](https://github.com/dbt-labs/dbt-docs/issues/306), [dbt-docs/#307](https://github.com/dbt-labs/dbt-docs/pull/307))

### Under the Hood

- Added language to tracked fields in run_model event ([#5571](https://github.com/dbt-labs/dbt-core/issues/5571), [#5469](https://github.com/dbt-labs/dbt-core/pull/5469))
- Update mashumaro to 3.0.3 ([#4940](https://github.com/dbt-labs/dbt-core/issues/4940), [#5118](https://github.com/dbt-labs/dbt-core/pull/5118))
- Add python incremental materialization test ([#0000](https://github.com/dbt-labs/dbt-core/issues/0000), [#5571](https://github.com/dbt-labs/dbt-core/pull/5571))
- Save use of default env vars to manifest to enable partial parsing in those cases. ([#5155](https://github.com/dbt-labs/dbt-core/issues/5155), [#5589](https://github.com/dbt-labs/dbt-core/pull/5589))
- add more information to log line interop test failures ([#5658](https://github.com/dbt-labs/dbt-core/issues/5658), [#5659](https://github.com/dbt-labs/dbt-core/pull/5659))
- Add supported languages to materializations ([#5569](https://github.com/dbt-labs/dbt-core/issues/5569), [#5695](https://github.com/dbt-labs/dbt-core/pull/5695))
- Migrate integration test 014, fix the snapshot hard delete test's timezone logic, and force all integration tests to run flags.set_from_args so that environment variables are accessible to all integration test threads. ([#5760](https://github.com/dbt-labs/dbt-core/issues/5760), [#5760](https://github.com/dbt-labs/dbt-core/pull/5760))
- Support dbt-metrics compilation by rebuilding flat_graph ([#5525](https://github.com/dbt-labs/dbt-core/issues/5525), [#5786](https://github.com/dbt-labs/dbt-core/pull/5786))
- Reworking the way we define the window attribute of metrics to match freshness tests ([#5722](https://github.com/dbt-labs/dbt-core/issues/5722), [#5793](https://github.com/dbt-labs/dbt-core/pull/5793))
- Add PythonJobHelper base class in core and add more type checking ([#5802](https://github.com/dbt-labs/dbt-core/issues/5802), [#5802](https://github.com/dbt-labs/dbt-core/pull/5802))
- The link did not go to the anchor directly, now it does ([#5813](https://github.com/dbt-labs/dbt-core/issues/5813), [#5814](https://github.com/dbt-labs/dbt-core/pull/5814))
- remove key as reserved keyword from test_bool_or ([#5817](https://github.com/dbt-labs/dbt-core/issues/5817), [#5818](https://github.com/dbt-labs/dbt-core/pull/5818))
- Convert default selector tests to pytest ([#5728](https://github.com/dbt-labs/dbt-core/issues/5728), [#5820](https://github.com/dbt-labs/dbt-core/pull/5820))
- Compatibility for metric attribute renaming ([#5807](https://github.com/dbt-labs/dbt-core/issues/5807), [#5825](https://github.com/dbt-labs/dbt-core/pull/5825))
- remove source quoting setting in adapter tests ([#5836](https://github.com/dbt-labs/dbt-core/issues/5836), [#5839](https://github.com/dbt-labs/dbt-core/pull/5839))
- Add name validation for metrics ([#5456](https://github.com/dbt-labs/dbt-core/issues/5456), [#5841](https://github.com/dbt-labs/dbt-core/pull/5841))
- Validate exposure name and add label ([#5606](https://github.com/dbt-labs/dbt-core/issues/5606), [#5844](https://github.com/dbt-labs/dbt-core/pull/5844))
- Adding validation for metric expression attribute ([#5871](https://github.com/dbt-labs/dbt-core/issues/5871), [#5873](https://github.com/dbt-labs/dbt-core/pull/5873))
- Profiling and Adapter Management work with Click CLI ([#5531](https://github.com/dbt-labs/dbt-core/issues/5531), [#5892](https://github.com/dbt-labs/dbt-core/pull/5892))
- Reparse references to deleted metric ([#5444](https://github.com/dbt-labs/dbt-core/issues/5444), [#5920](https://github.com/dbt-labs/dbt-core/pull/5920))

### Dependencies

- Upgrade to Jinja2==3.1.2 from Jinja2==2.11.3 ([#4748](https://github.com/dbt-labs/dbt-core/issues/4748), [#5465](https://github.com/dbt-labs/dbt-core/pull/5465))
- Bump mypy from 0.961 to 0.971 ([#4904](https://github.com/dbt-labs/dbt-core/issues/4904), [#5495](https://github.com/dbt-labs/dbt-core/pull/5495))
- Remove pin for MarkupSafe from >=0.23,<2.1 ([#5506](https://github.com/dbt-labs/dbt-core/issues/5506), [#5507](https://github.com/dbt-labs/dbt-core/pull/5507))

### Dependency

- Bump python from 3.10.5-slim-bullseye to 3.10.6-slim-bullseye in /docker ([#4904](https://github.com/dbt-labs/dbt-core/issues/4904), [#5623](https://github.com/dbt-labs/dbt-core/pull/5623))
- Bump mashumaro[msgpack] from 3.0.3 to 3.0.4 in /core ([#4904](https://github.com/dbt-labs/dbt-core/issues/4904), [#5649](https://github.com/dbt-labs/dbt-core/pull/5649))
- Bump black from 22.6.0 to 22.8.0 ([#4904](https://github.com/dbt-labs/dbt-core/issues/4904), [#5750](https://github.com/dbt-labs/dbt-core/pull/5750))
- Bump python from 3.10.6-slim-bullseye to 3.10.7-slim-bullseye in /docker ([#4904](https://github.com/dbt-labs/dbt-core/issues/4904), [#5805](https://github.com/dbt-labs/dbt-core/pull/5805))

### Contributors

- [@Goodkat](https://github.com/Goodkat) ([#5581](https://github.com/dbt-labs/dbt-core/pull/5581), [#5518](https://github.com/dbt-labs/dbt-core/pull/5518), [#5620](https://github.com/dbt-labs/dbt-core/pull/5620))
- [@Ilanbenb](https://github.com/Ilanbenb) ([#5505](https://github.com/dbt-labs/dbt-core/pull/5505))
- [@b-per](https://github.com/b-per) ([#5397](https://github.com/dbt-labs/dbt-core/pull/5397), [dbt-docs/#281](https://github.com/dbt-labs/dbt-docs/pull/281))
- [@bbroeksema](https://github.com/bbroeksema) ([#5749](https://github.com/dbt-labs/dbt-core/pull/5749))
- [@callum-mcdata](https://github.com/callum-mcdata) ([#5775](https://github.com/dbt-labs/dbt-core/pull/5775), [#5607](https://github.com/dbt-labs/dbt-core/pull/5607), [#5722](https://github.com/dbt-labs/dbt-core/pull/5722), [#5793](https://github.com/dbt-labs/dbt-core/pull/5793), [#5825](https://github.com/dbt-labs/dbt-core/pull/5825), [#5873](https://github.com/dbt-labs/dbt-core/pull/5873))
- [@danielcmessias](https://github.com/danielcmessias) ([#5889](https://github.com/dbt-labs/dbt-core/pull/5889))
- [@dave-connors-3](https://github.com/dave-connors-3) ([#5457](https://github.com/dbt-labs/dbt-core/pull/5457), [#5879](https://github.com/dbt-labs/dbt-core/pull/5879), [#5908](https://github.com/dbt-labs/dbt-core/pull/5908))
- [@dbeatty10](https://github.com/dbeatty10) ([#5717](https://github.com/dbt-labs/dbt-core/pull/5717), [#5823](https://github.com/dbt-labs/dbt-core/pull/5823))
- [@drewbanin](https://github.com/drewbanin) ([#5921](https://github.com/dbt-labs/dbt-core/pull/5921), [dbt-docs/#292](https://github.com/dbt-labs/dbt-docs/pull/292))
- [@epapineau](https://github.com/epapineau) ([#5395](https://github.com/dbt-labs/dbt-core/pull/5395))
- [@graciegoheen](https://github.com/graciegoheen) ([#5823](https://github.com/dbt-labs/dbt-core/pull/5823))
- [@jared-rimmer](https://github.com/jared-rimmer) ([#5782](https://github.com/dbt-labs/dbt-core/pull/5782), [#5794](https://github.com/dbt-labs/dbt-core/pull/5794), [#5759](https://github.com/dbt-labs/dbt-core/pull/5759))
- [@jeremyyeo](https://github.com/jeremyyeo) ([#5477](https://github.com/dbt-labs/dbt-core/pull/5477))
- [@joellabes](https://github.com/joellabes) ([dbt-docs/#145](https://github.com/dbt-labs/dbt-docs/pull/145))
- [@jpmmcneill](https://github.com/jpmmcneill) ([#5875](https://github.com/dbt-labs/dbt-core/pull/5875))
- [@kadero](https://github.com/kadero) ([#4514](https://github.com/dbt-labs/dbt-core/pull/4514))
- [@leoebfolsom](https://github.com/leoebfolsom) ([#5295](https://github.com/dbt-labs/dbt-core/pull/5295))
- [@matt-winkler](https://github.com/matt-winkler) ([#5397](https://github.com/dbt-labs/dbt-core/pull/5397), [dbt-docs/#281](https://github.com/dbt-labs/dbt-docs/pull/281))
- [@nicholasyager](https://github.com/nicholasyager) ([#5393](https://github.com/dbt-labs/dbt-core/pull/5393))
- [@panasenco](https://github.com/panasenco) ([#5703](https://github.com/dbt-labs/dbt-core/pull/5703))
- [@racheldaniel](https://github.com/racheldaniel) ([#5926](https://github.com/dbt-labs/dbt-core/pull/5926))
- [@sdebruyn](https://github.com/sdebruyn) ([#5814](https://github.com/dbt-labs/dbt-core/pull/5814), [#5818](https://github.com/dbt-labs/dbt-core/pull/5818), [#5839](https://github.com/dbt-labs/dbt-core/pull/5839))
- [@shrodingers](https://github.com/shrodingers) ([#5292](https://github.com/dbt-labs/dbt-core/pull/5292))
- [@sungchun12](https://github.com/sungchun12) ([#5397](https://github.com/dbt-labs/dbt-core/pull/5397), [dbt-docs/#281](https://github.com/dbt-labs/dbt-docs/pull/281))
- [@tomasfarias](https://github.com/tomasfarias) ([#5432](https://github.com/dbt-labs/dbt-core/pull/5432))
- [@varun-dc](https://github.com/varun-dc) ([#5627](https://github.com/dbt-labs/dbt-core/pull/5627))
- [@yoiki](https://github.com/yoiki) ([#5693](https://github.com/dbt-labs/dbt-core/pull/5693))
- [@chamini2](https://github.com/chamini2) ([#6042](https://github.com/dbt-labs/dbt-core/pull/6042))

10 .changes/1.3.1.md Normal file

@@ -0,0 +1,10 @@
## dbt-core 1.3.1 - November 16, 2022

### Features

- This pulls the profile name from args when constructing a RuntimeConfig in lib.py, enabling the dbt-server to override the value that's in the dbt_project.yml ([#6201](https://github.com/dbt-labs/dbt-core/issues/6201), [#6202](https://github.com/dbt-labs/dbt-core/pull/6202))

### Docs

- ([dbt-docs/#5880](https://github.com/dbt-labs/dbt-docs/issues/5880), [dbt-docs/#324](https://github.com/dbt-labs/dbt-docs/pull/324))
- Fix rendering of sample code for metrics ([dbt-docs/#323](https://github.com/dbt-labs/dbt-docs/issues/323), [dbt-docs/#346](https://github.com/dbt-labs/dbt-docs/pull/346))

### Contributors

- [@paulbenschmidt](https://github.com/paulbenschmidt) ([dbt-docs/#324](https://github.com/dbt-labs/dbt-docs/pull/324))
- [@racheldaniel](https://github.com/racheldaniel) ([#6202](https://github.com/dbt-labs/dbt-core/pull/6202))

5 .changes/1.3.2.md Normal file

@@ -0,0 +1,5 @@
## dbt-core 1.3.2 - January 04, 2023

### Fixes

- Bug when partial parsing with an empty schema file ([#4850](https://github.com/dbt-labs/dbt-core/issues/4850), [#<no value>](https://github.com/dbt-labs/dbt-core/pull/<no value>))

8 .changes/1.3.3.md Normal file

@@ -0,0 +1,8 @@
## dbt-core 1.3.3 - February 28, 2023

### Fixes

- add pytz dependency ([#7077](https://github.com/dbt-labs/dbt-core/issues/7077))

### Contributors

- [@sdebruyn](https://github.com/sdebruyn) ([#7077](https://github.com/dbt-labs/dbt-core/issues/7077))

8 .changes/1.3.4.md Normal file

@@ -0,0 +1,8 @@
## dbt-core 1.3.4 - April 19, 2023

### Fixes

- Improved failed event serialization handling and associated tests ([#7113](https://github.com/dbt-labs/dbt-core/issues/7113), [#7108](https://github.com/dbt-labs/dbt-core/issues/7108), [#6568](https://github.com/dbt-labs/dbt-core/issues/6568))

### Contributors

- [@QMalcolm](https://github.com/QMalcolm) ([#7113](https://github.com/dbt-labs/dbt-core/issues/7113), [#7108](https://github.com/dbt-labs/dbt-core/issues/7108), [#6568](https://github.com/dbt-labs/dbt-core/issues/6568))

@@ -1,6 +0,0 @@
kind: Dependencies
body: Pin click<9 + sqlparse<0.5
time: 2023-07-19T12:37:43.716495+02:00
custom:
  Author: jtcohen6
  PR: "8146"
@@ -1,6 +0,0 @@
kind: Docs
body: Fix for column tests not rendering on quoted columns
time: 2023-05-31T11:54:19.687363-04:00
custom:
  Author: drewbanin
  Issue: "201"
@@ -1,6 +0,0 @@
kind: Docs
body: Remove static SQL codeblock for metrics
time: 2023-07-18T19:24:22.155323+02:00
custom:
  Author: marcodamore
  Issue: "436"
@@ -1,6 +0,0 @@
kind: Fixes
body: Enable converting deprecation warnings to errors
time: 2023-07-18T12:55:18.03914-04:00
custom:
  Author: michelleark
  Issue: "8130"
.changie.yaml

@@ -4,7 +4,6 @@ headerPath: header.tpl.md
versionHeaderPath: ""
changelogPath: CHANGELOG.md
versionExt: md
envPrefix: "CHANGIE_"
versionFormat: '## dbt-core {{.Version}} - {{.Time.Format "January 02, 2006"}}'
kindFormat: '### {{.Kind}}'
changeFormat: |-
@@ -88,21 +87,15 @@ custom:

footerFormat: |
  {{- $contributorDict := dict }}
  {{- /* ensure all names in this list are all lowercase for later matching purposes */}}
  {{- $core_team := splitList " " .Env.CORE_TEAM }}
  {{- /* ensure we always skip snyk and dependabot in addition to the core team */}}
  {{- $maintainers := list "dependabot[bot]" "snyk-bot"}}
  {{- range $team_member := $core_team }}
  {{- $team_member_lower := lower $team_member }}
  {{- $maintainers = append $maintainers $team_member_lower }}
  {{- end }}
  {{- /* any names added to this list should be all lowercase for later matching purposes */}}
  {{- $core_team := list "michelleark" "peterallenwebb" "emmyoop" "nathaniel-may" "gshank" "leahwicz" "chenyulinx" "stu-k" "iknox-fa" "versusfacit" "mcknight-42" "jtcohen6" "aranke" "dependabot[bot]" "snyk-bot" "colin-rogers-dbt" }}
  {{- range $change := .Changes }}
  {{- $authorList := splitList " " $change.Custom.Author }}
  {{- /* loop through all authors for a single changelog */}}
  {{- range $author := $authorList }}
  {{- $authorLower := lower $author }}
  {{- /* we only want to include non-core team contributors */}}
  {{- if not (has $authorLower $maintainers)}}
  {{- if not (has $authorLower $core_team)}}
  {{- $changeList := splitList " " $change.Custom.Author }}
  {{- $IssueList := list }}
  {{- $changeLink := $change.Kind }}
2 .flake8

@@ -9,4 +9,4 @@ ignore =
    E203 # makes Flake8 work like black
    E741
    E501 # long line checking is done in black
exclude = test/
exclude = test
6 .gitattributes vendored

@@ -1,6 +0,0 @@
core/dbt/include/index.html binary
tests/functional/artifacts/data/state/*/manifest.json binary
core/dbt/docs/build/html/searchindex.js binary
core/dbt/docs/build/html/index.html binary
performance/runner/Cargo.lock binary
core/dbt/events/types_pb2.py binary
53 .github/CODEOWNERS vendored

@@ -11,24 +11,44 @@

# As a default for areas with no assignment,
# the core team as a whole will be assigned
* @dbt-labs/core-team
* @dbt-labs/core

### OSS Tooling Guild
# Changes to GitHub configurations including Actions
/.github/ @leahwicz

/.github/ @dbt-labs/guild-oss-tooling
.bumpversion.cfg @dbt-labs/guild-oss-tooling
### LANGUAGE

.changie.yaml @dbt-labs/guild-oss-tooling
# Language core modules
/core/dbt/config/ @dbt-labs/core-language
/core/dbt/context/ @dbt-labs/core-language
/core/dbt/contracts/ @dbt-labs/core-language
/core/dbt/deps/ @dbt-labs/core-language
/core/dbt/events/ @dbt-labs/core-language # structured logging
/core/dbt/parser/ @dbt-labs/core-language

pre-commit-config.yaml @dbt-labs/guild-oss-tooling
pytest.ini @dbt-labs/guild-oss-tooling
tox.ini @dbt-labs/guild-oss-tooling
# Language misc files
/core/dbt/dataclass_schema.py @dbt-labs/core-language
/core/dbt/hooks.py @dbt-labs/core-language
/core/dbt/node_types.py @dbt-labs/core-language
/core/dbt/semver.py @dbt-labs/core-language

### EXECUTION

# Execution core modules
/core/dbt/graph/ @dbt-labs/core-execution
/core/dbt/task/ @dbt-labs/core-execution

# Execution misc files
/core/dbt/compilation.py @dbt-labs/core-execution
/core/dbt/flags.py @dbt-labs/core-execution
/core/dbt/lib.py @dbt-labs/core-execution
/core/dbt/main.py @dbt-labs/core-execution
/core/dbt/profiler.py @dbt-labs/core-execution
/core/dbt/selected_resources.py @dbt-labs/core-execution
/core/dbt/tracking.py @dbt-labs/core-execution
/core/dbt/version.py @dbt-labs/core-execution

pyproject.toml @dbt-labs/guild-oss-tooling
requirements.txt @dbt-labs/guild-oss-tooling
dev_requirements.txt @dbt-labs/guild-oss-tooling
/core/setup.py @dbt-labs/guild-oss-tooling
/core/MANIFEST.in @dbt-labs/guild-oss-tooling

### ADAPTERS

@@ -40,7 +60,6 @@ dev_requirements.txt @dbt-labs/guild-oss-tooling

# Postgres plugin
/plugins/ @dbt-labs/core-adapters
/plugins/postgres/setup.py @dbt-labs/core-adapters @dbt-labs/guild-oss-tooling

# Functional tests for adapter plugins
/tests/adapter @dbt-labs/core-adapters

@@ -52,9 +71,5 @@ dev_requirements.txt @dbt-labs/guild-oss-tooling

# Perf regression testing framework
# This excludes the test project files itself since those aren't specific
# framework changes (excluded by not setting an owner next to it - no owner)
/performance @nathaniel-may
/performance/projects

### ARTIFACTS

/schemas/dbt @dbt-labs/cloud-artifacts
22 .github/_README.md vendored

@@ -63,12 +63,12 @@ permissions:
  contents: read
  pull-requests: write
```

### Secrets
- When to use a [Personal Access Token (PAT)](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token) vs the [GITHUB_TOKEN](https://docs.github.com/en/actions/security-guides/automatic-token-authentication) generated for the action?

  The `GITHUB_TOKEN` is used by default. In most cases it is sufficient for what you need.

  If you expect the workflow to result in a commit that should retrigger workflows, you will need to use a Personal Access Token for the bot to commit the file. When using the GITHUB_TOKEN, the resulting commit will not trigger another GitHub Actions Workflow run. This is due to limitations set by GitHub. See [the docs](https://docs.github.com/en/actions/security-guides/automatic-token-authentication#using-the-github_token-in-a-workflow) for a more detailed explanation.

  For example, we must use a PAT in our workflow to commit a new changelog yaml file for bot PRs. Once the file has been committed to the branch, it should retrigger the check to validate that a changelog exists on the PR. Otherwise, it would stay in a failed state since the check would never retrigger.
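For the changelog-bot case above, the PAT is typically supplied to the checkout step so that the commit pushed afterwards can retrigger checks. A minimal sketch, assuming the `FISHTOWN_BOT_PAT` secret referenced elsewhere in these workflows; this is illustrative, not the exact step from the workflow:

```yaml
      - name: Check out the repository
        uses: actions/checkout@v3
        with:
          # commits pushed with this token will trigger workflows,
          # unlike commits pushed with the default GITHUB_TOKEN
          token: ${{ secrets.FISHTOWN_BOT_PAT }}
```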
@@ -105,7 +105,7 @@ Some triggers of note that we use:

```
# **what?**
# Describe what the action does.

# **why?**
# Why does this action exist?
```

@@ -138,7 +138,7 @@ Some triggers of note that we use:

      id: fp
      run: |
        FILEPATH=.changes/unreleased/Dependencies-${{ steps.filename_time.outputs.time }}.yaml
        echo "FILEPATH=$FILEPATH" >> $GITHUB_OUTPUT
        echo "::set-output name=FILEPATH::$FILEPATH"
```
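Both echo lines set the same step output during the `set-output` to `GITHUB_OUTPUT` migration; downstream steps read it via the step's `id` either way. A small illustrative usage (the step name and echo text are assumptions, not from the README):

```yaml
      - name: Use the changelog filepath
        run: echo "changelog will be written to ${{ steps.fp.outputs.FILEPATH }}"
```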
- Print out all variables you will reference as the first step of a job. This allows for easier debugging. The first job should log all inputs. Subsequent jobs should reference outputs of other jobs, if present.

@@ -158,14 +158,14 @@
          echo "The build_script_path: ${{ inputs.build_script_path }}"
          echo "The s3_bucket_name: ${{ inputs.s3_bucket_name }}"
          echo "The package_test_command: ${{ inputs.package_test_command }}"

      # collect all the variables that need to be used in subsequent jobs
      - name: Set Variables
        id: variables
        run: |
          echo "important_path='performance/runner/Cargo.toml'" >> $GITHUB_OUTPUT
          echo "release_id=${{github.event.inputs.release_id}}" >> $GITHUB_OUTPUT
          echo "open_prs=${{github.event.inputs.open_prs}}" >> $GITHUB_OUTPUT
          echo "::set-output name=important_path::'performance/runner/Cargo.toml'"
          echo "::set-output name=release_id::${{github.event.inputs.release_id}}"
          echo "::set-output name=open_prs::${{github.event.inputs.open_prs}}"

  job2:
    needs: [job1]
@@ -190,14 +190,14 @@ ___
### Actions from the Marketplace
- Don’t use external actions for things that can easily be accomplished manually.
- Always read through what an external action does before using it! Often an action in the GitHub Actions Marketplace can be replaced with a few lines in bash. This is much more maintainable (and won’t change under us) and clear as to what’s actually happening. It also prevents any surprises from upstream changes to the action.
- Pin actions _we don't control_ to tags.

### Connecting to AWS
- Authenticate with the aws managed workflow

```yaml
      - name: Configure AWS credentials from Test account
        uses: aws-actions/configure-aws-credentials@v2
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
```

@@ -208,7 +208,7 @@ ___

```yaml
      - name: Copy Artifacts from S3 via CLI
        run: aws s3 cp ${{ env.s3_bucket }} . --recursive
```

### Testing
2 .github/actions/latest-wrangler/README.md vendored

@@ -35,7 +35,7 @@ jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v1
      - name: Wrangle latest tag
        id: is_latest
        uses: ./.github/actions/latest-wrangler

@@ -13,7 +13,7 @@ jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v1
      - name: Wrangle latest tag
        id: is_latest
        uses: ./.github/actions/latest-wrangler
17 .github/actions/latest-wrangler/main.py vendored

@@ -28,12 +28,11 @@ if __name__ == "__main__":
    if package_request.status_code == 404:
        if halt_on_missing:
            sys.exit(1)
        # everything is the latest if the package doesn't exist
        github_output = os.environ.get("GITHUB_OUTPUT")
        with open(github_output, "at", encoding="utf-8") as gh_output:
            gh_output.write("latest=True")
            gh_output.write("minor_latest=True")
        sys.exit(0)
        else:
            # everything is the latest if the package doesn't exist
            print(f"::set-output name=latest::{True}")
            print(f"::set-output name=minor_latest::{True}")
            sys.exit(0)

    # TODO: verify package meta is "correct"
    # https://github.com/dbt-labs/dbt-core/issues/4640

@@ -92,7 +91,5 @@ if __name__ == "__main__":
    latest = is_latest(pre_rel, new_version, current_latest)
    minor_latest = is_latest(pre_rel, new_version, current_minor_latest)

    github_output = os.environ.get("GITHUB_OUTPUT")
    with open(github_output, "at", encoding="utf-8") as gh_output:
        gh_output.write(f"latest={latest}")
        gh_output.write(f"minor_latest={minor_latest}")
    print(f"::set-output name=latest::{latest}")
    print(f"::set-output name=minor_latest::{minor_latest}")
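One detail worth noting about the new-style writes above: GitHub Actions parses `$GITHUB_OUTPUT` as newline-delimited `name=value` pairs, so consecutive `write()` calls should be newline-terminated to register as separate outputs. A minimal sketch of that pattern, reusing names from the diff for illustration:

```python
with open(github_output, "at", encoding="utf-8") as gh_output:
    # newline-terminate each pair so the runner parses two distinct outputs
    gh_output.write(f"latest={latest}\n")
    gh_output.write(f"minor_latest={minor_latest}\n")
```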
30 .github/pull_request_template.md vendored

@@ -1,35 +1,23 @@
resolves #
[docs](https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose) dbt-labs/docs.getdbt.com/#
resolves #

<!---
Include the number of the issue addressed by this PR above if applicable.
PRs for code changes without an associated issue *will not be merged*.
See CONTRIBUTING.md for more information.

Include the number of the docs issue that was opened for this PR. If
this change has no user-facing implications, "N/A" suffices instead. New
docs tickets can be created by clicking the link above or by going to
https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose.
-->

### Problem
### Description

<!---
Describe the problem this PR is solving. What is the application state
before this PR is merged?
-->

### Solution

<!---
Describe the way this PR solves the above problem. Add as much detail as you
can to help reviewers understand your changes. Include any alternatives and
tradeoffs you considered.
Describe the Pull Request here. Add any references and info to help reviewers
understand your changes. Include any tradeoffs you considered.
-->

### Checklist

- [ ] I have read [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md) and understand what's expected of me
- [ ] I have run this code in development and it appears to resolve the stated issue
- [ ] I have signed the [CLA](https://docs.getdbt.com/docs/contributor-license-agreements)
- [ ] This PR includes tests, or tests are not required/relevant for this PR
- [ ] This PR has no interface changes (e.g. macros, cli, logs, json artifacts, config files, adapter interface, etc) or this PR has already received feedback and approval from Product or DX
- [ ] I have [opened an issue to add/update docs](https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose), or docs changes are not required/relevant for this PR
- [ ] I have run `changie new` to [create a changelog entry](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-a-changelog-entry)
2 .github/workflows/backport.yml vendored

@@ -35,6 +35,6 @@ jobs:
      github.event.pull_request.merged
      && contains(github.event.label.name, 'backport')
    steps:
      - uses: tibdex/backport@v2.0.3
      - uses: tibdex/backport@v2.0.2
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
2 .github/workflows/bot-changelog.yml vendored

@@ -50,7 +50,7 @@ jobs:
      - name: Create and commit changelog on bot PR
        if: ${{ contains(github.event.pull_request.labels.*.name, matrix.label) }}
        id: bot_changelog
        uses: emmyoop/changie_bot@v1.1.0
        uses: emmyoop/changie_bot@v1.0.1
        with:
          GITHUB_TOKEN: ${{ secrets.FISHTOWN_BOT_PAT }}
          commit_author_name: "Github Build Bot"
41 .github/workflows/cut-release-branch.yml vendored

@@ -1,41 +0,0 @@
# **what?**
# Cuts a new `*.latest` branch
# Also cleans up all files in `.changes/unreleased` and `.changes/previous version` on
# `main` and bumps `main` to the input version.

# **why?**
# Generally reduces the workload of engineers and reduces error. Allow automation.

# **when?**
# This will run when called manually.

name: Cut new release branch

on:
  workflow_dispatch:
    inputs:
      version_to_bump_main:
        description: 'The alpha version main should bump to (ex. 1.6.0a1)'
        required: true
      new_branch_name:
        description: 'The full name of the new branch (ex. 1.5.latest)'
        required: true

defaults:
  run:
    shell: bash

permissions:
  contents: write

jobs:
  cut_branch:
    name: "Cut branch and clean up main for dbt-core"
    uses: dbt-labs/actions/.github/workflows/cut-release-branch.yml@main
    with:
      version_to_bump_main: ${{ inputs.version_to_bump_main }}
      new_branch_name: ${{ inputs.new_branch_name }}
      PR_title: "Cleanup main after cutting new ${{ inputs.new_branch_name }} branch"
      PR_body: "All adapter PRs will fail CI until the dbt-core PR has been merged due to release version conflicts."
    secrets:
      FISHTOWN_BOT_PAT: ${{ secrets.FISHTOWN_BOT_PAT }}
4 .github/workflows/jira-creation.yml vendored

@@ -18,8 +18,8 @@ permissions:
  issues: write

jobs:
  call-creation-action:
    uses: dbt-labs/actions/.github/workflows/jira-creation-actions.yml@main
  call-label-action:
    uses: dbt-labs/jira-actions/.github/workflows/jira-creation.yml@main
    secrets:
      JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
      JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
2 .github/workflows/jira-label.yml vendored

@@ -19,7 +19,7 @@ permissions:

jobs:
  call-label-action:
    uses: dbt-labs/actions/.github/workflows/jira-label-actions.yml@main
    uses: dbt-labs/jira-actions/.github/workflows/jira-label.yml@main
    secrets:
      JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
      JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
4 .github/workflows/jira-transition.yml vendored

@@ -19,8 +19,8 @@ on:
permissions: read-all

jobs:
  call-transition-action:
    uses: dbt-labs/actions/.github/workflows/jira-transition-actions.yml@main
  call-label-action:
    uses: dbt-labs/jira-actions/.github/workflows/jira-transition.yml@main
    secrets:
      JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
      JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
67 .github/workflows/main.yml vendored

@@ -42,10 +42,10 @@ jobs:

    steps:
      - name: Check out the repository
        uses: actions/checkout@v3
        uses: actions/checkout@v2

      - name: Set up Python
        uses: actions/setup-python@v4
        uses: actions/setup-python@v4.3.0
        with:
          python-version: '3.8'

@@ -53,8 +53,12 @@ jobs:
        run: |
          python -m pip install --user --upgrade pip
          python -m pip --version
          make dev
          python -m pip install pre-commit
          pre-commit --version
          python -m pip install mypy==0.942
          mypy --version
          python -m pip install -r requirements.txt
          python -m pip install -r dev-requirements.txt
          dbt --version

      - name: Run pre-commit hooks

@@ -69,17 +73,18 @@ jobs:
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.8", "3.9", "3.10", "3.11"]
        python-version: ["3.7", "3.8", "3.9", "3.10"]

    env:
      TOXENV: "unit"
      PYTEST_ADDOPTS: "-v --color=yes --csv unit_results.csv"

    steps:
      - name: Check out the repository
        uses: actions/checkout@v3
        uses: actions/checkout@v2

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        uses: actions/setup-python@v4.3.0
        with:
          python-version: ${{ matrix.python-version }}

@@ -96,26 +101,24 @@ jobs:
      - name: Get current date
        if: always()
        id: date
        run: |
          CURRENT_DATE=$(date +'%Y-%m-%dT%H_%M_%S') # no colons allowed for artifacts
          echo "date=$CURRENT_DATE" >> $GITHUB_OUTPUT
        run: echo "::set-output name=date::$(date +'%Y-%m-%dT%H_%M_%S')" # no colons allowed for artifacts

      - name: Upload Unit Test Coverage to Codecov
        if: ${{ matrix.python-version == '3.11' }}
        uses: codecov/codecov-action@v3
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
      - uses: actions/upload-artifact@v2
        if: always()
        with:
          name: unit_results_${{ matrix.python-version }}-${{ steps.date.outputs.date }}.csv
          path: unit_results.csv

  integration:
    name: integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}

    runs-on: ${{ matrix.os }}
    timeout-minutes: 60
    timeout-minutes: 45

    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.8", "3.9", "3.10", "3.11"]
        python-version: ["3.7", "3.8", "3.9", "3.10"]
        os: [ubuntu-20.04]
        include:
          - python-version: 3.8

@@ -125,22 +128,18 @@ jobs:

    env:
      TOXENV: integration
      PYTEST_ADDOPTS: "-v --color=yes -n4 --csv integration_results.csv"
      DBT_INVOCATION_ENV: github-actions
      DBT_TEST_USER_1: dbt_test_user_1
      DBT_TEST_USER_2: dbt_test_user_2
      DBT_TEST_USER_3: dbt_test_user_3
      DD_CIVISIBILITY_AGENTLESS_ENABLED: true
      DD_API_KEY: ${{ secrets.DATADOG_API_KEY }}
      DD_SITE: datadoghq.com
      DD_ENV: ci
      DD_SERVICE: ${{ github.event.repository.name }}

    steps:
      - name: Check out the repository
        uses: actions/checkout@v3
        uses: actions/checkout@v2

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        uses: actions/setup-python@v4.3.0
        with:
          python-version: ${{ matrix.python-version }}

@@ -164,26 +163,24 @@ jobs:
          tox --version

      - name: Run tests
        run: tox -- --ddtrace
        run: tox

      - name: Get current date
        if: always()
        id: date
        run: |
          CURRENT_DATE=$(date +'%Y-%m-%dT%H_%M_%S') # no colons allowed for artifacts
          echo "date=$CURRENT_DATE" >> $GITHUB_OUTPUT
        run: echo "::set-output name=date::$(date +'%Y_%m_%dT%H_%M_%S')" # no colons allowed for artifacts

      - uses: actions/upload-artifact@v3
      - uses: actions/upload-artifact@v2
        if: always()
        with:
          name: logs_${{ matrix.python-version }}_${{ matrix.os }}_${{ steps.date.outputs.date }}
          path: ./logs

      - name: Upload Integration Test Coverage to Codecov
        if: ${{ matrix.python-version == '3.11' }}
        uses: codecov/codecov-action@v3
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
      - uses: actions/upload-artifact@v2
        if: always()
        with:
          name: integration_results_${{ matrix.python-version }}_${{ matrix.os }}_${{ steps.date.outputs.date }}.csv
          path: integration_results.csv

  build:
    name: build packages

@@ -192,10 +189,10 @@ jobs:

    steps:
      - name: Check out the repository
        uses: actions/checkout@v3
        uses: actions/checkout@v2

      - name: Set up Python
        uses: actions/setup-python@v4
        uses: actions/setup-python@v4.3.0
        with:
          python-version: '3.8'
265 .github/workflows/model_performance.yml vendored

@@ -1,265 +0,0 @@
# **what?**
# This workflow models the performance characteristics of a point in time in dbt.
# It runs specific dbt commands on committed projects multiple times to create and
# commit information about the distribution to the current branch. For more information
# see the readme in the performance module at /performance/README.md.
#
# **why?**
# When developing new features, we can take quick performance samples and compare
# them against the committed baseline measurements produced by this workflow to detect
# some performance regressions at development time before they reach users.
#
# **when?**
# This is only run once directly after each release (for non-prereleases). If for some
# reason the results of a run are not satisfactory, it can also be triggered manually.

name: Model Performance Characteristics
on:
  # runs after non-prereleases are published.
  release:
    types: [released]
  # run manually from the actions tab
  workflow_dispatch:
    inputs:
      release_id:
        description: 'dbt version to model (must be non-prerelease in Pypi)'
        type: string
        required: true

env:
  RUNNER_CACHE_PATH: performance/runner/target/release/runner

# both jobs need to write
permissions:
  contents: write
  pull-requests: write

jobs:
  set-variables:
    name: Setting Variables
    runs-on: ubuntu-latest
    outputs:
      cache_key: ${{ steps.variables.outputs.cache_key }}
      release_id: ${{ steps.semver.outputs.base-version }}
      release_branch: ${{ steps.variables.outputs.release_branch }}
    steps:

      # explicitly checkout the performance runner from main regardless of which
      # version we are modeling.
      - name: Checkout
        uses: actions/checkout@v3
        with:
          ref: main

      - name: Parse version into parts
        id: semver
        uses: dbt-labs/actions/parse-semver@v1
        with:
          version: ${{ github.event.inputs.release_id || github.event.release.tag_name }}

      # collect all the variables that need to be used in subsequent jobs
      - name: Set variables
        id: variables
        run: |
          # create a cache key that will be used in the next job. without this the
          # next job would have to checkout from main and hash the files itself.
          echo "cache_key=${{ runner.os }}-${{ hashFiles('performance/runner/Cargo.toml')}}-${{ hashFiles('performance/runner/src/*') }}" >> $GITHUB_OUTPUT

          branch_name="${{steps.semver.outputs.major}}.${{steps.semver.outputs.minor}}.latest"
          echo "release_branch=$branch_name" >> $GITHUB_OUTPUT
          echo "release branch is inferred to be ${branch_name}"

  latest-runner:
    name: Build or Fetch Runner
    runs-on: ubuntu-latest
    needs: [set-variables]
    env:
      RUSTFLAGS: "-D warnings"
    steps:
      - name: '[DEBUG] print variables'
        run: |
          echo "all variables defined in set-variables"
          echo "cache_key: ${{ needs.set-variables.outputs.cache_key }}"
          echo "release_id: ${{ needs.set-variables.outputs.release_id }}"
          echo "release_branch: ${{ needs.set-variables.outputs.release_branch }}"

      # explicitly checkout the performance runner from main regardless of which
      # version we are modeling.
      - name: Checkout
        uses: actions/checkout@v3
        with:
          ref: main

      # attempts to access a previously cached runner
      - uses: actions/cache@v3
        id: cache
        with:
          path: ${{ env.RUNNER_CACHE_PATH }}
          key: ${{ needs.set-variables.outputs.cache_key }}

      - name: Fetch Rust Toolchain
        if: steps.cache.outputs.cache-hit != 'true'
        uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: stable
          override: true

      - name: Add fmt
        if: steps.cache.outputs.cache-hit != 'true'
        run: rustup component add rustfmt

      - name: Cargo fmt
        if: steps.cache.outputs.cache-hit != 'true'
        uses: actions-rs/cargo@v1
        with:
          command: fmt
          args: --manifest-path performance/runner/Cargo.toml --all -- --check

      - name: Test
        if: steps.cache.outputs.cache-hit != 'true'
        uses: actions-rs/cargo@v1
        with:
          command: test
          args: --manifest-path performance/runner/Cargo.toml

      - name: Build (optimized)
        if: steps.cache.outputs.cache-hit != 'true'
        uses: actions-rs/cargo@v1
        with:
          command: build
          args: --release --manifest-path performance/runner/Cargo.toml
      # the cache action automatically caches this binary at the end of the job
  model:
    # depends on `latest-runner` as a separate job so that failures in this job do not prevent
    # a successfully tested and built binary from being cached.
    needs: [set-variables, latest-runner]
    name: Model a release
    runs-on: ubuntu-latest
    steps:

      - name: '[DEBUG] print variables'
        run: |
          echo "all variables defined in set-variables"
          echo "cache_key: ${{ needs.set-variables.outputs.cache_key }}"
          echo "release_id: ${{ needs.set-variables.outputs.release_id }}"
          echo "release_branch: ${{ needs.set-variables.outputs.release_branch }}"

      - name: Setup Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.8"

      - name: Install dbt
        run: pip install dbt-postgres==${{ needs.set-variables.outputs.release_id }}

      - name: Install Hyperfine
        run: wget https://github.com/sharkdp/hyperfine/releases/download/v1.11.0/hyperfine_1.11.0_amd64.deb && sudo dpkg -i hyperfine_1.11.0_amd64.deb

      # explicitly checkout main to get the latest project definitions
      - name: Checkout
        uses: actions/checkout@v3
        with:
          ref: main

      # this was built in the previous job so it will be there.
      - name: Fetch Runner
        uses: actions/cache@v3
        id: cache
        with:
          path: ${{ env.RUNNER_CACHE_PATH }}
          key: ${{ needs.set-variables.outputs.cache_key }}

      - name: Move Runner
        run: mv performance/runner/target/release/runner performance/app

      - name: Change Runner Permissions
        run: chmod +x ./performance/app

      - name: '[DEBUG] ls baseline directory before run'
        run: ls -R performance/baselines/

      # `${{ github.workspace }}` is used to pass the absolute path
      - name: Create directories
        run: |
          mkdir ${{ github.workspace }}/performance/tmp/
          mkdir -p performance/baselines/${{ needs.set-variables.outputs.release_id }}/

      # Run modeling with taking 20 samples
      - name: Run Measurement
        run: |
          performance/app model -v ${{ needs.set-variables.outputs.release_id }} -b ${{ github.workspace }}/performance/baselines/ -p ${{ github.workspace }}/performance/projects/ -t ${{ github.workspace }}/performance/tmp/ -n 20

      - name: '[DEBUG] ls baseline directory after run'
        run: ls -R performance/baselines/

      - uses: actions/upload-artifact@v3
        with:
          name: baseline
          path: performance/baselines/${{ needs.set-variables.outputs.release_id }}/

  create-pr:
    name: Open PR for ${{ matrix.base-branch }}

    # depends on `model` as a separate job so that the baseline can be committed to more than one branch
    # i.e. release branch and main
    needs: [set-variables, latest-runner, model]
    runs-on: ubuntu-latest

    strategy:
      matrix:
        include:
          - base-branch: refs/heads/main
            target-branch: performance-bot/main_${{ needs.set-variables.outputs.release_id }}_${{GITHUB.RUN_ID}}
          - base-branch: refs/heads/${{ needs.set-variables.outputs.release_branch }}
            target-branch: performance-bot/release_${{ needs.set-variables.outputs.release_id }}_${{GITHUB.RUN_ID}}

    steps:
      - name: '[DEBUG] print variables'
        run: |
          echo "all variables defined in set-variables"
          echo "cache_key: ${{ needs.set-variables.outputs.cache_key }}"
          echo "release_id: ${{ needs.set-variables.outputs.release_id }}"
          echo "release_branch: ${{ needs.set-variables.outputs.release_branch }}"

      - name: Checkout
        uses: actions/checkout@v3
        with:
          ref: ${{ matrix.base-branch }}

      - name: Create PR branch
        run: |
          git checkout -b ${{ matrix.target-branch }}
          git push origin ${{ matrix.target-branch }}
          git branch --set-upstream-to=origin/${{ matrix.target-branch }} ${{ matrix.target-branch }}

      - uses: actions/download-artifact@v3
        with:
          name: baseline
          path: performance/baselines/${{ needs.set-variables.outputs.release_id }}

      - name: '[DEBUG] ls baselines after artifact download'
        run: ls -R performance/baselines/

      - name: Commit baseline
        uses: EndBug/add-and-commit@v9
        with:
          add: 'performance/baselines/*'
          author_name: 'Github Build Bot'
          author_email: 'buildbot@fishtownanalytics.com'
          message: 'adding performance baseline for ${{ needs.set-variables.outputs.release_id }}'
          push: 'origin origin/${{ matrix.target-branch }}'

      - name: Create Pull Request
        uses: peter-evans/create-pull-request@v5
        with:
          author: 'Github Build Bot <buildbot@fishtownanalytics.com>'
          base: ${{ matrix.base-branch }}
          branch: '${{ matrix.target-branch }}'
          title: 'Adding performance modeling for ${{needs.set-variables.outputs.release_id}} to ${{ matrix.base-branch }}'
          body: 'Committing perf results for tracking for the ${{needs.set-variables.outputs.release_id}}'
|
||||
labels: |
|
||||
Skip Changelog
|
||||
Performance
|
||||
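For orientation, the measurement step above is a single CLI call into the cached Rust runner. Invoked locally it would look roughly like this; a sketch only, where the version, paths, and sample count are illustrative:

```sh
# model a hypothetical release 1.3.4: read projects, write a baseline, take 20 samples
./performance/app model \
  -v 1.3.4 \
  -b "$PWD/performance/baselines/" \
  -p "$PWD/performance/projects/" \
  -t "$PWD/performance/tmp/" \
  -n 20
```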
4 .github/workflows/nightly-release.yml vendored
@@ -68,7 +68,7 @@ jobs:
      - name: "Generate Nightly Release Version Number"
        id: nightly-release-version
        run: |
          number="${{ steps.semver.outputs.version }}.dev${{ steps.current-date.outputs.date }}"
          number="${{ steps.semver.outputs.version }}.dev${{ steps.current-date.outputs.date }}+nightly"
          echo "number=$number" >> $GITHUB_OUTPUT

      - name: "Audit Nightly Release Version And Parse Into Parts"

@@ -98,7 +98,7 @@ jobs:
    uses: ./.github/workflows/release.yml
    with:
      sha: ${{ needs.aggregate-release-data.outputs.commit_sha }}
      target_branch: ${{ needs.aggregate-release-data.outputs.release_branch }}
      target_branch: ${{ needs.aggregate-release-data.outputs.release-branch }}
      version_number: ${{ needs.aggregate-release-data.outputs.version_number }}
      build_script_path: "scripts/build-dist.sh"
      env_setup_script_path: "scripts/env-setup.sh"
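The two `number=` lines above are the before and after of the nightly version string. A quick sketch of the resulting value, assuming the semver step outputs `1.3.4` and the date step outputs `04192023` (both hypothetical stand-ins for the workflow's step outputs):

```sh
# stand-ins for the workflow's step outputs
version="1.3.4"
date="04192023"
number="${version}.dev${date}+nightly"
echo "$number"  # -> 1.3.4.dev04192023+nightly
```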
43 .github/workflows/release-branch-tests.yml vendored
@@ -1,7 +1,11 @@
# **what?**
# The purpose of this workflow is to trigger CI to run for each
# release branch and main branch on a regular cadence. If the CI workflow
# fails for a branch, it will post to #dev-core-alerts to raise awareness.
# fails for a branch, it will post to dev-core-alerts to raise awareness.
# The 'aurelien-baudet/workflow-dispatch' Action triggers the existing
# CI workflow file on the given branch to run, so that even if we change the
# CI workflow file in the future, the one that is tailored for the given
# release branch will be used.

# **why?**
# Ensures release branches and main are always shippable and not broken.

@@ -24,8 +28,35 @@ on:
permissions: read-all

jobs:
  run_tests:
    uses: dbt-labs/actions/.github/workflows/release-branch-tests.yml@main
    with:
      workflows_to_run: '["main.yml"]'
    secrets: inherit
  kick-off-ci:
    name: Kick-off CI
    runs-on: ubuntu-latest

    strategy:
      # must run CI 1 branch at a time b/c the workflow-dispatch Action polls for the
      # latest run for results, and it gets confused when we kick off multiple runs
      # at once. There is a race condition, so we will just run in sequential order.
      max-parallel: 1
      fail-fast: false
      matrix:
        branch: [1.0.latest, 1.1.latest, 1.2.latest, main]

    steps:
      - name: Call CI workflow for ${{ matrix.branch }} branch
        id: trigger-step
        uses: aurelien-baudet/workflow-dispatch@v2.1.1
        with:
          workflow: main.yml
          ref: ${{ matrix.branch }}
          token: ${{ secrets.FISHTOWN_BOT_PAT }}

      - name: Post failure to Slack
        uses: ravsamhq/notify-slack-action@v1
        if: ${{ always() && !contains(steps.trigger-step.outputs.workflow-conclusion, 'success') }}
        with:
          status: ${{ job.status }}
          notification_title: 'dbt-core scheduled run of "${{ matrix.branch }}" branch not successful'
          message_format: ':x: CI on branch "${{ matrix.branch }}" ${{ steps.trigger-step.outputs.workflow-conclusion }}'
          footer: 'Linked failed CI run ${{ steps.trigger-step.outputs.workflow-url }}'
        env:
          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_DEV_CORE_ALERTS }}
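The `workflow-dispatch` Action is effectively a remote trigger plus polling for the run's conclusion. A rough manual equivalent with the GitHub CLI, shown only as a sketch of the mechanism and not how the workflow itself runs:

```sh
# trigger CI on one release branch, then watch the run it started
gh workflow run main.yml --repo dbt-labs/dbt-core --ref 1.2.latest
run_id=$(gh run list --repo dbt-labs/dbt-core --workflow main.yml \
  --branch 1.2.latest --limit 1 --json databaseId --jq '.[0].databaseId')
gh run watch --repo dbt-labs/dbt-core "$run_id"
```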
24 .github/workflows/release-docker.yml vendored
@@ -36,14 +36,14 @@ jobs:
      latest: ${{ steps.latest.outputs.latest }}
      minor_latest: ${{ steps.latest.outputs.minor_latest }}
    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v1
      - name: Split version
        id: version
        run: |
          IFS="." read -r MAJOR MINOR PATCH <<< ${{ github.event.inputs.version_number }}
          echo "major=$MAJOR" >> $GITHUB_OUTPUT
          echo "minor=$MINOR" >> $GITHUB_OUTPUT
          echo "patch=$PATCH" >> $GITHUB_OUTPUT
          echo "::set-output name=major::$MAJOR"
          echo "::set-output name=minor::$MINOR"
          echo "::set-output name=patch::$PATCH"

      - name: Is pkg 'latest'
        id: latest

@@ -60,7 +60,7 @@ jobs:
    needs: [get_version_meta]
    steps:
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
        uses: docker/setup-buildx-action@v1

  build_and_push:
    name: Build images and push to GHCR

@@ -70,20 +70,18 @@ jobs:
      - name: Get docker build arg
        id: build_arg
        run: |
          BUILD_ARG_NAME=$(echo ${{ github.event.inputs.package }} | sed 's/\-/_/g')
          BUILD_ARG_VALUE=$(echo ${{ github.event.inputs.package }} | sed 's/postgres/core/g')
          echo "build_arg_name=$BUILD_ARG_NAME" >> $GITHUB_OUTPUT
          echo "build_arg_value=$BUILD_ARG_VALUE" >> $GITHUB_OUTPUT
          echo "::set-output name=build_arg_name::"$(echo ${{ github.event.inputs.package }} | sed 's/\-/_/g')
          echo "::set-output name=build_arg_value::"$(echo ${{ github.event.inputs.package }} | sed 's/postgres/core/g')

      - name: Log in to the GHCR
        uses: docker/login-action@v2
        uses: docker/login-action@v1
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build and push MAJOR.MINOR.PATCH tag
        uses: docker/build-push-action@v4
        uses: docker/build-push-action@v2
        with:
          file: docker/Dockerfile
          push: True

@@ -94,7 +92,7 @@ jobs:
            ghcr.io/dbt-labs/${{ github.event.inputs.package }}:${{ github.event.inputs.version_number }}

      - name: Build and push MINOR.latest tag
        uses: docker/build-push-action@v4
        uses: docker/build-push-action@v2
        if: ${{ needs.get_version_meta.outputs.minor_latest == 'True' }}
        with:
          file: docker/Dockerfile

@@ -106,7 +104,7 @@ jobs:
            ghcr.io/dbt-labs/${{ github.event.inputs.package }}:${{ needs.get_version_meta.outputs.major }}.${{ needs.get_version_meta.outputs.minor }}.latest

      - name: Build and push latest tag
        uses: docker/build-push-action@v4
        uses: docker/build-push-action@v2
        if: ${{ needs.get_version_meta.outputs.latest == 'True' }}
        with:
          file: docker/Dockerfile
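The `Split version` step relies on `read` with a custom `IFS` to break the version into parts. A worked example with a hypothetical input value:

```sh
# split "1.3.4" into parts the same way the workflow step does
IFS="." read -r MAJOR MINOR PATCH <<< "1.3.4"
echo "major=$MAJOR minor=$MINOR patch=$PATCH"  # major=1 minor=3 patch=4
```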
3 .github/workflows/release.yml vendored
@@ -143,7 +143,8 @@ jobs:
      test_run: ${{ inputs.test_run }}
      nightly_release: ${{ inputs.nightly_release }}

    secrets: inherit
    secrets:
      FISHTOWN_BOT_PAT: ${{ secrets.FISHTOWN_BOT_PAT }}

  log-outputs-bump-version-generate-changelog:
    name: "[Log output] Bump package version, Generate changelog"
8 .github/workflows/schema-check.yml vendored
@@ -37,17 +37,17 @@ jobs:

    steps:
      - name: Set up Python
        uses: actions/setup-python@v4
        uses: actions/setup-python@v2
        with:
          python-version: 3.8

      - name: Checkout dbt repo
        uses: actions/checkout@v3
        uses: actions/checkout@v2.3.4
        with:
          path: ${{ env.DBT_REPO_DIRECTORY }}

      - name: Checkout schemas.getdbt.com repo
        uses: actions/checkout@v3
        uses: actions/checkout@v2.3.4
        with:
          repository: dbt-labs/schemas.getdbt.com
          ref: 'main'

@@ -83,7 +83,7 @@ jobs:
          fi

      - name: Upload schema diff
        uses: actions/upload-artifact@v3
        uses: actions/upload-artifact@v2.2.4
        if: ${{ failure() }}
        with:
          name: 'schema_schanges.txt'
11 .github/workflows/stale.yml vendored
@@ -9,4 +9,13 @@ permissions:

jobs:
  stale:
    uses: dbt-labs/actions/.github/workflows/stale-bot-matrix.yml@main
    runs-on: ubuntu-latest
    steps:
      # pinned at v4 (https://github.com/actions/stale/releases/tag/v4.0.0)
      - uses: actions/stale@cdf15f641adb27a71842045a94023bef6945e3aa
        with:
          stale-issue-message: "This issue has been marked as Stale because it has been open for 180 days with no activity. If you would like the issue to remain open, please remove the stale label or comment on the issue, or it will be closed in 7 days."
          stale-pr-message: "This PR has been marked as Stale because it has been open for 180 days with no activity. If you would like the PR to remain open, please remove the stale label or comment on the PR, or it will be closed in 7 days."
          close-issue-message: "Although we are closing this issue as stale, it's not gone forever. Issues can be reopened if there is renewed community interest; add a comment to notify the maintainers."
          # mark issues/PRs stale when they haven't seen activity in 180 days
          days-before-stale: 180

@@ -30,8 +30,6 @@ jobs:
      LOG_DIR: "/home/runner/work/dbt-core/dbt-core/logs"
      # tells integration tests to output into json format
      DBT_LOG_FORMAT: "json"
      # tell eventmgr to convert logging events into bytes
      DBT_TEST_BINARY_SERIALIZATION: "true"
      # Additional test users
      DBT_TEST_USER_1: dbt_test_user_1
      DBT_TEST_USER_2: dbt_test_user_2

@@ -39,15 +37,21 @@ jobs:

    steps:
      - name: checkout dev
        uses: actions/checkout@v3
        uses: actions/checkout@v2
        with:
          persist-credentials: false

      - name: Setup Python
        uses: actions/setup-python@v4
        uses: actions/setup-python@v2.2.2
        with:
          python-version: "3.8"

      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: stable
          override: true

      - name: Install python dependencies
        run: |
          pip install --user --upgrade pip

@@ -65,3 +69,10 @@ jobs:
      # we actually care if these pass, because the normal test run doesn't usually include many json log outputs
      - name: Run integration tests
        run: tox -e integration -- -nauto

      # apply our schema tests to every log event from the previous step
      # skips any output that isn't valid json
      - uses: actions-rs/cargo@v1
        with:
          command: run
          args: --manifest-path test/interop/log_parsing/Cargo.toml
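The last step above replays every JSON log line produced by the integration run through the Rust schema tests. Locally that is roughly the following; a sketch, assuming the log_parsing crate reads the same `LOG_DIR` environment variable the job sets:

```sh
# produce JSON logs, then validate each log event against the schemas
DBT_LOG_FORMAT=json LOG_DIR=./logs tox -e integration -- -nauto
LOG_DIR=./logs cargo run --manifest-path test/interop/log_parsing/Cargo.toml
```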
155 .github/workflows/test-repeater.yml vendored
@@ -1,155 +0,0 @@
# **what?**
# This workflow runs the test(s) at the input path the given number of times, to determine whether they are flaky. You can test with any supported OS/Python combination.
# This is batched in 10 to allow more test iterations faster.

# **why?**
# Testing whether a test is flaky, and whether a previously flaky test has been fixed. This allows easy testing on supported python versions and OS combinations.

# **when?**
# This is triggered manually from dbt-core.

name: Flaky Tester

on:
  workflow_dispatch:
    inputs:
      branch:
        description: 'Branch to check out'
        type: string
        required: true
        default: 'main'
      test_path:
        description: 'Path to single test to run (ex: tests/functional/retry/test_retry.py::TestRetry::test_fail_fast)'
        type: string
        required: true
        default: 'tests/functional/...'
      python_version:
        description: 'Version of Python to Test Against'
        type: choice
        options:
          - '3.8'
          - '3.9'
          - '3.10'
          - '3.11'
      os:
        description: 'OS to run test in'
        type: choice
        options:
          - 'ubuntu-latest'
          - 'macos-latest'
          - 'windows-latest'
      num_runs_per_batch:
        description: 'Max number of times to run the test per batch. We always run 10 batches.'
        type: number
        required: true
        default: '50'

permissions: read-all

defaults:
  run:
    shell: bash

jobs:
  debug:
    runs-on: ubuntu-latest
    steps:
      - name: "[DEBUG] Output Inputs"
        run: |
          echo "Branch: ${{ inputs.branch }}"
          echo "test_path: ${{ inputs.test_path }}"
          echo "python_version: ${{ inputs.python_version }}"
          echo "os: ${{ inputs.os }}"
          echo "num_runs_per_batch: ${{ inputs.num_runs_per_batch }}"

  pytest:
    runs-on: ${{ inputs.os }}
    strategy:
      # run all batches, even if one fails. This informs how flaky the test may be.
      fail-fast: false
      # using a matrix to speed up the jobs, since the matrix will run in parallel when runners are available
      matrix:
        batch: ["1", "2", "3", "4", "5", "6", "7", "8", "9", "10"]
    env:
      PYTEST_ADDOPTS: "-v --color=yes -n4 --csv integration_results.csv"
      DBT_TEST_USER_1: dbt_test_user_1
      DBT_TEST_USER_2: dbt_test_user_2
      DBT_TEST_USER_3: dbt_test_user_3
      DD_CIVISIBILITY_AGENTLESS_ENABLED: true
      DD_API_KEY: ${{ secrets.DATADOG_API_KEY }}
      DD_SITE: datadoghq.com
      DD_ENV: ci
      DD_SERVICE: ${{ github.event.repository.name }}

    steps:
      - name: "Checkout code"
        uses: actions/checkout@v3
        with:
          ref: ${{ inputs.branch }}

      - name: "Setup Python"
        uses: actions/setup-python@v4
        with:
          python-version: "${{ inputs.python_version }}"

      - name: "Setup Dev Environment"
        run: make dev

      - name: "Set up postgres (linux)"
        if: inputs.os == 'ubuntu-latest'
        run: make setup-db

      # mac and windows don't use make, due to limitations with docker on those runners in GitHub
      - name: "Set up postgres (macos)"
        if: inputs.os == 'macos-latest'
        uses: ./.github/actions/setup-postgres-macos

      - name: "Set up postgres (windows)"
        if: inputs.os == 'windows-latest'
        uses: ./.github/actions/setup-postgres-windows

      - name: "Test Command"
        id: command
        run: |
          test_command="python -m pytest ${{ inputs.test_path }}"
          echo "test_command=$test_command" >> $GITHUB_OUTPUT

      - name: "Run test ${{ inputs.num_runs_per_batch }} times"
        id: pytest
        run: |
          set +e
          # initialize counters so the summary step has values even after zero iterations
          success=0
          failure=0
          for ((i=1; i<=${{ inputs.num_runs_per_batch }}; i++))
          do
            echo "Running pytest iteration $i..."
            python -m pytest --ddtrace ${{ inputs.test_path }}
            exit_code=$?

            if [[ $exit_code -eq 0 ]]; then
              success=$((success + 1))
              echo "Iteration $i: Success"
            else
              failure=$((failure + 1))
              echo "Iteration $i: Failure"
            fi

            echo
            echo "==========================="
            echo "Successful runs: $success"
            echo "Failed runs: $failure"
            echo "==========================="
            echo
          done

          # the summary step reads both outputs, so write both
          echo "success=$success" >> $GITHUB_OUTPUT
          echo "failure=$failure" >> $GITHUB_OUTPUT

      - name: "Success and Failure Summary: ${{ inputs.os }}/Python ${{ inputs.python_version }}"
        run: |
          echo "Batch: ${{ matrix.batch }}"
          echo "Successful runs: ${{ steps.pytest.outputs.success }}"
          echo "Failed runs: ${{ steps.pytest.outputs.failure }}"

      - name: "Error for Failures"
        if: ${{ steps.pytest.outputs.failure }}
        run: |
          echo "Batch ${{ matrix.batch }} failed ${{ steps.pytest.outputs.failure }} of ${{ inputs.num_runs_per_batch }} tests"
          exit 1
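Batching math: 10 matrix batches times `num_runs_per_batch` (default 50) means up to 500 executions of the one test. A single batch boils down to a loop like this; a local sketch using the workflow's own example test path:

```sh
# run one batch of 50 iterations and count failures
failure=0
for i in $(seq 1 50); do
  python -m pytest tests/functional/retry/test_retry.py::TestRetry::test_fail_fast \
    || failure=$((failure + 1))
done
echo "failed $failure of 50 runs"
```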
12 .github/workflows/triage-labels.yml vendored
@@ -24,8 +24,10 @@ permissions:
jobs:
  triage_label:
    if: contains(github.event.issue.labels.*.name, 'awaiting_response')
    uses: dbt-labs/actions/.github/workflows/swap-labels.yml@main
    with:
      add_label: "triage"
      remove_label: "awaiting_response"
    secrets: inherit
    runs-on: ubuntu-latest
    steps:
      - name: initial labeling
        uses: andymckay/labeler@master
        with:
          add-labels: "triage"
          remove-labels: "awaiting_response"
107 .github/workflows/version-bump.yml vendored
@@ -20,9 +20,106 @@ on:
        description: 'The version number to bump to (ex. 1.2.0, 1.3.0b1)'
        required: true

permissions:
  contents: write
  pull-requests: write

jobs:
  version_bump_and_changie:
    uses: dbt-labs/actions/.github/workflows/version-bump.yml@main
    with:
      version_number: ${{ inputs.version_number }}
    secrets: inherit  # ok since what we are calling is internally maintained
  bump:
    runs-on: ubuntu-latest
    steps:
      - name: "[DEBUG] Print Variables"
        run: |
          echo "all variables defined as inputs"
          echo The version_number: ${{ github.event.inputs.version_number }}

      - name: Check out the repository
        uses: actions/checkout@v2

      - uses: actions/setup-python@v2
        with:
          python-version: "3.8"

      - name: Install python dependencies
        run: |
          python3 -m venv env
          source env/bin/activate
          pip install --upgrade pip

      - name: Add Homebrew to PATH
        run: |
          echo "/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin" >> $GITHUB_PATH

      - name: Install Homebrew packages
        run: |
          brew install pre-commit
          brew tap miniscruff/changie https://github.com/miniscruff/changie
          brew install changie

      - name: Audit Version and Parse Into Parts
        id: semver
        uses: dbt-labs/actions/parse-semver@v1
        with:
          version: ${{ github.event.inputs.version_number }}

      - name: Set branch value
        id: variables
        run: |
          echo "::set-output name=BRANCH_NAME::prep-release/${{ github.event.inputs.version_number }}_$GITHUB_RUN_ID"

      - name: Create PR branch
        run: |
          git checkout -b ${{ steps.variables.outputs.BRANCH_NAME }}
          git push origin ${{ steps.variables.outputs.BRANCH_NAME }}
          git branch --set-upstream-to=origin/${{ steps.variables.outputs.BRANCH_NAME }} ${{ steps.variables.outputs.BRANCH_NAME }}

      - name: Bump version
        run: |
          source env/bin/activate
          pip install -r dev-requirements.txt
          env/bin/bumpversion --allow-dirty --new-version ${{ github.event.inputs.version_number }} major
          git status

      - name: Run changie
        run: |
          if [[ ${{ steps.semver.outputs.is-pre-release }} -eq 1 ]]
          then
            changie batch ${{ steps.semver.outputs.base-version }} --move-dir '${{ steps.semver.outputs.base-version }}' --prerelease '${{ steps.semver.outputs.pre-release }}'
          else
            changie batch ${{ steps.semver.outputs.base-version }} --include '${{ steps.semver.outputs.base-version }}' --remove-prereleases
          fi
          changie merge
          git status

      # this step will fail on whitespace errors but also correct them
      - name: Remove trailing whitespace
        continue-on-error: true
        run: |
          pre-commit run trailing-whitespace --files .bumpversion.cfg CHANGELOG.md .changes/*
          git status

      # this step will fail on newline errors but also correct them
      - name: Removing extra newlines
        continue-on-error: true
        run: |
          pre-commit run end-of-file-fixer --files .bumpversion.cfg CHANGELOG.md .changes/*
          git status

      - name: Commit version bump to branch
        uses: EndBug/add-and-commit@v7
        with:
          author_name: 'Github Build Bot'
          author_email: 'buildbot@fishtownanalytics.com'
          message: 'Bumping version to ${{ github.event.inputs.version_number }} and generate CHANGELOG'
          branch: '${{ steps.variables.outputs.BRANCH_NAME }}'
          push: 'origin origin/${{ steps.variables.outputs.BRANCH_NAME }}'

      - name: Create Pull Request
        uses: peter-evans/create-pull-request@v3
        with:
          author: 'Github Build Bot <buildbot@fishtownanalytics.com>'
          base: ${{ github.ref }}
          title: 'Bumping version to ${{ github.event.inputs.version_number }} and generate changelog'
          branch: '${{ steps.variables.outputs.BRANCH_NAME }}'
          labels: |
            Skip Changelog
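The `Run changie` branch above splits on whether the input is a pre-release. With a hypothetical input of `1.3.0b1` (base version `1.3.0`, prerelease `b1`), the two cases reduce to:

```sh
# pre-release input "1.3.0b1": batch the notes under the prerelease
changie batch 1.3.0 --move-dir '1.3.0' --prerelease 'b1'
# final-release input "1.3.0": fold any prereleases into the final notes
changie batch 1.3.0 --include '1.3.0' --remove-prereleases
changie merge
```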
9 .gitignore vendored
@@ -11,8 +11,6 @@ __pycache__/
env*/
dbt_env/
build/
!tests/functional/build
!core/dbt/docs/build
develop-eggs/
dist/
downloads/

@@ -26,11 +24,8 @@ var/
*.egg-info/
.installed.cfg
*.egg
.mypy_cache/
.dmypy.json
*.mypy_cache/
logs/
.user.yml
profiles.yml

# PyInstaller
# Usually these files are written by a python script from a template

@@ -54,7 +49,6 @@ coverage.xml
*,cover
.hypothesis/
test.env
makefile.test.env
*.pytest_cache/


@@ -104,4 +98,5 @@ venv/
*.code-workspace

# poetry
pyproject.toml
poetry.lock
@@ -1,7 +1,8 @@
# Configuration for pre-commit hooks (see https://pre-commit.com/).
# Eventually the hooks described here will be run as tests before merging each PR.

exclude: ^(core/dbt/docs/build/|core/dbt/events/types_pb2.py)
# TODO: remove global exclusion of tests when testing overhaul is complete
exclude: ^test/

# Force all unspecified python hooks to run python 3.8
default_language_version:

@@ -23,10 +24,15 @@ repos:
    rev: 22.3.0
    hooks:
      - id: black
        args:
          - "--line-length=99"
          - "--target-version=py38"
      - id: black
        alias: black-check
        stages: [manual]
        args:
          - "--line-length=99"
          - "--target-version=py38"
          - "--check"
          - "--diff"
  - repo: https://github.com/pycqa/flake8

@@ -37,7 +43,7 @@ repos:
        alias: flake8-check
        stages: [manual]
  - repo: https://github.com/pre-commit/mirrors-mypy
    rev: v1.3.0
    rev: v0.942
    hooks:
      - id: mypy
        # N.B.: Mypy is... a bit fragile.
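The `black-check` alias above is gated to the `manual` stage so the check-only variant never runs on ordinary commits; it can still be invoked on demand, for example:

```sh
# run the check-only black hook explicitly (normal commits run the formatting hook)
pre-commit run black-check --hook-stage manual --all-files
```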
181 CHANGELOG.md
@@ -5,15 +5,188 @@
- "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
- Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)

## dbt-core 1.3.4 - April 19, 2023

### Fixes

- Improved failed event serialization handling and associated tests ([#7113](https://github.com/dbt-labs/dbt-core/issues/7113), [#7108](https://github.com/dbt-labs/dbt-core/issues/7108), [#6568](https://github.com/dbt-labs/dbt-core/issues/6568))

### Contributors
- [@QMalcolm](https://github.com/QMalcolm) ([#7113](https://github.com/dbt-labs/dbt-core/issues/7113), [#7108](https://github.com/dbt-labs/dbt-core/issues/7108), [#6568](https://github.com/dbt-labs/dbt-core/issues/6568))


## dbt-core 1.3.3 - February 28, 2023

### Fixes

- add pytz dependency ([#7077](https://github.com/dbt-labs/dbt-core/issues/7077))

### Contributors
- [@sdebruyn](https://github.com/sdebruyn) ([#7077](https://github.com/dbt-labs/dbt-core/issues/7077))

## dbt-core 1.3.2 - January 04, 2023

### Fixes

- Bug when partial parsing with an empty schema file ([#4850](https://github.com/dbt-labs/dbt-core/issues/4850), [#<no value>](https://github.com/dbt-labs/dbt-core/pull/<no value>))

## dbt-core 1.3.1 - November 16, 2022
### Features
- This pulls the profile name from args when constructing a RuntimeConfig in lib.py, enabling the dbt-server to override the value that's in the dbt_project.yml ([#6201](https://github.com/dbt-labs/dbt-core/issues/6201), [#6202](https://github.com/dbt-labs/dbt-core/pull/6202))
### Docs
- ([dbt-docs/#5880](https://github.com/dbt-labs/dbt-docs/issues/5880), [dbt-docs/#324](https://github.com/dbt-labs/dbt-docs/pull/324))
- Fix rendering of sample code for metrics ([dbt-docs/#323](https://github.com/dbt-labs/dbt-docs/issues/323), [dbt-docs/#346](https://github.com/dbt-labs/dbt-docs/pull/346))

### Contributors
- [@paulbenschmidt](https://github.com/paulbenschmidt) ([dbt-docs/#324](https://github.com/dbt-labs/dbt-docs/pull/324))
- [@racheldaniel](https://github.com/racheldaniel) ([#6202](https://github.com/dbt-labs/dbt-core/pull/6202))

## dbt-core 1.3.0 - October 12, 2022

### Breaking Changes

- Renaming Metric Spec Attributes ([#5774](https://github.com/dbt-labs/dbt-core/issues/5774), [#5775](https://github.com/dbt-labs/dbt-core/pull/5775))

### Features

- Add `--defer` flag to dbt compile & dbt docs generate ([#4110](https://github.com/dbt-labs/dbt-core/issues/4110), [#4514](https://github.com/dbt-labs/dbt-core/pull/4514))
- Python model initial version ([#5261](https://github.com/dbt-labs/dbt-core/issues/5261), [#5421](https://github.com/dbt-labs/dbt-core/pull/5421))
- Allows users to include the file extension for .py models in the dbt run -m command. ([#5289](https://github.com/dbt-labs/dbt-core/issues/5289), [#5295](https://github.com/dbt-labs/dbt-core/pull/5295))
- Incremental materialization refactor and cleanup ([#5245](https://github.com/dbt-labs/dbt-core/issues/5245), [#5359](https://github.com/dbt-labs/dbt-core/pull/5359))
- Python models can support incremental logic ([#0](https://github.com/dbt-labs/dbt-core/issues/0), [#35](https://github.com/dbt-labs/dbt-core/pull/35))
- Add reusable function for retrying adapter connections. Utilize said function to add retries for Postgres (and Redshift). ([#5022](https://github.com/dbt-labs/dbt-core/issues/5022), [#5432](https://github.com/dbt-labs/dbt-core/pull/5432))
- merge_exclude_columns for incremental materialization ([#5260](https://github.com/dbt-labs/dbt-core/issues/5260), [#5457](https://github.com/dbt-labs/dbt-core/pull/5457))
- add exponential backoff to connection retries on Postgres (and Redshift) ([#5502](https://github.com/dbt-labs/dbt-core/issues/5502), [#5503](https://github.com/dbt-labs/dbt-core/pull/5503))
- use MethodName.File when value ends with .csv ([#5578](https://github.com/dbt-labs/dbt-core/issues/5578), [#5581](https://github.com/dbt-labs/dbt-core/pull/5581))
- Make `docs` configurable in `dbt_project.yml` and add a `node_color` attribute to change the color of nodes in the DAG ([#5333](https://github.com/dbt-labs/dbt-core/issues/5333), [#5397](https://github.com/dbt-labs/dbt-core/pull/5397))
- Adding ResolvedMetricReference helper functions and tests ([#5567](https://github.com/dbt-labs/dbt-core/issues/5567), [#5607](https://github.com/dbt-labs/dbt-core/pull/5607))
- Check dbt-core version requirements when installing Hub packages ([#5648](https://github.com/dbt-labs/dbt-core/issues/5648), [#5651](https://github.com/dbt-labs/dbt-core/pull/5651))
- Search current working directory for `profiles.yml` ([#5411](https://github.com/dbt-labs/dbt-core/issues/5411), [#5717](https://github.com/dbt-labs/dbt-core/pull/5717))
- Adding the `window` parameter to the metric spec. ([#5721](https://github.com/dbt-labs/dbt-core/issues/5721), [#5722](https://github.com/dbt-labs/dbt-core/pull/5722))
- Add invocation args dict to ProviderContext class ([#5524](https://github.com/dbt-labs/dbt-core/issues/5524), [#5782](https://github.com/dbt-labs/dbt-core/pull/5782))
- Adds new cli framework ([#5526](https://github.com/dbt-labs/dbt-core/issues/5526), [#5647](https://github.com/dbt-labs/dbt-core/pull/5647))
- Flags work with new Click CLI ([#5529](https://github.com/dbt-labs/dbt-core/issues/5529), [#5790](https://github.com/dbt-labs/dbt-core/pull/5790))
- Add metadata env method to ProviderContext class ([#5522](https://github.com/dbt-labs/dbt-core/issues/5522), [#5794](https://github.com/dbt-labs/dbt-core/pull/5794))
- Array macros ([#5520](https://github.com/dbt-labs/dbt-core/issues/5520), [#5823](https://github.com/dbt-labs/dbt-core/pull/5823))
- Add enabled config to exposures and metrics ([#5422](https://github.com/dbt-labs/dbt-core/issues/5422), [#5815](https://github.com/dbt-labs/dbt-core/pull/5815))
- Migrate dbt-utils current_timestamp macros into core + adapters ([#5521](https://github.com/dbt-labs/dbt-core/issues/5521), [#5838](https://github.com/dbt-labs/dbt-core/pull/5838))
- add -fr flag shorthand ([#5878](https://github.com/dbt-labs/dbt-core/issues/5878), [#5879](https://github.com/dbt-labs/dbt-core/pull/5879))
- add type_boolean as a data type macro ([#5739](https://github.com/dbt-labs/dbt-core/issues/5739), [#5875](https://github.com/dbt-labs/dbt-core/pull/5875))
- Support .dbtignore in project root to ignore certain files being read by dbt ([#5733](https://github.com/dbt-labs/dbt-core/issues/5733), [#5897](https://github.com/dbt-labs/dbt-core/pull/5897))
- This conditionally no-ops warehouse connection at compile depending on an env var, disabling introspection/queries during compilation only. This is a temporary solution to more complex permissions requirements for the semantic layer. ([#5936](https://github.com/dbt-labs/dbt-core/issues/5936), [#5926](https://github.com/dbt-labs/dbt-core/pull/5926))

### Fixes

- Remove the default 256 characters limit on postgres character varying type when no limitation is set ([#5238](https://github.com/dbt-labs/dbt-core/issues/5238), [#5292](https://github.com/dbt-labs/dbt-core/pull/5292))
- Include schema file config in unrendered_config ([#5338](https://github.com/dbt-labs/dbt-core/issues/5338), [#5344](https://github.com/dbt-labs/dbt-core/pull/5344))
- Add context to compilation errors generated while rendering generic test configuration values. ([#5294](https://github.com/dbt-labs/dbt-core/issues/5294), [#5393](https://github.com/dbt-labs/dbt-core/pull/5393))
- Resolves #5351 - Do not consider shorter varchar cols as schema changes ([#5351](https://github.com/dbt-labs/dbt-core/issues/5351), [#5395](https://github.com/dbt-labs/dbt-core/pull/5395))
- Rename try to strict for more intuitiveness ([#5475](https://github.com/dbt-labs/dbt-core/issues/5475), [#5477](https://github.com/dbt-labs/dbt-core/pull/5477))
- on_schema_change fail verbosity enhancement ([#5504](https://github.com/dbt-labs/dbt-core/issues/5504), [#5505](https://github.com/dbt-labs/dbt-core/pull/5505))
- Ignore empty strings passed in as secrets ([#5312](https://github.com/dbt-labs/dbt-core/issues/5312), [#5518](https://github.com/dbt-labs/dbt-core/pull/5518))
- Fix handling of top-level exceptions ([#5564](https://github.com/dbt-labs/dbt-core/issues/5564), [#5560](https://github.com/dbt-labs/dbt-core/pull/5560))
- Fix error rendering docs block in metrics description ([#5585](https://github.com/dbt-labs/dbt-core/issues/5585), [#5603](https://github.com/dbt-labs/dbt-core/pull/5603))
- Extended validations for the project names ([#5379](https://github.com/dbt-labs/dbt-core/issues/5379), [#5620](https://github.com/dbt-labs/dbt-core/pull/5620))
- Use sys.exit instead of exit ([#5621](https://github.com/dbt-labs/dbt-core/issues/5621), [#5627](https://github.com/dbt-labs/dbt-core/pull/5627))
- Finishing logic upgrade to Redshift for name truncation collisions. ([#5586](https://github.com/dbt-labs/dbt-core/issues/5586), [#5656](https://github.com/dbt-labs/dbt-core/pull/5656))
- multiple args for ref and source ([#5634](https://github.com/dbt-labs/dbt-core/issues/5634), [#5635](https://github.com/dbt-labs/dbt-core/pull/5635))
- Fix Unexpected behavior when chaining methods on dbt-ref'ed/sourced dataframes ([#5646](https://github.com/dbt-labs/dbt-core/issues/5646), [#5677](https://github.com/dbt-labs/dbt-core/pull/5677))
- Fix typos of comments in core/dbt/adapters/ ([#5690](https://github.com/dbt-labs/dbt-core/issues/5690), [#5693](https://github.com/dbt-labs/dbt-core/pull/5693))
- Include py.typed in MANIFEST.in. This enables packages that install dbt-core from pypi to use mypy. ([#5703](https://github.com/dbt-labs/dbt-core/issues/5703), [#5703](https://github.com/dbt-labs/dbt-core/pull/5703))
- Removal of all .coverage files when using make clean command ([#5633](https://github.com/dbt-labs/dbt-core/issues/5633), [#5759](https://github.com/dbt-labs/dbt-core/pull/5759))
- Remove temp files generated by unit tests ([#5631](https://github.com/dbt-labs/dbt-core/issues/5631), [#5749](https://github.com/dbt-labs/dbt-core/pull/5749))
- Fix warnings as errors during tests ([#5424](https://github.com/dbt-labs/dbt-core/issues/5424), [#5800](https://github.com/dbt-labs/dbt-core/pull/5800))
- Prevent event_history from holding references ([#5848](https://github.com/dbt-labs/dbt-core/issues/5848), [#5858](https://github.com/dbt-labs/dbt-core/pull/5858))
- Account for disabled flags on models in schema files more completely ([#3992](https://github.com/dbt-labs/dbt-core/issues/3992), [#5868](https://github.com/dbt-labs/dbt-core/pull/5868))
- ConfigSelectorMethod should check for bools ([#5890](https://github.com/dbt-labs/dbt-core/issues/5890), [#5889](https://github.com/dbt-labs/dbt-core/pull/5889))
- shorthand for full refresh should be one character ([#5878](https://github.com/dbt-labs/dbt-core/issues/5878), [#5908](https://github.com/dbt-labs/dbt-core/pull/5908))
- Fix macro resolution order during static analysis for custom generic tests ([#5720](https://github.com/dbt-labs/dbt-core/issues/5720), [#5907](https://github.com/dbt-labs/dbt-core/pull/5907))
- Fix race condition when invoking dbt via lib.py concurrently ([#5919](https://github.com/dbt-labs/dbt-core/issues/5919), [#5921](https://github.com/dbt-labs/dbt-core/pull/5921))
- check length of args of python model function before accessing it ([#6041](https://github.com/dbt-labs/dbt-core/issues/6041), [#6042](https://github.com/dbt-labs/dbt-core/pull/6042))

### Docs

- Update dependency inline-source from ^6.1.5 to ^7.2.0 ([dbt-docs/#299](https://github.com/dbt-labs/dbt-docs/issues/299), [dbt-docs/#291](https://github.com/dbt-labs/dbt-docs/pull/291))
- Update dependency jest from ^26.2.2 to ^28.1.3 ([dbt-docs/#299](https://github.com/dbt-labs/dbt-docs/issues/299), [dbt-docs/#291](https://github.com/dbt-labs/dbt-docs/pull/291))
- Update dependency underscore from ^1.9.0 to ^1.13.4 ([dbt-docs/#299](https://github.com/dbt-labs/dbt-docs/issues/299), [dbt-docs/#291](https://github.com/dbt-labs/dbt-docs/pull/291))
- Update dependency webpack-cli from ^3.3.12 to ^4.7.0 ([dbt-docs/#299](https://github.com/dbt-labs/dbt-docs/issues/299), [dbt-docs/#291](https://github.com/dbt-labs/dbt-docs/pull/291))
- Update dependency webpack-dev-server from ^3.1.11 to ^4.9.3 ([dbt-docs/#299](https://github.com/dbt-labs/dbt-docs/issues/299), [dbt-docs/#291](https://github.com/dbt-labs/dbt-docs/pull/291))
- Searches no longer require perfect matches, and instead consider each word individually. `my model` or `model my` will now find `my_model`, without the need for underscores ([dbt-docs/#143](https://github.com/dbt-labs/dbt-docs/issues/143), [dbt-docs/#145](https://github.com/dbt-labs/dbt-docs/pull/145))
- Support the renaming of SQL to code happening in dbt-core ([dbt-docs/#299](https://github.com/dbt-labs/dbt-docs/issues/299), [dbt-docs/#292](https://github.com/dbt-labs/dbt-docs/pull/292))
- Leverages `docs.node_color` from `dbt-core` to color nodes in the DAG ([dbt-docs/#44](https://github.com/dbt-labs/dbt-docs/issues/44), [dbt-docs/#281](https://github.com/dbt-labs/dbt-docs/pull/281))
- Refer to exposures by their label by default. ([dbt-docs/#306](https://github.com/dbt-labs/dbt-docs/issues/306), [dbt-docs/#307](https://github.com/dbt-labs/dbt-docs/pull/307))

### Under the Hood

- Added language to tracked fields in run_model event ([#5571](https://github.com/dbt-labs/dbt-core/issues/5571), [#5469](https://github.com/dbt-labs/dbt-core/pull/5469))
- Update mashumaro to 3.0.3 ([#4940](https://github.com/dbt-labs/dbt-core/issues/4940), [#5118](https://github.com/dbt-labs/dbt-core/pull/5118))
- Add python incremental materialization test ([#0000](https://github.com/dbt-labs/dbt-core/issues/0000), [#5571](https://github.com/dbt-labs/dbt-core/pull/5571))
- Save use of default env vars to manifest to enable partial parsing in those cases. ([#5155](https://github.com/dbt-labs/dbt-core/issues/5155), [#5589](https://github.com/dbt-labs/dbt-core/pull/5589))
- add more information to log line interop test failures ([#5658](https://github.com/dbt-labs/dbt-core/issues/5658), [#5659](https://github.com/dbt-labs/dbt-core/pull/5659))
- Add supported languages to materializations ([#5569](https://github.com/dbt-labs/dbt-core/issues/5569), [#5695](https://github.com/dbt-labs/dbt-core/pull/5695))
- Migrate integration test 014 but also fix the snapshot hard delete test's timezone logic, and force all integration tests to run flags.set_from_args so that environment variables are accessible to all integration test threads. ([#5760](https://github.com/dbt-labs/dbt-core/issues/5760), [#5760](https://github.com/dbt-labs/dbt-core/pull/5760))
- Support dbt-metrics compilation by rebuilding flat_graph ([#5525](https://github.com/dbt-labs/dbt-core/issues/5525), [#5786](https://github.com/dbt-labs/dbt-core/pull/5786))
- Reworking the way we define the window attribute of metrics to match freshness tests ([#5722](https://github.com/dbt-labs/dbt-core/issues/5722), [#5793](https://github.com/dbt-labs/dbt-core/pull/5793))
- Add PythonJobHelper base class in core and add more type checking ([#5802](https://github.com/dbt-labs/dbt-core/issues/5802), [#5802](https://github.com/dbt-labs/dbt-core/pull/5802))
- The link did not go to the anchor directly, now it does ([#5813](https://github.com/dbt-labs/dbt-core/issues/5813), [#5814](https://github.com/dbt-labs/dbt-core/pull/5814))
- remove key as reserved keyword from test_bool_or ([#5817](https://github.com/dbt-labs/dbt-core/issues/5817), [#5818](https://github.com/dbt-labs/dbt-core/pull/5818))
- Convert default selector tests to pytest ([#5728](https://github.com/dbt-labs/dbt-core/issues/5728), [#5820](https://github.com/dbt-labs/dbt-core/pull/5820))
- Compatibility for metric attribute renaming ([#5807](https://github.com/dbt-labs/dbt-core/issues/5807), [#5825](https://github.com/dbt-labs/dbt-core/pull/5825))
- remove source quoting setting in adapter tests ([#5836](https://github.com/dbt-labs/dbt-core/issues/5836), [#5839](https://github.com/dbt-labs/dbt-core/pull/5839))
- Add name validation for metrics ([#5456](https://github.com/dbt-labs/dbt-core/issues/5456), [#5841](https://github.com/dbt-labs/dbt-core/pull/5841))
- Validate exposure name and add label ([#5606](https://github.com/dbt-labs/dbt-core/issues/5606), [#5844](https://github.com/dbt-labs/dbt-core/pull/5844))
- Adding validation for metric expression attribute ([#5871](https://github.com/dbt-labs/dbt-core/issues/5871), [#5873](https://github.com/dbt-labs/dbt-core/pull/5873))
- Profiling and Adapter Management work with Click CLI ([#5531](https://github.com/dbt-labs/dbt-core/issues/5531), [#5892](https://github.com/dbt-labs/dbt-core/pull/5892))
- Reparse references to deleted metric ([#5444](https://github.com/dbt-labs/dbt-core/issues/5444), [#5920](https://github.com/dbt-labs/dbt-core/pull/5920))

### Dependencies

- Upgrade to Jinja2==3.1.2 from Jinja2==2.11.3 ([#4748](https://github.com/dbt-labs/dbt-core/issues/4748), [#5465](https://github.com/dbt-labs/dbt-core/pull/5465))
- Bump mypy from 0.961 to 0.971 ([#4904](https://github.com/dbt-labs/dbt-core/issues/4904), [#5495](https://github.com/dbt-labs/dbt-core/pull/5495))
- Remove pin for MarkUpSafe from >=0.23,<2.1 ([#5506](https://github.com/dbt-labs/dbt-core/issues/5506), [#5507](https://github.com/dbt-labs/dbt-core/pull/5507))

### Dependency

- Bump python from 3.10.5-slim-bullseye to 3.10.6-slim-bullseye in /docker ([#4904](https://github.com/dbt-labs/dbt-core/issues/4904), [#5623](https://github.com/dbt-labs/dbt-core/pull/5623))
- Bump mashumaro[msgpack] from 3.0.3 to 3.0.4 in /core ([#4904](https://github.com/dbt-labs/dbt-core/issues/4904), [#5649](https://github.com/dbt-labs/dbt-core/pull/5649))
- Bump black from 22.6.0 to 22.8.0 ([#4904](https://github.com/dbt-labs/dbt-core/issues/4904), [#5750](https://github.com/dbt-labs/dbt-core/pull/5750))
- Bump python from 3.10.6-slim-bullseye to 3.10.7-slim-bullseye in /docker ([#4904](https://github.com/dbt-labs/dbt-core/issues/4904), [#5805](https://github.com/dbt-labs/dbt-core/pull/5805))

### Contributors
- [@Goodkat](https://github.com/Goodkat) ([#5581](https://github.com/dbt-labs/dbt-core/pull/5581), [#5518](https://github.com/dbt-labs/dbt-core/pull/5518), [#5620](https://github.com/dbt-labs/dbt-core/pull/5620))
- [@Ilanbenb](https://github.com/Ilanbenb) ([#5505](https://github.com/dbt-labs/dbt-core/pull/5505))
- [@b-per](https://github.com/b-per) ([#5397](https://github.com/dbt-labs/dbt-core/pull/5397), [dbt-docs/#281](https://github.com/dbt-labs/dbt-docs/pull/281))
- [@bbroeksema](https://github.com/bbroeksema) ([#5749](https://github.com/dbt-labs/dbt-core/pull/5749))
- [@callum-mcdata](https://github.com/callum-mcdata) ([#5775](https://github.com/dbt-labs/dbt-core/pull/5775), [#5607](https://github.com/dbt-labs/dbt-core/pull/5607), [#5722](https://github.com/dbt-labs/dbt-core/pull/5722), [#5793](https://github.com/dbt-labs/dbt-core/pull/5793), [#5825](https://github.com/dbt-labs/dbt-core/pull/5825), [#5873](https://github.com/dbt-labs/dbt-core/pull/5873))
- [@danielcmessias](https://github.com/danielcmessias) ([#5889](https://github.com/dbt-labs/dbt-core/pull/5889))
- [@dave-connors-3](https://github.com/dave-connors-3) ([#5457](https://github.com/dbt-labs/dbt-core/pull/5457), [#5879](https://github.com/dbt-labs/dbt-core/pull/5879), [#5908](https://github.com/dbt-labs/dbt-core/pull/5908))
- [@dbeatty10](https://github.com/dbeatty10) ([#5717](https://github.com/dbt-labs/dbt-core/pull/5717), [#5823](https://github.com/dbt-labs/dbt-core/pull/5823))
- [@drewbanin](https://github.com/drewbanin) ([#5921](https://github.com/dbt-labs/dbt-core/pull/5921), [dbt-docs/#292](https://github.com/dbt-labs/dbt-docs/pull/292))
- [@epapineau](https://github.com/epapineau) ([#5395](https://github.com/dbt-labs/dbt-core/pull/5395))
- [@graciegoheen](https://github.com/graciegoheen) ([#5823](https://github.com/dbt-labs/dbt-core/pull/5823))
- [@jared-rimmer](https://github.com/jared-rimmer) ([#5782](https://github.com/dbt-labs/dbt-core/pull/5782), [#5794](https://github.com/dbt-labs/dbt-core/pull/5794), [#5759](https://github.com/dbt-labs/dbt-core/pull/5759))
- [@jeremyyeo](https://github.com/jeremyyeo) ([#5477](https://github.com/dbt-labs/dbt-core/pull/5477))
- [@joellabes](https://github.com/joellabes) ([dbt-docs/#145](https://github.com/dbt-labs/dbt-docs/pull/145))
- [@jpmmcneill](https://github.com/jpmmcneill) ([#5875](https://github.com/dbt-labs/dbt-core/pull/5875))
- [@kadero](https://github.com/kadero) ([#4514](https://github.com/dbt-labs/dbt-core/pull/4514))
- [@leoebfolsom](https://github.com/leoebfolsom) ([#5295](https://github.com/dbt-labs/dbt-core/pull/5295))
- [@matt-winkler](https://github.com/matt-winkler) ([#5397](https://github.com/dbt-labs/dbt-core/pull/5397), [dbt-docs/#281](https://github.com/dbt-labs/dbt-docs/pull/281))
- [@nicholasyager](https://github.com/nicholasyager) ([#5393](https://github.com/dbt-labs/dbt-core/pull/5393))
- [@panasenco](https://github.com/panasenco) ([#5703](https://github.com/dbt-labs/dbt-core/pull/5703))
- [@racheldaniel](https://github.com/racheldaniel) ([#5926](https://github.com/dbt-labs/dbt-core/pull/5926))
- [@sdebruyn](https://github.com/sdebruyn) ([#5814](https://github.com/dbt-labs/dbt-core/pull/5814), [#5818](https://github.com/dbt-labs/dbt-core/pull/5818), [#5839](https://github.com/dbt-labs/dbt-core/pull/5839))
- [@shrodingers](https://github.com/shrodingers) ([#5292](https://github.com/dbt-labs/dbt-core/pull/5292))
- [@sungchun12](https://github.com/sungchun12) ([#5397](https://github.com/dbt-labs/dbt-core/pull/5397), [dbt-docs/#281](https://github.com/dbt-labs/dbt-docs/pull/281))
- [@tomasfarias](https://github.com/tomasfarias) ([#5432](https://github.com/dbt-labs/dbt-core/pull/5432))
- [@varun-dc](https://github.com/varun-dc) ([#5627](https://github.com/dbt-labs/dbt-core/pull/5627))
- [@yoiki](https://github.com/yoiki) ([#5693](https://github.com/dbt-labs/dbt-core/pull/5693))
- [@chamini2](https://github.com/chamini2) ([#6042](https://github.com/dbt-labs/dbt-core/pull/6042))

## Previous Releases

For information on prior major and minor releases, see their changelogs:

* [1.6](https://github.com/dbt-labs/dbt-core/blob/1.6.latest/CHANGELOG.md)
* [1.5](https://github.com/dbt-labs/dbt-core/blob/1.5.latest/CHANGELOG.md)
* [1.4](https://github.com/dbt-labs/dbt-core/blob/1.4.latest/CHANGELOG.md)
* [1.3](https://github.com/dbt-labs/dbt-core/blob/1.3.latest/CHANGELOG.md)
* [1.2](https://github.com/dbt-labs/dbt-core/blob/1.2.latest/CHANGELOG.md)
* [1.1](https://github.com/dbt-labs/dbt-core/blob/1.1.latest/CHANGELOG.md)
* [1.0](https://github.com/dbt-labs/dbt-core/blob/1.0.latest/CHANGELOG.md)
@@ -5,10 +5,10 @@
1. [About this document](#about-this-document)
2. [Getting the code](#getting-the-code)
3. [Setting up an environment](#setting-up-an-environment)
4. [Running dbt-core in development](#running-dbt-core-in-development)
4. [Running `dbt` in development](#running-dbt-core-in-development)
5. [Testing dbt-core](#testing)
6. [Debugging](#debugging)
7. [Adding or modifying a changelog entry](#adding-or-modifying-a-changelog-entry)
7. [Adding a changelog entry](#adding-a-changelog-entry)
8. [Submitting a Pull Request](#submitting-a-pull-request)

## About this document

@@ -56,7 +56,7 @@ There are some tools that will be helpful to you in developing locally. While th

These are the tools used in `dbt-core` development and testing:

- [`tox`](https://tox.readthedocs.io/en/latest/) to manage virtualenvs across python versions. We currently target the latest patch releases for Python 3.8, 3.9, 3.10 and 3.11
- [`tox`](https://tox.readthedocs.io/en/latest/) to manage virtualenvs across python versions. We currently target the latest patch releases for Python 3.7, 3.8, 3.9, and 3.10
- [`pytest`](https://docs.pytest.org/en/latest/) to define, discover, and run tests
- [`flake8`](https://flake8.pycqa.org/en/latest/) for code linting
- [`black`](https://github.com/psf/black) for code formatting

@@ -96,15 +96,12 @@ brew install postgresql

### Installation

First make sure that you set up your `virtualenv` as described in [Setting up an environment](#setting-up-an-environment). Also ensure you have the latest version of pip installed with `pip install --upgrade pip`. Next, install `dbt-core` (and its dependencies):
First make sure that you set up your `virtualenv` as described in [Setting up an environment](#setting-up-an-environment). Also ensure you have the latest version of pip installed with `pip install --upgrade pip`. Next, install `dbt-core` (and its dependencies) with:

```sh
make dev
```
or, alternatively:
```sh
# or
pip install -r dev-requirements.txt -r editable-requirements.txt
pre-commit install
```

When installed in this way, any changes you make to your local copy of the source code will be reflected immediately in your next `dbt` run.

@@ -113,7 +110,7 @@ When installed in this way, any changes you make to your local copy of the sourc

With your virtualenv activated, the `dbt` script should point back to the source code you've cloned on your machine. You can verify this by running `which dbt`. This command should show you a path to an executable in your virtualenv.

Configure your [profile](https://docs.getdbt.com/docs/configure-your-profile) as necessary to connect to your target databases. It may be a good idea to add a new profile pointing to a local Postgres instance, or a specific test sandbox within your data warehouse if appropriate. Make sure to create a profile before running integration tests.
Configure your [profile](https://docs.getdbt.com/docs/configure-your-profile) as necessary to connect to your target databases. It may be a good idea to add a new profile pointing to a local Postgres instance, or a specific test sandbox within your data warehouse if appropriate.

## Testing

@@ -163,7 +160,7 @@ suites.

#### `tox`

[`tox`](https://tox.readthedocs.io/en/latest/) takes care of managing virtualenvs and installing dependencies in order to run tests. You can also run tests in parallel; for example, you can run unit tests for Python 3.8, Python 3.9, Python 3.10 and Python 3.11 in parallel with `tox -p`. Also, you can run unit tests for specific python versions with `tox -e py38`. The configuration for these tests is located in `tox.ini`.
[`tox`](https://tox.readthedocs.io/en/latest/) takes care of managing virtualenvs and installing dependencies in order to run tests. You can also run tests in parallel; for example, you can run unit tests for Python 3.7, Python 3.8, Python 3.9, and Python 3.10 in parallel with `tox -p`. Also, you can run unit tests for specific python versions with `tox -e py37`. The configuration for these tests is located in `tox.ini`.

#### `pytest`

@@ -171,10 +168,12 @@ Finally, you can also run a specific test or group of tests using [`pytest`](htt

```sh
# run all unit tests in a file
python3 -m pytest tests/unit/test_graph.py
python3 -m pytest test/unit/test_graph.py
# run a specific unit test
python3 -m pytest tests/unit/test_graph.py::GraphTest::test__dependency_list
# run specific Postgres functional tests
python3 -m pytest test/unit/test_graph.py::GraphTest::test__dependency_list
# run specific Postgres integration tests (old way)
python3 -m pytest -m profile_postgres test/integration/074_postgres_unlogged_table_tests
# run specific Postgres integration tests (new way)
python3 -m pytest tests/functional/sources
```

@@ -183,8 +182,9 @@ python3 -m pytest tests/functional/sources
### Unit, Integration, Functional?

Here are some general rules for adding tests:
* unit tests (`tests/unit`) don’t need to access a database; "pure Python" tests should be written as unit tests
* functional tests (`tests/functional`) cover anything that interacts with a database, namely adapter
* unit tests (`test/unit` & `tests/unit`) don’t need to access a database; "pure Python" tests should be written as unit tests
* functional tests (`test/integration` & `tests/functional`) cover anything that interacts with a database, namely adapter
* *everything in* `test/*` *is being steadily migrated to* `tests/*`

## Debugging
@@ -9,7 +9,7 @@ ENV DEBIAN_FRONTEND noninteractive

RUN apt-get update \
  && apt-get install -y --no-install-recommends \
    software-properties-common gpg-agent \
    software-properties-common \
  && add-apt-repository ppa:git-core/ppa -y \
  && apt-get dist-upgrade -y \
  && apt-get install -y --no-install-recommends \

@@ -30,9 +30,16 @@ RUN apt-get update \
    unixodbc-dev \
  && add-apt-repository ppa:deadsnakes/ppa \
  && apt-get install -y \
    python-is-python3 \
    python-dev-is-python3 \
    python \
    python-dev \
    python3-pip \
    python3.6 \
    python3.6-dev \
    python3-pip \
    python3.6-venv \
    python3.7 \
    python3.7-dev \
    python3.7-venv \
    python3.8 \
    python3.8-dev \
    python3.8-venv \

@@ -42,9 +49,6 @@ RUN apt-get update \
    python3.10 \
    python3.10-dev \
    python3.10-venv \
    python3.11 \
    python3.11-dev \
    python3.11-venv \
  && apt-get clean \
  && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
48
Makefile
@@ -6,41 +6,23 @@ ifeq ($(USE_DOCKER),true)
DOCKER_CMD := docker-compose run --rm test
endif

#
# To override CI_flags, create a file at this repo's root dir named `makefile.test.env`. Fill it
# with any ENV_VAR overrides required by your test environment, e.g.
# DBT_TEST_USER_1=user
# LOG_DIR="dir with a space in it"
#
# Warn: Restrict each line to one variable only.
#
ifeq (./makefile.test.env,$(wildcard ./makefile.test.env))
include ./makefile.test.env
endif
LOGS_DIR := ./logs

# Optional flag to invoke tests using our CI env.
# But we always want these active for structured
# log testing.
CI_FLAGS =\
    DBT_TEST_USER_1=$(if $(DBT_TEST_USER_1),$(DBT_TEST_USER_1),dbt_test_user_1)\
    DBT_TEST_USER_2=$(if $(DBT_TEST_USER_2),$(DBT_TEST_USER_2),dbt_test_user_2)\
    DBT_TEST_USER_3=$(if $(DBT_TEST_USER_3),$(DBT_TEST_USER_3),dbt_test_user_3)\
    RUSTFLAGS=$(if $(RUSTFLAGS),$(RUSTFLAGS),"-D warnings")\
    LOG_DIR=$(if $(LOG_DIR),$(LOG_DIR),./logs)\
    DBT_LOG_FORMAT=$(if $(DBT_LOG_FORMAT),$(DBT_LOG_FORMAT),json)
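Following the comment block above, a `makefile.test.env` at the repo root might look like this (all values are illustrative, one variable per line):

```
DBT_TEST_USER_1=my_postgres_user
LOG_DIR="directory with a space in it"
DBT_LOG_FORMAT=text
```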
.PHONY: dev_req
dev_req: ## Installs dbt-* packages in develop mode along with only development dependencies.
	@\
	pip install -r dev-requirements.txt
	pip install -r editable-requirements.txt
	DBT_TEST_USER_1=dbt_test_user_1\
	DBT_TEST_USER_2=dbt_test_user_2\
	DBT_TEST_USER_3=dbt_test_user_3\
	RUSTFLAGS="-D warnings"\
	LOG_DIR=./logs\
	DBT_LOG_FORMAT=json

.PHONY: dev
dev: dev_req ## Installs dbt-* packages in develop mode along with development dependencies and pre-commit.
dev: ## Installs dbt-* packages in develop mode along with development dependencies.
	@\
	pre-commit install

.PHONY: proto_types
proto_types: ## generates google protobuf python file from types.proto
	protoc -I=./core/dbt/events --python_out=./core/dbt/events ./core/dbt/events/types.proto
	pip install -r dev-requirements.txt -r editable-requirements.txt

.PHONY: mypy
mypy: .env ## Runs mypy against staged changes for static type checking.
@@ -79,7 +61,7 @@ test: .env ## Runs unit tests with py and code checks against staged changes.
.PHONY: integration
integration: .env ## Runs postgres integration tests with py-integration
	@\
	$(CI_FLAGS) $(DOCKER_CMD) tox -e py-integration -- -nauto
	$(if $(USE_CI_FLAGS), $(CI_FLAGS)) $(DOCKER_CMD) tox -e py-integration -- -nauto

.PHONY: integration-fail-fast
integration-fail-fast: .env ## Runs postgres integration tests with py-integration in "fail fast" mode.
@@ -89,9 +71,9 @@ integration-fail-fast: .env ## Runs postgres integration tests with py-integrati
.PHONY: interop
interop: clean
	@\
	mkdir $(LOG_DIR) && \
	mkdir $(LOGS_DIR) && \
	$(CI_FLAGS) $(DOCKER_CMD) tox -e py-integration -- -nauto && \
	LOG_DIR=$(LOG_DIR) cargo run --manifest-path test/interop/log_parsing/Cargo.toml
	LOG_DIR=$(LOGS_DIR) cargo run --manifest-path test/interop/log_parsing/Cargo.toml

.PHONY: setup-db
setup-db: ## Setup Postgres database with docker-compose for system testing.
@@ -21,7 +21,7 @@ These select statements, or "models", form a dbt project. Models frequently buil

## Getting started

- [Install dbt](https://docs.getdbt.com/docs/get-started/installation)
- [Install dbt](https://docs.getdbt.com/docs/installation)
- Read the [introduction](https://docs.getdbt.com/docs/introduction/) and [viewpoint](https://docs.getdbt.com/docs/about/viewpoint/)

## Join the dbt Community
@@ -2,59 +2,50 @@

## The following are individual files in this directory.

### compilation.py
### constants.py
### dataclass_schema.py
### deprecations.py
### exceptions.py
### flags.py
### helper_types.py
### hooks.py
### lib.py
### links.py
### logger.py
### main.py
### node_types.py
### profiler.py
### selected_resources.py
### semver.py
### tracking.py
### version.py
### lib.py
### node_types.py
### helper_types.py
### links.py
### semver.py
### ui.py
### compilation.py
### dataclass_schema.py
### exceptions.py
### hooks.py
### logger.py
### profiler.py
### utils.py
### version.py

## The subdirectories will be documented in a README in the subdirectory
* adapters
* cli
* clients
* config
* context
* contracts
* deps
* docs
* events
* graph
* include
* parser
* adapters
* context
* deps
* graph
* task
* tests
* clients
* events
@@ -1,19 +1,14 @@
|
||||
# these are all just exports, #noqa them so flake8 will be happy
|
||||
|
||||
# TODO: Should we still include this in the `adapters` namespace?
|
||||
from dbt.contracts.connection import Credentials # noqa: F401
|
||||
from dbt.adapters.base.meta import available # noqa: F401
|
||||
from dbt.adapters.base.connections import BaseConnectionManager # noqa: F401
|
||||
from dbt.adapters.base.relation import ( # noqa: F401
|
||||
from dbt.contracts.connection import Credentials # noqa
|
||||
from dbt.adapters.base.meta import available # noqa
|
||||
from dbt.adapters.base.connections import BaseConnectionManager # noqa
|
||||
from dbt.adapters.base.relation import ( # noqa
|
||||
BaseRelation,
|
||||
RelationType,
|
||||
SchemaSearchMap,
|
||||
)
|
||||
from dbt.adapters.base.column import Column # noqa: F401
|
||||
from dbt.adapters.base.impl import ( # noqa: F401
|
||||
AdapterConfig,
|
||||
BaseAdapter,
|
||||
PythonJobHelper,
|
||||
ConstraintSupport,
|
||||
)
|
||||
from dbt.adapters.base.plugin import AdapterPlugin # noqa: F401
|
||||
from dbt.adapters.base.column import Column # noqa
|
||||
from dbt.adapters.base.impl import AdapterConfig, BaseAdapter, PythonJobHelper # noqa
|
||||
from dbt.adapters.base.plugin import AdapterPlugin # noqa
|
||||
|
||||
@@ -2,7 +2,7 @@ from dataclasses import dataclass
import re
from typing import Dict, ClassVar, Any, Optional

from dbt.exceptions import DbtRuntimeError
from dbt.exceptions import RuntimeException


@dataclass
@@ -60,7 +60,6 @@ class Column:
        "float",
        "double precision",
        "float8",
        "double",
    ]

    def is_integer(self) -> bool:
@@ -86,7 +85,7 @@ class Column:

    def string_size(self) -> int:
        if not self.is_string():
            raise DbtRuntimeError("Called string_size() on non-string field!")
            raise RuntimeException("Called string_size() on non-string field!")

        if self.dtype == "text" or self.char_size is None:
            # char_size should never be None. Handle it reasonably just in case
@@ -125,7 +124,7 @@ class Column:
    def from_description(cls, name: str, raw_data_type: str) -> "Column":
        match = re.match(r"([^(]+)(\([^)]+\))?", raw_data_type)
        if match is None:
            raise DbtRuntimeError(f'Could not interpret data type "{raw_data_type}"')
            raise RuntimeException(f'Could not interpret data type "{raw_data_type}"')
        data_type, size_info = match.groups()
        char_size = None
        numeric_precision = None
@@ -138,7 +137,7 @@ class Column:
                try:
                    char_size = int(parts[0])
                except ValueError:
                    raise DbtRuntimeError(
                    raise RuntimeException(
                        f'Could not interpret data_type "{raw_data_type}": '
                        f'could not convert "{parts[0]}" to an integer'
                    )
@@ -146,14 +145,14 @@ class Column:
                try:
                    numeric_precision = int(parts[0])
                except ValueError:
                    raise DbtRuntimeError(
                    raise RuntimeException(
                        f'Could not interpret data_type "{raw_data_type}": '
                        f'could not convert "{parts[0]}" to an integer'
                    )
                try:
                    numeric_scale = int(parts[1])
                except ValueError:
                    raise DbtRuntimeError(
                    raise RuntimeException(
                        f'Could not interpret data_type "{raw_data_type}": '
                        f'could not convert "{parts[1]}" to an integer'
                    )
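As a quick illustration of what the `from_description` regex above captures, independent of the rest of the class:

```python
import re

# First group: the bare type name; second group: the optional parenthesized
# size or precision, exactly as matched in Column.from_description above.
match = re.match(r"([^(]+)(\([^)]+\))?", "numeric(38,9)")
data_type, size_info = match.groups()
print(data_type)  # "numeric"
print(size_info)  # "(38,9)"
```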
@@ -2,7 +2,6 @@ import abc
import os
from time import sleep
import sys
import traceback

# multiprocessing.RLock is a function returning this type
from multiprocessing.synchronize import RLock
@@ -41,16 +40,14 @@ from dbt.events.functions import fire_event
from dbt.events.types import (
    NewConnection,
    ConnectionReused,
    ConnectionLeftOpenInCleanup,
    ConnectionLeftOpen,
    ConnectionClosedInCleanup,
    ConnectionLeftOpen2,
    ConnectionClosed,
    ConnectionClosed2,
    Rollback,
    RollbackFailed,
)
from dbt.events.contextvars import get_node_info
from dbt import flags
from dbt.utils import cast_to_str

SleepTime = Union[int, float]  # As taken by time.sleep.
AdapterHandle = Any  # Adapter connection handle objects can be any class.
@@ -91,13 +88,13 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
        key = self.get_thread_identifier()
        with self.lock:
            if key not in self.thread_connections:
                raise dbt.exceptions.InvalidConnectionError(key, list(self.thread_connections))
                raise dbt.exceptions.InvalidConnectionException(key, list(self.thread_connections))
            return self.thread_connections[key]

    def set_thread_connection(self, conn: Connection) -> None:
        key = self.get_thread_identifier()
        if key in self.thread_connections:
            raise dbt.exceptions.DbtInternalError(
            raise dbt.exceptions.InternalException(
                "In set_thread_connection, existing connection exists for {}"
            )
        self.thread_connections[key] = conn
@@ -137,49 +134,47 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
        :return: A context manager that handles exceptions raised by the
            underlying database.
        """
        raise dbt.exceptions.NotImplementedError(
        raise dbt.exceptions.NotImplementedException(
            "`exception_handler` is not implemented for this adapter!"
        )

    def set_connection_name(self, name: Optional[str] = None) -> Connection:
        """Called by 'acquire_connection' in BaseAdapter, which is called by
        'connection_named', called by 'connection_for(node)'.
        Creates a connection for this thread if one doesn't already
        exist, and will rename an existing connection."""
        conn_name: str
        if name is None:
            # if a name isn't specified, we'll re-use a single handle
            # named 'master'
            conn_name = "master"
        else:
            if not isinstance(name, str):
                raise dbt.exceptions.CompilerException(
                    f"For connection name, got {name} - not a string!"
                )
            assert isinstance(name, str)
            conn_name = name

        conn_name: str = "master" if name is None else name

        # Get a connection for this thread
        conn = self.get_if_exists()

        if conn and conn.name == conn_name and conn.state == "open":
            # Found a connection and nothing to do, so just return it
            return conn

        if conn is None:
            # Create a new connection
            conn = Connection(
                type=Identifier(self.TYPE),
                name=conn_name,
                name=None,
                state=ConnectionState.INIT,
                transaction_open=False,
                handle=None,
                credentials=self.profile.credentials,
            )
            conn.handle = LazyHandle(self.open)
            # Add the connection to thread_connections for this thread
            self.set_thread_connection(conn)
            fire_event(
                NewConnection(conn_name=conn_name, conn_type=self.TYPE, node_info=get_node_info())
            )
        else:  # existing connection either wasn't open or didn't have the right name
            if conn.state != "open":
                conn.handle = LazyHandle(self.open)
            if conn.name != conn_name:
                orig_conn_name: str = conn.name or ""
                conn.name = conn_name
                fire_event(ConnectionReused(orig_conn_name=orig_conn_name, conn_name=conn_name))

        if conn.name == conn_name and conn.state == "open":
            return conn

        fire_event(NewConnection(conn_name=conn_name, conn_type=self.TYPE))

        if conn.state == "open":
            fire_event(ConnectionReused(conn_name=conn_name))
        else:
            conn.handle = LazyHandle(self.open)

        conn.name = conn_name
        return conn

    @classmethod
@@ -211,7 +206,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
            connect should trigger a retry.
        :type retryable_exceptions: Iterable[Type[Exception]]
        :param int retry_limit: How many times to retry the call to connect. If this limit
            is exceeded before a successful call, a FailedToConnectError will be raised.
            is exceeded before a successful call, a FailedToConnectException will be raised.
            Must be non-negative.
        :param retry_timeout: Time to wait between attempts to connect. Can also take a
            Callable that takes the number of attempts so far, beginning at 0, and returns an int
@@ -220,14 +215,14 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
        :param int _attempts: Parameter used to keep track of the number of attempts in calling the
            connect function across recursive calls. Passed as an argument to retry_timeout if it
            is a Callable. This parameter should not be set by the initial caller.
        :raises dbt.exceptions.FailedToConnectError: Upon exhausting all retry attempts without
        :raises dbt.exceptions.FailedToConnectException: Upon exhausting all retry attempts without
            successfully acquiring a handle.
        :return: The given connection with its appropriate state and handle attributes set
            depending on whether we successfully acquired a handle or not.
        """
        timeout = retry_timeout(_attempts) if callable(retry_timeout) else retry_timeout
        if timeout < 0:
            raise dbt.exceptions.FailedToConnectError(
            raise dbt.exceptions.FailedToConnectException(
                "retry_timeout cannot be negative or return a negative time."
            )

@@ -235,7 +230,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
            # This guard is not perfect; others may add to the recursion limit (e.g. built-ins).
            connection.handle = None
            connection.state = ConnectionState.FAIL
            raise dbt.exceptions.FailedToConnectError("retry_limit cannot be negative")
            raise dbt.exceptions.FailedToConnectException("retry_limit cannot be negative")

        try:
            connection.handle = connect()
@@ -246,7 +241,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
            if retry_limit <= 0:
                connection.handle = None
                connection.state = ConnectionState.FAIL
                raise dbt.exceptions.FailedToConnectError(str(e))
                raise dbt.exceptions.FailedToConnectException(str(e))

            logger.debug(
                f"Got a retryable error when attempting to open a {cls.TYPE} connection.\n"
@@ -268,12 +263,12 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
        except Exception as e:
            connection.handle = None
            connection.state = ConnectionState.FAIL
            raise dbt.exceptions.FailedToConnectError(str(e))
            raise dbt.exceptions.FailedToConnectException(str(e))

    @abc.abstractmethod
    def cancel_open(self) -> Optional[List[str]]:
        """Cancel all open connections on the adapter. (passable)"""
        raise dbt.exceptions.NotImplementedError(
        raise dbt.exceptions.NotImplementedException(
            "`cancel_open` is not implemented for this adapter!"
        )

@@ -288,7 +283,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
        This should be thread-safe, or hold the lock if necessary. The given
        connection should not be in either in_use or available.
        """
        raise dbt.exceptions.NotImplementedError("`open` is not implemented for this adapter!")
        raise dbt.exceptions.NotImplementedException("`open` is not implemented for this adapter!")

    def release(self) -> None:
        with self.lock:
@@ -309,9 +304,9 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
        with self.lock:
            for connection in self.thread_connections.values():
                if connection.state not in {"closed", "init"}:
                    fire_event(ConnectionLeftOpenInCleanup(conn_name=cast_to_str(connection.name)))
                    fire_event(ConnectionLeftOpen(conn_name=connection.name))
                else:
                    fire_event(ConnectionClosedInCleanup(conn_name=cast_to_str(connection.name)))
                    fire_event(ConnectionClosed(conn_name=connection.name))
                self.close(connection)

        # garbage collect these connections
@@ -320,12 +315,16 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
    @abc.abstractmethod
    def begin(self) -> None:
        """Begin a transaction. (passable)"""
        raise dbt.exceptions.NotImplementedError("`begin` is not implemented for this adapter!")
        raise dbt.exceptions.NotImplementedException(
            "`begin` is not implemented for this adapter!"
        )

    @abc.abstractmethod
    def commit(self) -> None:
        """Commit a transaction. (passable)"""
        raise dbt.exceptions.NotImplementedError("`commit` is not implemented for this adapter!")
        raise dbt.exceptions.NotImplementedException(
            "`commit` is not implemented for this adapter!"
        )

    @classmethod
    def _rollback_handle(cls, connection: Connection) -> None:
@@ -333,40 +332,28 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
        try:
            connection.handle.rollback()
        except Exception:
            fire_event(
                RollbackFailed(
                    conn_name=cast_to_str(connection.name),
                    exc_info=traceback.format_exc(),
                    node_info=get_node_info(),
                )
            )
            fire_event(RollbackFailed(conn_name=connection.name))

    @classmethod
    def _close_handle(cls, connection: Connection) -> None:
        """Perform the actual close operation."""
        # On windows, sometimes connection handles don't have a close() attr.
        if hasattr(connection.handle, "close"):
            fire_event(
                ConnectionClosed(conn_name=cast_to_str(connection.name), node_info=get_node_info())
            )
            fire_event(ConnectionClosed2(conn_name=connection.name))
            connection.handle.close()
        else:
            fire_event(
                ConnectionLeftOpen(
                    conn_name=cast_to_str(connection.name), node_info=get_node_info()
                )
            )
            fire_event(ConnectionLeftOpen2(conn_name=connection.name))

    @classmethod
    def _rollback(cls, connection: Connection) -> None:
        """Roll back the given connection."""
        if connection.transaction_open is False:
            raise dbt.exceptions.DbtInternalError(
            raise dbt.exceptions.InternalException(
                f"Tried to rollback transaction on connection "
                f'"{connection.name}", but it does not have one open!'
            )

        fire_event(Rollback(conn_name=cast_to_str(connection.name), node_info=get_node_info()))
        fire_event(Rollback(conn_name=connection.name))
        cls._rollback_handle(connection)

        connection.transaction_open = False
@@ -378,7 +365,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
        return connection

        if connection.transaction_open and connection.handle:
            fire_event(Rollback(conn_name=cast_to_str(connection.name), node_info=get_node_info()))
            fire_event(Rollback(conn_name=connection.name))
            cls._rollback_handle(connection)
        connection.transaction_open = False

@@ -411,4 +398,6 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
        :return: A tuple of the query status and results (empty if fetch=False).
        :rtype: Tuple[AdapterResponse, agate.Table]
        """
        raise dbt.exceptions.NotImplementedError("`execute` is not implemented for this adapter!")
        raise dbt.exceptions.NotImplementedException(
            "`execute` is not implemented for this adapter!"
        )
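A minimal standalone sketch of the retry behavior that the docstring above describes, not dbt's actual implementation; the function name `connect_with_retries` is invented here:

```python
import time
from typing import Any, Callable, Iterable, Tuple, Type, Union

def connect_with_retries(
    connect: Callable[[], Any],
    retryable_exceptions: Iterable[Type[Exception]],
    retry_limit: int = 1,
    retry_timeout: Union[int, Callable[[int], int]] = 1,
) -> Any:
    # One initial attempt plus up to `retry_limit` retries; only the listed
    # exception types trigger a retry, everything else propagates.
    exceptions: Tuple[Type[Exception], ...] = tuple(retryable_exceptions)
    for attempt in range(retry_limit + 1):
        try:
            return connect()
        except exceptions:
            if attempt >= retry_limit:
                raise
            # retry_timeout may be a constant or a callable of the attempt count
            timeout = retry_timeout(attempt) if callable(retry_timeout) else retry_timeout
            time.sleep(timeout)
```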
@@ -2,63 +2,54 @@ import abc
from concurrent.futures import as_completed, Future
from contextlib import contextmanager
from datetime import datetime
from enum import Enum
import time
from itertools import chain
from typing import (
    Any,
    Optional,
    Tuple,
    Callable,
    Dict,
    Iterable,
    Iterator,
    Type,
    Dict,
    Any,
    List,
    Mapping,
    Optional,
    Set,
    Tuple,
    Type,
    Iterator,
    Union,
    Set,
)

from dbt.contracts.graph.nodes import ColumnLevelConstraint, ConstraintType, ModelLevelConstraint

import agate
import pytz

from dbt.exceptions import (
    DbtInternalError,
    DbtRuntimeError,
    DbtValidationError,
    MacroArgTypeError,
    MacroResultError,
    NotImplementedError,
    NullRelationCacheAttemptedError,
    NullRelationDropAttemptedError,
    QuoteConfigTypeError,
    RelationReturnedMultipleResultsError,
    RenameToNoneAttemptedError,
    SnapshotTargetIncompleteError,
    SnapshotTargetNotSnapshotTableError,
    UnexpectedNonTimestampError,
    UnexpectedNullError,
    raise_database_error,
    raise_compiler_error,
    invalid_type_error,
    get_relation_returned_multiple_results,
    InternalException,
    NotImplementedException,
    RuntimeException,
)

from dbt.adapters.protocol import AdapterConfig, ConnectionManagerProtocol
from dbt.adapters.protocol import (
    AdapterConfig,
    ConnectionManagerProtocol,
)
from dbt.clients.agate_helper import empty_table, merge_tables, table_from_rows
from dbt.clients.jinja import MacroGenerator
from dbt.contracts.graph.compiled import CompileResultNode, CompiledSeedNode
from dbt.contracts.graph.manifest import Manifest, MacroManifest
from dbt.contracts.graph.nodes import ResultNode
from dbt.events.functions import fire_event, warn_or_error
from dbt.contracts.graph.parsed import ParsedSeedNode
from dbt.exceptions import warn_or_error
from dbt.events.functions import fire_event
from dbt.events.types import (
    CacheMiss,
    ListRelations,
    CodeExecution,
    CodeExecutionStatus,
    CatalogGenerationError,
    ConstraintNotSupported,
    ConstraintNotEnforced,
)
from dbt.utils import filter_null_values, executor, cast_to_str, AttrDict
from dbt.utils import filter_null_values, executor

from dbt.adapters.base.connections import Connection, AdapterResponse
from dbt.adapters.base.meta import AdapterMeta, available
@@ -70,22 +61,19 @@ from dbt.adapters.base.relation import (
)
from dbt.adapters.base import Column as BaseColumn
from dbt.adapters.base import Credentials
from dbt.adapters.cache import RelationsCache, _make_ref_key_dict
from dbt import deprecations
from dbt.adapters.cache import RelationsCache, _make_key


SeedModel = Union[ParsedSeedNode, CompiledSeedNode]


GET_CATALOG_MACRO_NAME = "get_catalog"
FRESHNESS_MACRO_NAME = "collect_freshness"


class ConstraintSupport(str, Enum):
    ENFORCED = "enforced"
    NOT_ENFORCED = "not_enforced"
    NOT_SUPPORTED = "not_supported"


def _expect_row_value(key: str, row: agate.Row):
    if key not in row.keys():
        raise DbtInternalError(
        raise InternalException(
            'Got a row without "{}" column, columns: {}'.format(key, row.keys())
        )
    return row[key]
@@ -114,10 +102,18 @@ def _utc(dt: Optional[datetime], source: BaseRelation, field_name: str) -> datet
    assume the datetime is already for UTC and add the timezone.
    """
    if dt is None:
        raise UnexpectedNullError(field_name, source)
        raise raise_database_error(
            "Expected a non-null value when querying field '{}' of table "
            " {} but received value 'null' instead".format(field_name, source)
        )

    elif not hasattr(dt, "tzinfo"):
        raise UnexpectedNonTimestampError(field_name, source, dt)
        raise raise_database_error(
            "Expected a timestamp value when querying field '{}' of table "
            "{} but received value of type '{}' instead".format(
                field_name, source, type(dt).__name__
            )
        )

    elif dt.tzinfo:
        return dt.astimezone(pytz.UTC)
@@ -187,7 +183,6 @@ class BaseAdapter(metaclass=AdapterMeta):
    - truncate_relation
    - rename_relation
    - get_columns_in_relation
    - get_column_schema_from_query
    - expand_column_types
    - list_relations_without_caching
    - is_cancelable
@@ -214,14 +209,6 @@ class BaseAdapter(metaclass=AdapterMeta):
    # for use in materializations
    AdapterSpecificConfigs: Type[AdapterConfig] = AdapterConfig

    CONSTRAINT_SUPPORT = {
        ConstraintType.check: ConstraintSupport.NOT_SUPPORTED,
        ConstraintType.not_null: ConstraintSupport.ENFORCED,
        ConstraintType.unique: ConstraintSupport.NOT_ENFORCED,
        ConstraintType.primary_key: ConstraintSupport.NOT_ENFORCED,
        ConstraintType.foreign_key: ConstraintSupport.ENFORCED,
    }

    def __init__(self, config):
        self.config = config
        self.cache = RelationsCache()
@@ -256,7 +243,9 @@ class BaseAdapter(metaclass=AdapterMeta):
        return conn.name

    @contextmanager
    def connection_named(self, name: str, node: Optional[ResultNode] = None) -> Iterator[None]:
    def connection_named(
        self, name: str, node: Optional[CompileResultNode] = None
    ) -> Iterator[None]:
        try:
            if self.connections.query_header is not None:
                self.connections.query_header.set(name, node)
@@ -268,13 +257,13 @@ class BaseAdapter(metaclass=AdapterMeta):
            self.connections.query_header.reset()

    @contextmanager
    def connection_for(self, node: ResultNode) -> Iterator[None]:
    def connection_for(self, node: CompileResultNode) -> Iterator[None]:
        with self.connection_named(node.unique_id, node):
            yield

    @available.parse(lambda *a, **k: ("", empty_table()))
    def execute(
        self, sql: str, auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None
        self, sql: str, auto_begin: bool = False, fetch: bool = False
    ) -> Tuple[AdapterResponse, agate.Table]:
        """Execute the given SQL. This is a thin wrapper around
        ConnectionManager.execute.
@@ -283,35 +272,10 @@ class BaseAdapter(metaclass=AdapterMeta):
        :param bool auto_begin: If set, and dbt is not currently inside a
            transaction, automatically begin one.
        :param bool fetch: If set, fetch results.
        :param Optional[int] limit: If set, only fetch n number of rows
        :return: A tuple of the query status and results (empty if fetch=False).
        :rtype: Tuple[AdapterResponse, agate.Table]
        """
        return self.connections.execute(sql=sql, auto_begin=auto_begin, fetch=fetch, limit=limit)

    def validate_sql(self, sql: str) -> AdapterResponse:
        """Submit the given SQL to the engine for validation, but not execution.

        This should throw an appropriate exception if the input SQL is invalid, although
        in practice that will generally be handled by delegating to an existing method
        for execution and allowing the error handler to take care of the rest.

        :param str sql: The sql to validate
        """
        raise NotImplementedError("`validate_sql` is not implemented for this adapter!")

    @available.parse(lambda *a, **k: [])
    def get_column_schema_from_query(self, sql: str) -> List[BaseColumn]:
        """Get a list of the Columns with names and data types from the given sql."""
        _, cursor = self.connections.add_select_query(sql)
        columns = [
            self.Column.create(
                column_name, self.connections.data_type_code_to_name(column_type_code)
            )
            # https://peps.python.org/pep-0249/#description
            for column_name, column_type_code, *_ in cursor.description
        ]
        return columns
        return self.connections.execute(sql=sql, auto_begin=auto_begin, fetch=fetch)

    @available.parse(lambda *a, **k: ("", empty_table()))
    def get_partitions_metadata(self, table: str) -> Tuple[agate.Table]:
@@ -379,7 +343,7 @@ class BaseAdapter(metaclass=AdapterMeta):
            fire_event(
                CacheMiss(
                    conn_name=self.nice_connection_name(),
                    database=cast_to_str(database),
                    database=database,
                    schema=schema,
                )
            )
@@ -395,7 +359,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        return {
            self.Relation.create_from(self.config, node).without_identifier()
            for node in manifest.nodes.values()
            if (node.is_relational and not node.is_ephemeral_model and not node.is_external_node)
            if (node.is_relational and not node.is_ephemeral_model)
        }

    def _get_catalog_schemas(self, manifest: Manifest) -> SchemaSearchMap:
@@ -408,7 +372,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        lowercase strings.
        """
        info_schema_name_map = SchemaSearchMap()
        nodes: Iterator[ResultNode] = chain(
        nodes: Iterator[CompileResultNode] = chain(
            [
                node
                for node in manifest.nodes.values()
@@ -426,7 +390,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        return info_schema_name_map

    def _relations_cache_for_schemas(
        self, manifest: Manifest, cache_schemas: Optional[Set[BaseRelation]] = None
        self, manifest: Manifest, cache_schemas: Set[BaseRelation] = None
    ) -> None:
        """Populate the relations cache for the given schemas. Returns an
        iterable of the schemas populated, as strings.
@@ -462,7 +426,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        self,
        manifest: Manifest,
        clear: bool = False,
        required_schemas: Optional[Set[BaseRelation]] = None,
        required_schemas: Set[BaseRelation] = None,
    ) -> None:
        """Run a query that gets a populated cache of the relations in the
        database and set the cache on this adapter.
@@ -477,7 +441,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        """Cache a new relation in dbt. It will show up in `list relations`."""
        if relation is None:
            name = self.nice_connection_name()
            raise NullRelationCacheAttemptedError(name)
            raise_compiler_error("Attempted to cache a null relation for {}".format(name))
        self.cache.add(relation)
        # so jinja doesn't render things
        return ""
@@ -489,7 +453,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        """
        if relation is None:
            name = self.nice_connection_name()
            raise NullRelationDropAttemptedError(name)
            raise_compiler_error("Attempted to drop a null relation for {}".format(name))
        self.cache.drop(relation)
        return ""

@@ -506,7 +470,9 @@ class BaseAdapter(metaclass=AdapterMeta):
            name = self.nice_connection_name()
            src_name = _relation_name(from_relation)
            dst_name = _relation_name(to_relation)
            raise RenameToNoneAttemptedError(src_name, dst_name, name)
            raise_compiler_error(
                "Attempted to rename {} to {} for {}".format(src_name, dst_name, name)
            )

        self.cache.rename(from_relation, to_relation)
        return ""
@@ -518,12 +484,12 @@ class BaseAdapter(metaclass=AdapterMeta):
    @abc.abstractmethod
    def date_function(cls) -> str:
        """Get the date function used by this adapter's database."""
        raise NotImplementedError("`date_function` is not implemented for this adapter!")
        raise NotImplementedException("`date_function` is not implemented for this adapter!")

    @classmethod
    @abc.abstractmethod
    def is_cancelable(cls) -> bool:
        raise NotImplementedError("`is_cancelable` is not implemented for this adapter!")
        raise NotImplementedException("`is_cancelable` is not implemented for this adapter!")

    ###
    # Abstract methods about schemas
@@ -531,7 +497,7 @@ class BaseAdapter(metaclass=AdapterMeta):
    @abc.abstractmethod
    def list_schemas(self, database: str) -> List[str]:
        """Get a list of existing schemas in database"""
        raise NotImplementedError("`list_schemas` is not implemented for this adapter!")
        raise NotImplementedException("`list_schemas` is not implemented for this adapter!")

    @available.parse(lambda *a, **k: False)
    def check_schema_exists(self, database: str, schema: str) -> bool:
@@ -554,13 +520,13 @@ class BaseAdapter(metaclass=AdapterMeta):

        *Implementors must call self.cache.drop() to preserve cache state!*
        """
        raise NotImplementedError("`drop_relation` is not implemented for this adapter!")
        raise NotImplementedException("`drop_relation` is not implemented for this adapter!")

    @abc.abstractmethod
    @available.parse_none
    def truncate_relation(self, relation: BaseRelation) -> None:
        """Truncate the given relation."""
        raise NotImplementedError("`truncate_relation` is not implemented for this adapter!")
        raise NotImplementedException("`truncate_relation` is not implemented for this adapter!")

    @abc.abstractmethod
    @available.parse_none
@@ -569,13 +535,15 @@ class BaseAdapter(metaclass=AdapterMeta):

        Implementors must call self.cache.rename() to preserve cache state.
        """
        raise NotImplementedError("`rename_relation` is not implemented for this adapter!")
        raise NotImplementedException("`rename_relation` is not implemented for this adapter!")

    @abc.abstractmethod
    @available.parse_list
    def get_columns_in_relation(self, relation: BaseRelation) -> List[BaseColumn]:
        """Get a list of the columns in the given Relation."""
        raise NotImplementedError("`get_columns_in_relation` is not implemented for this adapter!")
        raise NotImplementedException(
            "`get_columns_in_relation` is not implemented for this adapter!"
        )

    @available.deprecated("get_columns_in_relation", lambda *a, **k: [])
    def get_columns_in_table(self, schema: str, identifier: str) -> List[BaseColumn]:
@@ -597,7 +565,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        :param self.Relation current: A relation that currently exists in the
            database with columns of unspecified types.
        """
        raise NotImplementedError(
        raise NotImplementedException(
            "`expand_target_column_types` is not implemented for this adapter!"
        )

@@ -612,8 +580,8 @@ class BaseAdapter(metaclass=AdapterMeta):
        :return: The relations in schema
        :rtype: List[self.Relation]
        """
        raise NotImplementedError(
            "`list_relations_without_caching` is not implemented for this adapter!"
        raise NotImplementedException(
            "`list_relations_without_caching` is not implemented for this " "adapter!"
        )

    ###
@@ -654,7 +622,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        to_relation.
        """
        if not isinstance(from_relation, self.Relation):
            raise MacroArgTypeError(
            invalid_type_error(
                method_name="get_missing_columns",
                arg_name="from_relation",
                got_value=from_relation,
@@ -662,7 +630,7 @@ class BaseAdapter(metaclass=AdapterMeta):
            )

        if not isinstance(to_relation, self.Relation):
            raise MacroArgTypeError(
            invalid_type_error(
                method_name="get_missing_columns",
                arg_name="to_relation",
                got_value=to_relation,
@@ -683,11 +651,11 @@ class BaseAdapter(metaclass=AdapterMeta):
        expected columns.

        :param Relation relation: The relation to check
        :raises InvalidMacroArgType: If the columns are
        :raises CompilationException: If the columns are
            incorrect.
        """
        if not isinstance(relation, self.Relation):
            raise MacroArgTypeError(
            invalid_type_error(
                method_name="valid_snapshot_target",
                arg_name="relation",
                got_value=relation,
@@ -708,16 +676,24 @@ class BaseAdapter(metaclass=AdapterMeta):

        if missing:
            if extra:
                raise SnapshotTargetIncompleteError(extra, missing)
                msg = (
                    'Snapshot target has ("{}") but not ("{}") - is it an '
                    "unmigrated previous version archive?".format(
                        '", "'.join(extra), '", "'.join(missing)
                    )
                )
            else:
                raise SnapshotTargetNotSnapshotTableError(missing)
                msg = 'Snapshot target is not a snapshot table (missing "{}")'.format(
                    '", "'.join(missing)
                )
            raise_compiler_error(msg)

    @available.parse_none
    def expand_target_column_types(
        self, from_relation: BaseRelation, to_relation: BaseRelation
    ) -> None:
        if not isinstance(from_relation, self.Relation):
            raise MacroArgTypeError(
            invalid_type_error(
                method_name="expand_target_column_types",
                arg_name="from_relation",
                got_value=from_relation,
@@ -725,7 +701,7 @@ class BaseAdapter(metaclass=AdapterMeta):
            )

        if not isinstance(to_relation, self.Relation):
            raise MacroArgTypeError(
            invalid_type_error(
                method_name="expand_target_column_types",
                arg_name="to_relation",
                got_value=to_relation,
@@ -748,23 +724,11 @@ class BaseAdapter(metaclass=AdapterMeta):
        # we can't build the relations cache because we don't have a
        # manifest so we can't run any operations.
        relations = self.list_relations_without_caching(schema_relation)

        # if the cache is already populated, add this schema in
        # otherwise, skip updating the cache and just ignore
        if self.cache:
            for relation in relations:
                self.cache.add(relation)
            if not relations:
                # it's possible that there were no relations in some schemas. We want
                # to insert the schemas we query into the cache's `.schemas` attribute
                # so we can check it later
                self.cache.update_schemas([(database, schema)])

        fire_event(
            ListRelations(
                database=cast_to_str(database),
                database=database,
                schema=schema,
                relations=[_make_ref_key_dict(x) for x in relations],
                relations=[_make_key(x) for x in relations],
            )
        )

@@ -796,6 +760,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        schema: str,
        identifier: str,
    ) -> List[BaseRelation]:

        matches = []

        search = self._make_match_kwargs(database, schema, identifier)
@@ -818,7 +783,7 @@ class BaseAdapter(metaclass=AdapterMeta):
                "schema": schema,
                "database": database,
            }
            raise RelationReturnedMultipleResultsError(kwargs, matches)
            get_relation_returned_multiple_results(kwargs, matches)

        elif matches:
            return matches[0]
@@ -840,20 +805,20 @@ class BaseAdapter(metaclass=AdapterMeta):
    @available.parse_none
    def create_schema(self, relation: BaseRelation):
        """Create the given schema if it does not exist."""
        raise NotImplementedError("`create_schema` is not implemented for this adapter!")
        raise NotImplementedException("`create_schema` is not implemented for this adapter!")

    @abc.abstractmethod
    @available.parse_none
    def drop_schema(self, relation: BaseRelation):
        """Drop the given schema (and everything in it) if it exists."""
        raise NotImplementedError("`drop_schema` is not implemented for this adapter!")
        raise NotImplementedException("`drop_schema` is not implemented for this adapter!")

    @available
    @classmethod
    @abc.abstractmethod
    def quote(cls, identifier: str) -> str:
        """Quote the given identifier, as appropriate for the database."""
        raise NotImplementedError("`quote` is not implemented for this adapter!")
        raise NotImplementedException("`quote` is not implemented for this adapter!")

    @available
    def quote_as_configured(self, identifier: str, quote_key: str) -> str:
@@ -882,7 +847,10 @@ class BaseAdapter(metaclass=AdapterMeta):
        elif quote_config is None:
            pass
        else:
            raise QuoteConfigTypeError(quote_config)
            raise_compiler_error(
                f'The seed configuration value of "quote_columns" has an '
                f"invalid type {type(quote_config)}"
            )

        if quote_columns:
            return self.quote(column)
@@ -903,7 +871,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        :param col_idx: The index into the agate table for the column.
        :return: The name of the type in the database
        """
        raise NotImplementedError("`convert_text_type` is not implemented for this adapter!")
        raise NotImplementedException("`convert_text_type` is not implemented for this adapter!")

    @classmethod
    @abc.abstractmethod
@@ -915,7 +883,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        :param col_idx: The index into the agate table for the column.
        :return: The name of the type in the database
        """
        raise NotImplementedError("`convert_number_type` is not implemented for this adapter!")
        raise NotImplementedException("`convert_number_type` is not implemented for this adapter!")

    @classmethod
    @abc.abstractmethod
@@ -927,7 +895,9 @@ class BaseAdapter(metaclass=AdapterMeta):
        :param col_idx: The index into the agate table for the column.
        :return: The name of the type in the database
        """
        raise NotImplementedError("`convert_boolean_type` is not implemented for this adapter!")
        raise NotImplementedException(
            "`convert_boolean_type` is not implemented for this adapter!"
        )

    @classmethod
    @abc.abstractmethod
@@ -939,7 +909,9 @@ class BaseAdapter(metaclass=AdapterMeta):
        :param col_idx: The index into the agate table for the column.
        :return: The name of the type in the database
        """
        raise NotImplementedError("`convert_datetime_type` is not implemented for this adapter!")
        raise NotImplementedException(
            "`convert_datetime_type` is not implemented for this adapter!"
        )

    @classmethod
    @abc.abstractmethod
@@ -951,7 +923,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        :param col_idx: The index into the agate table for the column.
        :return: The name of the type in the database
        """
        raise NotImplementedError("`convert_date_type` is not implemented for this adapter!")
        raise NotImplementedException("`convert_date_type` is not implemented for this adapter!")

    @classmethod
    @abc.abstractmethod
@@ -963,7 +935,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        :param col_idx: The index into the agate table for the column.
        :return: The name of the type in the database
        """
        raise NotImplementedError("`convert_time_type` is not implemented for this adapter!")
        raise NotImplementedException("`convert_time_type` is not implemented for this adapter!")

    @available
    @classmethod
@@ -996,9 +968,9 @@ class BaseAdapter(metaclass=AdapterMeta):
        manifest: Optional[Manifest] = None,
        project: Optional[str] = None,
        context_override: Optional[Dict[str, Any]] = None,
        kwargs: Optional[Dict[str, Any]] = None,
        kwargs: Dict[str, Any] = None,
        text_only_columns: Optional[Iterable[str]] = None,
    ) -> AttrDict:
    ) -> agate.Table:
        """Look macro_name up in the manifest and execute its results.

        :param macro_name: The name of the macro to execute.
@@ -1030,7 +1002,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        else:
            package_name = 'the "{}" package'.format(project)

        raise DbtRuntimeError(
        raise RuntimeException(
            'dbt could not find a macro with the name "{}" in {}'.format(
                macro_name, package_name
            )
@@ -1073,6 +1045,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        schemas: Set[str],
        manifest: Manifest,
    ) -> agate.Table:

        kwargs = {"information_schema": information_schema, "schemas": schemas}
        table = self.execute_macro(
            GET_CATALOG_MACRO_NAME,
@@ -1082,7 +1055,7 @@ class BaseAdapter(metaclass=AdapterMeta):
            manifest=manifest,
        )

        results = self._catalog_filter_table(table, manifest)  # type: ignore[arg-type]
        results = self._catalog_filter_table(table, manifest)
        return results

    def get_catalog(self, manifest: Manifest) -> Tuple[agate.Table, List[Exception]]:
@@ -1114,7 +1087,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        loaded_at_field: str,
        filter: Optional[str],
        manifest: Optional[Manifest] = None,
    ) -> Tuple[Optional[AdapterResponse], Dict[str, Any]]:
    ) -> Dict[str, Any]:
        """Calculate the freshness of sources in dbt, and return it"""
        kwargs: Dict[str, Any] = {
            "source": source,
@@ -1123,23 +1096,15 @@ class BaseAdapter(metaclass=AdapterMeta):
        }

        # run the macro
        # in older versions of dbt-core, the 'collect_freshness' macro returned the table of results directly
        # starting in v1.5, by default, we return both the table and the adapter response (metadata about the query)
        result: Union[
            AttrDict,  # current: contains AdapterResponse + agate.Table
            agate.Table,  # previous: just table
        ]
        result = self.execute_macro(FRESHNESS_MACRO_NAME, kwargs=kwargs, manifest=manifest)
        if isinstance(result, agate.Table):
            deprecations.warn("collect-freshness-return-signature")
            adapter_response = None
            table = result
        else:
            adapter_response, table = result.response, result.table  # type: ignore[attr-defined]
        table = self.execute_macro(FRESHNESS_MACRO_NAME, kwargs=kwargs, manifest=manifest)
        # now we have a 1-row table of the maximum `loaded_at_field` value and
        # the current time according to the db.
        if len(table) != 1 or len(table[0]) != 2:
            raise MacroResultError(FRESHNESS_MACRO_NAME, table)
            raise_compiler_error(
                'Got an invalid result from "{}" macro: {}'.format(
                    FRESHNESS_MACRO_NAME, [tuple(r) for r in table]
                )
            )
        if table[0][0] is None:
            # no records in the table, so really the max_loaded_at was
            # infinitely long ago. Just call it 0:00 January 1 year UTC
@@ -1149,12 +1114,11 @@ class BaseAdapter(metaclass=AdapterMeta):

        snapshotted_at = _utc(table[0][1], source, loaded_at_field)
        age = (snapshotted_at - max_loaded_at).total_seconds()
        freshness = {
        return {
            "max_loaded_at": max_loaded_at,
            "snapshotted_at": snapshotted_at,
            "age": age,
        }
        return adapter_response, freshness
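Toy values showing the freshness arithmetic above; `age` is simply the gap, in seconds, between the source's newest `loaded_at` value and the database's current time:

```python
from datetime import datetime, timezone

max_loaded_at = datetime(2023, 1, 1, 12, 0, tzinfo=timezone.utc)
snapshotted_at = datetime(2023, 1, 1, 12, 30, tzinfo=timezone.utc)
age = (snapshotted_at - max_loaded_at).total_seconds()  # 1800.0
```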
    def pre_model_hook(self, config: Mapping[str, Any]) -> Any:
        """A hook for running some operation before the model materialization
@@ -1217,7 +1181,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        elif location == "prepend":
            return f"'{value}' || {add_to}"
        else:
            raise DbtRuntimeError(f'Got an unexpected location value of "{location}"')
            raise RuntimeException(f'Got an unexpected location value of "{location}"')

    def get_rows_different_sql(
        self,
@@ -1275,7 +1239,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        return self.generate_python_submission_response(submission_result)

    def generate_python_submission_response(self, submission_result: Any) -> AdapterResponse:
        raise NotImplementedError(
        raise NotImplementedException(
            "Your adapter needs to implement generate_python_submission_response"
        )

@@ -1299,7 +1263,7 @@ class BaseAdapter(metaclass=AdapterMeta):
            valid_strategies.append("default")
        builtin_strategies = self.builtin_incremental_strategies()
        if strategy in builtin_strategies and strategy not in valid_strategies:
            raise DbtRuntimeError(
            raise RuntimeException(
                f"The incremental strategy '{strategy}' is not valid for this adapter"
            )

@@ -1307,7 +1271,7 @@ class BaseAdapter(metaclass=AdapterMeta):
        macro_name = f"get_incremental_{strategy}_sql"
        # The model_context should have MacroGenerator callable objects for all macros
        if macro_name not in model_context:
            raise DbtRuntimeError(
            raise RuntimeException(
                'dbt could not find an incremental strategy macro with the name "{}" in {}'.format(
                    macro_name, self.config.project_name
                )
@@ -1316,119 +1280,6 @@ class BaseAdapter(metaclass=AdapterMeta):
        # This returns a callable macro
        return model_context[macro_name]

    @classmethod
    def _parse_column_constraint(cls, raw_constraint: Dict[str, Any]) -> ColumnLevelConstraint:
        try:
            ColumnLevelConstraint.validate(raw_constraint)
            return ColumnLevelConstraint.from_dict(raw_constraint)
        except Exception:
            raise DbtValidationError(f"Could not parse constraint: {raw_constraint}")

    @classmethod
    def render_column_constraint(cls, constraint: ColumnLevelConstraint) -> Optional[str]:
        """Render the given constraint as DDL text. Should be overridden by adapters which need custom constraint
        rendering."""
        constraint_expression = constraint.expression or ""

        rendered_column_constraint = None
        if constraint.type == ConstraintType.check and constraint_expression:
            rendered_column_constraint = f"check ({constraint_expression})"
        elif constraint.type == ConstraintType.not_null:
            rendered_column_constraint = f"not null {constraint_expression}"
        elif constraint.type == ConstraintType.unique:
            rendered_column_constraint = f"unique {constraint_expression}"
        elif constraint.type == ConstraintType.primary_key:
            rendered_column_constraint = f"primary key {constraint_expression}"
        elif constraint.type == ConstraintType.foreign_key and constraint_expression:
            rendered_column_constraint = f"references {constraint_expression}"
        elif constraint.type == ConstraintType.custom and constraint_expression:
            rendered_column_constraint = constraint_expression

        if rendered_column_constraint:
            rendered_column_constraint = rendered_column_constraint.strip()

        return rendered_column_constraint

    @available
    @classmethod
    def render_raw_columns_constraints(cls, raw_columns: Dict[str, Dict[str, Any]]) -> List:
        rendered_column_constraints = []

        for v in raw_columns.values():
            col_name = cls.quote(v["name"]) if v.get("quote") else v["name"]
            rendered_column_constraint = [f"{col_name} {v['data_type']}"]
            for con in v.get("constraints", None):
                constraint = cls._parse_column_constraint(con)
                c = cls.process_parsed_constraint(constraint, cls.render_column_constraint)
                if c is not None:
                    rendered_column_constraint.append(c)
            rendered_column_constraints.append(" ".join(rendered_column_constraint))

        return rendered_column_constraints

    @classmethod
    def process_parsed_constraint(
        cls, parsed_constraint: Union[ColumnLevelConstraint, ModelLevelConstraint], render_func
    ) -> Optional[str]:
        if (
            parsed_constraint.warn_unsupported
            and cls.CONSTRAINT_SUPPORT[parsed_constraint.type] == ConstraintSupport.NOT_SUPPORTED
        ):
            warn_or_error(
                ConstraintNotSupported(constraint=parsed_constraint.type.value, adapter=cls.type())
            )
        if (
            parsed_constraint.warn_unenforced
            and cls.CONSTRAINT_SUPPORT[parsed_constraint.type] == ConstraintSupport.NOT_ENFORCED
        ):
            warn_or_error(
                ConstraintNotEnforced(constraint=parsed_constraint.type.value, adapter=cls.type())
            )
        if cls.CONSTRAINT_SUPPORT[parsed_constraint.type] != ConstraintSupport.NOT_SUPPORTED:
            return render_func(parsed_constraint)

        return None

    @classmethod
    def _parse_model_constraint(cls, raw_constraint: Dict[str, Any]) -> ModelLevelConstraint:
        try:
            ModelLevelConstraint.validate(raw_constraint)
            c = ModelLevelConstraint.from_dict(raw_constraint)
            return c
        except Exception:
            raise DbtValidationError(f"Could not parse constraint: {raw_constraint}")

    @available
    @classmethod
    def render_raw_model_constraints(cls, raw_constraints: List[Dict[str, Any]]) -> List[str]:
        return [c for c in map(cls.render_raw_model_constraint, raw_constraints) if c is not None]

    @classmethod
    def render_raw_model_constraint(cls, raw_constraint: Dict[str, Any]) -> Optional[str]:
        constraint = cls._parse_model_constraint(raw_constraint)
        return cls.process_parsed_constraint(constraint, cls.render_model_constraint)

    @classmethod
    def render_model_constraint(cls, constraint: ModelLevelConstraint) -> Optional[str]:
        """Render the given constraint as DDL text. Should be overridden by adapters which need custom constraint
        rendering."""
        constraint_prefix = f"constraint {constraint.name} " if constraint.name else ""
        column_list = ", ".join(constraint.columns)
        if constraint.type == ConstraintType.check and constraint.expression:
            return f"{constraint_prefix}check ({constraint.expression})"
        elif constraint.type == ConstraintType.unique:
            constraint_expression = f" {constraint.expression}" if constraint.expression else ""
            return f"{constraint_prefix}unique{constraint_expression} ({column_list})"
        elif constraint.type == ConstraintType.primary_key:
            constraint_expression = f" {constraint.expression}" if constraint.expression else ""
            return f"{constraint_prefix}primary key{constraint_expression} ({column_list})"
        elif constraint.type == ConstraintType.foreign_key and constraint.expression:
            return f"{constraint_prefix}foreign key ({column_list}) references {constraint.expression}"
        elif constraint.type == ConstraintType.custom and constraint.expression:
            return f"{constraint_prefix}{constraint.expression}"
        else:
            return None
COLUMNS_EQUAL_SQL = """
|
||||
with diff_count as (
|
||||
@@ -1462,6 +1313,7 @@ join diff_count using (id)
|
||||
def catch_as_completed(
|
||||
futures, # typing: List[Future[agate.Table]]
|
||||
) -> Tuple[agate.Table, List[Exception]]:
|
||||
|
||||
# catalogs: agate.Table = agate.Table(rows=[])
|
||||
tables: List[agate.Table] = []
|
||||
exceptions: List[Exception] = []
|
||||
@@ -1475,7 +1327,7 @@ def catch_as_completed(
|
||||
elif isinstance(exc, KeyboardInterrupt) or not isinstance(exc, Exception):
|
||||
raise exc
|
||||
else:
|
||||
warn_or_error(CatalogGenerationError(exc=str(exc)))
|
||||
warn_or_error(f"Encountered an error while generating catalog: {str(exc)}")
|
||||
# exc is not None, derives from Exception, and isn't ctrl+c
|
||||
exceptions.append(exc)
|
||||
return merge_tables(tables), exceptions
|
||||
|
||||
@@ -1,17 +1,17 @@
from typing import List, Optional, Type

from dbt.adapters.base import Credentials
from dbt.exceptions import CompilationError
from dbt.exceptions import CompilationException
from dbt.adapters.protocol import AdapterProtocol


def project_name_from_path(include_path: str) -> str:
# avoid an import cycle
from dbt.config.project import PartialProject
from dbt.config.project import Project

partial = PartialProject.from_project_root(include_path)
partial = Project.partial_load(include_path)
if partial.project_name is None:
raise CompilationError(f"Invalid project at {include_path}: name not set!")
raise CompilationException(f"Invalid project at {include_path}: name not set!")
return partial.project_name


@@ -5,9 +5,9 @@ from dbt.clients.jinja import QueryStringGenerator

from dbt.context.manifest import generate_query_header_context
from dbt.contracts.connection import AdapterRequiredConfig, QueryComment
from dbt.contracts.graph.nodes import ResultNode
from dbt.contracts.graph.compiled import CompileResultNode
from dbt.contracts.graph.manifest import Manifest
from dbt.exceptions import DbtRuntimeError
from dbt.exceptions import RuntimeException


class NodeWrapper:
@@ -48,7 +48,7 @@ class _QueryComment(local):
if isinstance(comment, str) and "*/" in comment:
# tell the user "no" so they don't hurt themselves by writing
# garbage
raise DbtRuntimeError(f'query comment contains illegal value "*/": {comment}')
raise RuntimeException(f'query comment contains illegal value "*/": {comment}')
self.query_comment = comment
self.append = append

@@ -90,7 +90,7 @@ class MacroQueryStringSetter:
def reset(self):
self.set("master", None)

def set(self, name: str, node: Optional[ResultNode]):
def set(self, name: str, node: Optional[CompileResultNode]):
wrapped: Optional[NodeWrapper] = None
if node is not None:
wrapped = NodeWrapper(node)

@@ -1,8 +1,9 @@
from collections.abc import Hashable
from dataclasses import dataclass, field
from typing import Optional, TypeVar, Any, Type, Dict, Iterator, Tuple, Set
from dataclasses import dataclass
from typing import Optional, TypeVar, Any, Type, Dict, Union, Iterator, Tuple, Set

from dbt.contracts.graph.nodes import SourceDefinition, ManifestNode, ResultNode, ParsedNode
from dbt.contracts.graph.compiled import CompiledNode
from dbt.contracts.graph.parsed import ParsedSourceDefinition, ParsedNode
from dbt.contracts.relation import (
RelationType,
ComponentName,
@@ -11,11 +12,7 @@ from dbt.contracts.relation import (
Policy,
Path,
)
from dbt.exceptions import (
ApproximateMatchError,
DbtInternalError,
MultipleDatabasesNotAllowedError,
)
from dbt.exceptions import InternalException
from dbt.node_types import NodeType
from dbt.utils import filter_null_values, deep_merge, classproperty

@@ -30,10 +27,8 @@ class BaseRelation(FakeAPIObject, Hashable):
path: Path
type: Optional[RelationType] = None
quote_character: str = '"'
# Python 3.11 requires that these use default_factory instead of simple default
# ValueError: mutable default <class 'dbt.contracts.relation.Policy'> for field include_policy is not allowed: use default_factory
include_policy: Policy = field(default_factory=lambda: Policy())
quote_policy: Policy = field(default_factory=lambda: Policy())
include_policy: Policy = Policy()
quote_policy: Policy = Policy()
dbt_created: bool = False

def _is_exactish_match(self, field: ComponentName, value: str) -> bool:
@@ -44,9 +39,9 @@ class BaseRelation(FakeAPIObject, Hashable):

@classmethod
def _get_field_named(cls, field_name):
for f, _ in cls._get_fields():
if f.name == field_name:
return f
for field, _ in cls._get_fields():
if field.name == field_name:
return field
# this should be unreachable
raise ValueError(f"BaseRelation has no {field_name} field!")

@@ -57,11 +52,11 @@ class BaseRelation(FakeAPIObject, Hashable):

@classmethod
def get_default_quote_policy(cls) -> Policy:
return cls._get_field_named("quote_policy").default_factory()
return cls._get_field_named("quote_policy").default

@classmethod
def get_default_include_policy(cls) -> Policy:
return cls._get_field_named("include_policy").default_factory()
return cls._get_field_named("include_policy").default

def get(self, key, default=None):
"""Override `.get` to return a metadata object so we don't break
@@ -87,7 +82,7 @@ class BaseRelation(FakeAPIObject, Hashable):

if not search:
# nothing was passed in
raise dbt.exceptions.DbtRuntimeError(
raise dbt.exceptions.RuntimeException(
"Tried to match relation, but no search path was passed!"
)

@@ -104,7 +99,7 @@ class BaseRelation(FakeAPIObject, Hashable):

if approximate_match and not exact_match:
target = self.create(database=database, schema=schema, identifier=identifier)
raise ApproximateMatchError(target, self)
dbt.exceptions.approximate_relation_match(target, self)

return exact_match

@@ -189,7 +184,7 @@ class BaseRelation(FakeAPIObject, Hashable):
)

@classmethod
def create_from_source(cls: Type[Self], source: SourceDefinition, **kwargs: Any) -> Self:
def create_from_source(cls: Type[Self], source: ParsedSourceDefinition, **kwargs: Any) -> Self:
source_quoting = source.quoting.to_dict(omit_none=True)
source_quoting.pop("column", None)
quote_policy = deep_merge(
@@ -214,7 +209,7 @@ class BaseRelation(FakeAPIObject, Hashable):
def create_ephemeral_from_node(
cls: Type[Self],
config: HasQuoting,
node: ManifestNode,
node: Union[ParsedNode, CompiledNode],
) -> Self:
# Note that ephemeral models are based on the name.
identifier = cls.add_ephemeral_prefix(node.name)
@@ -227,7 +222,7 @@ class BaseRelation(FakeAPIObject, Hashable):
def create_from_node(
cls: Type[Self],
config: HasQuoting,
node,
node: Union[ParsedNode, CompiledNode],
quote_policy: Optional[Dict[str, bool]] = None,
**kwargs: Any,
) -> Self:
@@ -248,20 +243,20 @@ class BaseRelation(FakeAPIObject, Hashable):
def create_from(
cls: Type[Self],
config: HasQuoting,
node: ResultNode,
node: Union[CompiledNode, ParsedNode, ParsedSourceDefinition],
**kwargs: Any,
) -> Self:
if node.resource_type == NodeType.Source:
if not isinstance(node, SourceDefinition):
raise DbtInternalError(
"type mismatch, expected SourceDefinition but got {}".format(type(node))
if not isinstance(node, ParsedSourceDefinition):
raise InternalException(
"type mismatch, expected ParsedSourceDefinition but got {}".format(type(node))
)
return cls.create_from_source(node, **kwargs)
else:
# Can't use ManifestNode here because of parameterized generics
if not isinstance(node, (ParsedNode)):
raise DbtInternalError(
f"type mismatch, expected ManifestNode but got {type(node)}"
if not isinstance(node, (ParsedNode, CompiledNode)):
raise InternalException(
"type mismatch, expected ParsedNode or CompiledNode but "
"got {}".format(type(node))
)
return cls.create_from_node(config, node, **kwargs)

@@ -328,10 +323,6 @@ class BaseRelation(FakeAPIObject, Hashable):
def is_view(self) -> bool:
return self.type == RelationType.View

@property
def is_materialized_view(self) -> bool:
return self.type == RelationType.MaterializedView

@classproperty
def Table(cls) -> str:
return str(RelationType.Table)
@@ -348,10 +339,6 @@ class BaseRelation(FakeAPIObject, Hashable):
def External(cls) -> str:
return str(RelationType.External)

@classproperty
def MaterializedView(cls) -> str:
return str(RelationType.MaterializedView)

@classproperty
def get_relation_type(cls) -> Type[RelationType]:
return RelationType
@@ -366,7 +353,7 @@ class InformationSchema(BaseRelation):

def __post_init__(self):
if not isinstance(self.information_schema_view, (type(None), str)):
raise dbt.exceptions.CompilationError(
raise dbt.exceptions.CompilationException(
"Got an invalid name: {}".format(self.information_schema_view)
)

@@ -450,7 +437,7 @@ class SchemaSearchMap(Dict[InformationSchema, Set[Optional[str]]]):
if not allow_multiple_databases:
seen = {r.database.lower() for r in self if r.database}
if len(seen) > 1:
raise MultipleDatabasesNotAllowedError(seen)
dbt.exceptions.raise_compiler_error(str(seen))

for information_schema_name, schema in self.search():
path = {"database": information_schema_name.database, "schema": schema}

@@ -1,23 +1,28 @@
import re
import threading
from copy import deepcopy
from typing import Any, Dict, Iterable, List, Optional, Set, Tuple

from dbt.adapters.reference_keys import (
_make_ref_key,
_make_ref_key_dict,
_ReferenceKey,
from dbt.adapters.reference_keys import _make_key, _ReferenceKey
import dbt.exceptions
from dbt.events.functions import fire_event
from dbt.events.types import (
AddLink,
AddRelation,
DropCascade,
DropMissingRelation,
DropRelation,
DumpAfterAddGraph,
DumpAfterRenameSchema,
DumpBeforeAddGraph,
DumpBeforeRenameSchema,
RenameSchema,
TemporaryRelation,
UncachedRelation,
UpdateReference,
)
from dbt.exceptions import (
DependentLinkNotCachedError,
NewNameAlreadyInCacheError,
NoneRelationFoundError,
ReferencedLinkNotCachedError,
TruncatedModelNameCausedCollisionError,
)
from dbt.events.functions import fire_event, fire_event_if
from dbt.events.types import CacheAction, CacheDumpGraph
from dbt.flags import get_flags
from dbt.utils import lowercase
from dbt.helper_types import Lazy


def dot_separated(key: _ReferenceKey) -> str:
@@ -77,7 +82,7 @@ class _CachedRelation:

:return _ReferenceKey: A key for this relation.
"""
return _make_ref_key(self)
return _make_key(self)

def add_reference(self, referrer: "_CachedRelation"):
"""Add a reference from referrer to self, indicating that if this node
@@ -140,7 +145,11 @@ class _CachedRelation:
:raises InternalError: If the new key already exists.
"""
if new_key in self.referenced_by:
raise NewNameAlreadyInCacheError(old_key, new_key)
dbt.exceptions.raise_cache_inconsistent(
'in rename of "{}" -> "{}", new name is in the cache already'.format(
old_key, new_key
)
)

if old_key not in self.referenced_by:
return
@@ -229,7 +238,7 @@ class RelationsCache:
# self.relations or any cache entry's referenced_by during iteration
# it's a runtime error!
with self.lock:
return {dot_separated(k): str(v.dump_graph_entry()) for k, v in self.relations.items()}
return {dot_separated(k): v.dump_graph_entry() for k, v in self.relations.items()}

def _setdefault(self, relation: _CachedRelation):
"""Add a relation to the cache, or return it if it already exists.
@@ -256,17 +265,21 @@ class RelationsCache:
if referenced is None:
return
if referenced is None:
raise ReferencedLinkNotCachedError(referenced_key)
dbt.exceptions.raise_cache_inconsistent(
"in add_link, referenced link key {} not in cache!".format(referenced_key)
)

dependent = self.relations.get(dependent_key)
if dependent is None:
raise DependentLinkNotCachedError(dependent_key)
dbt.exceptions.raise_cache_inconsistent(
"in add_link, dependent link key {} not in cache!".format(dependent_key)
)

assert dependent is not None # we just raised!

referenced.add_reference(dependent)

# This is called in plugins/postgres/dbt/adapters/postgres/impl.py
# TODO: Is this dead code? I can't seem to find it grepping the codebase.
def add_link(self, referenced, dependent):
"""Add a link between two relations to the database. If either relation
does not exist, it will be added as an "external" relation.
@@ -281,18 +294,13 @@ class RelationsCache:
:param BaseRelation dependent: The dependent model.
:raises InternalError: If either entry does not exist.
"""
ref_key = _make_ref_key(referenced)
dep_key = _make_ref_key(dependent)
ref_key = _make_key(referenced)
dep_key = _make_key(dependent)
if (ref_key.database, ref_key.schema) not in self:
# if we have not cached the referenced schema at all, we must be
# referring to a table outside our control. There's no need to make
# a link - we will never drop the referenced relation during a run.
fire_event(
CacheAction(
ref_key=ref_key._asdict(),
ref_key_2=dep_key._asdict(),
)
)
fire_event(UncachedRelation(dep_key=dep_key, ref_key=ref_key))
return
if ref_key not in self.relations:
# Insert a dummy "external" relation.
@@ -302,13 +310,7 @@ class RelationsCache:
# Insert a dummy "external" relation.
dependent = dependent.replace(type=referenced.External)
self.add(dependent)
fire_event(
CacheAction(
action="add_link",
ref_key=dep_key._asdict(),
ref_key_2=ref_key._asdict(),
)
)
fire_event(AddLink(dep_key=dep_key, ref_key=ref_key))
with self.lock:
self._add_link(ref_key, dep_key)

@@ -318,20 +320,13 @@ class RelationsCache:

:param BaseRelation relation: The underlying relation.
"""
flags = get_flags()
cached = _CachedRelation(relation)
fire_event_if(
flags.LOG_CACHE_EVENTS,
lambda: CacheDumpGraph(before_after="before", action="adding", dump=self.dump_graph()),
)
fire_event(CacheAction(action="add_relation", ref_key=_make_ref_key_dict(cached)))
fire_event(AddRelation(relation=_make_key(cached)))
fire_event(DumpBeforeAddGraph(dump=Lazy.defer(lambda: self.dump_graph())))

with self.lock:
self._setdefault(cached)
fire_event_if(
flags.LOG_CACHE_EVENTS,
lambda: CacheDumpGraph(before_after="after", action="adding", dump=self.dump_graph()),
)
fire_event(DumpAfterAddGraph(dump=Lazy.defer(lambda: self.dump_graph())))

def _remove_refs(self, keys):
"""Removes all references to all entries in keys. This does not
@@ -346,6 +341,19 @@ class RelationsCache:
for cached in self.relations.values():
cached.release_references(keys)

def _drop_cascade_relation(self, dropped_key):
"""Drop the given relation and cascade it appropriately to all
dependent relations.

:param _CachedRelation dropped: An existing _CachedRelation to drop.
"""
if dropped_key not in self.relations:
fire_event(DropMissingRelation(relation=dropped_key))
return
consequences = self.relations[dropped_key].collect_consequences()
fire_event(DropCascade(dropped=dropped_key, consequences=consequences))
self._remove_refs(consequences)

def drop(self, relation):
"""Drop the named relation and cascade it appropriately to all
dependent relations.
@@ -357,22 +365,10 @@ class RelationsCache:
:param str schema: The schema of the relation to drop.
:param str identifier: The identifier of the relation to drop.
"""
dropped_key = _make_ref_key(relation)
dropped_key_msg = _make_ref_key_dict(relation)
fire_event(CacheAction(action="drop_relation", ref_key=dropped_key_msg))
dropped_key = _make_key(relation)
fire_event(DropRelation(dropped=dropped_key))
with self.lock:
if dropped_key not in self.relations:
fire_event(CacheAction(action="drop_missing_relation", ref_key=dropped_key_msg))
return
consequences = self.relations[dropped_key].collect_consequences()
# convert from a list of _ReferenceKeys to a list of ReferenceKeyMsgs
consequence_msgs = [key._asdict() for key in consequences]
fire_event(
CacheAction(
action="drop_cascade", ref_key=dropped_key_msg, ref_list=consequence_msgs
)
)
self._remove_refs(consequences)
self._drop_cascade_relation(dropped_key)

def _rename_relation(self, old_key, new_relation):
"""Rename a relation named old_key to new_key, updating references.
@@ -394,14 +390,8 @@ class RelationsCache:
for cached in self.relations.values():
if cached.is_referenced_by(old_key):
fire_event(
CacheAction(
action="update_reference",
ref_key=_make_ref_key_dict(old_key),
ref_key_2=_make_ref_key_dict(new_key),
ref_key_3=_make_ref_key_dict(cached.key()),
)
UpdateReference(old_key=old_key, new_key=new_key, cached_key=cached.key())
)

cached.rename_key(old_key, new_key)

self.relations[new_key] = relation
@@ -426,10 +416,27 @@ class RelationsCache:
if new_key in self.relations:
# Tell user when collision caused by model names truncated during
# materialization.
raise TruncatedModelNameCausedCollisionError(new_key, self.relations)
match = re.search("__dbt_backup|__dbt_tmp$", new_key.identifier)
if match:
truncated_model_name_prefix = new_key.identifier[: match.start()]
message_addendum = (
"\n\nName collisions can occur when the length of two "
"models' names approach your database's builtin limit. "
"Try restructuring your project such that no two models "
"share the prefix '{}'.".format(truncated_model_name_prefix)
+ " Then, clean your warehouse of any removed models."
)
else:
message_addendum = ""

dbt.exceptions.raise_cache_inconsistent(
"in rename, new key {} already in cache: {}{}".format(
new_key, list(self.relations.keys()), message_addendum
)
)

if old_key not in self.relations:
fire_event(CacheAction(action="temporary_relation", ref_key=old_key._asdict()))
fire_event(TemporaryRelation(key=old_key))
return False
return True

@@ -445,20 +452,11 @@ class RelationsCache:
:param BaseRelation new: The new relation name information.
:raises InternalError: If the new key is already present.
"""
old_key = _make_ref_key(old)
new_key = _make_ref_key(new)
fire_event(
CacheAction(
action="rename_relation",
ref_key=old_key._asdict(),
ref_key_2=new_key._asdict(),
)
)
flags = get_flags()
fire_event_if(
flags.LOG_CACHE_EVENTS,
lambda: CacheDumpGraph(before_after="before", action="rename", dump=self.dump_graph()),
)
old_key = _make_key(old)
new_key = _make_key(new)
fire_event(RenameSchema(old_key=old_key, new_key=new_key))

fire_event(DumpBeforeRenameSchema(dump=Lazy.defer(lambda: self.dump_graph())))

with self.lock:
if self._check_rename_constraints(old_key, new_key):
@@ -466,10 +464,7 @@ class RelationsCache:
else:
self._setdefault(_CachedRelation(new))

fire_event_if(
flags.LOG_CACHE_EVENTS,
lambda: CacheDumpGraph(before_after="after", action="rename", dump=self.dump_graph()),
)
fire_event(DumpAfterRenameSchema(dump=Lazy.defer(lambda: self.dump_graph())))

def get_relations(self, database: Optional[str], schema: Optional[str]) -> List[Any]:
"""Case-insensitively yield all relations matching the given schema.
@@ -488,7 +483,9 @@ class RelationsCache:
]

if None in results:
raise NoneRelationFoundError()
dbt.exceptions.raise_cache_inconsistent(
"in get_relations, a None relation was found in the cache!"
)
return results

def clear(self):
@@ -515,6 +512,6 @@ class RelationsCache:
"""
for relation in to_remove:
# it may have been cascaded out already
drop_key = _make_ref_key(relation)
drop_key = _make_key(relation)
if drop_key in self.relations:
self.drop(drop_key)

@@ -1,5 +1,4 @@
import threading
import traceback
from contextlib import contextmanager
from importlib import import_module
from pathlib import Path
@@ -9,11 +8,10 @@ from dbt.adapters.base.plugin import AdapterPlugin
from dbt.adapters.protocol import AdapterConfig, AdapterProtocol, RelationProtocol
from dbt.contracts.connection import AdapterRequiredConfig, Credentials
from dbt.events.functions import fire_event
from dbt.events.types import AdapterImportError, PluginLoadError, AdapterRegistered
from dbt.exceptions import DbtInternalError, DbtRuntimeError
from dbt.events.types import AdapterImportError, PluginLoadError
from dbt.exceptions import InternalException, RuntimeException
from dbt.include.global_project import PACKAGE_PATH as GLOBAL_PROJECT_PATH
from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME
from dbt.semver import VersionSpecifier

Adapter = AdapterProtocol

@@ -35,7 +33,7 @@ class AdapterContainer:
names = ", ".join(self.plugins.keys())

message = f"Invalid adapter type {name}! Must be one of {names}"
raise DbtRuntimeError(message)
raise RuntimeException(message)

def get_adapter_class_by_name(self, name: str) -> Type[Adapter]:
plugin = self.get_plugin_by_name(name)
@@ -61,17 +59,17 @@ class AdapterContainer:
# the user about it via a runtime error
if exc.name == "dbt.adapters." + name:
fire_event(AdapterImportError(exc=str(exc)))
raise DbtRuntimeError(f"Could not find adapter type {name}!")
raise RuntimeException(f"Could not find adapter type {name}!")
# otherwise, the error had to have come from some underlying
# library. Log the stack trace.

fire_event(PluginLoadError(exc_info=traceback.format_exc()))
fire_event(PluginLoadError())
raise
plugin: AdapterPlugin = mod.Plugin
plugin_type = plugin.adapter.type()

if plugin_type != name:
raise DbtRuntimeError(
raise RuntimeException(
f"Expected to find adapter with type named {name}, got "
f"adapter with type {plugin_type}"
)
@@ -90,13 +88,7 @@ class AdapterContainer:
def register_adapter(self, config: AdapterRequiredConfig) -> None:
adapter_name = config.credentials.type
adapter_type = self.get_adapter_class_by_name(adapter_name)
adapter_version = import_module(f".{adapter_name}.__version__", "dbt.adapters").version
adapter_version_specifier = VersionSpecifier.from_version_string(
adapter_version
).to_version_string()
fire_event(
AdapterRegistered(adapter_name=adapter_name, adapter_version=adapter_version_specifier)
)

with self.lock:
if adapter_name in self.adapters:
# this shouldn't really happen...
@@ -139,7 +131,7 @@ class AdapterContainer:
try:
plugin = self.plugins[plugin_name]
except KeyError:
raise DbtInternalError(f"No plugin found for {plugin_name}") from None
raise InternalException(f"No plugin found for {plugin_name}") from None
plugins.append(plugin)
seen.add(plugin_name)
for dep in plugin.dependencies:
@@ -158,16 +150,13 @@ class AdapterContainer:
try:
path = self.packages[package_name]
except KeyError:
raise DbtInternalError(f"No internal package listing found for {package_name}")
raise InternalException(f"No internal package listing found for {package_name}")
paths.append(path)
return paths

def get_adapter_type_names(self, name: Optional[str]) -> List[str]:
return [p.adapter.type() for p in self.get_adapter_plugins(name)]

def get_adapter_constraint_support(self, name: Optional[str]) -> List[str]:
return self.lookup_adapter(name).CONSTRAINT_SUPPORT # type: ignore


FACTORY: AdapterContainer = AdapterContainer()

@@ -224,10 +213,6 @@ def get_adapter_type_names(name: Optional[str]) -> List[str]:
return FACTORY.get_adapter_type_names(name)


def get_adapter_constraint_support(name: Optional[str]) -> List[str]:
return FACTORY.get_adapter_constraint_support(name)


@contextmanager
def adapter_management():
reset_adapters()

@@ -8,6 +8,7 @@ from typing import (
Generic,
TypeVar,
Tuple,
Union,
Dict,
Any,
)
@@ -16,7 +17,8 @@ from typing_extensions import Protocol
import agate

from dbt.contracts.connection import Connection, AdapterRequiredConfig, AdapterResponse
from dbt.contracts.graph.nodes import ResultNode, ManifestNode
from dbt.contracts.graph.compiled import CompiledNode, ManifestNode, NonSourceCompiledNode
from dbt.contracts.graph.parsed import ParsedNode, ParsedSourceDefinition
from dbt.contracts.graph.model_config import BaseConfig
from dbt.contracts.graph.manifest import Manifest
from dbt.contracts.relation import Policy, HasQuoting
@@ -46,7 +48,11 @@ class RelationProtocol(Protocol):
...

@classmethod
def create_from(cls: Type[Self], config: HasQuoting, node: ResultNode) -> Self:
def create_from(
cls: Type[Self],
config: HasQuoting,
node: Union[CompiledNode, ParsedNode, ParsedSourceDefinition],
) -> Self:
...


@@ -59,7 +65,7 @@ class CompilerProtocol(Protocol):
node: ManifestNode,
manifest: Manifest,
extra_context: Optional[Dict[str, Any]] = None,
) -> ManifestNode:
) -> NonSourceCompiledNode:
...


@@ -14,12 +14,7 @@ def lowercase(value: Optional[str]) -> Optional[str]:
return value.lower()


# For backwards compatibility. New code should use _make_ref_key
def _make_key(relation: Any) -> _ReferenceKey:
return _make_ref_key(relation)


def _make_ref_key(relation: Any) -> _ReferenceKey:
"""Make _ReferenceKeys with lowercase values for the cache so we don't have
to keep track of quoting
"""
@@ -27,11 +22,3 @@ def _make_ref_key(relation: Any) -> _ReferenceKey:
return _ReferenceKey(
lowercase(relation.database), lowercase(relation.schema), lowercase(relation.identifier)
)


def _make_ref_key_dict(relation: Any):
return {
"database": relation.database,
"schema": relation.schema,
"identifier": relation.identifier,
}

@@ -1,25 +0,0 @@
# RelationConfig
This package serves as an initial abstraction for managing the inspection of existing relations and determining
changes on those relations. It arose from the materialized view work and currently supports only
materialized views for Postgres and Redshift, as well as dynamic tables for Snowflake. There are three main
classes in this package.

## RelationConfigBase
This is a very small class that only has a `from_dict()` method and a default `NotImplementedError()`. At some
point this could be replaced by a more robust framework, like `mashumaro` or `pydantic`.
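
As a minimal sketch (the subclass name and fields below are hypothetical, not part of this package), a flat config could be hydrated like this:

```python
from dataclasses import dataclass

from dbt.adapters.relation_configs.config_base import RelationConfigBase


@dataclass(frozen=True)
class MaterializedViewConfig(RelationConfigBase):  # hypothetical subclass
    table_name: str
    query: str


# `from_dict()` drops None values and passes the rest to the constructor
config = MaterializedViewConfig.from_dict(
    {"table_name": "table_abc", "query": "select * from table_def"}
)
```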

## RelationConfigChange
This class inherits from `RelationConfigBase`; however, it can be thought of as a separate class. The subclassing
merely points to the idea that both classes would likely inherit from the same class in a `mashumaro` or
`pydantic` implementation. This class is much more restricted in its attributes. It should really only
ever need an `action` and a `context`. This can be thought of as analogous to a web request. You need to
know what you're doing (`action`: 'create' = GET, 'drop' = DELETE, etc.) and the information (`context`) needed
to make the change. In our scenarios, the context tends to be an instance of `RelationConfigBase` corresponding
to the new state.
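
A minimal sketch of a subclass (the name and the string context below are illustrative stand-ins; the context would usually be a config object):

```python
from dataclasses import dataclass

from dbt.adapters.relation_configs.config_change import (
    RelationConfigChange,
    RelationConfigChangeAction,
)


@dataclass(frozen=True, eq=True, unsafe_hash=True)
class IndexConfigChange(RelationConfigChange):  # hypothetical subclass
    @property
    def requires_full_refresh(self) -> bool:
        # dropping and recreating an index does not require rebuilding the relation
        return False


change = IndexConfigChange(action=RelationConfigChangeAction.drop, context="index_a")
```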

## RelationConfigValidationMixin
This mixin provides optional validation mechanics that can be applied to either `RelationConfigBase` or
`RelationConfigChange` subclasses. A validation rule is a combination of a `validation_check`, something
that should evaluate to `True`, and an optional `validation_error`, an instance of `DbtRuntimeError`
that should be raised in the event the `validation_check` fails. While optional, it's recommended that
the `validation_error` be provided for clearer transparency to the end user.
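
A sketch of how an adapter might attach rules (the `IndexConfig` name and its fields are illustrative):

```python
from dataclasses import dataclass
from typing import Set

from dbt.adapters.relation_configs.config_base import RelationConfigBase
from dbt.adapters.relation_configs.config_validation import (
    RelationConfigValidationMixin,
    RelationConfigValidationRule,
)
from dbt.exceptions import DbtRuntimeError


@dataclass(frozen=True)
class IndexConfig(RelationConfigBase, RelationConfigValidationMixin):  # hypothetical
    name: str
    unique: bool = False

    @property
    def validation_rules(self) -> Set[RelationConfigValidationRule]:
        # each rule is checked in __post_init__ via run_validation_rules()
        return {
            RelationConfigValidationRule(
                validation_check=bool(self.name),
                validation_error=DbtRuntimeError("An index must have a name."),
            )
        }
```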

@@ -1,12 +0,0 @@
from dbt.adapters.relation_configs.config_base import ( # noqa: F401
RelationConfigBase,
RelationResults,
)
from dbt.adapters.relation_configs.config_change import ( # noqa: F401
RelationConfigChangeAction,
RelationConfigChange,
)
from dbt.adapters.relation_configs.config_validation import ( # noqa: F401
RelationConfigValidationMixin,
RelationConfigValidationRule,
)
@@ -1,44 +0,0 @@
from dataclasses import dataclass
from typing import Union, Dict

import agate
from dbt.utils import filter_null_values


"""
This is what relation metadata from the database looks like. It's a dictionary because there will be
multiple grains of data for a single object. For example, a materialized view in Postgres has base-level information,
like its name. But it also can have multiple indexes, which need to be fetched with a separate query. It might look like this:

{
"base": agate.Row({"table_name": "table_abc", "query": "select * from table_def"})
"indexes": agate.Table("rows": [
agate.Row({"name": "index_a", "columns": ["column_a"], "type": "hash", "unique": False}),
agate.Row({"name": "index_b", "columns": ["time_dim_a"], "type": "btree", "unique": False}),
])
}
"""
RelationResults = Dict[str, Union[agate.Row, agate.Table]]


@dataclass(frozen=True)
class RelationConfigBase:
@classmethod
def from_dict(cls, kwargs_dict) -> "RelationConfigBase":
"""
This assumes the subclass of `RelationConfigBase` is flat, in the sense that no attribute is
itself another subclass of `RelationConfigBase`. If that's not the case, this should be overridden
to manually manage that complexity.

Args:
kwargs_dict: the dict representation of this instance

Returns: the `RelationConfigBase` representation associated with the provided dict
"""
return cls(**filter_null_values(kwargs_dict)) # type: ignore

@classmethod
def _not_implemented_error(cls) -> NotImplementedError:
return NotImplementedError(
"This relation type has not been fully configured for this adapter."
)
@@ -1,23 +0,0 @@
from abc import ABC, abstractmethod
from dataclasses import dataclass
from typing import Hashable

from dbt.adapters.relation_configs.config_base import RelationConfigBase
from dbt.dataclass_schema import StrEnum


class RelationConfigChangeAction(StrEnum):
alter = "alter"
create = "create"
drop = "drop"


@dataclass(frozen=True, eq=True, unsafe_hash=True)
class RelationConfigChange(RelationConfigBase, ABC):
action: RelationConfigChangeAction
context: Hashable # this is usually a RelationConfig, e.g. IndexConfig, but shouldn't be limited

@property
@abstractmethod
def requires_full_refresh(self) -> bool:
raise self._not_implemented_error()
@@ -1,57 +0,0 @@
from dataclasses import dataclass
from typing import Set, Optional

from dbt.exceptions import DbtRuntimeError


@dataclass(frozen=True, eq=True, unsafe_hash=True)
class RelationConfigValidationRule:
validation_check: bool
validation_error: Optional[DbtRuntimeError]

@property
def default_error(self):
return DbtRuntimeError(
"There was a validation error in preparing this relation config."
"No additional context was provided by this adapter."
)


@dataclass(frozen=True)
class RelationConfigValidationMixin:
def __post_init__(self):
self.run_validation_rules()

@property
def validation_rules(self) -> Set[RelationConfigValidationRule]:
"""
A set of validation rules to run against the object upon creation.

A validation rule is a combination of a validation check (bool) and an optional error message.

This defaults to no validation rules if not implemented. It's recommended to override this with values,
but that may not always be necessary.

Returns: a set of validation rules
"""
return set()

def run_validation_rules(self):
for validation_rule in self.validation_rules:
try:
assert validation_rule.validation_check
except AssertionError:
if validation_rule.validation_error:
raise validation_rule.validation_error
else:
raise validation_rule.default_error
self.run_child_validation_rules()

def run_child_validation_rules(self):
for attr_value in vars(self).values():
if hasattr(attr_value, "validation_rules"):
attr_value.run_validation_rules()
if isinstance(attr_value, set):
for member in attr_value:
if hasattr(member, "validation_rules"):
member.run_validation_rules()
@@ -1,6 +1,6 @@
import abc
import time
from typing import List, Optional, Tuple, Any, Iterable, Dict, Union
from typing import List, Optional, Tuple, Any, Iterable, Dict

import agate

@@ -10,8 +10,6 @@ from dbt.adapters.base import BaseConnectionManager
from dbt.contracts.connection import Connection, ConnectionState, AdapterResponse
from dbt.events.functions import fire_event
from dbt.events.types import ConnectionUsed, SQLQuery, SQLCommit, SQLQueryStatus
from dbt.events.contextvars import get_node_info
from dbt.utils import cast_to_str


class SQLConnectionManager(BaseConnectionManager):
@@ -27,7 +25,9 @@ class SQLConnectionManager(BaseConnectionManager):
@abc.abstractmethod
def cancel(self, connection: Connection):
"""Cancel the given connection."""
raise dbt.exceptions.NotImplementedError("`cancel` is not implemented for this adapter!")
raise dbt.exceptions.NotImplementedException(
"`cancel` is not implemented for this adapter!"
)

def cancel_open(self) -> List[str]:
names = []
@@ -55,13 +55,7 @@ class SQLConnectionManager(BaseConnectionManager):
connection = self.get_thread_connection()
if auto_begin and connection.transaction_open is False:
self.begin()
fire_event(
ConnectionUsed(
conn_type=self.TYPE,
conn_name=cast_to_str(connection.name),
node_info=get_node_info(),
)
)
fire_event(ConnectionUsed(conn_type=self.TYPE, conn_name=connection.name))

with self.exception_handler(sql):
if abridge_sql_log:
@@ -69,11 +63,7 @@ class SQLConnectionManager(BaseConnectionManager):
else:
log_sql = sql

fire_event(
SQLQuery(
conn_name=cast_to_str(connection.name), sql=log_sql, node_info=get_node_info()
)
)
fire_event(SQLQuery(conn_name=connection.name, sql=log_sql))
pre = time.time()

cursor = connection.handle.cursor()
@@ -81,9 +71,7 @@ class SQLConnectionManager(BaseConnectionManager):

fire_event(
SQLQueryStatus(
status=str(self.get_response(cursor)),
elapsed=round((time.time() - pre)),
node_info=get_node_info(),
status=str(self.get_response(cursor)), elapsed=round((time.time() - pre), 2)
)
)

@@ -93,7 +81,7 @@ class SQLConnectionManager(BaseConnectionManager):
@abc.abstractmethod
def get_response(cls, cursor: Any) -> AdapterResponse:
"""Get the status of the cursor."""
raise dbt.exceptions.NotImplementedError(
raise dbt.exceptions.NotImplementedException(
"`get_response` is not implemented for this adapter!"
)

@@ -117,36 +105,25 @@ class SQLConnectionManager(BaseConnectionManager):
return [dict(zip(column_names, row)) for row in rows]

@classmethod
def get_result_from_cursor(cls, cursor: Any, limit: Optional[int]) -> agate.Table:
def get_result_from_cursor(cls, cursor: Any) -> agate.Table:
data: List[Any] = []
column_names: List[str] = []

if cursor.description is not None:
column_names = [col[0] for col in cursor.description]
if limit:
rows = cursor.fetchmany(limit)
else:
rows = cursor.fetchall()
rows = cursor.fetchall()
data = cls.process_results(column_names, rows)

return dbt.clients.agate_helper.table_from_data_flat(data, column_names)

@classmethod
def data_type_code_to_name(cls, type_code: Union[int, str]) -> str:
"""Get the string representation of the data type from the type_code."""
# https://peps.python.org/pep-0249/#type-objects
raise dbt.exceptions.NotImplementedError(
"`data_type_code_to_name` is not implemented for this adapter!"
)

def execute(
self, sql: str, auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None
self, sql: str, auto_begin: bool = False, fetch: bool = False
) -> Tuple[AdapterResponse, agate.Table]:
sql = self._add_query_comment(sql)
_, cursor = self.add_query(sql, auto_begin)
response = self.get_response(cursor)
if fetch:
table = self.get_result_from_cursor(cursor, limit)
table = self.get_result_from_cursor(cursor)
else:
table = dbt.clients.agate_helper.empty_table()
return response, table
@@ -157,14 +134,10 @@ class SQLConnectionManager(BaseConnectionManager):
def add_commit_query(self):
return self.add_query("COMMIT", auto_begin=False)

def add_select_query(self, sql: str) -> Tuple[Connection, Any]:
sql = self._add_query_comment(sql)
return self.add_query(sql, auto_begin=False)

def begin(self):
connection = self.get_thread_connection()
if connection.transaction_open is True:
raise dbt.exceptions.DbtInternalError(
raise dbt.exceptions.InternalException(
'Tried to begin a new transaction on connection "{}", but '
"it already had one open!".format(connection.name)
)
@@ -177,12 +150,12 @@ class SQLConnectionManager(BaseConnectionManager):
def commit(self):
connection = self.get_thread_connection()
if connection.transaction_open is False:
raise dbt.exceptions.DbtInternalError(
raise dbt.exceptions.InternalException(
'Tried to commit transaction on connection "{}", but '
"it does not have one open!".format(connection.name)
)

fire_event(SQLCommit(conn_name=connection.name, node_info=get_node_info()))
fire_event(SQLCommit(conn_name=connection.name))
self.add_commit_query()

connection.transaction_open = False

@@ -1,10 +1,11 @@
import agate
from typing import Any, Optional, Tuple, Type, List

from dbt.contracts.connection import Connection, AdapterResponse
from dbt.exceptions import RelationTypeNullError
import dbt.clients.agate_helper
from dbt.contracts.connection import Connection
import dbt.exceptions
from dbt.adapters.base import BaseAdapter, available
from dbt.adapters.cache import _make_ref_key_dict
from dbt.adapters.cache import _make_key
from dbt.adapters.sql import SQLConnectionManager
from dbt.events.functions import fire_event
from dbt.events.types import ColTypeChange, SchemaCreation, SchemaDrop
@@ -22,7 +23,6 @@ RENAME_RELATION_MACRO_NAME = "rename_relation"
TRUNCATE_RELATION_MACRO_NAME = "truncate_relation"
DROP_RELATION_MACRO_NAME = "drop_relation"
ALTER_COLUMN_TYPE_MACRO_NAME = "alter_column_type"
VALIDATE_SQL_MACRO_NAME = "validate_sql"


class SQLAdapter(BaseAdapter):
@@ -110,7 +110,7 @@ class SQLAdapter(BaseAdapter):
ColTypeChange(
orig_type=target_column.data_type,
new_type=new_type,
table=_make_ref_key_dict(current),
table=_make_key(current),
)
)

@@ -132,7 +132,9 @@ class SQLAdapter(BaseAdapter):

def drop_relation(self, relation):
if relation.type is None:
raise RelationTypeNullError(relation)
dbt.exceptions.raise_compiler_error(
"Tried to drop relation {}, but its type is null.".format(relation)
)

self.cache_dropped(relation)
self.execute_macro(DROP_RELATION_MACRO_NAME, kwargs={"relation": relation})
@@ -153,7 +155,7 @@ class SQLAdapter(BaseAdapter):

def create_schema(self, relation: BaseRelation) -> None:
relation = relation.without_identifier()
fire_event(SchemaCreation(relation=_make_ref_key_dict(relation)))
fire_event(SchemaCreation(relation=_make_key(relation)))
kwargs = {
"relation": relation,
}
@@ -164,7 +166,7 @@ class SQLAdapter(BaseAdapter):

def drop_schema(self, relation: BaseRelation) -> None:
relation = relation.without_identifier()
fire_event(SchemaDrop(relation=_make_ref_key_dict(relation)))
fire_event(SchemaDrop(relation=_make_key(relation)))
kwargs = {
"relation": relation,
}
@@ -198,7 +200,6 @@ class SQLAdapter(BaseAdapter):
)
return relations

@classmethod
def quote(self, identifier):
return '"{}"'.format(identifier)

@@ -219,34 +220,6 @@ class SQLAdapter(BaseAdapter):
results = self.execute_macro(CHECK_SCHEMA_EXISTS_MACRO_NAME, kwargs=kwargs)
return results[0][0] > 0

def validate_sql(self, sql: str) -> AdapterResponse:
"""Submit the given SQL to the engine for validation, but not execution.

By default we simply prefix the query with the explain keyword and allow the
exceptions thrown by the underlying engine on invalid SQL inputs to bubble up
to the exception handler. For adjustments to the explain statement - such as
for adapters that have different mechanisms for hinting at query validation
or dry-run - callers may be able to override the validate_sql_query macro with
the addition of an <adapter>__validate_sql implementation.

:param sql str: The sql to validate
"""
kwargs = {
"sql": sql,
}
result = self.execute_macro(VALIDATE_SQL_MACRO_NAME, kwargs=kwargs)
# The statement macro always returns an AdapterResponse in the output AttrDict's
# `response` property, and we preserve the full payload in case we want to
# return fetched output for engines where explain plans are emitted as columnar
# results. Any macro override that deviates from this behavior may encounter an
# assertion error in the runtime.
adapter_response = result.response # type: ignore[attr-defined]
assert isinstance(adapter_response, AdapterResponse), (
f"Expected AdapterResponse from validate_sql macro execution, "
f"got {type(adapter_response)}."
)
return adapter_response

# This is for use in the test suite
def run_sql_for_tests(self, sql, fetch, conn):
cursor = conn.handle.cursor()

@@ -1,71 +1 @@
# Adding a new command

## `main.py`
Add the new command with all necessary decorators. Every command will need at minimum:
- a decorator for the click group it belongs to, which also names the command
- the postflight decorator (must come before other decorators from the `requires` module for error handling)
- the preflight decorator
```py
@cli.command("my-new-command")
@requires.postflight
@requires.preflight
def my_new_command(ctx, **kwargs):
    ...
```

## `types.py`
Add an entry to the `Command` enum with your new command. Commands that are sub-commands should have entries
that represent their full command path (e.g. `source freshness -> SOURCE_FRESHNESS`, `docs serve -> DOCS_SERVE`).
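
For the hypothetical `my-new-command` above, the entry might look like the sketch below; mirror the base class and value format of the neighboring entries in the real enum:

```py
from enum import Enum


class Command(Enum):
    # ... existing entries elided ...
    MY_NEW_COMMAND = "my-new-command"  # hypothetical entry for the command above
```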

## `flags.py`
Add the new command to the dictionary within the `command_args` function.

# Exception Handling

## `requires.py`

### `postflight`
In the postflight decorator, the click command is invoked (i.e. `func(*args, **kwargs)`) and wrapped in a `try/except` block to handle any exceptions thrown.
Any exceptions thrown from `postflight` are wrapped by custom exceptions from the `dbt.cli.exceptions` module (i.e. `ResultExit`, `ExceptionExit`) to instruct click to complete execution with a particular exit code.

Some `dbt-core` handled exceptions have an attribute named `results` which contains results from running nodes (e.g. `FailFastError`). These are wrapped in the `ResultExit` exception to represent runs that have failed in a way that `dbt-core` expects.
If the invocation of the command does not throw any exceptions but does not succeed, `postflight` will still raise the `ResultExit` exception to make use of the exit code.
These exceptions produce an exit code of `1`.

Exceptions wrapped with `ExceptionExit` may be thrown by `dbt-core` intentionally (i.e. an exception that inherits from `dbt.exceptions.Exception`) or unintentionally (i.e. exceptions thrown by the Python runtime). In either case, these are considered errors that `dbt-core` did not expect and are treated as genuine exceptions.
These exceptions produce an exit code of `2`.

If no exceptions are thrown from invoking the command and the command succeeds, `postflight` will not raise any exceptions.
When no exceptions are raised, an exit code of `0` is produced.
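
A quick illustration of these exit codes observed from outside the process (a sketch; assumes `dbt` is installed and is run inside a configured project):

```python
import subprocess

# 0 = success, 1 = handled failure with results (ResultExit),
# 2 = unexpected error (ExceptionExit)
completed = subprocess.run(["dbt", "run"])
if completed.returncode == 0:
    print("invocation succeeded")
elif completed.returncode == 1:
    print("invocation completed with failures")
elif completed.returncode == 2:
    print("invocation hit an unexpected error")
```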

## `main.py`

### `dbtRunner`
`dbtRunner` provides a programmatic interface for our click CLI and wraps the invocation of the click commands to handle any exceptions thrown.

`dbtRunner.invoke` should ideally only ever return an instantiated `dbtRunnerResult` which contains the following fields:
- `success`: A boolean representing whether the command invocation was successful
- `result`: The optional result of the command invoked. This attribute can have many types, please see the definition of `dbtRunnerResult` for more information
- `exception`: If an exception was thrown during command invocation it will be saved here, otherwise it will be `None`. Please note that the exceptions held in this attribute are not the exceptions thrown by `preflight` but instead the exceptions that `ResultExit` and `ExceptionExit` wrap

Programmatic exception handling might look like the following:
```python
res = dbtRunner().invoke(["run"])
if not res.success:
    ...
    if type(res.exception) == SomeExceptionType:
        ...
```

## `dbt/tests/util.py`

### `run_dbt`
In many of our functional and integration tests, we want to be sure that an invocation of `dbt` raises a certain exception.
A common pattern for these assertions:
```python
class TestSomething:
    def test_something(self, project):
        with pytest.raises(SomeException):
            run_dbt(["run"])
```
To allow these tests to assert that exceptions have been thrown, the `run_dbt` function will raise any exceptions it receives from the invocation of a `dbt` command.
TODO

@@ -1 +0,0 @@
from .main import cli as dbt_cli # noqa

@@ -1,16 +0,0 @@
import click
from typing import Optional

from dbt.cli.main import cli as dbt


def make_context(args, command=dbt) -> Optional[click.Context]:
try:
ctx = command.make_context(command.name, args)
except click.exceptions.Exit:
return None

ctx.invoked_subcommand = ctx.protected_args[0] if ctx.protected_args else None
ctx.obj = {}

return ctx
@@ -1,43 +0,0 @@
from typing import Optional, IO

from click.exceptions import ClickException
from dbt.utils import ExitCodes


class DbtUsageException(Exception):
pass


class DbtInternalException(Exception):
pass


class CliException(ClickException):
"""The base exception class for our implementation of the click CLI.
The exit_code attribute is used by click to determine which exit code to produce
after an invocation."""

def __init__(self, exit_code: ExitCodes) -> None:
self.exit_code = exit_code.value

# the typing of _file is to satisfy the signature of ClickException.show
# overriding this method prevents click from printing any exceptions to stdout
def show(self, _file: Optional[IO] = None) -> None:
pass


class ResultExit(CliException):
"""This class wraps any exception that contains results while invoking dbt, or the
results of an invocation that did not succeed but did not throw any exceptions."""

def __init__(self, result) -> None:
super().__init__(ExitCodes.ModelError)
self.result = result


class ExceptionExit(CliException):
"""This class wraps any exception that does not contain results thrown while invoking dbt."""

def __init__(self, exception: Exception) -> None:
super().__init__(ExitCodes.UnhandledError)
self.exception = exception
@@ -1,404 +1,44 @@
|
||||
# TODO Move this to /core/dbt/flags.py when we're ready to break things
|
||||
import os
|
||||
import sys
|
||||
from dataclasses import dataclass
|
||||
from importlib import import_module
|
||||
from multiprocessing import get_context
|
||||
from pprint import pformat as pf
|
||||
from typing import Any, Callable, Dict, List, Optional, Set, Union
|
||||
|
||||
from click import Context, get_current_context, Parameter
|
||||
from click.core import Command as ClickCommand, Group, ParameterSource
|
||||
from dbt.cli.exceptions import DbtUsageException
|
||||
from dbt.cli.resolvers import default_log_path, default_project_dir
|
||||
from dbt.cli.types import Command as CliCommand
|
||||
from dbt.config.profile import read_user_config
|
||||
from dbt.contracts.project import UserConfig
|
||||
from dbt.exceptions import DbtInternalError
|
||||
from dbt.deprecations import renamed_env_var
|
||||
from dbt.helper_types import WarnErrorOptions
|
||||
from click import get_current_context
|
||||
|
||||
if os.name != "nt":
|
||||
# https://bugs.python.org/issue41567
|
||||
import multiprocessing.popen_spawn_posix # type: ignore # noqa: F401
|
||||
|
||||
FLAGS_DEFAULTS = {
|
||||
"INDIRECT_SELECTION": "eager",
|
||||
"TARGET_PATH": None,
|
||||
# Cli args without user_config or env var option.
|
||||
"FULL_REFRESH": False,
|
||||
"STRICT_MODE": False,
|
||||
"STORE_FAILURES": False,
|
||||
"INTROSPECT": True,
|
||||
}
|
||||
|
||||
DEPRECATED_PARAMS = {
|
||||
"deprecated_defer": "defer",
|
||||
"deprecated_favor_state": "favor_state",
|
||||
"deprecated_print": "print",
|
||||
"deprecated_state": "state",
|
||||
}
|
||||
|
||||
|
||||
WHICH_KEY = "which"
|
||||
|
||||
|
||||
def convert_config(config_name, config_value):
|
||||
"""Convert the values from config and original set_from_args to the correct type."""
|
||||
ret = config_value
|
||||
if config_name.lower() == "warn_error_options" and type(config_value) == dict:
|
||||
ret = WarnErrorOptions(
|
||||
include=config_value.get("include", []), exclude=config_value.get("exclude", [])
|
||||
)
|
||||
return ret
|
||||
|
||||
|
||||
def args_to_context(args: List[str]) -> Context:
|
||||
"""Convert a list of args to a click context with proper hierarchy for dbt commands"""
|
||||
from dbt.cli.main import cli
|
||||
|
||||
cli_ctx = cli.make_context(cli.name, args)
|
||||
# Split args if they're a comma seperated string.
|
||||
if len(args) == 1 and "," in args[0]:
|
||||
args = args[0].split(",")
|
||||
sub_command_name, sub_command, args = cli.resolve_command(cli_ctx, args)
|
||||
|
||||
# Handle source and docs group.
|
||||
if isinstance(sub_command, Group):
|
||||
sub_command_name, sub_command, args = sub_command.resolve_command(cli_ctx, args)
|
||||
|
||||
assert isinstance(sub_command, ClickCommand)
|
||||
sub_command_ctx = sub_command.make_context(sub_command_name, args)
|
||||
sub_command_ctx.parent = cli_ctx
|
||||
return sub_command_ctx
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class Flags:
    """Primary configuration artifact for running dbt"""

    def __init__(
        self, ctx: Optional[Context] = None, user_config: Optional[UserConfig] = None
    ) -> None:

        # Set the default flags.
        for key, value in FLAGS_DEFAULTS.items():
            object.__setattr__(self, key, value)
    def __init__(self, ctx=None) -> None:

        if ctx is None:
            ctx = get_current_context()

        def _get_params_by_source(ctx: Context, source_type: ParameterSource):
            """Generates all params of a given source type."""
            yield from [
                name for name, source in ctx._parameter_source.items() if source is source_type
            ]
            if ctx.parent:
                yield from _get_params_by_source(ctx.parent, source_type)

        # Ensure that any params sourced from the commandline are not present more than once.
        # Click handles this exclusivity, but only at a per-subcommand level.
        seen_params = []
        for param in _get_params_by_source(ctx, ParameterSource.COMMANDLINE):
            if param in seen_params:
                raise DbtUsageException(
                    f"{param.lower()} was provided both before and after the subcommand, it can only be set either before or after.",
                )
            seen_params.append(param)

        def _assign_params(
            ctx: Context,
            params_assigned_from_default: set,
            deprecated_env_vars: Dict[str, Callable],
        ):
        def assign_params(ctx):
            """Recursively adds all click params to flag object"""
            for param_name, param_value in ctx.params.items():
                # N.B. You have to use the base MRO method (object.__setattr__) to set attributes
                # when using frozen dataclasses.
                # https://docs.python.org/3/library/dataclasses.html#frozen-instances

                # Handle deprecated env vars while still respecting old values
                # e.g. DBT_NO_PRINT -> DBT_PRINT if DBT_NO_PRINT is set, it is
                # respected over DBT_PRINT or --print.
                new_name: Union[str, None] = None
                if param_name in DEPRECATED_PARAMS:

                    # Deprecated env vars can only be set via env var.
                    # We use the deprecated option in click to serialize the value
                    # from the env var string.
                    param_source = ctx.get_parameter_source(param_name)
                    if param_source == ParameterSource.DEFAULT:
                        continue
                    elif param_source != ParameterSource.ENVIRONMENT:
                        raise DbtUsageException(
                            "Deprecated parameters can only be set via environment variables",
                        )

                    # Rename for clarity.
                    dep_name = param_name
                    new_name = DEPRECATED_PARAMS.get(dep_name)
                    try:
                        assert isinstance(new_name, str)
                    except AssertionError:
                        raise Exception(
                            f"No deprecated param name match in DEPRECATED_PARAMS from {dep_name} to {new_name}"
                        )

                    # Find param objects for their envvar name.
                    try:
                        dep_param = [x for x in ctx.command.params if x.name == dep_name][0]
                        new_param = [x for x in ctx.command.params if x.name == new_name][0]
                    except IndexError:
                        raise Exception(
                            f"No deprecated param name match in context from {dep_name} to {new_name}"
                        )

                    # Remove param from defaulted set since the deprecated
                    # value is not set from default, but from an env var.
                    if new_name in params_assigned_from_default:
                        params_assigned_from_default.remove(new_name)

                    # Add the deprecation warning function to the set.
                    assert isinstance(dep_param.envvar, str)
                    assert isinstance(new_param.envvar, str)
                    deprecated_env_vars[new_name] = renamed_env_var(
                        old_name=dep_param.envvar,
                        new_name=new_param.envvar,
                    )

                # Set the flag value.
                is_duplicate = hasattr(self, param_name.upper())
                is_default = ctx.get_parameter_source(param_name) == ParameterSource.DEFAULT
                flag_name = (new_name or param_name).upper()

                if (is_duplicate and not is_default) or not is_duplicate:
                    object.__setattr__(self, flag_name, param_value)

                # Track default assigned params.
                if is_default:
                    params_assigned_from_default.add(param_name)

                if hasattr(self, param_name):
                    raise Exception(f"Duplicate flag names found in click command: {param_name}")
                object.__setattr__(self, param_name.upper(), param_value)
            if ctx.parent:
                _assign_params(ctx.parent, params_assigned_from_default, deprecated_env_vars)
                assign_params(ctx.parent)

        params_assigned_from_default = set()  # type: Set[str]
        deprecated_env_vars: Dict[str, Callable] = {}
        _assign_params(ctx, params_assigned_from_default, deprecated_env_vars)
        assign_params(ctx)

        # Set deprecated_env_var_warnings to be fired later after events have been init.
        object.__setattr__(
            self, "deprecated_env_var_warnings", [x for x in deprecated_env_vars.values()]
        )

        # Get the invoked command flags.
        invoked_subcommand_name = (
            ctx.invoked_subcommand if hasattr(ctx, "invoked_subcommand") else None
        )
        if invoked_subcommand_name is not None:
            invoked_subcommand = getattr(import_module("dbt.cli.main"), invoked_subcommand_name)
            invoked_subcommand.allow_extra_args = True
            invoked_subcommand.ignore_unknown_options = True
            invoked_subcommand_ctx = invoked_subcommand.make_context(None, sys.argv)
            _assign_params(
                invoked_subcommand_ctx, params_assigned_from_default, deprecated_env_vars
            )

        if not user_config:
            profiles_dir = getattr(self, "PROFILES_DIR", None)
            user_config = read_user_config(profiles_dir) if profiles_dir else None

        # Add entire invocation command to flags
        object.__setattr__(self, "INVOCATION_COMMAND", "dbt " + " ".join(sys.argv[1:]))

        # Overwrite default assignments with user config if available.
        if user_config:
            param_assigned_from_default_copy = params_assigned_from_default.copy()
            for param_assigned_from_default in params_assigned_from_default:
                user_config_param_value = getattr(user_config, param_assigned_from_default, None)
                if user_config_param_value is not None:
                    object.__setattr__(
                        self,
                        param_assigned_from_default.upper(),
                        convert_config(param_assigned_from_default, user_config_param_value),
                    )
                    param_assigned_from_default_copy.remove(param_assigned_from_default)
            params_assigned_from_default = param_assigned_from_default_copy

        # Set hard coded flags.
        object.__setattr__(self, "WHICH", invoked_subcommand_name or ctx.info_name)
        # Hard coded flags
        object.__setattr__(self, "WHICH", ctx.info_name)
        object.__setattr__(self, "MP_CONTEXT", get_context("spawn"))

        # Apply the lead/follow relationship between some parameters.
        self._override_if_set("USE_COLORS", "USE_COLORS_FILE", params_assigned_from_default)
        self._override_if_set("LOG_LEVEL", "LOG_LEVEL_FILE", params_assigned_from_default)
        self._override_if_set("LOG_FORMAT", "LOG_FORMAT_FILE", params_assigned_from_default)

        # Set default LOG_PATH from PROJECT_DIR, if available.
        # Starting in v1.5, if `log-path` is set in `dbt_project.yml`, it will raise a deprecation warning,
        # with the possibility of removing it in a future release.
        if getattr(self, "LOG_PATH", None) is None:
            project_dir = getattr(self, "PROJECT_DIR", default_project_dir())
            version_check = getattr(self, "VERSION_CHECK", True)
            object.__setattr__(self, "LOG_PATH", default_log_path(project_dir, version_check))

        # Support console DO NOT TRACK initiative.
        if os.getenv("DO_NOT_TRACK", "").lower() in ("1", "t", "true", "y", "yes"):
            object.__setattr__(self, "SEND_ANONYMOUS_USAGE_STATS", False)

        # Check mutual exclusivity once all flags are set.
        self._assert_mutually_exclusive(
            params_assigned_from_default, ["WARN_ERROR", "WARN_ERROR_OPTIONS"]
        )

        # Support lower cased access for legacy code.
        params = set(
            x for x in dir(self) if not callable(getattr(self, x)) and not x.startswith("__")
        )
        for param in params:
            object.__setattr__(self, param.lower(), getattr(self, param))
        # Support console DO NOT TRACK initiave
        if os.getenv("DO_NOT_TRACK", "").lower() in (1, "t", "true", "y", "yes"):
            object.__setattr__(self, "ANONYMOUS_USAGE_STATS", False)

    def __str__(self) -> str:
        return str(pf(self.__dict__))

    def _override_if_set(self, lead: str, follow: str, defaulted: Set[str]) -> None:
        """If the value of the lead parameter was set explicitly, apply the value to follow, unless follow was also set explicitly."""
        if lead.lower() not in defaulted and follow.lower() in defaulted:
            object.__setattr__(self, follow.upper(), getattr(self, lead.upper(), None))

    def _assert_mutually_exclusive(
        self, params_assigned_from_default: Set[str], group: List[str]
    ) -> None:
        """
        Ensure no elements from group are simultaneously provided by a user, as inferred from params_assigned_from_default.
        Raises click.UsageError if any two elements from group are simultaneously provided by a user.
        """
        set_flag = None
        for flag in group:
            flag_set_by_user = flag.lower() not in params_assigned_from_default
            if flag_set_by_user and set_flag:
                raise DbtUsageException(
                    f"{flag.lower()}: not allowed with argument {set_flag.lower()}"
                )
            elif flag_set_by_user:
                set_flag = flag

    def fire_deprecations(self):
        """Fires events for deprecated env_var usage."""
        [dep_fn() for dep_fn in self.deprecated_env_var_warnings]
        # It is necessary to remove this attr from the class so it does
        # not get pickled when written to disk as json.
        object.__delattr__(self, "deprecated_env_var_warnings")

    @classmethod
    def from_dict(cls, command: CliCommand, args_dict: Dict[str, Any]) -> "Flags":
        command_arg_list = command_params(command, args_dict)
        ctx = args_to_context(command_arg_list)
        flags = cls(ctx=ctx)
        flags.fire_deprecations()
        return flags

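`from_dict` chains the helpers defined below (`command_params`, then `args_to_context`) to build a `Flags` instance without a real command line. A sketch, with illustrative parameter values:

```python
from dbt.cli.types import Command as CliCommand

# Sketch: construct Flags for `dbt run` from a plain dict of CLI params.
flags = Flags.from_dict(CliCommand.RUN, {"defer": True, "print": False})
print(flags.WHICH)  # -> "run"
print(flags.DEFER)  # -> True
```
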
CommandParams = List[str]


def command_params(command: CliCommand, args_dict: Dict[str, Any]) -> CommandParams:
    """Given a command and a dict, returns a list of strings representing
    the CLI params for that command. The order of this list is consistent with
    which flags are expected at the parent level vs the command level.

    e.g. fn("run", {"defer": True, "print": False}) -> ["--no-print", "run", "--defer"]

    The result of this function can be passed in to the args_to_context function
    to produce a click context to instantiate Flags with.
    """

    cmd_args = set(command_args(command))
    prnt_args = set(parent_args())
    default_args = set([x.lower() for x in FLAGS_DEFAULTS.keys()])

    res = command.to_list()

    for k, v in args_dict.items():
        k = k.lower()

        # if a "which" value exists in the args dict, it should match the command provided
        if k == WHICH_KEY:
            if v != command.value:
                raise DbtInternalError(
                    f"Command '{command.value}' does not match value of which: '{v}'"
                )
            continue

        # param was assigned from defaults and should not be included
        if k not in (cmd_args | prnt_args) - default_args:
            continue

        # if the param is in parent args, it should come before the arg name
        # e.g. ["--print", "run"] vs ["run", "--print"]
        add_fn = res.append
        if k in prnt_args:

            def add_fn(x):
                res.insert(0, x)

        spinal_cased = k.replace("_", "-")

        if k == "macro" and command == CliCommand.RUN_OPERATION:
            add_fn(v)
        elif v in (None, False):
            add_fn(f"--no-{spinal_cased}")
        elif v is True:
            add_fn(f"--{spinal_cased}")
        else:
            add_fn(f"--{spinal_cased}={v}")

    return res

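The docstring's example, spelled out (assuming the `Flags` machinery above): parent-level params are prepended before the command name, command-level params appended after it.

```python
from dbt.cli.types import Command as CliCommand

args = command_params(CliCommand.RUN, {"defer": True, "print": False})
print(args)  # -> ["--no-print", "run", "--defer"]

# The list feeds straight back into args_to_context:
flags = Flags(ctx=args_to_context(args))
```
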
ArgsList = List[str]


def parent_args() -> ArgsList:
    """Return a list representing the params the base click command takes."""
    from dbt.cli.main import cli

    return format_params(cli.params)


def command_args(command: CliCommand) -> ArgsList:
    """Given a command, return a list of strings representing the params
    that command takes. This function only returns params assigned to a
    specific command, not those of its parent command.

    e.g. fn("run") -> ["defer", "favor_state", "exclude", ...]
    """
    import dbt.cli.main as cli

    CMD_DICT: Dict[CliCommand, ClickCommand] = {
        CliCommand.BUILD: cli.build,
        CliCommand.CLEAN: cli.clean,
        CliCommand.CLONE: cli.clone,
        CliCommand.COMPILE: cli.compile,
        CliCommand.DOCS_GENERATE: cli.docs_generate,
        CliCommand.DOCS_SERVE: cli.docs_serve,
        CliCommand.DEBUG: cli.debug,
        CliCommand.DEPS: cli.deps,
        CliCommand.INIT: cli.init,
        CliCommand.LIST: cli.list,
        CliCommand.PARSE: cli.parse,
        CliCommand.RUN: cli.run,
        CliCommand.RUN_OPERATION: cli.run_operation,
        CliCommand.SEED: cli.seed,
        CliCommand.SHOW: cli.show,
        CliCommand.SNAPSHOT: cli.snapshot,
        CliCommand.SOURCE_FRESHNESS: cli.freshness,
        CliCommand.TEST: cli.test,
        CliCommand.RETRY: cli.retry,
    }
    click_cmd: Optional[ClickCommand] = CMD_DICT.get(command, None)
    if click_cmd is None:
        raise DbtInternalError(f"No command found for name '{command.name}'")
    return format_params(click_cmd.params)


def format_params(params: List[Parameter]) -> ArgsList:
    return [str(x.name) for x in params if not str(x.name).lower().startswith("deprecated_")]

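A sketch of the lookup pair above: `command_args` returns the per-command param names, and `format_params` silently drops the `deprecated_*` shims.

```python
from dbt.cli.types import Command as CliCommand

names = command_args(CliCommand.RUN)
print("defer" in names)             # -> True
print("deprecated_defer" in names)  # -> False; format_params drops the deprecation shims
```
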
@@ -1,121 +1,22 @@
import inspect  # This is temporary for RAT-ing
from copy import copy
from dataclasses import dataclass
from typing import Callable, List, Optional, Union
from pprint import pformat as pf  # This is temporary for RAT-ing

import click
from click.exceptions import (
    Exit as ClickExit,
    BadOptionUsage,
    NoSuchOption,
    UsageError,
)

from dbt.cli import requires, params as p
from dbt.cli.exceptions import (
    DbtInternalException,
    DbtUsageException,
)
from dbt.contracts.graph.manifest import Manifest
from dbt.contracts.results import (
    CatalogArtifact,
    RunExecutionResult,
)
from dbt.events.base_types import EventMsg
from dbt.task.build import BuildTask
from dbt.task.clean import CleanTask
from dbt.task.clone import CloneTask
from dbt.task.compile import CompileTask
from dbt.task.debug import DebugTask
from dbt.task.deps import DepsTask
from dbt.task.freshness import FreshnessTask
from dbt.task.generate import GenerateTask
from dbt.task.init import InitTask
from dbt.task.list import ListTask
from dbt.task.retry import RetryTask
from dbt.task.run import RunTask
from dbt.task.run_operation import RunOperationTask
from dbt.task.seed import SeedTask
from dbt.task.serve import ServeTask
from dbt.task.show import ShowTask
from dbt.task.snapshot import SnapshotTask
from dbt.task.test import TestTask
from dbt.adapters.factory import adapter_management
from dbt.cli import params as p
from dbt.cli.flags import Flags
from dbt.profiler import profiler


@dataclass
class dbtRunnerResult:
    """Contains the result of an invocation of the dbtRunner"""
def cli_runner():
    # Alias "list" to "ls"
    ls = copy(cli.commands["list"])
    ls.hidden = True
    cli.add_command(ls, "ls")

    success: bool

    exception: Optional[BaseException] = None
    result: Union[
        bool,  # debug
        CatalogArtifact,  # docs generate
        List[str],  # list/ls
        Manifest,  # parse
        None,  # clean, deps, init, source
        RunExecutionResult,  # build, compile, run, seed, snapshot, test, run-operation
    ] = None


# Programmatic invocation
class dbtRunner:
    def __init__(
        self,
        manifest: Optional[Manifest] = None,
        callbacks: Optional[List[Callable[[EventMsg], None]]] = None,
    ):
        self.manifest = manifest

        if callbacks is None:
            callbacks = []
        self.callbacks = callbacks

    def invoke(self, args: List[str], **kwargs) -> dbtRunnerResult:
        try:
            dbt_ctx = cli.make_context(cli.name, args)
            dbt_ctx.obj = {
                "manifest": self.manifest,
                "callbacks": self.callbacks,
            }

            for key, value in kwargs.items():
                dbt_ctx.params[key] = value
                # Hack to set parameter source to custom string
                dbt_ctx.set_parameter_source(key, "kwargs")  # type: ignore

            result, success = cli.invoke(dbt_ctx)
            return dbtRunnerResult(
                result=result,
                success=success,
            )
        except requires.ResultExit as e:
            return dbtRunnerResult(
                result=e.result,
                success=False,
            )
        except requires.ExceptionExit as e:
            return dbtRunnerResult(
                exception=e.exception,
                success=False,
            )
        except (BadOptionUsage, NoSuchOption, UsageError) as e:
            return dbtRunnerResult(
                exception=DbtUsageException(e.message),
                success=False,
            )
        except ClickExit as e:
            if e.exit_code == 0:
                return dbtRunnerResult(success=True)
            return dbtRunnerResult(
                exception=DbtInternalException(f"unhandled exit code {e.exit_code}"),
                success=False,
            )
        except BaseException as e:
            return dbtRunnerResult(
                exception=e,
                success=False,
            )
    # Run the cli
    cli()

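For reference, the programmatic entry point sketched in isolation (`my_model` is a placeholder selector):

```python
from dbt.cli.main import dbtRunner, dbtRunnerResult

runner = dbtRunner()
res: dbtRunnerResult = runner.invoke(["run", "--select", "my_model"])
if not res.success:
    print(res.exception)
```
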
# dbt
@@ -126,86 +27,74 @@ class dbtRunner:
    epilog="Specify one of these sub-commands and you can find more help from there.",
)
@click.pass_context
@p.anonymous_usage_stats
@p.cache_selected_only
@p.debug
@p.deprecated_print
@p.enable_legacy_logger
@p.event_buffer_size
@p.fail_fast
@p.log_cache_events
@p.log_format
@p.log_format_file
@p.log_level
@p.log_level_file
@p.log_path
@p.macro_debugging
@p.partial_parse
@p.partial_parse_file_path
@p.populate_cache
@p.print
@p.printer_width
@p.quiet
@p.record_timing_info
@p.send_anonymous_usage_stats
@p.single_threaded
@p.static_parser
@p.use_colors
@p.use_colors_file
@p.use_experimental_parser
@p.version
@p.version_check
@p.warn_error
@p.warn_error_options
@p.write_json
def cli(ctx, **kwargs):
    """An ELT tool for managing your SQL transformations and data models.
    For more documentation on these commands, visit: docs.getdbt.com
    """
    incomplete_flags = Flags()

    # Profiling
    if incomplete_flags.RECORD_TIMING_INFO:
        ctx.with_resource(profiler(enable=True, outfile=incomplete_flags.RECORD_TIMING_INFO))

    # Adapter management
    ctx.with_resource(adapter_management())

    # Version info
    if incomplete_flags.VERSION:
        click.echo(f"`version` called\n ctx.params: {pf(ctx.params)}")
        return
    else:
        del ctx.params["version"]

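The `ctx.with_resource(...)` calls above attach a context manager to the click context: it is entered immediately and exited automatically when the invocation unwinds. A standalone sketch of the same pattern (the manager here is a stand-in, not dbt's):

```python
import contextlib

import click


@contextlib.contextmanager
def adapter_management():  # stand-in for dbt.adapters.factory.adapter_management
    print("adapters ready")
    try:
        yield
    finally:
        print("adapters cleaned up")


@click.group()
@click.pass_context
def demo_cli(ctx):
    # Entered now; click exits it when this context closes.
    ctx.with_resource(adapter_management())
```
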
# dbt build
@cli.command("build")
@click.pass_context
@p.defer
@p.deprecated_defer
@p.exclude
@p.fail_fast
@p.favor_state
@p.deprecated_favor_state
@p.full_refresh
@p.indirect_selection
@p.log_path
@p.models
@p.profile
@p.profiles_dir
@p.project_dir
@p.resource_type
@p.select
@p.selector
@p.show
@p.state
@p.defer_state
@p.deprecated_state
@p.store_failures
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def build(ctx, **kwargs):
    """Run all seeds, models, snapshots, and tests in DAG order"""
    task = BuildTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success
    """Run all Seeds, Models, Snapshots, and tests in DAG order"""
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")


# dbt clean
@@ -215,19 +104,11 @@ def build(ctx, **kwargs):
@p.profiles_dir
@p.project_dir
@p.target
@p.target_path
@p.vars
@requires.postflight
@requires.preflight
@requires.unset_profile
@requires.project
def clean(ctx, **kwargs):
    """Delete all folders in the clean-targets list (usually the dbt_packages and target directories.)"""
    task = CleanTask(ctx.obj["flags"], ctx.obj["project"])

    results = task.run()
    success = task.interpret_results(results)
    return results, success
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")


# dbt docs
@@ -242,41 +123,23 @@ def docs(ctx, **kwargs):
@click.pass_context
@p.compile_docs
@p.defer
@p.deprecated_defer
@p.exclude
@p.favor_state
@p.deprecated_favor_state
@p.log_path
@p.models
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.empty_catalog
@p.state
@p.defer_state
@p.deprecated_state
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest(write=False)
def docs_generate(ctx, **kwargs):
    """Generate the documentation website for your project"""
    task = GenerateTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")


# dbt docs serve
@@ -288,184 +151,81 @@ def docs_generate(ctx, **kwargs):
@p.profiles_dir
@p.project_dir
@p.target
@p.target_path
@p.vars
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
def docs_serve(ctx, **kwargs):
    """Serve the documentation website for your project"""
    task = ServeTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")


# dbt compile
@cli.command("compile")
@click.pass_context
@p.defer
@p.deprecated_defer
@p.exclude
@p.favor_state
@p.deprecated_favor_state
@p.full_refresh
@p.show_output_format
@p.indirect_selection
@p.introspect
@p.log_path
@p.models
@p.parse_only
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.inline
@p.state
@p.defer_state
@p.deprecated_state
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def compile(ctx, **kwargs):
    """Generates executable SQL from source, model, test, and analysis files. Compiled SQL files are written to the
    target/ directory."""
    task = CompileTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success


# dbt show
@cli.command("show")
@click.pass_context
@p.defer
@p.deprecated_defer
@p.exclude
@p.favor_state
@p.deprecated_favor_state
@p.full_refresh
@p.show_output_format
@p.show_limit
@p.indirect_selection
@p.introspect
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.inline
@p.state
@p.defer_state
@p.deprecated_state
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def show(ctx, **kwargs):
    """Generates executable SQL for a named resource or inline query, runs that SQL, and returns a preview of the
    results. Does not materialize anything to the warehouse."""
    task = ShowTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success
    """Generates executable SQL from source, model, test, and analysis files. Compiled SQL files are written to the target/ directory."""
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")


# dbt debug
@cli.command("debug")
@click.pass_context
@p.debug_connection
@p.config_dir
@p.profile
@p.profiles_dir_exists_false
@p.profiles_dir
@p.project_dir
@p.target
@p.vars
@p.version_check
@requires.postflight
@requires.preflight
def debug(ctx, **kwargs):
    """Show information on the current dbt environment and check dependencies, then test the database connection. Not to be confused with the --debug option which increases verbosity."""

    task = DebugTask(
        ctx.obj["flags"],
        None,
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success
    """Show some helpful information about dbt for debugging. Not to be confused with the --debug option which increases verbosity."""
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")


# dbt deps
@cli.command("deps")
@click.pass_context
@p.profile
@p.profiles_dir_exists_false
@p.profiles_dir
@p.project_dir
@p.target
@p.vars
@requires.postflight
@requires.preflight
@requires.unset_profile
@requires.project
def deps(ctx, **kwargs):
    """Pull the most recent version of the dependencies listed in packages.yml"""
    task = DepsTask(ctx.obj["flags"], ctx.obj["project"])
    results = task.run()
    success = task.interpret_results(results)
    return results, success
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")


# dbt init
@cli.command("init")
@click.pass_context
# for backwards compatibility, accept 'project_name' as an optional positional argument
@click.argument("project_name", required=False)
@p.profile
@p.profiles_dir_exists_false
@p.profiles_dir
@p.project_dir
@p.skip_profile_setup
@p.target
@p.vars
@requires.postflight
@requires.preflight
def init(ctx, **kwargs):
    """Initialize a new dbt project."""
    task = InitTask(ctx.obj["flags"], None)

    results = task.run()
    success = task.interpret_results(results)
    return results, success
    """Initialize a new DBT project."""
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")


# dbt list
@@ -480,42 +240,21 @@ def init(ctx, **kwargs):
@p.profiles_dir
@p.project_dir
@p.resource_type
@p.raw_select
@p.selector
@p.state
@p.defer_state
@p.deprecated_state
@p.target
@p.target_path
@p.vars
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def list(ctx, **kwargs):
    """List the resources in your project"""
    task = ListTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success


# Alias "list" to "ls"
ls = copy(cli.commands["list"])
ls.hidden = True
cli.add_command(ls, "ls")
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")


# dbt parse
@cli.command("parse")
@click.pass_context
@p.compile_parse
@p.log_path
@p.profile
@p.profiles_dir
@p.project_dir
@@ -524,157 +263,51 @@ cli.add_command(ls, "ls")
@p.threads
@p.vars
@p.version_check
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest(write_perf_info=True)
@p.write_manifest
def parse(ctx, **kwargs):
    """Parses the project and provides information on performance"""
    # manifest generation and writing happens in @requires.manifest

    return ctx.obj["manifest"], True
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")


# dbt run
@cli.command("run")
@click.pass_context
@p.defer
@p.deprecated_defer
@p.favor_state
@p.deprecated_favor_state
@p.exclude
@p.fail_fast
@p.full_refresh
@p.log_path
@p.models
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.state
@p.defer_state
@p.deprecated_state
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def run(ctx, **kwargs):
    """Compile SQL and execute against the current target database."""
    task = RunTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success


# dbt retry
@cli.command("retry")
@click.pass_context
@p.project_dir
@p.profiles_dir
@p.vars
@p.profile
@p.target
@p.state
@p.threads
@p.fail_fast
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def retry(ctx, **kwargs):
    """Retry the nodes that failed in the previous run."""
    task = RetryTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success


# dbt clone
@cli.command("clone")
@click.pass_context
@p.defer_state
@p.exclude
@p.full_refresh
@p.profile
@p.profiles_dir
@p.project_dir
@p.resource_type
@p.select
@p.selector
@p.state  # required
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
@requires.postflight
def clone(ctx, **kwargs):
    """Create clones of selected nodes based on their location in the manifest provided to --state."""
    task = CloneTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")


# dbt run operation
@cli.command("run-operation")
@click.pass_context
@click.argument("macro")
@p.args
@p.profile
@p.profiles_dir
@p.project_dir
@p.target
@p.target_path
@p.threads
@p.vars
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def run_operation(ctx, **kwargs):
    """Run the named macro with any supplied arguments."""
    task = RunOperationTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")


# dbt seed
@@ -682,75 +315,43 @@ def run_operation(ctx, **kwargs):
@click.pass_context
@p.exclude
@p.full_refresh
@p.log_path
@p.models
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.show
@p.state
@p.defer_state
@p.deprecated_state
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def seed(ctx, **kwargs):
    """Load data from csv files into your data warehouse."""
    task = SeedTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )
    results = task.run()
    success = task.interpret_results(results)
    return results, success
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")


# dbt snapshot
@cli.command("snapshot")
@click.pass_context
@p.defer
@p.deprecated_defer
@p.exclude
@p.favor_state
@p.deprecated_favor_state
@p.models
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.state
@p.defer_state
@p.deprecated_state
@p.target
@p.target_path
@p.threads
@p.vars
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def snapshot(ctx, **kwargs):
    """Execute snapshots defined in your project"""
    task = SnapshotTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")


# dbt source
@@ -764,87 +365,48 @@ def source(ctx, **kwargs):
@source.command("freshness")
@click.pass_context
@p.exclude
@p.models
@p.output_path  # TODO: Is this ok to re-use? We have three different output params, how much can we consolidate?
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.state
@p.defer_state
@p.deprecated_state
@p.target
@p.target_path
@p.threads
@p.vars
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def freshness(ctx, **kwargs):
    """check the current freshness of the project's sources"""
    task = FreshnessTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success


# Alias "source freshness" to "snapshot-freshness"
snapshot_freshness = copy(cli.commands["source"].commands["freshness"])  # type: ignore
snapshot_freshness.hidden = True
cli.commands["source"].add_command(snapshot_freshness, "snapshot-freshness")  # type: ignore
    """Snapshots the current freshness of the project's sources"""
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")


# dbt test
@cli.command("test")
@click.pass_context
@p.defer
@p.deprecated_defer
@p.exclude
@p.fail_fast
@p.favor_state
@p.deprecated_favor_state
@p.indirect_selection
@p.log_path
@p.models
@p.profile
@p.profiles_dir
@p.project_dir
@p.select
@p.selector
@p.state
@p.defer_state
@p.deprecated_state
@p.store_failures
@p.target
@p.target_path
@p.threads
@p.vars
@p.version_check
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def test(ctx, **kwargs):
    """Runs tests on data in deployed models. Run this after `dbt run`"""
    task = TestTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )

    results = task.run()
    success = task.interpret_results(results)
    return results, success
    flags = Flags()
    click.echo(f"`{inspect.stack()[0][3]}` called\n flags: {flags}")


# Support running as a module
if __name__ == "__main__":
    cli()
    cli_runner()

@@ -1,9 +1,5 @@
from click import ParamType, Choice

from dbt.config.utils import parse_cli_yaml_string
from dbt.exceptions import ValidationError, DbtValidationError, OptionNotYamlDictError

from dbt.helper_types import WarnErrorOptions
from click import ParamType
import yaml


class YAML(ParamType):
@@ -16,26 +12,11 @@ class YAML(ParamType):
        if not isinstance(value, str):
            self.fail(f"Cannot load YAML from type {type(value)}", param, ctx)
        try:
            param_option_name = param.opts[0] if param.opts else param.name
            return parse_cli_yaml_string(value, param_option_name.strip("-"))
        except (ValidationError, DbtValidationError, OptionNotYamlDictError):
            return yaml.load(value, Loader=yaml.Loader)
        except yaml.parser.ParserError:
            self.fail(f"String '{value}' is not valid YAML", param, ctx)


class WarnErrorOptionsType(YAML):
    """The Click WarnErrorOptions type. Converts YAML strings into objects."""

    name = "WarnErrorOptionsType"

    def convert(self, value, param, ctx):
        # this function is being used by param in click
        include_exclude = super().convert(value, param, ctx)

        return WarnErrorOptions(
            include=include_exclude.get("include", []), exclude=include_exclude.get("exclude", [])
        )

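A sketch of how `WarnErrorOptionsType` plugs into a click option (the declaration and the event name in the help string are illustrative): the YAML string is converted into a `WarnErrorOptions` object at parse time.

```python
import click

# Sketch: a click option backed by the custom param type above.
warn_error_options = click.option(
    "--warn-error-options",
    envvar="DBT_WARN_ERROR_OPTIONS",
    type=WarnErrorOptionsType(),
    default="{}",
    help='e.g. \'{"include": "all", "exclude": ["SomeEventName"]}\' (names illustrative)',
)
```
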
class Truthy(ParamType):
    """The Click Truthy type. Converts strings into a "truthy" type"""

@@ -50,13 +31,3 @@ class Truthy(ParamType):
            return None
        else:
            return value


class ChoiceTuple(Choice):
    name = "CHOICE_TUPLE"

    def convert(self, value, param, ctx):
        for value_item in value:
            super().convert(value_item, param, ctx)

        return value

@@ -1,75 +0,0 @@
import click
import inspect
import typing as t
from click import Context
from dbt.cli.option_types import ChoiceTuple


# Implementation from: https://stackoverflow.com/a/48394004
# Note MultiOption options must be specified with type=tuple or type=ChoiceTuple (https://github.com/pallets/click/issues/2012)
class MultiOption(click.Option):
    def __init__(self, *args, **kwargs):
        self.save_other_options = kwargs.pop("save_other_options", True)
        nargs = kwargs.pop("nargs", -1)
        assert nargs == -1, "nargs, if set, must be -1 not {}".format(nargs)
        super(MultiOption, self).__init__(*args, **kwargs)
        self._previous_parser_process = None
        self._eat_all_parser = None

        # validate that multiple=True
        multiple = kwargs.pop("multiple", None)
        msg = f"MultiOption named `{self.name}` must have multiple=True (rather than {multiple})"
        assert multiple, msg

        # validate that type=tuple or type=ChoiceTuple
        option_type = kwargs.pop("type", None)
        msg = f"MultiOption named `{self.name}` must be tuple or ChoiceTuple (rather than {option_type})"
        if inspect.isclass(option_type):
            assert issubclass(option_type, tuple), msg
        else:
            assert isinstance(option_type, ChoiceTuple), msg

    def add_to_parser(self, parser, ctx):
        def parser_process(value, state):
            # method to hook to the parser.process
            done = False
            value = [value]
            if self.save_other_options:
                # grab everything up to the next option
                while state.rargs and not done:
                    for prefix in self._eat_all_parser.prefixes:
                        if state.rargs[0].startswith(prefix):
                            done = True
                    if not done:
                        value.append(state.rargs.pop(0))
            else:
                # grab everything remaining
                value += state.rargs
                state.rargs[:] = []
            value = tuple(value)
            # call the actual process
            self._previous_parser_process(value, state)

        retval = super(MultiOption, self).add_to_parser(parser, ctx)
        for name in self.opts:
            our_parser = parser._long_opt.get(name) or parser._short_opt.get(name)
            if our_parser:
                self._eat_all_parser = our_parser
                self._previous_parser_process = our_parser.process
                our_parser.process = parser_process
                break
        return retval

    def type_cast_value(self, ctx: Context, value: t.Any) -> t.Any:
        def flatten(data):
            if isinstance(data, tuple):
                for x in data:
                    yield from flatten(x)
            else:
                yield data

        # there will be nested tuples to flatten when multiple=True
        value = super(MultiOption, self).type_cast_value(ctx, value)
        if value:
            value = tuple(flatten(value))
        return value

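A usage sketch for `MultiOption` (option name and choices are illustrative): `multiple=True` plus a tuple-ish type are mandatory, and the option then eats all space-delimited values up to the next flag.

```python
import click

from dbt.cli.option_types import ChoiceTuple
from dbt.cli.options import MultiOption


@click.command()
@click.option(
    "--nodes",
    cls=MultiOption,
    multiple=True,
    type=ChoiceTuple(["model_a", "model_b", "model_c"], case_sensitive=False),
)
def demo(nodes):
    # `demo --nodes model_a model_b` -> nodes == ("model_a", "model_b")
    click.echo(nodes)
```
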
@@ -1,10 +1,20 @@
from pathlib import Path
from pathlib import Path, PurePath

import click
from dbt.cli.options import MultiOption
from dbt.cli.option_types import YAML, ChoiceTuple, WarnErrorOptionsType
from dbt.cli.option_types import YAML
from dbt.cli.resolvers import default_project_dir, default_profiles_dir
from dbt.version import get_version_information


# TODO: The name (reflected in flags) is a correction!
# The original name was `SEND_ANONYMOUS_USAGE_STATS` and used an env var called "DBT_SEND_ANONYMOUS_USAGE_STATS"
# Both of which break existing naming conventions (doesn't match param flag).
# This will need to be fixed before use in the main codebase and communicated as a change to the community!
anonymous_usage_stats = click.option(
    "--anonymous-usage-stats/--no-anonymous-usage-stats",
    envvar="DBT_ANONYMOUS_USAGE_STATS",
    help="Send anonymous usage stats to dbt Labs.",
    default=True,
)

args = click.option(
    "--args",
@@ -23,28 +33,28 @@ browser = click.option(
cache_selected_only = click.option(
    "--cache-selected-only/--no-cache-selected-only",
    envvar="DBT_CACHE_SELECTED_ONLY",
    help="At start of run, populate relational cache only for schemas containing selected nodes, or for all schemas of interest.",
)

introspect = click.option(
    "--introspect/--no-introspect",
    envvar="DBT_INTROSPECT",
    help="Whether to scaffold introspective queries as part of compilation",
    default=True,
    help="Pre cache database objects relevant to selected resource only.",
)

compile_docs = click.option(
    "--compile/--no-compile",
    envvar=None,
    help="Whether or not to run 'dbt compile' as part of docs generation",
    help="Wether or not to run 'dbt compile' as part of docs generation",
    default=True,
)

compile_parse = click.option(
    "--compile/--no-compile",
    envvar=None,
    help="TODO: No help text currently available",
    default=True,
)

config_dir = click.option(
    "--config-dir",
    envvar=None,
    help="Print a system-specific command to access the directory that the current dbt project is searching for a profiles.yml. Then, exit. This flag renders other debug step flags no-ops.",
    is_flag=True,
    help="If specified, DBT will show path information for this project",
    type=click.STRING,
)

debug = click.option(
@@ -54,19 +64,14 @@ debug = click.option(
    help="Display debug logging during dbt execution. Useful for debugging and making bug reports.",
)

# flag was previously named DEFER_MODE
# TODO: The env var and name (reflected in flags) are corrections!
# The original name was `DEFER_MODE` and used an env var called "DBT_DEFER_TO_STATE"
# Both of which break existing naming conventions.
# This will need to be fixed before use in the main codebase and communicated as a change to the community!
defer = click.option(
    "--defer/--no-defer",
    envvar="DBT_DEFER",
    help="If set, resolve unselected nodes by deferring to the manifest within the --state directory.",
)

deprecated_defer = click.option(
    "--deprecated-defer",
    envvar="DBT_DEFER_TO_STATE",
    help="Internal flag for deprecating old env var.",
    default=False,
    hidden=True,
    help="If set, defer to the state variable for resolving unselected nodes.",
)

enable_legacy_logger = click.option(
@@ -75,15 +80,16 @@ enable_legacy_logger = click.option(
    hidden=True,
)

exclude = click.option(
    "--exclude",
    envvar=None,
    type=tuple,
    cls=MultiOption,
    multiple=True,
    help="Specify the nodes to exclude.",
event_buffer_size = click.option(
    "--event-buffer-size",
    envvar="DBT_EVENT_BUFFER_SIZE",
    help="Sets the max number of events to buffer in EVENT_HISTORY.",
    default=100000,
    type=click.INT,
)

exclude = click.option("--exclude", envvar=None, help="Specify the nodes to exclude.")

fail_fast = click.option(
    "--fail-fast/--no-fail-fast",
    "-x/ ",
@@ -91,18 +97,6 @@ fail_fast = click.option(
    help="Stop execution on first failure.",
)

favor_state = click.option(
    "--favor-state/--no-favor-state",
    envvar="DBT_FAVOR_STATE",
    help="If set, defer to the argument provided to the state flag for resolving unselected nodes, even if the node(s) exist as a database object in the current environment.",
)

deprecated_favor_state = click.option(
    "--deprecated-favor-state",
    envvar="DBT_FAVOR_STATE_MODE",
    help="Internal flag for deprecating old env var.",
)

full_refresh = click.option(
    "--full-refresh",
    "-f",
@@ -114,69 +108,30 @@ full_refresh = click.option(
indirect_selection = click.option(
    "--indirect-selection",
    envvar="DBT_INDIRECT_SELECTION",
    help="Choose which tests to select that are adjacent to selected resources. Eager is most inclusive, cautious is most exclusive, and buildable is in between. Empty includes no tests at all.",
    type=click.Choice(["eager", "cautious", "buildable", "empty"], case_sensitive=False),
    help="Select all tests that are adjacent to selected resources, even if they those resources have been explicitly selected.",
    type=click.Choice(["eager", "cautious"], case_sensitive=False),
    default="eager",
)

log_cache_events = click.option(
    "--log-cache-events/--no-log-cache-events",
    help="Enable verbose logging for relational cache events to help when debugging.",
    help="Enable verbose adapter cache logging.",
    envvar="DBT_LOG_CACHE_EVENTS",
)

log_format = click.option(
    "--log-format",
    envvar="DBT_LOG_FORMAT",
    help="Specify the format of logging to the console and the log file. Use --log-format-file to configure the format for the log file differently than the console.",
    type=click.Choice(["text", "debug", "json", "default"], case_sensitive=False),
    help="Specify the log format, overriding the command's default.",
    type=click.Choice(["text", "json", "default"], case_sensitive=False),
    default="default",
)

log_format_file = click.option(
    "--log-format-file",
    envvar="DBT_LOG_FORMAT_FILE",
    help="Specify the format of logging to the log file by overriding the default value and the general --log-format setting.",
    type=click.Choice(["text", "debug", "json", "default"], case_sensitive=False),
    default="debug",
)

log_level = click.option(
    "--log-level",
    envvar="DBT_LOG_LEVEL",
    help="Specify the minimum severity of events that are logged to the console and the log file. Use --log-level-file to configure the severity for the log file differently than the console.",
    type=click.Choice(["debug", "info", "warn", "error", "none"], case_sensitive=False),
    default="info",
)

log_level_file = click.option(
    "--log-level-file",
    envvar="DBT_LOG_LEVEL_FILE",
    help="Specify the minimum severity of events that are logged to the log file by overriding the default value and the general --log-level setting.",
    type=click.Choice(["debug", "info", "warn", "error", "none"], case_sensitive=False),
    default="debug",
)

use_colors = click.option(
    "--use-colors/--no-use-colors",
    envvar="DBT_USE_COLORS",
    help="Specify whether log output is colorized in the console and the log file. Use --use-colors-file/--no-use-colors-file to colorize the log file differently than the console.",
    default=True,
)

use_colors_file = click.option(
    "--use-colors-file/--no-use-colors-file",
    envvar="DBT_USE_COLORS_FILE",
    help="Specify whether log file output is colorized by overriding the default value and the general --use-colors/--no-use-colors setting.",
    default=True,
)

log_path = click.option(
    "--log-path",
    envvar="DBT_LOG_PATH",
    help="Configure the 'log-path'. Only applies this setting for the current run. Overrides the 'DBT_LOG_PATH' if it is set.",
    default=None,
    type=click.Path(resolve_path=True, path_type=Path),
    type=click.Path(),
)

macro_debugging = click.option(
@@ -185,51 +140,41 @@ macro_debugging = click.option(
    hidden=True,
)

# This less standard usage of --output where output_path below is more standard
models = click.option(
    "-m",
    "-s",
    "models",
    envvar=None,
    help="Specify the nodes to include.",
    multiple=True,
)

output = click.option(
    "--output",
    envvar=None,
    help="Specify the output format: either JSON or a newline-delimited list of selectors, paths, or names",
    help="TODO: No current help text",
    type=click.Choice(["json", "name", "path", "selector"], case_sensitive=False),
    default="selector",
)

show_output_format = click.option(
    "--output",
    envvar=None,
    help="Output format for dbt compile and dbt show",
    type=click.Choice(["json", "text"], case_sensitive=False),
    default="text",
)

show_limit = click.option(
    "--limit",
    envvar=None,
    help="Limit the number of results returned by dbt show",
    type=click.INT,
    default=5,
    default="name",
)

output_keys = click.option(
    "--output-keys",
    envvar=None,
    help=(
        "Space-delimited listing of node properties to include as custom keys for JSON output "
        "(e.g. `--output json --output-keys name resource_type description`)"
    ),
    type=tuple,
    cls=MultiOption,
    multiple=True,
    default=[],
    "--output-keys", envvar=None, help="TODO: No current help text", type=click.STRING
)

output_path = click.option(
    "--output",
    "-o",
    envvar=None,
    help="Specify the output path for the JSON report. By default, outputs to 'target/sources.json'",
    help="Specify the output path for the json report. By default, outputs to 'target/sources.json'",
    type=click.Path(file_okay=True, dir_okay=False, writable=True),
    default=None,
    default=PurePath.joinpath(Path.cwd(), "target/sources.json"),
)

parse_only = click.option(
    "--parse-only",
    envvar=None,
    help="TODO: No help text currently available",
    is_flag=True,
)

partial_parse = click.option(
@@ -239,22 +184,6 @@ partial_parse = click.option(
    default=True,
)

partial_parse_file_path = click.option(
    "--partial-parse-file-path",
    envvar="DBT_PARTIAL_PARSE_FILE_PATH",
    help="Internal flag for path to partial_parse.manifest file.",
    default=None,
    hidden=True,
    type=click.Path(exists=True, dir_okay=False, resolve_path=True),
)

populate_cache = click.option(
    "--populate-cache/--no-populate-cache",
    envvar="DBT_POPULATE_CACHE",
    help="At start of run, use `show` or `information_schema` queries to populate a relational cache, which can speed up subsequent materializations.",
    default=True,
)

port = click.option(
    "--port",
    envvar=None,
@@ -263,6 +192,10 @@ port = click.option(
    type=click.INT,
)

# TODO: The env var and name (reflected in flags) are corrections!
# The original name was `NO_PRINT` and used the env var `DBT_NO_PRINT`.
# Both of which break existing naming conventions.
# This will need to be fixed before use in the main codebase and communicated as a change to the community!
print = click.option(
    "--print/--no-print",
    envvar="DBT_PRINT",
@@ -270,15 +203,6 @@ print = click.option(
    default=True,
)

deprecated_print = click.option(
    "--deprecated-print/--deprecated-no-print",
    envvar="DBT_NO_PRINT",
    help="Internal flag for deprecating old env var.",
    default=True,
    hidden=True,
    callback=lambda ctx, param, value: not value,
)

printer_width = click.option(
    "--printer-width",
    envvar="DBT_PRINTER_WIDTH",
@@ -297,32 +221,20 @@ profiles_dir = click.option(
    "--profiles-dir",
    envvar="DBT_PROFILES_DIR",
    help="Which directory to look in for the profiles.yml file. If not set, dbt will look in the current working directory first, then HOME/.dbt/",
    default=default_profiles_dir,
    default=default_profiles_dir(),
    type=click.Path(exists=True),
)

# `dbt debug` uses this because it implements custom behaviour for non-existent profiles.yml directories
# `dbt deps` does not load a profile at all
# `dbt init` will write profiles.yml if it doesn't yet exist
profiles_dir_exists_false = click.option(
    "--profiles-dir",
    envvar="DBT_PROFILES_DIR",
    help="Which directory to look in for the profiles.yml file. If not set, dbt will look in the current working directory first, then HOME/.dbt/",
    default=default_profiles_dir,
    type=click.Path(exists=False),
)

project_dir = click.option(
    "--project-dir",
    envvar="DBT_PROJECT_DIR",
    envvar=None,
    help="Which directory to look in for the dbt_project.yml file. Default is the current working directory and its parents.",
    default=default_project_dir,
    default=default_project_dir(),
    type=click.Path(exists=True),
)

quiet = click.option(
    "--quiet/--no-quiet",
    "-q",
    envvar="DBT_QUIET",
    help="Suppress all non-error logging to stdout. Does not affect {{ print() }} macro calls.",
)
@@ -336,11 +248,10 @@ record_timing_info = click.option(
)

resource_type = click.option(
    "--resource-types",
    "--resource-type",
    envvar=None,
    help="Restricts the types of resources that dbt will include",
    type=ChoiceTuple(
    help="TODO: No current help text",
    type=click.Choice(
        [
            "metric",
            "source",
@@ -355,120 +266,35 @@ resource_type = click.option(
        ],
        case_sensitive=False,
    ),
    cls=MultiOption,
    multiple=True,
    default=(),
    default="default",
)

model_decls = ("-m", "--models", "--model")
|
||||
select_decls = ("-s", "--select")
|
||||
select_attrs = {
|
||||
"envvar": None,
|
||||
"help": "Specify the nodes to include.",
|
||||
"cls": MultiOption,
|
||||
"multiple": True,
|
||||
"type": tuple,
|
||||
}
|
||||
|
||||
inline = click.option(
|
||||
"--inline",
|
||||
envvar=None,
|
||||
help="Pass SQL inline to dbt compile and show",
|
||||
)
|
||||
|
||||
# `--select` and `--models` are analogous for most commands except `dbt list` for legacy reasons.
|
||||
# Most CLI arguments should use the combined `select` option that aliases `--models` to `--select`.
|
||||
# However, if you need to split out these separators (like `dbt ls`), use the `models` and `raw_select` options instead.
|
||||
# See https://github.com/dbt-labs/dbt-core/pull/6774#issuecomment-1408476095 for more info.
|
||||
models = click.option(*model_decls, **select_attrs)
|
||||
raw_select = click.option(*select_decls, **select_attrs)
|
||||
select = click.option(*select_decls, *model_decls, **select_attrs)
|
||||
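Because `select_attrs` is a plain dict and the decls are tuples, each of the three variants above is just a different unpacking of the same declarations. A self-contained sketch of that sharing pattern with stock click (names here are illustrative, not dbt's):

import click

decls = ("-s", "--select")
model_decls = ("-m", "--models")
attrs = {"multiple": True, "help": "Specify the nodes to include."}

@click.command()
@click.option(*decls, *model_decls, **attrs)
def run(select):
    # both `-s a` and `-m b` land in the same `select` tuple: ("a", "b")
    click.echo(select)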

selector = click.option(
    "--selector",
    envvar=None,
    help="The selector name to use, as defined in selectors.yml",
)

send_anonymous_usage_stats = click.option(
    "--send-anonymous-usage-stats/--no-send-anonymous-usage-stats",
    envvar="DBT_SEND_ANONYMOUS_USAGE_STATS",
    help="Send anonymous usage stats to dbt Labs.",
    default=True,
    "--selector", envvar=None, help="The selector name to use, as defined in selectors.yml"
)

show = click.option(
    "--show",
    envvar=None,
    help="Show a sample of the loaded data in the terminal",
    is_flag=True,
)

# TODO: The env var is a correction!
# The original env var was `DBT_TEST_SINGLE_THREADED`.
# This broke the existing naming convention.
# This will need to be communicated as a change to the community!
#
# N.B. This flag is only used for testing, hence it's hidden from help text.
single_threaded = click.option(
    "--single-threaded/--no-single-threaded",
    envvar="DBT_SINGLE_THREADED",
    default=False,
    hidden=True,
    "--show", envvar=None, help="Show a sample of the loaded data in the terminal", is_flag=True
)

skip_profile_setup = click.option(
    "--skip-profile-setup",
    "-s",
    envvar=None,
    help="Skip interactive profile setup.",
    is_flag=True,
)

empty_catalog = click.option(
    "--empty-catalog",
    help="If specified, generate empty catalog.json file during the `dbt docs generate` command.",
    default=False,
    is_flag=True,
    "--skip-profile-setup", "-s", envvar=None, help="Skip interative profile setup.", is_flag=True
)

# TODO: The env var and name (reflected in flags) are corrections!
# The original name was `ARTIFACT_STATE_PATH` and used the env var `DBT_ARTIFACT_STATE_PATH`.
# Both of which break existing naming conventions.
# This will need to be fixed before use in the main codebase and communicated as a change to the community!
state = click.option(
    "--state",
    envvar="DBT_STATE",
    help="Unless overridden, use this state directory for both state comparison and deferral.",
    type=click.Path(
        dir_okay=True,
        file_okay=False,
        readable=True,
        resolve_path=False,
        path_type=Path,
    ),
)

defer_state = click.option(
    "--defer-state",
    envvar="DBT_DEFER_STATE",
    help="Override the state directory for deferral only.",
    type=click.Path(
        dir_okay=True,
        file_okay=False,
        readable=True,
        resolve_path=False,
        path_type=Path,
    ),
)

deprecated_state = click.option(
    "--deprecated-state",
    envvar="DBT_ARTIFACT_STATE_PATH",
    help="Internal flag for deprecating old env var.",
    hidden=True,
    help="If set, use the given directory as the source for json files to compare with this project.",
    type=click.Path(
        dir_okay=True,
        exists=True,
        file_okay=False,
        readable=True,
        resolve_path=True,
        path_type=Path,
    ),
)

@@ -487,10 +313,7 @@ store_failures = click.option(
)

target = click.option(
    "--target",
    "-t",
    envvar=None,
    help="Which target to load for the given profile",
    "--target", "-t", envvar=None, help="Which target to load for the given profile"
)

target_path = click.option(
@@ -500,21 +323,21 @@ target_path = click.option(
    type=click.Path(),
)

debug_connection = click.option(
    "--connection",
    envvar=None,
    help="Test the connection to the target database independent of dependency checks.",
    is_flag=True,
)

threads = click.option(
    "--threads",
    envvar=None,
    help="Specify number of threads to use while executing models. Overrides settings in profiles.yml.",
    default=None,
    default=1,
    type=click.INT,
)

use_colors = click.option(
    "--use-colors/--no-use-colors",
    envvar="DBT_USE_COLORS",
    help="Output is colorized by default and may also be set in a profile or at the command line.",
    default=True,
)

use_experimental_parser = click.option(
    "--use-experimental-parser/--no-use-experimental-parser",
    envvar="DBT_USE_EXPERIMENTAL_PARSER",
@@ -526,58 +349,38 @@ vars = click.option(
    envvar=None,
    help="Supply variables to the project. This argument overrides variables defined in your dbt_project.yml file. This argument should be a YAML string, eg. '{my_variable: my_value}'",
    type=YAML(),
    default="{}",
)


# TODO: when legacy flags are deprecated use
# click.version_option instead of a callback
def _version_callback(ctx, _param, value):
    if not value or ctx.resilient_parsing:
        return
    click.echo(get_version_information())
    ctx.exit()


version = click.option(
    "--version",
    "-V",
    "-v",
    callback=_version_callback,
    envvar=None,
    expose_value=False,
    help="Show version information and exit",
    is_eager=True,
    help="Show version information",
    is_flag=True,
)

version_check = click.option(
    "--version-check/--no-version-check",
    envvar="DBT_VERSION_CHECK",
    help="If set, ensure the installed dbt version matches the require-dbt-version specified in the dbt_project.yml file (if any). Otherwise, allow them to differ.",
    help="Ensure dbt's version matches the one specified in the dbt_project.yml file ('require-dbt-version')",
    default=True,
)

warn_error = click.option(
    "--warn-error",
    "--warn-error/--no-warn-error",
    envvar="DBT_WARN_ERROR",
    help="If dbt would normally warn, instead raise an exception. Examples include --select that selects nothing, deprecations, configurations with no associated models, invalid test configurations, and missing sources/refs in tests.",
    default=None,
    is_flag=True,
)

warn_error_options = click.option(
    "--warn-error-options",
    envvar="DBT_WARN_ERROR_OPTIONS",
    default="{}",
    help="""If dbt would normally warn, instead raise an exception based on include/exclude configuration. Examples include --select that selects nothing, deprecations, configurations with no associated models, invalid test configurations,
    and missing sources/refs in tests. This argument should be a YAML string, with keys 'include' or 'exclude'. eg. '{"include": "all", "exclude": ["NoNodesForSelectionCriteria"]}'""",
    type=WarnErrorOptionsType(),
    help="If dbt would normally warn, instead raise an exception. Examples include --models that selects nothing, deprecations, configurations with no associated models, invalid test configurations, and missing sources/refs in tests.",
)

write_json = click.option(
    "--write-json/--no-write-json",
    envvar="DBT_WRITE_JSON",
    help="Whether or not to write the manifest.json and run_results.json files to the target directory",
    help="Writing the manifest and run_results.json files to disk",
    default=True,
)

write_manifest = click.option(
    "--write-manifest/--no-write-manifest",
    envvar=None,
    help="TODO: No help text currently available",
    default=True,
)
|
||||
import dbt.tracking
|
||||
from dbt.version import installed as installed_version
|
||||
from dbt.adapters.factory import adapter_management, register_adapter
|
||||
from dbt.flags import set_flags, get_flag_dict
|
||||
from dbt.cli.exceptions import (
|
||||
ExceptionExit,
|
||||
ResultExit,
|
||||
)
|
||||
from dbt.cli.flags import Flags
|
||||
from dbt.config import RuntimeConfig
|
||||
from dbt.config.runtime import load_project, load_profile, UnsetProfile
|
||||
from dbt.events.functions import fire_event, LOG_VERSION, set_invocation_id, setup_event_logger
|
||||
from dbt.events.types import (
|
||||
CommandCompleted,
|
||||
MainReportVersion,
|
||||
MainReportArgs,
|
||||
MainTrackingUserState,
|
||||
)
|
||||
from dbt.events.helpers import get_json_string_utcnow
|
||||
from dbt.events.types import MainEncounteredError, MainStackTrace
|
||||
from dbt.exceptions import Exception as DbtException, DbtProjectError, FailFastError
|
||||
from dbt.parser.manifest import ManifestLoader, write_manifest
|
||||
from dbt.profiler import profiler
|
||||
from dbt.tracking import active_user, initialize_from_flags, track_run
|
||||
from dbt.utils import cast_dict_to_dict_of_strings
|
||||
from dbt.plugins import set_up_plugin_manager, get_plugin_manager
|
||||
|
||||
from click import Context
|
||||
from functools import update_wrapper
|
||||
import time
|
||||
import traceback
|
||||
|
||||
|
||||
def preflight(func):
|
||||
def wrapper(*args, **kwargs):
|
||||
ctx = args[0]
|
||||
assert isinstance(ctx, Context)
|
||||
ctx.obj = ctx.obj or {}
|
||||
|
||||
# Flags
|
||||
flags = Flags(ctx)
|
||||
ctx.obj["flags"] = flags
|
||||
set_flags(flags)
|
||||
|
||||
# Logging
|
||||
callbacks = ctx.obj.get("callbacks", [])
|
||||
set_invocation_id()
|
||||
setup_event_logger(flags=flags, callbacks=callbacks)
|
||||
|
||||
# Tracking
|
||||
initialize_from_flags(flags.SEND_ANONYMOUS_USAGE_STATS, flags.PROFILES_DIR)
|
||||
ctx.with_resource(track_run(run_command=flags.WHICH))
|
||||
|
||||
# Now that we have our logger, fire away!
|
||||
fire_event(MainReportVersion(version=str(installed_version), log_version=LOG_VERSION))
|
||||
flags_dict_str = cast_dict_to_dict_of_strings(get_flag_dict())
|
||||
fire_event(MainReportArgs(args=flags_dict_str))
|
||||
|
||||
# Deprecation warnings
|
||||
flags.fire_deprecations()
|
||||
|
||||
if active_user is not None: # mypy appeasement, always true
|
||||
fire_event(MainTrackingUserState(user_state=active_user.state()))
|
||||
|
||||
# Profiling
|
||||
if flags.RECORD_TIMING_INFO:
|
||||
ctx.with_resource(profiler(enable=True, outfile=flags.RECORD_TIMING_INFO))
|
||||
|
||||
# Adapter management
|
||||
ctx.with_resource(adapter_management())
|
||||
|
||||
return func(*args, **kwargs)
|
||||
|
||||
return update_wrapper(wrapper, func)
|
||||
|
||||
|
||||
def postflight(func):
|
||||
"""The decorator that handles all exception handling for the click commands.
|
||||
This decorator must be used before any other decorators that may throw an exception."""
|
||||
|
||||
def wrapper(*args, **kwargs):
|
||||
ctx = args[0]
|
||||
start_func = time.perf_counter()
|
||||
success = False
|
||||
|
||||
try:
|
||||
result, success = func(*args, **kwargs)
|
||||
except FailFastError as e:
|
||||
fire_event(MainEncounteredError(exc=str(e)))
|
||||
raise ResultExit(e.result)
|
||||
except DbtException as e:
|
||||
fire_event(MainEncounteredError(exc=str(e)))
|
||||
raise ExceptionExit(e)
|
||||
except BaseException as e:
|
||||
fire_event(MainEncounteredError(exc=str(e)))
|
||||
fire_event(MainStackTrace(stack_trace=traceback.format_exc()))
|
||||
raise ExceptionExit(e)
|
||||
finally:
|
||||
fire_event(
|
||||
CommandCompleted(
|
||||
command=ctx.command_path,
|
||||
success=success,
|
||||
completed_at=get_json_string_utcnow(),
|
||||
elapsed=time.perf_counter() - start_func,
|
||||
)
|
||||
)
|
||||
|
||||
if not success:
|
||||
raise ResultExit(result)
|
||||
|
||||
return (result, success)
|
||||
|
||||
return update_wrapper(wrapper, func)
|
||||
|
||||
|
||||
# TODO: UnsetProfile is necessary for deps and clean to load a project.
|
||||
# This decorator and its usage can be removed once https://github.com/dbt-labs/dbt-core/issues/6257 is closed.
|
||||
def unset_profile(func):
|
||||
def wrapper(*args, **kwargs):
|
||||
ctx = args[0]
|
||||
assert isinstance(ctx, Context)
|
||||
|
||||
profile = UnsetProfile()
|
||||
ctx.obj["profile"] = profile
|
||||
|
||||
return func(*args, **kwargs)
|
||||
|
||||
return update_wrapper(wrapper, func)
|
||||
|
||||
|
||||
def profile(func):
|
||||
def wrapper(*args, **kwargs):
|
||||
ctx = args[0]
|
||||
assert isinstance(ctx, Context)
|
||||
|
||||
flags = ctx.obj["flags"]
|
||||
# TODO: Generalize safe access to flags.THREADS:
|
||||
# https://github.com/dbt-labs/dbt-core/issues/6259
|
||||
threads = getattr(flags, "THREADS", None)
|
||||
profile = load_profile(flags.PROJECT_DIR, flags.VARS, flags.PROFILE, flags.TARGET, threads)
|
||||
ctx.obj["profile"] = profile
|
||||
|
||||
return func(*args, **kwargs)
|
||||
|
||||
return update_wrapper(wrapper, func)
|
||||
|
||||
|
||||
def project(func):
|
||||
def wrapper(*args, **kwargs):
|
||||
ctx = args[0]
|
||||
assert isinstance(ctx, Context)
|
||||
|
||||
# TODO: Decouple target from profile, and remove the need for profile here:
|
||||
# https://github.com/dbt-labs/dbt-core/issues/6257
|
||||
if not ctx.obj.get("profile"):
|
||||
raise DbtProjectError("profile required for project")
|
||||
|
||||
flags = ctx.obj["flags"]
|
||||
project = load_project(
|
||||
flags.PROJECT_DIR, flags.VERSION_CHECK, ctx.obj["profile"], flags.VARS
|
||||
)
|
||||
ctx.obj["project"] = project
|
||||
|
||||
# Plugins
|
||||
set_up_plugin_manager(project_name=project.project_name)
|
||||
|
||||
if dbt.tracking.active_user is not None:
|
||||
project_id = None if project is None else project.hashed_name()
|
||||
|
||||
dbt.tracking.track_project_id({"project_id": project_id})
|
||||
|
||||
return func(*args, **kwargs)
|
||||
|
||||
return update_wrapper(wrapper, func)
|
||||
|
||||
|
||||
def runtime_config(func):
|
||||
"""A decorator used by click command functions for generating a runtime
|
||||
config given a profile and project.
|
||||
"""
|
||||
|
||||
def wrapper(*args, **kwargs):
|
||||
ctx = args[0]
|
||||
assert isinstance(ctx, Context)
|
||||
|
||||
req_strs = ["profile", "project"]
|
||||
reqs = [ctx.obj.get(req_str) for req_str in req_strs]
|
||||
|
||||
if None in reqs:
|
||||
raise DbtProjectError("profile and project required for runtime_config")
|
||||
|
||||
config = RuntimeConfig.from_parts(
|
||||
ctx.obj["project"],
|
||||
ctx.obj["profile"],
|
||||
ctx.obj["flags"],
|
||||
)
|
||||
|
||||
ctx.obj["runtime_config"] = config
|
||||
|
||||
if dbt.tracking.active_user is not None:
|
||||
adapter_type = (
|
||||
getattr(config.credentials, "type", None)
|
||||
if hasattr(config, "credentials")
|
||||
else None
|
||||
)
|
||||
adapter_unique_id = (
|
||||
config.credentials.hashed_unique_field()
|
||||
if hasattr(config, "credentials")
|
||||
else None
|
||||
)
|
||||
|
||||
dbt.tracking.track_adapter_info(
|
||||
{
|
||||
"adapter_type": adapter_type,
|
||||
"adapter_unique_id": adapter_unique_id,
|
||||
}
|
||||
)
|
||||
|
||||
return func(*args, **kwargs)
|
||||
|
||||
return update_wrapper(wrapper, func)
|
||||
|
||||
|
||||
def manifest(*args0, write=True, write_perf_info=False):
|
||||
"""A decorator used by click command functions for generating a manifest
|
||||
given a profile, project, and runtime config. This also registers the adapter
|
||||
from the runtime config and conditionally writes the manifest to disk.
|
||||
"""
|
||||
|
||||
def outer_wrapper(func):
|
||||
def wrapper(*args, **kwargs):
|
||||
ctx = args[0]
|
||||
assert isinstance(ctx, Context)
|
||||
|
||||
req_strs = ["profile", "project", "runtime_config"]
|
||||
reqs = [ctx.obj.get(dep) for dep in req_strs]
|
||||
|
||||
if None in reqs:
|
||||
raise DbtProjectError("profile, project, and runtime_config required for manifest")
|
||||
|
||||
runtime_config = ctx.obj["runtime_config"]
|
||||
register_adapter(runtime_config)
|
||||
|
||||
# a manifest has already been set on the context, so don't overwrite it
|
||||
if ctx.obj.get("manifest") is None:
|
||||
manifest = ManifestLoader.get_full_manifest(
|
||||
runtime_config,
|
||||
write_perf_info=write_perf_info,
|
||||
)
|
||||
|
||||
ctx.obj["manifest"] = manifest
|
||||
if write and ctx.obj["flags"].write_json:
|
||||
write_manifest(manifest, runtime_config.project_target_path)
|
||||
pm = get_plugin_manager(runtime_config.project_name)
|
||||
plugin_artifacts = pm.get_manifest_artifacts(manifest)
|
||||
for path, plugin_artifact in plugin_artifacts.items():
|
||||
plugin_artifact.write(path)
|
||||
|
||||
return func(*args, **kwargs)
|
||||
|
||||
return update_wrapper(wrapper, func)
|
||||
|
||||
# if there are no args, the decorator was used without params @decorator
|
||||
# otherwise, the decorator was called with params @decorator(arg)
|
||||
if len(args0) == 0:
|
||||
return outer_wrapper
|
||||
return outer_wrapper(args0[0])
|
||||
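The closing lines of `manifest` above implement the usual optional-parentheses decorator idiom. A reduced, self-contained sketch of that idiom (the `label` parameter is made up for illustration):

from functools import update_wrapper

def logged(*args0, label="call"):
    def outer_wrapper(func):
        def wrapper(*args, **kwargs):
            print(f"{label}: {func.__name__}")
            return func(*args, **kwargs)
        return update_wrapper(wrapper, func)

    # bare `@logged` passes the function itself as the only positional arg;
    # `@logged(label=...)` calls this factory first and gets the wrapper back
    if len(args0) == 0:
        return outer_wrapper
    return outer_wrapper(args0[0])

@logged
def ping():
    return "pong"

@logged(label="timed")
def pong():
    return "ping"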
@@ -1,31 +1,11 @@
from pathlib import Path
from dbt.config.project import PartialProject
from dbt.exceptions import DbtProjectError


def default_project_dir() -> Path:
def default_project_dir():
    paths = list(Path.cwd().parents)
    paths.insert(0, Path.cwd())
    return next((x for x in paths if (x / "dbt_project.yml").exists()), Path.cwd())


def default_profiles_dir() -> Path:
def default_profiles_dir():
    return Path.cwd() if (Path.cwd() / "profiles.yml").exists() else Path.home() / ".dbt"


def default_log_path(project_dir: Path, verify_version: bool = False) -> Path:
    """If available, derive a default log path from dbt_project.yml. Otherwise, default to "logs".
    Known limitations:
    1. Using PartialProject here, so no jinja rendering of log-path.
    2. Programmatic invocations of the cli via dbtRunner may pass a Project object directly,
       which is not being taken into consideration here to extract a log-path.
    """
    default_log_path = Path("logs")
    try:
        partial = PartialProject.from_project_root(str(project_dir), verify_version=verify_version)
        partial_log_path = partial.project_dict.get("log-path") or default_log_path
        default_log_path = Path(project_dir) / partial_log_path
    except DbtProjectError:
        pass

    return default_log_path
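The parent-directory walk in `default_project_dir` can be exercised in isolation. A quick self-test with a throwaway directory tree (the paths are hypothetical):

import tempfile
from pathlib import Path

with tempfile.TemporaryDirectory() as tmp:
    root = Path(tmp)
    (root / "dbt_project.yml").touch()
    nested = root / "models" / "staging"
    nested.mkdir(parents=True)

    # same search order as default_project_dir: the directory itself first,
    # then each parent, falling back to the starting directory
    paths = [nested, *nested.parents]
    found = next((x for x in paths if (x / "dbt_project.yml").exists()), nested)
    assert found == root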
@@ -1,40 +0,0 @@
from enum import Enum
from typing import List

from dbt.exceptions import DbtInternalError


class Command(Enum):
    BUILD = "build"
    CLEAN = "clean"
    COMPILE = "compile"
    CLONE = "clone"
    DOCS_GENERATE = "generate"
    DOCS_SERVE = "serve"
    DEBUG = "debug"
    DEPS = "deps"
    INIT = "init"
    LIST = "list"
    PARSE = "parse"
    RUN = "run"
    RUN_OPERATION = "run-operation"
    SEED = "seed"
    SHOW = "show"
    SNAPSHOT = "snapshot"
    SOURCE_FRESHNESS = "freshness"
    TEST = "test"
    RETRY = "retry"

    @classmethod
    def from_str(cls, s: str) -> "Command":
        try:
            return cls(s)
        except ValueError:
            raise DbtInternalError(f"No value '{s}' exists in Command enum")

    def to_list(self) -> List[str]:
        return {
            Command.DOCS_GENERATE: ["docs", "generate"],
            Command.DOCS_SERVE: ["docs", "serve"],
            Command.SOURCE_FRESHNESS: ["source", "freshness"],
        }.get(self, [self.value])
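The `to_list` mapping above only special-cases the multi-word commands; everything else falls through to the enum value. A trimmed-down, runnable version showing that behaviour:

from enum import Enum
from typing import List

class Cmd(Enum):
    BUILD = "build"
    DOCS_GENERATE = "generate"

    def to_list(self) -> List[str]:
        # multi-word commands are special-cased; the default is [value]
        return {Cmd.DOCS_GENERATE: ["docs", "generate"]}.get(self, [self.value])

assert Cmd("build").to_list() == ["build"]
assert Cmd.DOCS_GENERATE.to_list() == ["docs", "generate"]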
@@ -1,15 +1,7 @@
import re
from collections import namedtuple

from dbt.exceptions import (
    BlockDefinitionNotAtTopError,
    DbtInternalError,
    MissingCloseTagError,
    MissingControlFlowStartTagError,
    NestedTagsError,
    UnexpectedControlFlowEndTagError,
    UnexpectedMacroEOFError,
)
import dbt.exceptions


def regex(pat):
@@ -147,7 +139,10 @@ class TagIterator:
    def _expect_match(self, expected_name, *patterns, **kwargs):
        match = self._first_match(*patterns, **kwargs)
        if match is None:
            raise UnexpectedMacroEOFError(expected_name, self.data[self.pos :])
            msg = 'unexpected EOF, expected {}, got "{}"'.format(
                expected_name, self.data[self.pos :]
            )
            dbt.exceptions.raise_compiler_error(msg)
        return match

    def handle_expr(self, match):
@@ -261,7 +256,7 @@ class TagIterator:
        elif block_type_name is not None:
            yield self.handle_tag(match)
        else:
            raise DbtInternalError(
            raise dbt.exceptions.InternalException(
                "Invalid regex match in next_block, expected block start, "
                "expr start, or comment start"
            )
@@ -270,6 +265,13 @@ class TagIterator:
        return self.find_tags()


duplicate_tags = (
    "Got nested tags: {outer.block_type_name} (started at {outer.start}) did "
    "not have a matching {{% end{outer.block_type_name} %}} before a "
    "subsequent {inner.block_type_name} was found (started at {inner.start})"
)


_CONTROL_FLOW_TAGS = {
    "if": "endif",
    "for": "endfor",
@@ -317,16 +319,33 @@ class BlockIterator:
                    found = self.stack.pop()
                else:
                    expected = _CONTROL_FLOW_END_TAGS[tag.block_type_name]
                    raise UnexpectedControlFlowEndTagError(tag, expected, self.tag_parser)
                    dbt.exceptions.raise_compiler_error(
                        (
                            "Got an unexpected control flow end tag, got {} but "
                            "never saw a preceeding {} (@ {})"
                        ).format(tag.block_type_name, expected, self.tag_parser.linepos(tag.start))
                    )
                expected = _CONTROL_FLOW_TAGS[found]
                if expected != tag.block_type_name:
                    raise MissingControlFlowStartTagError(tag, expected, self.tag_parser)
                    dbt.exceptions.raise_compiler_error(
                        (
                            "Got an unexpected control flow end tag, got {} but "
                            "expected {} next (@ {})"
                        ).format(tag.block_type_name, expected, self.tag_parser.linepos(tag.start))
                    )

            if tag.block_type_name in allowed_blocks:
                if self.stack:
                    raise BlockDefinitionNotAtTopError(self.tag_parser, tag.start)
                    dbt.exceptions.raise_compiler_error(
                        (
                            "Got a block definition inside control flow at {}. "
                            "All dbt block definitions must be at the top level"
                        ).format(self.tag_parser.linepos(tag.start))
                    )
                if self.current is not None:
                    raise NestedTagsError(outer=self.current, inner=tag)
                    dbt.exceptions.raise_compiler_error(
                        duplicate_tags.format(outer=self.current, inner=tag)
                    )
                if collect_raw_data:
                    raw_data = self.data[self.last_position : tag.start]
                    self.last_position = tag.start
@@ -347,7 +366,11 @@ class BlockIterator:

        if self.current:
            linecount = self.data[: self.current.end].count("\n") + 1
            raise MissingCloseTagError(self.current.block_type_name, linecount)
            dbt.exceptions.raise_compiler_error(
                (
                    "Reached EOF without finding a close tag for " "{} (searched from line {})"
                ).format(self.current.block_type_name, linecount)
            )

        if collect_raw_data:
            raw_data = self.data[self.last_position :]
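`BlockIterator` above enforces two invariants with a stack: control-flow tags must nest properly, and dbt block definitions must sit at the top level. The stack discipline on its own, as a runnable sketch:

_CONTROL_FLOW_TAGS = {"if": "endif", "for": "endfor"}
_CONTROL_FLOW_END_TAGS = {v: k for k, v in _CONTROL_FLOW_TAGS.items()}

def check_tags(tags):
    stack = []
    for tag in tags:
        if tag in _CONTROL_FLOW_TAGS:
            stack.append(tag)
        elif tag in _CONTROL_FLOW_END_TAGS:
            # the close tag must match the most recently opened tag
            if not stack or _CONTROL_FLOW_TAGS[stack.pop()] != tag:
                raise ValueError(f"unexpected control flow end tag: {tag}")
    if stack:
        raise ValueError(f"reached EOF without a close tag for: {stack[-1]}")

check_tags(["if", "for", "endfor", "endif"])  # nests cleanly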
@@ -7,7 +7,7 @@ import json
import dbt.utils
from typing import Iterable, List, Dict, Union, Optional, Any

from dbt.exceptions import DbtRuntimeError
from dbt.exceptions import RuntimeException


BOM = BOM_UTF8.decode("utf-8")  # '\ufeff'
@@ -168,7 +168,7 @@ class ColumnTypeBuilder(Dict[str, NullableAgateType]):
            return
        elif not isinstance(value, type(existing_type)):
            # actual type mismatch!
            raise DbtRuntimeError(
            raise RuntimeException(
                f"Tables contain columns with the same names ({key}), "
                f"but different types ({value} vs {existing_type})"
            )
@@ -14,10 +14,10 @@ from dbt.events.types import (
)
from dbt.exceptions import (
    CommandResultError,
    GitCheckoutError,
    GitCloningError,
    UnknownGitCloningProblemError,
    DbtRuntimeError,
    RuntimeException,
    bad_package_spec,
    raise_git_cloning_error,
    raise_git_cloning_problem,
)
from packaging import version

@@ -27,6 +27,16 @@ def _is_commit(revision: str) -> bool:
    return bool(re.match(r"\b[0-9a-f]{40}\b", revision))


def _raise_git_cloning_error(repo, revision, error):
    stderr = error.stderr.strip()
    if "usage: git" in stderr:
        stderr = stderr.split("\nusage: git")[0]
    if re.match("fatal: destination path '(.+)' already exists", stderr):
        raise_git_cloning_error(error)

    bad_package_spec(repo, revision, stderr)


def clone(repo, cwd, dirname=None, remove_git_dir=False, revision=None, subdirectory=None):
    has_revision = revision is not None
    is_commit = _is_commit(revision or "")
@@ -54,7 +64,7 @@ def clone(repo, cwd, dirname=None, remove_git_dir=False, revision=None, subdirec
    try:
        result = run_cmd(cwd, clone_cmd, env={"LC_ALL": "C"})
    except CommandResultError as exc:
        raise GitCloningError(repo, revision, exc)
        _raise_git_cloning_error(repo, revision, exc)

    if subdirectory:
        cwd_subdir = os.path.join(cwd, dirname or "")
@@ -62,7 +72,7 @@ def clone(repo, cwd, dirname=None, remove_git_dir=False, revision=None, subdirec
    try:
        run_cmd(cwd_subdir, clone_cmd_subdir)
    except CommandResultError as exc:
        raise GitCloningError(repo, revision, exc)
        _raise_git_cloning_error(repo, revision, exc)

    if remove_git_dir:
        rmdir(os.path.join(dirname, ".git"))
@@ -105,7 +115,8 @@ def checkout(cwd, repo, revision=None):
    try:
        return _checkout(cwd, repo, revision)
    except CommandResultError as exc:
        raise GitCheckoutError(repo=repo, revision=revision, error=exc)
        stderr = exc.stderr.strip()
    bad_package_spec(repo, revision, stderr)


def get_current_sha(cwd):
@@ -134,7 +145,7 @@ def clone_and_checkout(
        err = exc.stderr
        exists = re.match("fatal: destination path '(.+)' already exists", err)
        if not exists:
            raise UnknownGitCloningProblemError(repo)
            raise_git_cloning_problem(repo)

    directory = None
    start_sha = None
@@ -144,7 +155,7 @@ def clone_and_checkout(
    else:
        matches = re.match("Cloning into '(.+)'", err.decode("utf-8"))
        if matches is None:
            raise DbtRuntimeError(f'Error cloning {repo} - never saw "Cloning into ..." from git')
            raise RuntimeException(f'Error cloning {repo} - never saw "Cloning into ..." from git')
        directory = matches.group(1)
        fire_event(GitProgressPullingNewDependency(dir=directory))
        full_path = os.path.join(cwd, directory)
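`_is_commit` above treats any 40-character lowercase-hex revision as a commit SHA rather than a branch or tag name. That check, exercised directly:

import re

def is_commit(revision: str) -> bool:
    # a full git SHA-1 is exactly 40 hex characters
    return bool(re.match(r"\b[0-9a-f]{40}\b", revision))

assert is_commit("d23d414dfe" * 4)   # 40 hex chars
assert not is_commit("1.3.latest")   # branch names don't match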
@@ -25,22 +25,19 @@ from dbt.utils import (
)

from dbt.clients._jinja_blocks import BlockIterator, BlockData, BlockTag
from dbt.contracts.graph.nodes import GenericTestNode
from dbt.contracts.graph.compiled import CompiledGenericTestNode
from dbt.contracts.graph.parsed import ParsedGenericTestNode

from dbt.exceptions import (
    CaughtMacroError,
    CaughtMacroErrorWithNodeError,
    CompilationError,
    DbtInternalError,
    MaterializationArgError,
    JinjaRenderingError,
    InternalException,
    raise_compiler_error,
    CompilationException,
    invalid_materialization_argument,
    MacroReturn,
    MaterializtionMacroNotUsedError,
    NoSupportedLanguagesFoundError,
    UndefinedCompilationError,
    UndefinedMacroError,
    JinjaRenderingException,
    UndefinedMacroException,
)
from dbt.flags import get_flags
from dbt import flags
from dbt.node_types import ModelLanguage


@@ -99,9 +96,8 @@ class MacroFuzzEnvironment(jinja2.sandbox.SandboxedEnvironment):
        If the value is 'write', also write the files to disk.
        WARNING: This can write a ton of data if you aren't careful.
        """
        macro_debugging = get_flags().MACRO_DEBUGGING
        if filename == "<template>" and macro_debugging:
            write = macro_debugging == "write"
        if filename == "<template>" and flags.MACRO_DEBUGGING:
            write = flags.MACRO_DEBUGGING == "write"
            filename = _linecache_inject(source, write)

        return super()._compile(source, filename)  # type: ignore
@@ -162,9 +158,9 @@ def quoted_native_concat(nodes):
    except (ValueError, SyntaxError, MemoryError):
        result = raw
    if isinstance(raw, BoolMarker) and not isinstance(result, bool):
        raise JinjaRenderingError(f"Could not convert value '{raw!s}' into type 'bool'")
        raise JinjaRenderingException(f"Could not convert value '{raw!s}' into type 'bool'")
    if isinstance(raw, NumberMarker) and not _is_number(result):
        raise JinjaRenderingError(f"Could not convert value '{raw!s}' into type 'number'")
        raise JinjaRenderingException(f"Could not convert value '{raw!s}' into type 'number'")

    return result

@@ -242,12 +238,12 @@ class BaseMacroGenerator:
        try:
            yield
        except (TypeError, jinja2.exceptions.TemplateRuntimeError) as e:
            raise CaughtMacroError(e)
            raise_compiler_error(str(e))

    def call_macro(self, *args, **kwargs):
        # called from __call__ methods
        if self.context is None:
            raise DbtInternalError("Context is still None in call_macro!")
            raise InternalException("Context is still None in call_macro!")
        assert self.context is not None

        macro = self.get_macro()
@@ -274,7 +270,7 @@ class MacroStack(threading.local):
    def pop(self, name):
        got = self.call_stack.pop()
        if got != name:
            raise DbtInternalError(f"popped {got}, expected {name}")
            raise InternalException(f"popped {got}, expected {name}")


class MacroGenerator(BaseMacroGenerator):
@@ -301,8 +297,8 @@ class MacroGenerator(BaseMacroGenerator):
        try:
            yield
        except (TypeError, jinja2.exceptions.TemplateRuntimeError) as e:
            raise CaughtMacroErrorWithNodeError(exc=e, node=self.macro)
        except CompilationError as e:
            raise_compiler_error(str(e), self.macro)
        except CompilationException as e:
            e.stack.append(self.macro)
            raise e

@@ -381,7 +377,7 @@ class MaterializationExtension(jinja2.ext.Extension):
                node.defaults.append(languages)

            else:
                raise MaterializationArgError(materialization_name, target.name)
                invalid_materialization_argument(materialization_name, target.name)

        if SUPPORTED_LANG_ARG not in node.args:
            node.args.append(SUPPORTED_LANG_ARG)
@@ -456,7 +452,7 @@ def create_undefined(node=None):
            return self

        def __reduce__(self):
            raise UndefinedCompilationError(name=self.name, node=node)
            raise_compiler_error(f"{self.name} is undefined", node=node)

    return Undefined

@@ -483,7 +479,7 @@ def get_environment(
    native: bool = False,
) -> jinja2.Environment:
    args: Dict[str, List[Union[str, Type[jinja2.ext.Extension]]]] = {
        "extensions": ["jinja2.ext.do", "jinja2.ext.loopcontrols"]
        "extensions": ["jinja2.ext.do"]
    }

    if capture_macros:
@@ -514,10 +510,10 @@ def catch_jinja(node=None) -> Iterator[None]:
        yield
    except jinja2.exceptions.TemplateSyntaxError as e:
        e.translated = False
        raise CompilationError(str(e), node) from e
        raise CompilationException(str(e), node) from e
    except jinja2.exceptions.UndefinedError as e:
        raise UndefinedMacroError(str(e), node) from e
    except CompilationError as exc:
        raise UndefinedMacroException(str(e), node) from e
    except CompilationException as exc:
        exc.add_node(node)
        raise

@@ -565,8 +561,6 @@ def _requote_result(raw_value: str, rendered: str) -> str:
# is small enough that I've just chosen the more readable option.
_HAS_RENDER_CHARS_PAT = re.compile(r"({[{%#]|[#}%]})")

_render_cache: Dict[str, Any] = dict()


def get_rendered(
    string: str,
@@ -574,21 +568,15 @@ def get_rendered(
    node=None,
    capture_macros: bool = False,
    native: bool = False,
) -> Any:
) -> str:
    # performance optimization: if there are no jinja control characters in the
    # string, we can just return the input. Fall back to jinja if the type is
    # not a string or if native rendering is enabled (so '1' -> 1, etc...)
    # If this is desirable in the native env as well, we could handle the
    # native=True case by passing the input string to ast.literal_eval, like
    # the native renderer does.
    has_render_chars = not isinstance(string, str) or _HAS_RENDER_CHARS_PAT.search(string)

    if not has_render_chars:
        if not native:
            return string
        elif string in _render_cache:
            return _render_cache[string]

    if not native and isinstance(string, str) and _HAS_RENDER_CHARS_PAT.search(string) is None:
        return string
    template = get_template(
        string,
        ctx,
@@ -596,13 +584,7 @@ def get_rendered(
        capture_macros=capture_macros,
        native=native,
    )

    rendered = render_template(template, ctx, node)

    if not has_render_chars and native:
        _render_cache[string] = rendered

    return rendered
    return render_template(template, ctx, node)


def undefined_error(msg) -> NoReturn:
@@ -638,7 +620,7 @@ GENERIC_TEST_KWARGS_NAME = "_dbt_generic_test_kwargs"

def add_rendered_test_kwargs(
    context: Dict[str, Any],
    node: GenericTestNode,
    node: Union[ParsedGenericTestNode, CompiledGenericTestNode],
    capture_macros: bool = False,
) -> None:
    """Render each of the test kwargs in the given context using the native
@@ -670,13 +652,13 @@ def add_rendered_test_kwargs(

def get_supported_languages(node: jinja2.nodes.Macro) -> List[ModelLanguage]:
    if "materialization" not in node.name:
        raise MaterializtionMacroNotUsedError(node=node)
        raise_compiler_error("Only materialization macros can be used with this function")

    no_kwargs = not node.defaults
    no_langs_found = SUPPORTED_LANG_ARG not in node.args

    if no_kwargs or no_langs_found:
        raise NoSupportedLanguagesFoundError(node=node)
        raise_compiler_error(f"No supported_languages found in materialization macro {node.name}")

    lang_idx = node.args.index(SUPPORTED_LANG_ARG)
    # indexing defaults from the end
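The fast path in `get_rendered` above hinges on one regex: if a string contains no jinja delimiters, rendering is skipped entirely. That check in isolation:

import re

_HAS_RENDER_CHARS_PAT = re.compile(r"({[{%#]|[#}%]})")

def needs_render(value) -> bool:
    # non-strings always go through jinja; strings only if a delimiter appears
    return not isinstance(value, str) or _HAS_RENDER_CHARS_PAT.search(value) is not None

assert not needs_render("select 1")
assert needs_render("select {{ var('x') }}")
assert needs_render("{% if true %}x{% endif %}")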
@@ -1,6 +1,6 @@
import jinja2
from dbt.clients.jinja import get_environment
from dbt.exceptions import MacroNamespaceNotStringError, MacroNameNotStringError
from dbt.exceptions import raise_compiler_error


def statically_extract_macro_calls(string, ctx, db_wrapper=None):
@@ -117,14 +117,20 @@ def statically_parse_adapter_dispatch(func_call, ctx, db_wrapper):
                func_name = kwarg.value.value
                possible_macro_calls.append(func_name)
            else:
                raise MacroNameNotStringError(kwarg_value=kwarg.value.value)
                raise_compiler_error(
                    f"The macro_name parameter ({kwarg.value.value}) "
                    "to adapter.dispatch was not a string"
                )
        elif kwarg.key == "macro_namespace":
            # This will remain to enable static resolution
            kwarg_type = type(kwarg.value).__name__
            if kwarg_type == "Const":
                macro_namespace = kwarg.value.value
            else:
                raise MacroNamespaceNotStringError(kwarg_type)
                raise_compiler_error(
                    "The macro_namespace parameter to adapter.dispatch "
                    f"is a {kwarg_type}, not a string"
                )

    # positional arguments
    if packages_arg:
@@ -141,7 +147,7 @@ def statically_parse_adapter_dispatch(func_call, ctx, db_wrapper):
            macro = db_wrapper.dispatch(func_name, macro_namespace=macro_namespace).macro
            func_name = f"{macro.package_name}.{macro.name}"
            possible_macro_calls.append(func_name)
    else:  # this is only for tests/unit/test_macro_calls.py
    else:  # this is only for test/unit/test_macro_calls.py
        if macro_namespace:
            packages = [macro_namespace]
        else:
@@ -3,9 +3,9 @@ from typing import Any, Dict, List
import requests
from dbt.events.functions import fire_event
from dbt.events.types import (
    RegistryProgressGETRequest,
    RegistryProgressMakingGETRequest,
    RegistryProgressGETResponse,
    RegistryIndexProgressGETRequest,
    RegistryIndexProgressMakingGETRequest,
    RegistryIndexProgressGETResponse,
    RegistryResponseUnexpectedType,
    RegistryResponseMissingTopKeys,
@@ -38,7 +38,7 @@ def _get_with_retries(package_name, registry_base_url=None):

def _get(package_name, registry_base_url=None):
    url = _get_url(package_name, registry_base_url)
    fire_event(RegistryProgressGETRequest(url=url))
    fire_event(RegistryProgressMakingGETRequest(url=url))
    # all exceptions from requests get caught in the retry logic so no need to wrap this here
    resp = requests.get(url, timeout=30)
    fire_event(RegistryProgressGETResponse(url=url, resp_code=resp.status_code))
@@ -162,7 +162,7 @@ def get_compatible_versions(package_name, dbt_version, should_version_check) ->
def _get_index(registry_base_url=None):

    url = _get_url("index", registry_base_url)
    fire_event(RegistryIndexProgressGETRequest(url=url))
    fire_event(RegistryIndexProgressMakingGETRequest(url=url))
    # all exceptions from requests get caught in the retry logic so no need to wrap this here
    resp = requests.get(url, timeout=30)
    fire_event(RegistryIndexProgressGETResponse(url=url, resp_code=resp.status_code))
@@ -1,31 +1,30 @@
import errno
import fnmatch
import functools
import fnmatch
import json
import os
import os.path
import re
import shutil
import stat
import subprocess
import sys
import tarfile
from pathlib import Path
from typing import Any, Callable, Dict, List, NoReturn, Optional, Tuple, Type, Union

import dbt.exceptions
import requests
import stat
from typing import Type, NoReturn, List, Optional, Dict, Any, Tuple, Callable, Union
from pathspec import PathSpec  # type: ignore

from dbt.events.functions import fire_event
from dbt.events.types import (
    SystemErrorRetrievingModTime,
    SystemCouldNotWrite,
    SystemExecutingCmd,
    SystemStdOut,
    SystemStdErr,
    SystemStdOutMsg,
    SystemStdErrMsg,
    SystemReportReturnCode,
)
from dbt.exceptions import DbtInternalError
import dbt.exceptions
from dbt.utils import _connection_exception_retry as connection_exception_retry
from pathspec import PathSpec  # type: ignore

if sys.platform == "win32":
    from ctypes import WinDLL, c_bool
@@ -76,7 +75,11 @@ def find_matching(
            relative_path = os.path.relpath(absolute_path, absolute_path_to_search)
            relative_path_to_root = os.path.join(relative_path_to_search, relative_path)

            modification_time = os.path.getmtime(absolute_path)
            modification_time = 0.0
            try:
                modification_time = os.path.getmtime(absolute_path)
            except OSError:
                fire_event(SystemErrorRetrievingModTime(path=absolute_path))
            if reobj.match(local_file) and (
                not ignore_spec or not ignore_spec.match_file(relative_path_to_root)
            ):
@@ -103,18 +106,12 @@ def load_file_contents(path: str, strip: bool = True) -> str:
    return to_return


@functools.singledispatch
def make_directory(path=None) -> None:
def make_directory(path: str) -> None:
    """
    Make a directory and any intermediate directories that don't already
    exist. This function handles the case where two threads try to create
    a directory at once.
    """
    raise DbtInternalError(f"Can not create directory from {type(path)} ")


@make_directory.register
def _(path: str) -> None:
    path = convert_path(path)
    if not os.path.exists(path):
        # concurrent writes that try to create the same dir can fail
@@ -128,11 +125,6 @@ def _(path: str) -> None:
            raise e


@make_directory.register
def _(path: Path) -> None:
    path.mkdir(parents=True, exist_ok=True)


def make_file(path: str, contents: str = "", overwrite: bool = False) -> bool:
    """
    Make a file at `path` assuming that the directory it resides in already
@@ -152,8 +144,7 @@ def make_symlink(source: str, link_path: str) -> None:
    Create a symlink at `link_path` referring to `source`.
    """
    if not supports_symlinks():
        # TODO: why not import these at top?
        raise dbt.exceptions.SymbolicLinkError()
        dbt.exceptions.system_error("create a symbolic link")

    os.symlink(source, link_path)

@@ -211,7 +202,7 @@ def _windows_rmdir_readonly(func: Callable[[str], Any], path: str, exc: Tuple[An

def resolve_path_from_base(path_to_resolve: str, base_path: str) -> str:
    """
    If path_to_resolve is a relative path, create an absolute path
    If path-to_resolve is a relative path, create an absolute path
    with base_path as the base.

    If path_to_resolve is an absolute path or a user path (~), just
@@ -420,7 +411,7 @@ def _interpret_oserror(exc: OSError, cwd: str, cmd: List[str]) -> NoReturn:
        _handle_posix_error(exc, cwd, cmd)

    # this should not be reachable, raise _something_ at least!
    raise dbt.exceptions.DbtInternalError(
    raise dbt.exceptions.InternalException(
        "Unhandled exception in _interpret_oserror: {}".format(exc)
    )

@@ -449,8 +440,8 @@ def run_cmd(cwd: str, cmd: List[str], env: Optional[Dict[str, Any]] = None) -> T
    except OSError as exc:
        _interpret_oserror(exc, cwd, cmd)

    fire_event(SystemStdOut(bmsg=str(out)))
    fire_event(SystemStdErr(bmsg=str(err)))
    fire_event(SystemStdOutMsg(bmsg=out))
    fire_event(SystemStdErrMsg(bmsg=err))

    if proc.returncode != 0:
        fire_event(SystemReportReturnCode(returncode=proc.returncode))
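The newer `make_directory` above dispatches on argument type via `functools.singledispatch`: the undecorated base raises, and the `str`/`Path` overloads are registered by annotation. The shape of that pattern, reduced to a runnable sketch:

import functools
from pathlib import Path

@functools.singledispatch
def describe(path=None) -> str:
    # the base implementation is the fallback for unregistered types
    raise TypeError(f"Can not handle {type(path)}")

@describe.register
def _(path: str) -> str:
    return f"str path: {path}"

@describe.register
def _(path: Path) -> str:
    return f"Path object: {path}"

assert describe("target").startswith("str")
assert describe(Path("target")).startswith("Path")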
@@ -60,4 +60,4 @@ def load_yaml_text(contents, path=None):
    else:
        error = str(e)

    raise dbt.exceptions.DbtValidationError(error)
    raise dbt.exceptions.ValidationException(error)
@@ -1,45 +1,49 @@
import argparse
import json
import os
from collections import defaultdict
from typing import List, Dict, Any, Tuple, cast, Optional

import networkx as nx  # type: ignore
import os
import pickle
import sqlparse

from collections import defaultdict
from typing import List, Dict, Any, Tuple, Optional

from dbt.flags import get_flags
from dbt import flags
from dbt.adapters.factory import get_adapter
from dbt.clients import jinja
from dbt.clients.system import make_directory
from dbt.context.providers import generate_runtime_model_context
from dbt.contracts.graph.manifest import Manifest, UniqueID
from dbt.contracts.graph.nodes import (
    ManifestNode,
    ManifestSQLNode,
    GenericTestNode,
from dbt.contracts.graph.compiled import (
    COMPILED_TYPES,
    CompiledGenericTestNode,
    GraphMemberNode,
    InjectedCTE,
    SeedNode,
    ManifestNode,
    NonSourceCompiledNode,
)
from dbt.contracts.graph.parsed import ParsedNode
from dbt.exceptions import (
    GraphDependencyNotFoundError,
    DbtInternalError,
    DbtRuntimeError,
    dependency_not_found,
    InternalException,
    RuntimeException,
)
from dbt.graph import Graph
from dbt.events.functions import fire_event, get_invocation_id
from dbt.events.types import FoundStats, Note, WritingInjectedSQLForNode
from dbt.events.contextvars import get_node_info
from dbt.events.functions import fire_event
from dbt.events.types import FoundStats, CompilingNode, WritingInjectedSQLForNode
from dbt.node_types import NodeType, ModelLanguage
from dbt.events.format import pluralize
import dbt.tracking
import dbt.task.list as list_task

graph_file_name = "graph.gpickle"


def _compiled_type_for(model: ParsedNode):
    if type(model) not in COMPILED_TYPES:
        raise InternalException(
            f"Asked to compile {type(model)} node, but it has no compiled form"
        )
    return COMPILED_TYPES[type(model)]


def print_compile_stats(stats):
    names = {
        NodeType.Model: "model",
@@ -48,12 +52,10 @@ def print_compile_stats(stats):
        NodeType.Analysis: "analysis",
        NodeType.Macro: "macro",
        NodeType.Operation: "operation",
        NodeType.Seed: "seed",
        NodeType.Seed: "seed file",
        NodeType.Source: "source",
        NodeType.Exposure: "exposure",
        NodeType.SemanticModel: "semantic model",
        NodeType.Metric: "metric",
        NodeType.Group: "group",
    }

    results = {k: 0 for k in names.keys()}
@@ -64,8 +66,7 @@ def print_compile_stats(stats):
    resource_counts = {k.pluralize(): v for k, v in results.items()}
    dbt.tracking.track_resource_counts(resource_counts)

    # do not include resource types that are not actually defined in the project
    stat_line = ", ".join([pluralize(ct, names.get(t)) for t, ct in stats.items() if t in names])
    stat_line = ", ".join([pluralize(ct, names.get(t)) for t, ct in results.items() if t in names])

    fire_event(FoundStats(stat_line=stat_line))

@@ -84,26 +85,23 @@ def _generate_stats(manifest: Manifest):
        if _node_enabled(node):
            stats[node.resource_type] += 1

    # Disabled nodes don't appear in the following collections, so we don't check.
    stats[NodeType.Source] += len(manifest.sources)
    stats[NodeType.Exposure] += len(manifest.exposures)
    stats[NodeType.Metric] += len(manifest.metrics)
    stats[NodeType.Macro] += len(manifest.macros)
    stats[NodeType.Group] += len(manifest.groups)
    stats[NodeType.SemanticModel] += len(manifest.semantic_models)

    # TODO: should we be counting dimensions + entities?

    for source in manifest.sources.values():
        stats[source.resource_type] += 1
    for exposure in manifest.exposures.values():
        stats[exposure.resource_type] += 1
    for metric in manifest.metrics.values():
        stats[metric.resource_type] += 1
    for macro in manifest.macros.values():
        stats[macro.resource_type] += 1
    return stats


def _add_prepended_cte(prepended_ctes, new_cte):
    for cte in prepended_ctes:
        if cte.id == new_cte.id and new_cte.sql:
        if cte.id == new_cte.id:
            cte.sql = new_cte.sql
            return
    if new_cte.sql:
        prepended_ctes.append(new_cte)
    prepended_ctes.append(new_cte)


def _extend_prepended_ctes(prepended_ctes, new_prepended_ctes):
@@ -165,39 +163,285 @@ class Linker:
        with open(outfile, "wb") as outfh:
            pickle.dump(out_graph, outfh, protocol=pickle.HIGHEST_PROTOCOL)

    def link_node(self, node: GraphMemberNode, manifest: Manifest):
        self.add_node(node.unique_id)

class Compiler:
    def __init__(self, config):
        self.config = config

    def initialize(self):
        make_directory(self.config.target_path)
        make_directory(self.config.packages_install_path)

    # creates a ModelContext which is converted to
    # a dict for jinja rendering of SQL
    def _create_node_context(
        self,
        node: NonSourceCompiledNode,
        manifest: Manifest,
        extra_context: Dict[str, Any],
    ) -> Dict[str, Any]:

        context = generate_runtime_model_context(node, self.config, manifest)
        context.update(extra_context)

        if isinstance(node, CompiledGenericTestNode):
            # for test nodes, add a special keyword args value to the context
            jinja.add_rendered_test_kwargs(context, node)

        return context

    def add_ephemeral_prefix(self, name: str):
        adapter = get_adapter(self.config)
        relation_cls = adapter.Relation
        return relation_cls.add_ephemeral_prefix(name)

    def _get_relation_name(self, node: ParsedNode):
        relation_name = None
        if node.is_relational and not node.is_ephemeral_model:
            adapter = get_adapter(self.config)
            relation_cls = adapter.Relation
            relation_name = str(relation_cls.create_from(self.config, node))
        return relation_name

    def _inject_ctes_into_sql(self, sql: str, ctes: List[InjectedCTE]) -> str:
        """
        `ctes` is a list of InjectedCTEs like:

            [
                InjectedCTE(
                    id="cte_id_1",
                    sql="__dbt__cte__ephemeral as (select * from table)",
                ),
                InjectedCTE(
                    id="cte_id_2",
                    sql="__dbt__cte__events as (select id, type from events)",
                ),
            ]

        Given `sql` like:

            "with internal_cte as (select * from sessions)
            select * from internal_cte"

        This will spit out:

            "with __dbt__cte__ephemeral as (select * from table),
            __dbt__cte__events as (select id, type from events),
            with internal_cte as (select * from sessions)
            select * from internal_cte"

        (Whitespace enhanced for readability.)
        """
        if len(ctes) == 0:
            return sql

        parsed_stmts = sqlparse.parse(sql)
        parsed = parsed_stmts[0]

        with_stmt = None
        for token in parsed.tokens:
            if token.is_keyword and token.normalized == "WITH":
                with_stmt = token
                break

        if with_stmt is None:
            # no with stmt, add one, and inject CTEs right at the beginning
            first_token = parsed.token_first()
            with_stmt = sqlparse.sql.Token(sqlparse.tokens.Keyword, "with")
            parsed.insert_before(first_token, with_stmt)
        else:
            # stmt exists, add a comma (which will come after injected CTEs)
            trailing_comma = sqlparse.sql.Token(sqlparse.tokens.Punctuation, ",")
            parsed.insert_after(with_stmt, trailing_comma)

        token = sqlparse.sql.Token(sqlparse.tokens.Keyword, ", ".join(c.sql for c in ctes))
        parsed.insert_after(with_stmt, token)

        return str(parsed)
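The docstring above describes the splice; the sqlparse token surgery can also be run standalone. A minimal sketch (the SQL and CTE strings are made up, and spacing mirrors the method's reliance on leading whitespace in the CTE text):

import sqlparse

sql = "with internal_cte as (select * from sessions) select * from internal_cte"
ctes = [" __dbt__cte__events as (select id, type from events)"]

parsed = sqlparse.parse(sql)[0]
with_stmt = next(t for t in parsed.tokens if t.is_keyword and t.normalized == "WITH")

# an existing WITH clause is present, so the injected CTE list needs a
# trailing comma; both tokens are inserted right after the WITH keyword
parsed.insert_after(with_stmt, sqlparse.sql.Token(sqlparse.tokens.Punctuation, ","))
parsed.insert_after(with_stmt, sqlparse.sql.Token(sqlparse.tokens.Keyword, ", ".join(ctes)))

print(parsed)  # roughly: with __dbt__cte__events as (...), internal_cte as (...) select ...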
|
||||
def _recursively_prepend_ctes(
|
||||
self,
|
||||
model: NonSourceCompiledNode,
|
||||
manifest: Manifest,
|
||||
extra_context: Optional[Dict[str, Any]],
|
||||
) -> Tuple[NonSourceCompiledNode, List[InjectedCTE]]:
|
||||
"""This method is called by the 'compile_node' method. Starting
|
||||
from the node that it is passed in, it will recursively call
|
||||
itself using the 'extra_ctes'. The 'ephemeral' models do
|
||||
not produce SQL that is executed directly, instead they
|
||||
are rolled up into the models that refer to them by
|
||||
inserting CTEs into the SQL.
|
||||
"""
|
||||
if model.compiled_code is None:
|
||||
raise RuntimeException("Cannot inject ctes into an unparsed node", model)
|
||||
if model.extra_ctes_injected:
|
||||
return (model, model.extra_ctes)
|
||||
|
||||
# Just to make it plain that nothing is actually injected for this case
|
||||
if not model.extra_ctes:
|
||||
model.extra_ctes_injected = True
|
||||
manifest.update_node(model)
|
||||
return (model, model.extra_ctes)
|
||||
|
||||
# This stores the ctes which will all be recursively
|
||||
# gathered and then "injected" into the model.
|
||||
prepended_ctes: List[InjectedCTE] = []
|
||||
|
||||
# extra_ctes are added to the model by
|
||||
# RuntimeRefResolver.create_relation, which adds an
|
||||
# extra_cte for every model relation which is an
|
||||
# ephemeral model.
|
||||
for cte in model.extra_ctes:
|
||||
if cte.id not in manifest.nodes:
|
||||
raise InternalException(
|
||||
f"During compilation, found a cte reference that "
|
||||
f"could not be resolved: {cte.id}"
|
||||
)
|
||||
cte_model = manifest.nodes[cte.id]
|
||||
|
||||
if not cte_model.is_ephemeral_model:
|
||||
raise InternalException(f"{cte.id} is not ephemeral")
|
||||
|
||||
# This model has already been compiled, so it's been
|
||||
# through here before
|
||||
if getattr(cte_model, "compiled", False):
|
||||
assert isinstance(cte_model, tuple(COMPILED_TYPES.values()))
|
||||
cte_model = cast(NonSourceCompiledNode, cte_model)
|
||||
new_prepended_ctes = cte_model.extra_ctes
|
||||
|
||||
# if the cte_model isn't compiled, i.e. first time here
|
||||
else:
|
||||
# This is an ephemeral parsed model that we can compile.
|
||||
# Compile and update the node
|
||||
cte_model = self._compile_node(cte_model, manifest, extra_context)
|
||||
# recursively call this method
|
||||
cte_model, new_prepended_ctes = self._recursively_prepend_ctes(
|
||||
cte_model, manifest, extra_context
|
||||
)
|
||||
# Save compiled SQL file and sync manifest
|
||||
self._write_node(cte_model)
|
||||
manifest.sync_update_node(cte_model)
|
||||
|
||||
_extend_prepended_ctes(prepended_ctes, new_prepended_ctes)
|
||||
|
||||
new_cte_name = self.add_ephemeral_prefix(cte_model.name)
|
||||
rendered_sql = cte_model._pre_injected_sql or cte_model.compiled_code
|
||||
sql = f" {new_cte_name} as (\n{rendered_sql}\n)"
|
||||
|
||||
_add_prepended_cte(prepended_ctes, InjectedCTE(id=cte.id, sql=sql))
|
||||
|
||||
injected_sql = self._inject_ctes_into_sql(
|
||||
model.compiled_code,
|
||||
prepended_ctes,
|
||||
)
|
||||
model._pre_injected_sql = model.compiled_code
|
||||
model.compiled_code = injected_sql
|
||||
model.extra_ctes_injected = True
|
||||
model.extra_ctes = prepended_ctes
|
||||
model.validate(model.to_dict(omit_none=True))
|
||||
manifest.update_node(model)
|
||||
|
||||
return model, prepended_ctes
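# ---------------------------------------------------------------------------
# Editor's sketch (not part of this diff): the shape of the recursion above on
# a toy manifest where ephemeral "models" are plain dicts. All names here are
# illustrative, not dbt-core APIs.
# ---------------------------------------------------------------------------
from typing import Dict, List, Tuple

def gather_ctes(node_id: str, manifest: Dict[str, dict]) -> Tuple[str, List[str]]:
    node = manifest[node_id]
    ctes: List[str] = []
    for dep_id in node.get("ephemeral_deps", []):
        _, child_ctes = gather_ctes(dep_id, manifest)
        # a child's own CTEs must come first so each CTE only references earlier ones
        for cte in child_ctes + [f"__dbt__cte__{dep_id} as ({manifest[dep_id]['sql']})"]:
            if cte not in ctes:  # dedupe, like _add_prepended_cte/_extend_prepended_ctes
                ctes.append(cte)
    return node["sql"], ctes

toy_manifest = {
    "a": {"sql": "select 1 as id"},
    "b": {"sql": "select * from __dbt__cte__a", "ephemeral_deps": ["a"]},
    "c": {"sql": "select * from __dbt__cte__b", "ephemeral_deps": ["b"]},
}
# gather_ctes("c", toy_manifest)[1] ==
#   ["__dbt__cte__a as (select 1 as id)", "__dbt__cte__b as (select * from __dbt__cte__a)"]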

    # creates a compiled_node from the ManifestNode passed in,
    # creates a "context" dictionary for jinja rendering,
    # and then renders the "compiled_code" using the node, the
    # raw_code and the context.
    def _compile_node(
        self,
        node: ManifestNode,
        manifest: Manifest,
        extra_context: Optional[Dict[str, Any]] = None,
    ) -> NonSourceCompiledNode:
        if extra_context is None:
            extra_context = {}

        fire_event(CompilingNode(unique_id=node.unique_id))

        data = node.to_dict(omit_none=True)
        data.update(
            {
                "compiled": False,
                "compiled_code": None,
                "extra_ctes_injected": False,
                "extra_ctes": [],
            }
        )
        compiled_node = _compiled_type_for(node).from_dict(data)

        if compiled_node.language == ModelLanguage.python:
            # TODO could we also 'minify' this code at all? just aesthetic, not functional

            # quoting seems like something very specific to sql so far;
            # for all python implementations we are seeing, there's no quoting.
            # TODO: try to find a better way to do this
            original_quoting = self.config.quoting
            self.config.quoting = {key: False for key in original_quoting.keys()}
            context = self._create_node_context(compiled_node, manifest, extra_context)

            postfix = jinja.get_rendered(
                "{{ py_script_postfix(model) }}",
                context,
                node,
            )
            # we should NOT jinja render the python model's 'raw code'
            compiled_node.compiled_code = f"{node.raw_code}\n\n{postfix}"
            # restore quoting settings at the end, since the context is lazily evaluated
            self.config.quoting = original_quoting

        else:
            context = self._create_node_context(compiled_node, manifest, extra_context)
            compiled_node.compiled_code = jinja.get_rendered(
                node.raw_code,
                context,
                node,
            )

        compiled_node.relation_name = self._get_relation_name(node)

        compiled_node.compiled = True

        return compiled_node

    def write_graph_file(self, linker: Linker, manifest: Manifest):
        filename = graph_file_name
        graph_path = os.path.join(self.config.target_path, filename)
        if flags.WRITE_JSON:
            linker.write_graph(graph_path, manifest)

    def link_node(self, linker: Linker, node: GraphMemberNode, manifest: Manifest):
        linker.add_node(node.unique_id)

        for dependency in node.depends_on_nodes:
            if dependency in manifest.nodes:
                self.dependency(node.unique_id, (manifest.nodes[dependency].unique_id))
                linker.dependency(node.unique_id, (manifest.nodes[dependency].unique_id))
            elif dependency in manifest.sources:
                self.dependency(node.unique_id, (manifest.sources[dependency].unique_id))
                linker.dependency(node.unique_id, (manifest.sources[dependency].unique_id))
            elif dependency in manifest.metrics:
                self.dependency(node.unique_id, (manifest.metrics[dependency].unique_id))
            elif dependency in manifest.semantic_models:
                self.dependency(node.unique_id, (manifest.semantic_models[dependency].unique_id))
                linker.dependency(node.unique_id, (manifest.metrics[dependency].unique_id))
            else:
                raise GraphDependencyNotFoundError(node, dependency)
                dependency_not_found(node, dependency)

    def link_graph(self, manifest: Manifest):
    def link_graph(self, linker: Linker, manifest: Manifest, add_test_edges: bool = False):
        for source in manifest.sources.values():
            self.add_node(source.unique_id)
        for semantic_model in manifest.semantic_models.values():
            self.add_node(semantic_model.unique_id)
            linker.add_node(source.unique_id)
        for node in manifest.nodes.values():
            self.link_node(node, manifest)
            self.link_node(linker, node, manifest)
        for exposure in manifest.exposures.values():
            self.link_node(exposure, manifest)
            self.link_node(linker, exposure, manifest)
        for metric in manifest.metrics.values():
            self.link_node(metric, manifest)
            self.link_node(linker, metric, manifest)

        cycle = self.find_cycles()
        cycle = linker.find_cycles()

        if cycle:
            raise RuntimeError("Found a cycle: {}".format(cycle))

    def add_test_edges(self, manifest: Manifest) -> None:
        if add_test_edges:
            manifest.build_parent_and_child_maps()
            self.add_test_edges(linker, manifest)

    def add_test_edges(self, linker: Linker, manifest: Manifest) -> None:
        """This method adds additional edges to the DAG. For a given non-test
        executable node, add an edge from an upstream test to the given node if
        the set of nodes the test depends on is a subset of the upstream nodes
@@ -217,7 +461,7 @@ class Linker:
        #  \/  |       test2 ----|   |
        # test1 ----|---------------|

        for node_id in self.graph:
        for node_id in linker.graph:
            # If node is executable (in manifest.nodes) and does _not_
            # represent a test, continue.
            if (
@@ -225,7 +469,7 @@ class Linker:
                and manifest.nodes[node_id].resource_type != NodeType.Test
            ):
                # Get *everything* upstream of the node
                all_upstream_nodes = nx.traversal.bfs_tree(self.graph, node_id, reverse=True)
                all_upstream_nodes = nx.traversal.bfs_tree(linker.graph, node_id, reverse=True)
                # Get the set of upstream nodes not including the current node.
                upstream_nodes = set([n for n in all_upstream_nodes if n != node_id])

@@ -247,356 +491,50 @@ class Linker:
                # is a subset of all upstream nodes of the current node,
                # add an edge from the upstream test to the current node.
                if test_depends_on.issubset(upstream_nodes):
                    self.graph.add_edge(upstream_test, node_id, edge_type="parent_test")
                    linker.graph.add_edge(upstream_test, node_id)
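# ---------------------------------------------------------------------------
# Editor's sketch (not part of this diff): the subset rule above on a toy
# networkx graph. Node names are illustrative.
# ---------------------------------------------------------------------------
import networkx as nx

g = nx.DiGraph()
g.add_edges_from([("model.a", "test.t1"), ("model.a", "model.b")])
# everything upstream of model.b, excluding model.b itself
upstream = {n for n in nx.traversal.bfs_tree(g, "model.b", reverse=True) if n != "model.b"}
test_depends_on = set(g.predecessors("test.t1"))  # {"model.a"}
if test_depends_on.issubset(upstream):
    # t1 only touches nodes upstream of model.b, so model.b can wait for it
    g.add_edge("test.t1", "model.b", edge_type="parent_test")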

    def get_graph(self, manifest: Manifest) -> Graph:
        self.link_graph(manifest)
        return Graph(self.graph)

    def get_graph_summary(self, manifest: Manifest) -> Dict[int, Dict[str, Any]]:
        """Create a smaller summary of the graph, suitable for basic diagnostics
        and performance tuning. The summary includes only the edge structure,
        node types, and node names. Each of the n nodes is assigned an integer
        index 0, 1, 2,..., n-1 for compactness."""
        graph_nodes = dict()
        index_dict = dict()
        for node_index, node_name in enumerate(self.graph):
            index_dict[node_name] = node_index
            data = manifest.expect(node_name).to_dict(omit_none=True)
            graph_nodes[node_index] = {"name": node_name, "type": data["resource_type"]}

        for node_index, node in graph_nodes.items():
            successors = [index_dict[n] for n in self.graph.successors(node["name"])]
            if successors:
                node["succ"] = successors

        return graph_nodes
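# Editor's sketch (not part of this diff): for a two-node graph, the summary
# above comes out roughly as (ids and names illustrative):
example_summary = {
    0: {"name": "model.proj.a", "type": "model", "succ": [1]},
    1: {"name": "model.proj.b", "type": "model"},
}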


class Compiler:
    def __init__(self, config):
        self.config = config

    def initialize(self):
        make_directory(self.config.project_target_path)
        make_directory(self.config.packages_install_path)

    # creates a ModelContext which is converted to
    # a dict for jinja rendering of SQL
    def _create_node_context(
        self,
        node: ManifestSQLNode,
        manifest: Manifest,
        extra_context: Dict[str, Any],
    ) -> Dict[str, Any]:

        context = generate_runtime_model_context(node, self.config, manifest)
        context.update(extra_context)

        if isinstance(node, GenericTestNode):
            # for test nodes, add a special keyword args value to the context
            jinja.add_rendered_test_kwargs(context, node)

        return context

    def add_ephemeral_prefix(self, name: str):
        adapter = get_adapter(self.config)
        relation_cls = adapter.Relation
        return relation_cls.add_ephemeral_prefix(name)

    def _recursively_prepend_ctes(
        self,
        model: ManifestSQLNode,
        manifest: Manifest,
        extra_context: Optional[Dict[str, Any]],
    ) -> Tuple[ManifestSQLNode, List[InjectedCTE]]:
        """This method is called by the 'compile_node' method. Starting
        from the node that it is passed in, it will recursively call
        itself using the 'extra_ctes'. The 'ephemeral' models do
        not produce SQL that is executed directly, instead they
        are rolled up into the models that refer to them by
        inserting CTEs into the SQL.
        """
        if model.compiled_code is None:
            raise DbtRuntimeError("Cannot inject ctes into an uncompiled node", model)

        # extra_ctes_injected flag says that we've already recursively injected the ctes
        if model.extra_ctes_injected:
            return (model, model.extra_ctes)

        # Just to make it plain that nothing is actually injected for this case
        if len(model.extra_ctes) == 0:
            # SeedNodes don't have compilation attributes
            if not isinstance(model, SeedNode):
                model.extra_ctes_injected = True
            return (model, [])

        # This stores the ctes which will all be recursively
        # gathered and then "injected" into the model.
        prepended_ctes: List[InjectedCTE] = []

        # extra_ctes are added to the model by
        # RuntimeRefResolver.create_relation, which adds an
        # extra_cte for every model relation which is an
        # ephemeral model. InjectedCTEs have a unique_id and sql.
        # extra_ctes start out with sql set to None, and the sql is set in this loop.
        for cte in model.extra_ctes:
            if cte.id not in manifest.nodes:
                raise DbtInternalError(
                    f"During compilation, found a cte reference that "
                    f"could not be resolved: {cte.id}"
                )
            cte_model = manifest.nodes[cte.id]
            assert not isinstance(cte_model, SeedNode)

            if not cte_model.is_ephemeral_model:
                raise DbtInternalError(f"{cte.id} is not ephemeral")

            # This model has already been compiled and extra_ctes_injected, so it's been
            # through here before. We already checked above for extra_ctes_injected, but
            # we check again because updates may have happened in another thread.
            if cte_model.compiled is True and cte_model.extra_ctes_injected is True:
                new_prepended_ctes = cte_model.extra_ctes

            # if the cte_model isn't compiled, i.e. first time here
            else:
                # This is an ephemeral parsed model that we can compile.
                # Render the raw_code and set compiled to True
                cte_model = self._compile_code(cte_model, manifest, extra_context)
                # recursively call this method, sets extra_ctes_injected to True
                cte_model, new_prepended_ctes = self._recursively_prepend_ctes(
                    cte_model, manifest, extra_context
                )
                # Write compiled SQL file
                self._write_node(cte_model)

            _extend_prepended_ctes(prepended_ctes, new_prepended_ctes)

            new_cte_name = self.add_ephemeral_prefix(cte_model.name)
            rendered_sql = cte_model._pre_injected_sql or cte_model.compiled_code
            sql = f" {new_cte_name} as (\n{rendered_sql}\n)"

            _add_prepended_cte(prepended_ctes, InjectedCTE(id=cte.id, sql=sql))

        injected_sql = inject_ctes_into_sql(
            model.compiled_code,
            prepended_ctes,
        )
        # Check again before updating for multi-threading
        if not model.extra_ctes_injected:
            model._pre_injected_sql = model.compiled_code
            model.compiled_code = injected_sql
            model.extra_ctes = prepended_ctes
            model.extra_ctes_injected = True

        # if model.extra_ctes is not set to prepended ctes, something went wrong
        return model, model.extra_ctes

    # Sets compiled_code and compiled flag in the ManifestSQLNode passed in,
    # creates a "context" dictionary for jinja rendering,
    # and then renders the "compiled_code" using the node, the
    # raw_code and the context.
    def _compile_code(
        self,
        node: ManifestSQLNode,
        manifest: Manifest,
        extra_context: Optional[Dict[str, Any]] = None,
    ) -> ManifestSQLNode:
        if extra_context is None:
            extra_context = {}

        if node.language == ModelLanguage.python:
            context = self._create_node_context(node, manifest, extra_context)

            postfix = jinja.get_rendered(
                "{{ py_script_postfix(model) }}",
                context,
                node,
            )
            # we should NOT jinja render the python model's 'raw code'
            node.compiled_code = f"{node.raw_code}\n\n{postfix}"

        else:
            context = self._create_node_context(node, manifest, extra_context)
            node.compiled_code = jinja.get_rendered(
                node.raw_code,
                context,
                node,
            )

        node.compiled = True

        # relation_name is set at parse time, except for tests without store_failures,
        # but a cli param can turn on store_failures, so we set it here.
        if (
            node.resource_type == NodeType.Test
            and node.relation_name is None
            and node.is_relational
        ):
            adapter = get_adapter(self.config)
            relation_cls = adapter.Relation
            relation_name = str(relation_cls.create_from(self.config, node))
            node.relation_name = relation_name

        return node

    # This method doesn't actually "compile" any of the nodes. That is done by the
    # "compile_node" method. This creates a Linker and builds the networkx graph,
    # writes out the graph.gpickle file, and prints the stats, returning a Graph object.
    def compile(self, manifest: Manifest, write=True, add_test_edges=False) -> Graph:
        self.initialize()
        linker = Linker()
        linker.link_graph(manifest)

        # Create a file containing basic information about graph structure,
        # supporting diagnostics and performance analysis.
        summaries: Dict = dict()
        summaries["_invocation_id"] = get_invocation_id()
        summaries["linked"] = linker.get_graph_summary(manifest)

        if add_test_edges:
            manifest.build_parent_and_child_maps()
            linker.add_test_edges(manifest)

            # Create another diagnostic summary, just as above, but this time
            # including the test edges.
            summaries["with_test_edges"] = linker.get_graph_summary(manifest)

        with open(
            os.path.join(self.config.project_target_path, "graph_summary.json"), "w"
        ) as out_stream:
            try:
                out_stream.write(json.dumps(summaries))
            except Exception as e:  # This is non-essential information, so merely note failures.
                fire_event(
                    Note(
                        msg=f"An error was encountered writing the graph summary information: {e}"
                    )
                )
        self.link_graph(linker, manifest, add_test_edges)

        stats = _generate_stats(manifest)

        if write:
            self.write_graph_file(linker, manifest)

        # Do not print these for ListTasks
        if not (
            self.config.args.__class__ == argparse.Namespace
            and self.config.args.cls == list_task.ListTask
        ):
            stats = _generate_stats(manifest)
            print_compile_stats(stats)
        print_compile_stats(stats)

        return Graph(linker.graph)

    def write_graph_file(self, linker: Linker, manifest: Manifest):
        filename = graph_file_name
        graph_path = os.path.join(self.config.project_target_path, filename)
        flags = get_flags()
        if flags.WRITE_JSON:
            linker.write_graph(graph_path, manifest)

    # writes the "compiled_code" into the target/compiled directory
    def _write_node(self, node: ManifestSQLNode) -> ManifestSQLNode:
        if not node.extra_ctes_injected or node.resource_type in (
            NodeType.Snapshot,
            NodeType.Seed,
        ):
    def _write_node(self, node: NonSourceCompiledNode) -> ManifestNode:
        if not node.extra_ctes_injected or node.resource_type == NodeType.Snapshot:
            return node
        fire_event(WritingInjectedSQLForNode(node_info=get_node_info()))
        fire_event(WritingInjectedSQLForNode(unique_id=node.unique_id))

        if node.compiled_code:
            node.compiled_path = node.get_target_write_path(self.config.target_path, "compiled")
            node.write_node(self.config.project_root, node.compiled_path, node.compiled_code)
            node.compiled_path = node.write_node(
                self.config.target_path, "compiled", node.compiled_code
            )
        return node

    def compile_node(
        self,
        node: ManifestSQLNode,
        node: ManifestNode,
        manifest: Manifest,
        extra_context: Optional[Dict[str, Any]] = None,
        write: bool = True,
    ) -> ManifestSQLNode:
    ) -> NonSourceCompiledNode:
        """This is the main entry point into this code. It's called by
        CompileRunner.compile, GenericRPCRunner.compile, and
        RunTask.get_hook_sql. It calls '_compile_code' to render
        the node's raw_code into compiled_code, and then calls the
        RunTask.get_hook_sql. It calls '_compile_node' to convert
        the node into a compiled node, and then calls the
        recursive method to "prepend" the ctes.
        """
        node = self._compile_code(node, manifest, extra_context)
        node = self._compile_node(node, manifest, extra_context)

        node, _ = self._recursively_prepend_ctes(node, manifest, extra_context)
        if write:
            self._write_node(node)
        return node


def inject_ctes_into_sql(sql: str, ctes: List[InjectedCTE]) -> str:
    """
    `ctes` is a list of InjectedCTEs like:

        [
            InjectedCTE(
                id="cte_id_1",
                sql="__dbt__cte__ephemeral as (select * from table)",
            ),
            InjectedCTE(
                id="cte_id_2",
                sql="__dbt__cte__events as (select id, type from events)",
            ),
        ]

    Given `sql` like:

        "with internal_cte as (select * from sessions)
        select * from internal_cte"

    This will spit out:

        "with __dbt__cte__ephemeral as (select * from table),
        __dbt__cte__events as (select id, type from events),
        internal_cte as (select * from sessions)
        select * from internal_cte"

    (Whitespace added for readability.)
    """
    if len(ctes) == 0:
        return sql

    parsed_stmts = sqlparse.parse(sql)
    parsed = parsed_stmts[0]

    with_stmt = None
    for token in parsed.tokens:
        if token.is_keyword and token.normalized == "WITH":
            with_stmt = token
        elif token.is_keyword and token.normalized == "RECURSIVE" and with_stmt is not None:
            with_stmt = token
            break
        elif not token.is_whitespace and with_stmt is not None:
            break

    if with_stmt is None:
        # no with stmt, add one, and inject CTEs right at the beginning
        # [original_sql]
        first_token = parsed.token_first()
        with_token = sqlparse.sql.Token(sqlparse.tokens.Keyword, "with")
        parsed.insert_before(first_token, with_token)
        # [with][original_sql]
        injected_ctes = ", ".join(c.sql for c in ctes) + " "
        injected_ctes_token = sqlparse.sql.Token(sqlparse.tokens.Keyword, injected_ctes)
        parsed.insert_after(with_token, injected_ctes_token)
        # [with][joined_ctes][original_sql]
    else:
        # with stmt exists so we don't need to add one, but we do need to add a comma
        # between the injected ctes and the original sql
        # [with][original_sql]
        injected_ctes = ", ".join(c.sql for c in ctes)
        injected_ctes_token = sqlparse.sql.Token(sqlparse.tokens.Keyword, injected_ctes)
        parsed.insert_after(with_stmt, injected_ctes_token)
        # [with][joined_ctes][original_sql]
        comma_token = sqlparse.sql.Token(sqlparse.tokens.Punctuation, ", ")
        parsed.insert_after(injected_ctes_token, comma_token)
        # [with][joined_ctes][, ][original_sql]

    return str(parsed)
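# ---------------------------------------------------------------------------
# Editor's note (not part of this diff): the RECURSIVE branch above keeps the
# keyword pair "with recursive" intact, so injected CTEs land after it.
# Hypothetical call, assuming the docstring's InjectedCTE shape:
#
#   inject_ctes_into_sql(
#       "with recursive t as (select 1) select * from t",
#       [InjectedCTE(id="x", sql="__dbt__cte__x as (select 2)")],
#   )
#   -> "with recursive __dbt__cte__x as (select 2), t as (select 1) select * from t"
#      (modulo whitespace)
# ---------------------------------------------------------------------------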


@@ -1,4 +1,4 @@
# all these are just exports, they need "noqa" so flake8 will not complain.
from .profile import Profile, read_user_config  # noqa
from .project import Project, IsFQNResource, PartialProject  # noqa
from .runtime import RuntimeConfig  # noqa
from .project import Project, IsFQNResource  # noqa
from .runtime import RuntimeConfig, UnsetProfileConfig  # noqa

@@ -4,19 +4,17 @@ import os

from dbt.dataclass_schema import ValidationError

from dbt.flags import get_flags
from dbt import flags
from dbt.clients.system import load_file_contents
from dbt.clients.yaml_helper import load_yaml_text
from dbt.contracts.connection import Credentials, HasCredentials
from dbt.contracts.project import ProfileConfig, UserConfig
from dbt.exceptions import (
    CompilationError,
    DbtProfileError,
    DbtProjectError,
    DbtValidationError,
    DbtRuntimeError,
    ProfileConfigError,
)
from dbt.exceptions import CompilationException
from dbt.exceptions import DbtProfileError
from dbt.exceptions import DbtProjectError
from dbt.exceptions import ValidationException
from dbt.exceptions import RuntimeException
from dbt.exceptions import validator_error_message
from dbt.events.types import MissingProfileTarget
from dbt.events.functions import fire_event
from dbt.utils import coerce_dict_str
@@ -32,6 +30,22 @@ dbt encountered an error while trying to read your profiles.yml file.
"""


NO_SUPPLIED_PROFILE_ERROR = """\
dbt cannot run because no profile was specified for this dbt project.
To specify a profile for this project, add a line like this to
your dbt_project.yml file:

profile: [profile name]

Here, [profile name] should be replaced with a profile name
defined in your profiles.yml file. You can find profiles.yml here:

{profiles_file}/profiles.yml
""".format(
    profiles_file=flags.DEFAULT_PROFILES_DIR
)


def read_profile(profiles_dir: str) -> Dict[str, Any]:
    path = os.path.join(profiles_dir, "profiles.yml")

@@ -44,9 +58,9 @@ def read_profile(profiles_dir: str) -> Dict[str, Any]:
                msg = f"The profiles.yml file at {path} is empty"
                raise DbtProfileError(INVALID_PROFILE_MESSAGE.format(error_string=msg))
            return yaml_content
        except DbtValidationError as e:
        except ValidationException as e:
            msg = INVALID_PROFILE_MESSAGE.format(error_string=e)
            raise DbtValidationError(msg) from e
            raise ValidationException(msg) from e

    return {}

@@ -59,7 +73,7 @@ def read_user_config(directory: str) -> UserConfig:
            if user_config is not None:
                UserConfig.validate(user_config)
                return UserConfig.from_dict(user_config)
    except (DbtRuntimeError, ValidationError):
    except (RuntimeException, ValidationError):
        pass
    return UserConfig()

@@ -142,7 +156,7 @@ class Profile(HasCredentials):
            dct = self.to_profile_info(serialize_credentials=True)
            ProfileConfig.validate(dct)
        except ValidationError as exc:
            raise ProfileConfigError(exc) from exc
            raise DbtProfileError(validator_error_message(exc)) from exc

    @staticmethod
    def _credentials_from_profile(
@@ -166,8 +180,8 @@ class Profile(HasCredentials):
            data = cls.translate_aliases(profile)
            cls.validate(data)
            credentials = cls.from_dict(data)
        except (DbtRuntimeError, ValidationError) as e:
            msg = str(e) if isinstance(e, DbtRuntimeError) else e.message
        except (RuntimeException, ValidationError) as e:
            msg = str(e) if isinstance(e, RuntimeException) else e.message
            raise DbtProfileError(
                'Credentials in profile "{}", target "{}" invalid: {}'.format(
                    profile_name, target_name, msg
@@ -181,33 +195,10 @@ class Profile(HasCredentials):
        args_profile_name: Optional[str],
        project_profile_name: Optional[str] = None,
    ) -> str:
        # TODO: This duplicates, as a direct copy, the implementation in dbt.cli.resolvers.
        # The dbt.cli.resolvers implementation can't be used because it causes a circular dependency.
        # This should be removed in favor of safe default access on the Flags module once
        # https://github.com/dbt-labs/dbt-core/issues/6259 is closed.
        def default_profiles_dir():
            from pathlib import Path

            return Path.cwd() if (Path.cwd() / "profiles.yml").exists() else Path.home() / ".dbt"

        profile_name = project_profile_name
        if args_profile_name is not None:
            profile_name = args_profile_name
        if profile_name is None:
            NO_SUPPLIED_PROFILE_ERROR = """\
dbt cannot run because no profile was specified for this dbt project.
To specify a profile for this project, add a line like this to
your dbt_project.yml file:

profile: [profile name]

Here, [profile name] should be replaced with a profile name
defined in your profiles.yml file. You can find profiles.yml here:

{profiles_file}/profiles.yml
""".format(
                profiles_file=default_profiles_dir()
            )
            raise DbtProjectError(NO_SUPPLIED_PROFILE_ERROR)
        return profile_name

@@ -306,7 +297,7 @@ defined in your profiles.yml file. You can find profiles.yml here:

        try:
            profile_data = renderer.render_data(raw_profile_data)
        except CompilationError as exc:
        except CompilationException as exc:
            raise DbtProfileError(str(exc)) from exc
        return target_name, profile_data

@@ -408,13 +399,11 @@ defined in your profiles.yml file. You can find profiles.yml here:
        )

    @classmethod
    def render(
    def render_from_args(
        cls,
        args: Any,
        renderer: ProfileRenderer,
        project_profile_name: Optional[str],
        profile_name_override: Optional[str] = None,
        target_override: Optional[str] = None,
        threads_override: Optional[int] = None,
    ) -> "Profile":
        """Given the raw profiles as read from disk and the name of the desired
        profile if specified, return the profile component of the runtime
@@ -430,9 +419,10 @@ defined in your profiles.yml file. You can find profiles.yml here:
            target could not be found.
        :returns Profile: The new Profile object.
        """
        flags = get_flags()
        threads_override = getattr(args, "threads", None)
        target_override = getattr(args, "target", None)
        raw_profiles = read_profile(flags.PROFILES_DIR)
        profile_name = cls.pick_profile_name(profile_name_override, project_profile_name)
        profile_name = cls.pick_profile_name(getattr(args, "profile", None), project_profile_name)
        return cls.from_raw_profiles(
            raw_profiles=raw_profiles,
            profile_name=profile_name,
@@ -12,35 +12,34 @@ from typing import (
)
from typing_extensions import Protocol, runtime_checkable

import hashlib
import os

from dbt.flags import get_flags
from dbt import deprecations
from dbt.constants import DEPENDENCIES_FILE_NAME, PACKAGES_FILE_NAME
from dbt.clients.system import path_exists, resolve_path_from_base, load_file_contents
from dbt import flags, deprecations
from dbt.clients.system import resolve_path_from_base
from dbt.clients.system import path_exists
from dbt.clients.system import load_file_contents
from dbt.clients.yaml_helper import load_yaml_text
from dbt.contracts.connection import QueryComment
from dbt.exceptions import (
    DbtProjectError,
    SemverError,
    ProjectContractBrokenError,
    ProjectContractError,
    DbtRuntimeError,
)
from dbt.exceptions import DbtProjectError
from dbt.exceptions import SemverException
from dbt.exceptions import validator_error_message
from dbt.exceptions import RuntimeException
from dbt.graph import SelectionSpec
from dbt.helper_types import NoValue
from dbt.semver import VersionSpecifier, versions_compatible
from dbt.semver import VersionSpecifier
from dbt.semver import versions_compatible
from dbt.version import get_installed_version
from dbt.utils import MultiDict, md5
from dbt.utils import MultiDict
from dbt.node_types import NodeType
from dbt.config.selectors import SelectorDict
from dbt.contracts.project import (
    Project as ProjectContract,
    SemverString,
)
from dbt.contracts.project import PackageConfig, ProjectPackageMetadata
from dbt.contracts.project import PackageConfig
from dbt.dataclass_schema import ValidationError
from .renderer import DbtProjectYamlRenderer, PackageRenderer
from .renderer import DbtProjectYamlRenderer
from .selectors import (
    selector_config_from_data,
    selector_data_from_root,
@@ -76,11 +75,6 @@ Validator Error:
{error}
"""

MISSING_DBT_PROJECT_ERROR = """\
No dbt_project.yml found at expected path {path}
Verify that each entry within packages.yml (and its transitive dependencies) contains a file named dbt_project.yml
"""


@runtime_checkable
class IsFQNResource(Protocol):
@@ -94,36 +88,17 @@ def _load_yaml(path):
    return load_yaml_text(contents)


def package_and_project_data_from_root(project_root):
    package_filepath = resolve_path_from_base(PACKAGES_FILE_NAME, project_root)
    dependencies_filepath = resolve_path_from_base(DEPENDENCIES_FILE_NAME, project_root)
def package_data_from_root(project_root):
    package_filepath = resolve_path_from_base("packages.yml", project_root)

    packages_yml_dict = {}
    dependencies_yml_dict = {}
    if path_exists(package_filepath):
        packages_yml_dict = _load_yaml(package_filepath) or {}
    if path_exists(dependencies_filepath):
        dependencies_yml_dict = _load_yaml(dependencies_filepath) or {}

    if "packages" in packages_yml_dict and "packages" in dependencies_yml_dict:
        msg = "The 'packages' key cannot be specified in both packages.yml and dependencies.yml"
        raise DbtProjectError(msg)
    if "projects" in packages_yml_dict:
        msg = "The 'projects' key cannot be specified in packages.yml"
        raise DbtProjectError(msg)

    packages_specified_path = PACKAGES_FILE_NAME
    packages_dict = {}
    if "packages" in dependencies_yml_dict:
        packages_dict["packages"] = dependencies_yml_dict["packages"]
        packages_specified_path = DEPENDENCIES_FILE_NAME
    else:  # don't check for "packages" here so we capture invalid keys in packages.yml
        packages_dict = packages_yml_dict

    return packages_dict, packages_specified_path
        packages_dict = _load_yaml(package_filepath)
    else:
        packages_dict = None
    return packages_dict
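# ---------------------------------------------------------------------------
# Editor's sketch (not part of this diff): the precedence rule implemented by
# package_and_project_data_from_root above, on toy dicts with no file I/O.
# ---------------------------------------------------------------------------
def pick_packages(packages_yml: dict, dependencies_yml: dict):
    if "packages" in packages_yml and "packages" in dependencies_yml:
        raise ValueError("'packages' cannot live in both packages.yml and dependencies.yml")
    if "packages" in dependencies_yml:
        return {"packages": dependencies_yml["packages"]}, "dependencies.yml"
    return packages_yml, "packages.yml"

# pick_packages({"packages": [{"package": "dbt-labs/dbt_utils", "version": "1.0.0"}]}, {})
#   -> ({"packages": [...]}, "packages.yml")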


def package_config_from_data(packages_data: Dict[str, Any]) -> PackageConfig:
def package_config_from_data(packages_data: Dict[str, Any]):
    if not packages_data:
        packages_data = {"packages": []}

@@ -157,10 +132,11 @@ def _all_source_paths(
    analysis_paths: List[str],
    macro_paths: List[str],
) -> List[str]:
    paths = chain(model_paths, seed_paths, snapshot_paths, analysis_paths, macro_paths)
    # Strip trailing slashes since the path is the same even though the name is not
    stripped_paths = map(lambda s: s.rstrip("/"), paths)
    return list(set(stripped_paths))
    # We need to turn a list of lists into just a list, then convert to a set to
    # get only unique elements, then back to a list
    return list(
        set(list(chain(model_paths, seed_paths, snapshot_paths, analysis_paths, macro_paths)))
    )
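# Editor's sketch (not part of this diff): both variants above dedupe to one
# list; the newer one also strips trailing slashes, so "models/" == "models":
from itertools import chain
paths = chain(["models/"], ["seeds"], ["snapshots"], ["analyses"], ["models"])
sorted({p.rstrip("/") for p in paths})  # -> ['analyses', 'models', 'seeds', 'snapshots']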


T = TypeVar("T")
@@ -180,14 +156,16 @@ def value_or(value: Optional[T], default: T) -> T:
    return value


def load_raw_project(project_root: str) -> Dict[str, Any]:
def _raw_project_from(project_root: str) -> Dict[str, Any]:

    project_root = os.path.normpath(project_root)
    project_yaml_filepath = os.path.join(project_root, "dbt_project.yml")

    # get the project.yml contents
    if not path_exists(project_yaml_filepath):
        raise DbtProjectError(MISSING_DBT_PROJECT_ERROR.format(path=project_yaml_filepath))
        raise DbtProjectError(
            "no dbt_project.yml found at expected path {}".format(project_yaml_filepath)
        )

    project_dict = _load_yaml(project_yaml_filepath)

@@ -241,7 +219,7 @@ def _get_required_version(

    try:
        dbt_version = _parse_versions(dbt_raw_version)
    except SemverError as e:
    except SemverException as e:
        raise DbtProjectError(str(e)) from e

    if verify_version:
@@ -264,14 +242,13 @@ class RenderComponents:

@dataclass
class PartialProject(RenderComponents):
    # This class includes the project_dict, packages_dict, selectors_dict, etc. from RenderComponents
    profile_name: Optional[str] = field(
        metadata=dict(description="The unrendered profile name in the project, if set")
    )
    project_name: Optional[str] = field(
        metadata=dict(
            description=(
                "The name of the project. This should always be set and will not be rendered"
                "The name of the project. This should always be set and will not " "be rendered"
            )
        )
    )
@@ -281,9 +258,6 @@ class PartialProject(RenderComponents):
    verify_version: bool = field(
        metadata=dict(description=("If True, verify the dbt version matches the required version"))
    )
    packages_specified_path: str = field(
        metadata=dict(description="The filename where packages were specified")
    )

    def render_profile_name(self, renderer) -> Optional[str]:
        if self.profile_name is None:
@@ -296,9 +270,7 @@ class PartialProject(RenderComponents):
    ) -> RenderComponents:

        rendered_project = renderer.render_project(self.project_dict, self.project_root)
        rendered_packages = renderer.render_packages(
            self.packages_dict, self.packages_specified_path
        )
        rendered_packages = renderer.render_packages(self.packages_dict)
        rendered_selectors = renderer.render_selectors(self.selectors_dict)

        return RenderComponents(
@@ -307,7 +279,7 @@ class PartialProject(RenderComponents):
            selectors_dict=rendered_selectors,
        )

    # Called by Project.from_project_root (not PartialProject.from_project_root!)
    # Called by 'collect_parts' in RuntimeConfig
    def render(self, renderer: DbtProjectYamlRenderer) -> "Project":
        try:
            rendered = self.get_rendered(renderer)
@@ -317,34 +289,23 @@ class PartialProject(RenderComponents):
            exc.path = os.path.join(self.project_root, "dbt_project.yml")
            raise

    def render_package_metadata(self, renderer: PackageRenderer) -> ProjectPackageMetadata:
        packages_data = renderer.render_data(self.packages_dict)
        packages_config = package_config_from_data(packages_data)
        if not self.project_name:
            raise DbtProjectError("Package dbt_project.yml must have a name!")
        return ProjectPackageMetadata(self.project_name, packages_config.packages)

    def check_config_path(
        self, project_dict, deprecated_path, expected_path=None, default_value=None
    ):
    def check_config_path(self, project_dict, deprecated_path, exp_path):
        if deprecated_path in project_dict:
            if expected_path in project_dict:
            if exp_path in project_dict:
                msg = (
                    "{deprecated_path} and {expected_path} cannot both be defined. The "
                    "`{deprecated_path}` config has been deprecated in favor of `{expected_path}`. "
                    "{deprecated_path} and {exp_path} cannot both be defined. The "
                    "`{deprecated_path}` config has been deprecated in favor of `{exp_path}`. "
                    "Please update your `dbt_project.yml` configuration to reflect this "
                    "change."
                )
                raise DbtProjectError(
                    msg.format(deprecated_path=deprecated_path, expected_path=expected_path)
                    msg.format(deprecated_path=deprecated_path, exp_path=exp_path)
                )
            # this field is no longer supported, but many projects may specify it with the default value
            # if so, let's only raise this deprecation warning if they set a custom value
            if not default_value or project_dict[deprecated_path] != default_value:
                kwargs = {"deprecated_path": deprecated_path}
                if expected_path:
                    kwargs.update({"exp_path": expected_path})
                deprecations.warn(f"project-config-{deprecated_path}", **kwargs)
            deprecations.warn(
                f"project-config-{deprecated_path}",
                deprecated_path=deprecated_path,
                exp_path=exp_path,
            )
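# Editor's note (not part of this diff): how the renamed-config check above
# fires, with toy inputs (both variants behave the same for these):
#   check_config_path({"source-paths": ["models"]}, "source-paths", "model-paths")
#     -> deprecation warning "project-config-source-paths"
#   check_config_path({"source-paths": ["models"], "model-paths": ["models"]}, ...)
#     -> DbtProjectError, since both the old and new key are defined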

    def create_project(self, rendered: RenderComponents) -> "Project":
        unrendered = RenderComponents(
@@ -359,14 +320,12 @@ class PartialProject(RenderComponents):

        self.check_config_path(rendered.project_dict, "source-paths", "model-paths")
        self.check_config_path(rendered.project_dict, "data-paths", "seed-paths")
        self.check_config_path(rendered.project_dict, "log-path", default_value="logs")
        self.check_config_path(rendered.project_dict, "target-path", default_value="target")

        try:
            ProjectContract.validate(rendered.project_dict)
            cfg = ProjectContract.from_dict(rendered.project_dict)
        except ValidationError as e:
            raise ProjectContractError(e) from e
            raise DbtProjectError(validator_error_message(e)) from e
        # name/version are required in the Project definition, so we can assume
        # they are present
        name = cfg.name
@@ -404,13 +363,9 @@ class PartialProject(RenderComponents):

        docs_paths: List[str] = value_or(cfg.docs_paths, all_source_paths)
        asset_paths: List[str] = value_or(cfg.asset_paths, [])
        flags = get_flags()

        flag_target_path = str(flags.TARGET_PATH) if flags.TARGET_PATH else None
        target_path: str = flag_or(flag_target_path, cfg.target_path, "target")
        log_path: str = str(flags.LOG_PATH)

        target_path: str = flag_or(flags.TARGET_PATH, cfg.target_path, "target")
        clean_targets: List[str] = value_or(cfg.clean_targets, [target_path])
        log_path: str = flag_or(flags.LOG_PATH, cfg.log_path, "logs")
        packages_install_path: str = value_or(cfg.packages_install_path, "dbt_packages")
        # in the default case we'll populate this once we know the adapter type
        # It would be nice to just pass along a Quoting here, but that would
@@ -450,7 +405,7 @@ class PartialProject(RenderComponents):

        query_comment = _query_comment_from_cfg(cfg.query_comment)

        packages: PackageConfig = package_config_from_data(rendered.packages_dict)
        packages = package_config_from_data(rendered.packages_dict)
        selectors = selector_config_from_data(rendered.selectors_dict)
        manifest_selectors: Dict[str, Any] = {}
        if rendered.selectors_dict and rendered.selectors_dict["selectors"]:
@@ -476,7 +431,6 @@ class PartialProject(RenderComponents):
            clean_targets=clean_targets,
            log_path=log_path,
            packages_install_path=packages_install_path,
            packages_specified_path=self.packages_specified_path,
            quoting=quoting,
            models=models,
            on_run_start=on_run_start,
@@ -497,7 +451,6 @@ class PartialProject(RenderComponents):
            config_version=cfg.config_version,
            unrendered=unrendered,
            project_env_vars=project_env_vars,
            restrict_access=cfg.restrict_access,
        )
        # sanity check - this means an internal issue
        project.validate()
@@ -512,13 +465,11 @@ class PartialProject(RenderComponents):
        selectors_dict: Dict[str, Any],
        *,
        verify_version: bool = False,
        packages_specified_path: str = PACKAGES_FILE_NAME,
    ):
        """Construct a partial project from its constituent dicts."""
        project_name = project_dict.get("name")
        profile_name = project_dict.get("profile")

        # Create a PartialProject
        return cls(
            profile_name=profile_name,
            project_name=project_name,
@@ -527,7 +478,6 @@ class PartialProject(RenderComponents):
            packages_dict=packages_dict,
            selectors_dict=selectors_dict,
            verify_version=verify_version,
            packages_specified_path=packages_specified_path,
        )

    @classmethod
@@ -535,11 +485,15 @@ class PartialProject(RenderComponents):
        cls, project_root: str, *, verify_version: bool = False
    ) -> "PartialProject":
        project_root = os.path.normpath(project_root)
        project_dict = load_raw_project(project_root)
        (
            packages_dict,
            packages_specified_path,
        ) = package_and_project_data_from_root(project_root)
        project_dict = _raw_project_from(project_root)
        config_version = project_dict.get("config-version", 1)
        if config_version != 2:
            raise DbtProjectError(
                f"Invalid config version: {config_version}, expected 2",
                path=os.path.join(project_root, "dbt_project.yml"),
            )

        packages_dict = package_data_from_root(project_root)
        selectors_dict = selector_data_from_root(project_root)
        return cls.from_dicts(
            project_root=project_root,
@@ -547,7 +501,6 @@ class PartialProject(RenderComponents):
            selectors_dict=selectors_dict,
            packages_dict=packages_dict,
            verify_version=verify_version,
            packages_specified_path=packages_specified_path,
        )


@@ -572,7 +525,7 @@ class VarProvider:
@dataclass
class Project:
    project_name: str
    version: Optional[Union[SemverString, float]]
    version: Union[SemverString, float]
    project_root: str
    profile_name: Optional[str]
    model_paths: List[str]
@@ -587,7 +540,6 @@ class Project:
    clean_targets: List[str]
    log_path: str
    packages_install_path: str
    packages_specified_path: str
    quoting: Dict[str, Any]
    models: Dict[str, Any]
    on_run_start: List[str]
@@ -601,14 +553,13 @@ class Project:
    exposures: Dict[str, Any]
    vars: VarProvider
    dbt_version: List[VersionSpecifier]
    packages: PackageConfig
    packages: Dict[str, Any]
    manifest_selectors: Dict[str, Any]
    selectors: SelectorConfig
    query_comment: QueryComment
    config_version: int
    unrendered: RenderComponents
    project_env_vars: Dict[str, Any]
    restrict_access: bool

    @property
    def all_source_paths(self) -> List[str]:
@@ -677,7 +628,6 @@ class Project:
                "vars": self.vars.to_dict(),
                "require-dbt-version": [v.to_version_string() for v in self.dbt_version],
                "config-version": self.config_version,
                "restrict-access": self.restrict_access,
            }
        )
        if self.query_comment:
@@ -692,11 +642,15 @@ class Project:
        try:
            ProjectContract.validate(self.to_project_config())
        except ValidationError as e:
            raise ProjectContractBrokenError(e) from e
            raise DbtProjectError(validator_error_message(e)) from e

    @classmethod
    def partial_load(cls, project_root: str, *, verify_version: bool = False) -> PartialProject:
        return PartialProject.from_project_root(
            project_root,
            verify_version=verify_version,
        )

    # Called by:
    # RtConfig.load_dependencies => RtConfig.load_projects => RtConfig.new_project => Project.from_project_root
    # RtConfig.from_args => RtConfig.collect_parts => load_project => Project.from_project_root
    @classmethod
    def from_project_root(
        cls,
@@ -705,16 +659,16 @@ class Project:
        *,
        verify_version: bool = False,
    ) -> "Project":
        partial = PartialProject.from_project_root(project_root, verify_version=verify_version)
        partial = cls.partial_load(project_root, verify_version=verify_version)
        return partial.render(renderer)

    def hashed_name(self):
        return md5(self.project_name)
        return hashlib.md5(self.project_name.encode("utf-8")).hexdigest()
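# Editor's note (not part of this diff): both variants above produce the same
# digest; dbt.utils.md5 is assumed to be a thin hashlib wrapper. Standalone:
import hashlib

def md5_hex(value: str) -> str:
    return hashlib.md5(value.encode("utf-8")).hexdigest()

# md5_hex("jaffle_shop") -> a stable 32-character hex string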

    def get_selector(self, name: str) -> Union[SelectionSpec, bool]:
        if name not in self.selectors:
            raise DbtRuntimeError(
                f"Could not find selector named {name}, expected one of {list(self.selectors)}"
            raise RuntimeException(
                f"Could not find selector named {name}, expected one of " f"{list(self.selectors)}"
            )
        return self.selectors[name]["definition"]

@@ -734,8 +688,3 @@ class Project:
        if dispatch_entry["macro_namespace"] == macro_namespace:
            return dispatch_entry["search_order"]
        return None

    @property
    def project_target_path(self):
        # If target_path is absolute, project_root will not be included
        return os.path.join(self.project_root, self.target_path)

@@ -1,15 +1,14 @@
from typing import Dict, Any, Tuple, Optional, Union, Callable
import re
import os
from datetime import date

from dbt.clients.jinja import get_rendered, catch_jinja
from dbt.constants import SECRET_ENV_PREFIX, DEPENDENCIES_FILE_NAME
from dbt.constants import SECRET_ENV_PREFIX
from dbt.context.target import TargetContext
from dbt.context.secret import SecretContext, SECRET_PLACEHOLDER
from dbt.context.base import BaseContext
from dbt.contracts.connection import HasCredentials
from dbt.exceptions import DbtProjectError, CompilationError, RecursionError
from dbt.exceptions import DbtProjectError, CompilationException, RecursionException
from dbt.utils import deep_map_render


@@ -34,21 +33,21 @@ class BaseRenderer:
        return self.render_value(value, keypath)

    def render_value(self, value: Any, keypath: Optional[Keypath] = None) -> Any:
        # keypath is ignored (and someone who knows should explain why here)
        # keypath is ignored.
        # if it wasn't read as a string, ignore it
        if not isinstance(value, str):
            return value if not isinstance(value, date) else value.isoformat()

            return value
        try:
            with catch_jinja():
                return get_rendered(value, self.context, native=True)
        except CompilationError as exc:
        except CompilationException as exc:
            msg = f"Could not render {value}: {exc.msg}"
            raise CompilationError(msg) from exc
            raise CompilationException(msg) from exc

    def render_data(self, data: Dict[str, Any]) -> Dict[str, Any]:
        try:
            return deep_map_render(self.render_entry, data)
        except RecursionError:
        except RecursionException:
            raise DbtProjectError(
                f"Cycle detected: {self.name} input has a reference to itself", project=data
            )
@@ -108,7 +107,7 @@ class DbtProjectYamlRenderer(BaseRenderer):
        if cli_vars is None:
            cli_vars = {}
        if profile:
            self.ctx_obj = TargetContext(profile.to_target_dict(), cli_vars)
            self.ctx_obj = TargetContext(profile, cli_vars)
        else:
            self.ctx_obj = BaseContext(cli_vars)  # type:ignore
        context = self.ctx_obj.to_dict()
@@ -132,15 +131,10 @@ class DbtProjectYamlRenderer(BaseRenderer):
        rendered_project["project-root"] = project_root
        return rendered_project

    def render_packages(self, packages: Dict[str, Any], packages_specified_path: str):
    def render_packages(self, packages: Dict[str, Any]):
        """Render the given packages dict"""
        packages = packages or {}  # Sometimes this is None in tests
        package_renderer = self.get_package_renderer()
        if packages_specified_path == DEPENDENCIES_FILE_NAME:
            # We don't want to render the "packages" dictionary that came from dependencies.yml
            return packages
        else:
            return package_renderer.render_data(packages)
        return package_renderer.render_data(packages)

    def render_selectors(self, selectors: Dict[str, Any]):
        return self.render_data(selectors)
@@ -165,8 +159,7 @@ class DbtProjectYamlRenderer(BaseRenderer):
        if first in {"seeds", "models", "snapshots", "tests"}:
            keypath_parts = {(k.lstrip("+ ") if isinstance(k, str) else k) for k in keypath}
            # model-level hooks
            late_rendered_hooks = {"pre-hook", "post-hook", "pre_hook", "post_hook"}
            if keypath_parts.intersection(late_rendered_hooks):
            if "pre-hook" in keypath_parts or "post-hook" in keypath_parts:
                return False

        return True

@@ -188,17 +181,7 @@ class SecretRenderer(BaseRenderer):
        # First, standard Jinja rendering, with special handling for 'secret' environment variables
        # "{{ env_var('DBT_SECRET_ENV_VAR') }}" -> "$$$DBT_SECRET_START$$$DBT_SECRET_ENV_{VARIABLE_NAME}$$$DBT_SECRET_END$$$"
        # This prevents Jinja manipulation of secrets via macros/filters that might leak partial/modified values in logs

        try:
            rendered = super().render_value(value, keypath)
        except Exception as ex:
            if keypath and "password" in keypath:
                # Passwords sometimes contain jinja-esque characters, but we
                # don't want to render them if they aren't valid jinja.
                rendered = value
            else:
                raise ex

        rendered = super().render_value(value, keypath)
        # Now, detect instances of the placeholder value ($$$DBT_SECRET_START...DBT_SECRET_END$$$)
        # and replace them with the actual secret value
        if SECRET_ENV_PREFIX in str(rendered):

@@ -1,78 +1,44 @@
import itertools
import os
from copy import deepcopy
from dataclasses import dataclass
from dataclasses import dataclass, field
from pathlib import Path
from typing import (
    Any,
    Dict,
    Iterable,
    Iterator,
    List,
    Mapping,
    MutableSet,
    Optional,
    Tuple,
    Type,
    Union,
)

from dbt.flags import get_flags
from dbt import flags
from dbt.adapters.factory import get_include_paths, get_relation_class_by_name
from dbt.config.project import load_raw_project
from dbt.contracts.connection import AdapterRequiredConfig, Credentials, HasCredentials
from dbt.config.profile import read_user_config
from dbt.contracts.connection import AdapterRequiredConfig, Credentials
from dbt.contracts.graph.manifest import ManifestMetadata
from dbt.contracts.project import Configuration, UserConfig
from dbt.contracts.relation import ComponentName
from dbt.dataclass_schema import ValidationError
from dbt.events.functions import warn_or_error
from dbt.events.types import UnusedResourceConfigPath
from dbt.exceptions import (
    ConfigContractBrokenError,
    DbtProjectError,
    NonUniquePackageNameError,
    DbtRuntimeError,
    UninstalledPackagesFoundError,
    RuntimeException,
    raise_compiler_error,
    validator_error_message,
    warn_or_error,
)
from dbt.helper_types import DictDefaultEmptyStr, FQNPath, PathSet
from dbt.ui import warning_tag

from .profile import Profile
from .project import Project
from .project import Project, PartialProject
from .renderer import DbtProjectYamlRenderer, ProfileRenderer


# Called by RuntimeConfig.collect_parts class method
def load_project(
    project_root: str,
    version_check: bool,
    profile: HasCredentials,
    cli_vars: Optional[Dict[str, Any]] = None,
) -> Project:
    # get the project with all of the provided information
    project_renderer = DbtProjectYamlRenderer(profile, cli_vars)
    project = Project.from_project_root(
        project_root, project_renderer, verify_version=version_check
    )

    # Save env_vars encountered in rendering for partial parsing
    project.project_env_vars = project_renderer.ctx_obj.env_vars
    return project


def load_profile(
    project_root: str,
    cli_vars: Dict[str, Any],
    profile_name_override: Optional[str] = None,
    target_override: Optional[str] = None,
    threads_override: Optional[int] = None,
) -> Profile:
    raw_project = load_raw_project(project_root)
    raw_profile_name = raw_project.get("profile")
    profile_renderer = ProfileRenderer(cli_vars)
    profile_name = profile_renderer.render_value(raw_profile_name)
    profile = Profile.render(
        profile_renderer, profile_name, profile_name_override, target_override, threads_override
    )
    # Save env_vars encountered in rendering for partial parsing
    profile.profile_env_vars = profile_renderer.ctx_obj.env_vars
    return profile
from .utils import parse_cli_vars


def _project_quoting_dict(proj: Project, profile: Profile) -> Dict[ComponentName, bool]:
@@ -96,21 +62,6 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
    def __post_init__(self):
        self.validate()

    @classmethod
    def get_profile(
        cls,
        project_root: str,
        cli_vars: Dict[str, Any],
        args: Any,
    ) -> Profile:
        return load_profile(
            project_root,
            cli_vars,
            args.profile,
            args.target,
            args.threads,
        )

    # Called by 'new_project' and 'from_args'
    @classmethod
    def from_parts(
@@ -133,7 +84,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
            .replace_dict(_project_quoting_dict(project, profile))
        ).to_dict(omit_none=True)

        cli_vars: Dict[str, Any] = getattr(args, "vars", {})
        cli_vars: Dict[str, Any] = parse_cli_vars(getattr(args, "vars", "{}"))

        return cls(
            project_name=project.project_name,
@@ -151,7 +102,6 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
            clean_targets=project.clean_targets,
            log_path=project.log_path,
            packages_install_path=project.packages_install_path,
            packages_specified_path=project.packages_specified_path,
            quoting=quoting,
            models=project.models,
            on_run_start=project.on_run_start,
@@ -172,7 +122,6 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
            config_version=project.config_version,
            unrendered=project.unrendered,
            project_env_vars=project.project_env_vars,
            restrict_access=project.restrict_access,
            profile_env_vars=profile.profile_env_vars,
            profile_name=profile.profile_name,
            target_name=profile.target_name,
@@ -200,10 +149,11 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):

        # load the new project and its packages. Don't pass cli variables.
        renderer = DbtProjectYamlRenderer(profile)

        project = Project.from_project_root(
            project_root,
            renderer,
            verify_version=bool(getattr(self.args, "VERSION_CHECK", True)),
            verify_version=bool(flags.VERSION_CHECK),
        )

        runtime_config = self.from_parts(
@@ -237,24 +187,68 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
        try:
            Configuration.validate(self.serialize())
        except ValidationError as e:
            raise ConfigContractBrokenError(e) from e
            raise DbtProjectError(validator_error_message(e)) from e

    @classmethod
    def _get_rendered_profile(
        cls,
        args: Any,
        profile_renderer: ProfileRenderer,
        profile_name: Optional[str],
    ) -> Profile:

        return Profile.render_from_args(args, profile_renderer, profile_name)

    # Called by RuntimeConfig.from_args
    @classmethod
    def collect_parts(cls: Type["RuntimeConfig"], args: Any) -> Tuple[Project, Profile]:
        # profile_name from the project
        project_root = args.project_dir if args.project_dir else os.getcwd()
        cli_vars: Dict[str, Any] = getattr(args, "vars", {})
        profile = cls.get_profile(
            project_root,
            cli_vars,
            args,
        )
        flags = get_flags()
        project = load_project(project_root, bool(flags.VERSION_CHECK), profile, cli_vars)
        return project, profile

        # Called in task/base.py, in BaseTask.from_args
        cli_vars: Dict[str, Any] = parse_cli_vars(getattr(args, "vars", "{}"))

        profile = cls.collect_profile(args=args)
        project_renderer = DbtProjectYamlRenderer(profile, cli_vars)
        project = cls.collect_project(args=args, project_renderer=project_renderer)
        assert type(project) is Project
        return (project, profile)
|
||||
|
||||
@classmethod
|
||||
def collect_profile(
|
||||
cls: Type["RuntimeConfig"], args: Any, profile_name: Optional[str] = None
|
||||
) -> Profile:
|
||||
|
||||
cli_vars: Dict[str, Any] = parse_cli_vars(getattr(args, "vars", "{}"))
|
||||
profile_renderer = ProfileRenderer(cli_vars)
|
||||
|
||||
# build the profile using the base renderer and the one fact we know
|
||||
if profile_name is None:
|
||||
# Note: only the named profile section is rendered here. The rest of the
|
||||
# profile is ignored.
|
||||
partial = cls.collect_project(args)
|
||||
assert type(partial) is PartialProject
|
||||
profile_name = partial.render_profile_name(profile_renderer)
|
||||
|
||||
profile = cls._get_rendered_profile(args, profile_renderer, profile_name)
|
||||
# Save env_vars encountered in rendering for partial parsing
|
||||
profile.profile_env_vars = profile_renderer.ctx_obj.env_vars
|
||||
return profile
|
||||
|
||||
@classmethod
|
||||
def collect_project(
|
||||
cls: Type["RuntimeConfig"],
|
||||
args: Any,
|
||||
project_renderer: Optional[DbtProjectYamlRenderer] = None,
|
||||
) -> Union[Project, PartialProject]:
|
||||
|
||||
project_root = args.project_dir if args.project_dir else os.getcwd()
|
||||
version_check = bool(flags.VERSION_CHECK)
|
||||
partial = Project.partial_load(project_root, verify_version=version_check)
|
||||
if project_renderer is None:
|
||||
return partial
|
||||
else:
|
||||
project = partial.render(project_renderer)
|
||||
project.project_env_vars = project_renderer.ctx_obj.env_vars
|
||||
return project
|
||||
|
||||
# Called in main.py, lib.py, task/base.py
|
||||
@classmethod
|
||||
def from_args(cls, args: Any) -> "RuntimeConfig":
|
||||
"""Given arguments, read in dbt_project.yml from the current directory,
|
||||
@@ -264,7 +258,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
|
||||
:param args: The arguments as parsed from the cli.
|
||||
:raises DbtProjectError: If the project is invalid or missing.
|
||||
:raises DbtProfileError: If the profile is invalid or missing.
|
||||
:raises DbtValidationError: If the cli variables are invalid.
|
||||
:raises ValidationException: If the cli variables are invalid.
|
||||
"""
|
||||
project, profile = cls.collect_parts(args)
|
||||
|
||||
@@ -275,11 +269,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
|
||||
)
|
||||
|
||||
def get_metadata(self) -> ManifestMetadata:
|
||||
return ManifestMetadata(
|
||||
project_name=self.project_name,
|
||||
project_id=self.hashed_name(),
|
||||
adapter_type=self.credentials.type,
|
||||
)
|
||||
return ManifestMetadata(project_id=self.hashed_name(), adapter_type=self.credentials.type)
|
||||
|
||||
def _get_v2_config_paths(
|
||||
self,
|
||||
@@ -325,11 +315,11 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
|
||||
"exposures": self._get_config_paths(self.exposures),
|
||||
}
|
||||
|
||||
def warn_for_unused_resource_config_paths(
|
||||
def get_unused_resource_config_paths(
|
||||
self,
|
||||
resource_fqns: Mapping[str, PathSet],
|
||||
disabled: PathSet,
|
||||
) -> None:
|
||||
) -> List[FQNPath]:
|
||||
"""Return a list of lists of strings, where each inner list of strings
|
||||
represents a type + FQN path of a resource configuration that is not
|
||||
used.
|
||||
@@ -343,13 +333,23 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
|
||||
|
||||
for config_path in config_paths:
|
||||
if not _is_config_used(config_path, fqns):
|
||||
resource_path = ".".join(i for i in ((resource_type,) + config_path))
|
||||
unused_resource_config_paths.append(resource_path)
|
||||
unused_resource_config_paths.append((resource_type,) + config_path)
|
||||
return unused_resource_config_paths
|
||||
|
||||
if len(unused_resource_config_paths) == 0:
|
||||
def warn_for_unused_resource_config_paths(
|
||||
self,
|
||||
resource_fqns: Mapping[str, PathSet],
|
||||
disabled: PathSet,
|
||||
) -> None:
|
||||
unused = self.get_unused_resource_config_paths(resource_fqns, disabled)
|
||||
if len(unused) == 0:
|
||||
return
|
||||
|
||||
warn_or_error(UnusedResourceConfigPath(unused_config_paths=unused_resource_config_paths))
|
||||
msg = UNUSED_RESOURCE_CONFIGURATION_PATH_MESSAGE.format(
|
||||
len(unused), "\n".join("- {}".format(".".join(u)) for u in unused)
|
||||
)
|
||||
|
||||
warn_or_error(msg, log_fmt=warning_tag("{}"))
|
||||
|
||||
def load_dependencies(self, base_only=False) -> Mapping[str, "RuntimeConfig"]:
|
||||
if self.dependencies is None:
|
||||
@@ -363,16 +363,22 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
|
||||
count_packages_specified = len(self.packages.packages) # type: ignore
|
||||
count_packages_installed = len(tuple(self._get_project_directories()))
|
||||
if count_packages_specified > count_packages_installed:
|
||||
raise UninstalledPackagesFoundError(
|
||||
count_packages_specified,
|
||||
count_packages_installed,
|
||||
self.packages_specified_path,
|
||||
self.packages_install_path,
|
||||
raise_compiler_error(
|
||||
f"dbt found {count_packages_specified} package(s) "
|
||||
f"specified in packages.yml, but only "
|
||||
f"{count_packages_installed} package(s) installed "
|
||||
f'in {self.packages_install_path}. Run "dbt deps" to '
|
||||
f"install package dependencies."
|
||||
)
|
||||
project_paths = itertools.chain(internal_packages, self._get_project_directories())
|
||||
for project_name, project in self.load_projects(project_paths):
|
||||
if project_name in all_projects:
|
||||
raise NonUniquePackageNameError(project_name)
|
||||
raise_compiler_error(
|
||||
f"dbt found more than one package with the name "
|
||||
f'"{project_name}" included in this project. Package '
|
||||
f"names must be unique in a project. Please rename "
|
||||
f"one of these packages."
|
||||
)
|
||||
all_projects[project_name] = project
|
||||
self.dependencies = all_projects
|
||||
return self.dependencies
|
||||
@@ -422,8 +428,8 @@ class UnsetCredentials(Credentials):
|
||||
return ()
|
||||
|
||||
|
||||
# This is used by commands which do not require
|
||||
# a profile, i.e. dbt deps and clean
|
||||
# This is used by UnsetProfileConfig, for commands which do
|
||||
# not require a profile, i.e. dbt deps and clean
|
||||
class UnsetProfile(Profile):
|
||||
def __init__(self):
|
||||
self.credentials = UnsetCredentials()
|
||||
@@ -437,11 +443,189 @@ class UnsetProfile(Profile):
|
||||
|
||||
def __getattribute__(self, name):
|
||||
if name in {"profile_name", "target_name", "threads"}:
|
||||
raise DbtRuntimeError(f'Error: disallowed attribute "{name}" - no profile!')
|
||||
raise RuntimeException(f'Error: disallowed attribute "{name}" - no profile!')
|
||||
|
||||
return Profile.__getattribute__(self, name)
|
||||
|
||||
|
||||
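The disallowed-attribute guard above is a compact, reusable pattern: intercept __getattribute__, refuse a fixed set of names, and delegate everything else to the parent class explicitly so attribute access does not recurse back into the override. A standalone sketch of the same idea (illustrative names, not dbt API):

    class Gated:
        _banned = frozenset({"secret"})

        def __getattribute__(self, name):
            # object.__getattribute__ bypasses this override, avoiding recursion
            if name in object.__getattribute__(self, "_banned"):
                raise AttributeError(f'disallowed attribute "{name}"')
            return object.__getattribute__(self, name)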
# This class is used by the dbt deps and clean commands, because they don't
# require a functioning profile.
@dataclass
class UnsetProfileConfig(RuntimeConfig):
    """This class acts a lot _like_ a RuntimeConfig, except if your profile is
    missing, any access to profile members results in an exception.
    """

    profile_name: str = field(repr=False)
    target_name: str = field(repr=False)

    def __post_init__(self):
        # instead of futzing with InitVar overrides or rewriting __init__, just
        # `del` the attrs we don't want users touching.
        del self.profile_name
        del self.target_name
        # don't call super().__post_init__(), as that calls validate(), and
        # this object isn't very valid

    def __getattribute__(self, name):
        # Override __getattribute__ to check that the attribute isn't 'banned'.
        if name in {"profile_name", "target_name"}:
            raise RuntimeException(f'Error: disallowed attribute "{name}" - no profile!')

        # avoid every attribute access triggering infinite recursion
        return RuntimeConfig.__getattribute__(self, name)

    def to_target_dict(self):
        # re-override the poisoned profile behavior
        return DictDefaultEmptyStr({})

    def to_project_config(self, with_packages=False):
        """Return a dict representation of the config that could be written to
        disk with `yaml.safe_dump` to get this configuration.

        Overrides dbt.config.Project.to_project_config to omit undefined profile
        attributes.

        :param with_packages bool: If True, include the serialized packages
            file in the root.
        :returns dict: The serialized profile.
        """
        result = deepcopy(
            {
                "name": self.project_name,
                "version": self.version,
                "project-root": self.project_root,
                "profile": "",
                "model-paths": self.model_paths,
                "macro-paths": self.macro_paths,
                "seed-paths": self.seed_paths,
                "test-paths": self.test_paths,
                "analysis-paths": self.analysis_paths,
                "docs-paths": self.docs_paths,
                "asset-paths": self.asset_paths,
                "target-path": self.target_path,
                "snapshot-paths": self.snapshot_paths,
                "clean-targets": self.clean_targets,
                "log-path": self.log_path,
                "quoting": self.quoting,
                "models": self.models,
                "on-run-start": self.on_run_start,
                "on-run-end": self.on_run_end,
                "dispatch": self.dispatch,
                "seeds": self.seeds,
                "snapshots": self.snapshots,
                "sources": self.sources,
                "tests": self.tests,
                "metrics": self.metrics,
                "exposures": self.exposures,
                "vars": self.vars.to_dict(),
                "require-dbt-version": [v.to_version_string() for v in self.dbt_version],
                "config-version": self.config_version,
            }
        )
        if self.query_comment:
            result["query-comment"] = self.query_comment.to_dict(omit_none=True)

        if with_packages:
            result.update(self.packages.to_dict(omit_none=True))

        return result

    @classmethod
    def from_parts(
        cls,
        project: Project,
        profile: Profile,
        args: Any,
        dependencies: Optional[Mapping[str, "RuntimeConfig"]] = None,
    ) -> "RuntimeConfig":
        """Instantiate a RuntimeConfig from its components.

        :param profile: Ignored.
        :param project: A parsed dbt Project.
        :param args: The parsed command-line arguments.
        :returns RuntimeConfig: The new configuration.
        """
        cli_vars: Dict[str, Any] = parse_cli_vars(getattr(args, "vars", "{}"))

        return cls(
            project_name=project.project_name,
            version=project.version,
            project_root=project.project_root,
            model_paths=project.model_paths,
            macro_paths=project.macro_paths,
            seed_paths=project.seed_paths,
            test_paths=project.test_paths,
            analysis_paths=project.analysis_paths,
            docs_paths=project.docs_paths,
            asset_paths=project.asset_paths,
            target_path=project.target_path,
            snapshot_paths=project.snapshot_paths,
            clean_targets=project.clean_targets,
            log_path=project.log_path,
            packages_install_path=project.packages_install_path,
            quoting=project.quoting,  # we never use this anyway.
            models=project.models,
            on_run_start=project.on_run_start,
            on_run_end=project.on_run_end,
            dispatch=project.dispatch,
            seeds=project.seeds,
            snapshots=project.snapshots,
            dbt_version=project.dbt_version,
            packages=project.packages,
            manifest_selectors=project.manifest_selectors,
            selectors=project.selectors,
            query_comment=project.query_comment,
            sources=project.sources,
            tests=project.tests,
            metrics=project.metrics,
            exposures=project.exposures,
            vars=project.vars,
            config_version=project.config_version,
            unrendered=project.unrendered,
            project_env_vars=project.project_env_vars,
            profile_env_vars=profile.profile_env_vars,
            profile_name="",
            target_name="",
            user_config=UserConfig(),
            threads=getattr(args, "threads", 1),
            credentials=UnsetCredentials(),
            args=args,
            cli_vars=cli_vars,
            dependencies=dependencies,
        )

    @classmethod
    def _get_rendered_profile(
        cls,
        args: Any,
        profile_renderer: ProfileRenderer,
        profile_name: Optional[str],
    ) -> Profile:

        profile = UnsetProfile()
        # The profile (for warehouse connection) is not needed, but we want
        # to get the UserConfig, which is also in profiles.yml
        user_config = read_user_config(flags.PROFILES_DIR)
        profile.user_config = user_config
        return profile

    @classmethod
    def from_args(cls: Type[RuntimeConfig], args: Any) -> "RuntimeConfig":
        """Given arguments, read in dbt_project.yml from the current directory,
        read in packages.yml if it exists, and use them to find the profile to
        load.

        :param args: The arguments as parsed from the cli.
        :raises DbtProjectError: If the project is invalid or missing.
        :raises DbtProfileError: If the profile is invalid or missing.
        :raises ValidationException: If the cli variables are invalid.
        """
        project, profile = cls.collect_parts(args)

        return cls.from_parts(project=project, profile=profile, args=args)


UNUSED_RESOURCE_CONFIGURATION_PATH_MESSAGE = """\
Configuration paths exist in your dbt_project.yml file which do not \
apply to any resources.

@@ -12,7 +12,7 @@ from dbt.clients.system import (
    resolve_path_from_base,
)
from dbt.contracts.selection import SelectorFile
from dbt.exceptions import DbtSelectorsError, DbtRuntimeError
from dbt.exceptions import DbtSelectorsError, RuntimeException
from dbt.graph import parse_from_selectors_definition, SelectionSpec
from dbt.graph.selector_spec import SelectionCriteria

@@ -21,7 +21,7 @@ The selectors.yml file in this project is malformed. Please double check
the contents of this file and fix any errors before retrying.

You can find more information on the syntax for this file here:
https://docs.getdbt.com/reference/node-selection/yaml-selectors
https://docs.getdbt.com/docs/package-management

Validator Error:
{error}
@@ -46,7 +46,7 @@ class SelectorConfig(Dict[str, Dict[str, Union[SelectionSpec, bool]]]):
                f"yaml-selectors",
                result_type="invalid_selector",
            ) from exc
        except DbtRuntimeError as exc:
        except RuntimeException as exc:
            raise DbtSelectorsError(
                f"Could not read selector file data: {exc}",
                result_type="invalid_selector",
@@ -62,7 +62,7 @@ class SelectorConfig(Dict[str, Dict[str, Union[SelectionSpec, bool]]]):
    ) -> "SelectorConfig":
        try:
            rendered = renderer.render_data(data)
        except (ValidationError, DbtRuntimeError) as exc:
        except (ValidationError, RuntimeException) as exc:
            raise DbtSelectorsError(
                f"Could not render selector data: {exc}",
                result_type="invalid_selector",
@@ -77,7 +77,7 @@ class SelectorConfig(Dict[str, Dict[str, Union[SelectionSpec, bool]]]):
    ) -> "SelectorConfig":
        try:
            data = load_yaml_text(load_file_contents(str(path)))
        except (ValidationError, DbtRuntimeError) as exc:
        except (ValidationError, RuntimeException) as exc:
            raise DbtSelectorsError(
                f"Could not read selector file: {exc}",
                result_type="invalid_selector",

@@ -1,24 +1,75 @@
from typing import Any, Dict

from argparse import Namespace
from typing import Any, Dict, Optional, Union
from xmlrpc.client import Boolean
from dbt.contracts.project import UserConfig

import dbt.flags as flags
from dbt.clients import yaml_helper
from dbt.config import Profile, Project, read_user_config
from dbt.config.renderer import DbtProjectYamlRenderer, ProfileRenderer
from dbt.events.functions import fire_event
from dbt.events.types import InvalidOptionYAML
from dbt.exceptions import DbtValidationError, OptionNotYamlDictError
from dbt.events.types import InvalidVarsYAML
from dbt.exceptions import ValidationException, raise_compiler_error


def parse_cli_vars(var_string: str) -> Dict[str, Any]:
    return parse_cli_yaml_string(var_string, "vars")


def parse_cli_yaml_string(var_string: str, cli_option_name: str) -> Dict[str, Any]:
    try:
        cli_vars = yaml_helper.load_yaml_text(var_string)
        var_type = type(cli_vars)
        if var_type is dict:
            return cli_vars
        else:
            raise OptionNotYamlDictError(var_type, cli_option_name)
    except (DbtValidationError, OptionNotYamlDictError):
        fire_event(InvalidOptionYAML(option_name=cli_option_name))
        type_name = var_type.__name__
        raise_compiler_error(
            "The --vars argument must be a YAML dictionary, but was "
            "of type '{}'".format(type_name)
        )
    except ValidationException:
        fire_event(InvalidVarsYAML())
        raise
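As parse_cli_yaml_string shows, a --vars string must parse to a YAML mapping; anything else is rejected. A quick standalone illustration of that contract, using PyYAML directly (dbt's yaml_helper is essentially a thin wrapper over the same parser):

    import yaml

    yaml.safe_load("{key: value}")   # {'key': 'value'} -- flow style, accepted
    yaml.safe_load("key: value")     # {'key': 'value'} -- block style, accepted
    yaml.safe_load("[1, 2, 3]")      # a list, not a dict -- parse_cli_vars rejects this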


def get_project_config(
    project_path: str,
    profile_name: str,
    args: Namespace = Namespace(),
    cli_vars: Optional[Dict[str, Any]] = None,
    profile: Optional[Profile] = None,
    user_config: Optional[UserConfig] = None,
    return_dict: Boolean = True,
) -> Union[Project, Dict]:
    """Returns a project config (dict or object) from a given project path and profile name.

    Args:
        project_path: Path to project
        profile_name: Name of profile
        args: An argparse.Namespace that represents what would have been passed in on the
            command line (optional)
        cli_vars: A dict of any vars that would have been passed in on the command line (optional)
            (see parse_cli_vars above for formatting details)
        profile: A dbt.config.profile.Profile object (optional)
        user_config: A dbt.contracts.project.UserConfig object (optional)
        return_dict: Return a dict if true, return the full dbt.config.project.Project object if false

    Returns:
        A full project config

    """
    # Generate a profile if not provided
    if profile is None:
        # Generate user_config if not provided
        if user_config is None:
            user_config = read_user_config(flags.PROFILES_DIR)
        # Update flags
        flags.set_from_args(args, user_config)
        if cli_vars is None:
            cli_vars = {}
        profile = Profile.render_from_args(args, ProfileRenderer(cli_vars), profile_name)
    # Generate a project
    project = Project.from_project_root(
        project_path,
        DbtProjectYamlRenderer(profile),
        verify_version=bool(flags.VERSION_CHECK),
    )
    # Return
    return project.to_project_config() if return_dict else project
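A hedged usage sketch of get_project_config (the path and profile name are hypothetical; a matching entry in profiles.yml is assumed):

    # as a plain dict, suitable for yaml.safe_dump
    config_dict = get_project_config("/path/to/project", "my_profile")

    # or as the full Project object
    project = get_project_config("/path/to/project", "my_profile", return_dict=False)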

@@ -1,16 +1,2 @@
SECRET_ENV_PREFIX = "DBT_ENV_SECRET_"
DEFAULT_ENV_PLACEHOLDER = "DBT_DEFAULT_PLACEHOLDER"
METADATA_ENV_PREFIX = "DBT_ENV_CUSTOM_ENV_"

MAXIMUM_SEED_SIZE = 1 * 1024 * 1024
MAXIMUM_SEED_SIZE_NAME = "1MB"

PIN_PACKAGE_URL = (
    "https://docs.getdbt.com/docs/package-management#section-specifying-package-versions"
)

PACKAGES_FILE_NAME = "packages.yml"
DEPENDENCIES_FILE_NAME = "dependencies.yml"
MANIFEST_FILE_NAME = "manifest.json"
SEMANTIC_MANIFEST_FILE_NAME = "semantic_manifest.json"
PARTIAL_PARSE_FILE_NAME = "partial_parse.msgpack"

@@ -1,27 +1,22 @@
import json
import os
from typing import Any, Dict, NoReturn, Optional, Mapping, Iterable, Set, List
import threading

from dbt.flags import get_flags
import dbt.flags as flags_module
from dbt import flags
from dbt import tracking
from dbt import utils
from dbt.clients.jinja import get_rendered
from dbt.clients.yaml_helper import yaml, safe_load, SafeLoader, Loader, Dumper  # noqa: F401
from dbt.constants import SECRET_ENV_PREFIX, DEFAULT_ENV_PLACEHOLDER
from dbt.contracts.graph.nodes import Resource
from dbt.contracts.graph.compiled import CompiledResource
from dbt.exceptions import (
    SecretEnvVarLocationError,
    EnvVarMissingError,
    CompilationException,
    MacroReturn,
    RequiredVarNotFoundError,
    SetStrictWrongTypeError,
    ZipStrictWrongTypeError,
    raise_compiler_error,
    raise_parsing_error,
    disallow_secret_env_var,
)
from dbt.events.functions import fire_event, get_invocation_id
from dbt.events.types import JinjaLogInfo, JinjaLogDebug
from dbt.events.contextvars import get_node_info
from dbt.events.types import MacroEventInfo, MacroEventDebug
from dbt.version import __version__ as dbt_version

# These modules are added to the context. Consider alternative
@@ -131,17 +126,18 @@ class ContextMeta(type):


class Var:
    UndefinedVarError = "Required var '{}' not found in config:\nVars " "supplied to {} = {}"
    _VAR_NOTSET = object()

    def __init__(
        self,
        context: Mapping[str, Any],
        cli_vars: Mapping[str, Any],
        node: Optional[Resource] = None,
        node: Optional[CompiledResource] = None,
    ) -> None:
        self._context: Mapping[str, Any] = context
        self._cli_vars: Mapping[str, Any] = cli_vars
        self._node: Optional[Resource] = node
        self._node: Optional[CompiledResource] = node
        self._merged: Mapping[str, Any] = self._generate_merged()

    def _generate_merged(self) -> Mapping[str, Any]:
@@ -155,7 +151,10 @@ class Var:
        return "<Configuration>"

    def get_missing_var(self, var_name):
        raise RequiredVarNotFoundError(var_name, self._merged, self._node)
        dct = {k: self._merged[k] for k in self._merged}
        pretty_vars = json.dumps(dct, sort_keys=True, indent=4)
        msg = self.UndefinedVarError.format(var_name, self.node_name, pretty_vars)
        raise_compiler_error(msg, self._node)

    def has_var(self, var_name: str):
        return var_name in self._merged
@@ -299,7 +298,7 @@ class BaseContext(metaclass=ContextMeta):
        """
        return_value = None
        if var.startswith(SECRET_ENV_PREFIX):
            raise SecretEnvVarLocationError(var)
            disallow_secret_env_var(var)
        if var in os.environ:
            return_value = os.environ[var]
        elif default is not None:
@@ -314,7 +313,8 @@ class BaseContext(metaclass=ContextMeta):

            return return_value
        else:
            raise EnvVarMissingError(var)
            msg = f"Env var required but not provided: '{var}'"
            raise_parsing_error(msg)
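The env_var member above has three outcomes: names with the secret prefix are refused outright, a present variable (or an explicit default) is returned, and anything else errors. A standalone sketch of that control flow, without the dbt imports:

    import os
    from typing import Optional

    SECRET_ENV_PREFIX = "DBT_ENV_SECRET_"

    def env_var_sketch(var: str, default: Optional[str] = None) -> str:
        if var.startswith(SECRET_ENV_PREFIX):
            raise ValueError(f"secret env var {var} may not be used here")
        if var in os.environ:
            return os.environ[var]
        if default is not None:
            return default
        raise KeyError(f"Env var required but not provided: '{var}'")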

if os.environ.get("DBT_MACRO_DEBUGGING"):

@@ -495,7 +495,7 @@ class BaseContext(metaclass=ContextMeta):
        try:
            return set(value)
        except TypeError as e:
            raise SetStrictWrongTypeError(e)
            raise CompilationException(e)

    @contextmember("zip")
    @staticmethod
@@ -539,7 +539,7 @@ class BaseContext(metaclass=ContextMeta):
        try:
            return zip(*args)
        except TypeError as e:
            raise ZipStrictWrongTypeError(e)
            raise CompilationException(e)

    @contextmember
    @staticmethod
@@ -556,10 +556,14 @@ class BaseContext(metaclass=ContextMeta):
            {{ log("Running some_macro: " ~ arg1 ~ ", " ~ arg2) }}
            {% endmacro %}"
        """

        if not isinstance(msg, str):
            msg = str(msg)

        if info:
            fire_event(JinjaLogInfo(msg=msg, node_info=get_node_info()))
            fire_event(MacroEventInfo(msg=msg))
        else:
            fire_event(JinjaLogDebug(msg=msg, node_info=get_node_info()))
            fire_event(MacroEventDebug(msg=msg))
        return ""

    @contextproperty
@@ -597,11 +601,6 @@ class BaseContext(metaclass=ContextMeta):
        """
        return get_invocation_id()

    @contextproperty
    def thread_id(self) -> str:
        """thread_id outputs an ID for the current thread (useful for auditing)"""
        return threading.current_thread().name

    @contextproperty
    def modules(self) -> Dict[str, Any]:
        """The `modules` variable in the Jinja context contains useful Python
@@ -642,7 +641,7 @@ class BaseContext(metaclass=ContextMeta):

        This supports all flags defined in flags submodule (core/dbt/flags.py)
        """
        return flags_module.get_flag_obj()
        return flags.get_flag_obj()

    @contextmember
    @staticmethod
@@ -658,7 +657,7 @@ class BaseContext(metaclass=ContextMeta):
        {% endmacro %}"
        """

        if get_flags().PRINT:
        if not flags.NO_PRINT:
            print(msg)
        return ""

@@ -691,19 +690,6 @@ class BaseContext(metaclass=ContextMeta):
            dict_diff.update({k: dict_a[k]})
        return dict_diff

    @contextmember
    @staticmethod
    def local_md5(value: str) -> str:
        """Calculates an MD5 hash of the given string.
        It's called "local_md5" to emphasize that it runs locally in dbt (in jinja context) and not an MD5 SQL command.

        :param value: The value to hash

        Usage:
            {% set value_hash = local_md5("hello world") %}
        """
        return utils.md5(value)
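If dbt.utils.md5 follows the usual hashlib pattern (an assumption here, not verified against utils.py), local_md5 amounts to hashing the UTF-8 bytes of the string:

    import hashlib

    def local_md5_sketch(value: str) -> str:
        # assumed equivalent of dbt.utils.md5
        return hashlib.md5(value.encode("utf-8")).hexdigest()

    local_md5_sketch("hello world")  # '5eb63bbbe01eeed093cb22bb8f5acdc3'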


def generate_base_context(cli_vars: Dict[str, Any]) -> Dict[str, Any]:
    ctx = BaseContext(cli_vars)

@@ -8,7 +8,7 @@ from dbt.utils import MultiDict

from dbt.context.base import contextproperty, contextmember, Var
from dbt.context.target import TargetContext
from dbt.exceptions import EnvVarMissingError, SecretEnvVarLocationError
from dbt.exceptions import raise_parsing_error, disallow_secret_env_var


class ConfiguredContext(TargetContext):
@@ -16,8 +16,7 @@ class ConfiguredContext(TargetContext):
    config: AdapterRequiredConfig

    def __init__(self, config: AdapterRequiredConfig) -> None:
        super().__init__(config.to_target_dict(), config.cli_vars)
        self.config = config
        super().__init__(config, config.cli_vars)

    @contextproperty
    def project_name(self) -> str:
@@ -52,11 +51,10 @@ class ConfiguredVar(Var):
        adapter_type = self._config.credentials.type
        lookup = FQNLookup(self._project_name)
        active_vars = self._config.vars.vars_for(lookup, adapter_type)
        all_vars = MultiDict([active_vars])

        all_vars = MultiDict()
        if self._config.project_name != my_config.project_name:
            all_vars.add(my_config.vars.vars_for(lookup, adapter_type))
        all_vars.add(active_vars)

        if var_name in all_vars:
            return all_vars[var_name]
@@ -88,7 +86,7 @@ class SchemaYamlContext(ConfiguredContext):
    def env_var(self, var: str, default: Optional[str] = None) -> str:
        return_value = None
        if var.startswith(SECRET_ENV_PREFIX):
            raise SecretEnvVarLocationError(var)
            disallow_secret_env_var(var)
        if var in os.environ:
            return_value = os.environ[var]
        elif default is not None:
@@ -106,7 +104,8 @@ class SchemaYamlContext(ConfiguredContext):

            return return_value
        else:
            raise EnvVarMissingError(var)
            msg = f"Env var required but not provided: '{var}'"
            raise_parsing_error(msg)


class MacroResolvingContext(ConfiguredContext):
@@ -119,9 +118,7 @@ class MacroResolvingContext(ConfiguredContext):


def generate_schema_yml_context(
    config: AdapterRequiredConfig,
    project_name: str,
    schema_yaml_vars: Optional[SchemaYamlVars] = None,
    config: AdapterRequiredConfig, project_name: str, schema_yaml_vars: SchemaYamlVars = None
) -> Dict[str, Any]:
    ctx = SchemaYamlContext(config, project_name, schema_yaml_vars)
    return ctx.to_dict()

@@ -1,11 +1,11 @@
from abc import abstractmethod
from copy import deepcopy
from dataclasses import dataclass
from typing import List, Iterator, Dict, Any, TypeVar, Generic, Optional
from typing import List, Iterator, Dict, Any, TypeVar, Generic

from dbt.config import RuntimeConfig, Project, IsFQNResource
from dbt.contracts.graph.model_config import BaseConfig, get_config_for, _listify
from dbt.exceptions import DbtInternalError
from dbt.exceptions import InternalException
from dbt.node_types import NodeType
from dbt.utils import fqn_search

@@ -89,7 +89,7 @@ class BaseContextConfigGenerator(Generic[T]):
            return self._active_project
        dependencies = self._active_project.load_dependencies()
        if project_name not in dependencies:
            raise DbtInternalError(
            raise InternalException(
                f"Project name {project_name} not found in dependencies "
                f"(found {list(dependencies)})"
            )
@@ -130,7 +130,7 @@ class BaseContextConfigGenerator(Generic[T]):
        resource_type: NodeType,
        project_name: str,
        base: bool,
        patch_config_dict: Optional[Dict[str, Any]] = None,
        patch_config_dict: Dict[str, Any] = None,
    ) -> BaseConfig:
        own_config = self.get_node_project(project_name)

@@ -166,7 +166,7 @@ class BaseContextConfigGenerator(Generic[T]):
        resource_type: NodeType,
        project_name: str,
        base: bool,
        patch_config_dict: Optional[Dict[str, Any]] = None,
        patch_config_dict: Dict[str, Any],
    ) -> Dict[str, Any]:
        ...

@@ -200,7 +200,7 @@ class ContextConfigGenerator(BaseContextConfigGenerator[C]):
        resource_type: NodeType,
        project_name: str,
        base: bool,
        patch_config_dict: Optional[dict] = None,
        patch_config_dict: dict = None,
    ) -> Dict[str, Any]:
        config = self.calculate_node_config(
            config_call_dict=config_call_dict,
@@ -225,7 +225,7 @@ class UnrenderedConfigGenerator(BaseContextConfigGenerator[Dict[str, Any]]):
        resource_type: NodeType,
        project_name: str,
        base: bool,
        patch_config_dict: Optional[dict] = None,
        patch_config_dict: dict = None,
    ) -> Dict[str, Any]:
        # TODO CT-211
        return self.calculate_node_config(
@@ -287,14 +287,14 @@ class ContextConfig:

        elif k in BaseConfig.mergebehavior["update"]:
            if not isinstance(v, dict):
                raise DbtInternalError(f"expected dict, got {v}")
                raise InternalException(f"expected dict, got {v}")
            if k in config_call_dict and isinstance(config_call_dict[k], dict):
                config_call_dict[k].update(v)
            else:
                config_call_dict[k] = v
        elif k in BaseConfig.mergebehavior["dict_key_append"]:
            if not isinstance(v, dict):
                raise DbtInternalError(f"expected dict, got {v}")
                raise InternalException(f"expected dict, got {v}")
            if k in config_call_dict:  # should always be a dict
                for key, value in v.items():
                    extend = False
@@ -318,11 +318,7 @@ class ContextConfig:
                config_call_dict[k] = v

    def build_config_dict(
        self,
        base: bool = False,
        *,
        rendered: bool = True,
        patch_config_dict: Optional[dict] = None,
        self, base: bool = False, *, rendered: bool = True, patch_config_dict: dict = None
    ) -> Dict[str, Any]:
        if rendered:
            # TODO CT-211

@@ -1,12 +1,13 @@
from typing import Any, Dict, Union

from dbt.exceptions import (
    DocTargetNotFoundError,
    DocArgsError,
    doc_invalid_args,
    doc_target_not_found,
)
from dbt.config.runtime import RuntimeConfig
from dbt.contracts.graph.compiled import CompileResultNode
from dbt.contracts.graph.manifest import Manifest
from dbt.contracts.graph.nodes import Macro, ResultNode
from dbt.contracts.graph.parsed import ParsedMacro

from dbt.context.base import contextmember
from dbt.context.configured import SchemaYamlContext
@@ -16,7 +17,7 @@ class DocsRuntimeContext(SchemaYamlContext):
    def __init__(
        self,
        config: RuntimeConfig,
        node: Union[Macro, ResultNode],
        node: Union[ParsedMacro, CompileResultNode],
        manifest: Manifest,
        current_project: str,
    ) -> None:
@@ -52,9 +53,9 @@ class DocsRuntimeContext(SchemaYamlContext):
        elif len(args) == 2:
            doc_package_name, doc_name = args
        else:
            raise DocArgsError(self.node, args)
            doc_invalid_args(self.node, args)

        # Documentation
        # ParsedDocumentation
        target_doc = self.manifest.resolve_doc(
            doc_name,
            doc_package_name,
@@ -68,9 +69,7 @@ class DocsRuntimeContext(SchemaYamlContext):
            # TODO CT-211
            source_file.add_node(self.node.unique_id)  # type: ignore[union-attr]
        else:
            raise DocTargetNotFoundError(
                node=self.node, target_doc_name=doc_name, target_doc_package=doc_package_name
            )
            doc_target_not_found(self.node, doc_name, doc_package_name)

        return target_doc.block_contents


@@ -1,162 +0,0 @@
import functools
from typing import NoReturn

from dbt.events.functions import warn_or_error
from dbt.events.helpers import env_secrets, scrub_secrets
from dbt.events.types import JinjaLogWarning

from dbt.exceptions import (
    DbtRuntimeError,
    MissingConfigError,
    MissingMaterializationError,
    MissingRelationError,
    AmbiguousAliasError,
    AmbiguousCatalogMatchError,
    CacheInconsistencyError,
    DataclassNotDictError,
    CompilationError,
    DbtDatabaseError,
    DependencyNotFoundError,
    DependencyError,
    DuplicatePatchPathError,
    DuplicateResourceNameError,
    PropertyYMLError,
    NotImplementedError,
    RelationWrongTypeError,
    ContractError,
    ColumnTypeMissingError,
    FailFastError,
)


def warn(msg, node=None):
    warn_or_error(JinjaLogWarning(msg=msg), node=node)
    return ""


def missing_config(model, name) -> NoReturn:
    raise MissingConfigError(unique_id=model.unique_id, name=name)


def missing_materialization(model, adapter_type) -> NoReturn:
    raise MissingMaterializationError(
        materialization=model.config.materialized, adapter_type=adapter_type
    )


def missing_relation(relation, model=None) -> NoReturn:
    raise MissingRelationError(relation, model)


def raise_ambiguous_alias(node_1, node_2, duped_name=None) -> NoReturn:
    raise AmbiguousAliasError(node_1, node_2, duped_name)


def raise_ambiguous_catalog_match(unique_id, match_1, match_2) -> NoReturn:
    raise AmbiguousCatalogMatchError(unique_id, match_1, match_2)


def raise_cache_inconsistent(message) -> NoReturn:
    raise CacheInconsistencyError(message)


def raise_dataclass_not_dict(obj) -> NoReturn:
    raise DataclassNotDictError(obj)


def raise_compiler_error(msg, node=None) -> NoReturn:
    raise CompilationError(msg, node)


def raise_contract_error(yaml_columns, sql_columns) -> NoReturn:
    raise ContractError(yaml_columns, sql_columns)


def raise_database_error(msg, node=None) -> NoReturn:
    raise DbtDatabaseError(msg, node)


def raise_dep_not_found(node, node_description, required_pkg) -> NoReturn:
    raise DependencyNotFoundError(node, node_description, required_pkg)


def raise_dependency_error(msg) -> NoReturn:
    raise DependencyError(scrub_secrets(msg, env_secrets()))


def raise_duplicate_patch_name(patch_1, existing_patch_path) -> NoReturn:
    raise DuplicatePatchPathError(patch_1, existing_patch_path)


def raise_duplicate_resource_name(node_1, node_2) -> NoReturn:
    raise DuplicateResourceNameError(node_1, node_2)


def raise_invalid_property_yml_version(path, issue) -> NoReturn:
    raise PropertyYMLError(path, issue)


def raise_not_implemented(msg) -> NoReturn:
    raise NotImplementedError(msg)


def relation_wrong_type(relation, expected_type, model=None) -> NoReturn:
    raise RelationWrongTypeError(relation, expected_type, model)


def column_type_missing(column_names) -> NoReturn:
    raise ColumnTypeMissingError(column_names)


def raise_fail_fast_error(msg, node=None) -> NoReturn:
    raise FailFastError(msg, node=node)


# Update this when a new function should be added to the
# dbt context's `exceptions` key!
CONTEXT_EXPORTS = {
    fn.__name__: fn
    for fn in [
        warn,
        missing_config,
        missing_materialization,
        missing_relation,
        raise_ambiguous_alias,
        raise_ambiguous_catalog_match,
        raise_cache_inconsistent,
        raise_dataclass_not_dict,
        raise_compiler_error,
        raise_database_error,
        raise_dep_not_found,
        raise_dependency_error,
        raise_duplicate_patch_name,
        raise_duplicate_resource_name,
        raise_invalid_property_yml_version,
        raise_not_implemented,
        relation_wrong_type,
        raise_contract_error,
        column_type_missing,
        raise_fail_fast_error,
    ]
}


# wraps context based exceptions in node info
def wrapper(model):
    def wrap(func):
        @functools.wraps(func)
        def inner(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except DbtRuntimeError as exc:
                exc.add_node(model)
                raise exc

        return inner

    return wrap


def wrapped_exports(model):
    wrap = wrapper(model)
    return {name: wrap(export) for name, export in CONTEXT_EXPORTS.items()}
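wrapped_exports is a small decorator-factory application: each exported helper is wrapped so that any DbtRuntimeError it raises gets annotated with the node that triggered it before propagating. A hedged usage sketch (the model and context names are illustrative; per the comment above, these helpers land under the context's `exceptions` key):

    exports = wrapped_exports(model)        # model: the node being rendered
    jinja_context["exceptions"] = exports   # hypothetical context assembly
    exports["warn"]("this model is deprecated")  # logs a warning, returns ""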
@@ -1,10 +1,10 @@
from typing import Dict, MutableMapping, Optional
from dbt.contracts.graph.nodes import Macro
from dbt.exceptions import DuplicateMacroNameError, PackageNotFoundForMacroError
from dbt.contracts.graph.parsed import ParsedMacro
from dbt.exceptions import raise_duplicate_macro_name, raise_compiler_error
from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME
from dbt.clients.jinja import MacroGenerator

MacroNamespace = Dict[str, Macro]
MacroNamespace = Dict[str, ParsedMacro]


# This class builds the MacroResolver by adding macros
@@ -21,7 +21,7 @@ MacroNamespace = Dict[str, Macro]
class MacroResolver:
    def __init__(
        self,
        macros: MutableMapping[str, Macro],
        macros: MutableMapping[str, ParsedMacro],
        root_project_name: str,
        internal_package_names,
    ) -> None:
@@ -77,7 +77,7 @@ class MacroResolver:
    def _add_macro_to(
        self,
        package_namespaces: Dict[str, MacroNamespace],
        macro: Macro,
        macro: ParsedMacro,
    ):
        if macro.package_name in package_namespaces:
            namespace = package_namespaces[macro.package_name]
@@ -86,10 +86,10 @@ class MacroResolver:
            package_namespaces[macro.package_name] = namespace

        if macro.name in namespace:
            raise DuplicateMacroNameError(macro, macro, macro.package_name)
            raise_duplicate_macro_name(macro, macro, macro.package_name)
        package_namespaces[macro.package_name][macro.name] = macro

    def add_macro(self, macro: Macro):
    def add_macro(self, macro: ParsedMacro):
        macro_name: str = macro.name

        # internal macros (from plugins) will be processed separately from
@@ -187,7 +187,7 @@ class TestMacroNamespace:
        elif package_name in self.macro_resolver.packages:
            macro = self.macro_resolver.packages[package_name].get(name)
        else:
            raise PackageNotFoundForMacroError(package_name)
            raise_compiler_error(f"Could not find package '{package_name}'")
        if not macro:
            return None
        macro_func = MacroGenerator(macro, self.ctx, self.node, self.thread_ctx)

@@ -1,9 +1,9 @@
from typing import Any, Dict, Iterable, Union, Optional, List, Iterator, Mapping, Set

from dbt.clients.jinja import MacroGenerator, MacroStack
from dbt.contracts.graph.nodes import Macro
from dbt.contracts.graph.parsed import ParsedMacro
from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME
from dbt.exceptions import DuplicateMacroNameError, PackageNotFoundForMacroError
from dbt.exceptions import raise_duplicate_macro_name, raise_compiler_error


FlatNamespace = Dict[str, MacroGenerator]
@@ -75,7 +75,7 @@ class MacroNamespace(Mapping):
        elif package_name in self.packages:
            return self.packages[package_name].get(name)
        else:
            raise PackageNotFoundForMacroError(package_name)
            raise_compiler_error(f"Could not find package '{package_name}'")


# This class builds the MacroNamespace by adding macros to
@@ -112,7 +112,7 @@ class MacroNamespaceBuilder:
    def _add_macro_to(
        self,
        hierarchy: Dict[str, FlatNamespace],
        macro: Macro,
        macro: ParsedMacro,
        macro_func: MacroGenerator,
    ):
        if macro.package_name in hierarchy:
@@ -122,10 +122,10 @@ class MacroNamespaceBuilder:
            hierarchy[macro.package_name] = namespace

        if macro.name in namespace:
            raise DuplicateMacroNameError(macro_func.macro, macro, macro.package_name)
            raise_duplicate_macro_name(macro_func.macro, macro, macro.package_name)
        hierarchy[macro.package_name][macro.name] = macro_func

    def add_macro(self, macro: Macro, ctx: Dict[str, Any]):
    def add_macro(self, macro: ParsedMacro, ctx: Dict[str, Any]):
        macro_name: str = macro.name

        # MacroGenerator is in clients/jinja.py
@@ -147,11 +147,13 @@ class MacroNamespaceBuilder:
        elif macro.package_name == self.root_package:
            self.globals[macro_name] = macro_func

    def add_macros(self, macros: Iterable[Macro], ctx: Dict[str, Any]):
    def add_macros(self, macros: Iterable[ParsedMacro], ctx: Dict[str, Any]):
        for macro in macros:
            self.add_macro(macro, ctx)

    def build_namespace(self, macros: Iterable[Macro], ctx: Dict[str, Any]) -> MacroNamespace:
    def build_namespace(
        self, macros: Iterable[ParsedMacro], ctx: Dict[str, Any]
    ) -> MacroNamespace:
        self.add_macros(macros, ctx)

        # Iterate in reverse-order and overwrite: the packages that are first

Some files were not shown because too many files have changed in this diff.