mirror of
https://github.com/dbt-labs/dbt-core
synced 2025-12-19 13:31:28 +00:00
Compare commits
80 Commits
v1.3.1
...
adding-sem
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
fb8b161351 | ||
|
|
7ecb431278 | ||
|
|
792150ff6a | ||
|
|
85d0b5afc7 | ||
|
|
1fbcaa4484 | ||
|
|
481235a943 | ||
|
|
2289e45571 | ||
|
|
b5d303f12a | ||
|
|
c3be975783 | ||
|
|
47c2edb42a | ||
|
|
b3440417ad | ||
|
|
020f639c7a | ||
|
|
55db15aba8 | ||
|
|
bce0e7c096 | ||
|
|
7d7066466d | ||
|
|
517576c088 | ||
|
|
987764858b | ||
|
|
a235abd176 | ||
|
|
9297e4d55c | ||
|
|
eae98677b9 | ||
|
|
66ac107409 | ||
|
|
39c5c42215 | ||
|
|
9f280a8469 | ||
|
|
73116fb816 | ||
|
|
f02243506d | ||
|
|
d5e9ce1797 | ||
|
|
4e786184d2 | ||
|
|
930bd3541e | ||
|
|
6c76137da4 | ||
|
|
68d06d8a9c | ||
|
|
d0543c9242 | ||
|
|
cfad27f963 | ||
|
|
c3ccbe3357 | ||
|
|
8e28f5906e | ||
|
|
d23285b4ba | ||
|
|
a42748433d | ||
|
|
be4a91a0fe | ||
|
|
8145eed603 | ||
|
|
fc00239f36 | ||
|
|
77dfec7214 | ||
|
|
7b73264ec8 | ||
|
|
1916784287 | ||
|
|
c2856017a1 | ||
|
|
17b82661d2 | ||
|
|
6c8609499a | ||
|
|
53ae325576 | ||
|
|
a7670a3ab9 | ||
|
|
ff2f1f42c3 | ||
|
|
35f7975d8f | ||
|
|
a9c8bc0e0a | ||
|
|
73aebd8159 | ||
|
|
9b84b6e2e8 | ||
|
|
095997913e | ||
|
|
6de1d29cf9 | ||
|
|
87db12d05b | ||
|
|
dcc70f314f | ||
|
|
dcd6ef733b | ||
|
|
85e415f50f | ||
|
|
2c684247e9 | ||
|
|
3d09531cda | ||
|
|
fc1227e0b1 | ||
|
|
dc96352493 | ||
|
|
725cf81af6 | ||
|
|
558468e854 | ||
|
|
95ad1ca4f8 | ||
|
|
02a69c8f4f | ||
|
|
7dbdfc88e0 | ||
|
|
2002791ec1 | ||
|
|
29d96bd6bf | ||
|
|
d01245133a | ||
|
|
23c8ac230c | ||
|
|
43d9ee3470 | ||
|
|
50fe25d230 | ||
|
|
a79960fa64 | ||
|
|
fa4f9d3d97 | ||
|
|
73385720b4 | ||
|
|
c2ab2971b0 | ||
|
|
0e60fc1078 | ||
|
|
4f2fef1ece | ||
|
|
3562637984 |
@@ -1,5 +1,5 @@
|
|||||||
[bumpversion]
|
[bumpversion]
|
||||||
current_version = 1.3.1
|
current_version = 1.4.0a1
|
||||||
parse = (?P<major>\d+)
|
parse = (?P<major>\d+)
|
||||||
\.(?P<minor>\d+)
|
\.(?P<minor>\d+)
|
||||||
\.(?P<patch>\d+)
|
\.(?P<patch>\d+)
|
||||||
|
|||||||
@@ -3,6 +3,7 @@
|
|||||||
For information on prior major and minor releases, see their changelogs:
|
For information on prior major and minor releases, see their changelogs:
|
||||||
|
|
||||||
|
|
||||||
|
* [1.3](https://github.com/dbt-labs/dbt-core/blob/1.3.latest/CHANGELOG.md)
|
||||||
* [1.2](https://github.com/dbt-labs/dbt-core/blob/1.2.latest/CHANGELOG.md)
|
* [1.2](https://github.com/dbt-labs/dbt-core/blob/1.2.latest/CHANGELOG.md)
|
||||||
* [1.1](https://github.com/dbt-labs/dbt-core/blob/1.1.latest/CHANGELOG.md)
|
* [1.1](https://github.com/dbt-labs/dbt-core/blob/1.1.latest/CHANGELOG.md)
|
||||||
* [1.0](https://github.com/dbt-labs/dbt-core/blob/1.0.latest/CHANGELOG.md)
|
* [1.0](https://github.com/dbt-labs/dbt-core/blob/1.0.latest/CHANGELOG.md)
|
||||||
|
|||||||
@@ -1,140 +0,0 @@
|
|||||||
## dbt-core 1.3.0 - October 12, 2022
|
|
||||||
|
|
||||||
### Breaking Changes
|
|
||||||
|
|
||||||
- Renaming Metric Spec Attributes ([#5774](https://github.com/dbt-labs/dbt-core/issues/5774), [#5775](https://github.com/dbt-labs/dbt-core/pull/5775))
|
|
||||||
|
|
||||||
### Features
|
|
||||||
|
|
||||||
- Add `--defer` flag to dbt compile & dbt docs generate ([#4110](https://github.com/dbt-labs/dbt-core/issues/4110), [#4514](https://github.com/dbt-labs/dbt-core/pull/4514))
|
|
||||||
- Python model inital version ([#5261](https://github.com/dbt-labs/dbt-core/issues/5261), [#5421](https://github.com/dbt-labs/dbt-core/pull/5421))
|
|
||||||
- allows user to include the file extension for .py models in the dbt run -m command. ([#5289](https://github.com/dbt-labs/dbt-core/issues/5289), [#5295](https://github.com/dbt-labs/dbt-core/pull/5295))
|
|
||||||
- Incremental materialization refactor and cleanup ([#5245](https://github.com/dbt-labs/dbt-core/issues/5245), [#5359](https://github.com/dbt-labs/dbt-core/pull/5359))
|
|
||||||
- Python models can support incremental logic ([#0](https://github.com/dbt-labs/dbt-core/issues/0), [#35](https://github.com/dbt-labs/dbt-core/pull/35))
|
|
||||||
- Add reusable function for retrying adapter connections. Utilize said function to add retries for Postgres (and Redshift). ([#5022](https://github.com/dbt-labs/dbt-core/issues/5022), [#5432](https://github.com/dbt-labs/dbt-core/pull/5432))
|
|
||||||
- merge_exclude_columns for incremental materialization ([#5260](https://github.com/dbt-labs/dbt-core/issues/5260), [#5457](https://github.com/dbt-labs/dbt-core/pull/5457))
|
|
||||||
- add exponential backoff to connection retries on Postgres (and Redshift) ([#5502](https://github.com/dbt-labs/dbt-core/issues/5502), [#5503](https://github.com/dbt-labs/dbt-core/pull/5503))
|
|
||||||
- use MethodName.File when value ends with .csv ([#5578](https://github.com/dbt-labs/dbt-core/issues/5578), [#5581](https://github.com/dbt-labs/dbt-core/pull/5581))
|
|
||||||
- Make `docs` configurable in `dbt_project.yml` and add a `node_color` attribute to change the color of nodes in the DAG ([#5333](https://github.com/dbt-labs/dbt-core/issues/5333), [#5397](https://github.com/dbt-labs/dbt-core/pull/5397))
|
|
||||||
- Adding ResolvedMetricReference helper functions and tests ([#5567](https://github.com/dbt-labs/dbt-core/issues/5567), [#5607](https://github.com/dbt-labs/dbt-core/pull/5607))
|
|
||||||
- Check dbt-core version requirements when installing Hub packages ([#5648](https://github.com/dbt-labs/dbt-core/issues/5648), [#5651](https://github.com/dbt-labs/dbt-core/pull/5651))
|
|
||||||
- Search current working directory for `profiles.yml` ([#5411](https://github.com/dbt-labs/dbt-core/issues/5411), [#5717](https://github.com/dbt-labs/dbt-core/pull/5717))
|
|
||||||
- Adding the `window` parameter to the metric spec. ([#5721](https://github.com/dbt-labs/dbt-core/issues/5721), [#5722](https://github.com/dbt-labs/dbt-core/pull/5722))
|
|
||||||
- Add invocation args dict to ProviderContext class ([#5524](https://github.com/dbt-labs/dbt-core/issues/5524), [#5782](https://github.com/dbt-labs/dbt-core/pull/5782))
|
|
||||||
- Adds new cli framework ([#5526](https://github.com/dbt-labs/dbt-core/issues/5526), [#5647](https://github.com/dbt-labs/dbt-core/pull/5647))
|
|
||||||
- Flags work with new Click CLI ([#5529](https://github.com/dbt-labs/dbt-core/issues/5529), [#5790](https://github.com/dbt-labs/dbt-core/pull/5790))
|
|
||||||
- Add metadata env method to ProviderContext class ([#5522](https://github.com/dbt-labs/dbt-core/issues/5522), [#5794](https://github.com/dbt-labs/dbt-core/pull/5794))
|
|
||||||
- Array macros ([#5520](https://github.com/dbt-labs/dbt-core/issues/5520), [#5823](https://github.com/dbt-labs/dbt-core/pull/5823))
|
|
||||||
- Add enabled config to exposures and metrics ([#5422](https://github.com/dbt-labs/dbt-core/issues/5422), [#5815](https://github.com/dbt-labs/dbt-core/pull/5815))
|
|
||||||
- Migrate dbt-utils current_timestamp macros into core + adapters ([#5521](https://github.com/dbt-labs/dbt-core/issues/5521), [#5838](https://github.com/dbt-labs/dbt-core/pull/5838))
|
|
||||||
- add -fr flag shorthand ([#5878](https://github.com/dbt-labs/dbt-core/issues/5878), [#5879](https://github.com/dbt-labs/dbt-core/pull/5879))
|
|
||||||
- add type_boolean as a data type macro ([#5739](https://github.com/dbt-labs/dbt-core/issues/5739), [#5875](https://github.com/dbt-labs/dbt-core/pull/5875))
|
|
||||||
- Support .dbtignore in project root to ignore certain files being read by dbt ([#5733](https://github.com/dbt-labs/dbt-core/issues/5733), [#5897](https://github.com/dbt-labs/dbt-core/pull/5897))
|
|
||||||
- This conditionally no-ops warehouse connection at compile depending on an env var, disabling introspection/queries during compilation only. This is a temporary solution to more complex permissions requirements for the semantic layer. ([#5936](https://github.com/dbt-labs/dbt-core/issues/5936), [#5926](https://github.com/dbt-labs/dbt-core/pull/5926))
|
|
||||||
|
|
||||||
### Fixes
|
|
||||||
|
|
||||||
- Remove the default 256 characters limit on postgres character varying type when no limitation is set ([#5238](https://github.com/dbt-labs/dbt-core/issues/5238), [#5292](https://github.com/dbt-labs/dbt-core/pull/5292))
|
|
||||||
- Include schema file config in unrendered_config ([#5338](https://github.com/dbt-labs/dbt-core/issues/5338), [#5344](https://github.com/dbt-labs/dbt-core/pull/5344))
|
|
||||||
- Add context to compilation errors generated while rendering generic test configuration values. ([#5294](https://github.com/dbt-labs/dbt-core/issues/5294), [#5393](https://github.com/dbt-labs/dbt-core/pull/5393))
|
|
||||||
- Resolves #5351 - Do not consider shorter varchar cols as schema changes ([#5351](https://github.com/dbt-labs/dbt-core/issues/5351), [#5395](https://github.com/dbt-labs/dbt-core/pull/5395))
|
|
||||||
- Rename try to strict for more intuitiveness ([#5475](https://github.com/dbt-labs/dbt-core/issues/5475), [#5477](https://github.com/dbt-labs/dbt-core/pull/5477))
|
|
||||||
- on_shchma_change fail verbosity enhancement ([#5504](https://github.com/dbt-labs/dbt-core/issues/5504), [#5505](https://github.com/dbt-labs/dbt-core/pull/5505))
|
|
||||||
- Ignore empty strings passed in as secrets ([#5312](https://github.com/dbt-labs/dbt-core/issues/5312), [#5518](https://github.com/dbt-labs/dbt-core/pull/5518))
|
|
||||||
- Fix handling of top-level exceptions ([#5564](https://github.com/dbt-labs/dbt-core/issues/5564), [#5560](https://github.com/dbt-labs/dbt-core/pull/5560))
|
|
||||||
- Fix error rendering docs block in metrics description ([#5585](https://github.com/dbt-labs/dbt-core/issues/5585), [#5603](https://github.com/dbt-labs/dbt-core/pull/5603))
|
|
||||||
- Extended validations for the project names ([#5379](https://github.com/dbt-labs/dbt-core/issues/5379), [#5620](https://github.com/dbt-labs/dbt-core/pull/5620))
|
|
||||||
- Use sys.exit instead of exit ([#5621](https://github.com/dbt-labs/dbt-core/issues/5621), [#5627](https://github.com/dbt-labs/dbt-core/pull/5627))
|
|
||||||
- Finishing logic upgrade to Redshift for name truncation collisions. ([#5586](https://github.com/dbt-labs/dbt-core/issues/5586), [#5656](https://github.com/dbt-labs/dbt-core/pull/5656))
|
|
||||||
- multiple args for ref and source ([#5634](https://github.com/dbt-labs/dbt-core/issues/5634), [#5635](https://github.com/dbt-labs/dbt-core/pull/5635))
|
|
||||||
- Fix Unexpected behavior when chaining methods on dbt-ref'ed/sourced dataframes ([#5646](https://github.com/dbt-labs/dbt-core/issues/5646), [#5677](https://github.com/dbt-labs/dbt-core/pull/5677))
|
|
||||||
- Fix typos of comments in core/dbt/adapters/ ([#5690](https://github.com/dbt-labs/dbt-core/issues/5690), [#5693](https://github.com/dbt-labs/dbt-core/pull/5693))
|
|
||||||
- Include py.typed in MANIFEST.in. This enables packages that install dbt-core from pypi to use mypy. ([#5703](https://github.com/dbt-labs/dbt-core/issues/5703), [#5703](https://github.com/dbt-labs/dbt-core/pull/5703))
|
|
||||||
- Removal of all .coverage files when using make clean command ([#5633](https://github.com/dbt-labs/dbt-core/issues/5633), [#5759](https://github.com/dbt-labs/dbt-core/pull/5759))
|
|
||||||
- Remove temp files generated by unit tests ([#5631](https://github.com/dbt-labs/dbt-core/issues/5631), [#5749](https://github.com/dbt-labs/dbt-core/pull/5749))
|
|
||||||
- Fix warnings as errors during tests ([#5424](https://github.com/dbt-labs/dbt-core/issues/5424), [#5800](https://github.com/dbt-labs/dbt-core/pull/5800))
|
|
||||||
- Prevent event_history from holding references ([#5848](https://github.com/dbt-labs/dbt-core/issues/5848), [#5858](https://github.com/dbt-labs/dbt-core/pull/5858))
|
|
||||||
- Account for disabled flags on models in schema files more completely ([#3992](https://github.com/dbt-labs/dbt-core/issues/3992), [#5868](https://github.com/dbt-labs/dbt-core/pull/5868))
|
|
||||||
- ConfigSelectorMethod should check for bools ([#5890](https://github.com/dbt-labs/dbt-core/issues/5890), [#5889](https://github.com/dbt-labs/dbt-core/pull/5889))
|
|
||||||
- shorthand for full refresh should be one character ([#5878](https://github.com/dbt-labs/dbt-core/issues/5878), [#5908](https://github.com/dbt-labs/dbt-core/pull/5908))
|
|
||||||
- Fix macro resolution order during static analysis for custom generic tests ([#5720](https://github.com/dbt-labs/dbt-core/issues/5720), [#5907](https://github.com/dbt-labs/dbt-core/pull/5907))
|
|
||||||
- Fix race condition when invoking dbt via lib.py concurrently ([#5919](https://github.com/dbt-labs/dbt-core/issues/5919), [#5921](https://github.com/dbt-labs/dbt-core/pull/5921))
|
|
||||||
- check length of args of python model function before accessing it ([#6041](https://github.com/dbt-labs/dbt-core/issues/6041), [#6042](https://github.com/dbt-labs/dbt-core/pull/6042))
|
|
||||||
|
|
||||||
### Docs
|
|
||||||
|
|
||||||
- Update dependency inline-source from ^6.1.5 to ^7.2.0 ([dbt-docs/#299](https://github.com/dbt-labs/dbt-docs/issues/299), [dbt-docs/#291](https://github.com/dbt-labs/dbt-docs/pull/291))
|
|
||||||
- Update dependency jest from ^26.2.2 to ^28.1.3 ([dbt-docs/#299](https://github.com/dbt-labs/dbt-docs/issues/299), [dbt-docs/#291](https://github.com/dbt-labs/dbt-docs/pull/291))
|
|
||||||
- Update dependency underscore from ^1.9.0 to ^1.13.4 ([dbt-docs/#299](https://github.com/dbt-labs/dbt-docs/issues/299), [dbt-docs/#291](https://github.com/dbt-labs/dbt-docs/pull/291))
|
|
||||||
- Update dependency webpack-cli from ^3.3.12 to ^4.7.0 ([dbt-docs/#299](https://github.com/dbt-labs/dbt-docs/issues/299), [dbt-docs/#291](https://github.com/dbt-labs/dbt-docs/pull/291))
|
|
||||||
- Update dependency webpack-dev-server from ^3.1.11 to ^4.9.3 ([dbt-docs/#299](https://github.com/dbt-labs/dbt-docs/issues/299), [dbt-docs/#291](https://github.com/dbt-labs/dbt-docs/pull/291))
|
|
||||||
- Searches no longer require perfect matches, and instead consider each word individually. `my model` or `model my` will now find `my_model`, without the need for underscores ([dbt-docs/#143](https://github.com/dbt-labs/dbt-docs/issues/143), [dbt-docs/#145](https://github.com/dbt-labs/dbt-docs/pull/145))
|
|
||||||
- Support the renaming of SQL to code happening in dbt-core ([dbt-docs/#299](https://github.com/dbt-labs/dbt-docs/issues/299), [dbt-docs/#292](https://github.com/dbt-labs/dbt-docs/pull/292))
|
|
||||||
- Leverages `docs.node_color` from `dbt-core` to color nodes in the DAG ([dbt-docs/#44](https://github.com/dbt-labs/dbt-docs/issues/44), [dbt-docs/#281](https://github.com/dbt-labs/dbt-docs/pull/281))
|
|
||||||
- Refer to exposures by their label by default. ([dbt-docs/#306](https://github.com/dbt-labs/dbt-docs/issues/306), [dbt-docs/#307](https://github.com/dbt-labs/dbt-docs/pull/307))
|
|
||||||
|
|
||||||
### Under the Hood
|
|
||||||
|
|
||||||
- Added language to tracked fields in run_model event ([#5571](https://github.com/dbt-labs/dbt-core/issues/5571), [#5469](https://github.com/dbt-labs/dbt-core/pull/5469))
|
|
||||||
- Update mashumaro to 3.0.3 ([#4940](https://github.com/dbt-labs/dbt-core/issues/4940), [#5118](https://github.com/dbt-labs/dbt-core/pull/5118))
|
|
||||||
- Add python incremental materialization test ([#0000](https://github.com/dbt-labs/dbt-core/issues/0000), [#5571](https://github.com/dbt-labs/dbt-core/pull/5571))
|
|
||||||
- Save use of default env vars to manifest to enable partial parsing in those cases. ([#5155](https://github.com/dbt-labs/dbt-core/issues/5155), [#5589](https://github.com/dbt-labs/dbt-core/pull/5589))
|
|
||||||
- add more information to log line interop test failures ([#5658](https://github.com/dbt-labs/dbt-core/issues/5658), [#5659](https://github.com/dbt-labs/dbt-core/pull/5659))
|
|
||||||
- Add supported languages to materializations ([#5569](https://github.com/dbt-labs/dbt-core/issues/5569), [#5695](https://github.com/dbt-labs/dbt-core/pull/5695))
|
|
||||||
- Migrate integration test 014 but also fix the snapshot hard delete test's timezone logic and force all integration tests to run flags.set_from_args to force environment variables are accessible to all integration test threads. ([#5760](https://github.com/dbt-labs/dbt-core/issues/5760), [#5760](https://github.com/dbt-labs/dbt-core/pull/5760))
|
|
||||||
- Support dbt-metrics compilation by rebuilding flat_graph ([#5525](https://github.com/dbt-labs/dbt-core/issues/5525), [#5786](https://github.com/dbt-labs/dbt-core/pull/5786))
|
|
||||||
- Reworking the way we define the window attribute of metrics to match freshness tests ([#5722](https://github.com/dbt-labs/dbt-core/issues/5722), [#5793](https://github.com/dbt-labs/dbt-core/pull/5793))
|
|
||||||
- Add PythonJobHelper base class in core and add more type checking ([#5802](https://github.com/dbt-labs/dbt-core/issues/5802), [#5802](https://github.com/dbt-labs/dbt-core/pull/5802))
|
|
||||||
- The link did not go to the anchor directly, now it does ([#5813](https://github.com/dbt-labs/dbt-core/issues/5813), [#5814](https://github.com/dbt-labs/dbt-core/pull/5814))
|
|
||||||
- remove key as reserved keyword from test_bool_or ([#5817](https://github.com/dbt-labs/dbt-core/issues/5817), [#5818](https://github.com/dbt-labs/dbt-core/pull/5818))
|
|
||||||
- Convert default selector tests to pytest ([#5728](https://github.com/dbt-labs/dbt-core/issues/5728), [#5820](https://github.com/dbt-labs/dbt-core/pull/5820))
|
|
||||||
- Compatibiltiy for metric attribute renaming ([#5807](https://github.com/dbt-labs/dbt-core/issues/5807), [#5825](https://github.com/dbt-labs/dbt-core/pull/5825))
|
|
||||||
- remove source quoting setting in adapter tests ([#5836](https://github.com/dbt-labs/dbt-core/issues/5836), [#5839](https://github.com/dbt-labs/dbt-core/pull/5839))
|
|
||||||
- Add name validation for metrics ([#5456](https://github.com/dbt-labs/dbt-core/issues/5456), [#5841](https://github.com/dbt-labs/dbt-core/pull/5841))
|
|
||||||
- Validate exposure name and add label ([#5606](https://github.com/dbt-labs/dbt-core/issues/5606), [#5844](https://github.com/dbt-labs/dbt-core/pull/5844))
|
|
||||||
- Adding validation for metric expression attribute ([#5871](https://github.com/dbt-labs/dbt-core/issues/5871), [#5873](https://github.com/dbt-labs/dbt-core/pull/5873))
|
|
||||||
- Profiling and Adapter Management work with Click CLI ([#5531](https://github.com/dbt-labs/dbt-core/issues/5531), [#5892](https://github.com/dbt-labs/dbt-core/pull/5892))
|
|
||||||
- Reparse references to deleted metric ([#5444](https://github.com/dbt-labs/dbt-core/issues/5444), [#5920](https://github.com/dbt-labs/dbt-core/pull/5920))
|
|
||||||
|
|
||||||
### Dependencies
|
|
||||||
|
|
||||||
- Upgrade to Jinja2==3.1.2 from Jinja2==2.11.3 ([#4748](https://github.com/dbt-labs/dbt-core/issues/4748), [#5465](https://github.com/dbt-labs/dbt-core/pull/5465))
|
|
||||||
- Bump mypy from 0.961 to 0.971 ([#4904](https://github.com/dbt-labs/dbt-core/issues/4904), [#5495](https://github.com/dbt-labs/dbt-core/pull/5495))
|
|
||||||
- Remove pin for MarkUpSafe from >=0.23,<2.1 ([#5506](https://github.com/dbt-labs/dbt-core/issues/5506), [#5507](https://github.com/dbt-labs/dbt-core/pull/5507))
|
|
||||||
|
|
||||||
### Dependency
|
|
||||||
|
|
||||||
- Bump python from 3.10.5-slim-bullseye to 3.10.6-slim-bullseye in /docker ([#4904](https://github.com/dbt-labs/dbt-core/issues/4904), [#5623](https://github.com/dbt-labs/dbt-core/pull/5623))
|
|
||||||
- Bump mashumaro[msgpack] from 3.0.3 to 3.0.4 in /core ([#4904](https://github.com/dbt-labs/dbt-core/issues/4904), [#5649](https://github.com/dbt-labs/dbt-core/pull/5649))
|
|
||||||
- Bump black from 22.6.0 to 22.8.0 ([#4904](https://github.com/dbt-labs/dbt-core/issues/4904), [#5750](https://github.com/dbt-labs/dbt-core/pull/5750))
|
|
||||||
- Bump python from 3.10.6-slim-bullseye to 3.10.7-slim-bullseye in /docker ([#4904](https://github.com/dbt-labs/dbt-core/issues/4904), [#5805](https://github.com/dbt-labs/dbt-core/pull/5805))
|
|
||||||
|
|
||||||
### Contributors
|
|
||||||
- [@Goodkat](https://github.com/Goodkat) ([#5581](https://github.com/dbt-labs/dbt-core/pull/5581), [#5518](https://github.com/dbt-labs/dbt-core/pull/5518), [#5620](https://github.com/dbt-labs/dbt-core/pull/5620))
|
|
||||||
- [@Ilanbenb](https://github.com/Ilanbenb) ([#5505](https://github.com/dbt-labs/dbt-core/pull/5505))
|
|
||||||
- [@b-per](https://github.com/b-per) ([#5397](https://github.com/dbt-labs/dbt-core/pull/5397), [dbt-docs/#281](https://github.com/dbt-labs/dbt-docs/pull/281))
|
|
||||||
- [@bbroeksema](https://github.com/bbroeksema) ([#5749](https://github.com/dbt-labs/dbt-core/pull/5749))
|
|
||||||
- [@callum-mcdata](https://github.com/callum-mcdata) ([#5775](https://github.com/dbt-labs/dbt-core/pull/5775), [#5607](https://github.com/dbt-labs/dbt-core/pull/5607), [#5722](https://github.com/dbt-labs/dbt-core/pull/5722), [#5793](https://github.com/dbt-labs/dbt-core/pull/5793), [#5825](https://github.com/dbt-labs/dbt-core/pull/5825), [#5873](https://github.com/dbt-labs/dbt-core/pull/5873))
|
|
||||||
- [@danielcmessias](https://github.com/danielcmessias) ([#5889](https://github.com/dbt-labs/dbt-core/pull/5889))
|
|
||||||
- [@dave-connors-3](https://github.com/dave-connors-3) ([#5457](https://github.com/dbt-labs/dbt-core/pull/5457), [#5879](https://github.com/dbt-labs/dbt-core/pull/5879), [#5908](https://github.com/dbt-labs/dbt-core/pull/5908))
|
|
||||||
- [@dbeatty10](https://github.com/dbeatty10) ([#5717](https://github.com/dbt-labs/dbt-core/pull/5717), [#5823](https://github.com/dbt-labs/dbt-core/pull/5823))
|
|
||||||
- [@drewbanin](https://github.com/drewbanin) ([#5921](https://github.com/dbt-labs/dbt-core/pull/5921), [dbt-docs/#292](https://github.com/dbt-labs/dbt-docs/pull/292))
|
|
||||||
- [@epapineau](https://github.com/epapineau) ([#5395](https://github.com/dbt-labs/dbt-core/pull/5395))
|
|
||||||
- [@graciegoheen](https://github.com/graciegoheen) ([#5823](https://github.com/dbt-labs/dbt-core/pull/5823))
|
|
||||||
- [@jared-rimmer](https://github.com/jared-rimmer) ([#5782](https://github.com/dbt-labs/dbt-core/pull/5782), [#5794](https://github.com/dbt-labs/dbt-core/pull/5794), [#5759](https://github.com/dbt-labs/dbt-core/pull/5759))
|
|
||||||
- [@jeremyyeo](https://github.com/jeremyyeo) ([#5477](https://github.com/dbt-labs/dbt-core/pull/5477))
|
|
||||||
- [@joellabes](https://github.com/joellabes) ([dbt-docs/#145](https://github.com/dbt-labs/dbt-docs/pull/145))
|
|
||||||
- [@jpmmcneill](https://github.com/jpmmcneill) ([#5875](https://github.com/dbt-labs/dbt-core/pull/5875))
|
|
||||||
- [@kadero](https://github.com/kadero) ([#4514](https://github.com/dbt-labs/dbt-core/pull/4514))
|
|
||||||
- [@leoebfolsom](https://github.com/leoebfolsom) ([#5295](https://github.com/dbt-labs/dbt-core/pull/5295))
|
|
||||||
- [@matt-winkler](https://github.com/matt-winkler) ([#5397](https://github.com/dbt-labs/dbt-core/pull/5397), [dbt-docs/#281](https://github.com/dbt-labs/dbt-docs/pull/281))
|
|
||||||
- [@nicholasyager](https://github.com/nicholasyager) ([#5393](https://github.com/dbt-labs/dbt-core/pull/5393))
|
|
||||||
- [@panasenco](https://github.com/panasenco) ([#5703](https://github.com/dbt-labs/dbt-core/pull/5703))
|
|
||||||
- [@racheldaniel](https://github.com/racheldaniel) ([#5926](https://github.com/dbt-labs/dbt-core/pull/5926))
|
|
||||||
- [@sdebruyn](https://github.com/sdebruyn) ([#5814](https://github.com/dbt-labs/dbt-core/pull/5814), [#5818](https://github.com/dbt-labs/dbt-core/pull/5818), [#5839](https://github.com/dbt-labs/dbt-core/pull/5839))
|
|
||||||
- [@shrodingers](https://github.com/shrodingers) ([#5292](https://github.com/dbt-labs/dbt-core/pull/5292))
|
|
||||||
- [@sungchun12](https://github.com/sungchun12) ([#5397](https://github.com/dbt-labs/dbt-core/pull/5397), [dbt-docs/#281](https://github.com/dbt-labs/dbt-docs/pull/281))
|
|
||||||
- [@tomasfarias](https://github.com/tomasfarias) ([#5432](https://github.com/dbt-labs/dbt-core/pull/5432))
|
|
||||||
- [@varun-dc](https://github.com/varun-dc) ([#5627](https://github.com/dbt-labs/dbt-core/pull/5627))
|
|
||||||
- [@yoiki](https://github.com/yoiki) ([#5693](https://github.com/dbt-labs/dbt-core/pull/5693))
|
|
||||||
- [@chamini2](https://github.com/chamini2) ([#6042](https://github.com/dbt-labs/dbt-core/pull/6042))
|
|
||||||
@@ -1,10 +0,0 @@
|
|||||||
## dbt-core 1.3.1 - November 16, 2022
|
|
||||||
### Features
|
|
||||||
- This pulls the profile name from args when constructing a RuntimeConfig in lib.py, enabling the dbt-server to override the value that's in the dbt_project.yml ([#6201](https://github.com/dbt-labs/dbt-core/issues/6201), [#6202](https://github.com/dbt-labs/dbt-core/pull/6202))
|
|
||||||
### Docs
|
|
||||||
- ([dbt-docs/#5880](https://github.com/dbt-labs/dbt-docs/issues/5880), [dbt-docs/#324](https://github.com/dbt-labs/dbt-docs/pull/324))
|
|
||||||
- Fix rendering of sample code for metrics ([dbt-docs/#323](https://github.com/dbt-labs/dbt-docs/issues/323), [dbt-docs/#346](https://github.com/dbt-labs/dbt-docs/pull/346))
|
|
||||||
|
|
||||||
### Contributors
|
|
||||||
- [@paulbenschmidt](https://github.com/paulbenschmidt) ([dbt-docs/#324](https://github.com/dbt-labs/dbt-docs/pull/324))
|
|
||||||
- [@racheldaniel](https://github.com/racheldaniel) ([#6202](https://github.com/dbt-labs/dbt-core/pull/6202))
|
|
||||||
7
.changes/unreleased/Dependency-20220923-000646.yaml
Normal file
7
.changes/unreleased/Dependency-20220923-000646.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: "Dependency"
|
||||||
|
body: "Update pathspec requirement from ~=0.9.0 to >=0.9,<0.11 in /core"
|
||||||
|
time: 2022-09-23T00:06:46.00000Z
|
||||||
|
custom:
|
||||||
|
Author: dependabot[bot]
|
||||||
|
Issue: 4904
|
||||||
|
PR: 5917
|
||||||
7
.changes/unreleased/Dependency-20221007-000848.yaml
Normal file
7
.changes/unreleased/Dependency-20221007-000848.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: "Dependency"
|
||||||
|
body: "Bump black from 22.8.0 to 22.10.0"
|
||||||
|
time: 2022-10-07T00:08:48.00000Z
|
||||||
|
custom:
|
||||||
|
Author: dependabot[bot]
|
||||||
|
Issue: 4904
|
||||||
|
PR: 6019
|
||||||
7
.changes/unreleased/Dependency-20221020-000753.yaml
Normal file
7
.changes/unreleased/Dependency-20221020-000753.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: "Dependency"
|
||||||
|
body: "Bump mashumaro[msgpack] from 3.0.4 to 3.1.1 in /core"
|
||||||
|
time: 2022-10-20T00:07:53.00000Z
|
||||||
|
custom:
|
||||||
|
Author: dependabot[bot]
|
||||||
|
Issue: 4904
|
||||||
|
PR: 6108
|
||||||
7
.changes/unreleased/Dependency-20221026-000910.yaml
Normal file
7
.changes/unreleased/Dependency-20221026-000910.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: "Dependency"
|
||||||
|
body: "Update colorama requirement from <0.4.6,>=0.3.9 to >=0.3.9,<0.4.7 in /core"
|
||||||
|
time: 2022-10-26T00:09:10.00000Z
|
||||||
|
custom:
|
||||||
|
Author: dependabot[bot]
|
||||||
|
Issue: 4904
|
||||||
|
PR: 6144
|
||||||
7
.changes/unreleased/Docs-20220908-154157.yaml
Normal file
7
.changes/unreleased/Docs-20220908-154157.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Docs
|
||||||
|
body: minor doc correction
|
||||||
|
time: 2022-09-08T15:41:57.689162-04:00
|
||||||
|
custom:
|
||||||
|
Author: andy-clapson
|
||||||
|
Issue: "5791"
|
||||||
|
PR: "5684"
|
||||||
7
.changes/unreleased/Docs-20221007-090656.yaml
Normal file
7
.changes/unreleased/Docs-20221007-090656.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Docs
|
||||||
|
body: Generate API docs for new CLI interface
|
||||||
|
time: 2022-10-07T09:06:56.446078-05:00
|
||||||
|
custom:
|
||||||
|
Author: stu-k
|
||||||
|
Issue: "5528"
|
||||||
|
PR: "6022"
|
||||||
6
.changes/unreleased/Docs-20221017-171411.yaml
Normal file
6
.changes/unreleased/Docs-20221017-171411.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
kind: Docs
|
||||||
|
time: 2022-10-17T17:14:11.715348-05:00
|
||||||
|
custom:
|
||||||
|
Author: paulbenschmidt
|
||||||
|
Issue: "5880"
|
||||||
|
PR: "324"
|
||||||
7
.changes/unreleased/Docs-20221116-155743.yaml
Normal file
7
.changes/unreleased/Docs-20221116-155743.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Docs
|
||||||
|
body: Fix rendering of sample code for metrics
|
||||||
|
time: 2022-11-16T15:57:43.204201+01:00
|
||||||
|
custom:
|
||||||
|
Author: jtcohen6
|
||||||
|
Issue: "323"
|
||||||
|
PR: "346"
|
||||||
8
.changes/unreleased/Features-20220408-165459.yaml
Normal file
8
.changes/unreleased/Features-20220408-165459.yaml
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
kind: Features
|
||||||
|
body: Added favor-state flag to optionally favor state nodes even if unselected node
|
||||||
|
exists
|
||||||
|
time: 2022-04-08T16:54:59.696564+01:00
|
||||||
|
custom:
|
||||||
|
Author: daniel-murray josephberni
|
||||||
|
Issue: "2968"
|
||||||
|
PR: "5859"
|
||||||
7
.changes/unreleased/Features-20220817-154857.yaml
Normal file
7
.changes/unreleased/Features-20220817-154857.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Features
|
||||||
|
body: Proto logging messages
|
||||||
|
time: 2022-08-17T15:48:57.225267-04:00
|
||||||
|
custom:
|
||||||
|
Author: gshank
|
||||||
|
Issue: "5610"
|
||||||
|
PR: "5643"
|
||||||
7
.changes/unreleased/Features-20220912-125935.yaml
Normal file
7
.changes/unreleased/Features-20220912-125935.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Features
|
||||||
|
body: Friendlier error messages when packages.yml is malformed
|
||||||
|
time: 2022-09-12T12:59:35.121188+01:00
|
||||||
|
custom:
|
||||||
|
Author: jared-rimmer
|
||||||
|
Issue: "5486"
|
||||||
|
PR: "5812"
|
||||||
7
.changes/unreleased/Features-20220914-095625.yaml
Normal file
7
.changes/unreleased/Features-20220914-095625.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Features
|
||||||
|
body: Migrate dbt-utils current_timestamp macros into core + adapters
|
||||||
|
time: 2022-09-14T09:56:25.97818-07:00
|
||||||
|
custom:
|
||||||
|
Author: colin-rogers-dbt
|
||||||
|
Issue: "5521"
|
||||||
|
PR: "5838"
|
||||||
7
.changes/unreleased/Features-20220925-211651.yaml
Normal file
7
.changes/unreleased/Features-20220925-211651.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Features
|
||||||
|
body: Allow partitions in external tables to be supplied as a list
|
||||||
|
time: 2022-09-25T21:16:51.051239654+02:00
|
||||||
|
custom:
|
||||||
|
Author: pgoslatara
|
||||||
|
Issue: "5929"
|
||||||
|
PR: "5930"
|
||||||
7
.changes/unreleased/Features-20221003-110705.yaml
Normal file
7
.changes/unreleased/Features-20221003-110705.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Features
|
||||||
|
body: extend -f flag shorthand for seed command
|
||||||
|
time: 2022-10-03T11:07:05.381632-05:00
|
||||||
|
custom:
|
||||||
|
Author: dave-connors-3
|
||||||
|
Issue: "5990"
|
||||||
|
PR: "5991"
|
||||||
8
.changes/unreleased/Features-20221102-150003.yaml
Normal file
8
.changes/unreleased/Features-20221102-150003.yaml
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
kind: Features
|
||||||
|
body: This pulls the profile name from args when constructing a RuntimeConfig in lib.py,
|
||||||
|
enabling the dbt-server to override the value that's in the dbt_project.yml
|
||||||
|
time: 2022-11-02T15:00:03.000805-05:00
|
||||||
|
custom:
|
||||||
|
Author: racheldaniel
|
||||||
|
Issue: "6201"
|
||||||
|
PR: "6202"
|
||||||
7
.changes/unreleased/Features-20221114-185207.yaml
Normal file
7
.changes/unreleased/Features-20221114-185207.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Features
|
||||||
|
body: Added an md5 function to the base context
|
||||||
|
time: 2022-11-14T18:52:07.788593+02:00
|
||||||
|
custom:
|
||||||
|
Author: haritamar
|
||||||
|
Issue: "6246"
|
||||||
|
PR: "6247"
|
||||||
7
.changes/unreleased/Features-20221130-112913.yaml
Normal file
7
.changes/unreleased/Features-20221130-112913.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Features
|
||||||
|
body: Exposures support metrics in lineage
|
||||||
|
time: 2022-11-30T11:29:13.256034-05:00
|
||||||
|
custom:
|
||||||
|
Author: michelleark
|
||||||
|
Issue: "6057"
|
||||||
|
PR: "6342"
|
||||||
7
.changes/unreleased/Fixes-20220916-104854.yaml
Normal file
7
.changes/unreleased/Fixes-20220916-104854.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: Account for disabled flags on models in schema files more completely
|
||||||
|
time: 2022-09-16T10:48:54.162273-05:00
|
||||||
|
custom:
|
||||||
|
Author: emmyoop
|
||||||
|
Issue: "3992"
|
||||||
|
PR: "5868"
|
||||||
7
.changes/unreleased/Fixes-20221010-113218.yaml
Normal file
7
.changes/unreleased/Fixes-20221010-113218.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: Add validation of enabled config for metrics, exposures and sources
|
||||||
|
time: 2022-10-10T11:32:18.752322-05:00
|
||||||
|
custom:
|
||||||
|
Author: emmyoop
|
||||||
|
Issue: "6030"
|
||||||
|
PR: "6038"
|
||||||
7
.changes/unreleased/Fixes-20221011-160715.yaml
Normal file
7
.changes/unreleased/Fixes-20221011-160715.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: check length of args of python model function before accessing it
|
||||||
|
time: 2022-10-11T16:07:15.464093-04:00
|
||||||
|
custom:
|
||||||
|
Author: chamini2
|
||||||
|
Issue: "6041"
|
||||||
|
PR: "6042"
|
||||||
8
.changes/unreleased/Fixes-20221016-173742.yaml
Normal file
8
.changes/unreleased/Fixes-20221016-173742.yaml
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: Add functors to ensure event types with str-type attributes are initialized
|
||||||
|
to spec, even when provided non-str type params.
|
||||||
|
time: 2022-10-16T17:37:42.846683-07:00
|
||||||
|
custom:
|
||||||
|
Author: versusfacit
|
||||||
|
Issue: "5436"
|
||||||
|
PR: "5874"
|
||||||
7
.changes/unreleased/Fixes-20221107-095314.yaml
Normal file
7
.changes/unreleased/Fixes-20221107-095314.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: Allow hooks to fail without halting execution flow
|
||||||
|
time: 2022-11-07T09:53:14.340257-06:00
|
||||||
|
custom:
|
||||||
|
Author: ChenyuLInx
|
||||||
|
Issue: "5625"
|
||||||
|
PR: "6059"
|
||||||
7
.changes/unreleased/Fixes-20221115-081021.yaml
Normal file
7
.changes/unreleased/Fixes-20221115-081021.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Fixes
|
||||||
|
body: Clarify Error Message for how many models are allowed in a Python file
|
||||||
|
time: 2022-11-15T08:10:21.527884-05:00
|
||||||
|
custom:
|
||||||
|
Author: justbldwn
|
||||||
|
Issue: "6245"
|
||||||
|
PR: "6251"
|
||||||
7
.changes/unreleased/Under the Hood-20220927-194259.yaml
Normal file
7
.changes/unreleased/Under the Hood-20220927-194259.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Under the Hood
|
||||||
|
body: Put black config in explicit config
|
||||||
|
time: 2022-09-27T19:42:59.241433-07:00
|
||||||
|
custom:
|
||||||
|
Author: max-sixty
|
||||||
|
Issue: "5946"
|
||||||
|
PR: "5947"
|
||||||
7
.changes/unreleased/Under the Hood-20220929-134406.yaml
Normal file
7
.changes/unreleased/Under the Hood-20220929-134406.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Under the Hood
|
||||||
|
body: Added flat_graph attribute the Manifest class's deepcopy() coverage
|
||||||
|
time: 2022-09-29T13:44:06.275941-04:00
|
||||||
|
custom:
|
||||||
|
Author: peterallenwebb
|
||||||
|
Issue: "5809"
|
||||||
|
PR: "5975"
|
||||||
7
.changes/unreleased/Under the Hood-20221005-120310.yaml
Normal file
7
.changes/unreleased/Under the Hood-20221005-120310.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Under the Hood
|
||||||
|
body: Add mypy configs so `mypy` passes from CLI
|
||||||
|
time: 2022-10-05T12:03:10.061263-07:00
|
||||||
|
custom:
|
||||||
|
Author: max-sixty
|
||||||
|
Issue: "5983"
|
||||||
|
PR: "5983"
|
||||||
7
.changes/unreleased/Under the Hood-20221007-094627.yaml
Normal file
7
.changes/unreleased/Under the Hood-20221007-094627.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Under the Hood
|
||||||
|
body: Exception message cleanup.
|
||||||
|
time: 2022-10-07T09:46:27.682872-05:00
|
||||||
|
custom:
|
||||||
|
Author: emmyoop
|
||||||
|
Issue: "6023"
|
||||||
|
PR: "6024"
|
||||||
7
.changes/unreleased/Under the Hood-20221007-140044.yaml
Normal file
7
.changes/unreleased/Under the Hood-20221007-140044.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Under the Hood
|
||||||
|
body: Add dmypy cache to gitignore
|
||||||
|
time: 2022-10-07T14:00:44.227644-07:00
|
||||||
|
custom:
|
||||||
|
Author: max-sixty
|
||||||
|
Issue: "6028"
|
||||||
|
PR: "5978"
|
||||||
7
.changes/unreleased/Under the Hood-20221013-181912.yaml
Normal file
7
.changes/unreleased/Under the Hood-20221013-181912.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Under the Hood
|
||||||
|
body: Provide useful errors when the value of 'materialized' is invalid
|
||||||
|
time: 2022-10-13T18:19:12.167548-04:00
|
||||||
|
custom:
|
||||||
|
Author: peterallenwebb
|
||||||
|
Issue: "5229"
|
||||||
|
PR: "6025"
|
||||||
7
.changes/unreleased/Under the Hood-20221017-151511.yaml
Normal file
7
.changes/unreleased/Under the Hood-20221017-151511.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Under the Hood
|
||||||
|
body: Fixed extra whitespace in strings introduced by black.
|
||||||
|
time: 2022-10-17T15:15:11.499246-05:00
|
||||||
|
custom:
|
||||||
|
Author: luke-bassett
|
||||||
|
Issue: "1350"
|
||||||
|
PR: "6086"
|
||||||
7
.changes/unreleased/Under the Hood-20221017-155844.yaml
Normal file
7
.changes/unreleased/Under the Hood-20221017-155844.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Under the Hood
|
||||||
|
body: Clean up string formatting
|
||||||
|
time: 2022-10-17T15:58:44.676549-04:00
|
||||||
|
custom:
|
||||||
|
Author: eve-johns
|
||||||
|
Issue: "6068"
|
||||||
|
PR: "6082"
|
||||||
7
.changes/unreleased/Under the Hood-20221028-104837.yaml
Normal file
7
.changes/unreleased/Under the Hood-20221028-104837.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Under the Hood
|
||||||
|
body: Remove the 'root_path' field from most nodes
|
||||||
|
time: 2022-10-28T10:48:37.687886-04:00
|
||||||
|
custom:
|
||||||
|
Author: gshank
|
||||||
|
Issue: "6171"
|
||||||
|
PR: "6172"
|
||||||
7
.changes/unreleased/Under the Hood-20221028-110344.yaml
Normal file
7
.changes/unreleased/Under the Hood-20221028-110344.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Under the Hood
|
||||||
|
body: Combine certain logging events with different levels
|
||||||
|
time: 2022-10-28T11:03:44.887836-04:00
|
||||||
|
custom:
|
||||||
|
Author: gshank
|
||||||
|
Issue: "6173"
|
||||||
|
PR: "6174"
|
||||||
7
.changes/unreleased/Under the Hood-20221108-074550.yaml
Normal file
7
.changes/unreleased/Under the Hood-20221108-074550.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Under the Hood
|
||||||
|
body: Convert threading tests to pytest
|
||||||
|
time: 2022-11-08T07:45:50.589147-06:00
|
||||||
|
custom:
|
||||||
|
Author: stu-k
|
||||||
|
Issue: "5942"
|
||||||
|
PR: "6226"
|
||||||
7
.changes/unreleased/Under the Hood-20221108-115633.yaml
Normal file
7
.changes/unreleased/Under the Hood-20221108-115633.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Under the Hood
|
||||||
|
body: Convert postgres index tests to pytest
|
||||||
|
time: 2022-11-08T11:56:33.743042-06:00
|
||||||
|
custom:
|
||||||
|
Author: stu-k
|
||||||
|
Issue: "5770"
|
||||||
|
PR: "6228"
|
||||||
7
.changes/unreleased/Under the Hood-20221108-133104.yaml
Normal file
7
.changes/unreleased/Under the Hood-20221108-133104.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Under the Hood
|
||||||
|
body: Convert use color tests to pytest
|
||||||
|
time: 2022-11-08T13:31:04.788547-06:00
|
||||||
|
custom:
|
||||||
|
Author: stu-k
|
||||||
|
Issue: "5771"
|
||||||
|
PR: "6230"
|
||||||
7
.changes/unreleased/Under the Hood-20221116-130037.yaml
Normal file
7
.changes/unreleased/Under the Hood-20221116-130037.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
kind: Under the Hood
|
||||||
|
body: Add github actions workflow to generate high level CLI API docs
|
||||||
|
time: 2022-11-16T13:00:37.916202-06:00
|
||||||
|
custom:
|
||||||
|
Author: stu-k
|
||||||
|
Issue: "5942"
|
||||||
|
PR: "6187"
|
||||||
@@ -44,7 +44,7 @@ custom:
|
|||||||
footerFormat: |
|
footerFormat: |
|
||||||
{{- $contributorDict := dict }}
|
{{- $contributorDict := dict }}
|
||||||
{{- /* any names added to this list should be all lowercase for later matching purposes */}}
|
{{- /* any names added to this list should be all lowercase for later matching purposes */}}
|
||||||
{{- $core_team := list "peterallenwebb" "emmyoop" "nathaniel-may" "gshank" "leahwicz" "chenyulinx" "stu-k" "iknox-fa" "versusfacit" "mcknight-42" "jtcohen6" "dependabot[bot]" "snyk-bot" "colin-rogers-dbt" }}
|
{{- $core_team := list "michelleark" "peterallenwebb" "emmyoop" "nathaniel-may" "gshank" "leahwicz" "chenyulinx" "stu-k" "iknox-fa" "versusfacit" "mcknight-42" "jtcohen6" "dependabot[bot]" "snyk-bot" "colin-rogers-dbt" }}
|
||||||
{{- range $change := .Changes }}
|
{{- range $change := .Changes }}
|
||||||
{{- $authorList := splitList " " $change.Custom.Author }}
|
{{- $authorList := splitList " " $change.Custom.Author }}
|
||||||
{{- /* loop through all authors for a PR */}}
|
{{- /* loop through all authors for a PR */}}
|
||||||
|
|||||||
166
.github/workflows/generate-cli-api-docs.yml
vendored
Normal file
166
.github/workflows/generate-cli-api-docs.yml
vendored
Normal file
@@ -0,0 +1,166 @@
|
|||||||
|
# **what?**
|
||||||
|
# On push, if anything in core/dbt/docs or core/dbt/cli has been
|
||||||
|
# created or modified, regenerate the CLI API docs using sphinx.
|
||||||
|
|
||||||
|
# **why?**
|
||||||
|
# We watch for changes in core/dbt/cli because the CLI API docs rely on click
|
||||||
|
# and all supporting flags/params to be generated. We watch for changes in
|
||||||
|
# core/dbt/docs since any changes to sphinx configuration or any of the
|
||||||
|
# .rst files there could result in a differently build final index.html file.
|
||||||
|
|
||||||
|
# **when?**
|
||||||
|
# Whenever a change has been pushed to a branch, and only if there is a diff
|
||||||
|
# between the PR branch and main's core/dbt/cli and or core/dbt/docs dirs.
|
||||||
|
|
||||||
|
# TODO: add bot comment to PR informing contributor that the docs have been committed
|
||||||
|
# TODO: figure out why github action triggered pushes cause github to fail to report
|
||||||
|
# the status of jobs
|
||||||
|
|
||||||
|
name: Generate CLI API docs
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
pull-requests: write
|
||||||
|
|
||||||
|
env:
|
||||||
|
CLI_DIR: ${{ github.workspace }}/core/dbt/cli
|
||||||
|
DOCS_DIR: ${{ github.workspace }}/core/dbt/docs
|
||||||
|
DOCS_BUILD_DIR: ${{ github.workspace }}/core/dbt/docs/build
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
check_gen:
|
||||||
|
name: check if generation needed
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
cli_dir_changed: ${{ steps.check_cli.outputs.cli_dir_changed }}
|
||||||
|
docs_dir_changed: ${{ steps.check_docs.outputs.docs_dir_changed }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: "[DEBUG] print variables"
|
||||||
|
run: |
|
||||||
|
echo "env.CLI_DIR: ${{ env.CLI_DIR }}"
|
||||||
|
echo "env.DOCS_BUILD_DIR: ${{ env.DOCS_BUILD_DIR }}"
|
||||||
|
echo "env.DOCS_DIR: ${{ env.DOCS_DIR }}"
|
||||||
|
echo ">>>>> git log"
|
||||||
|
git log --pretty=oneline | head -5
|
||||||
|
|
||||||
|
- name: git checkout
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
ref: ${{ github.head_ref }}
|
||||||
|
|
||||||
|
- name: set shas
|
||||||
|
id: set_shas
|
||||||
|
run: |
|
||||||
|
THIS_SHA=$(git rev-parse @)
|
||||||
|
LAST_SHA=$(git rev-parse @~1)
|
||||||
|
|
||||||
|
echo "this sha: $THIS_SHA"
|
||||||
|
echo "last sha: $LAST_SHA"
|
||||||
|
|
||||||
|
echo "this_sha=$THIS_SHA" >> $GITHUB_OUTPUT
|
||||||
|
echo "last_sha=$LAST_SHA" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: check for changes in core/dbt/cli
|
||||||
|
id: check_cli
|
||||||
|
run: |
|
||||||
|
CLI_DIR_CHANGES=$(git diff \
|
||||||
|
${{ steps.set_shas.outputs.last_sha }} \
|
||||||
|
${{ steps.set_shas.outputs.this_sha }} \
|
||||||
|
-- ${{ env.CLI_DIR }})
|
||||||
|
|
||||||
|
if [ -n "$CLI_DIR_CHANGES" ]; then
|
||||||
|
echo "changes found"
|
||||||
|
echo $CLI_DIR_CHANGES
|
||||||
|
echo "cli_dir_changed=true" >> $GITHUB_OUTPUT
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
echo "cli_dir_changed=false" >> $GITHUB_OUTPUT
|
||||||
|
echo "no changes found"
|
||||||
|
|
||||||
|
- name: check for changes in core/dbt/docs
|
||||||
|
id: check_docs
|
||||||
|
if: steps.check_cli.outputs.cli_dir_changed == 'false'
|
||||||
|
run: |
|
||||||
|
DOCS_DIR_CHANGES=$(git diff --name-only \
|
||||||
|
${{ steps.set_shas.outputs.last_sha }} \
|
||||||
|
${{ steps.set_shas.outputs.this_sha }} \
|
||||||
|
-- ${{ env.DOCS_DIR }} ':!${{ env.DOCS_BUILD_DIR }}')
|
||||||
|
|
||||||
|
DOCS_BUILD_DIR_CHANGES=$(git diff --name-only \
|
||||||
|
${{ steps.set_shas.outputs.last_sha }} \
|
||||||
|
${{ steps.set_shas.outputs.this_sha }} \
|
||||||
|
-- ${{ env.DOCS_BUILD_DIR }})
|
||||||
|
|
||||||
|
if [ -n "$DOCS_DIR_CHANGES" ] && [ -z "$DOCS_BUILD_DIR_CHANGES" ]; then
|
||||||
|
echo "changes found"
|
||||||
|
echo $DOCS_DIR_CHANGES
|
||||||
|
echo "docs_dir_changed=true" >> $GITHUB_OUTPUT
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
echo "docs_dir_changed=false" >> $GITHUB_OUTPUT
|
||||||
|
echo "no changes found"
|
||||||
|
|
||||||
|
gen_docs:
|
||||||
|
name: generate docs
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: [check_gen]
|
||||||
|
if: |
|
||||||
|
needs.check_gen.outputs.cli_dir_changed == 'true'
|
||||||
|
|| needs.check_gen.outputs.docs_dir_changed == 'true'
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: "[DEBUG] print variables"
|
||||||
|
run: |
|
||||||
|
echo "env.DOCS_DIR: ${{ env.DOCS_DIR }}"
|
||||||
|
echo "github head_ref: ${{ github.head_ref }}"
|
||||||
|
|
||||||
|
- name: git checkout
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
ref: ${{ github.head_ref }}
|
||||||
|
|
||||||
|
- name: install python
|
||||||
|
uses: actions/setup-python@v4.3.0
|
||||||
|
with:
|
||||||
|
python-version: 3.8
|
||||||
|
|
||||||
|
- name: install dev requirements
|
||||||
|
run: |
|
||||||
|
python3 -m venv env
|
||||||
|
source env/bin/activate
|
||||||
|
python -m pip install --upgrade pip
|
||||||
|
pip install -r requirements.txt -r dev-requirements.txt
|
||||||
|
|
||||||
|
- name: generate docs
|
||||||
|
run: |
|
||||||
|
source env/bin/activate
|
||||||
|
cd ${{ env.DOCS_DIR }}
|
||||||
|
|
||||||
|
echo "cleaning existing docs"
|
||||||
|
make clean
|
||||||
|
|
||||||
|
echo "creating docs"
|
||||||
|
make html
|
||||||
|
|
||||||
|
- name: debug
|
||||||
|
run: |
|
||||||
|
echo ">>>>> status"
|
||||||
|
git status
|
||||||
|
echo ">>>>> remotes"
|
||||||
|
git remote -v
|
||||||
|
echo ">>>>> branch"
|
||||||
|
git branch -v
|
||||||
|
echo ">>>>> log"
|
||||||
|
git log --pretty=oneline | head -5
|
||||||
|
|
||||||
|
- name: commit docs
|
||||||
|
run: |
|
||||||
|
git config user.name 'Github Build Bot'
|
||||||
|
git config user.email 'buildbot@fishtownanalytics.com'
|
||||||
|
git commit -am "Add generated CLI API docs"
|
||||||
|
git push -u origin ${{ github.head_ref }}
|
||||||
2
.github/workflows/main.yml
vendored
2
.github/workflows/main.yml
vendored
@@ -119,7 +119,7 @@ jobs:
|
|||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
python-version: ["3.7", "3.8", "3.9", "3.10"]
|
python-version: ["3.7", "3.8", "3.9", "3.10"]
|
||||||
os: [ubuntu-latest]
|
os: [ubuntu-20.04]
|
||||||
include:
|
include:
|
||||||
- python-version: 3.8
|
- python-version: 3.8
|
||||||
os: windows-latest
|
os: windows-latest
|
||||||
|
|||||||
2
.github/workflows/release-branch-tests.yml
vendored
2
.github/workflows/release-branch-tests.yml
vendored
@@ -39,7 +39,7 @@ jobs:
|
|||||||
max-parallel: 1
|
max-parallel: 1
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
branch: [1.0.latest, 1.1.latest, 1.2.latest, main]
|
branch: [1.0.latest, 1.1.latest, 1.2.latest, 1.3.latest, main]
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Call CI workflow for ${{ matrix.branch }} branch
|
- name: Call CI workflow for ${{ matrix.branch }} branch
|
||||||
|
|||||||
11
.github/workflows/stale.yml
vendored
11
.github/workflows/stale.yml
vendored
@@ -9,13 +9,4 @@ permissions:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
stale:
|
stale:
|
||||||
runs-on: ubuntu-latest
|
uses: dbt-labs/actions/.github/workflows/stale-bot-matrix.yml@main
|
||||||
steps:
|
|
||||||
# pinned at v4 (https://github.com/actions/stale/releases/tag/v4.0.0)
|
|
||||||
- uses: actions/stale@cdf15f641adb27a71842045a94023bef6945e3aa
|
|
||||||
with:
|
|
||||||
stale-issue-message: "This issue has been marked as Stale because it has been open for 180 days with no activity. If you would like the issue to remain open, please remove the stale label or comment on the issue, or it will be closed in 7 days."
|
|
||||||
stale-pr-message: "This PR has been marked as Stale because it has been open for 180 days with no activity. If you would like the PR to remain open, please remove the stale label or comment on the PR, or it will be closed in 7 days."
|
|
||||||
close-issue-message: "Although we are closing this issue as stale, it's not gone forever. Issues can be reopened if there is renewed community interest; add a comment to notify the maintainers."
|
|
||||||
# mark issues/PRs stale when they haven't seen activity in 180 days
|
|
||||||
days-before-stale: 180
|
|
||||||
|
|||||||
@@ -22,7 +22,7 @@ jobs:
|
|||||||
# run the performance measurements on the current or default branch
|
# run the performance measurements on the current or default branch
|
||||||
test-schema:
|
test-schema:
|
||||||
name: Test Log Schema
|
name: Test Log Schema
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-20.04
|
||||||
env:
|
env:
|
||||||
# turns warnings into errors
|
# turns warnings into errors
|
||||||
RUSTFLAGS: "-D warnings"
|
RUSTFLAGS: "-D warnings"
|
||||||
@@ -46,12 +46,6 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
python-version: "3.8"
|
python-version: "3.8"
|
||||||
|
|
||||||
- uses: actions-rs/toolchain@v1
|
|
||||||
with:
|
|
||||||
profile: minimal
|
|
||||||
toolchain: stable
|
|
||||||
override: true
|
|
||||||
|
|
||||||
- name: Install python dependencies
|
- name: Install python dependencies
|
||||||
run: |
|
run: |
|
||||||
pip install --user --upgrade pip
|
pip install --user --upgrade pip
|
||||||
@@ -69,10 +63,3 @@ jobs:
|
|||||||
# we actually care if these pass, because the normal test run doesn't usually include many json log outputs
|
# we actually care if these pass, because the normal test run doesn't usually include many json log outputs
|
||||||
- name: Run integration tests
|
- name: Run integration tests
|
||||||
run: tox -e integration -- -nauto
|
run: tox -e integration -- -nauto
|
||||||
|
|
||||||
# apply our schema tests to every log event from the previous step
|
|
||||||
# skips any output that isn't valid json
|
|
||||||
- uses: actions-rs/cargo@v1
|
|
||||||
with:
|
|
||||||
command: run
|
|
||||||
args: --manifest-path test/interop/log_parsing/Cargo.toml
|
|
||||||
|
|||||||
5
.gitignore
vendored
5
.gitignore
vendored
@@ -11,6 +11,7 @@ __pycache__/
|
|||||||
env*/
|
env*/
|
||||||
dbt_env/
|
dbt_env/
|
||||||
build/
|
build/
|
||||||
|
!core/dbt/docs/build
|
||||||
develop-eggs/
|
develop-eggs/
|
||||||
dist/
|
dist/
|
||||||
downloads/
|
downloads/
|
||||||
@@ -24,7 +25,8 @@ var/
|
|||||||
*.egg-info/
|
*.egg-info/
|
||||||
.installed.cfg
|
.installed.cfg
|
||||||
*.egg
|
*.egg
|
||||||
*.mypy_cache/
|
.mypy_cache/
|
||||||
|
.dmypy.json
|
||||||
logs/
|
logs/
|
||||||
|
|
||||||
# PyInstaller
|
# PyInstaller
|
||||||
@@ -98,5 +100,4 @@ venv/
|
|||||||
*.code-workspace
|
*.code-workspace
|
||||||
|
|
||||||
# poetry
|
# poetry
|
||||||
pyproject.toml
|
|
||||||
poetry.lock
|
poetry.lock
|
||||||
|
|||||||
@@ -2,7 +2,7 @@
|
|||||||
# Eventually the hooks described here will be run as tests before merging each PR.
|
# Eventually the hooks described here will be run as tests before merging each PR.
|
||||||
|
|
||||||
# TODO: remove global exclusion of tests when testing overhaul is complete
|
# TODO: remove global exclusion of tests when testing overhaul is complete
|
||||||
exclude: ^test/
|
exclude: ^(test/|core/dbt/docs/build/)
|
||||||
|
|
||||||
# Force all unspecified python hooks to run python 3.8
|
# Force all unspecified python hooks to run python 3.8
|
||||||
default_language_version:
|
default_language_version:
|
||||||
@@ -24,15 +24,10 @@ repos:
|
|||||||
rev: 22.3.0
|
rev: 22.3.0
|
||||||
hooks:
|
hooks:
|
||||||
- id: black
|
- id: black
|
||||||
args:
|
|
||||||
- "--line-length=99"
|
|
||||||
- "--target-version=py38"
|
|
||||||
- id: black
|
- id: black
|
||||||
alias: black-check
|
alias: black-check
|
||||||
stages: [manual]
|
stages: [manual]
|
||||||
args:
|
args:
|
||||||
- "--line-length=99"
|
|
||||||
- "--target-version=py38"
|
|
||||||
- "--check"
|
- "--check"
|
||||||
- "--diff"
|
- "--diff"
|
||||||
- repo: https://github.com/pycqa/flake8
|
- repo: https://github.com/pycqa/flake8
|
||||||
|
|||||||
155
CHANGELOG.md
155
CHANGELOG.md
@@ -6,165 +6,12 @@
|
|||||||
- Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)
|
- Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)
|
||||||
|
|
||||||
|
|
||||||
## dbt-core 1.3.1 - November 16, 2022
|
|
||||||
### Features
|
|
||||||
- This pulls the profile name from args when constructing a RuntimeConfig in lib.py, enabling the dbt-server to override the value that's in the dbt_project.yml ([#6201](https://github.com/dbt-labs/dbt-core/issues/6201), [#6202](https://github.com/dbt-labs/dbt-core/pull/6202))
|
|
||||||
### Docs
|
|
||||||
- ([dbt-docs/#5880](https://github.com/dbt-labs/dbt-docs/issues/5880), [dbt-docs/#324](https://github.com/dbt-labs/dbt-docs/pull/324))
|
|
||||||
- Fix rendering of sample code for metrics ([dbt-docs/#323](https://github.com/dbt-labs/dbt-docs/issues/323), [dbt-docs/#346](https://github.com/dbt-labs/dbt-docs/pull/346))
|
|
||||||
|
|
||||||
### Contributors
|
|
||||||
- [@paulbenschmidt](https://github.com/paulbenschmidt) ([dbt-docs/#324](https://github.com/dbt-labs/dbt-docs/pull/324))
|
|
||||||
- [@racheldaniel](https://github.com/racheldaniel) ([#6202](https://github.com/dbt-labs/dbt-core/pull/6202))
|
|
||||||
|
|
||||||
|
|
||||||
## dbt-core 1.3.0 - October 12, 2022
|
|
||||||
|
|
||||||
### Breaking Changes
|
|
||||||
|
|
||||||
- Renaming Metric Spec Attributes ([#5774](https://github.com/dbt-labs/dbt-core/issues/5774), [#5775](https://github.com/dbt-labs/dbt-core/pull/5775))
|
|
||||||
|
|
||||||
### Features
|
|
||||||
|
|
||||||
- Add `--defer` flag to dbt compile & dbt docs generate ([#4110](https://github.com/dbt-labs/dbt-core/issues/4110), [#4514](https://github.com/dbt-labs/dbt-core/pull/4514))
|
|
||||||
- Python model inital version ([#5261](https://github.com/dbt-labs/dbt-core/issues/5261), [#5421](https://github.com/dbt-labs/dbt-core/pull/5421))
|
|
||||||
- allows user to include the file extension for .py models in the dbt run -m command. ([#5289](https://github.com/dbt-labs/dbt-core/issues/5289), [#5295](https://github.com/dbt-labs/dbt-core/pull/5295))
|
|
||||||
- Incremental materialization refactor and cleanup ([#5245](https://github.com/dbt-labs/dbt-core/issues/5245), [#5359](https://github.com/dbt-labs/dbt-core/pull/5359))
|
|
||||||
- Python models can support incremental logic ([#0](https://github.com/dbt-labs/dbt-core/issues/0), [#35](https://github.com/dbt-labs/dbt-core/pull/35))
|
|
||||||
- Add reusable function for retrying adapter connections. Utilize said function to add retries for Postgres (and Redshift). ([#5022](https://github.com/dbt-labs/dbt-core/issues/5022), [#5432](https://github.com/dbt-labs/dbt-core/pull/5432))
|
|
||||||
- merge_exclude_columns for incremental materialization ([#5260](https://github.com/dbt-labs/dbt-core/issues/5260), [#5457](https://github.com/dbt-labs/dbt-core/pull/5457))
|
|
||||||
- add exponential backoff to connection retries on Postgres (and Redshift) ([#5502](https://github.com/dbt-labs/dbt-core/issues/5502), [#5503](https://github.com/dbt-labs/dbt-core/pull/5503))
|
|
||||||
- use MethodName.File when value ends with .csv ([#5578](https://github.com/dbt-labs/dbt-core/issues/5578), [#5581](https://github.com/dbt-labs/dbt-core/pull/5581))
|
|
||||||
- Make `docs` configurable in `dbt_project.yml` and add a `node_color` attribute to change the color of nodes in the DAG ([#5333](https://github.com/dbt-labs/dbt-core/issues/5333), [#5397](https://github.com/dbt-labs/dbt-core/pull/5397))
|
|
||||||
- Adding ResolvedMetricReference helper functions and tests ([#5567](https://github.com/dbt-labs/dbt-core/issues/5567), [#5607](https://github.com/dbt-labs/dbt-core/pull/5607))
|
|
||||||
- Check dbt-core version requirements when installing Hub packages ([#5648](https://github.com/dbt-labs/dbt-core/issues/5648), [#5651](https://github.com/dbt-labs/dbt-core/pull/5651))
|
|
||||||
- Search current working directory for `profiles.yml` ([#5411](https://github.com/dbt-labs/dbt-core/issues/5411), [#5717](https://github.com/dbt-labs/dbt-core/pull/5717))
|
|
||||||
- Adding the `window` parameter to the metric spec. ([#5721](https://github.com/dbt-labs/dbt-core/issues/5721), [#5722](https://github.com/dbt-labs/dbt-core/pull/5722))
|
|
||||||
- Add invocation args dict to ProviderContext class ([#5524](https://github.com/dbt-labs/dbt-core/issues/5524), [#5782](https://github.com/dbt-labs/dbt-core/pull/5782))
|
|
||||||
- Adds new cli framework ([#5526](https://github.com/dbt-labs/dbt-core/issues/5526), [#5647](https://github.com/dbt-labs/dbt-core/pull/5647))
|
|
||||||
- Flags work with new Click CLI ([#5529](https://github.com/dbt-labs/dbt-core/issues/5529), [#5790](https://github.com/dbt-labs/dbt-core/pull/5790))
|
|
||||||
- Add metadata env method to ProviderContext class ([#5522](https://github.com/dbt-labs/dbt-core/issues/5522), [#5794](https://github.com/dbt-labs/dbt-core/pull/5794))
|
|
||||||
- Array macros ([#5520](https://github.com/dbt-labs/dbt-core/issues/5520), [#5823](https://github.com/dbt-labs/dbt-core/pull/5823))
|
|
||||||
- Add enabled config to exposures and metrics ([#5422](https://github.com/dbt-labs/dbt-core/issues/5422), [#5815](https://github.com/dbt-labs/dbt-core/pull/5815))
|
|
||||||
- Migrate dbt-utils current_timestamp macros into core + adapters ([#5521](https://github.com/dbt-labs/dbt-core/issues/5521), [#5838](https://github.com/dbt-labs/dbt-core/pull/5838))
|
|
||||||
- add -fr flag shorthand ([#5878](https://github.com/dbt-labs/dbt-core/issues/5878), [#5879](https://github.com/dbt-labs/dbt-core/pull/5879))
|
|
||||||
- add type_boolean as a data type macro ([#5739](https://github.com/dbt-labs/dbt-core/issues/5739), [#5875](https://github.com/dbt-labs/dbt-core/pull/5875))
|
|
||||||
- Support .dbtignore in project root to ignore certain files being read by dbt ([#5733](https://github.com/dbt-labs/dbt-core/issues/5733), [#5897](https://github.com/dbt-labs/dbt-core/pull/5897))
|
|
||||||
- This conditionally no-ops warehouse connection at compile depending on an env var, disabling introspection/queries during compilation only. This is a temporary solution to more complex permissions requirements for the semantic layer. ([#5936](https://github.com/dbt-labs/dbt-core/issues/5936), [#5926](https://github.com/dbt-labs/dbt-core/pull/5926))
|
|
||||||
|
|
||||||
### Fixes
|
|
||||||
|
|
||||||
- Remove the default 256 characters limit on postgres character varying type when no limitation is set ([#5238](https://github.com/dbt-labs/dbt-core/issues/5238), [#5292](https://github.com/dbt-labs/dbt-core/pull/5292))
|
|
||||||
- Include schema file config in unrendered_config ([#5338](https://github.com/dbt-labs/dbt-core/issues/5338), [#5344](https://github.com/dbt-labs/dbt-core/pull/5344))
|
|
||||||
- Add context to compilation errors generated while rendering generic test configuration values. ([#5294](https://github.com/dbt-labs/dbt-core/issues/5294), [#5393](https://github.com/dbt-labs/dbt-core/pull/5393))
|
|
||||||
- Resolves #5351 - Do not consider shorter varchar cols as schema changes ([#5351](https://github.com/dbt-labs/dbt-core/issues/5351), [#5395](https://github.com/dbt-labs/dbt-core/pull/5395))
|
|
||||||
- Rename try to strict for more intuitiveness ([#5475](https://github.com/dbt-labs/dbt-core/issues/5475), [#5477](https://github.com/dbt-labs/dbt-core/pull/5477))
|
|
||||||
- on_shchma_change fail verbosity enhancement ([#5504](https://github.com/dbt-labs/dbt-core/issues/5504), [#5505](https://github.com/dbt-labs/dbt-core/pull/5505))
|
|
||||||
- Ignore empty strings passed in as secrets ([#5312](https://github.com/dbt-labs/dbt-core/issues/5312), [#5518](https://github.com/dbt-labs/dbt-core/pull/5518))
|
|
||||||
- Fix handling of top-level exceptions ([#5564](https://github.com/dbt-labs/dbt-core/issues/5564), [#5560](https://github.com/dbt-labs/dbt-core/pull/5560))
|
|
||||||
- Fix error rendering docs block in metrics description ([#5585](https://github.com/dbt-labs/dbt-core/issues/5585), [#5603](https://github.com/dbt-labs/dbt-core/pull/5603))
|
|
||||||
- Extended validations for the project names ([#5379](https://github.com/dbt-labs/dbt-core/issues/5379), [#5620](https://github.com/dbt-labs/dbt-core/pull/5620))
|
|
||||||
- Use sys.exit instead of exit ([#5621](https://github.com/dbt-labs/dbt-core/issues/5621), [#5627](https://github.com/dbt-labs/dbt-core/pull/5627))
|
|
||||||
- Finishing logic upgrade to Redshift for name truncation collisions. ([#5586](https://github.com/dbt-labs/dbt-core/issues/5586), [#5656](https://github.com/dbt-labs/dbt-core/pull/5656))
|
|
||||||
- multiple args for ref and source ([#5634](https://github.com/dbt-labs/dbt-core/issues/5634), [#5635](https://github.com/dbt-labs/dbt-core/pull/5635))
|
|
||||||
- Fix Unexpected behavior when chaining methods on dbt-ref'ed/sourced dataframes ([#5646](https://github.com/dbt-labs/dbt-core/issues/5646), [#5677](https://github.com/dbt-labs/dbt-core/pull/5677))
|
|
||||||
- Fix typos of comments in core/dbt/adapters/ ([#5690](https://github.com/dbt-labs/dbt-core/issues/5690), [#5693](https://github.com/dbt-labs/dbt-core/pull/5693))
|
|
||||||
- Include py.typed in MANIFEST.in. This enables packages that install dbt-core from pypi to use mypy. ([#5703](https://github.com/dbt-labs/dbt-core/issues/5703), [#5703](https://github.com/dbt-labs/dbt-core/pull/5703))
|
|
||||||
- Removal of all .coverage files when using make clean command ([#5633](https://github.com/dbt-labs/dbt-core/issues/5633), [#5759](https://github.com/dbt-labs/dbt-core/pull/5759))
|
|
||||||
- Remove temp files generated by unit tests ([#5631](https://github.com/dbt-labs/dbt-core/issues/5631), [#5749](https://github.com/dbt-labs/dbt-core/pull/5749))
|
|
||||||
- Fix warnings as errors during tests ([#5424](https://github.com/dbt-labs/dbt-core/issues/5424), [#5800](https://github.com/dbt-labs/dbt-core/pull/5800))
|
|
||||||
- Prevent event_history from holding references ([#5848](https://github.com/dbt-labs/dbt-core/issues/5848), [#5858](https://github.com/dbt-labs/dbt-core/pull/5858))
|
|
||||||
- Account for disabled flags on models in schema files more completely ([#3992](https://github.com/dbt-labs/dbt-core/issues/3992), [#5868](https://github.com/dbt-labs/dbt-core/pull/5868))
|
|
||||||
- ConfigSelectorMethod should check for bools ([#5890](https://github.com/dbt-labs/dbt-core/issues/5890), [#5889](https://github.com/dbt-labs/dbt-core/pull/5889))
|
|
||||||
- shorthand for full refresh should be one character ([#5878](https://github.com/dbt-labs/dbt-core/issues/5878), [#5908](https://github.com/dbt-labs/dbt-core/pull/5908))
|
|
||||||
- Fix macro resolution order during static analysis for custom generic tests ([#5720](https://github.com/dbt-labs/dbt-core/issues/5720), [#5907](https://github.com/dbt-labs/dbt-core/pull/5907))
|
|
||||||
- Fix race condition when invoking dbt via lib.py concurrently ([#5919](https://github.com/dbt-labs/dbt-core/issues/5919), [#5921](https://github.com/dbt-labs/dbt-core/pull/5921))
|
|
||||||
- check length of args of python model function before accessing it ([#6041](https://github.com/dbt-labs/dbt-core/issues/6041), [#6042](https://github.com/dbt-labs/dbt-core/pull/6042))
|
|
||||||
|
|
||||||
### Docs
|
|
||||||
|
|
||||||
- Update dependency inline-source from ^6.1.5 to ^7.2.0 ([dbt-docs/#299](https://github.com/dbt-labs/dbt-docs/issues/299), [dbt-docs/#291](https://github.com/dbt-labs/dbt-docs/pull/291))
|
|
||||||
- Update dependency jest from ^26.2.2 to ^28.1.3 ([dbt-docs/#299](https://github.com/dbt-labs/dbt-docs/issues/299), [dbt-docs/#291](https://github.com/dbt-labs/dbt-docs/pull/291))
|
|
||||||
- Update dependency underscore from ^1.9.0 to ^1.13.4 ([dbt-docs/#299](https://github.com/dbt-labs/dbt-docs/issues/299), [dbt-docs/#291](https://github.com/dbt-labs/dbt-docs/pull/291))
|
|
||||||
- Update dependency webpack-cli from ^3.3.12 to ^4.7.0 ([dbt-docs/#299](https://github.com/dbt-labs/dbt-docs/issues/299), [dbt-docs/#291](https://github.com/dbt-labs/dbt-docs/pull/291))
|
|
||||||
- Update dependency webpack-dev-server from ^3.1.11 to ^4.9.3 ([dbt-docs/#299](https://github.com/dbt-labs/dbt-docs/issues/299), [dbt-docs/#291](https://github.com/dbt-labs/dbt-docs/pull/291))
|
|
||||||
- Searches no longer require perfect matches, and instead consider each word individually. `my model` or `model my` will now find `my_model`, without the need for underscores ([dbt-docs/#143](https://github.com/dbt-labs/dbt-docs/issues/143), [dbt-docs/#145](https://github.com/dbt-labs/dbt-docs/pull/145))
|
|
||||||
- Support the renaming of SQL to code happening in dbt-core ([dbt-docs/#299](https://github.com/dbt-labs/dbt-docs/issues/299), [dbt-docs/#292](https://github.com/dbt-labs/dbt-docs/pull/292))
|
|
||||||
- Leverages `docs.node_color` from `dbt-core` to color nodes in the DAG ([dbt-docs/#44](https://github.com/dbt-labs/dbt-docs/issues/44), [dbt-docs/#281](https://github.com/dbt-labs/dbt-docs/pull/281))
|
|
||||||
- Refer to exposures by their label by default. ([dbt-docs/#306](https://github.com/dbt-labs/dbt-docs/issues/306), [dbt-docs/#307](https://github.com/dbt-labs/dbt-docs/pull/307))
|
|
||||||
|
|
||||||
### Under the Hood
|
|
||||||
|
|
||||||
- Added language to tracked fields in run_model event ([#5571](https://github.com/dbt-labs/dbt-core/issues/5571), [#5469](https://github.com/dbt-labs/dbt-core/pull/5469))
|
|
||||||
- Update mashumaro to 3.0.3 ([#4940](https://github.com/dbt-labs/dbt-core/issues/4940), [#5118](https://github.com/dbt-labs/dbt-core/pull/5118))
|
|
||||||
- Add python incremental materialization test ([#0000](https://github.com/dbt-labs/dbt-core/issues/0000), [#5571](https://github.com/dbt-labs/dbt-core/pull/5571))
|
|
||||||
- Save use of default env vars to manifest to enable partial parsing in those cases. ([#5155](https://github.com/dbt-labs/dbt-core/issues/5155), [#5589](https://github.com/dbt-labs/dbt-core/pull/5589))
|
|
||||||
- add more information to log line interop test failures ([#5658](https://github.com/dbt-labs/dbt-core/issues/5658), [#5659](https://github.com/dbt-labs/dbt-core/pull/5659))
|
|
||||||
- Add supported languages to materializations ([#5569](https://github.com/dbt-labs/dbt-core/issues/5569), [#5695](https://github.com/dbt-labs/dbt-core/pull/5695))
|
|
||||||
- Migrate integration test 014 but also fix the snapshot hard delete test's timezone logic and force all integration tests to run flags.set_from_args to force environment variables are accessible to all integration test threads. ([#5760](https://github.com/dbt-labs/dbt-core/issues/5760), [#5760](https://github.com/dbt-labs/dbt-core/pull/5760))
|
|
||||||
- Support dbt-metrics compilation by rebuilding flat_graph ([#5525](https://github.com/dbt-labs/dbt-core/issues/5525), [#5786](https://github.com/dbt-labs/dbt-core/pull/5786))
|
|
||||||
- Reworking the way we define the window attribute of metrics to match freshness tests ([#5722](https://github.com/dbt-labs/dbt-core/issues/5722), [#5793](https://github.com/dbt-labs/dbt-core/pull/5793))
|
|
||||||
- Add PythonJobHelper base class in core and add more type checking ([#5802](https://github.com/dbt-labs/dbt-core/issues/5802), [#5802](https://github.com/dbt-labs/dbt-core/pull/5802))
|
|
||||||
- The link did not go to the anchor directly, now it does ([#5813](https://github.com/dbt-labs/dbt-core/issues/5813), [#5814](https://github.com/dbt-labs/dbt-core/pull/5814))
|
|
||||||
- remove key as reserved keyword from test_bool_or ([#5817](https://github.com/dbt-labs/dbt-core/issues/5817), [#5818](https://github.com/dbt-labs/dbt-core/pull/5818))
|
|
||||||
- Convert default selector tests to pytest ([#5728](https://github.com/dbt-labs/dbt-core/issues/5728), [#5820](https://github.com/dbt-labs/dbt-core/pull/5820))
|
|
||||||
- Compatibiltiy for metric attribute renaming ([#5807](https://github.com/dbt-labs/dbt-core/issues/5807), [#5825](https://github.com/dbt-labs/dbt-core/pull/5825))
|
|
||||||
- remove source quoting setting in adapter tests ([#5836](https://github.com/dbt-labs/dbt-core/issues/5836), [#5839](https://github.com/dbt-labs/dbt-core/pull/5839))
|
|
||||||
- Add name validation for metrics ([#5456](https://github.com/dbt-labs/dbt-core/issues/5456), [#5841](https://github.com/dbt-labs/dbt-core/pull/5841))
|
|
||||||
- Validate exposure name and add label ([#5606](https://github.com/dbt-labs/dbt-core/issues/5606), [#5844](https://github.com/dbt-labs/dbt-core/pull/5844))
|
|
||||||
- Adding validation for metric expression attribute ([#5871](https://github.com/dbt-labs/dbt-core/issues/5871), [#5873](https://github.com/dbt-labs/dbt-core/pull/5873))
|
|
||||||
- Profiling and Adapter Management work with Click CLI ([#5531](https://github.com/dbt-labs/dbt-core/issues/5531), [#5892](https://github.com/dbt-labs/dbt-core/pull/5892))
|
|
||||||
- Reparse references to deleted metric ([#5444](https://github.com/dbt-labs/dbt-core/issues/5444), [#5920](https://github.com/dbt-labs/dbt-core/pull/5920))
|
|
||||||
|
|
||||||
### Dependencies
|
|
||||||
|
|
||||||
- Upgrade to Jinja2==3.1.2 from Jinja2==2.11.3 ([#4748](https://github.com/dbt-labs/dbt-core/issues/4748), [#5465](https://github.com/dbt-labs/dbt-core/pull/5465))
|
|
||||||
- Bump mypy from 0.961 to 0.971 ([#4904](https://github.com/dbt-labs/dbt-core/issues/4904), [#5495](https://github.com/dbt-labs/dbt-core/pull/5495))
|
|
||||||
- Remove pin for MarkUpSafe from >=0.23,<2.1 ([#5506](https://github.com/dbt-labs/dbt-core/issues/5506), [#5507](https://github.com/dbt-labs/dbt-core/pull/5507))
|
|
||||||
|
|
||||||
### Dependency
|
|
||||||
|
|
||||||
- Bump python from 3.10.5-slim-bullseye to 3.10.6-slim-bullseye in /docker ([#4904](https://github.com/dbt-labs/dbt-core/issues/4904), [#5623](https://github.com/dbt-labs/dbt-core/pull/5623))
|
|
||||||
- Bump mashumaro[msgpack] from 3.0.3 to 3.0.4 in /core ([#4904](https://github.com/dbt-labs/dbt-core/issues/4904), [#5649](https://github.com/dbt-labs/dbt-core/pull/5649))
|
|
||||||
- Bump black from 22.6.0 to 22.8.0 ([#4904](https://github.com/dbt-labs/dbt-core/issues/4904), [#5750](https://github.com/dbt-labs/dbt-core/pull/5750))
|
|
||||||
- Bump python from 3.10.6-slim-bullseye to 3.10.7-slim-bullseye in /docker ([#4904](https://github.com/dbt-labs/dbt-core/issues/4904), [#5805](https://github.com/dbt-labs/dbt-core/pull/5805))
|
|
||||||
|
|
||||||
### Contributors
|
|
||||||
- [@Goodkat](https://github.com/Goodkat) ([#5581](https://github.com/dbt-labs/dbt-core/pull/5581), [#5518](https://github.com/dbt-labs/dbt-core/pull/5518), [#5620](https://github.com/dbt-labs/dbt-core/pull/5620))
|
|
||||||
- [@Ilanbenb](https://github.com/Ilanbenb) ([#5505](https://github.com/dbt-labs/dbt-core/pull/5505))
|
|
||||||
- [@b-per](https://github.com/b-per) ([#5397](https://github.com/dbt-labs/dbt-core/pull/5397), [dbt-docs/#281](https://github.com/dbt-labs/dbt-docs/pull/281))
|
|
||||||
- [@bbroeksema](https://github.com/bbroeksema) ([#5749](https://github.com/dbt-labs/dbt-core/pull/5749))
|
|
||||||
- [@callum-mcdata](https://github.com/callum-mcdata) ([#5775](https://github.com/dbt-labs/dbt-core/pull/5775), [#5607](https://github.com/dbt-labs/dbt-core/pull/5607), [#5722](https://github.com/dbt-labs/dbt-core/pull/5722), [#5793](https://github.com/dbt-labs/dbt-core/pull/5793), [#5825](https://github.com/dbt-labs/dbt-core/pull/5825), [#5873](https://github.com/dbt-labs/dbt-core/pull/5873))
|
|
||||||
- [@danielcmessias](https://github.com/danielcmessias) ([#5889](https://github.com/dbt-labs/dbt-core/pull/5889))
|
|
||||||
- [@dave-connors-3](https://github.com/dave-connors-3) ([#5457](https://github.com/dbt-labs/dbt-core/pull/5457), [#5879](https://github.com/dbt-labs/dbt-core/pull/5879), [#5908](https://github.com/dbt-labs/dbt-core/pull/5908))
|
|
||||||
- [@dbeatty10](https://github.com/dbeatty10) ([#5717](https://github.com/dbt-labs/dbt-core/pull/5717), [#5823](https://github.com/dbt-labs/dbt-core/pull/5823))
|
|
||||||
- [@drewbanin](https://github.com/drewbanin) ([#5921](https://github.com/dbt-labs/dbt-core/pull/5921), [dbt-docs/#292](https://github.com/dbt-labs/dbt-docs/pull/292))
|
|
||||||
- [@epapineau](https://github.com/epapineau) ([#5395](https://github.com/dbt-labs/dbt-core/pull/5395))
|
|
||||||
- [@graciegoheen](https://github.com/graciegoheen) ([#5823](https://github.com/dbt-labs/dbt-core/pull/5823))
|
|
||||||
- [@jared-rimmer](https://github.com/jared-rimmer) ([#5782](https://github.com/dbt-labs/dbt-core/pull/5782), [#5794](https://github.com/dbt-labs/dbt-core/pull/5794), [#5759](https://github.com/dbt-labs/dbt-core/pull/5759))
|
|
||||||
- [@jeremyyeo](https://github.com/jeremyyeo) ([#5477](https://github.com/dbt-labs/dbt-core/pull/5477))
|
|
||||||
- [@joellabes](https://github.com/joellabes) ([dbt-docs/#145](https://github.com/dbt-labs/dbt-docs/pull/145))
|
|
||||||
- [@jpmmcneill](https://github.com/jpmmcneill) ([#5875](https://github.com/dbt-labs/dbt-core/pull/5875))
|
|
||||||
- [@kadero](https://github.com/kadero) ([#4514](https://github.com/dbt-labs/dbt-core/pull/4514))
|
|
||||||
- [@leoebfolsom](https://github.com/leoebfolsom) ([#5295](https://github.com/dbt-labs/dbt-core/pull/5295))
|
|
||||||
- [@matt-winkler](https://github.com/matt-winkler) ([#5397](https://github.com/dbt-labs/dbt-core/pull/5397), [dbt-docs/#281](https://github.com/dbt-labs/dbt-docs/pull/281))
|
|
||||||
- [@nicholasyager](https://github.com/nicholasyager) ([#5393](https://github.com/dbt-labs/dbt-core/pull/5393))
|
|
||||||
- [@panasenco](https://github.com/panasenco) ([#5703](https://github.com/dbt-labs/dbt-core/pull/5703))
|
|
||||||
- [@racheldaniel](https://github.com/racheldaniel) ([#5926](https://github.com/dbt-labs/dbt-core/pull/5926))
|
|
||||||
- [@sdebruyn](https://github.com/sdebruyn) ([#5814](https://github.com/dbt-labs/dbt-core/pull/5814), [#5818](https://github.com/dbt-labs/dbt-core/pull/5818), [#5839](https://github.com/dbt-labs/dbt-core/pull/5839))
|
|
||||||
- [@shrodingers](https://github.com/shrodingers) ([#5292](https://github.com/dbt-labs/dbt-core/pull/5292))
|
|
||||||
- [@sungchun12](https://github.com/sungchun12) ([#5397](https://github.com/dbt-labs/dbt-core/pull/5397), [dbt-docs/#281](https://github.com/dbt-labs/dbt-docs/pull/281))
|
|
||||||
- [@tomasfarias](https://github.com/tomasfarias) ([#5432](https://github.com/dbt-labs/dbt-core/pull/5432))
|
|
||||||
- [@varun-dc](https://github.com/varun-dc) ([#5627](https://github.com/dbt-labs/dbt-core/pull/5627))
|
|
||||||
- [@yoiki](https://github.com/yoiki) ([#5693](https://github.com/dbt-labs/dbt-core/pull/5693))
|
|
||||||
- [@chamini2](https://github.com/chamini2) ([#6042](https://github.com/dbt-labs/dbt-core/pull/6042))
|
|
||||||
|
|
||||||
|
|
||||||
## Previous Releases
|
## Previous Releases
|
||||||
|
|
||||||
For information on prior major and minor releases, see their changelogs:
|
For information on prior major and minor releases, see their changelogs:
|
||||||
|
|
||||||
|
|
||||||
|
* [1.3](https://github.com/dbt-labs/dbt-core/blob/1.3.latest/CHANGELOG.md)
|
||||||
* [1.2](https://github.com/dbt-labs/dbt-core/blob/1.2.latest/CHANGELOG.md)
|
* [1.2](https://github.com/dbt-labs/dbt-core/blob/1.2.latest/CHANGELOG.md)
|
||||||
* [1.1](https://github.com/dbt-labs/dbt-core/blob/1.1.latest/CHANGELOG.md)
|
* [1.1](https://github.com/dbt-labs/dbt-core/blob/1.1.latest/CHANGELOG.md)
|
||||||
* [1.0](https://github.com/dbt-labs/dbt-core/blob/1.0.latest/CHANGELOG.md)
|
* [1.0](https://github.com/dbt-labs/dbt-core/blob/1.0.latest/CHANGELOG.md)
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ import abc
|
|||||||
import os
|
import os
|
||||||
from time import sleep
|
from time import sleep
|
||||||
import sys
|
import sys
|
||||||
|
import traceback
|
||||||
|
|
||||||
# multiprocessing.RLock is a function returning this type
|
# multiprocessing.RLock is a function returning this type
|
||||||
from multiprocessing.synchronize import RLock
|
from multiprocessing.synchronize import RLock
|
||||||
@@ -40,14 +41,15 @@ from dbt.events.functions import fire_event
|
|||||||
from dbt.events.types import (
|
from dbt.events.types import (
|
||||||
NewConnection,
|
NewConnection,
|
||||||
ConnectionReused,
|
ConnectionReused,
|
||||||
|
ConnectionLeftOpenInCleanup,
|
||||||
ConnectionLeftOpen,
|
ConnectionLeftOpen,
|
||||||
ConnectionLeftOpen2,
|
ConnectionClosedInCleanup,
|
||||||
ConnectionClosed,
|
ConnectionClosed,
|
||||||
ConnectionClosed2,
|
|
||||||
Rollback,
|
Rollback,
|
||||||
RollbackFailed,
|
RollbackFailed,
|
||||||
)
|
)
|
||||||
from dbt import flags
|
from dbt import flags
|
||||||
|
from dbt.utils import cast_to_str
|
||||||
|
|
||||||
SleepTime = Union[int, float] # As taken by time.sleep.
|
SleepTime = Union[int, float] # As taken by time.sleep.
|
||||||
AdapterHandle = Any # Adapter connection handle objects can be any class.
|
AdapterHandle = Any # Adapter connection handle objects can be any class.
|
||||||
@@ -304,9 +306,9 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
|||||||
with self.lock:
|
with self.lock:
|
||||||
for connection in self.thread_connections.values():
|
for connection in self.thread_connections.values():
|
||||||
if connection.state not in {"closed", "init"}:
|
if connection.state not in {"closed", "init"}:
|
||||||
fire_event(ConnectionLeftOpen(conn_name=connection.name))
|
fire_event(ConnectionLeftOpenInCleanup(conn_name=cast_to_str(connection.name)))
|
||||||
else:
|
else:
|
||||||
fire_event(ConnectionClosed(conn_name=connection.name))
|
fire_event(ConnectionClosedInCleanup(conn_name=cast_to_str(connection.name)))
|
||||||
self.close(connection)
|
self.close(connection)
|
||||||
|
|
||||||
# garbage collect these connections
|
# garbage collect these connections
|
||||||
@@ -332,17 +334,21 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
|||||||
try:
|
try:
|
||||||
connection.handle.rollback()
|
connection.handle.rollback()
|
||||||
except Exception:
|
except Exception:
|
||||||
fire_event(RollbackFailed(conn_name=connection.name))
|
fire_event(
|
||||||
|
RollbackFailed(
|
||||||
|
conn_name=cast_to_str(connection.name), exc_info=traceback.format_exc()
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def _close_handle(cls, connection: Connection) -> None:
|
def _close_handle(cls, connection: Connection) -> None:
|
||||||
"""Perform the actual close operation."""
|
"""Perform the actual close operation."""
|
||||||
# On windows, sometimes connection handles don't have a close() attr.
|
# On windows, sometimes connection handles don't have a close() attr.
|
||||||
if hasattr(connection.handle, "close"):
|
if hasattr(connection.handle, "close"):
|
||||||
fire_event(ConnectionClosed2(conn_name=connection.name))
|
fire_event(ConnectionClosed(conn_name=cast_to_str(connection.name)))
|
||||||
connection.handle.close()
|
connection.handle.close()
|
||||||
else:
|
else:
|
||||||
fire_event(ConnectionLeftOpen2(conn_name=connection.name))
|
fire_event(ConnectionLeftOpen(conn_name=cast_to_str(connection.name)))
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def _rollback(cls, connection: Connection) -> None:
|
def _rollback(cls, connection: Connection) -> None:
|
||||||
@@ -353,7 +359,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
|||||||
f'"{connection.name}", but it does not have one open!'
|
f'"{connection.name}", but it does not have one open!'
|
||||||
)
|
)
|
||||||
|
|
||||||
fire_event(Rollback(conn_name=connection.name))
|
fire_event(Rollback(conn_name=cast_to_str(connection.name)))
|
||||||
cls._rollback_handle(connection)
|
cls._rollback_handle(connection)
|
||||||
|
|
||||||
connection.transaction_open = False
|
connection.transaction_open = False
|
||||||
@@ -365,7 +371,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
|
|||||||
return connection
|
return connection
|
||||||
|
|
||||||
if connection.transaction_open and connection.handle:
|
if connection.transaction_open and connection.handle:
|
||||||
fire_event(Rollback(conn_name=connection.name))
|
fire_event(Rollback(conn_name=cast_to_str(connection.name)))
|
||||||
cls._rollback_handle(connection)
|
cls._rollback_handle(connection)
|
||||||
connection.transaction_open = False
|
connection.transaction_open = False
|
||||||
|
|
||||||
|
|||||||
@@ -41,15 +41,15 @@ from dbt.clients.jinja import MacroGenerator
|
|||||||
from dbt.contracts.graph.compiled import CompileResultNode, CompiledSeedNode
|
from dbt.contracts.graph.compiled import CompileResultNode, CompiledSeedNode
|
||||||
from dbt.contracts.graph.manifest import Manifest, MacroManifest
|
from dbt.contracts.graph.manifest import Manifest, MacroManifest
|
||||||
from dbt.contracts.graph.parsed import ParsedSeedNode
|
from dbt.contracts.graph.parsed import ParsedSeedNode
|
||||||
from dbt.exceptions import warn_or_error
|
from dbt.events.functions import fire_event, warn_or_error
|
||||||
from dbt.events.functions import fire_event
|
|
||||||
from dbt.events.types import (
|
from dbt.events.types import (
|
||||||
CacheMiss,
|
CacheMiss,
|
||||||
ListRelations,
|
ListRelations,
|
||||||
CodeExecution,
|
CodeExecution,
|
||||||
CodeExecutionStatus,
|
CodeExecutionStatus,
|
||||||
|
CatalogGenerationError,
|
||||||
)
|
)
|
||||||
from dbt.utils import filter_null_values, executor
|
from dbt.utils import filter_null_values, executor, cast_to_str
|
||||||
|
|
||||||
from dbt.adapters.base.connections import Connection, AdapterResponse
|
from dbt.adapters.base.connections import Connection, AdapterResponse
|
||||||
from dbt.adapters.base.meta import AdapterMeta, available
|
from dbt.adapters.base.meta import AdapterMeta, available
|
||||||
@@ -61,7 +61,7 @@ from dbt.adapters.base.relation import (
|
|||||||
)
|
)
|
||||||
from dbt.adapters.base import Column as BaseColumn
|
from dbt.adapters.base import Column as BaseColumn
|
||||||
from dbt.adapters.base import Credentials
|
from dbt.adapters.base import Credentials
|
||||||
from dbt.adapters.cache import RelationsCache, _make_key
|
from dbt.adapters.cache import RelationsCache, _make_ref_key_msg
|
||||||
|
|
||||||
|
|
||||||
SeedModel = Union[ParsedSeedNode, CompiledSeedNode]
|
SeedModel = Union[ParsedSeedNode, CompiledSeedNode]
|
||||||
@@ -343,7 +343,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|||||||
fire_event(
|
fire_event(
|
||||||
CacheMiss(
|
CacheMiss(
|
||||||
conn_name=self.nice_connection_name(),
|
conn_name=self.nice_connection_name(),
|
||||||
database=database,
|
database=cast_to_str(database),
|
||||||
schema=schema,
|
schema=schema,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
@@ -581,7 +581,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|||||||
:rtype: List[self.Relation]
|
:rtype: List[self.Relation]
|
||||||
"""
|
"""
|
||||||
raise NotImplementedException(
|
raise NotImplementedException(
|
||||||
"`list_relations_without_caching` is not implemented for this " "adapter!"
|
"`list_relations_without_caching` is not implemented for this adapter!"
|
||||||
)
|
)
|
||||||
|
|
||||||
###
|
###
|
||||||
@@ -726,9 +726,9 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|||||||
relations = self.list_relations_without_caching(schema_relation)
|
relations = self.list_relations_without_caching(schema_relation)
|
||||||
fire_event(
|
fire_event(
|
||||||
ListRelations(
|
ListRelations(
|
||||||
database=database,
|
database=cast_to_str(database),
|
||||||
schema=schema,
|
schema=schema,
|
||||||
relations=[_make_key(x) for x in relations],
|
relations=[_make_ref_key_msg(x) for x in relations],
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -1327,7 +1327,7 @@ def catch_as_completed(
|
|||||||
elif isinstance(exc, KeyboardInterrupt) or not isinstance(exc, Exception):
|
elif isinstance(exc, KeyboardInterrupt) or not isinstance(exc, Exception):
|
||||||
raise exc
|
raise exc
|
||||||
else:
|
else:
|
||||||
warn_or_error(f"Encountered an error while generating catalog: {str(exc)}")
|
warn_or_error(CatalogGenerationError(exc=str(exc)))
|
||||||
# exc is not None, derives from Exception, and isn't ctrl+c
|
# exc is not None, derives from Exception, and isn't ctrl+c
|
||||||
exceptions.append(exc)
|
exceptions.append(exc)
|
||||||
return merge_tables(tables), exceptions
|
return merge_tables(tables), exceptions
|
||||||
|
|||||||
@@ -3,9 +3,14 @@ import threading
|
|||||||
from copy import deepcopy
|
from copy import deepcopy
|
||||||
from typing import Any, Dict, Iterable, List, Optional, Set, Tuple
|
from typing import Any, Dict, Iterable, List, Optional, Set, Tuple
|
||||||
|
|
||||||
from dbt.adapters.reference_keys import _make_key, _ReferenceKey
|
from dbt.adapters.reference_keys import (
|
||||||
|
_make_ref_key,
|
||||||
|
_make_ref_key_msg,
|
||||||
|
_make_msg_from_ref_key,
|
||||||
|
_ReferenceKey,
|
||||||
|
)
|
||||||
import dbt.exceptions
|
import dbt.exceptions
|
||||||
from dbt.events.functions import fire_event
|
from dbt.events.functions import fire_event, fire_event_if
|
||||||
from dbt.events.types import (
|
from dbt.events.types import (
|
||||||
AddLink,
|
AddLink,
|
||||||
AddRelation,
|
AddRelation,
|
||||||
@@ -21,8 +26,8 @@ from dbt.events.types import (
|
|||||||
UncachedRelation,
|
UncachedRelation,
|
||||||
UpdateReference,
|
UpdateReference,
|
||||||
)
|
)
|
||||||
|
import dbt.flags as flags
|
||||||
from dbt.utils import lowercase
|
from dbt.utils import lowercase
|
||||||
from dbt.helper_types import Lazy
|
|
||||||
|
|
||||||
|
|
||||||
def dot_separated(key: _ReferenceKey) -> str:
|
def dot_separated(key: _ReferenceKey) -> str:
|
||||||
@@ -82,7 +87,7 @@ class _CachedRelation:
|
|||||||
|
|
||||||
:return _ReferenceKey: A key for this relation.
|
:return _ReferenceKey: A key for this relation.
|
||||||
"""
|
"""
|
||||||
return _make_key(self)
|
return _make_ref_key(self)
|
||||||
|
|
||||||
def add_reference(self, referrer: "_CachedRelation"):
|
def add_reference(self, referrer: "_CachedRelation"):
|
||||||
"""Add a reference from referrer to self, indicating that if this node
|
"""Add a reference from referrer to self, indicating that if this node
|
||||||
@@ -294,13 +299,18 @@ class RelationsCache:
|
|||||||
:param BaseRelation dependent: The dependent model.
|
:param BaseRelation dependent: The dependent model.
|
||||||
:raises InternalError: If either entry does not exist.
|
:raises InternalError: If either entry does not exist.
|
||||||
"""
|
"""
|
||||||
ref_key = _make_key(referenced)
|
ref_key = _make_ref_key(referenced)
|
||||||
dep_key = _make_key(dependent)
|
dep_key = _make_ref_key(dependent)
|
||||||
if (ref_key.database, ref_key.schema) not in self:
|
if (ref_key.database, ref_key.schema) not in self:
|
||||||
# if we have not cached the referenced schema at all, we must be
|
# if we have not cached the referenced schema at all, we must be
|
||||||
# referring to a table outside our control. There's no need to make
|
# referring to a table outside our control. There's no need to make
|
||||||
# a link - we will never drop the referenced relation during a run.
|
# a link - we will never drop the referenced relation during a run.
|
||||||
fire_event(UncachedRelation(dep_key=dep_key, ref_key=ref_key))
|
fire_event(
|
||||||
|
UncachedRelation(
|
||||||
|
dep_key=_make_msg_from_ref_key(dep_key),
|
||||||
|
ref_key=_make_msg_from_ref_key(ref_key),
|
||||||
|
)
|
||||||
|
)
|
||||||
return
|
return
|
||||||
if ref_key not in self.relations:
|
if ref_key not in self.relations:
|
||||||
# Insert a dummy "external" relation.
|
# Insert a dummy "external" relation.
|
||||||
@@ -310,7 +320,11 @@ class RelationsCache:
|
|||||||
# Insert a dummy "external" relation.
|
# Insert a dummy "external" relation.
|
||||||
dependent = dependent.replace(type=referenced.External)
|
dependent = dependent.replace(type=referenced.External)
|
||||||
self.add(dependent)
|
self.add(dependent)
|
||||||
fire_event(AddLink(dep_key=dep_key, ref_key=ref_key))
|
fire_event(
|
||||||
|
AddLink(
|
||||||
|
dep_key=_make_msg_from_ref_key(dep_key), ref_key=_make_msg_from_ref_key(ref_key)
|
||||||
|
)
|
||||||
|
)
|
||||||
with self.lock:
|
with self.lock:
|
||||||
self._add_link(ref_key, dep_key)
|
self._add_link(ref_key, dep_key)
|
||||||
|
|
||||||
@@ -321,12 +335,12 @@ class RelationsCache:
|
|||||||
:param BaseRelation relation: The underlying relation.
|
:param BaseRelation relation: The underlying relation.
|
||||||
"""
|
"""
|
||||||
cached = _CachedRelation(relation)
|
cached = _CachedRelation(relation)
|
||||||
fire_event(AddRelation(relation=_make_key(cached)))
|
fire_event(AddRelation(relation=_make_ref_key_msg(cached)))
|
||||||
fire_event(DumpBeforeAddGraph(dump=Lazy.defer(lambda: self.dump_graph())))
|
fire_event_if(flags.LOG_CACHE_EVENTS, lambda: DumpBeforeAddGraph(dump=self.dump_graph()))
|
||||||
|
|
||||||
with self.lock:
|
with self.lock:
|
||||||
self._setdefault(cached)
|
self._setdefault(cached)
|
||||||
fire_event(DumpAfterAddGraph(dump=Lazy.defer(lambda: self.dump_graph())))
|
fire_event_if(flags.LOG_CACHE_EVENTS, lambda: DumpAfterAddGraph(dump=self.dump_graph()))
|
||||||
|
|
||||||
def _remove_refs(self, keys):
|
def _remove_refs(self, keys):
|
||||||
"""Removes all references to all entries in keys. This does not
|
"""Removes all references to all entries in keys. This does not
|
||||||
@@ -341,19 +355,6 @@ class RelationsCache:
|
|||||||
for cached in self.relations.values():
|
for cached in self.relations.values():
|
||||||
cached.release_references(keys)
|
cached.release_references(keys)
|
||||||
|
|
||||||
def _drop_cascade_relation(self, dropped_key):
|
|
||||||
"""Drop the given relation and cascade it appropriately to all
|
|
||||||
dependent relations.
|
|
||||||
|
|
||||||
:param _CachedRelation dropped: An existing _CachedRelation to drop.
|
|
||||||
"""
|
|
||||||
if dropped_key not in self.relations:
|
|
||||||
fire_event(DropMissingRelation(relation=dropped_key))
|
|
||||||
return
|
|
||||||
consequences = self.relations[dropped_key].collect_consequences()
|
|
||||||
fire_event(DropCascade(dropped=dropped_key, consequences=consequences))
|
|
||||||
self._remove_refs(consequences)
|
|
||||||
|
|
||||||
def drop(self, relation):
|
def drop(self, relation):
|
||||||
"""Drop the named relation and cascade it appropriately to all
|
"""Drop the named relation and cascade it appropriately to all
|
||||||
dependent relations.
|
dependent relations.
|
||||||
@@ -365,10 +366,19 @@ class RelationsCache:
|
|||||||
:param str schema: The schema of the relation to drop.
|
:param str schema: The schema of the relation to drop.
|
||||||
:param str identifier: The identifier of the relation to drop.
|
:param str identifier: The identifier of the relation to drop.
|
||||||
"""
|
"""
|
||||||
dropped_key = _make_key(relation)
|
dropped_key = _make_ref_key(relation)
|
||||||
fire_event(DropRelation(dropped=dropped_key))
|
dropped_key_msg = _make_ref_key_msg(relation)
|
||||||
|
fire_event(DropRelation(dropped=dropped_key_msg))
|
||||||
with self.lock:
|
with self.lock:
|
||||||
self._drop_cascade_relation(dropped_key)
|
if dropped_key not in self.relations:
|
||||||
|
fire_event(DropMissingRelation(relation=dropped_key_msg))
|
||||||
|
return
|
||||||
|
consequences = self.relations[dropped_key].collect_consequences()
|
||||||
|
# convert from a list of _ReferenceKeys to a list of ReferenceKeyMsgs
|
||||||
|
consequence_msgs = [_make_msg_from_ref_key(key) for key in consequences]
|
||||||
|
|
||||||
|
fire_event(DropCascade(dropped=dropped_key_msg, consequences=consequence_msgs))
|
||||||
|
self._remove_refs(consequences)
|
||||||
|
|
||||||
def _rename_relation(self, old_key, new_relation):
|
def _rename_relation(self, old_key, new_relation):
|
||||||
"""Rename a relation named old_key to new_key, updating references.
|
"""Rename a relation named old_key to new_key, updating references.
|
||||||
@@ -390,7 +400,11 @@ class RelationsCache:
|
|||||||
for cached in self.relations.values():
|
for cached in self.relations.values():
|
||||||
if cached.is_referenced_by(old_key):
|
if cached.is_referenced_by(old_key):
|
||||||
fire_event(
|
fire_event(
|
||||||
UpdateReference(old_key=old_key, new_key=new_key, cached_key=cached.key())
|
UpdateReference(
|
||||||
|
old_key=_make_ref_key_msg(old_key),
|
||||||
|
new_key=_make_ref_key_msg(new_key),
|
||||||
|
cached_key=_make_ref_key_msg(cached.key()),
|
||||||
|
)
|
||||||
)
|
)
|
||||||
cached.rename_key(old_key, new_key)
|
cached.rename_key(old_key, new_key)
|
||||||
|
|
||||||
@@ -436,7 +450,7 @@ class RelationsCache:
|
|||||||
)
|
)
|
||||||
|
|
||||||
if old_key not in self.relations:
|
if old_key not in self.relations:
|
||||||
fire_event(TemporaryRelation(key=old_key))
|
fire_event(TemporaryRelation(key=_make_msg_from_ref_key(old_key)))
|
||||||
return False
|
return False
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@@ -452,11 +466,17 @@ class RelationsCache:
|
|||||||
:param BaseRelation new: The new relation name information.
|
:param BaseRelation new: The new relation name information.
|
||||||
:raises InternalError: If the new key is already present.
|
:raises InternalError: If the new key is already present.
|
||||||
"""
|
"""
|
||||||
old_key = _make_key(old)
|
old_key = _make_ref_key(old)
|
||||||
new_key = _make_key(new)
|
new_key = _make_ref_key(new)
|
||||||
fire_event(RenameSchema(old_key=old_key, new_key=new_key))
|
fire_event(
|
||||||
|
RenameSchema(
|
||||||
|
old_key=_make_msg_from_ref_key(old_key), new_key=_make_msg_from_ref_key(new)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
fire_event(DumpBeforeRenameSchema(dump=Lazy.defer(lambda: self.dump_graph())))
|
fire_event_if(
|
||||||
|
flags.LOG_CACHE_EVENTS, lambda: DumpBeforeRenameSchema(dump=self.dump_graph())
|
||||||
|
)
|
||||||
|
|
||||||
with self.lock:
|
with self.lock:
|
||||||
if self._check_rename_constraints(old_key, new_key):
|
if self._check_rename_constraints(old_key, new_key):
|
||||||
@@ -464,7 +484,9 @@ class RelationsCache:
|
|||||||
else:
|
else:
|
||||||
self._setdefault(_CachedRelation(new))
|
self._setdefault(_CachedRelation(new))
|
||||||
|
|
||||||
fire_event(DumpAfterRenameSchema(dump=Lazy.defer(lambda: self.dump_graph())))
|
fire_event_if(
|
||||||
|
flags.LOG_CACHE_EVENTS, lambda: DumpAfterRenameSchema(dump=self.dump_graph())
|
||||||
|
)
|
||||||
|
|
||||||
def get_relations(self, database: Optional[str], schema: Optional[str]) -> List[Any]:
|
def get_relations(self, database: Optional[str], schema: Optional[str]) -> List[Any]:
|
||||||
"""Case-insensitively yield all relations matching the given schema.
|
"""Case-insensitively yield all relations matching the given schema.
|
||||||
@@ -512,6 +534,6 @@ class RelationsCache:
|
|||||||
"""
|
"""
|
||||||
for relation in to_remove:
|
for relation in to_remove:
|
||||||
# it may have been cascaded out already
|
# it may have been cascaded out already
|
||||||
drop_key = _make_key(relation)
|
drop_key = _make_ref_key(relation)
|
||||||
if drop_key in self.relations:
|
if drop_key in self.relations:
|
||||||
self.drop(drop_key)
|
self.drop(drop_key)
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
import threading
|
import threading
|
||||||
|
import traceback
|
||||||
from contextlib import contextmanager
|
from contextlib import contextmanager
|
||||||
from importlib import import_module
|
from importlib import import_module
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
@@ -63,7 +64,7 @@ class AdapterContainer:
|
|||||||
# otherwise, the error had to have come from some underlying
|
# otherwise, the error had to have come from some underlying
|
||||||
# library. Log the stack trace.
|
# library. Log the stack trace.
|
||||||
|
|
||||||
fire_event(PluginLoadError())
|
fire_event(PluginLoadError(exc_info=traceback.format_exc()))
|
||||||
raise
|
raise
|
||||||
plugin: AdapterPlugin = mod.Plugin
|
plugin: AdapterPlugin = mod.Plugin
|
||||||
plugin_type = plugin.adapter.type()
|
plugin_type = plugin.adapter.type()
|
||||||
|
|||||||
@@ -2,6 +2,7 @@
|
|||||||
|
|
||||||
from collections import namedtuple
|
from collections import namedtuple
|
||||||
from typing import Any, Optional
|
from typing import Any, Optional
|
||||||
|
from dbt.events.proto_types import ReferenceKeyMsg
|
||||||
|
|
||||||
|
|
||||||
_ReferenceKey = namedtuple("_ReferenceKey", "database schema identifier")
|
_ReferenceKey = namedtuple("_ReferenceKey", "database schema identifier")
|
||||||
@@ -14,7 +15,12 @@ def lowercase(value: Optional[str]) -> Optional[str]:
|
|||||||
return value.lower()
|
return value.lower()
|
||||||
|
|
||||||
|
|
||||||
|
# For backwards compatibility. New code should use _make_ref_key
|
||||||
def _make_key(relation: Any) -> _ReferenceKey:
|
def _make_key(relation: Any) -> _ReferenceKey:
|
||||||
|
return _make_ref_key(relation)
|
||||||
|
|
||||||
|
|
||||||
|
def _make_ref_key(relation: Any) -> _ReferenceKey:
|
||||||
"""Make _ReferenceKeys with lowercase values for the cache so we don't have
|
"""Make _ReferenceKeys with lowercase values for the cache so we don't have
|
||||||
to keep track of quoting
|
to keep track of quoting
|
||||||
"""
|
"""
|
||||||
@@ -22,3 +28,13 @@ def _make_key(relation: Any) -> _ReferenceKey:
|
|||||||
return _ReferenceKey(
|
return _ReferenceKey(
|
||||||
lowercase(relation.database), lowercase(relation.schema), lowercase(relation.identifier)
|
lowercase(relation.database), lowercase(relation.schema), lowercase(relation.identifier)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _make_ref_key_msg(relation: Any):
|
||||||
|
return _make_msg_from_ref_key(_make_ref_key(relation))
|
||||||
|
|
||||||
|
|
||||||
|
def _make_msg_from_ref_key(ref_key: _ReferenceKey) -> ReferenceKeyMsg:
|
||||||
|
return ReferenceKeyMsg(
|
||||||
|
database=ref_key.database, schema=ref_key.schema, identifier=ref_key.identifier
|
||||||
|
)
|
||||||
|
|||||||
@@ -10,6 +10,7 @@ from dbt.adapters.base import BaseConnectionManager
|
|||||||
from dbt.contracts.connection import Connection, ConnectionState, AdapterResponse
|
from dbt.contracts.connection import Connection, ConnectionState, AdapterResponse
|
||||||
from dbt.events.functions import fire_event
|
from dbt.events.functions import fire_event
|
||||||
from dbt.events.types import ConnectionUsed, SQLQuery, SQLCommit, SQLQueryStatus
|
from dbt.events.types import ConnectionUsed, SQLQuery, SQLCommit, SQLQueryStatus
|
||||||
|
from dbt.utils import cast_to_str
|
||||||
|
|
||||||
|
|
||||||
class SQLConnectionManager(BaseConnectionManager):
|
class SQLConnectionManager(BaseConnectionManager):
|
||||||
@@ -55,7 +56,7 @@ class SQLConnectionManager(BaseConnectionManager):
|
|||||||
connection = self.get_thread_connection()
|
connection = self.get_thread_connection()
|
||||||
if auto_begin and connection.transaction_open is False:
|
if auto_begin and connection.transaction_open is False:
|
||||||
self.begin()
|
self.begin()
|
||||||
fire_event(ConnectionUsed(conn_type=self.TYPE, conn_name=connection.name))
|
fire_event(ConnectionUsed(conn_type=self.TYPE, conn_name=cast_to_str(connection.name)))
|
||||||
|
|
||||||
with self.exception_handler(sql):
|
with self.exception_handler(sql):
|
||||||
if abridge_sql_log:
|
if abridge_sql_log:
|
||||||
@@ -63,7 +64,7 @@ class SQLConnectionManager(BaseConnectionManager):
|
|||||||
else:
|
else:
|
||||||
log_sql = sql
|
log_sql = sql
|
||||||
|
|
||||||
fire_event(SQLQuery(conn_name=connection.name, sql=log_sql))
|
fire_event(SQLQuery(conn_name=cast_to_str(connection.name), sql=log_sql))
|
||||||
pre = time.time()
|
pre = time.time()
|
||||||
|
|
||||||
cursor = connection.handle.cursor()
|
cursor = connection.handle.cursor()
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ import dbt.clients.agate_helper
|
|||||||
from dbt.contracts.connection import Connection
|
from dbt.contracts.connection import Connection
|
||||||
import dbt.exceptions
|
import dbt.exceptions
|
||||||
from dbt.adapters.base import BaseAdapter, available
|
from dbt.adapters.base import BaseAdapter, available
|
||||||
from dbt.adapters.cache import _make_key
|
from dbt.adapters.cache import _make_ref_key_msg
|
||||||
from dbt.adapters.sql import SQLConnectionManager
|
from dbt.adapters.sql import SQLConnectionManager
|
||||||
from dbt.events.functions import fire_event
|
from dbt.events.functions import fire_event
|
||||||
from dbt.events.types import ColTypeChange, SchemaCreation, SchemaDrop
|
from dbt.events.types import ColTypeChange, SchemaCreation, SchemaDrop
|
||||||
@@ -110,7 +110,7 @@ class SQLAdapter(BaseAdapter):
|
|||||||
ColTypeChange(
|
ColTypeChange(
|
||||||
orig_type=target_column.data_type,
|
orig_type=target_column.data_type,
|
||||||
new_type=new_type,
|
new_type=new_type,
|
||||||
table=_make_key(current),
|
table=_make_ref_key_msg(current),
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -155,7 +155,7 @@ class SQLAdapter(BaseAdapter):
|
|||||||
|
|
||||||
def create_schema(self, relation: BaseRelation) -> None:
|
def create_schema(self, relation: BaseRelation) -> None:
|
||||||
relation = relation.without_identifier()
|
relation = relation.without_identifier()
|
||||||
fire_event(SchemaCreation(relation=_make_key(relation)))
|
fire_event(SchemaCreation(relation=_make_ref_key_msg(relation)))
|
||||||
kwargs = {
|
kwargs = {
|
||||||
"relation": relation,
|
"relation": relation,
|
||||||
}
|
}
|
||||||
@@ -166,7 +166,7 @@ class SQLAdapter(BaseAdapter):
|
|||||||
|
|
||||||
def drop_schema(self, relation: BaseRelation) -> None:
|
def drop_schema(self, relation: BaseRelation) -> None:
|
||||||
relation = relation.without_identifier()
|
relation = relation.without_identifier()
|
||||||
fire_event(SchemaDrop(relation=_make_key(relation)))
|
fire_event(SchemaDrop(relation=_make_ref_key_msg(relation)))
|
||||||
kwargs = {
|
kwargs = {
|
||||||
"relation": relation,
|
"relation": relation,
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -367,9 +367,9 @@ class BlockIterator:
|
|||||||
if self.current:
|
if self.current:
|
||||||
linecount = self.data[: self.current.end].count("\n") + 1
|
linecount = self.data[: self.current.end].count("\n") + 1
|
||||||
dbt.exceptions.raise_compiler_error(
|
dbt.exceptions.raise_compiler_error(
|
||||||
(
|
("Reached EOF without finding a close tag for {} (searched from line {})").format(
|
||||||
"Reached EOF without finding a close tag for " "{} (searched from line {})"
|
self.current.block_type_name, linecount
|
||||||
).format(self.current.block_type_name, linecount)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
if collect_raw_data:
|
if collect_raw_data:
|
||||||
|
|||||||
@@ -3,9 +3,9 @@ from typing import Any, Dict, List
|
|||||||
import requests
|
import requests
|
||||||
from dbt.events.functions import fire_event
|
from dbt.events.functions import fire_event
|
||||||
from dbt.events.types import (
|
from dbt.events.types import (
|
||||||
RegistryProgressMakingGETRequest,
|
RegistryProgressGETRequest,
|
||||||
RegistryProgressGETResponse,
|
RegistryProgressGETResponse,
|
||||||
RegistryIndexProgressMakingGETRequest,
|
RegistryIndexProgressGETRequest,
|
||||||
RegistryIndexProgressGETResponse,
|
RegistryIndexProgressGETResponse,
|
||||||
RegistryResponseUnexpectedType,
|
RegistryResponseUnexpectedType,
|
||||||
RegistryResponseMissingTopKeys,
|
RegistryResponseMissingTopKeys,
|
||||||
@@ -38,7 +38,7 @@ def _get_with_retries(package_name, registry_base_url=None):
|
|||||||
|
|
||||||
def _get(package_name, registry_base_url=None):
|
def _get(package_name, registry_base_url=None):
|
||||||
url = _get_url(package_name, registry_base_url)
|
url = _get_url(package_name, registry_base_url)
|
||||||
fire_event(RegistryProgressMakingGETRequest(url=url))
|
fire_event(RegistryProgressGETRequest(url=url))
|
||||||
# all exceptions from requests get caught in the retry logic so no need to wrap this here
|
# all exceptions from requests get caught in the retry logic so no need to wrap this here
|
||||||
resp = requests.get(url, timeout=30)
|
resp = requests.get(url, timeout=30)
|
||||||
fire_event(RegistryProgressGETResponse(url=url, resp_code=resp.status_code))
|
fire_event(RegistryProgressGETResponse(url=url, resp_code=resp.status_code))
|
||||||
@@ -162,7 +162,7 @@ def get_compatible_versions(package_name, dbt_version, should_version_check) ->
|
|||||||
def _get_index(registry_base_url=None):
|
def _get_index(registry_base_url=None):
|
||||||
|
|
||||||
url = _get_url("index", registry_base_url)
|
url = _get_url("index", registry_base_url)
|
||||||
fire_event(RegistryIndexProgressMakingGETRequest(url=url))
|
fire_event(RegistryIndexProgressGETRequest(url=url))
|
||||||
# all exceptions from requests get caught in the retry logic so no need to wrap this here
|
# all exceptions from requests get caught in the retry logic so no need to wrap this here
|
||||||
resp = requests.get(url, timeout=30)
|
resp = requests.get(url, timeout=30)
|
||||||
fire_event(RegistryIndexProgressGETResponse(url=url, resp_code=resp.status_code))
|
fire_event(RegistryIndexProgressGETResponse(url=url, resp_code=resp.status_code))
|
||||||
|
|||||||
@@ -56,6 +56,7 @@ def print_compile_stats(stats):
|
|||||||
NodeType.Source: "source",
|
NodeType.Source: "source",
|
||||||
NodeType.Exposure: "exposure",
|
NodeType.Exposure: "exposure",
|
||||||
NodeType.Metric: "metric",
|
NodeType.Metric: "metric",
|
||||||
|
NodeType.Entity: "entity",
|
||||||
}
|
}
|
||||||
|
|
||||||
results = {k: 0 for k in names.keys()}
|
results = {k: 0 for k in names.keys()}
|
||||||
@@ -91,6 +92,8 @@ def _generate_stats(manifest: Manifest):
|
|||||||
stats[exposure.resource_type] += 1
|
stats[exposure.resource_type] += 1
|
||||||
for metric in manifest.metrics.values():
|
for metric in manifest.metrics.values():
|
||||||
stats[metric.resource_type] += 1
|
stats[metric.resource_type] += 1
|
||||||
|
for entity in manifest.entities.values():
|
||||||
|
stats[entity.resource_type] += 1
|
||||||
for macro in manifest.macros.values():
|
for macro in manifest.macros.values():
|
||||||
stats[macro.resource_type] += 1
|
stats[macro.resource_type] += 1
|
||||||
return stats
|
return stats
|
||||||
|
|||||||
@@ -248,7 +248,7 @@ class PartialProject(RenderComponents):
|
|||||||
project_name: Optional[str] = field(
|
project_name: Optional[str] = field(
|
||||||
metadata=dict(
|
metadata=dict(
|
||||||
description=(
|
description=(
|
||||||
"The name of the project. This should always be set and will not " "be rendered"
|
"The name of the project. This should always be set and will not be rendered"
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
@@ -381,6 +381,7 @@ class PartialProject(RenderComponents):
|
|||||||
sources: Dict[str, Any]
|
sources: Dict[str, Any]
|
||||||
tests: Dict[str, Any]
|
tests: Dict[str, Any]
|
||||||
metrics: Dict[str, Any]
|
metrics: Dict[str, Any]
|
||||||
|
entities: Dict[str, Any]
|
||||||
exposures: Dict[str, Any]
|
exposures: Dict[str, Any]
|
||||||
vars_value: VarProvider
|
vars_value: VarProvider
|
||||||
|
|
||||||
@@ -391,6 +392,7 @@ class PartialProject(RenderComponents):
|
|||||||
sources = cfg.sources
|
sources = cfg.sources
|
||||||
tests = cfg.tests
|
tests = cfg.tests
|
||||||
metrics = cfg.metrics
|
metrics = cfg.metrics
|
||||||
|
entities = cfg.entities
|
||||||
exposures = cfg.exposures
|
exposures = cfg.exposures
|
||||||
if cfg.vars is None:
|
if cfg.vars is None:
|
||||||
vars_dict: Dict[str, Any] = {}
|
vars_dict: Dict[str, Any] = {}
|
||||||
@@ -446,6 +448,7 @@ class PartialProject(RenderComponents):
|
|||||||
sources=sources,
|
sources=sources,
|
||||||
tests=tests,
|
tests=tests,
|
||||||
metrics=metrics,
|
metrics=metrics,
|
||||||
|
entities=entities,
|
||||||
exposures=exposures,
|
exposures=exposures,
|
||||||
vars=vars_value,
|
vars=vars_value,
|
||||||
config_version=cfg.config_version,
|
config_version=cfg.config_version,
|
||||||
@@ -550,6 +553,7 @@ class Project:
|
|||||||
sources: Dict[str, Any]
|
sources: Dict[str, Any]
|
||||||
tests: Dict[str, Any]
|
tests: Dict[str, Any]
|
||||||
metrics: Dict[str, Any]
|
metrics: Dict[str, Any]
|
||||||
|
entities: Dict[str, Any]
|
||||||
exposures: Dict[str, Any]
|
exposures: Dict[str, Any]
|
||||||
vars: VarProvider
|
vars: VarProvider
|
||||||
dbt_version: List[VersionSpecifier]
|
dbt_version: List[VersionSpecifier]
|
||||||
@@ -624,6 +628,7 @@ class Project:
|
|||||||
"sources": self.sources,
|
"sources": self.sources,
|
||||||
"tests": self.tests,
|
"tests": self.tests,
|
||||||
"metrics": self.metrics,
|
"metrics": self.metrics,
|
||||||
|
"entities": self.entities,
|
||||||
"exposures": self.exposures,
|
"exposures": self.exposures,
|
||||||
"vars": self.vars.to_dict(),
|
"vars": self.vars.to_dict(),
|
||||||
"require-dbt-version": [v.to_version_string() for v in self.dbt_version],
|
"require-dbt-version": [v.to_version_string() for v in self.dbt_version],
|
||||||
@@ -668,7 +673,7 @@ class Project:
|
|||||||
def get_selector(self, name: str) -> Union[SelectionSpec, bool]:
|
def get_selector(self, name: str) -> Union[SelectionSpec, bool]:
|
||||||
if name not in self.selectors:
|
if name not in self.selectors:
|
||||||
raise RuntimeException(
|
raise RuntimeException(
|
||||||
f"Could not find selector named {name}, expected one of " f"{list(self.selectors)}"
|
f"Could not find selector named {name}, expected one of {list(self.selectors)}"
|
||||||
)
|
)
|
||||||
return self.selectors[name]["definition"]
|
return self.selectors[name]["definition"]
|
||||||
|
|
||||||
|
|||||||
@@ -8,7 +8,6 @@ from typing import (
|
|||||||
Dict,
|
Dict,
|
||||||
Iterable,
|
Iterable,
|
||||||
Iterator,
|
Iterator,
|
||||||
List,
|
|
||||||
Mapping,
|
Mapping,
|
||||||
MutableSet,
|
MutableSet,
|
||||||
Optional,
|
Optional,
|
||||||
@@ -30,10 +29,10 @@ from dbt.exceptions import (
|
|||||||
RuntimeException,
|
RuntimeException,
|
||||||
raise_compiler_error,
|
raise_compiler_error,
|
||||||
validator_error_message,
|
validator_error_message,
|
||||||
warn_or_error,
|
|
||||||
)
|
)
|
||||||
|
from dbt.events.functions import warn_or_error
|
||||||
|
from dbt.events.types import UnusedResourceConfigPath
|
||||||
from dbt.helper_types import DictDefaultEmptyStr, FQNPath, PathSet
|
from dbt.helper_types import DictDefaultEmptyStr, FQNPath, PathSet
|
||||||
from dbt.ui import warning_tag
|
|
||||||
|
|
||||||
from .profile import Profile
|
from .profile import Profile
|
||||||
from .project import Project, PartialProject
|
from .project import Project, PartialProject
|
||||||
@@ -117,6 +116,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
|
|||||||
sources=project.sources,
|
sources=project.sources,
|
||||||
tests=project.tests,
|
tests=project.tests,
|
||||||
metrics=project.metrics,
|
metrics=project.metrics,
|
||||||
|
entities=project.entities,
|
||||||
exposures=project.exposures,
|
exposures=project.exposures,
|
||||||
vars=project.vars,
|
vars=project.vars,
|
||||||
config_version=project.config_version,
|
config_version=project.config_version,
|
||||||
@@ -312,14 +312,15 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
|
|||||||
"sources": self._get_config_paths(self.sources),
|
"sources": self._get_config_paths(self.sources),
|
||||||
"tests": self._get_config_paths(self.tests),
|
"tests": self._get_config_paths(self.tests),
|
||||||
"metrics": self._get_config_paths(self.metrics),
|
"metrics": self._get_config_paths(self.metrics),
|
||||||
|
"entities": self._get_config_paths(self.entities),
|
||||||
"exposures": self._get_config_paths(self.exposures),
|
"exposures": self._get_config_paths(self.exposures),
|
||||||
}
|
}
|
||||||
|
|
||||||
def get_unused_resource_config_paths(
|
def warn_for_unused_resource_config_paths(
|
||||||
self,
|
self,
|
||||||
resource_fqns: Mapping[str, PathSet],
|
resource_fqns: Mapping[str, PathSet],
|
||||||
disabled: PathSet,
|
disabled: PathSet,
|
||||||
) -> List[FQNPath]:
|
) -> None:
|
||||||
"""Return a list of lists of strings, where each inner list of strings
|
"""Return a list of lists of strings, where each inner list of strings
|
||||||
represents a type + FQN path of a resource configuration that is not
|
represents a type + FQN path of a resource configuration that is not
|
||||||
used.
|
used.
|
||||||
@@ -333,23 +334,13 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
|
|||||||
|
|
||||||
for config_path in config_paths:
|
for config_path in config_paths:
|
||||||
if not _is_config_used(config_path, fqns):
|
if not _is_config_used(config_path, fqns):
|
||||||
unused_resource_config_paths.append((resource_type,) + config_path)
|
resource_path = ".".join(i for i in ((resource_type,) + config_path))
|
||||||
return unused_resource_config_paths
|
unused_resource_config_paths.append(resource_path)
|
||||||
|
|
||||||
def warn_for_unused_resource_config_paths(
|
if len(unused_resource_config_paths) == 0:
|
||||||
self,
|
|
||||||
resource_fqns: Mapping[str, PathSet],
|
|
||||||
disabled: PathSet,
|
|
||||||
) -> None:
|
|
||||||
unused = self.get_unused_resource_config_paths(resource_fqns, disabled)
|
|
||||||
if len(unused) == 0:
|
|
||||||
return
|
return
|
||||||
|
|
||||||
msg = UNUSED_RESOURCE_CONFIGURATION_PATH_MESSAGE.format(
|
warn_or_error(UnusedResourceConfigPath(unused_config_paths=unused_resource_config_paths))
|
||||||
len(unused), "\n".join("- {}".format(".".join(u)) for u in unused)
|
|
||||||
)
|
|
||||||
|
|
||||||
warn_or_error(msg, log_fmt=warning_tag("{}"))
|
|
||||||
|
|
||||||
def load_dependencies(self, base_only=False) -> Mapping[str, "RuntimeConfig"]:
|
def load_dependencies(self, base_only=False) -> Mapping[str, "RuntimeConfig"]:
|
||||||
if self.dependencies is None:
|
if self.dependencies is None:
|
||||||
@@ -517,6 +508,7 @@ class UnsetProfileConfig(RuntimeConfig):
|
|||||||
"sources": self.sources,
|
"sources": self.sources,
|
||||||
"tests": self.tests,
|
"tests": self.tests,
|
||||||
"metrics": self.metrics,
|
"metrics": self.metrics,
|
||||||
|
"entities": self.entities,
|
||||||
"exposures": self.exposures,
|
"exposures": self.exposures,
|
||||||
"vars": self.vars.to_dict(),
|
"vars": self.vars.to_dict(),
|
||||||
"require-dbt-version": [v.to_version_string() for v in self.dbt_version],
|
"require-dbt-version": [v.to_version_string() for v in self.dbt_version],
|
||||||
@@ -579,6 +571,7 @@ class UnsetProfileConfig(RuntimeConfig):
|
|||||||
sources=project.sources,
|
sources=project.sources,
|
||||||
tests=project.tests,
|
tests=project.tests,
|
||||||
metrics=project.metrics,
|
metrics=project.metrics,
|
||||||
|
entities=project.entities,
|
||||||
exposures=project.exposures,
|
exposures=project.exposures,
|
||||||
vars=project.vars,
|
vars=project.vars,
|
||||||
config_version=project.config_version,
|
config_version=project.config_version,
|
||||||
@@ -626,14 +619,6 @@ class UnsetProfileConfig(RuntimeConfig):
|
|||||||
return cls.from_parts(project=project, profile=profile, args=args)
|
return cls.from_parts(project=project, profile=profile, args=args)
|
||||||
|
|
||||||
|
|
||||||
UNUSED_RESOURCE_CONFIGURATION_PATH_MESSAGE = """\
|
|
||||||
Configuration paths exist in your dbt_project.yml file which do not \
|
|
||||||
apply to any resources.
|
|
||||||
There are {} unused configuration paths:
|
|
||||||
{}
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
def _is_config_used(path, fqns):
|
def _is_config_used(path, fqns):
|
||||||
if fqns:
|
if fqns:
|
||||||
for fqn in fqns:
|
for fqn in fqns:
|
||||||
|
|||||||
@@ -1,2 +1,10 @@
|
|||||||
SECRET_ENV_PREFIX = "DBT_ENV_SECRET_"
|
SECRET_ENV_PREFIX = "DBT_ENV_SECRET_"
|
||||||
DEFAULT_ENV_PLACEHOLDER = "DBT_DEFAULT_PLACEHOLDER"
|
DEFAULT_ENV_PLACEHOLDER = "DBT_DEFAULT_PLACEHOLDER"
|
||||||
|
METADATA_ENV_PREFIX = "DBT_ENV_CUSTOM_ENV_"
|
||||||
|
|
||||||
|
MAXIMUM_SEED_SIZE = 1 * 1024 * 1024
|
||||||
|
MAXIMUM_SEED_SIZE_NAME = "1MB"
|
||||||
|
|
||||||
|
PIN_PACKAGE_URL = (
|
||||||
|
"https://docs.getdbt.com/docs/package-management#section-specifying-package-versions"
|
||||||
|
)
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ from typing import Any, Dict, NoReturn, Optional, Mapping, Iterable, Set, List
|
|||||||
|
|
||||||
from dbt import flags
|
from dbt import flags
|
||||||
from dbt import tracking
|
from dbt import tracking
|
||||||
|
from dbt import utils
|
||||||
from dbt.clients.jinja import get_rendered
|
from dbt.clients.jinja import get_rendered
|
||||||
from dbt.clients.yaml_helper import yaml, safe_load, SafeLoader, Loader, Dumper # noqa: F401
|
from dbt.clients.yaml_helper import yaml, safe_load, SafeLoader, Loader, Dumper # noqa: F401
|
||||||
from dbt.constants import SECRET_ENV_PREFIX, DEFAULT_ENV_PLACEHOLDER
|
from dbt.constants import SECRET_ENV_PREFIX, DEFAULT_ENV_PLACEHOLDER
|
||||||
@@ -16,7 +17,7 @@ from dbt.exceptions import (
|
|||||||
disallow_secret_env_var,
|
disallow_secret_env_var,
|
||||||
)
|
)
|
||||||
from dbt.events.functions import fire_event, get_invocation_id
|
from dbt.events.functions import fire_event, get_invocation_id
|
||||||
from dbt.events.types import MacroEventInfo, MacroEventDebug
|
from dbt.events.types import JinjaLogInfo, JinjaLogDebug
|
||||||
from dbt.version import __version__ as dbt_version
|
from dbt.version import __version__ as dbt_version
|
||||||
|
|
||||||
# These modules are added to the context. Consider alternative
|
# These modules are added to the context. Consider alternative
|
||||||
@@ -126,7 +127,7 @@ class ContextMeta(type):
|
|||||||
|
|
||||||
|
|
||||||
class Var:
|
class Var:
|
||||||
UndefinedVarError = "Required var '{}' not found in config:\nVars " "supplied to {} = {}"
|
UndefinedVarError = "Required var '{}' not found in config:\nVars supplied to {} = {}"
|
||||||
_VAR_NOTSET = object()
|
_VAR_NOTSET = object()
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
@@ -557,9 +558,9 @@ class BaseContext(metaclass=ContextMeta):
|
|||||||
{% endmacro %}"
|
{% endmacro %}"
|
||||||
"""
|
"""
|
||||||
if info:
|
if info:
|
||||||
fire_event(MacroEventInfo(msg=msg))
|
fire_event(JinjaLogInfo(msg=msg))
|
||||||
else:
|
else:
|
||||||
fire_event(MacroEventDebug(msg=msg))
|
fire_event(JinjaLogDebug(msg=msg))
|
||||||
return ""
|
return ""
|
||||||
|
|
||||||
@contextproperty
|
@contextproperty
|
||||||
@@ -687,6 +688,19 @@ class BaseContext(metaclass=ContextMeta):
|
|||||||
dict_diff.update({k: dict_a[k]})
|
dict_diff.update({k: dict_a[k]})
|
||||||
return dict_diff
|
return dict_diff
|
||||||
|
|
||||||
|
@contextmember
|
||||||
|
@staticmethod
|
||||||
|
def local_md5(value: str) -> str:
|
||||||
|
"""Calculates an MD5 hash of the given string.
|
||||||
|
It's called "local_md5" to emphasize that it runs locally in dbt (in jinja context) and not an MD5 SQL command.
|
||||||
|
|
||||||
|
:param value: The value to hash
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
{% set value_hash = local_md5("hello world") %}
|
||||||
|
"""
|
||||||
|
return utils.md5(value)
|
||||||
|
|
||||||
|
|
||||||
def generate_base_context(cli_vars: Dict[str, Any]) -> Dict[str, Any]:
|
def generate_base_context(cli_vars: Dict[str, Any]) -> Dict[str, Any]:
|
||||||
ctx = BaseContext(cli_vars)
|
ctx = BaseContext(cli_vars)
|
||||||
|
|||||||
@@ -45,6 +45,8 @@ class UnrenderedConfig(ConfigSource):
|
|||||||
model_configs = unrendered.get("tests")
|
model_configs = unrendered.get("tests")
|
||||||
elif resource_type == NodeType.Metric:
|
elif resource_type == NodeType.Metric:
|
||||||
model_configs = unrendered.get("metrics")
|
model_configs = unrendered.get("metrics")
|
||||||
|
elif resource_type == NodeType.Entity:
|
||||||
|
model_configs = unrendered.get("entities")
|
||||||
elif resource_type == NodeType.Exposure:
|
elif resource_type == NodeType.Exposure:
|
||||||
model_configs = unrendered.get("exposures")
|
model_configs = unrendered.get("exposures")
|
||||||
else:
|
else:
|
||||||
@@ -70,6 +72,8 @@ class RenderedConfig(ConfigSource):
|
|||||||
model_configs = self.project.tests
|
model_configs = self.project.tests
|
||||||
elif resource_type == NodeType.Metric:
|
elif resource_type == NodeType.Metric:
|
||||||
model_configs = self.project.metrics
|
model_configs = self.project.metrics
|
||||||
|
elif resource_type == NodeType.Entity:
|
||||||
|
model_configs = self.project.entities
|
||||||
elif resource_type == NodeType.Exposure:
|
elif resource_type == NodeType.Exposure:
|
||||||
model_configs = self.project.exposures
|
model_configs = self.project.exposures
|
||||||
else:
|
else:
|
||||||
|
|||||||
@@ -37,11 +37,12 @@ from dbt.contracts.graph.parsed import (
|
|||||||
ParsedMacro,
|
ParsedMacro,
|
||||||
ParsedExposure,
|
ParsedExposure,
|
||||||
ParsedMetric,
|
ParsedMetric,
|
||||||
|
ParsedEntity,
|
||||||
ParsedSeedNode,
|
ParsedSeedNode,
|
||||||
ParsedSourceDefinition,
|
ParsedSourceDefinition,
|
||||||
)
|
)
|
||||||
from dbt.contracts.graph.metrics import MetricReference, ResolvedMetricReference
|
from dbt.contracts.graph.metrics import MetricReference, ResolvedMetricReference
|
||||||
from dbt.contracts.util import get_metadata_env
|
from dbt.events.functions import get_metadata_vars
|
||||||
from dbt.exceptions import (
|
from dbt.exceptions import (
|
||||||
CompilationException,
|
CompilationException,
|
||||||
ParsingException,
|
ParsingException,
|
||||||
@@ -53,10 +54,8 @@ from dbt.exceptions import (
|
|||||||
raise_compiler_error,
|
raise_compiler_error,
|
||||||
ref_invalid_args,
|
ref_invalid_args,
|
||||||
metric_invalid_args,
|
metric_invalid_args,
|
||||||
ref_target_not_found,
|
target_not_found,
|
||||||
metric_target_not_found,
|
|
||||||
ref_bad_context,
|
ref_bad_context,
|
||||||
source_target_not_found,
|
|
||||||
wrapped_exports,
|
wrapped_exports,
|
||||||
raise_parsing_error,
|
raise_parsing_error,
|
||||||
disallow_secret_env_var,
|
disallow_secret_env_var,
|
||||||
@@ -183,7 +182,7 @@ class BaseDatabaseWrapper:
|
|||||||
return macro
|
return macro
|
||||||
|
|
||||||
searched = ", ".join(repr(a) for a in attempts)
|
searched = ", ".join(repr(a) for a in attempts)
|
||||||
msg = f"In dispatch: No macro named '{macro_name}' found\n" f" Searched for: {searched}"
|
msg = f"In dispatch: No macro named '{macro_name}' found\n Searched for: {searched}"
|
||||||
raise CompilationException(msg)
|
raise CompilationException(msg)
|
||||||
|
|
||||||
|
|
||||||
@@ -221,12 +220,12 @@ class BaseRefResolver(BaseResolver):
|
|||||||
def validate_args(self, name: str, package: Optional[str]):
|
def validate_args(self, name: str, package: Optional[str]):
|
||||||
if not isinstance(name, str):
|
if not isinstance(name, str):
|
||||||
raise CompilationException(
|
raise CompilationException(
|
||||||
f"The name argument to ref() must be a string, got " f"{type(name)}"
|
f"The name argument to ref() must be a string, got {type(name)}"
|
||||||
)
|
)
|
||||||
|
|
||||||
if package is not None and not isinstance(package, str):
|
if package is not None and not isinstance(package, str):
|
||||||
raise CompilationException(
|
raise CompilationException(
|
||||||
f"The package argument to ref() must be a string or None, got " f"{type(package)}"
|
f"The package argument to ref() must be a string or None, got {type(package)}"
|
||||||
)
|
)
|
||||||
|
|
||||||
def __call__(self, *args: str) -> RelationProxy:
|
def __call__(self, *args: str) -> RelationProxy:
|
||||||
@@ -303,12 +302,10 @@ class BaseMetricResolver(BaseResolver):
|
|||||||
self.validate_args(name, package)
|
self.validate_args(name, package)
|
||||||
return self.resolve(name, package)
|
return self.resolve(name, package)
|
||||||
|
|
||||||
|
|
||||||
class Config(Protocol):
|
class Config(Protocol):
|
||||||
def __init__(self, model, context_config: Optional[ContextConfig]):
|
def __init__(self, model, context_config: Optional[ContextConfig]):
|
||||||
...
|
...
|
||||||
|
|
||||||
|
|
||||||
# Implementation of "config(..)" calls in models
|
# Implementation of "config(..)" calls in models
|
||||||
class ParseConfigObject(Config):
|
class ParseConfigObject(Config):
|
||||||
def __init__(self, model, context_config: Optional[ContextConfig]):
|
def __init__(self, model, context_config: Optional[ContextConfig]):
|
||||||
@@ -477,10 +474,11 @@ class RuntimeRefResolver(BaseRefResolver):
|
|||||||
)
|
)
|
||||||
|
|
||||||
if target_model is None or isinstance(target_model, Disabled):
|
if target_model is None or isinstance(target_model, Disabled):
|
||||||
ref_target_not_found(
|
target_not_found(
|
||||||
self.model,
|
node=self.model,
|
||||||
target_name,
|
target_name=target_name,
|
||||||
target_package,
|
target_kind="node",
|
||||||
|
target_package=target_package,
|
||||||
disabled=isinstance(target_model, Disabled),
|
disabled=isinstance(target_model, Disabled),
|
||||||
)
|
)
|
||||||
self.validate(target_model, target_name, target_package)
|
self.validate(target_model, target_name, target_package)
|
||||||
@@ -542,10 +540,11 @@ class RuntimeSourceResolver(BaseSourceResolver):
|
|||||||
)
|
)
|
||||||
|
|
||||||
if target_source is None or isinstance(target_source, Disabled):
|
if target_source is None or isinstance(target_source, Disabled):
|
||||||
source_target_not_found(
|
target_not_found(
|
||||||
self.model,
|
node=self.model,
|
||||||
source_name,
|
target_name=f"{source_name}.{table_name}",
|
||||||
table_name,
|
target_kind="source",
|
||||||
|
disabled=(isinstance(target_source, Disabled)),
|
||||||
)
|
)
|
||||||
return self.Relation.create_from_source(target_source)
|
return self.Relation.create_from_source(target_source)
|
||||||
|
|
||||||
@@ -568,11 +567,11 @@ class RuntimeMetricResolver(BaseMetricResolver):
|
|||||||
)
|
)
|
||||||
|
|
||||||
if target_metric is None or isinstance(target_metric, Disabled):
|
if target_metric is None or isinstance(target_metric, Disabled):
|
||||||
# TODO : Use a different exception!!
|
target_not_found(
|
||||||
metric_target_not_found(
|
node=self.model,
|
||||||
self.model,
|
target_name=target_name,
|
||||||
target_name,
|
target_kind="metric",
|
||||||
target_package,
|
target_package=target_package,
|
||||||
)
|
)
|
||||||
|
|
||||||
return ResolvedMetricReference(target_metric, self.manifest, self.Relation)
|
return ResolvedMetricReference(target_metric, self.manifest, self.Relation)
|
||||||
@@ -713,7 +712,7 @@ class ProviderContext(ManifestContext):
|
|||||||
|
|
||||||
@contextproperty
|
@contextproperty
|
||||||
def dbt_metadata_envs(self) -> Dict[str, str]:
|
def dbt_metadata_envs(self) -> Dict[str, str]:
|
||||||
return get_metadata_env()
|
return get_metadata_vars()
|
||||||
|
|
||||||
@contextproperty
|
@contextproperty
|
||||||
def invocation_args_dict(self):
|
def invocation_args_dict(self):
|
||||||
@@ -803,6 +802,7 @@ class ProviderContext(ManifestContext):
|
|||||||
raise_compiler_error(
|
raise_compiler_error(
|
||||||
"can only load_agate_table for seeds (got a {})".format(self.model.resource_type)
|
"can only load_agate_table for seeds (got a {})".format(self.model.resource_type)
|
||||||
)
|
)
|
||||||
|
assert self.model.root_path
|
||||||
path = os.path.join(self.model.root_path, self.model.original_file_path)
|
path = os.path.join(self.model.root_path, self.model.original_file_path)
|
||||||
column_types = self.model.config.column_types
|
column_types = self.model.config.column_types
|
||||||
try:
|
try:
|
||||||
@@ -1434,6 +1434,14 @@ class ExposureSourceResolver(BaseResolver):
|
|||||||
return ""
|
return ""
|
||||||
|
|
||||||
|
|
||||||
|
class ExposureMetricResolver(BaseResolver):
|
||||||
|
def __call__(self, *args) -> str:
|
||||||
|
if len(args) not in (1, 2):
|
||||||
|
metric_invalid_args(self.model, args)
|
||||||
|
self.model.metrics.append(list(args))
|
||||||
|
return ""
|
||||||
|
|
||||||
|
|
||||||
def generate_parse_exposure(
|
def generate_parse_exposure(
|
||||||
exposure: ParsedExposure,
|
exposure: ParsedExposure,
|
||||||
config: RuntimeConfig,
|
config: RuntimeConfig,
|
||||||
@@ -1454,6 +1462,12 @@ def generate_parse_exposure(
|
|||||||
project,
|
project,
|
||||||
manifest,
|
manifest,
|
||||||
),
|
),
|
||||||
|
"metric": ExposureMetricResolver(
|
||||||
|
None,
|
||||||
|
exposure,
|
||||||
|
project,
|
||||||
|
manifest,
|
||||||
|
),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@@ -1477,7 +1491,6 @@ class MetricRefResolver(BaseResolver):
|
|||||||
"the name argument to ref() must be a string"
|
"the name argument to ref() must be a string"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def generate_parse_metrics(
|
def generate_parse_metrics(
|
||||||
metric: ParsedMetric,
|
metric: ParsedMetric,
|
||||||
config: RuntimeConfig,
|
config: RuntimeConfig,
|
||||||
@@ -1500,6 +1513,41 @@ def generate_parse_metrics(
|
|||||||
),
|
),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
class EntityRefResolver(BaseResolver):
|
||||||
|
def __call__(self, *args) -> str:
|
||||||
|
package = None
|
||||||
|
if len(args) == 1:
|
||||||
|
name = args[0]
|
||||||
|
elif len(args) == 2:
|
||||||
|
package, name = args
|
||||||
|
else:
|
||||||
|
ref_invalid_args(self.model, args)
|
||||||
|
self.validate_args(name, package)
|
||||||
|
self.model.refs.append(list(args))
|
||||||
|
return ""
|
||||||
|
|
||||||
|
def validate_args(self, name, package):
|
||||||
|
if not isinstance(name, str):
|
||||||
|
raise ParsingException(
|
||||||
|
f"In the entity associated with {self.model.original_file_path} "
|
||||||
|
"the name argument to ref() must be a string"
|
||||||
|
)
|
||||||
|
|
||||||
|
def generate_parse_entities(
|
||||||
|
entity: ParsedEntity,
|
||||||
|
config: RuntimeConfig,
|
||||||
|
manifest: Manifest,
|
||||||
|
package_name: str,
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
project = config.load_dependencies()[package_name]
|
||||||
|
return {
|
||||||
|
"ref": EntityRefResolver(
|
||||||
|
None,
|
||||||
|
entity,
|
||||||
|
project,
|
||||||
|
manifest,
|
||||||
|
),
|
||||||
|
}
|
||||||
|
|
||||||
# This class is currently used by the schema parser in order
|
# This class is currently used by the schema parser in order
|
||||||
# to limit the number of macros in the context by using
|
# to limit the number of macros in the context by using
|
||||||
|
|||||||
@@ -94,7 +94,7 @@ class Connection(ExtensibleDbtClassMixin, Replaceable):
|
|||||||
self._handle.resolve(self)
|
self._handle.resolve(self)
|
||||||
except RecursionError as exc:
|
except RecursionError as exc:
|
||||||
raise InternalException(
|
raise InternalException(
|
||||||
"A connection's open() method attempted to read the " "handle value"
|
"A connection's open() method attempted to read the handle value"
|
||||||
) from exc
|
) from exc
|
||||||
return self._handle
|
return self._handle
|
||||||
|
|
||||||
|
|||||||
@@ -1,18 +1,16 @@
|
|||||||
import hashlib
|
import hashlib
|
||||||
import os
|
import os
|
||||||
from dataclasses import dataclass, field
|
from dataclasses import dataclass, field
|
||||||
|
|
||||||
from mashumaro.types import SerializableType
|
from mashumaro.types import SerializableType
|
||||||
from typing import List, Optional, Union, Dict, Any
|
from typing import List, Optional, Union, Dict, Any
|
||||||
|
|
||||||
|
from dbt.constants import MAXIMUM_SEED_SIZE
|
||||||
from dbt.dataclass_schema import dbtClassMixin, StrEnum
|
from dbt.dataclass_schema import dbtClassMixin, StrEnum
|
||||||
|
|
||||||
from .util import SourceKey
|
from .util import SourceKey
|
||||||
|
|
||||||
|
|
||||||
MAXIMUM_SEED_SIZE = 1 * 1024 * 1024
|
|
||||||
MAXIMUM_SEED_SIZE_NAME = "1MB"
|
|
||||||
|
|
||||||
|
|
||||||
class ParseFileType(StrEnum):
|
class ParseFileType(StrEnum):
|
||||||
Macro = "macro"
|
Macro = "macro"
|
||||||
Model = "model"
|
Model = "model"
|
||||||
@@ -229,6 +227,7 @@ class SchemaSourceFile(BaseSourceFile):
|
|||||||
sources: List[str] = field(default_factory=list)
|
sources: List[str] = field(default_factory=list)
|
||||||
exposures: List[str] = field(default_factory=list)
|
exposures: List[str] = field(default_factory=list)
|
||||||
metrics: List[str] = field(default_factory=list)
|
metrics: List[str] = field(default_factory=list)
|
||||||
|
entities: List[str] = field(default_factory=list)
|
||||||
# node patches contain models, seeds, snapshots, analyses
|
# node patches contain models, seeds, snapshots, analyses
|
||||||
ndp: List[str] = field(default_factory=list)
|
ndp: List[str] = field(default_factory=list)
|
||||||
# any macro patches in this file by macro unique_id.
|
# any macro patches in this file by macro unique_id.
|
||||||
|
|||||||
@@ -7,6 +7,7 @@ from dbt.contracts.graph.parsed import (
|
|||||||
ParsedModelNode,
|
ParsedModelNode,
|
||||||
ParsedExposure,
|
ParsedExposure,
|
||||||
ParsedMetric,
|
ParsedMetric,
|
||||||
|
ParsedEntity,
|
||||||
ParsedResource,
|
ParsedResource,
|
||||||
ParsedRPCNode,
|
ParsedRPCNode,
|
||||||
ParsedSqlNode,
|
ParsedSqlNode,
|
||||||
@@ -97,6 +98,7 @@ class CompiledSeedNode(CompiledNode):
|
|||||||
# keep this in sync with ParsedSeedNode!
|
# keep this in sync with ParsedSeedNode!
|
||||||
resource_type: NodeType = field(metadata={"restrict": [NodeType.Seed]})
|
resource_type: NodeType = field(metadata={"restrict": [NodeType.Seed]})
|
||||||
config: SeedConfig = field(default_factory=SeedConfig)
|
config: SeedConfig = field(default_factory=SeedConfig)
|
||||||
|
root_path: Optional[str] = None
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def empty(self):
|
def empty(self):
|
||||||
@@ -232,4 +234,5 @@ GraphMemberNode = Union[
|
|||||||
CompileResultNode,
|
CompileResultNode,
|
||||||
ParsedExposure,
|
ParsedExposure,
|
||||||
ParsedMetric,
|
ParsedMetric,
|
||||||
|
ParsedEntity,
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -36,6 +36,7 @@ from dbt.contracts.graph.parsed import (
|
|||||||
ParsedGenericTestNode,
|
ParsedGenericTestNode,
|
||||||
ParsedExposure,
|
ParsedExposure,
|
||||||
ParsedMetric,
|
ParsedMetric,
|
||||||
|
ParsedEntity,
|
||||||
HasUniqueID,
|
HasUniqueID,
|
||||||
UnpatchedSourceDefinition,
|
UnpatchedSourceDefinition,
|
||||||
ManifestNodes,
|
ManifestNodes,
|
||||||
@@ -216,8 +217,39 @@ class MetricLookup(dbtClassMixin):
|
|||||||
)
|
)
|
||||||
return manifest.metrics[unique_id]
|
return manifest.metrics[unique_id]
|
||||||
|
|
||||||
|
class EntityLookup(dbtClassMixin):
|
||||||
|
def __init__(self, manifest: "Manifest"):
|
||||||
|
self.storage: Dict[str, Dict[PackageName, UniqueID]] = {}
|
||||||
|
self.populate(manifest)
|
||||||
|
|
||||||
# This handles both models/seeds/snapshots and sources/metrics/exposures
|
def get_unique_id(self, search_name, package: Optional[PackageName]):
|
||||||
|
return find_unique_id_for_package(self.storage, search_name, package)
|
||||||
|
|
||||||
|
def find(self, search_name, package: Optional[PackageName], manifest: "Manifest"):
|
||||||
|
unique_id = self.get_unique_id(search_name, package)
|
||||||
|
if unique_id is not None:
|
||||||
|
return self.perform_lookup(unique_id, manifest)
|
||||||
|
return None
|
||||||
|
|
||||||
|
def add_entity(self, entity: ParsedEntity):
|
||||||
|
if entity.search_name not in self.storage:
|
||||||
|
self.storage[entity.search_name] = {}
|
||||||
|
|
||||||
|
self.storage[entity.search_name][entity.package_name] = entity.unique_id
|
||||||
|
|
||||||
|
def populate(self, manifest):
|
||||||
|
for entity in manifest.entities.values():
|
||||||
|
if hasattr(entity, "name"):
|
||||||
|
self.add_entity(entity)
|
||||||
|
|
||||||
|
def perform_lookup(self, unique_id: UniqueID, manifest: "Manifest") -> ParsedEntity:
|
||||||
|
if unique_id not in manifest.entities:
|
||||||
|
raise dbt.exceptions.InternalException(
|
||||||
|
f"Entity {unique_id} found in cache but not found in manifest"
|
||||||
|
)
|
||||||
|
return manifest.entities[unique_id]
|
||||||
|
|
||||||
|
# This handles both models/seeds/snapshots and sources/metrics/entities/exposures
|
||||||
class DisabledLookup(dbtClassMixin):
|
class DisabledLookup(dbtClassMixin):
|
||||||
def __init__(self, manifest: "Manifest"):
|
def __init__(self, manifest: "Manifest"):
|
||||||
self.storage: Dict[str, Dict[PackageName, List[Any]]] = {}
|
self.storage: Dict[str, Dict[PackageName, List[Any]]] = {}
|
||||||
@@ -467,6 +499,7 @@ class Disabled(Generic[D]):
|
|||||||
|
|
||||||
MaybeMetricNode = Optional[Union[ParsedMetric, Disabled[ParsedMetric]]]
|
MaybeMetricNode = Optional[Union[ParsedMetric, Disabled[ParsedMetric]]]
|
||||||
|
|
||||||
|
MaybeEntityNode = Optional[Union[ParsedEntity, Disabled[ParsedEntity]]]
|
||||||
|
|
||||||
MaybeDocumentation = Optional[ParsedDocumentation]
|
MaybeDocumentation = Optional[ParsedDocumentation]
|
||||||
|
|
||||||
@@ -499,7 +532,7 @@ def _update_into(dest: MutableMapping[str, T], new_item: T):
|
|||||||
existing = dest[unique_id]
|
existing = dest[unique_id]
|
||||||
if new_item.original_file_path != existing.original_file_path:
|
if new_item.original_file_path != existing.original_file_path:
|
||||||
raise dbt.exceptions.RuntimeException(
|
raise dbt.exceptions.RuntimeException(
|
||||||
f"cannot update a {new_item.resource_type} to have a new file " f"path!"
|
f"cannot update a {new_item.resource_type} to have a new file path!"
|
||||||
)
|
)
|
||||||
dest[unique_id] = new_item
|
dest[unique_id] = new_item
|
||||||
|
|
||||||
@@ -611,6 +644,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
|||||||
docs: MutableMapping[str, ParsedDocumentation] = field(default_factory=dict)
|
docs: MutableMapping[str, ParsedDocumentation] = field(default_factory=dict)
|
||||||
exposures: MutableMapping[str, ParsedExposure] = field(default_factory=dict)
|
exposures: MutableMapping[str, ParsedExposure] = field(default_factory=dict)
|
||||||
metrics: MutableMapping[str, ParsedMetric] = field(default_factory=dict)
|
metrics: MutableMapping[str, ParsedMetric] = field(default_factory=dict)
|
||||||
|
entities: MutableMapping[str, ParsedEntity] = field(default_factory=dict)
|
||||||
selectors: MutableMapping[str, Any] = field(default_factory=dict)
|
selectors: MutableMapping[str, Any] = field(default_factory=dict)
|
||||||
files: MutableMapping[str, AnySourceFile] = field(default_factory=dict)
|
files: MutableMapping[str, AnySourceFile] = field(default_factory=dict)
|
||||||
metadata: ManifestMetadata = field(default_factory=ManifestMetadata)
|
metadata: ManifestMetadata = field(default_factory=ManifestMetadata)
|
||||||
@@ -632,6 +666,9 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
|||||||
_metric_lookup: Optional[MetricLookup] = field(
|
_metric_lookup: Optional[MetricLookup] = field(
|
||||||
default=None, metadata={"serialize": lambda x: None, "deserialize": lambda x: None}
|
default=None, metadata={"serialize": lambda x: None, "deserialize": lambda x: None}
|
||||||
)
|
)
|
||||||
|
_entity_lookup: Optional[EntityLookup] = field(
|
||||||
|
default=None, metadata={"serialize": lambda x: None, "deserialize": lambda x: None}
|
||||||
|
)
|
||||||
_disabled_lookup: Optional[DisabledLookup] = field(
|
_disabled_lookup: Optional[DisabledLookup] = field(
|
||||||
default=None, metadata={"serialize": lambda x: None, "deserialize": lambda x: None}
|
default=None, metadata={"serialize": lambda x: None, "deserialize": lambda x: None}
|
||||||
)
|
)
|
||||||
@@ -682,6 +719,9 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
|||||||
def update_metric(self, new_metric: ParsedMetric):
|
def update_metric(self, new_metric: ParsedMetric):
|
||||||
_update_into(self.metrics, new_metric)
|
_update_into(self.metrics, new_metric)
|
||||||
|
|
||||||
|
def update_entity(self, new_entity: ParsedEntity):
|
||||||
|
_update_into(self.entities, new_entity)
|
||||||
|
|
||||||
def update_node(self, new_node: ManifestNode):
|
def update_node(self, new_node: ManifestNode):
|
||||||
_update_into(self.nodes, new_node)
|
_update_into(self.nodes, new_node)
|
||||||
|
|
||||||
@@ -697,6 +737,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
|||||||
self.flat_graph = {
|
self.flat_graph = {
|
||||||
"exposures": {k: v.to_dict(omit_none=False) for k, v in self.exposures.items()},
|
"exposures": {k: v.to_dict(omit_none=False) for k, v in self.exposures.items()},
|
||||||
"metrics": {k: v.to_dict(omit_none=False) for k, v in self.metrics.items()},
|
"metrics": {k: v.to_dict(omit_none=False) for k, v in self.metrics.items()},
|
||||||
|
"entities": {k: v.to_dict(omit_none=False) for k, v in self.entities.items()},
|
||||||
"nodes": {k: v.to_dict(omit_none=False) for k, v in self.nodes.items()},
|
"nodes": {k: v.to_dict(omit_none=False) for k, v in self.nodes.items()},
|
||||||
"sources": {k: v.to_dict(omit_none=False) for k, v in self.sources.items()},
|
"sources": {k: v.to_dict(omit_none=False) for k, v in self.sources.items()},
|
||||||
}
|
}
|
||||||
@@ -759,6 +800,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
|||||||
self.nodes.values(),
|
self.nodes.values(),
|
||||||
self.sources.values(),
|
self.sources.values(),
|
||||||
self.metrics.values(),
|
self.metrics.values(),
|
||||||
|
self.entities.values(),
|
||||||
)
|
)
|
||||||
for resource in all_resources:
|
for resource in all_resources:
|
||||||
resource_type_plural = resource.resource_type.pluralize()
|
resource_type_plural = resource.resource_type.pluralize()
|
||||||
@@ -780,19 +822,22 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
|||||||
return frozenset(x.database for x in chain(self.nodes.values(), self.sources.values()))
|
return frozenset(x.database for x in chain(self.nodes.values(), self.sources.values()))
|
||||||
|
|
||||||
def deepcopy(self):
|
def deepcopy(self):
|
||||||
return Manifest(
|
copy = Manifest(
|
||||||
nodes={k: _deepcopy(v) for k, v in self.nodes.items()},
|
nodes={k: _deepcopy(v) for k, v in self.nodes.items()},
|
||||||
sources={k: _deepcopy(v) for k, v in self.sources.items()},
|
sources={k: _deepcopy(v) for k, v in self.sources.items()},
|
||||||
macros={k: _deepcopy(v) for k, v in self.macros.items()},
|
macros={k: _deepcopy(v) for k, v in self.macros.items()},
|
||||||
docs={k: _deepcopy(v) for k, v in self.docs.items()},
|
docs={k: _deepcopy(v) for k, v in self.docs.items()},
|
||||||
exposures={k: _deepcopy(v) for k, v in self.exposures.items()},
|
exposures={k: _deepcopy(v) for k, v in self.exposures.items()},
|
||||||
metrics={k: _deepcopy(v) for k, v in self.metrics.items()},
|
metrics={k: _deepcopy(v) for k, v in self.metrics.items()},
|
||||||
|
entities={k: _deepcopy(v) for k, v in self.entities.items()},
|
||||||
selectors={k: _deepcopy(v) for k, v in self.selectors.items()},
|
selectors={k: _deepcopy(v) for k, v in self.selectors.items()},
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
disabled={k: _deepcopy(v) for k, v in self.disabled.items()},
|
disabled={k: _deepcopy(v) for k, v in self.disabled.items()},
|
||||||
files={k: _deepcopy(v) for k, v in self.files.items()},
|
files={k: _deepcopy(v) for k, v in self.files.items()},
|
||||||
state_check=_deepcopy(self.state_check),
|
state_check=_deepcopy(self.state_check),
|
||||||
)
|
)
|
||||||
|
copy.build_flat_graph()
|
||||||
|
return copy
|
||||||
|
|
||||||
def build_parent_and_child_maps(self):
|
def build_parent_and_child_maps(self):
|
||||||
edge_members = list(
|
edge_members = list(
|
||||||
@@ -801,6 +846,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
|||||||
self.sources.values(),
|
self.sources.values(),
|
||||||
self.exposures.values(),
|
self.exposures.values(),
|
||||||
self.metrics.values(),
|
self.metrics.values(),
|
||||||
|
self.entities.values(),
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
forward_edges, backward_edges = build_node_edges(edge_members)
|
forward_edges, backward_edges = build_node_edges(edge_members)
|
||||||
@@ -826,6 +872,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
|||||||
docs=self.docs,
|
docs=self.docs,
|
||||||
exposures=self.exposures,
|
exposures=self.exposures,
|
||||||
metrics=self.metrics,
|
metrics=self.metrics,
|
||||||
|
entities=self.entities,
|
||||||
selectors=self.selectors,
|
selectors=self.selectors,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
disabled=self.disabled,
|
disabled=self.disabled,
|
||||||
@@ -847,6 +894,8 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
|||||||
return self.exposures[unique_id]
|
return self.exposures[unique_id]
|
||||||
elif unique_id in self.metrics:
|
elif unique_id in self.metrics:
|
||||||
return self.metrics[unique_id]
|
return self.metrics[unique_id]
|
||||||
|
elif unique_id in self.entities:
|
||||||
|
return self.entities[unique_id]
|
||||||
else:
|
else:
|
||||||
# something terrible has happened
|
# something terrible has happened
|
||||||
raise dbt.exceptions.InternalException(
|
raise dbt.exceptions.InternalException(
|
||||||
@@ -883,6 +932,12 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
|||||||
self._metric_lookup = MetricLookup(self)
|
self._metric_lookup = MetricLookup(self)
|
||||||
return self._metric_lookup
|
return self._metric_lookup
|
||||||
|
|
||||||
|
@property
|
||||||
|
def entity_lookup(self) -> EntityLookup:
|
||||||
|
if self._entity_lookup is None:
|
||||||
|
self._entity_lookup = EntityLookup(self)
|
||||||
|
return self._entity_lookup
|
||||||
|
|
||||||
def rebuild_ref_lookup(self):
|
def rebuild_ref_lookup(self):
|
||||||
self._ref_lookup = RefableLookup(self)
|
self._ref_lookup = RefableLookup(self)
|
||||||
|
|
||||||
@@ -983,6 +1038,32 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
|||||||
return Disabled(disabled[0])
|
return Disabled(disabled[0])
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
def resolve_entity(
|
||||||
|
self,
|
||||||
|
target_entity_name: str,
|
||||||
|
target_entity_package: Optional[str],
|
||||||
|
current_project: str,
|
||||||
|
node_package: str,
|
||||||
|
) -> MaybeEntityNode:
|
||||||
|
|
||||||
|
entity: Optional[ParsedEntity] = None
|
||||||
|
disabled: Optional[List[ParsedEntity]] = None
|
||||||
|
|
||||||
|
candidates = _search_packages(current_project, node_package, target_entity_package)
|
||||||
|
for pkg in candidates:
|
||||||
|
entity = self.entity_lookup.find(target_entity_name, pkg, self)
|
||||||
|
|
||||||
|
if entity is not None and entity.config.enabled:
|
||||||
|
return entity
|
||||||
|
|
||||||
|
# it's possible that the node is disabled
|
||||||
|
if disabled is None:
|
||||||
|
disabled = self.disabled_lookup.find(f"{target_entity_name}", pkg)
|
||||||
|
if disabled:
|
||||||
|
return Disabled(disabled[0])
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
# Called by DocsRuntimeContext.doc
|
# Called by DocsRuntimeContext.doc
|
||||||
def resolve_doc(
|
def resolve_doc(
|
||||||
self,
|
self,
|
||||||
@@ -1009,6 +1090,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
|||||||
adapter,
|
adapter,
|
||||||
other: "WritableManifest",
|
other: "WritableManifest",
|
||||||
selected: AbstractSet[UniqueID],
|
selected: AbstractSet[UniqueID],
|
||||||
|
favor_state: bool = False,
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Given the selected unique IDs and a writable manifest, update this
|
"""Given the selected unique IDs and a writable manifest, update this
|
||||||
manifest by replacing any unselected nodes with their counterpart.
|
manifest by replacing any unselected nodes with their counterpart.
|
||||||
@@ -1023,7 +1105,10 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
|||||||
node.resource_type in refables
|
node.resource_type in refables
|
||||||
and not node.is_ephemeral
|
and not node.is_ephemeral
|
||||||
and unique_id not in selected
|
and unique_id not in selected
|
||||||
and not adapter.get_relation(current.database, current.schema, current.identifier)
|
and (
|
||||||
|
not adapter.get_relation(current.database, current.schema, current.identifier)
|
||||||
|
or favor_state
|
||||||
|
)
|
||||||
):
|
):
|
||||||
merged.add(unique_id)
|
merged.add(unique_id)
|
||||||
self.nodes[unique_id] = node.replace(deferred=True)
|
self.nodes[unique_id] = node.replace(deferred=True)
|
||||||
@@ -1034,7 +1119,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
|||||||
|
|
||||||
# log up to 5 items
|
# log up to 5 items
|
||||||
sample = list(islice(merged, 5))
|
sample = list(islice(merged, 5))
|
||||||
fire_event(MergedFromState(nbr_merged=len(merged), sample=sample))
|
fire_event(MergedFromState(num_merged=len(merged), sample=sample))
|
||||||
|
|
||||||
# Methods that were formerly in ParseResult
|
# Methods that were formerly in ParseResult
|
||||||
|
|
||||||
@@ -1093,6 +1178,8 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
|||||||
source_file.add_test(node.unique_id, test_from)
|
source_file.add_test(node.unique_id, test_from)
|
||||||
if isinstance(node, ParsedMetric):
|
if isinstance(node, ParsedMetric):
|
||||||
source_file.metrics.append(node.unique_id)
|
source_file.metrics.append(node.unique_id)
|
||||||
|
if isinstance(node, ParsedEntity):
|
||||||
|
source_file.entities.append(node.unique_id)
|
||||||
if isinstance(node, ParsedExposure):
|
if isinstance(node, ParsedExposure):
|
||||||
source_file.exposures.append(node.unique_id)
|
source_file.exposures.append(node.unique_id)
|
||||||
else:
|
else:
|
||||||
@@ -1108,6 +1195,11 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
|||||||
self.metrics[metric.unique_id] = metric
|
self.metrics[metric.unique_id] = metric
|
||||||
source_file.metrics.append(metric.unique_id)
|
source_file.metrics.append(metric.unique_id)
|
||||||
|
|
||||||
|
def add_entity(self, source_file: SchemaSourceFile, entity: ParsedEntity):
|
||||||
|
_check_duplicates(entity, self.entities)
|
||||||
|
self.entities[entity.unique_id] = entity
|
||||||
|
source_file.entities.append(entity.unique_id)
|
||||||
|
|
||||||
def add_disabled_nofile(self, node: GraphMemberNode):
|
def add_disabled_nofile(self, node: GraphMemberNode):
|
||||||
# There can be multiple disabled nodes for the same unique_id
|
# There can be multiple disabled nodes for the same unique_id
|
||||||
if node.unique_id in self.disabled:
|
if node.unique_id in self.disabled:
|
||||||
@@ -1123,6 +1215,8 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
|||||||
source_file.add_test(node.unique_id, test_from)
|
source_file.add_test(node.unique_id, test_from)
|
||||||
if isinstance(node, ParsedMetric):
|
if isinstance(node, ParsedMetric):
|
||||||
source_file.metrics.append(node.unique_id)
|
source_file.metrics.append(node.unique_id)
|
||||||
|
if isinstance(node, ParsedEntity):
|
||||||
|
source_file.entities.append(node.unique_id)
|
||||||
if isinstance(node, ParsedExposure):
|
if isinstance(node, ParsedExposure):
|
||||||
source_file.exposures.append(node.unique_id)
|
source_file.exposures.append(node.unique_id)
|
||||||
else:
|
else:
|
||||||
@@ -1150,6 +1244,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
|||||||
self.docs,
|
self.docs,
|
||||||
self.exposures,
|
self.exposures,
|
||||||
self.metrics,
|
self.metrics,
|
||||||
|
self.entities,
|
||||||
self.selectors,
|
self.selectors,
|
||||||
self.files,
|
self.files,
|
||||||
self.metadata,
|
self.metadata,
|
||||||
@@ -1162,6 +1257,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
|||||||
self._source_lookup,
|
self._source_lookup,
|
||||||
self._ref_lookup,
|
self._ref_lookup,
|
||||||
self._metric_lookup,
|
self._metric_lookup,
|
||||||
|
self._entity_lookup,
|
||||||
self._disabled_lookup,
|
self._disabled_lookup,
|
||||||
self._analysis_lookup,
|
self._analysis_lookup,
|
||||||
)
|
)
|
||||||
@@ -1181,7 +1277,7 @@ AnyManifest = Union[Manifest, MacroManifest]
|
|||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
@schema_version("manifest", 7)
|
@schema_version("manifest", 8)
|
||||||
class WritableManifest(ArtifactMixin):
|
class WritableManifest(ArtifactMixin):
|
||||||
nodes: Mapping[UniqueID, ManifestNode] = field(
|
nodes: Mapping[UniqueID, ManifestNode] = field(
|
||||||
metadata=dict(description=("The nodes defined in the dbt project and its dependencies"))
|
metadata=dict(description=("The nodes defined in the dbt project and its dependencies"))
|
||||||
@@ -1203,6 +1299,9 @@ class WritableManifest(ArtifactMixin):
|
|||||||
metrics: Mapping[UniqueID, ParsedMetric] = field(
|
metrics: Mapping[UniqueID, ParsedMetric] = field(
|
||||||
metadata=dict(description=("The metrics defined in the dbt project and its dependencies"))
|
metadata=dict(description=("The metrics defined in the dbt project and its dependencies"))
|
||||||
)
|
)
|
||||||
|
entities: Mapping[UniqueID, ParsedEntity] = field(
|
||||||
|
metadata=dict(description=("The entities defined in the dbt project and its dependencies"))
|
||||||
|
)
|
||||||
selectors: Mapping[UniqueID, Any] = field(
|
selectors: Mapping[UniqueID, Any] = field(
|
||||||
metadata=dict(description=("The selectors defined in selectors.yml"))
|
metadata=dict(description=("The selectors defined in selectors.yml"))
|
||||||
)
|
)
|
||||||
@@ -1227,7 +1326,7 @@ class WritableManifest(ArtifactMixin):
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def compatible_previous_versions(self):
|
def compatible_previous_versions(self):
|
||||||
return [("manifest", 4), ("manifest", 5), ("manifest", 6)]
|
return [("manifest", 4), ("manifest", 5), ("manifest", 6), ("manifest", 7)]
|
||||||
|
|
||||||
def __post_serialize__(self, dct):
|
def __post_serialize__(self, dct):
|
||||||
for unique_id, node in dct["nodes"].items():
|
for unique_id, node in dct["nodes"].items():
|
||||||
|
|||||||
@@ -367,6 +367,9 @@ class BaseConfig(AdditionalPropertiesAllowed, Replaceable):
|
|||||||
class MetricConfig(BaseConfig):
|
class MetricConfig(BaseConfig):
|
||||||
enabled: bool = True
|
enabled: bool = True
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class EntityConfig(BaseConfig):
|
||||||
|
enabled: bool = True
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
class ExposureConfig(BaseConfig):
|
class ExposureConfig(BaseConfig):
|
||||||
@@ -376,40 +379,6 @@ class ExposureConfig(BaseConfig):
|
|||||||
@dataclass
|
@dataclass
|
||||||
class SourceConfig(BaseConfig):
|
class SourceConfig(BaseConfig):
|
||||||
enabled: bool = True
|
enabled: bool = True
|
||||||
# to be implmented to complete CT-201
|
|
||||||
# quoting: Dict[str, Any] = field(
|
|
||||||
# default_factory=dict,
|
|
||||||
# metadata=MergeBehavior.Update.meta(),
|
|
||||||
# )
|
|
||||||
# freshness: Optional[Dict[str, Any]] = field(
|
|
||||||
# default=None,
|
|
||||||
# metadata=CompareBehavior.Exclude.meta(),
|
|
||||||
# )
|
|
||||||
# loader: Optional[str] = field(
|
|
||||||
# default=None,
|
|
||||||
# metadata=CompareBehavior.Exclude.meta(),
|
|
||||||
# )
|
|
||||||
# # TODO what type is this? docs say: "<column_name_or_expression>"
|
|
||||||
# loaded_at_field: Optional[str] = field(
|
|
||||||
# default=None,
|
|
||||||
# metadata=CompareBehavior.Exclude.meta(),
|
|
||||||
# )
|
|
||||||
# database: Optional[str] = field(
|
|
||||||
# default=None,
|
|
||||||
# metadata=CompareBehavior.Exclude.meta(),
|
|
||||||
# )
|
|
||||||
# schema: Optional[str] = field(
|
|
||||||
# default=None,
|
|
||||||
# metadata=CompareBehavior.Exclude.meta(),
|
|
||||||
# )
|
|
||||||
# meta: Dict[str, Any] = field(
|
|
||||||
# default_factory=dict,
|
|
||||||
# metadata=MergeBehavior.Update.meta(),
|
|
||||||
# )
|
|
||||||
# tags: Union[List[str], str] = field(
|
|
||||||
# default_factory=list_str,
|
|
||||||
# metadata=metas(ShowBehavior.Hide, MergeBehavior.Append, CompareBehavior.Exclude),
|
|
||||||
# )
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
@@ -529,6 +498,12 @@ class SeedConfig(NodeConfig):
|
|||||||
materialized: str = "seed"
|
materialized: str = "seed"
|
||||||
quote_columns: Optional[bool] = None
|
quote_columns: Optional[bool] = None
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def validate(cls, data):
|
||||||
|
super().validate(data)
|
||||||
|
if data.get("materialized") and data.get("materialized") != "seed":
|
||||||
|
raise ValidationError("A seed must have a materialized value of 'seed'")
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
class TestConfig(NodeAndTestConfig):
|
class TestConfig(NodeAndTestConfig):
|
||||||
@@ -568,6 +543,12 @@ class TestConfig(NodeAndTestConfig):
|
|||||||
return False
|
return False
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def validate(cls, data):
|
||||||
|
super().validate(data)
|
||||||
|
if data.get("materialized") and data.get("materialized") != "test":
|
||||||
|
raise ValidationError("A test must have a materialized value of 'test'")
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
class EmptySnapshotConfig(NodeConfig):
|
class EmptySnapshotConfig(NodeConfig):
|
||||||
@@ -604,7 +585,6 @@ class SnapshotConfig(EmptySnapshotConfig):
|
|||||||
f"Invalid value for 'check_cols': {data['check_cols']}. "
|
f"Invalid value for 'check_cols': {data['check_cols']}. "
|
||||||
"Expected 'all' or a list of strings."
|
"Expected 'all' or a list of strings."
|
||||||
)
|
)
|
||||||
|
|
||||||
elif data.get("strategy") == "timestamp":
|
elif data.get("strategy") == "timestamp":
|
||||||
if not data.get("updated_at"):
|
if not data.get("updated_at"):
|
||||||
raise ValidationError(
|
raise ValidationError(
|
||||||
@@ -616,6 +596,9 @@ class SnapshotConfig(EmptySnapshotConfig):
|
|||||||
# If the strategy is not 'check' or 'timestamp' it's a custom strategy,
|
# If the strategy is not 'check' or 'timestamp' it's a custom strategy,
|
||||||
# formerly supported with GenericSnapshotConfig
|
# formerly supported with GenericSnapshotConfig
|
||||||
|
|
||||||
|
if data.get("materialized") and data.get("materialized") != "snapshot":
|
||||||
|
raise ValidationError("A snapshot must have a materialized value of 'snapshot'")
|
||||||
|
|
||||||
def finalize_and_validate(self):
|
def finalize_and_validate(self):
|
||||||
data = self.to_dict(omit_none=True)
|
data = self.to_dict(omit_none=True)
|
||||||
self.validate(data)
|
self.validate(data)
|
||||||
@@ -624,6 +607,7 @@ class SnapshotConfig(EmptySnapshotConfig):
|
|||||||
|
|
||||||
RESOURCE_TYPES: Dict[NodeType, Type[BaseConfig]] = {
|
RESOURCE_TYPES: Dict[NodeType, Type[BaseConfig]] = {
|
||||||
NodeType.Metric: MetricConfig,
|
NodeType.Metric: MetricConfig,
|
||||||
|
NodeType.Entity: EntityConfig,
|
||||||
NodeType.Exposure: ExposureConfig,
|
NodeType.Exposure: ExposureConfig,
|
||||||
NodeType.Source: SourceConfig,
|
NodeType.Source: SourceConfig,
|
||||||
NodeType.Seed: SeedConfig,
|
NodeType.Seed: SeedConfig,
|
||||||
|
|||||||
@@ -18,7 +18,7 @@ from typing import (
|
|||||||
from dbt.dataclass_schema import dbtClassMixin, ExtensibleDbtClassMixin
|
from dbt.dataclass_schema import dbtClassMixin, ExtensibleDbtClassMixin
|
||||||
|
|
||||||
from dbt.clients.system import write_file
|
from dbt.clients.system import write_file
|
||||||
from dbt.contracts.files import FileHash, MAXIMUM_SEED_SIZE_NAME
|
from dbt.contracts.files import FileHash
|
||||||
from dbt.contracts.graph.unparsed import (
|
from dbt.contracts.graph.unparsed import (
|
||||||
UnparsedNode,
|
UnparsedNode,
|
||||||
UnparsedDocumentation,
|
UnparsedDocumentation,
|
||||||
@@ -38,9 +38,17 @@ from dbt.contracts.graph.unparsed import (
|
|||||||
MaturityType,
|
MaturityType,
|
||||||
MetricFilter,
|
MetricFilter,
|
||||||
MetricTime,
|
MetricTime,
|
||||||
|
EntityDimension
|
||||||
)
|
)
|
||||||
from dbt.contracts.util import Replaceable, AdditionalPropertiesMixin
|
from dbt.contracts.util import Replaceable, AdditionalPropertiesMixin
|
||||||
from dbt.exceptions import warn_or_error
|
from dbt.events.proto_types import NodeInfo
|
||||||
|
from dbt.events.functions import warn_or_error
|
||||||
|
from dbt.events.types import (
|
||||||
|
SeedIncreased,
|
||||||
|
SeedExceedsLimitSamePath,
|
||||||
|
SeedExceedsLimitAndPathChanged,
|
||||||
|
SeedExceedsLimitChecksumChanged,
|
||||||
|
)
|
||||||
from dbt import flags
|
from dbt import flags
|
||||||
from dbt.node_types import ModelLanguage, NodeType
|
from dbt.node_types import ModelLanguage, NodeType
|
||||||
|
|
||||||
@@ -51,6 +59,7 @@ from .model_config import (
|
|||||||
TestConfig,
|
TestConfig,
|
||||||
SourceConfig,
|
SourceConfig,
|
||||||
MetricConfig,
|
MetricConfig,
|
||||||
|
EntityConfig,
|
||||||
ExposureConfig,
|
ExposureConfig,
|
||||||
EmptySnapshotConfig,
|
EmptySnapshotConfig,
|
||||||
SnapshotConfig,
|
SnapshotConfig,
|
||||||
@@ -192,7 +201,8 @@ class NodeInfoMixin:
|
|||||||
"node_started_at": self._event_status.get("started_at"),
|
"node_started_at": self._event_status.get("started_at"),
|
||||||
"node_finished_at": self._event_status.get("finished_at"),
|
"node_finished_at": self._event_status.get("finished_at"),
|
||||||
}
|
}
|
||||||
return node_info
|
node_info_msg = NodeInfo(**node_info)
|
||||||
|
return node_info_msg
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
@@ -201,6 +211,7 @@ class ParsedNodeDefaults(NodeInfoMixin, ParsedNodeMandatory):
|
|||||||
refs: List[List[str]] = field(default_factory=list)
|
refs: List[List[str]] = field(default_factory=list)
|
||||||
sources: List[List[str]] = field(default_factory=list)
|
sources: List[List[str]] = field(default_factory=list)
|
||||||
metrics: List[List[str]] = field(default_factory=list)
|
metrics: List[List[str]] = field(default_factory=list)
|
||||||
|
entities: List[List[str]] = field(default_factory=list)
|
||||||
depends_on: DependsOn = field(default_factory=DependsOn)
|
depends_on: DependsOn = field(default_factory=DependsOn)
|
||||||
description: str = field(default="")
|
description: str = field(default="")
|
||||||
columns: Dict[str, ColumnInfo] = field(default_factory=dict)
|
columns: Dict[str, ColumnInfo] = field(default_factory=dict)
|
||||||
@@ -244,7 +255,7 @@ class ParsedNode(ParsedNodeDefaults, ParsedNodeMixins, SerializableType):
|
|||||||
@classmethod
|
@classmethod
|
||||||
def _deserialize(cls, dct: Dict[str, int]):
|
def _deserialize(cls, dct: Dict[str, int]):
|
||||||
# The serialized ParsedNodes do not differ from each other
|
# The serialized ParsedNodes do not differ from each other
|
||||||
# in fields that would allow 'from_dict' to distinguis
|
# in fields that would allow 'from_dict' to distinguish
|
||||||
# between them.
|
# between them.
|
||||||
resource_type = dct["resource_type"]
|
resource_type = dct["resource_type"]
|
||||||
if resource_type == "model":
|
if resource_type == "model":
|
||||||
@@ -373,30 +384,28 @@ def same_seeds(first: ParsedNode, second: ParsedNode) -> bool:
|
|||||||
if first.checksum.name == "path":
|
if first.checksum.name == "path":
|
||||||
msg: str
|
msg: str
|
||||||
if second.checksum.name != "path":
|
if second.checksum.name != "path":
|
||||||
msg = (
|
warn_or_error(
|
||||||
f"Found a seed ({first.package_name}.{first.name}) "
|
SeedIncreased(package_name=first.package_name, name=first.name), node=first
|
||||||
f">{MAXIMUM_SEED_SIZE_NAME} in size. The previous file was "
|
|
||||||
f"<={MAXIMUM_SEED_SIZE_NAME}, so it has changed"
|
|
||||||
)
|
)
|
||||||
elif result:
|
elif result:
|
||||||
msg = (
|
warn_or_error(
|
||||||
f"Found a seed ({first.package_name}.{first.name}) "
|
SeedExceedsLimitSamePath(package_name=first.package_name, name=first.name),
|
||||||
f">{MAXIMUM_SEED_SIZE_NAME} in size at the same path, dbt "
|
node=first,
|
||||||
f"cannot tell if it has changed: assuming they are the same"
|
|
||||||
)
|
)
|
||||||
elif not result:
|
elif not result:
|
||||||
msg = (
|
warn_or_error(
|
||||||
f"Found a seed ({first.package_name}.{first.name}) "
|
SeedExceedsLimitAndPathChanged(package_name=first.package_name, name=first.name),
|
||||||
f">{MAXIMUM_SEED_SIZE_NAME} in size. The previous file was in "
|
node=first,
|
||||||
f"a different location, assuming it has changed"
|
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
msg = (
|
warn_or_error(
|
||||||
f"Found a seed ({first.package_name}.{first.name}) "
|
SeedExceedsLimitChecksumChanged(
|
||||||
f">{MAXIMUM_SEED_SIZE_NAME} in size. The previous file had a "
|
package_name=first.package_name,
|
||||||
f"checksum type of {second.checksum.name}, so it has changed"
|
name=first.name,
|
||||||
|
checksum_name=second.checksum.name,
|
||||||
|
),
|
||||||
|
node=first,
|
||||||
)
|
)
|
||||||
warn_or_error(msg, node=first)
|
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
@@ -406,6 +415,9 @@ class ParsedSeedNode(ParsedNode):
|
|||||||
# keep this in sync with CompiledSeedNode!
|
# keep this in sync with CompiledSeedNode!
|
||||||
resource_type: NodeType = field(metadata={"restrict": [NodeType.Seed]})
|
resource_type: NodeType = field(metadata={"restrict": [NodeType.Seed]})
|
||||||
config: SeedConfig = field(default_factory=SeedConfig)
|
config: SeedConfig = field(default_factory=SeedConfig)
|
||||||
|
# seeds need the root_path because the contents are not loaded initially
|
||||||
|
# and we need the root_path to load the seed later
|
||||||
|
root_path: Optional[str] = None
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def empty(self):
|
def empty(self):
|
||||||
@@ -755,6 +767,7 @@ class ParsedExposure(UnparsedBaseNode, HasUniqueID, HasFqn):
|
|||||||
depends_on: DependsOn = field(default_factory=DependsOn)
|
depends_on: DependsOn = field(default_factory=DependsOn)
|
||||||
refs: List[List[str]] = field(default_factory=list)
|
refs: List[List[str]] = field(default_factory=list)
|
||||||
sources: List[List[str]] = field(default_factory=list)
|
sources: List[List[str]] = field(default_factory=list)
|
||||||
|
metrics: List[List[str]] = field(default_factory=list)
|
||||||
created_at: float = field(default_factory=lambda: time.time())
|
created_at: float = field(default_factory=lambda: time.time())
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@@ -835,7 +848,7 @@ class ParsedMetric(UnparsedBaseNode, HasUniqueID, HasFqn):
|
|||||||
resource_type: NodeType = NodeType.Metric
|
resource_type: NodeType = NodeType.Metric
|
||||||
meta: Dict[str, Any] = field(default_factory=dict)
|
meta: Dict[str, Any] = field(default_factory=dict)
|
||||||
tags: List[str] = field(default_factory=list)
|
tags: List[str] = field(default_factory=list)
|
||||||
config: MetricConfig = field(default_factory=MetricConfig)
|
config: EntityConfig = field(default_factory=EntityConfig)
|
||||||
unrendered_config: Dict[str, Any] = field(default_factory=dict)
|
unrendered_config: Dict[str, Any] = field(default_factory=dict)
|
||||||
sources: List[List[str]] = field(default_factory=list)
|
sources: List[List[str]] = field(default_factory=list)
|
||||||
depends_on: DependsOn = field(default_factory=DependsOn)
|
depends_on: DependsOn = field(default_factory=DependsOn)
|
||||||
@@ -908,6 +921,60 @@ class ParsedMetric(UnparsedBaseNode, HasUniqueID, HasFqn):
|
|||||||
and True
|
and True
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class ParsedEntity(UnparsedBaseNode, HasUniqueID, HasFqn):
|
||||||
|
name: str
|
||||||
|
model: str
|
||||||
|
description: str
|
||||||
|
dimensions: Dict[str, EntityDimension] = field(default_factory=dict)
|
||||||
|
model_unique_id: Optional[str] = None
|
||||||
|
resource_type: NodeType = NodeType.Metric
|
||||||
|
meta: Dict[str, Any] = field(default_factory=dict)
|
||||||
|
tags: List[str] = field(default_factory=list)
|
||||||
|
config: MetricConfig = field(default_factory=MetricConfig)
|
||||||
|
unrendered_config: Dict[str, Any] = field(default_factory=dict)
|
||||||
|
sources: List[List[str]] = field(default_factory=list)
|
||||||
|
depends_on: DependsOn = field(default_factory=DependsOn)
|
||||||
|
refs: List[List[str]] = field(default_factory=list)
|
||||||
|
entities: List[List[str]] = field(default_factory=list)
|
||||||
|
created_at: float = field(default_factory=lambda: time.time())
|
||||||
|
|
||||||
|
@property
|
||||||
|
def depends_on_nodes(self):
|
||||||
|
return self.depends_on.nodes
|
||||||
|
|
||||||
|
@property
|
||||||
|
def search_name(self):
|
||||||
|
return self.name
|
||||||
|
|
||||||
|
def same_model(self, old: "ParsedEntity") -> bool:
|
||||||
|
return self.model == old.model
|
||||||
|
|
||||||
|
def same_description(self, old: "ParsedEntity") -> bool:
|
||||||
|
return self.description == old.description
|
||||||
|
|
||||||
|
def same_dimensions(self, old: "ParsedEntity") -> bool:
|
||||||
|
return self.dimensions == old.dimensions
|
||||||
|
|
||||||
|
def same_config(self, old: "ParsedEntity") -> bool:
|
||||||
|
return self.config.same_contents(
|
||||||
|
self.unrendered_config,
|
||||||
|
old.unrendered_config,
|
||||||
|
)
|
||||||
|
|
||||||
|
def same_contents(self, old: Optional["ParsedEntity"]) -> bool:
|
||||||
|
# existing when it didn't before is a change!
|
||||||
|
# metadata/tags changes are not "changes"
|
||||||
|
if old is None:
|
||||||
|
return True
|
||||||
|
|
||||||
|
return (
|
||||||
|
self.same_model(old)
|
||||||
|
and self.same_description(old)
|
||||||
|
and self.same_dimensions(old)
|
||||||
|
and self.same_config(old)
|
||||||
|
and True
|
||||||
|
)
|
||||||
|
|
||||||
ManifestNodes = Union[
|
ManifestNodes = Union[
|
||||||
ParsedAnalysisNode,
|
ParsedAnalysisNode,
|
||||||
@@ -928,5 +995,6 @@ ParsedResource = Union[
|
|||||||
ParsedNode,
|
ParsedNode,
|
||||||
ParsedExposure,
|
ParsedExposure,
|
||||||
ParsedMetric,
|
ParsedMetric,
|
||||||
|
ParsedEntity,
|
||||||
ParsedSourceDefinition,
|
ParsedSourceDefinition,
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -24,7 +24,6 @@ from typing import Optional, List, Union, Dict, Any, Sequence
|
|||||||
@dataclass
|
@dataclass
|
||||||
class UnparsedBaseNode(dbtClassMixin, Replaceable):
|
class UnparsedBaseNode(dbtClassMixin, Replaceable):
|
||||||
package_name: str
|
package_name: str
|
||||||
root_path: str
|
|
||||||
path: str
|
path: str
|
||||||
original_file_path: str
|
original_file_path: str
|
||||||
|
|
||||||
@@ -232,7 +231,7 @@ class ExternalTable(AdditionalPropertiesAllowed, Mergeable):
|
|||||||
file_format: Optional[str] = None
|
file_format: Optional[str] = None
|
||||||
row_format: Optional[str] = None
|
row_format: Optional[str] = None
|
||||||
tbl_properties: Optional[str] = None
|
tbl_properties: Optional[str] = None
|
||||||
partitions: Optional[List[ExternalPartition]] = None
|
partitions: Optional[Union[List[str], List[ExternalPartition]]] = None
|
||||||
|
|
||||||
def __bool__(self):
|
def __bool__(self):
|
||||||
return self.location is not None
|
return self.location is not None
|
||||||
@@ -364,7 +363,6 @@ class SourcePatch(dbtClassMixin, Replaceable):
|
|||||||
@dataclass
|
@dataclass
|
||||||
class UnparsedDocumentation(dbtClassMixin, Replaceable):
|
class UnparsedDocumentation(dbtClassMixin, Replaceable):
|
||||||
package_name: str
|
package_name: str
|
||||||
root_path: str
|
|
||||||
path: str
|
path: str
|
||||||
original_file_path: str
|
original_file_path: str
|
||||||
|
|
||||||
@@ -525,3 +523,47 @@ class UnparsedMetric(dbtClassMixin, Replaceable):
|
|||||||
|
|
||||||
if data.get("model") is not None and data.get("calculation_method") == "derived":
|
if data.get("model") is not None and data.get("calculation_method") == "derived":
|
||||||
raise ValidationError("Derived metrics cannot have a 'model' property")
|
raise ValidationError("Derived metrics cannot have a 'model' property")
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class EntityDimension(dbtClassMixin, Mergeable):
|
||||||
|
"""This class is used for the dimension information at the entity level. It
|
||||||
|
closely matches the implementation of columns for models."""
|
||||||
|
name: str
|
||||||
|
description: str = ""
|
||||||
|
column_name: Optional[str] = None
|
||||||
|
date_type: Optional[str] = None
|
||||||
|
default_timestamp: Optional[bool] = None
|
||||||
|
primary_key: Optional[bool] = None
|
||||||
|
time_grains: Optional[List[str]] = field(default_factory=list)
|
||||||
|
tags: List[str] = field(default_factory=list)
|
||||||
|
meta: Dict[str, Any] = field(default_factory=dict)
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class EntityInheritence(EntityDimension):
|
||||||
|
"""This class is used for entity dimension inheritence. This class is optional
|
||||||
|
but if it is present then include needs to be present. Exclude cannot be present
|
||||||
|
without some idea of what is being included, whereas exclude is fully optional.
|
||||||
|
The acceptable inputs for include are either a list of columns/dimensions or *
|
||||||
|
to represent all fields. The acceptable inputs for exclude are a list of columns/
|
||||||
|
dimensions
|
||||||
|
"""
|
||||||
|
include: Union[List[str],str] = field(default_factory=list)
|
||||||
|
exclude: Optional[List[str]] = field(default_factory=list)
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class UnparsedEntity(dbtClassMixin, Replaceable):
|
||||||
|
"""This class is used for entity information"""
|
||||||
|
name: str
|
||||||
|
model: str
|
||||||
|
description: str = ""
|
||||||
|
dimensions: Optional[Union[Optional[Sequence[EntityDimension]],Optional[EntityInheritence]]] = None
|
||||||
|
# dimensions: Optional[Sequence[EntityDimension]] = None
|
||||||
|
meta: Dict[str, Any] = field(default_factory=dict)
|
||||||
|
tags: List[str] = field(default_factory=list)
|
||||||
|
config: Dict[str, Any] = field(default_factory=dict)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def validate(cls, data):
|
||||||
|
super(UnparsedEntity, cls).validate(data)
|
||||||
|
errors = []
|
||||||
|
## TODO: Add validation here around include/exclude and others
|
||||||
@@ -12,9 +12,7 @@ from dataclasses import dataclass, field
|
|||||||
from typing import Optional, List, Dict, Union, Any
|
from typing import Optional, List, Dict, Union, Any
|
||||||
from mashumaro.types import SerializableType
|
from mashumaro.types import SerializableType
|
||||||
|
|
||||||
PIN_PACKAGE_URL = (
|
|
||||||
"https://docs.getdbt.com/docs/package-management#section-specifying-package-versions" # noqa
|
|
||||||
)
|
|
||||||
DEFAULT_SEND_ANONYMOUS_USAGE_STATS = True
|
DEFAULT_SEND_ANONYMOUS_USAGE_STATS = True
|
||||||
|
|
||||||
|
|
||||||
@@ -91,6 +89,23 @@ PackageSpec = Union[LocalPackage, GitPackage, RegistryPackage]
|
|||||||
class PackageConfig(dbtClassMixin, Replaceable):
|
class PackageConfig(dbtClassMixin, Replaceable):
|
||||||
packages: List[PackageSpec]
|
packages: List[PackageSpec]
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def validate(cls, data):
|
||||||
|
for package in data.get("packages", data):
|
||||||
|
if isinstance(package, dict) and package.get("package"):
|
||||||
|
if not package["version"]:
|
||||||
|
raise ValidationError(
|
||||||
|
f"{package['package']} is missing the version. When installing from the Hub "
|
||||||
|
"package index, version is a required property"
|
||||||
|
)
|
||||||
|
|
||||||
|
if "/" not in package["package"]:
|
||||||
|
raise ValidationError(
|
||||||
|
f"{package['package']} was not found in the package index. Packages on the index "
|
||||||
|
"require a namespace, e.g dbt-labs/dbt_utils"
|
||||||
|
)
|
||||||
|
super().validate(data)
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
class ProjectPackageMetadata:
|
class ProjectPackageMetadata:
|
||||||
@@ -193,6 +208,7 @@ class Project(HyphenatedDbtClassMixin, Replaceable):
|
|||||||
sources: Dict[str, Any] = field(default_factory=dict)
|
sources: Dict[str, Any] = field(default_factory=dict)
|
||||||
tests: Dict[str, Any] = field(default_factory=dict)
|
tests: Dict[str, Any] = field(default_factory=dict)
|
||||||
metrics: Dict[str, Any] = field(default_factory=dict)
|
metrics: Dict[str, Any] = field(default_factory=dict)
|
||||||
|
entities: Dict[str, Any] = field(default_factory=dict)
|
||||||
exposures: Dict[str, Any] = field(default_factory=dict)
|
exposures: Dict[str, Any] = field(default_factory=dict)
|
||||||
vars: Optional[Dict[str, Any]] = field(
|
vars: Optional[Dict[str, Any]] = field(
|
||||||
default=None,
|
default=None,
|
||||||
|
|||||||
@@ -11,11 +11,12 @@ from dbt.contracts.util import (
|
|||||||
from dbt.exceptions import InternalException
|
from dbt.exceptions import InternalException
|
||||||
from dbt.events.functions import fire_event
|
from dbt.events.functions import fire_event
|
||||||
from dbt.events.types import TimingInfoCollected
|
from dbt.events.types import TimingInfoCollected
|
||||||
|
from dbt.events.proto_types import RunResultMsg
|
||||||
from dbt.logger import (
|
from dbt.logger import (
|
||||||
TimingProcessor,
|
TimingProcessor,
|
||||||
JsonOnly,
|
JsonOnly,
|
||||||
)
|
)
|
||||||
from dbt.utils import lowercase
|
from dbt.utils import lowercase, cast_to_str, cast_to_int
|
||||||
from dbt.dataclass_schema import dbtClassMixin, StrEnum
|
from dbt.dataclass_schema import dbtClassMixin, StrEnum
|
||||||
|
|
||||||
import agate
|
import agate
|
||||||
@@ -119,6 +120,17 @@ class BaseResult(dbtClassMixin):
|
|||||||
data["failures"] = None
|
data["failures"] = None
|
||||||
return data
|
return data
|
||||||
|
|
||||||
|
def to_msg(self):
|
||||||
|
# TODO: add more fields
|
||||||
|
msg = RunResultMsg()
|
||||||
|
msg.status = str(self.status)
|
||||||
|
msg.message = cast_to_str(self.message)
|
||||||
|
msg.thread = self.thread_id
|
||||||
|
msg.execution_time = self.execution_time
|
||||||
|
msg.num_failures = cast_to_int(self.failures)
|
||||||
|
# timing_info, adapter_response, message
|
||||||
|
return msg
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
class NodeResult(BaseResult):
|
class NodeResult(BaseResult):
|
||||||
@@ -208,7 +220,9 @@ class RunResultsArtifact(ExecutionResult, ArtifactMixin):
|
|||||||
generated_at: datetime,
|
generated_at: datetime,
|
||||||
args: Dict,
|
args: Dict,
|
||||||
):
|
):
|
||||||
processed_results = [process_run_result(result) for result in results]
|
processed_results = [
|
||||||
|
process_run_result(result) for result in results if isinstance(result, RunResult)
|
||||||
|
]
|
||||||
meta = RunResultsMetadata(
|
meta = RunResultsMetadata(
|
||||||
dbt_schema_version=str(cls.dbt_schema_version),
|
dbt_schema_version=str(cls.dbt_schema_version),
|
||||||
generated_at=generated_at,
|
generated_at=generated_at,
|
||||||
@@ -327,7 +341,7 @@ def process_freshness_result(result: FreshnessNodeResult) -> FreshnessNodeOutput
|
|||||||
criteria = result.node.freshness
|
criteria = result.node.freshness
|
||||||
if criteria is None:
|
if criteria is None:
|
||||||
raise InternalException(
|
raise InternalException(
|
||||||
"Somehow evaluated a freshness result for a source " "that has no freshness criteria!"
|
"Somehow evaluated a freshness result for a source that has no freshness criteria!"
|
||||||
)
|
)
|
||||||
return SourceFreshnessOutput(
|
return SourceFreshnessOutput(
|
||||||
unique_id=unique_id,
|
unique_id=unique_id,
|
||||||
|
|||||||
@@ -1,5 +1,4 @@
|
|||||||
import dataclasses
|
import dataclasses
|
||||||
import os
|
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from typing import List, Tuple, ClassVar, Type, TypeVar, Dict, Any, Optional
|
from typing import List, Tuple, ClassVar, Type, TypeVar, Dict, Any, Optional
|
||||||
|
|
||||||
@@ -11,7 +10,7 @@ from dbt.exceptions import (
|
|||||||
IncompatibleSchemaException,
|
IncompatibleSchemaException,
|
||||||
)
|
)
|
||||||
from dbt.version import __version__
|
from dbt.version import __version__
|
||||||
from dbt.events.functions import get_invocation_id
|
from dbt.events.functions import get_invocation_id, get_metadata_vars
|
||||||
from dbt.dataclass_schema import dbtClassMixin
|
from dbt.dataclass_schema import dbtClassMixin
|
||||||
|
|
||||||
from dbt.dataclass_schema import (
|
from dbt.dataclass_schema import (
|
||||||
@@ -148,20 +147,6 @@ class SchemaVersion:
|
|||||||
return BASE_SCHEMAS_URL + self.path
|
return BASE_SCHEMAS_URL + self.path
|
||||||
|
|
||||||
|
|
||||||
SCHEMA_VERSION_KEY = "dbt_schema_version"
|
|
||||||
|
|
||||||
|
|
||||||
METADATA_ENV_PREFIX = "DBT_ENV_CUSTOM_ENV_"
|
|
||||||
|
|
||||||
|
|
||||||
def get_metadata_env() -> Dict[str, str]:
|
|
||||||
return {
|
|
||||||
k[len(METADATA_ENV_PREFIX) :]: v
|
|
||||||
for k, v in os.environ.items()
|
|
||||||
if k.startswith(METADATA_ENV_PREFIX)
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
# This is used in the ManifestMetadata, RunResultsMetadata, RunOperationResultMetadata,
|
# This is used in the ManifestMetadata, RunResultsMetadata, RunOperationResultMetadata,
|
||||||
# FreshnessMetadata, and CatalogMetadata classes
|
# FreshnessMetadata, and CatalogMetadata classes
|
||||||
@dataclasses.dataclass
|
@dataclasses.dataclass
|
||||||
@@ -170,7 +155,7 @@ class BaseArtifactMetadata(dbtClassMixin):
|
|||||||
dbt_version: str = __version__
|
dbt_version: str = __version__
|
||||||
generated_at: datetime = dataclasses.field(default_factory=datetime.utcnow)
|
generated_at: datetime = dataclasses.field(default_factory=datetime.utcnow)
|
||||||
invocation_id: Optional[str] = dataclasses.field(default_factory=get_invocation_id)
|
invocation_id: Optional[str] = dataclasses.field(default_factory=get_invocation_id)
|
||||||
env: Dict[str, str] = dataclasses.field(default_factory=get_metadata_env)
|
env: Dict[str, str] = dataclasses.field(default_factory=get_metadata_vars)
|
||||||
|
|
||||||
def __post_serialize__(self, dct):
|
def __post_serialize__(self, dct):
|
||||||
dct = super().__post_serialize__(dct)
|
dct = super().__post_serialize__(dct)
|
||||||
@@ -255,13 +240,32 @@ def rename_sql_attr(node_content: dict) -> dict:
|
|||||||
def upgrade_manifest_json(manifest: dict) -> dict:
|
def upgrade_manifest_json(manifest: dict) -> dict:
|
||||||
for node_content in manifest.get("nodes", {}).values():
|
for node_content in manifest.get("nodes", {}).values():
|
||||||
node_content = rename_sql_attr(node_content)
|
node_content = rename_sql_attr(node_content)
|
||||||
|
if node_content["resource_type"] != "seed" and "root_path" in node_content:
|
||||||
|
del node_content["root_path"]
|
||||||
for disabled in manifest.get("disabled", {}).values():
|
for disabled in manifest.get("disabled", {}).values():
|
||||||
# There can be multiple disabled nodes for the same unique_id
|
# There can be multiple disabled nodes for the same unique_id
|
||||||
# so make sure all the nodes get the attr renamed
|
# so make sure all the nodes get the attr renamed
|
||||||
disabled = [rename_sql_attr(n) for n in disabled]
|
for node_content in disabled:
|
||||||
|
rename_sql_attr(node_content)
|
||||||
|
if node_content["resource_type"] != "seed" and "root_path" in node_content:
|
||||||
|
del node_content["root_path"]
|
||||||
for metric_content in manifest.get("metrics", {}).values():
|
for metric_content in manifest.get("metrics", {}).values():
|
||||||
# handle attr renames + value translation ("expression" -> "derived")
|
# handle attr renames + value translation ("expression" -> "derived")
|
||||||
metric_content = rename_metric_attr(metric_content)
|
metric_content = rename_metric_attr(metric_content)
|
||||||
|
if "root_path" in metric_content:
|
||||||
|
del metric_content["root_path"]
|
||||||
|
for exposure_content in manifest.get("exposures", {}).values():
|
||||||
|
if "root_path" in exposure_content:
|
||||||
|
del exposure_content["root_path"]
|
||||||
|
for source_content in manifest.get("sources", {}).values():
|
||||||
|
if "root_path" in exposure_content:
|
||||||
|
del source_content["root_path"]
|
||||||
|
for macro_content in manifest.get("macros", {}).values():
|
||||||
|
if "root_path" in macro_content:
|
||||||
|
del macro_content["root_path"]
|
||||||
|
for doc_content in manifest.get("docs", {}).values():
|
||||||
|
if "root_path" in doc_content:
|
||||||
|
del doc_content["root_path"]
|
||||||
return manifest
|
return manifest
|
||||||
|
|
||||||
|
|
||||||
@@ -306,7 +310,7 @@ class VersionedSchema(dbtClassMixin):
|
|||||||
expected=str(cls.dbt_schema_version),
|
expected=str(cls.dbt_schema_version),
|
||||||
found=previous_schema_version,
|
found=previous_schema_version,
|
||||||
)
|
)
|
||||||
if get_manifest_schema_version(data) <= 6:
|
if get_manifest_schema_version(data) <= 7:
|
||||||
data = upgrade_manifest_json(data)
|
data = upgrade_manifest_json(data)
|
||||||
return cls.from_dict(data) # type: ignore
|
return cls.from_dict(data) # type: ignore
|
||||||
|
|
||||||
|
|||||||
@@ -1,14 +1,14 @@
|
|||||||
|
import abc
|
||||||
from typing import Optional, Set, List, Dict, ClassVar
|
from typing import Optional, Set, List, Dict, ClassVar
|
||||||
|
|
||||||
import dbt.exceptions
|
import dbt.exceptions
|
||||||
from dbt import ui
|
|
||||||
|
|
||||||
import dbt.tracking
|
import dbt.tracking
|
||||||
|
|
||||||
|
|
||||||
class DBTDeprecation:
|
class DBTDeprecation:
|
||||||
_name: ClassVar[Optional[str]] = None
|
_name: ClassVar[Optional[str]] = None
|
||||||
_description: ClassVar[Optional[str]] = None
|
_event: ClassVar[Optional[str]] = None
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def name(self) -> str:
|
def name(self) -> str:
|
||||||
@@ -21,66 +21,50 @@ class DBTDeprecation:
|
|||||||
dbt.tracking.track_deprecation_warn({"deprecation_name": self.name})
|
dbt.tracking.track_deprecation_warn({"deprecation_name": self.name})
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def description(self) -> str:
|
def event(self) -> abc.ABCMeta:
|
||||||
if self._description is not None:
|
if self._event is not None:
|
||||||
return self._description
|
module_path = dbt.events.types
|
||||||
raise NotImplementedError("description not implemented for {}".format(self))
|
class_name = self._event
|
||||||
|
|
||||||
|
try:
|
||||||
|
return getattr(module_path, class_name)
|
||||||
|
except AttributeError:
|
||||||
|
msg = f"Event Class `{class_name}` is not defined in `{module_path}`"
|
||||||
|
raise NameError(msg)
|
||||||
|
raise NotImplementedError("event not implemented for {}".format(self._event))
|
||||||
|
|
||||||
def show(self, *args, **kwargs) -> None:
|
def show(self, *args, **kwargs) -> None:
|
||||||
if self.name not in active_deprecations:
|
if self.name not in active_deprecations:
|
||||||
desc = self.description.format(**kwargs)
|
event = self.event(**kwargs)
|
||||||
msg = ui.line_wrap_message(desc, prefix="Deprecated functionality\n\n")
|
dbt.events.functions.warn_or_error(event)
|
||||||
dbt.exceptions.warn_or_error(msg, log_fmt=ui.warning_tag("{}"))
|
|
||||||
self.track_deprecation_warn()
|
self.track_deprecation_warn()
|
||||||
active_deprecations.add(self.name)
|
active_deprecations.add(self.name)
|
||||||
|
|
||||||
|
|
||||||
class PackageRedirectDeprecation(DBTDeprecation):
|
class PackageRedirectDeprecation(DBTDeprecation):
|
||||||
_name = "package-redirect"
|
_name = "package-redirect"
|
||||||
_description = """\
|
_event = "PackageRedirectDeprecation"
|
||||||
The `{old_name}` package is deprecated in favor of `{new_name}`. Please update
|
|
||||||
your `packages.yml` configuration to use `{new_name}` instead.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
class PackageInstallPathDeprecation(DBTDeprecation):
|
class PackageInstallPathDeprecation(DBTDeprecation):
|
||||||
_name = "install-packages-path"
|
_name = "install-packages-path"
|
||||||
_description = """\
|
_event = "PackageInstallPathDeprecation"
|
||||||
The default package install path has changed from `dbt_modules` to `dbt_packages`.
|
|
||||||
Please update `clean-targets` in `dbt_project.yml` and check `.gitignore` as well.
|
|
||||||
Or, set `packages-install-path: dbt_modules` if you'd like to keep the current value.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
class ConfigPathDeprecation(DBTDeprecation):
|
class ConfigSourcePathDeprecation(DBTDeprecation):
|
||||||
_description = """\
|
|
||||||
The `{deprecated_path}` config has been renamed to `{exp_path}`.
|
|
||||||
Please update your `dbt_project.yml` configuration to reflect this change.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
class ConfigSourcePathDeprecation(ConfigPathDeprecation):
|
|
||||||
_name = "project-config-source-paths"
|
_name = "project-config-source-paths"
|
||||||
|
_event = "ConfigSourcePathDeprecation"
|
||||||
|
|
||||||
|
|
||||||
class ConfigDataPathDeprecation(ConfigPathDeprecation):
|
class ConfigDataPathDeprecation(DBTDeprecation):
|
||||||
_name = "project-config-data-paths"
|
_name = "project-config-data-paths"
|
||||||
|
_event = "ConfigDataPathDeprecation"
|
||||||
|
|
||||||
_adapter_renamed_description = """\
|
|
||||||
The adapter function `adapter.{old_name}` is deprecated and will be removed in
|
|
||||||
a future release of dbt. Please use `adapter.{new_name}` instead.
|
|
||||||
|
|
||||||
Documentation for {new_name} can be found here:
|
|
||||||
|
|
||||||
https://docs.getdbt.com/docs/adapter
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
def renamed_method(old_name: str, new_name: str):
|
def renamed_method(old_name: str, new_name: str):
|
||||||
class AdapterDeprecationWarning(DBTDeprecation):
|
class AdapterDeprecationWarning(DBTDeprecation):
|
||||||
_name = "adapter:{}".format(old_name)
|
_name = "adapter:{}".format(old_name)
|
||||||
_description = _adapter_renamed_description.format(old_name=old_name, new_name=new_name)
|
_event = "AdapterDeprecationWarning"
|
||||||
|
|
||||||
dep = AdapterDeprecationWarning()
|
dep = AdapterDeprecationWarning()
|
||||||
deprecations_list.append(dep)
|
deprecations_list.append(dep)
|
||||||
@@ -89,26 +73,12 @@ def renamed_method(old_name: str, new_name: str):
|
|||||||
|
|
||||||
class MetricAttributesRenamed(DBTDeprecation):
|
class MetricAttributesRenamed(DBTDeprecation):
|
||||||
_name = "metric-attr-renamed"
|
_name = "metric-attr-renamed"
|
||||||
_description = """\
|
_event = "MetricAttributesRenamed"
|
||||||
dbt-core v1.3 renamed attributes for metrics:
|
|
||||||
\n 'sql' -> 'expression'
|
|
||||||
\n 'type' -> 'calculation_method'
|
|
||||||
\n 'type: expression' -> 'calculation_method: derived'
|
|
||||||
\nThe old metric parameter names will be fully deprecated in v1.4.
|
|
||||||
\nPlease remove them from the metric definition of metric '{metric_name}'
|
|
||||||
\nRelevant issue here: https://github.com/dbt-labs/dbt-core/issues/5849
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
class ExposureNameDeprecation(DBTDeprecation):
|
class ExposureNameDeprecation(DBTDeprecation):
|
||||||
_name = "exposure-name"
|
_name = "exposure-name"
|
||||||
_description = """\
|
_event = "ExposureNameDeprecation"
|
||||||
Starting in v1.3, the 'name' of an exposure should contain only letters, numbers, and underscores.
|
|
||||||
Exposures support a new property, 'label', which may contain spaces, capital letters, and special characters.
|
|
||||||
{exposure} does not follow this pattern.
|
|
||||||
Please update the 'name', and use the 'label' property for a human-friendly title.
|
|
||||||
This will raise an error in a future version of dbt-core.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
def warn(name, *args, **kwargs):
|
def warn(name, *args, **kwargs):
|
||||||
@@ -125,12 +95,12 @@ def warn(name, *args, **kwargs):
|
|||||||
active_deprecations: Set[str] = set()
|
active_deprecations: Set[str] = set()
|
||||||
|
|
||||||
deprecations_list: List[DBTDeprecation] = [
|
deprecations_list: List[DBTDeprecation] = [
|
||||||
ExposureNameDeprecation(),
|
PackageRedirectDeprecation(),
|
||||||
|
PackageInstallPathDeprecation(),
|
||||||
ConfigSourcePathDeprecation(),
|
ConfigSourcePathDeprecation(),
|
||||||
ConfigDataPathDeprecation(),
|
ConfigDataPathDeprecation(),
|
||||||
PackageInstallPathDeprecation(),
|
|
||||||
PackageRedirectDeprecation(),
|
|
||||||
MetricAttributesRenamed(),
|
MetricAttributesRenamed(),
|
||||||
|
ExposureNameDeprecation(),
|
||||||
]
|
]
|
||||||
|
|
||||||
deprecations: Dict[str, DBTDeprecation] = {d.name: d for d in deprecations_list}
|
deprecations: Dict[str, DBTDeprecation] = {d.name: d for d in deprecations_list}
|
||||||
|
|||||||
@@ -74,7 +74,7 @@ class PinnedPackage(BasePackage):
|
|||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
@abc.abstractmethod
|
@abc.abstractmethod
|
||||||
def install(self, project):
|
def install(self, project, renderer):
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
@abc.abstractmethod
|
@abc.abstractmethod
|
||||||
|
|||||||
@@ -9,14 +9,9 @@ from dbt.contracts.project import (
|
|||||||
GitPackage,
|
GitPackage,
|
||||||
)
|
)
|
||||||
from dbt.deps.base import PinnedPackage, UnpinnedPackage, get_downloads_path
|
from dbt.deps.base import PinnedPackage, UnpinnedPackage, get_downloads_path
|
||||||
from dbt.exceptions import ExecutableError, warn_or_error, raise_dependency_error
|
from dbt.exceptions import ExecutableError, raise_dependency_error
|
||||||
from dbt.events.functions import fire_event
|
from dbt.events.functions import fire_event, warn_or_error
|
||||||
from dbt.events.types import EnsureGitInstalled
|
from dbt.events.types import EnsureGitInstalled, DepsUnpinned
|
||||||
from dbt import ui
|
|
||||||
|
|
||||||
PIN_PACKAGE_URL = (
|
|
||||||
"https://docs.getdbt.com/docs/package-management#section-specifying-package-versions" # noqa
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def md5sum(s: str):
|
def md5sum(s: str):
|
||||||
@@ -62,14 +57,6 @@ class GitPinnedPackage(GitPackageMixin, PinnedPackage):
|
|||||||
else:
|
else:
|
||||||
return "revision {}".format(self.revision)
|
return "revision {}".format(self.revision)
|
||||||
|
|
||||||
def unpinned_msg(self):
|
|
||||||
if self.revision == "HEAD":
|
|
||||||
return "not pinned, using HEAD (default branch)"
|
|
||||||
elif self.revision in ("main", "master"):
|
|
||||||
return f'pinned to the "{self.revision}" branch'
|
|
||||||
else:
|
|
||||||
return None
|
|
||||||
|
|
||||||
def _checkout(self):
|
def _checkout(self):
|
||||||
"""Performs a shallow clone of the repository into the downloads
|
"""Performs a shallow clone of the repository into the downloads
|
||||||
directory. This function can be called repeatedly. If the project has
|
directory. This function can be called repeatedly. If the project has
|
||||||
@@ -92,14 +79,8 @@ class GitPinnedPackage(GitPackageMixin, PinnedPackage):
|
|||||||
def _fetch_metadata(self, project, renderer) -> ProjectPackageMetadata:
|
def _fetch_metadata(self, project, renderer) -> ProjectPackageMetadata:
|
||||||
path = self._checkout()
|
path = self._checkout()
|
||||||
|
|
||||||
if self.unpinned_msg() and self.warn_unpinned:
|
if (self.revision == "HEAD" or self.revision in ("main", "master")) and self.warn_unpinned:
|
||||||
warn_or_error(
|
warn_or_error(DepsUnpinned(git=self.git))
|
||||||
'The git package "{}" \n\tis {}.\n\tThis can introduce '
|
|
||||||
"breaking changes into your project without warning!\n\nSee {}".format(
|
|
||||||
self.git, self.unpinned_msg(), PIN_PACKAGE_URL
|
|
||||||
),
|
|
||||||
log_fmt=ui.yellow("WARNING: {}"),
|
|
||||||
)
|
|
||||||
loaded = Project.from_project_root(path, renderer)
|
loaded = Project.from_project_root(path, renderer)
|
||||||
return ProjectPackageMetadata.from_project(loaded)
|
return ProjectPackageMetadata.from_project(loaded)
|
||||||
|
|
||||||
|
|||||||
20
core/dbt/docs/Makefile
Normal file
20
core/dbt/docs/Makefile
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
# Minimal makefile for Sphinx documentation
|
||||||
|
#
|
||||||
|
|
||||||
|
# You can set these variables from the command line, and also
|
||||||
|
# from the environment for the first two.
|
||||||
|
SPHINXOPTS ?=
|
||||||
|
SPHINXBUILD ?= sphinx-build
|
||||||
|
SOURCEDIR = source
|
||||||
|
BUILDDIR = build
|
||||||
|
|
||||||
|
# Put it first so that "make" without argument is like "make help".
|
||||||
|
help:
|
||||||
|
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
||||||
|
|
||||||
|
.PHONY: help Makefile
|
||||||
|
|
||||||
|
# Catch-all target: route all unknown targets to Sphinx using the new
|
||||||
|
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
|
||||||
|
%: Makefile
|
||||||
|
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
||||||
BIN
core/dbt/docs/build/doctrees/environment.pickle
vendored
Normal file
BIN
core/dbt/docs/build/doctrees/environment.pickle
vendored
Normal file
Binary file not shown.
BIN
core/dbt/docs/build/doctrees/index.doctree
vendored
Normal file
BIN
core/dbt/docs/build/doctrees/index.doctree
vendored
Normal file
Binary file not shown.
4
core/dbt/docs/build/html/.buildinfo
vendored
Normal file
4
core/dbt/docs/build/html/.buildinfo
vendored
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
# Sphinx build info version 1
|
||||||
|
# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
|
||||||
|
config: 1ee31fc16e025fb98598189ba2cb5fcb
|
||||||
|
tags: 645f666f9bcd5a90fca523b33c5a78b7
|
||||||
4
core/dbt/docs/build/html/_sources/index.rst.txt
vendored
Normal file
4
core/dbt/docs/build/html/_sources/index.rst.txt
vendored
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
dbt-core's API documentation
|
||||||
|
============================
|
||||||
|
|
||||||
|
.. dbt_click:: dbt.cli.main:cli
|
||||||
134
core/dbt/docs/build/html/_static/_sphinx_javascript_frameworks_compat.js
vendored
Normal file
134
core/dbt/docs/build/html/_static/_sphinx_javascript_frameworks_compat.js
vendored
Normal file
@@ -0,0 +1,134 @@
|
|||||||
|
/*
|
||||||
|
* _sphinx_javascript_frameworks_compat.js
|
||||||
|
* ~~~~~~~~~~
|
||||||
|
*
|
||||||
|
* Compatability shim for jQuery and underscores.js.
|
||||||
|
*
|
||||||
|
* WILL BE REMOVED IN Sphinx 6.0
|
||||||
|
* xref RemovedInSphinx60Warning
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
|
||||||
|
* select a different prefix for underscore
|
||||||
|
*/
|
||||||
|
$u = _.noConflict();
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* small helper function to urldecode strings
|
||||||
|
*
|
||||||
|
* See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/decodeURIComponent#Decoding_query_parameters_from_a_URL
|
||||||
|
*/
|
||||||
|
jQuery.urldecode = function(x) {
|
||||||
|
if (!x) {
|
||||||
|
return x
|
||||||
|
}
|
||||||
|
return decodeURIComponent(x.replace(/\+/g, ' '));
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* small helper function to urlencode strings
|
||||||
|
*/
|
||||||
|
jQuery.urlencode = encodeURIComponent;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This function returns the parsed url parameters of the
|
||||||
|
* current request. Multiple values per key are supported,
|
||||||
|
* it will always return arrays of strings for the value parts.
|
||||||
|
*/
|
||||||
|
jQuery.getQueryParameters = function(s) {
|
||||||
|
if (typeof s === 'undefined')
|
||||||
|
s = document.location.search;
|
||||||
|
var parts = s.substr(s.indexOf('?') + 1).split('&');
|
||||||
|
var result = {};
|
||||||
|
for (var i = 0; i < parts.length; i++) {
|
||||||
|
var tmp = parts[i].split('=', 2);
|
||||||
|
var key = jQuery.urldecode(tmp[0]);
|
||||||
|
var value = jQuery.urldecode(tmp[1]);
|
||||||
|
if (key in result)
|
||||||
|
result[key].push(value);
|
||||||
|
else
|
||||||
|
result[key] = [value];
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* highlight a given string on a jquery object by wrapping it in
|
||||||
|
* span elements with the given class name.
|
||||||
|
*/
|
||||||
|
jQuery.fn.highlightText = function(text, className) {
|
||||||
|
function highlight(node, addItems) {
|
||||||
|
if (node.nodeType === 3) {
|
||||||
|
var val = node.nodeValue;
|
||||||
|
var pos = val.toLowerCase().indexOf(text);
|
||||||
|
if (pos >= 0 &&
|
||||||
|
!jQuery(node.parentNode).hasClass(className) &&
|
||||||
|
!jQuery(node.parentNode).hasClass("nohighlight")) {
|
||||||
|
var span;
|
||||||
|
var isInSVG = jQuery(node).closest("body, svg, foreignObject").is("svg");
|
||||||
|
if (isInSVG) {
|
||||||
|
span = document.createElementNS("http://www.w3.org/2000/svg", "tspan");
|
||||||
|
} else {
|
||||||
|
span = document.createElement("span");
|
||||||
|
span.className = className;
|
||||||
|
}
|
||||||
|
span.appendChild(document.createTextNode(val.substr(pos, text.length)));
|
||||||
|
node.parentNode.insertBefore(span, node.parentNode.insertBefore(
|
||||||
|
document.createTextNode(val.substr(pos + text.length)),
|
||||||
|
node.nextSibling));
|
||||||
|
node.nodeValue = val.substr(0, pos);
|
||||||
|
if (isInSVG) {
|
||||||
|
var rect = document.createElementNS("http://www.w3.org/2000/svg", "rect");
|
||||||
|
var bbox = node.parentElement.getBBox();
|
||||||
|
rect.x.baseVal.value = bbox.x;
|
||||||
|
rect.y.baseVal.value = bbox.y;
|
||||||
|
rect.width.baseVal.value = bbox.width;
|
||||||
|
rect.height.baseVal.value = bbox.height;
|
||||||
|
rect.setAttribute('class', className);
|
||||||
|
addItems.push({
|
||||||
|
"parent": node.parentNode,
|
||||||
|
"target": rect});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else if (!jQuery(node).is("button, select, textarea")) {
|
||||||
|
jQuery.each(node.childNodes, function() {
|
||||||
|
highlight(this, addItems);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
var addItems = [];
|
||||||
|
var result = this.each(function() {
|
||||||
|
highlight(this, addItems);
|
||||||
|
});
|
||||||
|
for (var i = 0; i < addItems.length; ++i) {
|
||||||
|
jQuery(addItems[i].parent).before(addItems[i].target);
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
};
|
||||||
|
|
||||||
|
/*
|
||||||
|
* backward compatibility for jQuery.browser
|
||||||
|
* This will be supported until firefox bug is fixed.
|
||||||
|
*/
|
||||||
|
if (!jQuery.browser) {
|
||||||
|
jQuery.uaMatch = function(ua) {
|
||||||
|
ua = ua.toLowerCase();
|
||||||
|
|
||||||
|
var match = /(chrome)[ \/]([\w.]+)/.exec(ua) ||
|
||||||
|
/(webkit)[ \/]([\w.]+)/.exec(ua) ||
|
||||||
|
/(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) ||
|
||||||
|
/(msie) ([\w.]+)/.exec(ua) ||
|
||||||
|
ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) ||
|
||||||
|
[];
|
||||||
|
|
||||||
|
return {
|
||||||
|
browser: match[ 1 ] || "",
|
||||||
|
version: match[ 2 ] || "0"
|
||||||
|
};
|
||||||
|
};
|
||||||
|
jQuery.browser = {};
|
||||||
|
jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true;
|
||||||
|
}
|
||||||
701
core/dbt/docs/build/html/_static/alabaster.css
vendored
Normal file
701
core/dbt/docs/build/html/_static/alabaster.css
vendored
Normal file
@@ -0,0 +1,701 @@
|
|||||||
|
@import url("basic.css");
|
||||||
|
|
||||||
|
/* -- page layout ----------------------------------------------------------- */
|
||||||
|
|
||||||
|
body {
|
||||||
|
font-family: Georgia, serif;
|
||||||
|
font-size: 17px;
|
||||||
|
background-color: #fff;
|
||||||
|
color: #000;
|
||||||
|
margin: 0;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
div.document {
|
||||||
|
width: 940px;
|
||||||
|
margin: 30px auto 0 auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.documentwrapper {
|
||||||
|
float: left;
|
||||||
|
width: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.bodywrapper {
|
||||||
|
margin: 0 0 0 220px;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar {
|
||||||
|
width: 220px;
|
||||||
|
font-size: 14px;
|
||||||
|
line-height: 1.5;
|
||||||
|
}
|
||||||
|
|
||||||
|
hr {
|
||||||
|
border: 1px solid #B1B4B6;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.body {
|
||||||
|
background-color: #fff;
|
||||||
|
color: #3E4349;
|
||||||
|
padding: 0 30px 0 30px;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.body > .section {
|
||||||
|
text-align: left;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.footer {
|
||||||
|
width: 940px;
|
||||||
|
margin: 20px auto 30px auto;
|
||||||
|
font-size: 14px;
|
||||||
|
color: #888;
|
||||||
|
text-align: right;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.footer a {
|
||||||
|
color: #888;
|
||||||
|
}
|
||||||
|
|
||||||
|
p.caption {
|
||||||
|
font-family: inherit;
|
||||||
|
font-size: inherit;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
div.relations {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
div.sphinxsidebar a {
|
||||||
|
color: #444;
|
||||||
|
text-decoration: none;
|
||||||
|
border-bottom: 1px dotted #999;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar a:hover {
|
||||||
|
border-bottom: 1px solid #999;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebarwrapper {
|
||||||
|
padding: 18px 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebarwrapper p.logo {
|
||||||
|
padding: 0;
|
||||||
|
margin: -10px 0 0 0px;
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebarwrapper h1.logo {
|
||||||
|
margin-top: -10px;
|
||||||
|
text-align: center;
|
||||||
|
margin-bottom: 5px;
|
||||||
|
text-align: left;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebarwrapper h1.logo-name {
|
||||||
|
margin-top: 0px;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebarwrapper p.blurb {
|
||||||
|
margin-top: 0;
|
||||||
|
font-style: normal;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar h3,
|
||||||
|
div.sphinxsidebar h4 {
|
||||||
|
font-family: Georgia, serif;
|
||||||
|
color: #444;
|
||||||
|
font-size: 24px;
|
||||||
|
font-weight: normal;
|
||||||
|
margin: 0 0 5px 0;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar h4 {
|
||||||
|
font-size: 20px;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar h3 a {
|
||||||
|
color: #444;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar p.logo a,
|
||||||
|
div.sphinxsidebar h3 a,
|
||||||
|
div.sphinxsidebar p.logo a:hover,
|
||||||
|
div.sphinxsidebar h3 a:hover {
|
||||||
|
border: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar p {
|
||||||
|
color: #555;
|
||||||
|
margin: 10px 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar ul {
|
||||||
|
margin: 10px 0;
|
||||||
|
padding: 0;
|
||||||
|
color: #000;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar ul li.toctree-l1 > a {
|
||||||
|
font-size: 120%;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar ul li.toctree-l2 > a {
|
||||||
|
font-size: 110%;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar input {
|
||||||
|
border: 1px solid #CCC;
|
||||||
|
font-family: Georgia, serif;
|
||||||
|
font-size: 1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar hr {
|
||||||
|
border: none;
|
||||||
|
height: 1px;
|
||||||
|
color: #AAA;
|
||||||
|
background: #AAA;
|
||||||
|
|
||||||
|
text-align: left;
|
||||||
|
margin-left: 0;
|
||||||
|
width: 50%;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar .badge {
|
||||||
|
border-bottom: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar .badge:hover {
|
||||||
|
border-bottom: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* To address an issue with donation coming after search */
|
||||||
|
div.sphinxsidebar h3.donation {
|
||||||
|
margin-top: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- body styles ----------------------------------------------------------- */
|
||||||
|
|
||||||
|
a {
|
||||||
|
color: #004B6B;
|
||||||
|
text-decoration: underline;
|
||||||
|
}
|
||||||
|
|
||||||
|
a:hover {
|
||||||
|
color: #6D4100;
|
||||||
|
text-decoration: underline;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.body h1,
|
||||||
|
div.body h2,
|
||||||
|
div.body h3,
|
||||||
|
div.body h4,
|
||||||
|
div.body h5,
|
||||||
|
div.body h6 {
|
||||||
|
font-family: Georgia, serif;
|
||||||
|
font-weight: normal;
|
||||||
|
margin: 30px 0px 10px 0px;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.body h1 { margin-top: 0; padding-top: 0; font-size: 240%; }
|
||||||
|
div.body h2 { font-size: 180%; }
|
||||||
|
div.body h3 { font-size: 150%; }
|
||||||
|
div.body h4 { font-size: 130%; }
|
||||||
|
div.body h5 { font-size: 100%; }
|
||||||
|
div.body h6 { font-size: 100%; }
|
||||||
|
|
||||||
|
a.headerlink {
|
||||||
|
color: #DDD;
|
||||||
|
padding: 0 4px;
|
||||||
|
text-decoration: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
a.headerlink:hover {
|
||||||
|
color: #444;
|
||||||
|
background: #EAEAEA;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.body p, div.body dd, div.body li {
|
||||||
|
line-height: 1.4em;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.admonition {
|
||||||
|
margin: 20px 0px;
|
||||||
|
padding: 10px 30px;
|
||||||
|
background-color: #EEE;
|
||||||
|
border: 1px solid #CCC;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.admonition tt.xref, div.admonition code.xref, div.admonition a tt {
|
||||||
|
background-color: #FBFBFB;
|
||||||
|
border-bottom: 1px solid #fafafa;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.admonition p.admonition-title {
|
||||||
|
font-family: Georgia, serif;
|
||||||
|
font-weight: normal;
|
||||||
|
font-size: 24px;
|
||||||
|
margin: 0 0 10px 0;
|
||||||
|
padding: 0;
|
||||||
|
line-height: 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.admonition p.last {
|
||||||
|
margin-bottom: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.highlight {
|
||||||
|
background-color: #fff;
|
||||||
|
}
|
||||||
|
|
||||||
|
dt:target, .highlight {
|
||||||
|
background: #FAF3E8;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.warning {
|
||||||
|
background-color: #FCC;
|
||||||
|
border: 1px solid #FAA;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.danger {
|
||||||
|
background-color: #FCC;
|
||||||
|
border: 1px solid #FAA;
|
||||||
|
-moz-box-shadow: 2px 2px 4px #D52C2C;
|
||||||
|
-webkit-box-shadow: 2px 2px 4px #D52C2C;
|
||||||
|
box-shadow: 2px 2px 4px #D52C2C;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.error {
|
||||||
|
background-color: #FCC;
|
||||||
|
border: 1px solid #FAA;
|
||||||
|
-moz-box-shadow: 2px 2px 4px #D52C2C;
|
||||||
|
-webkit-box-shadow: 2px 2px 4px #D52C2C;
|
||||||
|
box-shadow: 2px 2px 4px #D52C2C;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.caution {
|
||||||
|
background-color: #FCC;
|
||||||
|
border: 1px solid #FAA;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.attention {
|
||||||
|
background-color: #FCC;
|
||||||
|
border: 1px solid #FAA;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.important {
|
||||||
|
background-color: #EEE;
|
||||||
|
border: 1px solid #CCC;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.note {
|
||||||
|
background-color: #EEE;
|
||||||
|
border: 1px solid #CCC;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.tip {
|
||||||
|
background-color: #EEE;
|
||||||
|
border: 1px solid #CCC;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.hint {
|
||||||
|
background-color: #EEE;
|
||||||
|
border: 1px solid #CCC;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.seealso {
|
||||||
|
background-color: #EEE;
|
||||||
|
border: 1px solid #CCC;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.topic {
|
||||||
|
background-color: #EEE;
|
||||||
|
}
|
||||||
|
|
||||||
|
p.admonition-title {
|
||||||
|
display: inline;
|
||||||
|
}
|
||||||
|
|
||||||
|
p.admonition-title:after {
|
||||||
|
content: ":";
|
||||||
|
}
|
||||||
|
|
||||||
|
pre, tt, code {
|
||||||
|
font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace;
|
||||||
|
font-size: 0.9em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.hll {
|
||||||
|
background-color: #FFC;
|
||||||
|
margin: 0 -12px;
|
||||||
|
padding: 0 12px;
|
||||||
|
display: block;
|
||||||
|
}
|
||||||
|
|
||||||
|
img.screenshot {
|
||||||
|
}
|
||||||
|
|
||||||
|
tt.descname, tt.descclassname, code.descname, code.descclassname {
|
||||||
|
font-size: 0.95em;
|
||||||
|
}
|
||||||
|
|
||||||
|
tt.descname, code.descname {
|
||||||
|
padding-right: 0.08em;
|
||||||
|
}
|
||||||
|
|
||||||
|
img.screenshot {
|
||||||
|
-moz-box-shadow: 2px 2px 4px #EEE;
|
||||||
|
-webkit-box-shadow: 2px 2px 4px #EEE;
|
||||||
|
box-shadow: 2px 2px 4px #EEE;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.docutils {
|
||||||
|
border: 1px solid #888;
|
||||||
|
-moz-box-shadow: 2px 2px 4px #EEE;
|
||||||
|
-webkit-box-shadow: 2px 2px 4px #EEE;
|
||||||
|
box-shadow: 2px 2px 4px #EEE;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.docutils td, table.docutils th {
|
||||||
|
border: 1px solid #888;
|
||||||
|
padding: 0.25em 0.7em;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.field-list, table.footnote {
|
||||||
|
border: none;
|
||||||
|
-moz-box-shadow: none;
|
||||||
|
-webkit-box-shadow: none;
|
||||||
|
box-shadow: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.footnote {
|
||||||
|
margin: 15px 0;
|
||||||
|
width: 100%;
|
||||||
|
border: 1px solid #EEE;
|
||||||
|
background: #FDFDFD;
|
||||||
|
font-size: 0.9em;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.footnote + table.footnote {
|
||||||
|
margin-top: -15px;
|
||||||
|
border-top: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.field-list th {
|
||||||
|
padding: 0 0.8em 0 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.field-list td {
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.field-list p {
|
||||||
|
margin-bottom: 0.8em;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Cloned from
|
||||||
|
* https://github.com/sphinx-doc/sphinx/commit/ef60dbfce09286b20b7385333d63a60321784e68
|
||||||
|
*/
|
||||||
|
.field-name {
|
||||||
|
-moz-hyphens: manual;
|
||||||
|
-ms-hyphens: manual;
|
||||||
|
-webkit-hyphens: manual;
|
||||||
|
hyphens: manual;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.footnote td.label {
|
||||||
|
width: .1px;
|
||||||
|
padding: 0.3em 0 0.3em 0.5em;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.footnote td {
|
||||||
|
padding: 0.3em 0.5em;
|
||||||
|
}
|
||||||
|
|
||||||
|
dl {
|
||||||
|
margin: 0;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
dl dd {
|
||||||
|
margin-left: 30px;
|
||||||
|
}
|
||||||
|
|
||||||
|
blockquote {
|
||||||
|
margin: 0 0 0 30px;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
ul, ol {
|
||||||
|
/* Matches the 30px from the narrow-screen "li > ul" selector below */
|
||||||
|
margin: 10px 0 10px 30px;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
pre {
|
||||||
|
background: #EEE;
|
||||||
|
padding: 7px 30px;
|
||||||
|
margin: 15px 0px;
|
||||||
|
line-height: 1.3em;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.viewcode-block:target {
|
||||||
|
background: #ffd;
|
||||||
|
}
|
||||||
|
|
||||||
|
dl pre, blockquote pre, li pre {
|
||||||
|
margin-left: 0;
|
||||||
|
padding-left: 30px;
|
||||||
|
}
|
||||||
|
|
||||||
|
tt, code {
|
||||||
|
background-color: #ecf0f3;
|
||||||
|
color: #222;
|
||||||
|
/* padding: 1px 2px; */
|
||||||
|
}
|
||||||
|
|
||||||
|
tt.xref, code.xref, a tt {
|
||||||
|
background-color: #FBFBFB;
|
||||||
|
border-bottom: 1px solid #fff;
|
||||||
|
}
|
||||||
|
|
||||||
|
a.reference {
|
||||||
|
text-decoration: none;
|
||||||
|
border-bottom: 1px dotted #004B6B;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Don't put an underline on images */
|
||||||
|
a.image-reference, a.image-reference:hover {
|
||||||
|
border-bottom: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
a.reference:hover {
|
||||||
|
border-bottom: 1px solid #6D4100;
|
||||||
|
}
|
||||||
|
|
||||||
|
a.footnote-reference {
|
||||||
|
text-decoration: none;
|
||||||
|
font-size: 0.7em;
|
||||||
|
vertical-align: top;
|
||||||
|
border-bottom: 1px dotted #004B6B;
|
||||||
|
}
|
||||||
|
|
||||||
|
a.footnote-reference:hover {
|
||||||
|
border-bottom: 1px solid #6D4100;
|
||||||
|
}
|
||||||
|
|
||||||
|
a:hover tt, a:hover code {
|
||||||
|
background: #EEE;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@media screen and (max-width: 870px) {
|
||||||
|
|
||||||
|
div.sphinxsidebar {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.document {
|
||||||
|
width: 100%;
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
div.documentwrapper {
|
||||||
|
margin-left: 0;
|
||||||
|
margin-top: 0;
|
||||||
|
margin-right: 0;
|
||||||
|
margin-bottom: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.bodywrapper {
|
||||||
|
margin-top: 0;
|
||||||
|
margin-right: 0;
|
||||||
|
margin-bottom: 0;
|
||||||
|
margin-left: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
ul {
|
||||||
|
margin-left: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
li > ul {
|
||||||
|
/* Matches the 30px from the "ul, ol" selector above */
|
||||||
|
margin-left: 30px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.document {
|
||||||
|
width: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.footer {
|
||||||
|
width: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.bodywrapper {
|
||||||
|
margin: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.footer {
|
||||||
|
width: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.github {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
@media screen and (max-width: 875px) {
|
||||||
|
|
||||||
|
body {
|
||||||
|
margin: 0;
|
||||||
|
padding: 20px 30px;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.documentwrapper {
|
||||||
|
float: none;
|
||||||
|
background: #fff;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar {
|
||||||
|
display: block;
|
||||||
|
float: none;
|
||||||
|
width: 102.5%;
|
||||||
|
margin: 50px -30px -20px -30px;
|
||||||
|
padding: 10px 20px;
|
||||||
|
background: #333;
|
||||||
|
color: #FFF;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar h3, div.sphinxsidebar h4, div.sphinxsidebar p,
|
||||||
|
div.sphinxsidebar h3 a {
|
||||||
|
color: #fff;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar a {
|
||||||
|
color: #AAA;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar p.logo {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.document {
|
||||||
|
width: 100%;
|
||||||
|
margin: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.footer {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.bodywrapper {
|
||||||
|
margin: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.body {
|
||||||
|
min-height: 0;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.rtd_doc_footer {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.document {
|
||||||
|
width: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.footer {
|
||||||
|
width: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.footer {
|
||||||
|
width: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.github {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/* misc. */
|
||||||
|
|
||||||
|
.revsys-inline {
|
||||||
|
display: none!important;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Make nested-list/multi-paragraph items look better in Releases changelog
|
||||||
|
* pages. Without this, docutils' magical list fuckery causes inconsistent
|
||||||
|
* formatting between different release sub-lists.
|
||||||
|
*/
|
||||||
|
div#changelog > div.section > ul > li > p:only-child {
|
||||||
|
margin-bottom: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Hide fugly table cell borders in ..bibliography:: directive output */
|
||||||
|
table.docutils.citation, table.docutils.citation td, table.docutils.citation th {
|
||||||
|
border: none;
|
||||||
|
/* Below needed in some edge cases; if not applied, bottom shadows appear */
|
||||||
|
-moz-box-shadow: none;
|
||||||
|
-webkit-box-shadow: none;
|
||||||
|
box-shadow: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/* relbar */
|
||||||
|
|
||||||
|
.related {
|
||||||
|
line-height: 30px;
|
||||||
|
width: 100%;
|
||||||
|
font-size: 0.9rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.related.top {
|
||||||
|
border-bottom: 1px solid #EEE;
|
||||||
|
margin-bottom: 20px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.related.bottom {
|
||||||
|
border-top: 1px solid #EEE;
|
||||||
|
}
|
||||||
|
|
||||||
|
.related ul {
|
||||||
|
padding: 0;
|
||||||
|
margin: 0;
|
||||||
|
list-style: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.related li {
|
||||||
|
display: inline;
|
||||||
|
}
|
||||||
|
|
||||||
|
nav#rellinks {
|
||||||
|
float: right;
|
||||||
|
}
|
||||||
|
|
||||||
|
nav#rellinks li+li:before {
|
||||||
|
content: "|";
|
||||||
|
}
|
||||||
|
|
||||||
|
nav#breadcrumbs li+li:before {
|
||||||
|
content: "\00BB";
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Hide certain items when printing */
|
||||||
|
@media print {
|
||||||
|
div.related {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
}
|
||||||
900
core/dbt/docs/build/html/_static/basic.css
vendored
Normal file
900
core/dbt/docs/build/html/_static/basic.css
vendored
Normal file
@@ -0,0 +1,900 @@
|
|||||||
|
/*
|
||||||
|
* basic.css
|
||||||
|
* ~~~~~~~~~
|
||||||
|
*
|
||||||
|
* Sphinx stylesheet -- basic theme.
|
||||||
|
*
|
||||||
|
* :copyright: Copyright 2007-2022 by the Sphinx team, see AUTHORS.
|
||||||
|
* :license: BSD, see LICENSE for details.
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
|
||||||
|
/* -- main layout ----------------------------------------------------------- */
|
||||||
|
|
||||||
|
div.clearer {
|
||||||
|
clear: both;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.section::after {
|
||||||
|
display: block;
|
||||||
|
content: '';
|
||||||
|
clear: left;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- relbar ---------------------------------------------------------------- */
|
||||||
|
|
||||||
|
div.related {
|
||||||
|
width: 100%;
|
||||||
|
font-size: 90%;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.related h3 {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.related ul {
|
||||||
|
margin: 0;
|
||||||
|
padding: 0 0 0 10px;
|
||||||
|
list-style: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.related li {
|
||||||
|
display: inline;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.related li.right {
|
||||||
|
float: right;
|
||||||
|
margin-right: 5px;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- sidebar --------------------------------------------------------------- */
|
||||||
|
|
||||||
|
div.sphinxsidebarwrapper {
|
||||||
|
padding: 10px 5px 0 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar {
|
||||||
|
float: left;
|
||||||
|
width: 230px;
|
||||||
|
margin-left: -100%;
|
||||||
|
font-size: 90%;
|
||||||
|
word-wrap: break-word;
|
||||||
|
overflow-wrap : break-word;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar ul {
|
||||||
|
list-style: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar ul ul,
|
||||||
|
div.sphinxsidebar ul.want-points {
|
||||||
|
margin-left: 20px;
|
||||||
|
list-style: square;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar ul ul {
|
||||||
|
margin-top: 0;
|
||||||
|
margin-bottom: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar form {
|
||||||
|
margin-top: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar input {
|
||||||
|
border: 1px solid #98dbcc;
|
||||||
|
font-family: sans-serif;
|
||||||
|
font-size: 1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar #searchbox form.search {
|
||||||
|
overflow: hidden;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar #searchbox input[type="text"] {
|
||||||
|
float: left;
|
||||||
|
width: 80%;
|
||||||
|
padding: 0.25em;
|
||||||
|
box-sizing: border-box;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar #searchbox input[type="submit"] {
|
||||||
|
float: left;
|
||||||
|
width: 20%;
|
||||||
|
border-left: none;
|
||||||
|
padding: 0.25em;
|
||||||
|
box-sizing: border-box;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
img {
|
||||||
|
border: 0;
|
||||||
|
max-width: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- search page ----------------------------------------------------------- */
|
||||||
|
|
||||||
|
ul.search {
|
||||||
|
margin: 10px 0 0 20px;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
ul.search li {
|
||||||
|
padding: 5px 0 5px 20px;
|
||||||
|
background-image: url(file.png);
|
||||||
|
background-repeat: no-repeat;
|
||||||
|
background-position: 0 7px;
|
||||||
|
}
|
||||||
|
|
||||||
|
ul.search li a {
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
|
||||||
|
ul.search li p.context {
|
||||||
|
color: #888;
|
||||||
|
margin: 2px 0 0 30px;
|
||||||
|
text-align: left;
|
||||||
|
}
|
||||||
|
|
||||||
|
ul.keywordmatches li.goodmatch a {
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- index page ------------------------------------------------------------ */
|
||||||
|
|
||||||
|
table.contentstable {
|
||||||
|
width: 90%;
|
||||||
|
margin-left: auto;
|
||||||
|
margin-right: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.contentstable p.biglink {
|
||||||
|
line-height: 150%;
|
||||||
|
}
|
||||||
|
|
||||||
|
a.biglink {
|
||||||
|
font-size: 1.3em;
|
||||||
|
}
|
||||||
|
|
||||||
|
span.linkdescr {
|
||||||
|
font-style: italic;
|
||||||
|
padding-top: 5px;
|
||||||
|
font-size: 90%;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- general index --------------------------------------------------------- */
|
||||||
|
|
||||||
|
table.indextable {
|
||||||
|
width: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.indextable td {
|
||||||
|
text-align: left;
|
||||||
|
vertical-align: top;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.indextable ul {
|
||||||
|
margin-top: 0;
|
||||||
|
margin-bottom: 0;
|
||||||
|
list-style-type: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.indextable > tbody > tr > td > ul {
|
||||||
|
padding-left: 0em;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.indextable tr.pcap {
|
||||||
|
height: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.indextable tr.cap {
|
||||||
|
margin-top: 10px;
|
||||||
|
background-color: #f2f2f2;
|
||||||
|
}
|
||||||
|
|
||||||
|
img.toggler {
|
||||||
|
margin-right: 3px;
|
||||||
|
margin-top: 3px;
|
||||||
|
cursor: pointer;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.modindex-jumpbox {
|
||||||
|
border-top: 1px solid #ddd;
|
||||||
|
border-bottom: 1px solid #ddd;
|
||||||
|
margin: 1em 0 1em 0;
|
||||||
|
padding: 0.4em;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.genindex-jumpbox {
|
||||||
|
border-top: 1px solid #ddd;
|
||||||
|
border-bottom: 1px solid #ddd;
|
||||||
|
margin: 1em 0 1em 0;
|
||||||
|
padding: 0.4em;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- domain module index --------------------------------------------------- */
|
||||||
|
|
||||||
|
table.modindextable td {
|
||||||
|
padding: 2px;
|
||||||
|
border-collapse: collapse;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- general body styles --------------------------------------------------- */
|
||||||
|
|
||||||
|
div.body {
|
||||||
|
min-width: 360px;
|
||||||
|
max-width: 800px;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.body p, div.body dd, div.body li, div.body blockquote {
|
||||||
|
-moz-hyphens: auto;
|
||||||
|
-ms-hyphens: auto;
|
||||||
|
-webkit-hyphens: auto;
|
||||||
|
hyphens: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
a.headerlink {
|
||||||
|
visibility: hidden;
|
||||||
|
}
|
||||||
|
|
||||||
|
h1:hover > a.headerlink,
|
||||||
|
h2:hover > a.headerlink,
|
||||||
|
h3:hover > a.headerlink,
|
||||||
|
h4:hover > a.headerlink,
|
||||||
|
h5:hover > a.headerlink,
|
||||||
|
h6:hover > a.headerlink,
|
||||||
|
dt:hover > a.headerlink,
|
||||||
|
caption:hover > a.headerlink,
|
||||||
|
p.caption:hover > a.headerlink,
|
||||||
|
div.code-block-caption:hover > a.headerlink {
|
||||||
|
visibility: visible;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.body p.caption {
|
||||||
|
text-align: inherit;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.body td {
|
||||||
|
text-align: left;
|
||||||
|
}
|
||||||
|
|
||||||
|
.first {
|
||||||
|
margin-top: 0 !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
p.rubric {
|
||||||
|
margin-top: 30px;
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
|
||||||
|
img.align-left, figure.align-left, .figure.align-left, object.align-left {
|
||||||
|
clear: left;
|
||||||
|
float: left;
|
||||||
|
margin-right: 1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
img.align-right, figure.align-right, .figure.align-right, object.align-right {
|
||||||
|
clear: right;
|
||||||
|
float: right;
|
||||||
|
margin-left: 1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
img.align-center, figure.align-center, .figure.align-center, object.align-center {
|
||||||
|
display: block;
|
||||||
|
margin-left: auto;
|
||||||
|
margin-right: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
img.align-default, figure.align-default, .figure.align-default {
|
||||||
|
display: block;
|
||||||
|
margin-left: auto;
|
||||||
|
margin-right: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.align-left {
|
||||||
|
text-align: left;
|
||||||
|
}
|
||||||
|
|
||||||
|
.align-center {
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.align-default {
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.align-right {
|
||||||
|
text-align: right;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- sidebars -------------------------------------------------------------- */
|
||||||
|
|
||||||
|
div.sidebar,
|
||||||
|
aside.sidebar {
|
||||||
|
margin: 0 0 0.5em 1em;
|
||||||
|
border: 1px solid #ddb;
|
||||||
|
padding: 7px;
|
||||||
|
background-color: #ffe;
|
||||||
|
width: 40%;
|
||||||
|
float: right;
|
||||||
|
clear: right;
|
||||||
|
overflow-x: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
p.sidebar-title {
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
nav.contents,
|
||||||
|
aside.topic,
|
||||||
|
div.admonition, div.topic, blockquote {
|
||||||
|
clear: left;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- topics ---------------------------------------------------------------- */
|
||||||
|
nav.contents,
|
||||||
|
aside.topic,
|
||||||
|
div.topic {
|
||||||
|
border: 1px solid #ccc;
|
||||||
|
padding: 7px;
|
||||||
|
margin: 10px 0 10px 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
p.topic-title {
|
||||||
|
font-size: 1.1em;
|
||||||
|
font-weight: bold;
|
||||||
|
margin-top: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- admonitions ----------------------------------------------------------- */
|
||||||
|
|
||||||
|
div.admonition {
|
||||||
|
margin-top: 10px;
|
||||||
|
margin-bottom: 10px;
|
||||||
|
padding: 7px;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.admonition dt {
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
|
||||||
|
p.admonition-title {
|
||||||
|
margin: 0px 10px 5px 0px;
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.body p.centered {
|
||||||
|
text-align: center;
|
||||||
|
margin-top: 25px;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- content of sidebars/topics/admonitions -------------------------------- */
|
||||||
|
|
||||||
|
div.sidebar > :last-child,
|
||||||
|
aside.sidebar > :last-child,
|
||||||
|
nav.contents > :last-child,
|
||||||
|
aside.topic > :last-child,
|
||||||
|
div.topic > :last-child,
|
||||||
|
div.admonition > :last-child {
|
||||||
|
margin-bottom: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sidebar::after,
|
||||||
|
aside.sidebar::after,
|
||||||
|
nav.contents::after,
|
||||||
|
aside.topic::after,
|
||||||
|
div.topic::after,
|
||||||
|
div.admonition::after,
|
||||||
|
blockquote::after {
|
||||||
|
display: block;
|
||||||
|
content: '';
|
||||||
|
clear: both;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- tables ---------------------------------------------------------------- */
|
||||||
|
|
||||||
|
table.docutils {
|
||||||
|
margin-top: 10px;
|
||||||
|
margin-bottom: 10px;
|
||||||
|
border: 0;
|
||||||
|
border-collapse: collapse;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.align-center {
|
||||||
|
margin-left: auto;
|
||||||
|
margin-right: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.align-default {
|
||||||
|
margin-left: auto;
|
||||||
|
margin-right: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
table caption span.caption-number {
|
||||||
|
font-style: italic;
|
||||||
|
}
|
||||||
|
|
||||||
|
table caption span.caption-text {
|
||||||
|
}
|
||||||
|
|
||||||
|
table.docutils td, table.docutils th {
|
||||||
|
padding: 1px 8px 1px 5px;
|
||||||
|
border-top: 0;
|
||||||
|
border-left: 0;
|
||||||
|
border-right: 0;
|
||||||
|
border-bottom: 1px solid #aaa;
|
||||||
|
}
|
||||||
|
|
||||||
|
th {
|
||||||
|
text-align: left;
|
||||||
|
padding-right: 5px;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.citation {
|
||||||
|
border-left: solid 1px gray;
|
||||||
|
margin-left: 1px;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.citation td {
|
||||||
|
border-bottom: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
th > :first-child,
|
||||||
|
td > :first-child {
|
||||||
|
margin-top: 0px;
|
||||||
|
}
|
||||||
|
|
||||||
|
th > :last-child,
|
||||||
|
td > :last-child {
|
||||||
|
margin-bottom: 0px;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- figures --------------------------------------------------------------- */
|
||||||
|
|
||||||
|
div.figure, figure {
|
||||||
|
margin: 0.5em;
|
||||||
|
padding: 0.5em;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.figure p.caption, figcaption {
|
||||||
|
padding: 0.3em;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.figure p.caption span.caption-number,
|
||||||
|
figcaption span.caption-number {
|
||||||
|
font-style: italic;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.figure p.caption span.caption-text,
|
||||||
|
figcaption span.caption-text {
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- field list styles ----------------------------------------------------- */
|
||||||
|
|
||||||
|
table.field-list td, table.field-list th {
|
||||||
|
border: 0 !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.field-list ul {
|
||||||
|
margin: 0;
|
||||||
|
padding-left: 1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.field-list p {
|
||||||
|
margin: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.field-name {
|
||||||
|
-moz-hyphens: manual;
|
||||||
|
-ms-hyphens: manual;
|
||||||
|
-webkit-hyphens: manual;
|
||||||
|
hyphens: manual;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- hlist styles ---------------------------------------------------------- */
|
||||||
|
|
||||||
|
table.hlist {
|
||||||
|
margin: 1em 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.hlist td {
|
||||||
|
vertical-align: top;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- object description styles --------------------------------------------- */
|
||||||
|
|
||||||
|
.sig {
|
||||||
|
font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sig-name, code.descname {
|
||||||
|
background-color: transparent;
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sig-name {
|
||||||
|
font-size: 1.1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
code.descname {
|
||||||
|
font-size: 1.2em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sig-prename, code.descclassname {
|
||||||
|
background-color: transparent;
|
||||||
|
}
|
||||||
|
|
||||||
|
.optional {
|
||||||
|
font-size: 1.3em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sig-paren {
|
||||||
|
font-size: larger;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sig-param.n {
|
||||||
|
font-style: italic;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* C++ specific styling */
|
||||||
|
|
||||||
|
.sig-inline.c-texpr,
|
||||||
|
.sig-inline.cpp-texpr {
|
||||||
|
font-family: unset;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sig.c .k, .sig.c .kt,
|
||||||
|
.sig.cpp .k, .sig.cpp .kt {
|
||||||
|
color: #0033B3;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sig.c .m,
|
||||||
|
.sig.cpp .m {
|
||||||
|
color: #1750EB;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sig.c .s, .sig.c .sc,
|
||||||
|
.sig.cpp .s, .sig.cpp .sc {
|
||||||
|
color: #067D17;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/* -- other body styles ----------------------------------------------------- */
|
||||||
|
|
||||||
|
ol.arabic {
|
||||||
|
list-style: decimal;
|
||||||
|
}
|
||||||
|
|
||||||
|
ol.loweralpha {
|
||||||
|
list-style: lower-alpha;
|
||||||
|
}
|
||||||
|
|
||||||
|
ol.upperalpha {
|
||||||
|
list-style: upper-alpha;
|
||||||
|
}
|
||||||
|
|
||||||
|
ol.lowerroman {
|
||||||
|
list-style: lower-roman;
|
||||||
|
}
|
||||||
|
|
||||||
|
ol.upperroman {
|
||||||
|
list-style: upper-roman;
|
||||||
|
}
|
||||||
|
|
||||||
|
:not(li) > ol > li:first-child > :first-child,
|
||||||
|
:not(li) > ul > li:first-child > :first-child {
|
||||||
|
margin-top: 0px;
|
||||||
|
}
|
||||||
|
|
||||||
|
:not(li) > ol > li:last-child > :last-child,
|
||||||
|
:not(li) > ul > li:last-child > :last-child {
|
||||||
|
margin-bottom: 0px;
|
||||||
|
}
|
||||||
|
|
||||||
|
ol.simple ol p,
|
||||||
|
ol.simple ul p,
|
||||||
|
ul.simple ol p,
|
||||||
|
ul.simple ul p {
|
||||||
|
margin-top: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
ol.simple > li:not(:first-child) > p,
|
||||||
|
ul.simple > li:not(:first-child) > p {
|
||||||
|
margin-top: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
ol.simple p,
|
||||||
|
ul.simple p {
|
||||||
|
margin-bottom: 0;
|
||||||
|
}
|
||||||
|
aside.footnote > span,
|
||||||
|
div.citation > span {
|
||||||
|
float: left;
|
||||||
|
}
|
||||||
|
aside.footnote > span:last-of-type,
|
||||||
|
div.citation > span:last-of-type {
|
||||||
|
padding-right: 0.5em;
|
||||||
|
}
|
||||||
|
aside.footnote > p {
|
||||||
|
margin-left: 2em;
|
||||||
|
}
|
||||||
|
div.citation > p {
|
||||||
|
margin-left: 4em;
|
||||||
|
}
|
||||||
|
aside.footnote > p:last-of-type,
|
||||||
|
div.citation > p:last-of-type {
|
||||||
|
margin-bottom: 0em;
|
||||||
|
}
|
||||||
|
aside.footnote > p:last-of-type:after,
|
||||||
|
div.citation > p:last-of-type:after {
|
||||||
|
content: "";
|
||||||
|
clear: both;
|
||||||
|
}
|
||||||
|
|
||||||
|
dl.field-list {
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: fit-content(30%) auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
dl.field-list > dt {
|
||||||
|
font-weight: bold;
|
||||||
|
word-break: break-word;
|
||||||
|
padding-left: 0.5em;
|
||||||
|
padding-right: 5px;
|
||||||
|
}
|
||||||
|
|
||||||
|
dl.field-list > dd {
|
||||||
|
padding-left: 0.5em;
|
||||||
|
margin-top: 0em;
|
||||||
|
margin-left: 0em;
|
||||||
|
margin-bottom: 0em;
|
||||||
|
}
|
||||||
|
|
||||||
|
dl {
|
||||||
|
margin-bottom: 15px;
|
||||||
|
}
|
||||||
|
|
||||||
|
dd > :first-child {
|
||||||
|
margin-top: 0px;
|
||||||
|
}
|
||||||
|
|
||||||
|
dd ul, dd table {
|
||||||
|
margin-bottom: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
dd {
|
||||||
|
margin-top: 3px;
|
||||||
|
margin-bottom: 10px;
|
||||||
|
margin-left: 30px;
|
||||||
|
}
|
||||||
|
|
||||||
|
dl > dd:last-child,
|
||||||
|
dl > dd:last-child > :last-child {
|
||||||
|
margin-bottom: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
dt:target, span.highlighted {
|
||||||
|
background-color: #fbe54e;
|
||||||
|
}
|
||||||
|
|
||||||
|
rect.highlighted {
|
||||||
|
fill: #fbe54e;
|
||||||
|
}
|
||||||
|
|
||||||
|
dl.glossary dt {
|
||||||
|
font-weight: bold;
|
||||||
|
font-size: 1.1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.versionmodified {
|
||||||
|
font-style: italic;
|
||||||
|
}
|
||||||
|
|
||||||
|
.system-message {
|
||||||
|
background-color: #fda;
|
||||||
|
padding: 5px;
|
||||||
|
border: 3px solid red;
|
||||||
|
}
|
||||||
|
|
||||||
|
.footnote:target {
|
||||||
|
background-color: #ffa;
|
||||||
|
}
|
||||||
|
|
||||||
|
.line-block {
|
||||||
|
display: block;
|
||||||
|
margin-top: 1em;
|
||||||
|
margin-bottom: 1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.line-block .line-block {
|
||||||
|
margin-top: 0;
|
||||||
|
margin-bottom: 0;
|
||||||
|
margin-left: 1.5em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.guilabel, .menuselection {
|
||||||
|
font-family: sans-serif;
|
||||||
|
}
|
||||||
|
|
||||||
|
.accelerator {
|
||||||
|
text-decoration: underline;
|
||||||
|
}
|
||||||
|
|
||||||
|
.classifier {
|
||||||
|
font-style: oblique;
|
||||||
|
}
|
||||||
|
|
||||||
|
.classifier:before {
|
||||||
|
font-style: normal;
|
||||||
|
margin: 0 0.5em;
|
||||||
|
content: ":";
|
||||||
|
display: inline-block;
|
||||||
|
}
|
||||||
|
|
||||||
|
abbr, acronym {
|
||||||
|
border-bottom: dotted 1px;
|
||||||
|
cursor: help;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- code displays --------------------------------------------------------- */
|
||||||
|
|
||||||
|
pre {
|
||||||
|
overflow: auto;
|
||||||
|
overflow-y: hidden; /* fixes display issues on Chrome browsers */
|
||||||
|
}
|
||||||
|
|
||||||
|
pre, div[class*="highlight-"] {
|
||||||
|
clear: both;
|
||||||
|
}
|
||||||
|
|
||||||
|
span.pre {
|
||||||
|
-moz-hyphens: none;
|
||||||
|
-ms-hyphens: none;
|
||||||
|
-webkit-hyphens: none;
|
||||||
|
hyphens: none;
|
||||||
|
white-space: nowrap;
|
||||||
|
}
|
||||||
|
|
||||||
|
div[class*="highlight-"] {
|
||||||
|
margin: 1em 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
td.linenos pre {
|
||||||
|
border: 0;
|
||||||
|
background-color: transparent;
|
||||||
|
color: #aaa;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.highlighttable {
|
||||||
|
display: block;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.highlighttable tbody {
|
||||||
|
display: block;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.highlighttable tr {
|
||||||
|
display: flex;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.highlighttable td {
|
||||||
|
margin: 0;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.highlighttable td.linenos {
|
||||||
|
padding-right: 0.5em;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.highlighttable td.code {
|
||||||
|
flex: 1;
|
||||||
|
overflow: hidden;
|
||||||
|
}
|
||||||
|
|
||||||
|
.highlight .hll {
|
||||||
|
display: block;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.highlight pre,
|
||||||
|
table.highlighttable pre {
|
||||||
|
margin: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.code-block-caption + div {
|
||||||
|
margin-top: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.code-block-caption {
|
||||||
|
margin-top: 1em;
|
||||||
|
padding: 2px 5px;
|
||||||
|
font-size: small;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.code-block-caption code {
|
||||||
|
background-color: transparent;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.highlighttable td.linenos,
|
||||||
|
span.linenos,
|
||||||
|
div.highlight span.gp { /* gp: Generic.Prompt */
|
||||||
|
user-select: none;
|
||||||
|
-webkit-user-select: text; /* Safari fallback only */
|
||||||
|
-webkit-user-select: none; /* Chrome/Safari */
|
||||||
|
-moz-user-select: none; /* Firefox */
|
||||||
|
-ms-user-select: none; /* IE10+ */
|
||||||
|
}
|
||||||
|
|
||||||
|
div.code-block-caption span.caption-number {
|
||||||
|
padding: 0.1em 0.3em;
|
||||||
|
font-style: italic;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.code-block-caption span.caption-text {
|
||||||
|
}
|
||||||
|
|
||||||
|
div.literal-block-wrapper {
|
||||||
|
margin: 1em 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
code.xref, a code {
|
||||||
|
background-color: transparent;
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
|
||||||
|
h1 code, h2 code, h3 code, h4 code, h5 code, h6 code {
|
||||||
|
background-color: transparent;
|
||||||
|
}
|
||||||
|
|
||||||
|
.viewcode-link {
|
||||||
|
float: right;
|
||||||
|
}
|
||||||
|
|
||||||
|
.viewcode-back {
|
||||||
|
float: right;
|
||||||
|
font-family: sans-serif;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.viewcode-block:target {
|
||||||
|
margin: -1px -10px;
|
||||||
|
padding: 0 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- math display ---------------------------------------------------------- */
|
||||||
|
|
||||||
|
img.math {
|
||||||
|
vertical-align: middle;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.body div.math p {
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
span.eqno {
|
||||||
|
float: right;
|
||||||
|
}
|
||||||
|
|
||||||
|
span.eqno a.headerlink {
|
||||||
|
position: absolute;
|
||||||
|
z-index: 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.math:hover a.headerlink {
|
||||||
|
visibility: visible;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* -- printout stylesheet --------------------------------------------------- */
|
||||||
|
|
||||||
|
@media print {
|
||||||
|
div.document,
|
||||||
|
div.documentwrapper,
|
||||||
|
div.bodywrapper {
|
||||||
|
margin: 0 !important;
|
||||||
|
width: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar,
|
||||||
|
div.related,
|
||||||
|
div.footer,
|
||||||
|
#top-link {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
}
|
||||||
1
core/dbt/docs/build/html/_static/custom.css
vendored
Normal file
1
core/dbt/docs/build/html/_static/custom.css
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
/* This file intentionally left blank. */
|
||||||
156
core/dbt/docs/build/html/_static/doctools.js
vendored
Normal file
156
core/dbt/docs/build/html/_static/doctools.js
vendored
Normal file
@@ -0,0 +1,156 @@
|
|||||||
|
/*
|
||||||
|
* doctools.js
|
||||||
|
* ~~~~~~~~~~~
|
||||||
|
*
|
||||||
|
* Base JavaScript utilities for all Sphinx HTML documentation.
|
||||||
|
*
|
||||||
|
* :copyright: Copyright 2007-2022 by the Sphinx team, see AUTHORS.
|
||||||
|
* :license: BSD, see LICENSE for details.
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
"use strict";
|
||||||
|
|
||||||
|
const BLACKLISTED_KEY_CONTROL_ELEMENTS = new Set([
|
||||||
|
"TEXTAREA",
|
||||||
|
"INPUT",
|
||||||
|
"SELECT",
|
||||||
|
"BUTTON",
|
||||||
|
]);
|
||||||
|
|
||||||
|
const _ready = (callback) => {
|
||||||
|
if (document.readyState !== "loading") {
|
||||||
|
callback();
|
||||||
|
} else {
|
||||||
|
document.addEventListener("DOMContentLoaded", callback);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Small JavaScript module for the documentation.
|
||||||
|
*/
|
||||||
|
const Documentation = {
|
||||||
|
init: () => {
|
||||||
|
Documentation.initDomainIndexTable();
|
||||||
|
Documentation.initOnKeyListeners();
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* i18n support
|
||||||
|
*/
|
||||||
|
TRANSLATIONS: {},
|
||||||
|
PLURAL_EXPR: (n) => (n === 1 ? 0 : 1),
|
||||||
|
LOCALE: "unknown",
|
||||||
|
|
||||||
|
// gettext and ngettext don't access this so that the functions
|
||||||
|
// can safely bound to a different name (_ = Documentation.gettext)
|
||||||
|
gettext: (string) => {
|
||||||
|
const translated = Documentation.TRANSLATIONS[string];
|
||||||
|
switch (typeof translated) {
|
||||||
|
case "undefined":
|
||||||
|
return string; // no translation
|
||||||
|
case "string":
|
||||||
|
return translated; // translation exists
|
||||||
|
default:
|
||||||
|
return translated[0]; // (singular, plural) translation tuple exists
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
ngettext: (singular, plural, n) => {
|
||||||
|
const translated = Documentation.TRANSLATIONS[singular];
|
||||||
|
if (typeof translated !== "undefined")
|
||||||
|
return translated[Documentation.PLURAL_EXPR(n)];
|
||||||
|
return n === 1 ? singular : plural;
|
||||||
|
},
|
||||||
|
|
||||||
|
addTranslations: (catalog) => {
|
||||||
|
Object.assign(Documentation.TRANSLATIONS, catalog.messages);
|
||||||
|
Documentation.PLURAL_EXPR = new Function(
|
||||||
|
"n",
|
||||||
|
`return (${catalog.plural_expr})`
|
||||||
|
);
|
||||||
|
Documentation.LOCALE = catalog.locale;
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* helper function to focus on search bar
|
||||||
|
*/
|
||||||
|
focusSearchBar: () => {
|
||||||
|
document.querySelectorAll("input[name=q]")[0]?.focus();
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initialise the domain index toggle buttons
|
||||||
|
*/
|
||||||
|
initDomainIndexTable: () => {
|
||||||
|
const toggler = (el) => {
|
||||||
|
const idNumber = el.id.substr(7);
|
||||||
|
const toggledRows = document.querySelectorAll(`tr.cg-${idNumber}`);
|
||||||
|
if (el.src.substr(-9) === "minus.png") {
|
||||||
|
el.src = `${el.src.substr(0, el.src.length - 9)}plus.png`;
|
||||||
|
toggledRows.forEach((el) => (el.style.display = "none"));
|
||||||
|
} else {
|
||||||
|
el.src = `${el.src.substr(0, el.src.length - 8)}minus.png`;
|
||||||
|
toggledRows.forEach((el) => (el.style.display = ""));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const togglerElements = document.querySelectorAll("img.toggler");
|
||||||
|
togglerElements.forEach((el) =>
|
||||||
|
el.addEventListener("click", (event) => toggler(event.currentTarget))
|
||||||
|
);
|
||||||
|
togglerElements.forEach((el) => (el.style.display = ""));
|
||||||
|
if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler);
|
||||||
|
},
|
||||||
|
|
||||||
|
initOnKeyListeners: () => {
|
||||||
|
// only install a listener if it is really needed
|
||||||
|
if (
|
||||||
|
!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS &&
|
||||||
|
!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS
|
||||||
|
)
|
||||||
|
return;
|
||||||
|
|
||||||
|
document.addEventListener("keydown", (event) => {
|
||||||
|
// bail for input elements
|
||||||
|
if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return;
|
||||||
|
// bail with special keys
|
||||||
|
if (event.altKey || event.ctrlKey || event.metaKey) return;
|
||||||
|
|
||||||
|
if (!event.shiftKey) {
|
||||||
|
switch (event.key) {
|
||||||
|
case "ArrowLeft":
|
||||||
|
if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break;
|
||||||
|
|
||||||
|
const prevLink = document.querySelector('link[rel="prev"]');
|
||||||
|
if (prevLink && prevLink.href) {
|
||||||
|
window.location.href = prevLink.href;
|
||||||
|
event.preventDefault();
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
case "ArrowRight":
|
||||||
|
if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break;
|
||||||
|
|
||||||
|
const nextLink = document.querySelector('link[rel="next"]');
|
||||||
|
if (nextLink && nextLink.href) {
|
||||||
|
window.location.href = nextLink.href;
|
||||||
|
event.preventDefault();
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// some keyboard layouts may need Shift to get /
|
||||||
|
switch (event.key) {
|
||||||
|
case "/":
|
||||||
|
if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) break;
|
||||||
|
Documentation.focusSearchBar();
|
||||||
|
event.preventDefault();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
// quick alias for translations
|
||||||
|
const _ = Documentation.gettext;
|
||||||
|
|
||||||
|
_ready(Documentation.init);
|
||||||
14
core/dbt/docs/build/html/_static/documentation_options.js
vendored
Normal file
14
core/dbt/docs/build/html/_static/documentation_options.js
vendored
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
var DOCUMENTATION_OPTIONS = {
|
||||||
|
URL_ROOT: document.getElementById("documentation_options").getAttribute('data-url_root'),
|
||||||
|
VERSION: '',
|
||||||
|
LANGUAGE: 'en',
|
||||||
|
COLLAPSE_INDEX: false,
|
||||||
|
BUILDER: 'html',
|
||||||
|
FILE_SUFFIX: '.html',
|
||||||
|
LINK_SUFFIX: '.html',
|
||||||
|
HAS_SOURCE: true,
|
||||||
|
SOURCELINK_SUFFIX: '.txt',
|
||||||
|
NAVIGATION_WITH_KEYS: false,
|
||||||
|
SHOW_SEARCH_SUMMARY: true,
|
||||||
|
ENABLE_SEARCH_SHORTCUTS: true,
|
||||||
|
};
|
||||||
BIN
core/dbt/docs/build/html/_static/file.png
vendored
Normal file
BIN
core/dbt/docs/build/html/_static/file.png
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 286 B |
10881
core/dbt/docs/build/html/_static/jquery-3.6.0.js
vendored
Normal file
10881
core/dbt/docs/build/html/_static/jquery-3.6.0.js
vendored
Normal file
File diff suppressed because it is too large
Load Diff
2
core/dbt/docs/build/html/_static/jquery.js
vendored
Normal file
2
core/dbt/docs/build/html/_static/jquery.js
vendored
Normal file
File diff suppressed because one or more lines are too long
199
core/dbt/docs/build/html/_static/language_data.js
vendored
Normal file
199
core/dbt/docs/build/html/_static/language_data.js
vendored
Normal file
@@ -0,0 +1,199 @@
|
|||||||
|
/*
|
||||||
|
* language_data.js
|
||||||
|
* ~~~~~~~~~~~~~~~~
|
||||||
|
*
|
||||||
|
* This script contains the language-specific data used by searchtools.js,
|
||||||
|
* namely the list of stopwords, stemmer, scorer and splitter.
|
||||||
|
*
|
||||||
|
* :copyright: Copyright 2007-2022 by the Sphinx team, see AUTHORS.
|
||||||
|
* :license: BSD, see LICENSE for details.
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
|
||||||
|
var stopwords = ["a", "and", "are", "as", "at", "be", "but", "by", "for", "if", "in", "into", "is", "it", "near", "no", "not", "of", "on", "or", "such", "that", "the", "their", "then", "there", "these", "they", "this", "to", "was", "will", "with"];
|
||||||
|
|
||||||
|
|
||||||
|
/* Non-minified version is copied as a separate JS file, is available */
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Porter Stemmer
|
||||||
|
*/
|
||||||
|
var Stemmer = function() {
|
||||||
|
|
||||||
|
var step2list = {
|
||||||
|
ational: 'ate',
|
||||||
|
tional: 'tion',
|
||||||
|
enci: 'ence',
|
||||||
|
anci: 'ance',
|
||||||
|
izer: 'ize',
|
||||||
|
bli: 'ble',
|
||||||
|
alli: 'al',
|
||||||
|
entli: 'ent',
|
||||||
|
eli: 'e',
|
||||||
|
ousli: 'ous',
|
||||||
|
ization: 'ize',
|
||||||
|
ation: 'ate',
|
||||||
|
ator: 'ate',
|
||||||
|
alism: 'al',
|
||||||
|
iveness: 'ive',
|
||||||
|
fulness: 'ful',
|
||||||
|
ousness: 'ous',
|
||||||
|
aliti: 'al',
|
||||||
|
iviti: 'ive',
|
||||||
|
biliti: 'ble',
|
||||||
|
logi: 'log'
|
||||||
|
};
|
||||||
|
|
||||||
|
var step3list = {
|
||||||
|
icate: 'ic',
|
||||||
|
ative: '',
|
||||||
|
alize: 'al',
|
||||||
|
iciti: 'ic',
|
||||||
|
ical: 'ic',
|
||||||
|
ful: '',
|
||||||
|
ness: ''
|
||||||
|
};
|
||||||
|
|
||||||
|
var c = "[^aeiou]"; // consonant
|
||||||
|
var v = "[aeiouy]"; // vowel
|
||||||
|
var C = c + "[^aeiouy]*"; // consonant sequence
|
||||||
|
var V = v + "[aeiou]*"; // vowel sequence
|
||||||
|
|
||||||
|
var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0
|
||||||
|
var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1
|
||||||
|
var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1
|
||||||
|
var s_v = "^(" + C + ")?" + v; // vowel in stem
|
||||||
|
|
||||||
|
this.stemWord = function (w) {
|
||||||
|
var stem;
|
||||||
|
var suffix;
|
||||||
|
var firstch;
|
||||||
|
var origword = w;
|
||||||
|
|
||||||
|
if (w.length < 3)
|
||||||
|
return w;
|
||||||
|
|
||||||
|
var re;
|
||||||
|
var re2;
|
||||||
|
var re3;
|
||||||
|
var re4;
|
||||||
|
|
||||||
|
firstch = w.substr(0,1);
|
||||||
|
if (firstch == "y")
|
||||||
|
w = firstch.toUpperCase() + w.substr(1);
|
||||||
|
|
||||||
|
// Step 1a
|
||||||
|
re = /^(.+?)(ss|i)es$/;
|
||||||
|
re2 = /^(.+?)([^s])s$/;
|
||||||
|
|
||||||
|
if (re.test(w))
|
||||||
|
w = w.replace(re,"$1$2");
|
||||||
|
else if (re2.test(w))
|
||||||
|
w = w.replace(re2,"$1$2");
|
||||||
|
|
||||||
|
// Step 1b
|
||||||
|
re = /^(.+?)eed$/;
|
||||||
|
re2 = /^(.+?)(ed|ing)$/;
|
||||||
|
if (re.test(w)) {
|
||||||
|
var fp = re.exec(w);
|
||||||
|
re = new RegExp(mgr0);
|
||||||
|
if (re.test(fp[1])) {
|
||||||
|
re = /.$/;
|
||||||
|
w = w.replace(re,"");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else if (re2.test(w)) {
|
||||||
|
var fp = re2.exec(w);
|
||||||
|
stem = fp[1];
|
||||||
|
re2 = new RegExp(s_v);
|
||||||
|
if (re2.test(stem)) {
|
||||||
|
w = stem;
|
||||||
|
re2 = /(at|bl|iz)$/;
|
||||||
|
re3 = new RegExp("([^aeiouylsz])\\1$");
|
||||||
|
re4 = new RegExp("^" + C + v + "[^aeiouwxy]$");
|
||||||
|
if (re2.test(w))
|
||||||
|
w = w + "e";
|
||||||
|
else if (re3.test(w)) {
|
||||||
|
re = /.$/;
|
||||||
|
w = w.replace(re,"");
|
||||||
|
}
|
||||||
|
else if (re4.test(w))
|
||||||
|
w = w + "e";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 1c
|
||||||
|
re = /^(.+?)y$/;
|
||||||
|
if (re.test(w)) {
|
||||||
|
var fp = re.exec(w);
|
||||||
|
stem = fp[1];
|
||||||
|
re = new RegExp(s_v);
|
||||||
|
if (re.test(stem))
|
||||||
|
w = stem + "i";
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 2
|
||||||
|
re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/;
|
||||||
|
if (re.test(w)) {
|
||||||
|
var fp = re.exec(w);
|
||||||
|
stem = fp[1];
|
||||||
|
suffix = fp[2];
|
||||||
|
re = new RegExp(mgr0);
|
||||||
|
if (re.test(stem))
|
||||||
|
w = stem + step2list[suffix];
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 3
|
||||||
|
re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/;
|
||||||
|
if (re.test(w)) {
|
||||||
|
var fp = re.exec(w);
|
||||||
|
stem = fp[1];
|
||||||
|
suffix = fp[2];
|
||||||
|
re = new RegExp(mgr0);
|
||||||
|
if (re.test(stem))
|
||||||
|
w = stem + step3list[suffix];
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 4
|
||||||
|
re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/;
|
||||||
|
re2 = /^(.+?)(s|t)(ion)$/;
|
||||||
|
if (re.test(w)) {
|
||||||
|
var fp = re.exec(w);
|
||||||
|
stem = fp[1];
|
||||||
|
re = new RegExp(mgr1);
|
||||||
|
if (re.test(stem))
|
||||||
|
w = stem;
|
||||||
|
}
|
||||||
|
else if (re2.test(w)) {
|
||||||
|
var fp = re2.exec(w);
|
||||||
|
stem = fp[1] + fp[2];
|
||||||
|
re2 = new RegExp(mgr1);
|
||||||
|
if (re2.test(stem))
|
||||||
|
w = stem;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 5
|
||||||
|
re = /^(.+?)e$/;
|
||||||
|
if (re.test(w)) {
|
||||||
|
var fp = re.exec(w);
|
||||||
|
stem = fp[1];
|
||||||
|
re = new RegExp(mgr1);
|
||||||
|
re2 = new RegExp(meq1);
|
||||||
|
re3 = new RegExp("^" + C + v + "[^aeiouwxy]$");
|
||||||
|
if (re.test(stem) || (re2.test(stem) && !(re3.test(stem))))
|
||||||
|
w = stem;
|
||||||
|
}
|
||||||
|
re = /ll$/;
|
||||||
|
re2 = new RegExp(mgr1);
|
||||||
|
if (re.test(w) && re2.test(w)) {
|
||||||
|
re = /.$/;
|
||||||
|
w = w.replace(re,"");
|
||||||
|
}
|
||||||
|
|
||||||
|
// and turn initial Y back to y
|
||||||
|
if (firstch == "y")
|
||||||
|
w = firstch.toLowerCase() + w.substr(1);
|
||||||
|
return w;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
BIN
core/dbt/docs/build/html/_static/minus.png
vendored
Normal file
BIN
core/dbt/docs/build/html/_static/minus.png
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 90 B |
BIN
core/dbt/docs/build/html/_static/plus.png
vendored
Normal file
BIN
core/dbt/docs/build/html/_static/plus.png
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 90 B |
83
core/dbt/docs/build/html/_static/pygments.css
vendored
Normal file
83
core/dbt/docs/build/html/_static/pygments.css
vendored
Normal file
@@ -0,0 +1,83 @@
|
|||||||
|
pre { line-height: 125%; }
|
||||||
|
td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; }
|
||||||
|
span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; }
|
||||||
|
td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; }
|
||||||
|
span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; }
|
||||||
|
.highlight .hll { background-color: #ffffcc }
|
||||||
|
.highlight { background: #f8f8f8; }
|
||||||
|
.highlight .c { color: #8f5902; font-style: italic } /* Comment */
|
||||||
|
.highlight .err { color: #a40000; border: 1px solid #ef2929 } /* Error */
|
||||||
|
.highlight .g { color: #000000 } /* Generic */
|
||||||
|
.highlight .k { color: #004461; font-weight: bold } /* Keyword */
|
||||||
|
.highlight .l { color: #000000 } /* Literal */
|
||||||
|
.highlight .n { color: #000000 } /* Name */
|
||||||
|
.highlight .o { color: #582800 } /* Operator */
|
||||||
|
.highlight .x { color: #000000 } /* Other */
|
||||||
|
.highlight .p { color: #000000; font-weight: bold } /* Punctuation */
|
||||||
|
.highlight .ch { color: #8f5902; font-style: italic } /* Comment.Hashbang */
|
||||||
|
.highlight .cm { color: #8f5902; font-style: italic } /* Comment.Multiline */
|
||||||
|
.highlight .cp { color: #8f5902 } /* Comment.Preproc */
|
||||||
|
.highlight .cpf { color: #8f5902; font-style: italic } /* Comment.PreprocFile */
|
||||||
|
.highlight .c1 { color: #8f5902; font-style: italic } /* Comment.Single */
|
||||||
|
.highlight .cs { color: #8f5902; font-style: italic } /* Comment.Special */
|
||||||
|
.highlight .gd { color: #a40000 } /* Generic.Deleted */
|
||||||
|
.highlight .ge { color: #000000; font-style: italic } /* Generic.Emph */
|
||||||
|
.highlight .gr { color: #ef2929 } /* Generic.Error */
|
||||||
|
.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */
|
||||||
|
.highlight .gi { color: #00A000 } /* Generic.Inserted */
|
||||||
|
.highlight .go { color: #888888 } /* Generic.Output */
|
||||||
|
.highlight .gp { color: #745334 } /* Generic.Prompt */
|
||||||
|
.highlight .gs { color: #000000; font-weight: bold } /* Generic.Strong */
|
||||||
|
.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */
|
||||||
|
.highlight .gt { color: #a40000; font-weight: bold } /* Generic.Traceback */
|
||||||
|
.highlight .kc { color: #004461; font-weight: bold } /* Keyword.Constant */
|
||||||
|
.highlight .kd { color: #004461; font-weight: bold } /* Keyword.Declaration */
|
||||||
|
.highlight .kn { color: #004461; font-weight: bold } /* Keyword.Namespace */
|
||||||
|
.highlight .kp { color: #004461; font-weight: bold } /* Keyword.Pseudo */
|
||||||
|
.highlight .kr { color: #004461; font-weight: bold } /* Keyword.Reserved */
|
||||||
|
.highlight .kt { color: #004461; font-weight: bold } /* Keyword.Type */
|
||||||
|
.highlight .ld { color: #000000 } /* Literal.Date */
|
||||||
|
.highlight .m { color: #990000 } /* Literal.Number */
|
||||||
|
.highlight .s { color: #4e9a06 } /* Literal.String */
|
||||||
|
.highlight .na { color: #c4a000 } /* Name.Attribute */
|
||||||
|
.highlight .nb { color: #004461 } /* Name.Builtin */
|
||||||
|
.highlight .nc { color: #000000 } /* Name.Class */
|
||||||
|
.highlight .no { color: #000000 } /* Name.Constant */
|
||||||
|
.highlight .nd { color: #888888 } /* Name.Decorator */
|
||||||
|
.highlight .ni { color: #ce5c00 } /* Name.Entity */
|
||||||
|
.highlight .ne { color: #cc0000; font-weight: bold } /* Name.Exception */
|
||||||
|
.highlight .nf { color: #000000 } /* Name.Function */
|
||||||
|
.highlight .nl { color: #f57900 } /* Name.Label */
|
||||||
|
.highlight .nn { color: #000000 } /* Name.Namespace */
|
||||||
|
.highlight .nx { color: #000000 } /* Name.Other */
|
||||||
|
.highlight .py { color: #000000 } /* Name.Property */
|
||||||
|
.highlight .nt { color: #004461; font-weight: bold } /* Name.Tag */
|
||||||
|
.highlight .nv { color: #000000 } /* Name.Variable */
|
||||||
|
.highlight .ow { color: #004461; font-weight: bold } /* Operator.Word */
|
||||||
|
.highlight .pm { color: #000000; font-weight: bold } /* Punctuation.Marker */
|
||||||
|
.highlight .w { color: #f8f8f8; text-decoration: underline } /* Text.Whitespace */
|
||||||
|
.highlight .mb { color: #990000 } /* Literal.Number.Bin */
|
||||||
|
.highlight .mf { color: #990000 } /* Literal.Number.Float */
|
||||||
|
.highlight .mh { color: #990000 } /* Literal.Number.Hex */
|
||||||
|
.highlight .mi { color: #990000 } /* Literal.Number.Integer */
|
||||||
|
.highlight .mo { color: #990000 } /* Literal.Number.Oct */
|
||||||
|
.highlight .sa { color: #4e9a06 } /* Literal.String.Affix */
|
||||||
|
.highlight .sb { color: #4e9a06 } /* Literal.String.Backtick */
|
||||||
|
.highlight .sc { color: #4e9a06 } /* Literal.String.Char */
|
||||||
|
.highlight .dl { color: #4e9a06 } /* Literal.String.Delimiter */
|
||||||
|
.highlight .sd { color: #8f5902; font-style: italic } /* Literal.String.Doc */
|
||||||
|
.highlight .s2 { color: #4e9a06 } /* Literal.String.Double */
|
||||||
|
.highlight .se { color: #4e9a06 } /* Literal.String.Escape */
|
||||||
|
.highlight .sh { color: #4e9a06 } /* Literal.String.Heredoc */
|
||||||
|
.highlight .si { color: #4e9a06 } /* Literal.String.Interpol */
|
||||||
|
.highlight .sx { color: #4e9a06 } /* Literal.String.Other */
|
||||||
|
.highlight .sr { color: #4e9a06 } /* Literal.String.Regex */
|
||||||
|
.highlight .s1 { color: #4e9a06 } /* Literal.String.Single */
|
||||||
|
.highlight .ss { color: #4e9a06 } /* Literal.String.Symbol */
|
||||||
|
.highlight .bp { color: #3465a4 } /* Name.Builtin.Pseudo */
|
||||||
|
.highlight .fm { color: #000000 } /* Name.Function.Magic */
|
||||||
|
.highlight .vc { color: #000000 } /* Name.Variable.Class */
|
||||||
|
.highlight .vg { color: #000000 } /* Name.Variable.Global */
|
||||||
|
.highlight .vi { color: #000000 } /* Name.Variable.Instance */
|
||||||
|
.highlight .vm { color: #000000 } /* Name.Variable.Magic */
|
||||||
|
.highlight .il { color: #990000 } /* Literal.Number.Integer.Long */
|
||||||
566
core/dbt/docs/build/html/_static/searchtools.js
vendored
Normal file
566
core/dbt/docs/build/html/_static/searchtools.js
vendored
Normal file
@@ -0,0 +1,566 @@
|
|||||||
|
/*
|
||||||
|
* searchtools.js
|
||||||
|
* ~~~~~~~~~~~~~~~~
|
||||||
|
*
|
||||||
|
* Sphinx JavaScript utilities for the full-text search.
|
||||||
|
*
|
||||||
|
* :copyright: Copyright 2007-2022 by the Sphinx team, see AUTHORS.
|
||||||
|
* :license: BSD, see LICENSE for details.
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
"use strict";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Simple result scoring code.
|
||||||
|
*/
|
||||||
|
if (typeof Scorer === "undefined") {
|
||||||
|
var Scorer = {
|
||||||
|
// Implement the following function to further tweak the score for each result
|
||||||
|
// The function takes a result array [docname, title, anchor, descr, score, filename]
|
||||||
|
// and returns the new score.
|
||||||
|
/*
|
||||||
|
score: result => {
|
||||||
|
const [docname, title, anchor, descr, score, filename] = result
|
||||||
|
return score
|
||||||
|
},
|
||||||
|
*/
|
||||||
|
|
||||||
|
// query matches the full name of an object
|
||||||
|
objNameMatch: 11,
|
||||||
|
// or matches in the last dotted part of the object name
|
||||||
|
objPartialMatch: 6,
|
||||||
|
// Additive scores depending on the priority of the object
|
||||||
|
objPrio: {
|
||||||
|
0: 15, // used to be importantResults
|
||||||
|
1: 5, // used to be objectResults
|
||||||
|
2: -5, // used to be unimportantResults
|
||||||
|
},
|
||||||
|
// Used when the priority is not in the mapping.
|
||||||
|
objPrioDefault: 0,
|
||||||
|
|
||||||
|
// query found in title
|
||||||
|
title: 15,
|
||||||
|
partialTitle: 7,
|
||||||
|
// query found in terms
|
||||||
|
term: 5,
|
||||||
|
partialTerm: 2,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const _removeChildren = (element) => {
|
||||||
|
while (element && element.lastChild) element.removeChild(element.lastChild);
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#escaping
|
||||||
|
*/
|
||||||
|
const _escapeRegExp = (string) =>
|
||||||
|
string.replace(/[.*+\-?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string
|
||||||
|
|
||||||
|
const _displayItem = (item, searchTerms) => {
|
||||||
|
const docBuilder = DOCUMENTATION_OPTIONS.BUILDER;
|
||||||
|
const docUrlRoot = DOCUMENTATION_OPTIONS.URL_ROOT;
|
||||||
|
const docFileSuffix = DOCUMENTATION_OPTIONS.FILE_SUFFIX;
|
||||||
|
const docLinkSuffix = DOCUMENTATION_OPTIONS.LINK_SUFFIX;
|
||||||
|
const showSearchSummary = DOCUMENTATION_OPTIONS.SHOW_SEARCH_SUMMARY;
|
||||||
|
|
||||||
|
const [docName, title, anchor, descr, score, _filename] = item;
|
||||||
|
|
||||||
|
let listItem = document.createElement("li");
|
||||||
|
let requestUrl;
|
||||||
|
let linkUrl;
|
||||||
|
if (docBuilder === "dirhtml") {
|
||||||
|
// dirhtml builder
|
||||||
|
let dirname = docName + "/";
|
||||||
|
if (dirname.match(/\/index\/$/))
|
||||||
|
dirname = dirname.substring(0, dirname.length - 6);
|
||||||
|
else if (dirname === "index/") dirname = "";
|
||||||
|
requestUrl = docUrlRoot + dirname;
|
||||||
|
linkUrl = requestUrl;
|
||||||
|
} else {
|
||||||
|
// normal html builders
|
||||||
|
requestUrl = docUrlRoot + docName + docFileSuffix;
|
||||||
|
linkUrl = docName + docLinkSuffix;
|
||||||
|
}
|
||||||
|
let linkEl = listItem.appendChild(document.createElement("a"));
|
||||||
|
linkEl.href = linkUrl + anchor;
|
||||||
|
linkEl.dataset.score = score;
|
||||||
|
linkEl.innerHTML = title;
|
||||||
|
if (descr)
|
||||||
|
listItem.appendChild(document.createElement("span")).innerHTML =
|
||||||
|
" (" + descr + ")";
|
||||||
|
else if (showSearchSummary)
|
||||||
|
fetch(requestUrl)
|
||||||
|
.then((responseData) => responseData.text())
|
||||||
|
.then((data) => {
|
||||||
|
if (data)
|
||||||
|
listItem.appendChild(
|
||||||
|
Search.makeSearchSummary(data, searchTerms)
|
||||||
|
);
|
||||||
|
});
|
||||||
|
Search.output.appendChild(listItem);
|
||||||
|
};
|
||||||
|
const _finishSearch = (resultCount) => {
|
||||||
|
Search.stopPulse();
|
||||||
|
Search.title.innerText = _("Search Results");
|
||||||
|
if (!resultCount)
|
||||||
|
Search.status.innerText = Documentation.gettext(
|
||||||
|
"Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories."
|
||||||
|
);
|
||||||
|
else
|
||||||
|
Search.status.innerText = _(
|
||||||
|
`Search finished, found ${resultCount} page(s) matching the search query.`
|
||||||
|
);
|
||||||
|
};
|
||||||
|
const _displayNextItem = (
|
||||||
|
results,
|
||||||
|
resultCount,
|
||||||
|
searchTerms
|
||||||
|
) => {
|
||||||
|
// results left, load the summary and display it
|
||||||
|
// this is intended to be dynamic (don't sub resultsCount)
|
||||||
|
if (results.length) {
|
||||||
|
_displayItem(results.pop(), searchTerms);
|
||||||
|
setTimeout(
|
||||||
|
() => _displayNextItem(results, resultCount, searchTerms),
|
||||||
|
5
|
||||||
|
);
|
||||||
|
}
|
||||||
|
// search finished, update title and status message
|
||||||
|
else _finishSearch(resultCount);
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Default splitQuery function. Can be overridden in ``sphinx.search`` with a
|
||||||
|
* custom function per language.
|
||||||
|
*
|
||||||
|
* The regular expression works by splitting the string on consecutive characters
|
||||||
|
* that are not Unicode letters, numbers, underscores, or emoji characters.
|
||||||
|
* This is the same as ``\W+`` in Python, preserving the surrogate pair area.
|
||||||
|
*/
|
||||||
|
if (typeof splitQuery === "undefined") {
|
||||||
|
var splitQuery = (query) => query
|
||||||
|
.split(/[^\p{Letter}\p{Number}_\p{Emoji_Presentation}]+/gu)
|
||||||
|
.filter(term => term) // remove remaining empty strings
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Search Module
|
||||||
|
*/
|
||||||
|
const Search = {
|
||||||
|
_index: null,
|
||||||
|
_queued_query: null,
|
||||||
|
_pulse_status: -1,
|
||||||
|
|
||||||
|
htmlToText: (htmlString) => {
|
||||||
|
const htmlElement = new DOMParser().parseFromString(htmlString, 'text/html');
|
||||||
|
htmlElement.querySelectorAll(".headerlink").forEach((el) => { el.remove() });
|
||||||
|
const docContent = htmlElement.querySelector('[role="main"]');
|
||||||
|
if (docContent !== undefined) return docContent.textContent;
|
||||||
|
console.warn(
|
||||||
|
"Content block not found. Sphinx search tries to obtain it via '[role=main]'. Could you check your theme or template."
|
||||||
|
);
|
||||||
|
return "";
|
||||||
|
},
|
||||||
|
|
||||||
|
init: () => {
|
||||||
|
const query = new URLSearchParams(window.location.search).get("q");
|
||||||
|
document
|
||||||
|
.querySelectorAll('input[name="q"]')
|
||||||
|
.forEach((el) => (el.value = query));
|
||||||
|
if (query) Search.performSearch(query);
|
||||||
|
},
|
||||||
|
|
||||||
|
loadIndex: (url) =>
|
||||||
|
(document.body.appendChild(document.createElement("script")).src = url),
|
||||||
|
|
||||||
|
setIndex: (index) => {
|
||||||
|
Search._index = index;
|
||||||
|
if (Search._queued_query !== null) {
|
||||||
|
const query = Search._queued_query;
|
||||||
|
Search._queued_query = null;
|
||||||
|
Search.query(query);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
hasIndex: () => Search._index !== null,
|
||||||
|
|
||||||
|
deferQuery: (query) => (Search._queued_query = query),
|
||||||
|
|
||||||
|
stopPulse: () => (Search._pulse_status = -1),
|
||||||
|
|
||||||
|
startPulse: () => {
|
||||||
|
if (Search._pulse_status >= 0) return;
|
||||||
|
|
||||||
|
const pulse = () => {
|
||||||
|
Search._pulse_status = (Search._pulse_status + 1) % 4;
|
||||||
|
Search.dots.innerText = ".".repeat(Search._pulse_status);
|
||||||
|
if (Search._pulse_status >= 0) window.setTimeout(pulse, 500);
|
||||||
|
};
|
||||||
|
pulse();
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* perform a search for something (or wait until index is loaded)
|
||||||
|
*/
|
||||||
|
performSearch: (query) => {
|
||||||
|
// create the required interface elements
|
||||||
|
const searchText = document.createElement("h2");
|
||||||
|
searchText.textContent = _("Searching");
|
||||||
|
const searchSummary = document.createElement("p");
|
||||||
|
searchSummary.classList.add("search-summary");
|
||||||
|
searchSummary.innerText = "";
|
||||||
|
const searchList = document.createElement("ul");
|
||||||
|
searchList.classList.add("search");
|
||||||
|
|
||||||
|
const out = document.getElementById("search-results");
|
||||||
|
Search.title = out.appendChild(searchText);
|
||||||
|
Search.dots = Search.title.appendChild(document.createElement("span"));
|
||||||
|
Search.status = out.appendChild(searchSummary);
|
||||||
|
Search.output = out.appendChild(searchList);
|
||||||
|
|
||||||
|
const searchProgress = document.getElementById("search-progress");
|
||||||
|
// Some themes don't use the search progress node
|
||||||
|
if (searchProgress) {
|
||||||
|
searchProgress.innerText = _("Preparing search...");
|
||||||
|
}
|
||||||
|
Search.startPulse();
|
||||||
|
|
||||||
|
// index already loaded, the browser was quick!
|
||||||
|
if (Search.hasIndex()) Search.query(query);
|
||||||
|
else Search.deferQuery(query);
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* execute search (requires search index to be loaded)
|
||||||
|
*/
|
||||||
|
query: (query) => {
|
||||||
|
const filenames = Search._index.filenames;
|
||||||
|
const docNames = Search._index.docnames;
|
||||||
|
const titles = Search._index.titles;
|
||||||
|
const allTitles = Search._index.alltitles;
|
||||||
|
const indexEntries = Search._index.indexentries;
|
||||||
|
|
||||||
|
// stem the search terms and add them to the correct list
|
||||||
|
const stemmer = new Stemmer();
|
||||||
|
const searchTerms = new Set();
|
||||||
|
const excludedTerms = new Set();
|
||||||
|
const highlightTerms = new Set();
|
||||||
|
const objectTerms = new Set(splitQuery(query.toLowerCase().trim()));
|
||||||
|
splitQuery(query.trim()).forEach((queryTerm) => {
|
||||||
|
const queryTermLower = queryTerm.toLowerCase();
|
||||||
|
|
||||||
|
// maybe skip this "word"
|
||||||
|
// stopwords array is from language_data.js
|
||||||
|
if (
|
||||||
|
stopwords.indexOf(queryTermLower) !== -1 ||
|
||||||
|
queryTerm.match(/^\d+$/)
|
||||||
|
)
|
||||||
|
return;
|
||||||
|
|
||||||
|
// stem the word
|
||||||
|
let word = stemmer.stemWord(queryTermLower);
|
||||||
|
// select the correct list
|
||||||
|
if (word[0] === "-") excludedTerms.add(word.substr(1));
|
||||||
|
else {
|
||||||
|
searchTerms.add(word);
|
||||||
|
highlightTerms.add(queryTermLower);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
if (SPHINX_HIGHLIGHT_ENABLED) { // set in sphinx_highlight.js
|
||||||
|
localStorage.setItem("sphinx_highlight_terms", [...highlightTerms].join(" "))
|
||||||
|
}
|
||||||
|
|
||||||
|
// console.debug("SEARCH: searching for:");
|
||||||
|
// console.info("required: ", [...searchTerms]);
|
||||||
|
// console.info("excluded: ", [...excludedTerms]);
|
||||||
|
|
||||||
|
// array of [docname, title, anchor, descr, score, filename]
|
||||||
|
let results = [];
|
||||||
|
_removeChildren(document.getElementById("search-progress"));
|
||||||
|
|
||||||
|
const queryLower = query.toLowerCase();
|
||||||
|
for (const [title, foundTitles] of Object.entries(allTitles)) {
|
||||||
|
if (title.toLowerCase().includes(queryLower) && (queryLower.length >= title.length/2)) {
|
||||||
|
for (const [file, id] of foundTitles) {
|
||||||
|
let score = Math.round(100 * queryLower.length / title.length)
|
||||||
|
results.push([
|
||||||
|
docNames[file],
|
||||||
|
titles[file] !== title ? `${titles[file]} > ${title}` : title,
|
||||||
|
id !== null ? "#" + id : "",
|
||||||
|
null,
|
||||||
|
score,
|
||||||
|
filenames[file],
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// search for explicit entries in index directives
|
||||||
|
for (const [entry, foundEntries] of Object.entries(indexEntries)) {
|
||||||
|
if (entry.includes(queryLower) && (queryLower.length >= entry.length/2)) {
|
||||||
|
for (const [file, id] of foundEntries) {
|
||||||
|
let score = Math.round(100 * queryLower.length / entry.length)
|
||||||
|
results.push([
|
||||||
|
docNames[file],
|
||||||
|
titles[file],
|
||||||
|
id ? "#" + id : "",
|
||||||
|
null,
|
||||||
|
score,
|
||||||
|
filenames[file],
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// lookup as object
|
||||||
|
objectTerms.forEach((term) =>
|
||||||
|
results.push(...Search.performObjectSearch(term, objectTerms))
|
||||||
|
);
|
||||||
|
|
||||||
|
// lookup as search terms in fulltext
|
||||||
|
results.push(...Search.performTermsSearch(searchTerms, excludedTerms));
|
||||||
|
|
||||||
|
// let the scorer override scores with a custom scoring function
|
||||||
|
if (Scorer.score) results.forEach((item) => (item[4] = Scorer.score(item)));
|
||||||
|
|
||||||
|
// now sort the results by score (in opposite order of appearance, since the
|
||||||
|
// display function below uses pop() to retrieve items) and then
|
||||||
|
// alphabetically
|
||||||
|
results.sort((a, b) => {
|
||||||
|
const leftScore = a[4];
|
||||||
|
const rightScore = b[4];
|
||||||
|
if (leftScore === rightScore) {
|
||||||
|
// same score: sort alphabetically
|
||||||
|
const leftTitle = a[1].toLowerCase();
|
||||||
|
const rightTitle = b[1].toLowerCase();
|
||||||
|
if (leftTitle === rightTitle) return 0;
|
||||||
|
return leftTitle > rightTitle ? -1 : 1; // inverted is intentional
|
||||||
|
}
|
||||||
|
return leftScore > rightScore ? 1 : -1;
|
||||||
|
});
|
||||||
|
|
||||||
|
// remove duplicate search results
|
||||||
|
// note the reversing of results, so that in the case of duplicates, the highest-scoring entry is kept
|
||||||
|
let seen = new Set();
|
||||||
|
results = results.reverse().reduce((acc, result) => {
|
||||||
|
let resultStr = result.slice(0, 4).concat([result[5]]).map(v => String(v)).join(',');
|
||||||
|
if (!seen.has(resultStr)) {
|
||||||
|
acc.push(result);
|
||||||
|
seen.add(resultStr);
|
||||||
|
}
|
||||||
|
return acc;
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
results = results.reverse();
|
||||||
|
|
||||||
|
// for debugging
|
||||||
|
//Search.lastresults = results.slice(); // a copy
|
||||||
|
// console.info("search results:", Search.lastresults);
|
||||||
|
|
||||||
|
// print the results
|
||||||
|
_displayNextItem(results, results.length, searchTerms);
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* search for object names
|
||||||
|
*/
|
||||||
|
performObjectSearch: (object, objectTerms) => {
|
||||||
|
const filenames = Search._index.filenames;
|
||||||
|
const docNames = Search._index.docnames;
|
||||||
|
const objects = Search._index.objects;
|
||||||
|
const objNames = Search._index.objnames;
|
||||||
|
const titles = Search._index.titles;
|
||||||
|
|
||||||
|
const results = [];
|
||||||
|
|
||||||
|
const objectSearchCallback = (prefix, match) => {
|
||||||
|
const name = match[4]
|
||||||
|
const fullname = (prefix ? prefix + "." : "") + name;
|
||||||
|
const fullnameLower = fullname.toLowerCase();
|
||||||
|
if (fullnameLower.indexOf(object) < 0) return;
|
||||||
|
|
||||||
|
let score = 0;
|
||||||
|
const parts = fullnameLower.split(".");
|
||||||
|
|
||||||
|
// check for different match types: exact matches of full name or
|
||||||
|
// "last name" (i.e. last dotted part)
|
||||||
|
if (fullnameLower === object || parts.slice(-1)[0] === object)
|
||||||
|
score += Scorer.objNameMatch;
|
||||||
|
else if (parts.slice(-1)[0].indexOf(object) > -1)
|
||||||
|
score += Scorer.objPartialMatch; // matches in last name
|
||||||
|
|
||||||
|
const objName = objNames[match[1]][2];
|
||||||
|
const title = titles[match[0]];
|
||||||
|
|
||||||
|
// If more than one term searched for, we require other words to be
|
||||||
|
// found in the name/title/description
|
||||||
|
const otherTerms = new Set(objectTerms);
|
||||||
|
otherTerms.delete(object);
|
||||||
|
if (otherTerms.size > 0) {
|
||||||
|
const haystack = `${prefix} ${name} ${objName} ${title}`.toLowerCase();
|
||||||
|
if (
|
||||||
|
[...otherTerms].some((otherTerm) => haystack.indexOf(otherTerm) < 0)
|
||||||
|
)
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
let anchor = match[3];
|
||||||
|
if (anchor === "") anchor = fullname;
|
||||||
|
else if (anchor === "-") anchor = objNames[match[1]][1] + "-" + fullname;
|
||||||
|
|
||||||
|
const descr = objName + _(", in ") + title;
|
||||||
|
|
||||||
|
// add custom score for some objects according to scorer
|
||||||
|
if (Scorer.objPrio.hasOwnProperty(match[2]))
|
||||||
|
score += Scorer.objPrio[match[2]];
|
||||||
|
else score += Scorer.objPrioDefault;
|
||||||
|
|
||||||
|
results.push([
|
||||||
|
docNames[match[0]],
|
||||||
|
fullname,
|
||||||
|
"#" + anchor,
|
||||||
|
descr,
|
||||||
|
score,
|
||||||
|
filenames[match[0]],
|
||||||
|
]);
|
||||||
|
};
|
||||||
|
Object.keys(objects).forEach((prefix) =>
|
||||||
|
objects[prefix].forEach((array) =>
|
||||||
|
objectSearchCallback(prefix, array)
|
||||||
|
)
|
||||||
|
);
|
||||||
|
return results;
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* search for full-text terms in the index
|
||||||
|
*/
|
||||||
|
performTermsSearch: (searchTerms, excludedTerms) => {
|
||||||
|
// prepare search
|
||||||
|
const terms = Search._index.terms;
|
||||||
|
const titleTerms = Search._index.titleterms;
|
||||||
|
const filenames = Search._index.filenames;
|
||||||
|
const docNames = Search._index.docnames;
|
||||||
|
const titles = Search._index.titles;
|
||||||
|
|
||||||
|
const scoreMap = new Map();
|
||||||
|
const fileMap = new Map();
|
||||||
|
|
||||||
|
// perform the search on the required terms
|
||||||
|
searchTerms.forEach((word) => {
|
||||||
|
const files = [];
|
||||||
|
const arr = [
|
||||||
|
{ files: terms[word], score: Scorer.term },
|
||||||
|
{ files: titleTerms[word], score: Scorer.title },
|
||||||
|
];
|
||||||
|
// add support for partial matches
|
||||||
|
if (word.length > 2) {
|
||||||
|
const escapedWord = _escapeRegExp(word);
|
||||||
|
Object.keys(terms).forEach((term) => {
|
||||||
|
if (term.match(escapedWord) && !terms[word])
|
||||||
|
arr.push({ files: terms[term], score: Scorer.partialTerm });
|
||||||
|
});
|
||||||
|
Object.keys(titleTerms).forEach((term) => {
|
||||||
|
if (term.match(escapedWord) && !titleTerms[word])
|
||||||
|
arr.push({ files: titleTerms[word], score: Scorer.partialTitle });
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// no match but word was a required one
|
||||||
|
if (arr.every((record) => record.files === undefined)) return;
|
||||||
|
|
||||||
|
// found search word in contents
|
||||||
|
arr.forEach((record) => {
|
||||||
|
if (record.files === undefined) return;
|
||||||
|
|
||||||
|
let recordFiles = record.files;
|
||||||
|
if (recordFiles.length === undefined) recordFiles = [recordFiles];
|
||||||
|
files.push(...recordFiles);
|
||||||
|
|
||||||
|
// set score for the word in each file
|
||||||
|
recordFiles.forEach((file) => {
|
||||||
|
if (!scoreMap.has(file)) scoreMap.set(file, {});
|
||||||
|
scoreMap.get(file)[word] = record.score;
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// create the mapping
|
||||||
|
files.forEach((file) => {
|
||||||
|
if (fileMap.has(file) && fileMap.get(file).indexOf(word) === -1)
|
||||||
|
fileMap.get(file).push(word);
|
||||||
|
else fileMap.set(file, [word]);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// now check if the files don't contain excluded terms
|
||||||
|
const results = [];
|
||||||
|
for (const [file, wordList] of fileMap) {
|
||||||
|
// check if all requirements are matched
|
||||||
|
|
||||||
|
// as search terms with length < 3 are discarded
|
||||||
|
const filteredTermCount = [...searchTerms].filter(
|
||||||
|
(term) => term.length > 2
|
||||||
|
).length;
|
||||||
|
if (
|
||||||
|
wordList.length !== searchTerms.size &&
|
||||||
|
wordList.length !== filteredTermCount
|
||||||
|
)
|
||||||
|
continue;
|
||||||
|
|
||||||
|
// ensure that none of the excluded terms is in the search result
|
||||||
|
if (
|
||||||
|
[...excludedTerms].some(
|
||||||
|
(term) =>
|
||||||
|
terms[term] === file ||
|
||||||
|
titleTerms[term] === file ||
|
||||||
|
(terms[term] || []).includes(file) ||
|
||||||
|
(titleTerms[term] || []).includes(file)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
break;
|
||||||
|
|
||||||
|
// select one (max) score for the file.
|
||||||
|
const score = Math.max(...wordList.map((w) => scoreMap.get(file)[w]));
|
||||||
|
// add result to the result list
|
||||||
|
results.push([
|
||||||
|
docNames[file],
|
||||||
|
titles[file],
|
||||||
|
"",
|
||||||
|
null,
|
||||||
|
score,
|
||||||
|
filenames[file],
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
return results;
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* helper function to return a node containing the
|
||||||
|
* search summary for a given text. keywords is a list
|
||||||
|
* of stemmed words.
|
||||||
|
*/
|
||||||
|
makeSearchSummary: (htmlText, keywords) => {
|
||||||
|
const text = Search.htmlToText(htmlText);
|
||||||
|
if (text === "") return null;
|
||||||
|
|
||||||
|
const textLower = text.toLowerCase();
|
||||||
|
const actualStartPosition = [...keywords]
|
||||||
|
.map((k) => textLower.indexOf(k.toLowerCase()))
|
||||||
|
.filter((i) => i > -1)
|
||||||
|
.slice(-1)[0];
|
||||||
|
const startWithContext = Math.max(actualStartPosition - 120, 0);
|
||||||
|
|
||||||
|
const top = startWithContext === 0 ? "" : "...";
|
||||||
|
const tail = startWithContext + 240 < text.length ? "..." : "";
|
||||||
|
|
||||||
|
let summary = document.createElement("p");
|
||||||
|
summary.classList.add("context");
|
||||||
|
summary.textContent = top + text.substr(startWithContext, 240).trim() + tail;
|
||||||
|
|
||||||
|
return summary;
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
// Run Search.init once the document is ready (via the file's _ready helper).
_ready(Search.init);
|
||||||
144
core/dbt/docs/build/html/_static/sphinx_highlight.js
vendored
Normal file
144
core/dbt/docs/build/html/_static/sphinx_highlight.js
vendored
Normal file
@@ -0,0 +1,144 @@
|
|||||||
|
/* Highlighting utilities for Sphinx HTML documentation. */
"use strict";

// Global switch read by searchtools.js before it persists highlight terms
// to localStorage.
// FIX: terminate with an explicit semicolon instead of relying on ASI,
// matching the semicolon style used everywhere else in this file.
const SPHINX_HIGHLIGHT_ENABLED = true;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* highlight a given string on a node by wrapping it in
|
||||||
|
* span elements with the given class name.
|
||||||
|
*/
|
||||||
|
/**
 * Recursively highlight the first occurrence of `text` in each text node
 * under `node` by splitting the text node into before / <span> / after.
 *
 * For matches inside an <svg>, a styled <span> cannot be used; a <tspan>
 * is inserted instead and a backing <rect> (carrying the class) is queued
 * in `addItems` for the caller to insert later — inserting it here would
 * mutate the tree mid-walk.
 *
 * `text` is expected to be lowercased already (it is compared against the
 * lowercased node value but sliced out of the original casing).
 */
const _highlight = (node, addItems, text, className) => {
  if (node.nodeType === Node.TEXT_NODE) {
    const val = node.nodeValue;
    const parent = node.parentNode;
    const pos = val.toLowerCase().indexOf(text);
    // skip when absent, already wrapped, or explicitly opted out
    if (
      pos >= 0 &&
      !parent.classList.contains(className) &&
      !parent.classList.contains("nohighlight")
    ) {
      let span;

      // foreignObject re-enters HTML land, so only a bare svg ancestor counts
      const closestNode = parent.closest("body, svg, foreignObject");
      const isInSVG = closestNode && closestNode.matches("svg");
      if (isInSVG) {
        span = document.createElementNS("http://www.w3.org/2000/svg", "tspan");
      } else {
        span = document.createElement("span");
        span.classList.add(className);
      }

      // split: the inner insertBefore places the "after" remainder and
      // returns it, so the outer call drops the span right before it;
      // finally the original node is truncated to the "before" part.
      span.appendChild(document.createTextNode(val.substr(pos, text.length)));
      parent.insertBefore(
        span,
        parent.insertBefore(
          document.createTextNode(val.substr(pos + text.length)),
          node.nextSibling
        )
      );
      node.nodeValue = val.substr(0, pos);

      if (isInSVG) {
        // build a rect covering the parent's bounding box as the visual
        // "highlight"; insertion is deferred via addItems (see above)
        const rect = document.createElementNS(
          "http://www.w3.org/2000/svg",
          "rect"
        );
        const bbox = parent.getBBox();
        rect.x.baseVal.value = bbox.x;
        rect.y.baseVal.value = bbox.y;
        rect.width.baseVal.value = bbox.width;
        rect.height.baseVal.value = bbox.height;
        rect.setAttribute("class", className);
        addItems.push({ parent: parent, target: rect });
      }
    }
  } else if (node.matches && !node.matches("button, select, textarea")) {
    // recurse into element children, skipping form controls
    node.childNodes.forEach((el) => _highlight(el, addItems, text, className));
  }
};
|
||||||
|
/**
 * Highlight occurrences of `text` beneath `thisNode`, then insert the
 * deferred SVG backing rectangles collected by _highlight.
 */
const _highlightText = (thisNode, text, className) => {
  const deferred = [];
  _highlight(thisNode, deferred, text, className);
  // SVG rects are inserted only after the walk, so the tree is stable
  // while _highlight recurses.
  for (const { parent, target } of deferred) {
    parent.insertAdjacentElement("beforebegin", target);
  }
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Small JavaScript module for the documentation.
|
||||||
|
*/
|
||||||
|
/**
 * Small JavaScript module for the documentation.
 *
 * Highlights search terms handed over from the search page (via
 * localStorage or the legacy ?highlight= URL parameter) and offers a
 * "Hide Search Matches" link plus an Escape-key shortcut to clear them.
 */
const SphinxHighlight = {

  /**
   * highlight the search words provided in localstorage in the text
   */
  highlightSearchWords: () => {
    if (!SPHINX_HIGHLIGHT_ENABLED) return; // bail if no highlight

    // get and clear terms from localstorage
    // (localStorage is written by searchtools.js; the ?highlight= URL
    // parameter is a fallback, and both are consumed exactly once)
    const url = new URL(window.location);
    const highlight =
      localStorage.getItem("sphinx_highlight_terms")
      || url.searchParams.get("highlight")
      || "";
    localStorage.removeItem("sphinx_highlight_terms")
    url.searchParams.delete("highlight");
    // strip the parameter from the address bar without reloading
    window.history.replaceState({}, "", url);

    // get individual terms from highlight string
    const terms = highlight.toLowerCase().split(/\s+/).filter(x => x);
    if (terms.length === 0) return; // nothing to do

    // There should never be more than one element matching "div.body"
    const divBody = document.querySelectorAll("div.body");
    const body = divBody.length ? divBody[0] : document.querySelector("body");
    // NOTE(review): highlighting is deferred by 10ms — presumably to let
    // the page finish rendering first; confirm before changing.
    window.setTimeout(() => {
      terms.forEach((term) => _highlightText(body, term, "highlighted"));
    }, 10);

    // offer a link to clear the highlights again, if a search box exists
    const searchBox = document.getElementById("searchbox");
    if (searchBox === null) return;
    searchBox.appendChild(
      document
        .createRange()
        .createContextualFragment(
          '<p class="highlight-link">' +
            '<a href="javascript:SphinxHighlight.hideSearchWords()">' +
            _("Hide Search Matches") +
            "</a></p>"
        )
    );
  },

  /**
   * helper function to hide the search marks again
   */
  hideSearchWords: () => {
    // remove the "Hide Search Matches" link and all highlight spans,
    // then make sure the terms cannot be re-applied on the next page load
    document
      .querySelectorAll("#searchbox .highlight-link")
      .forEach((el) => el.remove());
    document
      .querySelectorAll("span.highlighted")
      .forEach((el) => el.classList.remove("highlighted"));
    localStorage.removeItem("sphinx_highlight_terms")
  },

  /**
   * install a keydown listener so Escape clears the highlights
   */
  initEscapeListener: () => {
    // only install a listener if it is really needed
    if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) return;

    document.addEventListener("keydown", (event) => {
      // bail for input elements
      if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return;
      // bail with special keys
      if (event.shiftKey || event.altKey || event.ctrlKey || event.metaKey) return;
      if (DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS && (event.key === "Escape")) {
        SphinxHighlight.hideSearchWords();
        event.preventDefault();
      }
    });
  },
};
|
||||||
|
|
||||||
|
// Wire up term highlighting and the Escape-key handler once the DOM is ready.
_ready(SphinxHighlight.highlightSearchWords);
_ready(SphinxHighlight.initEscapeListener);
|
||||||
2042
core/dbt/docs/build/html/_static/underscore-1.13.1.js
vendored
Normal file
2042
core/dbt/docs/build/html/_static/underscore-1.13.1.js
vendored
Normal file
File diff suppressed because it is too large
Load Diff
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user