forked from repo-mirrors/dbt-core

Compare commits: jerco/expe… ... adding-sem…
50 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | fb8b161351 |  |
|  | 7ecb431278 |  |
|  | 792150ff6a |  |
|  | 85d0b5afc7 |  |
|  | 1fbcaa4484 |  |
|  | 481235a943 |  |
|  | 2289e45571 |  |
|  | b5d303f12a |  |
|  | c3be975783 |  |
|  | 47c2edb42a |  |
|  | b3440417ad |  |
|  | 020f639c7a |  |
|  | 55db15aba8 |  |
|  | bce0e7c096 |  |
|  | 7d7066466d |  |
|  | 517576c088 |  |
|  | 987764858b |  |
|  | a235abd176 |  |
|  | 9297e4d55c |  |
|  | eae98677b9 |  |
|  | 66ac107409 |  |
|  | 39c5c42215 |  |
|  | 9f280a8469 |  |
|  | 73116fb816 |  |
|  | f02243506d |  |
|  | d5e9ce1797 |  |
|  | 4e786184d2 |  |
|  | 930bd3541e |  |
|  | 6c76137da4 |  |
|  | 68d06d8a9c |  |
|  | d0543c9242 |  |
|  | cfad27f963 |  |
|  | c3ccbe3357 |  |
|  | 8e28f5906e |  |
|  | d23285b4ba |  |
|  | a42748433d |  |
|  | be4a91a0fe |  |
|  | 8145eed603 |  |
|  | fc00239f36 |  |
|  | 77dfec7214 |  |
|  | 7b73264ec8 |  |
|  | 1916784287 |  |
|  | c2856017a1 |  |
|  | 17b82661d2 |  |
|  | 6c8609499a |  |
|  | 53ae325576 |  |
|  | a7670a3ab9 |  |
|  | ff2f1f42c3 |  |
|  | 35f7975d8f |  |
|  | a9c8bc0e0a |  |

.changes/unreleased/Dependency-20220923-000646.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: "Dependency"
body: "Update pathspec requirement from ~=0.9.0 to >=0.9,<0.11 in /core"
time: 2022-09-23T00:06:46.00000Z
custom:
  Author: dependabot[bot]
  Issue: 4904
  PR: 5917

.changes/unreleased/Dependency-20221020-000753.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: "Dependency"
body: "Bump mashumaro[msgpack] from 3.0.4 to 3.1.1 in /core"
time: 2022-10-20T00:07:53.00000Z
custom:
  Author: dependabot[bot]
  Issue: 4904
  PR: 6108

.changes/unreleased/Dependency-20221026-000910.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: "Dependency"
body: "Update colorama requirement from <0.4.6,>=0.3.9 to >=0.3.9,<0.4.7 in /core"
time: 2022-10-26T00:09:10.00000Z
custom:
  Author: dependabot[bot]
  Issue: 4904
  PR: 6144

.changes/unreleased/Docs-20221017-171411.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
kind: Docs
time: 2022-10-17T17:14:11.715348-05:00
custom:
  Author: paulbenschmidt
  Issue: "5880"
  PR: "324"

.changes/unreleased/Docs-20221116-155743.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: Docs
body: Fix rendering of sample code for metrics
time: 2022-11-16T15:57:43.204201+01:00
custom:
  Author: jtcohen6
  Issue: "323"
  PR: "346"

.changes/unreleased/Features-20220408-165459.yaml (new file, 8 lines)
@@ -0,0 +1,8 @@
kind: Features
body: Added favor-state flag to optionally favor state nodes even if unselected node
  exists
time: 2022-04-08T16:54:59.696564+01:00
custom:
  Author: daniel-murray josephberni
  Issue: "2968"
  PR: "5859"

.changes/unreleased/Features-20221102-150003.yaml (new file, 8 lines)
@@ -0,0 +1,8 @@
kind: Features
body: This pulls the profile name from args when constructing a RuntimeConfig in lib.py,
  enabling the dbt-server to override the value that's in the dbt_project.yml
time: 2022-11-02T15:00:03.000805-05:00
custom:
  Author: racheldaniel
  Issue: "6201"
  PR: "6202"

.changes/unreleased/Features-20221114-185207.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: Features
body: Added an md5 function to the base context
time: 2022-11-14T18:52:07.788593+02:00
custom:
  Author: haritamar
  Issue: "6246"
  PR: "6247"

.changes/unreleased/Features-20221130-112913.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: Features
body: Exposures support metrics in lineage
time: 2022-11-30T11:29:13.256034-05:00
custom:
  Author: michelleark
  Issue: "6057"
  PR: "6342"

.changes/unreleased/Fixes-20221016-173742.yaml (new file, 8 lines)
@@ -0,0 +1,8 @@
kind: Fixes
body: Add functors to ensure event types with str-type attributes are initialized
  to spec, even when provided non-str type params.
time: 2022-10-16T17:37:42.846683-07:00
custom:
  Author: versusfacit
  Issue: "5436"
  PR: "5874"

.changes/unreleased/Fixes-20221107-095314.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: Fixes
body: Allow hooks to fail without halting execution flow
time: 2022-11-07T09:53:14.340257-06:00
custom:
  Author: ChenyuLInx
  Issue: "5625"
  PR: "6059"

.changes/unreleased/Fixes-20221115-081021.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: Fixes
body: Clarify Error Message for how many models are allowed in a Python file
time: 2022-11-15T08:10:21.527884-05:00
custom:
  Author: justbldwn
  Issue: "6245"
  PR: "6251"

.changes/unreleased/Under the Hood-20221017-151511.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: Under the Hood
body: Fixed extra whitespace in strings introduced by black.
time: 2022-10-17T15:15:11.499246-05:00
custom:
  Author: luke-bassett
  Issue: "1350"
  PR: "6086"

.changes/unreleased/Under the Hood-20221017-155844.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: Under the Hood
body: Clean up string formatting
time: 2022-10-17T15:58:44.676549-04:00
custom:
  Author: eve-johns
  Issue: "6068"
  PR: "6082"

.changes/unreleased/Under the Hood-20221028-104837.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: Under the Hood
body: Remove the 'root_path' field from most nodes
time: 2022-10-28T10:48:37.687886-04:00
custom:
  Author: gshank
  Issue: "6171"
  PR: "6172"

.changes/unreleased/Under the Hood-20221028-110344.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: Under the Hood
body: Combine certain logging events with different levels
time: 2022-10-28T11:03:44.887836-04:00
custom:
  Author: gshank
  Issue: "6173"
  PR: "6174"

.changes/unreleased/Under the Hood-20221108-074550.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: Under the Hood
body: Convert threading tests to pytest
time: 2022-11-08T07:45:50.589147-06:00
custom:
  Author: stu-k
  Issue: "5942"
  PR: "6226"

.changes/unreleased/Under the Hood-20221108-115633.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: Under the Hood
body: Convert postgres index tests to pytest
time: 2022-11-08T11:56:33.743042-06:00
custom:
  Author: stu-k
  Issue: "5770"
  PR: "6228"

.changes/unreleased/Under the Hood-20221108-133104.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: Under the Hood
body: Convert use color tests to pytest
time: 2022-11-08T13:31:04.788547-06:00
custom:
  Author: stu-k
  Issue: "5771"
  PR: "6230"

.changes/unreleased/Under the Hood-20221116-130037.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
kind: Under the Hood
body: Add github actions workflow to generate high level CLI API docs
time: 2022-11-16T13:00:37.916202-06:00
custom:
  Author: stu-k
  Issue: "5942"
  PR: "6187"
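
Each of the entries above follows the same small changie schema: kind, an optional body, a timestamp, and a custom block with Author/Issue/PR. Purely as an illustration (not part of this diff), a minimal Python sketch that reads one of the entries shown here with PyYAML:

    import yaml

    # Read one of the new changelog entries and print the fields the release notes use.
    with open(".changes/unreleased/Features-20221114-185207.yaml") as fh:
        entry = yaml.safe_load(fh)

    print(entry["kind"])              # Features
    print(entry["body"])              # Added an md5 function to the base context
    print(entry["custom"]["Author"])  # haritamar
    print(entry["custom"]["PR"])      # 6247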

@@ -44,7 +44,7 @@ custom:
footerFormat: |
{{- $contributorDict := dict }}
{{- /* any names added to this list should be all lowercase for later matching purposes */}}
{{- $core_team := list "peterallenwebb" "emmyoop" "nathaniel-may" "gshank" "leahwicz" "chenyulinx" "stu-k" "iknox-fa" "versusfacit" "mcknight-42" "jtcohen6" "dependabot[bot]" "snyk-bot" "colin-rogers-dbt" }}
{{- $core_team := list "michelleark" "peterallenwebb" "emmyoop" "nathaniel-may" "gshank" "leahwicz" "chenyulinx" "stu-k" "iknox-fa" "versusfacit" "mcknight-42" "jtcohen6" "dependabot[bot]" "snyk-bot" "colin-rogers-dbt" }}
{{- range $change := .Changes }}
{{- $authorList := splitList " " $change.Custom.Author }}
{{- /* loop through all authors for a PR */}}

.github/workflows/generate-cli-api-docs.yml (vendored, new file, 166 lines)
@@ -0,0 +1,166 @@
# **what?**
# On push, if anything in core/dbt/docs or core/dbt/cli has been
# created or modified, regenerate the CLI API docs using sphinx.

# **why?**
# We watch for changes in core/dbt/cli because the CLI API docs rely on click
# and all supporting flags/params to be generated. We watch for changes in
# core/dbt/docs since any changes to sphinx configuration or any of the
# .rst files there could result in a differently build final index.html file.

# **when?**
# Whenever a change has been pushed to a branch, and only if there is a diff
# between the PR branch and main's core/dbt/cli and or core/dbt/docs dirs.

# TODO: add bot comment to PR informing contributor that the docs have been committed
# TODO: figure out why github action triggered pushes cause github to fail to report
# the status of jobs

name: Generate CLI API docs

on:
  pull_request:

permissions:
  contents: write
  pull-requests: write

env:
  CLI_DIR: ${{ github.workspace }}/core/dbt/cli
  DOCS_DIR: ${{ github.workspace }}/core/dbt/docs
  DOCS_BUILD_DIR: ${{ github.workspace }}/core/dbt/docs/build

jobs:
  check_gen:
    name: check if generation needed
    runs-on: ubuntu-latest
    outputs:
      cli_dir_changed: ${{ steps.check_cli.outputs.cli_dir_changed }}
      docs_dir_changed: ${{ steps.check_docs.outputs.docs_dir_changed }}

    steps:
      - name: "[DEBUG] print variables"
        run: |
          echo "env.CLI_DIR: ${{ env.CLI_DIR }}"
          echo "env.DOCS_BUILD_DIR: ${{ env.DOCS_BUILD_DIR }}"
          echo "env.DOCS_DIR: ${{ env.DOCS_DIR }}"
          echo ">>>>> git log"
          git log --pretty=oneline | head -5

      - name: git checkout
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
          ref: ${{ github.head_ref }}

      - name: set shas
        id: set_shas
        run: |
          THIS_SHA=$(git rev-parse @)
          LAST_SHA=$(git rev-parse @~1)

          echo "this sha: $THIS_SHA"
          echo "last sha: $LAST_SHA"

          echo "this_sha=$THIS_SHA" >> $GITHUB_OUTPUT
          echo "last_sha=$LAST_SHA" >> $GITHUB_OUTPUT

      - name: check for changes in core/dbt/cli
        id: check_cli
        run: |
          CLI_DIR_CHANGES=$(git diff \
            ${{ steps.set_shas.outputs.last_sha }} \
            ${{ steps.set_shas.outputs.this_sha }} \
            -- ${{ env.CLI_DIR }})

          if [ -n "$CLI_DIR_CHANGES" ]; then
            echo "changes found"
            echo $CLI_DIR_CHANGES
            echo "cli_dir_changed=true" >> $GITHUB_OUTPUT
            exit 0
          fi
          echo "cli_dir_changed=false" >> $GITHUB_OUTPUT
          echo "no changes found"

      - name: check for changes in core/dbt/docs
        id: check_docs
        if: steps.check_cli.outputs.cli_dir_changed == 'false'
        run: |
          DOCS_DIR_CHANGES=$(git diff --name-only \
            ${{ steps.set_shas.outputs.last_sha }} \
            ${{ steps.set_shas.outputs.this_sha }} \
            -- ${{ env.DOCS_DIR }} ':!${{ env.DOCS_BUILD_DIR }}')

          DOCS_BUILD_DIR_CHANGES=$(git diff --name-only \
            ${{ steps.set_shas.outputs.last_sha }} \
            ${{ steps.set_shas.outputs.this_sha }} \
            -- ${{ env.DOCS_BUILD_DIR }})

          if [ -n "$DOCS_DIR_CHANGES" ] && [ -z "$DOCS_BUILD_DIR_CHANGES" ]; then
            echo "changes found"
            echo $DOCS_DIR_CHANGES
            echo "docs_dir_changed=true" >> $GITHUB_OUTPUT
            exit 0
          fi
          echo "docs_dir_changed=false" >> $GITHUB_OUTPUT
          echo "no changes found"

  gen_docs:
    name: generate docs
    runs-on: ubuntu-latest
    needs: [check_gen]
    if: |
      needs.check_gen.outputs.cli_dir_changed == 'true'
      || needs.check_gen.outputs.docs_dir_changed == 'true'

    steps:
      - name: "[DEBUG] print variables"
        run: |
          echo "env.DOCS_DIR: ${{ env.DOCS_DIR }}"
          echo "github head_ref: ${{ github.head_ref }}"

      - name: git checkout
        uses: actions/checkout@v3
        with:
          ref: ${{ github.head_ref }}

      - name: install python
        uses: actions/setup-python@v4.3.0
        with:
          python-version: 3.8

      - name: install dev requirements
        run: |
          python3 -m venv env
          source env/bin/activate
          python -m pip install --upgrade pip
          pip install -r requirements.txt -r dev-requirements.txt

      - name: generate docs
        run: |
          source env/bin/activate
          cd ${{ env.DOCS_DIR }}

          echo "cleaning existing docs"
          make clean

          echo "creating docs"
          make html

      - name: debug
        run: |
          echo ">>>>> status"
          git status
          echo ">>>>> remotes"
          git remote -v
          echo ">>>>> branch"
          git branch -v
          echo ">>>>> log"
          git log --pretty=oneline | head -5

      - name: commit docs
        run: |
          git config user.name 'Github Build Bot'
          git config user.email 'buildbot@fishtownanalytics.com'
          git commit -am "Add generated CLI API docs"
          git push -u origin ${{ github.head_ref }}
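
The check_gen job above reduces to one question: between the last two commits, did anything change under core/dbt/cli, or under core/dbt/docs outside its build directory? A rough local equivalent, sketched in Python around the same git diff commands the workflow runs — the paths come from the workflow's env block, and the helper itself is illustrative rather than part of the diff:

    import subprocess
    from typing import Optional

    def dir_changed(path: str, exclude: Optional[str] = None) -> bool:
        # Mirrors the workflow: diff HEAD~1..HEAD restricted to one directory,
        # optionally excluding a subdirectory via a ':!' pathspec.
        args = ["git", "diff", "--name-only", "HEAD~1", "HEAD", "--", path]
        if exclude:
            args.append(f":!{exclude}")
        out = subprocess.run(args, capture_output=True, text=True, check=True).stdout
        return bool(out.strip())

    cli_changed = dir_changed("core/dbt/cli")
    docs_changed = dir_changed("core/dbt/docs", exclude="core/dbt/docs/build")
    print("regenerate CLI API docs:", cli_changed or docs_changed)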

.github/workflows/main.yml (vendored, 14 lines changed)

@@ -45,7 +45,9 @@ jobs:
uses: actions/checkout@v2

- name: Set up Python
uses: actions/setup-python@v2
uses: actions/setup-python@v4.3.0
with:
python-version: '3.8'

- name: Install python dependencies
run: |

@@ -82,7 +84,7 @@ jobs:
uses: actions/checkout@v2

- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
uses: actions/setup-python@v4.3.0
with:
python-version: ${{ matrix.python-version }}

@@ -117,7 +119,7 @@ jobs:
fail-fast: false
matrix:
python-version: ["3.7", "3.8", "3.9", "3.10"]
os: [ubuntu-latest]
os: [ubuntu-20.04]
include:
- python-version: 3.8
os: windows-latest

@@ -137,7 +139,7 @@ jobs:
uses: actions/checkout@v2

- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
uses: actions/setup-python@v4.3.0
with:
python-version: ${{ matrix.python-version }}

@@ -190,9 +192,9 @@ jobs:
uses: actions/checkout@v2

- name: Set up Python
uses: actions/setup-python@v2
uses: actions/setup-python@v4.3.0
with:
python-version: 3.8
python-version: '3.8'

- name: Install python dependencies
run: |

.github/workflows/stale.yml (vendored, 11 lines changed)

@@ -9,13 +9,4 @@ permissions:

jobs:
stale:
runs-on: ubuntu-latest
steps:
# pinned at v4 (https://github.com/actions/stale/releases/tag/v4.0.0)
- uses: actions/stale@cdf15f641adb27a71842045a94023bef6945e3aa
with:
stale-issue-message: "This issue has been marked as Stale because it has been open for 180 days with no activity. If you would like the issue to remain open, please remove the stale label or comment on the issue, or it will be closed in 7 days."
stale-pr-message: "This PR has been marked as Stale because it has been open for 180 days with no activity. If you would like the PR to remain open, please remove the stale label or comment on the PR, or it will be closed in 7 days."
close-issue-message: "Although we are closing this issue as stale, it's not gone forever. Issues can be reopened if there is renewed community interest; add a comment to notify the maintainers."
# mark issues/PRs stale when they haven't seen activity in 180 days
days-before-stale: 180
uses: dbt-labs/actions/.github/workflows/stale-bot-matrix.yml@main

@@ -22,7 +22,7 @@ jobs:
# run the performance measurements on the current or default branch
test-schema:
name: Test Log Schema
runs-on: ubuntu-latest
runs-on: ubuntu-20.04
env:
# turns warnings into errors
RUSTFLAGS: "-D warnings"

.gitignore (vendored, 1 line changed)

@@ -11,6 +11,7 @@ __pycache__/
env*/
dbt_env/
build/
!core/dbt/docs/build
develop-eggs/
dist/
downloads/

@@ -2,7 +2,7 @@
# Eventually the hooks described here will be run as tests before merging each PR.

# TODO: remove global exclusion of tests when testing overhaul is complete
exclude: ^test/
exclude: ^(test/|core/dbt/docs/build/)

# Force all unspecified python hooks to run python 3.8
default_language_version:

@@ -30,7 +30,7 @@ repos:
args:
- "--check"
- "--diff"
- repo: https://gitlab.com/pycqa/flake8
- repo: https://github.com/pycqa/flake8
rev: 4.0.1
hooks:
- id: flake8

@@ -41,10 +41,10 @@ from dbt.events.functions import fire_event
from dbt.events.types import (
NewConnection,
ConnectionReused,
ConnectionLeftOpenInCleanup,
ConnectionLeftOpen,
ConnectionLeftOpen2,
ConnectionClosedInCleanup,
ConnectionClosed,
ConnectionClosed2,
Rollback,
RollbackFailed,
)

@@ -306,9 +306,9 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
with self.lock:
for connection in self.thread_connections.values():
if connection.state not in {"closed", "init"}:
fire_event(ConnectionLeftOpen(conn_name=cast_to_str(connection.name)))
fire_event(ConnectionLeftOpenInCleanup(conn_name=cast_to_str(connection.name)))
else:
fire_event(ConnectionClosed(conn_name=cast_to_str(connection.name)))
fire_event(ConnectionClosedInCleanup(conn_name=cast_to_str(connection.name)))
self.close(connection)

# garbage collect these connections

@@ -345,10 +345,10 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
"""Perform the actual close operation."""
# On windows, sometimes connection handles don't have a close() attr.
if hasattr(connection.handle, "close"):
fire_event(ConnectionClosed2(conn_name=cast_to_str(connection.name)))
fire_event(ConnectionClosed(conn_name=cast_to_str(connection.name)))
connection.handle.close()
else:
fire_event(ConnectionLeftOpen2(conn_name=cast_to_str(connection.name)))
fire_event(ConnectionLeftOpen(conn_name=cast_to_str(connection.name)))

@classmethod
def _rollback(cls, connection: Connection) -> None:

@@ -41,13 +41,13 @@ from dbt.clients.jinja import MacroGenerator
from dbt.contracts.graph.compiled import CompileResultNode, CompiledSeedNode
from dbt.contracts.graph.manifest import Manifest, MacroManifest
from dbt.contracts.graph.parsed import ParsedSeedNode
from dbt.exceptions import warn_or_error
from dbt.events.functions import fire_event
from dbt.events.functions import fire_event, warn_or_error
from dbt.events.types import (
CacheMiss,
ListRelations,
CodeExecution,
CodeExecutionStatus,
CatalogGenerationError,
)
from dbt.utils import filter_null_values, executor, cast_to_str

@@ -581,7 +581,7 @@ class BaseAdapter(metaclass=AdapterMeta):
:rtype: List[self.Relation]
"""
raise NotImplementedException(
"`list_relations_without_caching` is not implemented for this " "adapter!"
"`list_relations_without_caching` is not implemented for this adapter!"
)

###

@@ -1327,7 +1327,7 @@ def catch_as_completed(
elif isinstance(exc, KeyboardInterrupt) or not isinstance(exc, Exception):
raise exc
else:
warn_or_error(f"Encountered an error while generating catalog: {str(exc)}")
warn_or_error(CatalogGenerationError(exc=str(exc)))
# exc is not None, derives from Exception, and isn't ctrl+c
exceptions.append(exc)
return merge_tables(tables), exceptions

@@ -367,9 +367,9 @@ class BlockIterator:
if self.current:
linecount = self.data[: self.current.end].count("\n") + 1
dbt.exceptions.raise_compiler_error(
(
"Reached EOF without finding a close tag for " "{} (searched from line {})"
).format(self.current.block_type_name, linecount)
("Reached EOF without finding a close tag for {} (searched from line {})").format(
self.current.block_type_name, linecount
)
)

if collect_raw_data:

@@ -56,6 +56,7 @@ def print_compile_stats(stats):
NodeType.Source: "source",
NodeType.Exposure: "exposure",
NodeType.Metric: "metric",
NodeType.Entity: "entity",
}

results = {k: 0 for k in names.keys()}

@@ -91,6 +92,8 @@ def _generate_stats(manifest: Manifest):
stats[exposure.resource_type] += 1
for metric in manifest.metrics.values():
stats[metric.resource_type] += 1
for entity in manifest.entities.values():
stats[entity.resource_type] += 1
for macro in manifest.macros.values():
stats[macro.resource_type] += 1
return stats

@@ -248,7 +248,7 @@ class PartialProject(RenderComponents):
project_name: Optional[str] = field(
metadata=dict(
description=(
"The name of the project. This should always be set and will not " "be rendered"
"The name of the project. This should always be set and will not be rendered"
)
)
)

@@ -381,6 +381,7 @@ class PartialProject(RenderComponents):
sources: Dict[str, Any]
tests: Dict[str, Any]
metrics: Dict[str, Any]
entities: Dict[str, Any]
exposures: Dict[str, Any]
vars_value: VarProvider

@@ -391,6 +392,7 @@ class PartialProject(RenderComponents):
sources = cfg.sources
tests = cfg.tests
metrics = cfg.metrics
entities = cfg.entities
exposures = cfg.exposures
if cfg.vars is None:
vars_dict: Dict[str, Any] = {}

@@ -446,6 +448,7 @@ class PartialProject(RenderComponents):
sources=sources,
tests=tests,
metrics=metrics,
entities=entities,
exposures=exposures,
vars=vars_value,
config_version=cfg.config_version,

@@ -550,6 +553,7 @@ class Project:
sources: Dict[str, Any]
tests: Dict[str, Any]
metrics: Dict[str, Any]
entities: Dict[str, Any]
exposures: Dict[str, Any]
vars: VarProvider
dbt_version: List[VersionSpecifier]

@@ -624,6 +628,7 @@ class Project:
"sources": self.sources,
"tests": self.tests,
"metrics": self.metrics,
"entities": self.entities,
"exposures": self.exposures,
"vars": self.vars.to_dict(),
"require-dbt-version": [v.to_version_string() for v in self.dbt_version],

@@ -668,7 +673,7 @@ class Project:
def get_selector(self, name: str) -> Union[SelectionSpec, bool]:
if name not in self.selectors:
raise RuntimeException(
f"Could not find selector named {name}, expected one of " f"{list(self.selectors)}"
f"Could not find selector named {name}, expected one of {list(self.selectors)}"
)
return self.selectors[name]["definition"]

@@ -3,31 +3,41 @@ import os
from copy import deepcopy
from dataclasses import dataclass, field
from pathlib import Path
from typing import Dict, Any, Optional, Mapping, Iterator, Iterable, Tuple, List, MutableSet, Type
from typing import (
Any,
Dict,
Iterable,
Iterator,
Mapping,
MutableSet,
Optional,
Tuple,
Type,
Union,
)

from .profile import Profile
from .project import Project
from .renderer import DbtProjectYamlRenderer, ProfileRenderer
from .utils import parse_cli_vars
from dbt import flags
from dbt.adapters.factory import get_relation_class_by_name, get_include_paths
from dbt.helper_types import FQNPath, PathSet, DictDefaultEmptyStr
from dbt.adapters.factory import get_include_paths, get_relation_class_by_name
from dbt.config.profile import read_user_config
from dbt.contracts.connection import AdapterRequiredConfig, Credentials
from dbt.contracts.graph.manifest import ManifestMetadata
from dbt.contracts.relation import ComponentName
from dbt.ui import warning_tag

from dbt.contracts.project import Configuration, UserConfig
from dbt.exceptions import (
RuntimeException,
DbtProjectError,
validator_error_message,
warn_or_error,
raise_compiler_error,
)

from dbt.contracts.relation import ComponentName
from dbt.dataclass_schema import ValidationError
from dbt.exceptions import (
DbtProjectError,
RuntimeException,
raise_compiler_error,
validator_error_message,
)
from dbt.events.functions import warn_or_error
from dbt.events.types import UnusedResourceConfigPath
from dbt.helper_types import DictDefaultEmptyStr, FQNPath, PathSet

from .profile import Profile
from .project import Project, PartialProject
from .renderer import DbtProjectYamlRenderer, ProfileRenderer
from .utils import parse_cli_vars

def _project_quoting_dict(proj: Project, profile: Profile) -> Dict[ComponentName, bool]:

@@ -106,6 +116,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
sources=project.sources,
tests=project.tests,
metrics=project.metrics,
entities=project.entities,
exposures=project.exposures,
vars=project.vars,
config_version=project.config_version,

@@ -190,28 +201,52 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):

@classmethod
def collect_parts(cls: Type["RuntimeConfig"], args: Any) -> Tuple[Project, Profile]:
# profile_name from the project
project_root = args.project_dir if args.project_dir else os.getcwd()
version_check = bool(flags.VERSION_CHECK)
partial = Project.partial_load(project_root, verify_version=version_check)

# build the profile using the base renderer and the one fact we know
# Note: only the named profile section is rendered. The rest of the
# profile is ignored.
cli_vars: Dict[str, Any] = parse_cli_vars(getattr(args, "vars", "{}"))

profile = cls.collect_profile(args=args)
project_renderer = DbtProjectYamlRenderer(profile, cli_vars)
project = cls.collect_project(args=args, project_renderer=project_renderer)
assert type(project) is Project
return (project, profile)

@classmethod
def collect_profile(
cls: Type["RuntimeConfig"], args: Any, profile_name: Optional[str] = None
) -> Profile:

cli_vars: Dict[str, Any] = parse_cli_vars(getattr(args, "vars", "{}"))
profile_renderer = ProfileRenderer(cli_vars)
profile_name = partial.render_profile_name(profile_renderer)

# build the profile using the base renderer and the one fact we know
if profile_name is None:
# Note: only the named profile section is rendered here. The rest of the
# profile is ignored.
partial = cls.collect_project(args)
assert type(partial) is PartialProject
profile_name = partial.render_profile_name(profile_renderer)

profile = cls._get_rendered_profile(args, profile_renderer, profile_name)
# Save env_vars encountered in rendering for partial parsing
profile.profile_env_vars = profile_renderer.ctx_obj.env_vars
return profile

# get a new renderer using our target information and render the
# project
project_renderer = DbtProjectYamlRenderer(profile, cli_vars)
project = partial.render(project_renderer)
# Save env_vars encountered in rendering for partial parsing
project.project_env_vars = project_renderer.ctx_obj.env_vars
return (project, profile)
@classmethod
def collect_project(
cls: Type["RuntimeConfig"],
args: Any,
project_renderer: Optional[DbtProjectYamlRenderer] = None,
) -> Union[Project, PartialProject]:

project_root = args.project_dir if args.project_dir else os.getcwd()
version_check = bool(flags.VERSION_CHECK)
partial = Project.partial_load(project_root, verify_version=version_check)
if project_renderer is None:
return partial
else:
project = partial.render(project_renderer)
project.project_env_vars = project_renderer.ctx_obj.env_vars
return project

# Called in main.py, lib.py, task/base.py
@classmethod

@@ -277,14 +312,15 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
"sources": self._get_config_paths(self.sources),
"tests": self._get_config_paths(self.tests),
"metrics": self._get_config_paths(self.metrics),
"entities": self._get_config_paths(self.entities),
"exposures": self._get_config_paths(self.exposures),
}

def get_unused_resource_config_paths(
def warn_for_unused_resource_config_paths(
self,
resource_fqns: Mapping[str, PathSet],
disabled: PathSet,
) -> List[FQNPath]:
) -> None:
"""Return a list of lists of strings, where each inner list of strings
represents a type + FQN path of a resource configuration that is not
used.

@@ -298,23 +334,13 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):

for config_path in config_paths:
if not _is_config_used(config_path, fqns):
unused_resource_config_paths.append((resource_type,) + config_path)
return unused_resource_config_paths
resource_path = ".".join(i for i in ((resource_type,) + config_path))
unused_resource_config_paths.append(resource_path)

def warn_for_unused_resource_config_paths(
self,
resource_fqns: Mapping[str, PathSet],
disabled: PathSet,
) -> None:
unused = self.get_unused_resource_config_paths(resource_fqns, disabled)
if len(unused) == 0:
if len(unused_resource_config_paths) == 0:
return

msg = UNUSED_RESOURCE_CONFIGURATION_PATH_MESSAGE.format(
len(unused), "\n".join("- {}".format(".".join(u)) for u in unused)
)

warn_or_error(msg, log_fmt=warning_tag("{}"))
warn_or_error(UnusedResourceConfigPath(unused_config_paths=unused_resource_config_paths))

def load_dependencies(self, base_only=False) -> Mapping[str, "RuntimeConfig"]:
if self.dependencies is None:

@@ -482,6 +508,7 @@ class UnsetProfileConfig(RuntimeConfig):
"sources": self.sources,
"tests": self.tests,
"metrics": self.metrics,
"entities": self.entities,
"exposures": self.exposures,
"vars": self.vars.to_dict(),
"require-dbt-version": [v.to_version_string() for v in self.dbt_version],

@@ -544,6 +571,7 @@ class UnsetProfileConfig(RuntimeConfig):
sources=project.sources,
tests=project.tests,
metrics=project.metrics,
entities=project.entities,
exposures=project.exposures,
vars=project.vars,
config_version=project.config_version,

@@ -591,14 +619,6 @@ class UnsetProfileConfig(RuntimeConfig):
return cls.from_parts(project=project, profile=profile, args=args)

UNUSED_RESOURCE_CONFIGURATION_PATH_MESSAGE = """\
Configuration paths exist in your dbt_project.yml file which do not \
apply to any resources.
There are {} unused configuration paths:
{}
"""

def _is_config_used(path, fqns):
if fqns:
for fqn in fqns:

@@ -1,3 +1,10 @@
SECRET_ENV_PREFIX = "DBT_ENV_SECRET_"
DEFAULT_ENV_PLACEHOLDER = "DBT_DEFAULT_PLACEHOLDER"
METADATA_ENV_PREFIX = "DBT_ENV_CUSTOM_ENV_"

MAXIMUM_SEED_SIZE = 1 * 1024 * 1024
MAXIMUM_SEED_SIZE_NAME = "1MB"

PIN_PACKAGE_URL = (
"https://docs.getdbt.com/docs/package-management#section-specifying-package-versions"
)

@@ -4,6 +4,7 @@ from typing import Any, Dict, NoReturn, Optional, Mapping, Iterable, Set, List

from dbt import flags
from dbt import tracking
from dbt import utils
from dbt.clients.jinja import get_rendered
from dbt.clients.yaml_helper import yaml, safe_load, SafeLoader, Loader, Dumper  # noqa: F401
from dbt.constants import SECRET_ENV_PREFIX, DEFAULT_ENV_PLACEHOLDER

@@ -16,7 +17,7 @@ from dbt.exceptions import (
disallow_secret_env_var,
)
from dbt.events.functions import fire_event, get_invocation_id
from dbt.events.types import MacroEventInfo, MacroEventDebug
from dbt.events.types import JinjaLogInfo, JinjaLogDebug
from dbt.version import __version__ as dbt_version

# These modules are added to the context. Consider alternative

@@ -126,7 +127,7 @@ class ContextMeta(type):

class Var:
UndefinedVarError = "Required var '{}' not found in config:\nVars " "supplied to {} = {}"
UndefinedVarError = "Required var '{}' not found in config:\nVars supplied to {} = {}"
_VAR_NOTSET = object()

def __init__(

@@ -557,9 +558,9 @@ class BaseContext(metaclass=ContextMeta):
{% endmacro %}"
"""
if info:
fire_event(MacroEventInfo(msg=msg))
fire_event(JinjaLogInfo(msg=msg))
else:
fire_event(MacroEventDebug(msg=msg))
fire_event(JinjaLogDebug(msg=msg))
return ""

@contextproperty

@@ -687,6 +688,19 @@ class BaseContext(metaclass=ContextMeta):
dict_diff.update({k: dict_a[k]})
return dict_diff

@contextmember
@staticmethod
def local_md5(value: str) -> str:
"""Calculates an MD5 hash of the given string.
It's called "local_md5" to emphasize that it runs locally in dbt (in jinja context) and not an MD5 SQL command.

:param value: The value to hash

Usage:
{% set value_hash = local_md5("hello world") %}
"""
return utils.md5(value)

def generate_base_context(cli_vars: Dict[str, Any]) -> Dict[str, Any]:
ctx = BaseContext(cli_vars)
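
The new local_md5 context member simply delegates to dbt.utils.md5 and, per its docstring, hashes the string inside the Jinja context rather than in SQL. Assuming that helper is a plain hashlib wrapper (an assumption — the utils implementation is not shown in this diff), the behavior is roughly:

    import hashlib

    def local_md5(value: str) -> str:
        # Presumed equivalent of dbt.utils.md5: hex digest of the UTF-8 encoded string.
        return hashlib.md5(value.encode("utf-8")).hexdigest()

    # Matches the docstring's Jinja usage: {% set value_hash = local_md5("hello world") %}
    print(local_md5("hello world"))  # 5eb63bbbe01eeed093cb22bb8f5acdc3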

@@ -45,6 +45,8 @@ class UnrenderedConfig(ConfigSource):
model_configs = unrendered.get("tests")
elif resource_type == NodeType.Metric:
model_configs = unrendered.get("metrics")
elif resource_type == NodeType.Entity:
model_configs = unrendered.get("entities")
elif resource_type == NodeType.Exposure:
model_configs = unrendered.get("exposures")
else:

@@ -70,6 +72,8 @@ class RenderedConfig(ConfigSource):
model_configs = self.project.tests
elif resource_type == NodeType.Metric:
model_configs = self.project.metrics
elif resource_type == NodeType.Entity:
model_configs = self.project.entities
elif resource_type == NodeType.Exposure:
model_configs = self.project.exposures
else:

@@ -37,6 +37,7 @@ from dbt.contracts.graph.parsed import (
ParsedMacro,
ParsedExposure,
ParsedMetric,
ParsedEntity,
ParsedSeedNode,
ParsedSourceDefinition,
)

@@ -53,7 +54,6 @@ from dbt.exceptions import (
raise_compiler_error,
ref_invalid_args,
metric_invalid_args,
ref_target_not_found,
target_not_found,
ref_bad_context,
wrapped_exports,

@@ -182,7 +182,7 @@ class BaseDatabaseWrapper:
return macro

searched = ", ".join(repr(a) for a in attempts)
msg = f"In dispatch: No macro named '{macro_name}' found\n" f" Searched for: {searched}"
msg = f"In dispatch: No macro named '{macro_name}' found\n Searched for: {searched}"
raise CompilationException(msg)

@@ -220,12 +220,12 @@ class BaseRefResolver(BaseResolver):
def validate_args(self, name: str, package: Optional[str]):
if not isinstance(name, str):
raise CompilationException(
f"The name argument to ref() must be a string, got " f"{type(name)}"
f"The name argument to ref() must be a string, got {type(name)}"
)

if package is not None and not isinstance(package, str):
raise CompilationException(
f"The package argument to ref() must be a string or None, got " f"{type(package)}"
f"The package argument to ref() must be a string or None, got {type(package)}"
)

def __call__(self, *args: str) -> RelationProxy:

@@ -302,12 +302,10 @@ class BaseMetricResolver(BaseResolver):
self.validate_args(name, package)
return self.resolve(name, package)

class Config(Protocol):
def __init__(self, model, context_config: Optional[ContextConfig]):
...

# Implementation of "config(..)" calls in models
class ParseConfigObject(Config):
def __init__(self, model, context_config: Optional[ContextConfig]):

@@ -476,10 +474,11 @@ class RuntimeRefResolver(BaseRefResolver):
)

if target_model is None or isinstance(target_model, Disabled):
ref_target_not_found(
self.model,
target_name,
target_package,
target_not_found(
node=self.model,
target_name=target_name,
target_kind="node",
target_package=target_package,
disabled=isinstance(target_model, Disabled),
)
self.validate(target_model, target_name, target_package)

@@ -803,6 +802,7 @@ class ProviderContext(ManifestContext):
raise_compiler_error(
"can only load_agate_table for seeds (got a {})".format(self.model.resource_type)
)
assert self.model.root_path
path = os.path.join(self.model.root_path, self.model.original_file_path)
column_types = self.model.config.column_types
try:

@@ -1434,6 +1434,14 @@ class ExposureSourceResolver(BaseResolver):
return ""

class ExposureMetricResolver(BaseResolver):
def __call__(self, *args) -> str:
if len(args) not in (1, 2):
metric_invalid_args(self.model, args)
self.model.metrics.append(list(args))
return ""

def generate_parse_exposure(
exposure: ParsedExposure,
config: RuntimeConfig,

@@ -1454,6 +1462,12 @@ def generate_parse_exposure(
project,
manifest,
),
"metric": ExposureMetricResolver(
None,
exposure,
project,
manifest,
),
}

@@ -1477,7 +1491,6 @@ class MetricRefResolver(BaseResolver):
"the name argument to ref() must be a string"
)

def generate_parse_metrics(
metric: ParsedMetric,
config: RuntimeConfig,

@@ -1500,6 +1513,41 @@ def generate_parse_metrics(
),
}

class EntityRefResolver(BaseResolver):
def __call__(self, *args) -> str:
package = None
if len(args) == 1:
name = args[0]
elif len(args) == 2:
package, name = args
else:
ref_invalid_args(self.model, args)
self.validate_args(name, package)
self.model.refs.append(list(args))
return ""

def validate_args(self, name, package):
if not isinstance(name, str):
raise ParsingException(
f"In the entity associated with {self.model.original_file_path} "
"the name argument to ref() must be a string"
)

def generate_parse_entities(
entity: ParsedEntity,
config: RuntimeConfig,
manifest: Manifest,
package_name: str,
) -> Dict[str, Any]:
project = config.load_dependencies()[package_name]
return {
"ref": EntityRefResolver(
None,
entity,
project,
manifest,
),
}

# This class is currently used by the schema parser in order
# to limit the number of macros in the context by using

@@ -94,7 +94,7 @@ class Connection(ExtensibleDbtClassMixin, Replaceable):
self._handle.resolve(self)
except RecursionError as exc:
raise InternalException(
"A connection's open() method attempted to read the " "handle value"
"A connection's open() method attempted to read the handle value"
) from exc
return self._handle

@@ -1,18 +1,16 @@
import hashlib
import os
from dataclasses import dataclass, field

from mashumaro.types import SerializableType
from typing import List, Optional, Union, Dict, Any

from dbt.constants import MAXIMUM_SEED_SIZE
from dbt.dataclass_schema import dbtClassMixin, StrEnum

from .util import SourceKey

MAXIMUM_SEED_SIZE = 1 * 1024 * 1024
MAXIMUM_SEED_SIZE_NAME = "1MB"

class ParseFileType(StrEnum):
Macro = "macro"
Model = "model"

@@ -229,6 +227,7 @@ class SchemaSourceFile(BaseSourceFile):
sources: List[str] = field(default_factory=list)
exposures: List[str] = field(default_factory=list)
metrics: List[str] = field(default_factory=list)
entities: List[str] = field(default_factory=list)
# node patches contain models, seeds, snapshots, analyses
ndp: List[str] = field(default_factory=list)
# any macro patches in this file by macro unique_id.

@@ -7,6 +7,7 @@ from dbt.contracts.graph.parsed import (
ParsedModelNode,
ParsedExposure,
ParsedMetric,
ParsedEntity,
ParsedResource,
ParsedRPCNode,
ParsedSqlNode,

@@ -97,6 +98,7 @@ class CompiledSeedNode(CompiledNode):
# keep this in sync with ParsedSeedNode!
resource_type: NodeType = field(metadata={"restrict": [NodeType.Seed]})
config: SeedConfig = field(default_factory=SeedConfig)
root_path: Optional[str] = None

@property
def empty(self):

@@ -232,4 +234,5 @@ GraphMemberNode = Union[
CompileResultNode,
ParsedExposure,
ParsedMetric,
ParsedEntity,
]

@@ -36,6 +36,7 @@ from dbt.contracts.graph.parsed import (
ParsedGenericTestNode,
ParsedExposure,
ParsedMetric,
ParsedEntity,
HasUniqueID,
UnpatchedSourceDefinition,
ManifestNodes,

@@ -216,8 +217,39 @@ class MetricLookup(dbtClassMixin):
)
return manifest.metrics[unique_id]

class EntityLookup(dbtClassMixin):
def __init__(self, manifest: "Manifest"):
self.storage: Dict[str, Dict[PackageName, UniqueID]] = {}
self.populate(manifest)

# This handles both models/seeds/snapshots and sources/metrics/exposures
def get_unique_id(self, search_name, package: Optional[PackageName]):
return find_unique_id_for_package(self.storage, search_name, package)

def find(self, search_name, package: Optional[PackageName], manifest: "Manifest"):
unique_id = self.get_unique_id(search_name, package)
if unique_id is not None:
return self.perform_lookup(unique_id, manifest)
return None

def add_entity(self, entity: ParsedEntity):
if entity.search_name not in self.storage:
self.storage[entity.search_name] = {}

self.storage[entity.search_name][entity.package_name] = entity.unique_id

def populate(self, manifest):
for entity in manifest.entities.values():
if hasattr(entity, "name"):
self.add_entity(entity)

def perform_lookup(self, unique_id: UniqueID, manifest: "Manifest") -> ParsedEntity:
if unique_id not in manifest.entities:
raise dbt.exceptions.InternalException(
f"Entity {unique_id} found in cache but not found in manifest"
)
return manifest.entities[unique_id]
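
For orientation, EntityLookup keeps a two-level mapping from an entity's search name to per-package unique IDs, which is what find() walks before perform_lookup() pulls the ParsedEntity out of manifest.entities. A hypothetical picture of the storage shape — the names below are invented purely for illustration:

    # EntityLookup.storage maps search_name -> {package_name: unique_id}
    storage = {
        "customers": {
            "my_project": "entity.my_project.customers",   # hypothetical IDs
            "shared_pkg": "entity.shared_pkg.customers",
        },
    }

    # find("customers", "my_project", manifest) resolves through this dict first,
    # then looks the unique_id up in manifest.entities.
    print(storage["customers"]["my_project"])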

# This handles both models/seeds/snapshots and sources/metrics/entities/exposures
class DisabledLookup(dbtClassMixin):
def __init__(self, manifest: "Manifest"):
self.storage: Dict[str, Dict[PackageName, List[Any]]] = {}

@@ -467,6 +499,7 @@ class Disabled(Generic[D]):

MaybeMetricNode = Optional[Union[ParsedMetric, Disabled[ParsedMetric]]]

MaybeEntityNode = Optional[Union[ParsedEntity, Disabled[ParsedEntity]]]

MaybeDocumentation = Optional[ParsedDocumentation]

@@ -499,7 +532,7 @@ def _update_into(dest: MutableMapping[str, T], new_item: T):
existing = dest[unique_id]
if new_item.original_file_path != existing.original_file_path:
raise dbt.exceptions.RuntimeException(
f"cannot update a {new_item.resource_type} to have a new file " f"path!"
f"cannot update a {new_item.resource_type} to have a new file path!"
)
dest[unique_id] = new_item

@@ -611,6 +644,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
docs: MutableMapping[str, ParsedDocumentation] = field(default_factory=dict)
exposures: MutableMapping[str, ParsedExposure] = field(default_factory=dict)
metrics: MutableMapping[str, ParsedMetric] = field(default_factory=dict)
entities: MutableMapping[str, ParsedEntity] = field(default_factory=dict)
selectors: MutableMapping[str, Any] = field(default_factory=dict)
files: MutableMapping[str, AnySourceFile] = field(default_factory=dict)
metadata: ManifestMetadata = field(default_factory=ManifestMetadata)

@@ -632,6 +666,9 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
_metric_lookup: Optional[MetricLookup] = field(
default=None, metadata={"serialize": lambda x: None, "deserialize": lambda x: None}
)
_entity_lookup: Optional[EntityLookup] = field(
default=None, metadata={"serialize": lambda x: None, "deserialize": lambda x: None}
)
_disabled_lookup: Optional[DisabledLookup] = field(
default=None, metadata={"serialize": lambda x: None, "deserialize": lambda x: None}
)

@@ -682,6 +719,9 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
def update_metric(self, new_metric: ParsedMetric):
_update_into(self.metrics, new_metric)

def update_entity(self, new_entity: ParsedEntity):
_update_into(self.entities, new_entity)

def update_node(self, new_node: ManifestNode):
_update_into(self.nodes, new_node)

@@ -697,6 +737,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
self.flat_graph = {
"exposures": {k: v.to_dict(omit_none=False) for k, v in self.exposures.items()},
"metrics": {k: v.to_dict(omit_none=False) for k, v in self.metrics.items()},
"entities": {k: v.to_dict(omit_none=False) for k, v in self.entities.items()},
"nodes": {k: v.to_dict(omit_none=False) for k, v in self.nodes.items()},
"sources": {k: v.to_dict(omit_none=False) for k, v in self.sources.items()},
}

@@ -759,6 +800,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
self.nodes.values(),
self.sources.values(),
self.metrics.values(),
self.entities.values(),
)
for resource in all_resources:
resource_type_plural = resource.resource_type.pluralize()

@@ -787,6 +829,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
docs={k: _deepcopy(v) for k, v in self.docs.items()},
exposures={k: _deepcopy(v) for k, v in self.exposures.items()},
metrics={k: _deepcopy(v) for k, v in self.metrics.items()},
entities={k: _deepcopy(v) for k, v in self.entities.items()},
selectors={k: _deepcopy(v) for k, v in self.selectors.items()},
metadata=self.metadata,
disabled={k: _deepcopy(v) for k, v in self.disabled.items()},

@@ -803,6 +846,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
self.sources.values(),
self.exposures.values(),
self.metrics.values(),
self.entities.values(),
)
)
forward_edges, backward_edges = build_node_edges(edge_members)

@@ -828,6 +872,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
docs=self.docs,
exposures=self.exposures,
metrics=self.metrics,
entities=self.entities,
selectors=self.selectors,
metadata=self.metadata,
disabled=self.disabled,

@@ -849,6 +894,8 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
return self.exposures[unique_id]
elif unique_id in self.metrics:
return self.metrics[unique_id]
elif unique_id in self.entities:
return self.entities[unique_id]
else:
# something terrible has happened
raise dbt.exceptions.InternalException(

@@ -885,6 +932,12 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
self._metric_lookup = MetricLookup(self)
return self._metric_lookup

@property
def entity_lookup(self) -> EntityLookup:
if self._entity_lookup is None:
self._entity_lookup = EntityLookup(self)
return self._entity_lookup

def rebuild_ref_lookup(self):
self._ref_lookup = RefableLookup(self)

@@ -985,6 +1038,32 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
return Disabled(disabled[0])
return None

def resolve_entity(
self,
target_entity_name: str,
target_entity_package: Optional[str],
current_project: str,
node_package: str,
) -> MaybeEntityNode:

entity: Optional[ParsedEntity] = None
disabled: Optional[List[ParsedEntity]] = None

candidates = _search_packages(current_project, node_package, target_entity_package)
for pkg in candidates:
entity = self.entity_lookup.find(target_entity_name, pkg, self)

if entity is not None and entity.config.enabled:
return entity

# it's possible that the node is disabled
if disabled is None:
disabled = self.disabled_lookup.find(f"{target_entity_name}", pkg)
if disabled:
return Disabled(disabled[0])
return None

# Called by DocsRuntimeContext.doc
def resolve_doc(
self,
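
resolve_entity mirrors resolve_metric: it walks the candidate packages from _search_packages, returns the first enabled entity, and falls back to a Disabled wrapper when only a disabled definition exists. A hedged usage sketch — a fragment that assumes `manifest` is an already-parsed Manifest, with entity and project names invented for illustration:

    from dbt.contracts.graph.manifest import Disabled  # Disabled is defined alongside Manifest

    # `manifest` is assumed to be an already-parsed Manifest instance.
    result = manifest.resolve_entity(
        target_entity_name="customers",   # hypothetical entity name
        target_entity_package=None,       # search all candidate packages
        current_project="my_project",     # hypothetical project/package names
        node_package="my_project",
    )

    if result is None:
        print("entity not found")
    elif isinstance(result, Disabled):
        print("entity exists but is disabled")
    else:
        print("resolved:", result.unique_id)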

@@ -1011,6 +1090,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
adapter,
other: "WritableManifest",
selected: AbstractSet[UniqueID],
favor_state: bool = False,
) -> None:
"""Given the selected unique IDs and a writable manifest, update this
manifest by replacing any unselected nodes with their counterpart.

@@ -1025,7 +1105,10 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
node.resource_type in refables
and not node.is_ephemeral
and unique_id not in selected
and not adapter.get_relation(current.database, current.schema, current.identifier)
and (
not adapter.get_relation(current.database, current.schema, current.identifier)
or favor_state
)
):
merged.add(unique_id)
self.nodes[unique_id] = node.replace(deferred=True)
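
The condition change above is the --favor-state behavior described in the Features-20220408 changelog entry: an unselected, non-ephemeral refable node is now deferred to the other manifest either when it has no relation in the target warehouse or, new here, whenever favor_state is set. Reduced to a small predicate (illustrative only; the boolean arguments stand in for the real node and adapter checks):

    def should_defer(node_is_refable: bool, is_ephemeral: bool, is_selected: bool,
                     relation_exists: bool, favor_state: bool) -> bool:
        # Mirrors the merge condition after this change.
        return (
            node_is_refable
            and not is_ephemeral
            and not is_selected
            and (not relation_exists or favor_state)
        )

    # Previously the node was only deferred when the relation was missing;
    # with favor_state=True it is deferred even if the relation already exists.
    print(should_defer(True, False, False, relation_exists=True, favor_state=True))   # True
    print(should_defer(True, False, False, relation_exists=True, favor_state=False))  # False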
@@ -1095,6 +1178,8 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
||||
source_file.add_test(node.unique_id, test_from)
|
||||
if isinstance(node, ParsedMetric):
|
||||
source_file.metrics.append(node.unique_id)
|
||||
if isinstance(node, ParsedEntity):
|
||||
source_file.entities.append(node.unique_id)
|
||||
if isinstance(node, ParsedExposure):
|
||||
source_file.exposures.append(node.unique_id)
|
||||
else:
|
||||
@@ -1110,6 +1195,11 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
||||
self.metrics[metric.unique_id] = metric
|
||||
source_file.metrics.append(metric.unique_id)
|
||||
|
||||
def add_entity(self, source_file: SchemaSourceFile, entity: ParsedEntity):
|
||||
_check_duplicates(entity, self.entities)
|
||||
self.entities[entity.unique_id] = entity
|
||||
source_file.entities.append(entity.unique_id)
|
||||
|
||||
def add_disabled_nofile(self, node: GraphMemberNode):
|
||||
# There can be multiple disabled nodes for the same unique_id
|
||||
if node.unique_id in self.disabled:
|
||||
@@ -1125,6 +1215,8 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
||||
source_file.add_test(node.unique_id, test_from)
|
||||
if isinstance(node, ParsedMetric):
|
||||
source_file.metrics.append(node.unique_id)
|
||||
if isinstance(node, ParsedEntity):
|
||||
source_file.entities.append(node.unique_id)
|
||||
if isinstance(node, ParsedExposure):
|
||||
source_file.exposures.append(node.unique_id)
|
||||
else:
|
||||
@@ -1152,6 +1244,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
||||
self.docs,
|
||||
self.exposures,
|
||||
self.metrics,
|
||||
self.entities,
|
||||
self.selectors,
|
||||
self.files,
|
||||
self.metadata,
|
||||
@@ -1164,6 +1257,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
|
||||
self._source_lookup,
|
||||
self._ref_lookup,
|
||||
self._metric_lookup,
|
||||
self._entity_lookup,
|
||||
self._disabled_lookup,
|
||||
self._analysis_lookup,
|
||||
)
|
||||
@@ -1183,7 +1277,7 @@ AnyManifest = Union[Manifest, MacroManifest]


@dataclass
@schema_version("manifest", 7)
@schema_version("manifest", 8)
class WritableManifest(ArtifactMixin):
nodes: Mapping[UniqueID, ManifestNode] = field(
metadata=dict(description=("The nodes defined in the dbt project and its dependencies"))
@@ -1205,6 +1299,9 @@ class WritableManifest(ArtifactMixin):
metrics: Mapping[UniqueID, ParsedMetric] = field(
metadata=dict(description=("The metrics defined in the dbt project and its dependencies"))
)
entities: Mapping[UniqueID, ParsedEntity] = field(
metadata=dict(description=("The entities defined in the dbt project and its dependencies"))
)
selectors: Mapping[UniqueID, Any] = field(
metadata=dict(description=("The selectors defined in selectors.yml"))
)
@@ -1229,7 +1326,7 @@ class WritableManifest(ArtifactMixin):

@classmethod
def compatible_previous_versions(self):
return [("manifest", 4), ("manifest", 5), ("manifest", 6)]
return [("manifest", 4), ("manifest", 5), ("manifest", 6), ("manifest", 7)]

def __post_serialize__(self, dct):
for unique_id, node in dct["nodes"].items():

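A hedged sketch of how the version gate above behaves once the artifact moves to manifest schema v8: a consumer can accept the current version or any version listed as compatible and upgradeable. Names here are illustrative, not the dbt-core API.

CURRENT_MANIFEST_VERSION = 8
COMPATIBLE_PREVIOUS_VERSIONS = {4, 5, 6, 7}

def can_read_manifest(found_version: int) -> bool:
    # Matches the intent of compatible_previous_versions(): the current schema
    # plus the older schemas that the upgrade pass can translate.
    return found_version == CURRENT_MANIFEST_VERSION or found_version in COMPATIBLE_PREVIOUS_VERSIONS

assert can_read_manifest(7)
assert not can_read_manifest(3)
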
@@ -367,6 +367,9 @@ class BaseConfig(AdditionalPropertiesAllowed, Replaceable):
class MetricConfig(BaseConfig):
enabled: bool = True

@dataclass
class EntityConfig(BaseConfig):
enabled: bool = True

@dataclass
class ExposureConfig(BaseConfig):
@@ -604,6 +607,7 @@ class SnapshotConfig(EmptySnapshotConfig):

RESOURCE_TYPES: Dict[NodeType, Type[BaseConfig]] = {
NodeType.Metric: MetricConfig,
NodeType.Entity: EntityConfig,
NodeType.Exposure: ExposureConfig,
NodeType.Source: SourceConfig,
NodeType.Seed: SeedConfig,

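A small illustrative lookup, mirroring the RESOURCE_TYPES mapping above, showing how a parser might pick the config class for a node type (an entity-specific config for the new entity resource). The enum and class names here are stand-ins, not the dbt-core types.

from enum import Enum

class NodeKind(Enum):
    METRIC = "metric"
    ENTITY = "entity"

class BaseCfg:
    enabled: bool = True

class EntityCfg(BaseCfg):
    pass

CONFIG_BY_KIND = {NodeKind.ENTITY: EntityCfg, NodeKind.METRIC: BaseCfg}

def config_for(kind: NodeKind) -> BaseCfg:
    # Fall back to the generic config for node types without a dedicated class.
    return CONFIG_BY_KIND.get(kind, BaseCfg)()

assert isinstance(config_for(NodeKind.ENTITY), EntityCfg)
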
@@ -18,7 +18,7 @@ from typing import (
from dbt.dataclass_schema import dbtClassMixin, ExtensibleDbtClassMixin

from dbt.clients.system import write_file
from dbt.contracts.files import FileHash, MAXIMUM_SEED_SIZE_NAME
from dbt.contracts.files import FileHash
from dbt.contracts.graph.unparsed import (
UnparsedNode,
UnparsedDocumentation,
@@ -38,10 +38,17 @@ from dbt.contracts.graph.unparsed import (
MaturityType,
MetricFilter,
MetricTime,
EntityDimension
)
from dbt.contracts.util import Replaceable, AdditionalPropertiesMixin
from dbt.events.proto_types import NodeInfo
from dbt.exceptions import warn_or_error
from dbt.events.functions import warn_or_error
from dbt.events.types import (
SeedIncreased,
SeedExceedsLimitSamePath,
SeedExceedsLimitAndPathChanged,
SeedExceedsLimitChecksumChanged,
)
from dbt import flags
from dbt.node_types import ModelLanguage, NodeType

@@ -52,6 +59,7 @@ from .model_config import (
TestConfig,
SourceConfig,
MetricConfig,
EntityConfig,
ExposureConfig,
EmptySnapshotConfig,
SnapshotConfig,
@@ -203,6 +211,7 @@ class ParsedNodeDefaults(NodeInfoMixin, ParsedNodeMandatory):
refs: List[List[str]] = field(default_factory=list)
sources: List[List[str]] = field(default_factory=list)
metrics: List[List[str]] = field(default_factory=list)
entities: List[List[str]] = field(default_factory=list)
depends_on: DependsOn = field(default_factory=DependsOn)
description: str = field(default="")
columns: Dict[str, ColumnInfo] = field(default_factory=dict)
@@ -246,7 +255,7 @@ class ParsedNode(ParsedNodeDefaults, ParsedNodeMixins, SerializableType):
@classmethod
def _deserialize(cls, dct: Dict[str, int]):
# The serialized ParsedNodes do not differ from each other
# in fields that would allow 'from_dict' to distinguis
# in fields that would allow 'from_dict' to distinguish
# between them.
resource_type = dct["resource_type"]
if resource_type == "model":
@@ -375,30 +384,28 @@ def same_seeds(first: ParsedNode, second: ParsedNode) -> bool:
if first.checksum.name == "path":
msg: str
if second.checksum.name != "path":
msg = (
f"Found a seed ({first.package_name}.{first.name}) "
f">{MAXIMUM_SEED_SIZE_NAME} in size. The previous file was "
f"<={MAXIMUM_SEED_SIZE_NAME}, so it has changed"
warn_or_error(
SeedIncreased(package_name=first.package_name, name=first.name), node=first
)
elif result:
msg = (
f"Found a seed ({first.package_name}.{first.name}) "
f">{MAXIMUM_SEED_SIZE_NAME} in size at the same path, dbt "
f"cannot tell if it has changed: assuming they are the same"
warn_or_error(
SeedExceedsLimitSamePath(package_name=first.package_name, name=first.name),
node=first,
)
elif not result:
msg = (
f"Found a seed ({first.package_name}.{first.name}) "
f">{MAXIMUM_SEED_SIZE_NAME} in size. The previous file was in "
f"a different location, assuming it has changed"
warn_or_error(
SeedExceedsLimitAndPathChanged(package_name=first.package_name, name=first.name),
node=first,
)
else:
msg = (
f"Found a seed ({first.package_name}.{first.name}) "
f">{MAXIMUM_SEED_SIZE_NAME} in size. The previous file had a "
f"checksum type of {second.checksum.name}, so it has changed"
warn_or_error(
SeedExceedsLimitChecksumChanged(
package_name=first.package_name,
name=first.name,
checksum_name=second.checksum.name,
),
node=first,
)
warn_or_error(msg, node=first)

return result

@@ -408,6 +415,9 @@ class ParsedSeedNode(ParsedNode):
# keep this in sync with CompiledSeedNode!
resource_type: NodeType = field(metadata={"restrict": [NodeType.Seed]})
config: SeedConfig = field(default_factory=SeedConfig)
# seeds need the root_path because the contents are not loaded initially
# and we need the root_path to load the seed later
root_path: Optional[str] = None

@property
def empty(self):
@@ -757,6 +767,7 @@ class ParsedExposure(UnparsedBaseNode, HasUniqueID, HasFqn):
depends_on: DependsOn = field(default_factory=DependsOn)
refs: List[List[str]] = field(default_factory=list)
sources: List[List[str]] = field(default_factory=list)
metrics: List[List[str]] = field(default_factory=list)
created_at: float = field(default_factory=lambda: time.time())

@property
@@ -837,7 +848,7 @@ class ParsedMetric(UnparsedBaseNode, HasUniqueID, HasFqn):
resource_type: NodeType = NodeType.Metric
meta: Dict[str, Any] = field(default_factory=dict)
tags: List[str] = field(default_factory=list)
config: MetricConfig = field(default_factory=MetricConfig)
config: EntityConfig = field(default_factory=EntityConfig)
unrendered_config: Dict[str, Any] = field(default_factory=dict)
sources: List[List[str]] = field(default_factory=list)
depends_on: DependsOn = field(default_factory=DependsOn)
@@ -910,6 +921,60 @@ class ParsedMetric(UnparsedBaseNode, HasUniqueID, HasFqn):
and True
)

@dataclass
class ParsedEntity(UnparsedBaseNode, HasUniqueID, HasFqn):
name: str
model: str
description: str
dimensions: Dict[str, EntityDimension] = field(default_factory=dict)
model_unique_id: Optional[str] = None
resource_type: NodeType = NodeType.Metric
meta: Dict[str, Any] = field(default_factory=dict)
tags: List[str] = field(default_factory=list)
config: MetricConfig = field(default_factory=MetricConfig)
unrendered_config: Dict[str, Any] = field(default_factory=dict)
sources: List[List[str]] = field(default_factory=list)
depends_on: DependsOn = field(default_factory=DependsOn)
refs: List[List[str]] = field(default_factory=list)
entities: List[List[str]] = field(default_factory=list)
created_at: float = field(default_factory=lambda: time.time())

@property
def depends_on_nodes(self):
return self.depends_on.nodes

@property
def search_name(self):
return self.name

def same_model(self, old: "ParsedEntity") -> bool:
return self.model == old.model

def same_description(self, old: "ParsedEntity") -> bool:
return self.description == old.description

def same_dimensions(self, old: "ParsedEntity") -> bool:
return self.dimensions == old.dimensions

def same_config(self, old: "ParsedEntity") -> bool:
return self.config.same_contents(
self.unrendered_config,
old.unrendered_config,
)

def same_contents(self, old: Optional["ParsedEntity"]) -> bool:
# existing when it didn't before is a change!
# metadata/tags changes are not "changes"
if old is None:
return True

return (
self.same_model(old)
and self.same_description(old)
and self.same_dimensions(old)
and self.same_config(old)
and True
)

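A compact sketch of how the same_* comparisons on ParsedEntity could combine for state comparison: an entity counts as modified when it is new or when any of model, description, dimensions, or config differs from the previous manifest. The dataclass and function below are simplified stand-ins, not dbt-core types.

from dataclasses import dataclass, field
from typing import Dict, Optional

@dataclass
class EntityStub:
    model: str
    description: str = ""
    dimensions: Dict[str, str] = field(default_factory=dict)
    config: Dict[str, object] = field(default_factory=dict)

def entity_modified(new: EntityStub, old: Optional[EntityStub]) -> bool:
    # A brand-new entity is always treated as modified.
    if old is None:
        return True
    return not (
        new.model == old.model
        and new.description == old.description
        and new.dimensions == old.dimensions
        and new.config == old.config
    )

assert entity_modified(EntityStub(model="ref('customers')"), None)
assert not entity_modified(EntityStub(model="m"), EntityStub(model="m"))
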
ManifestNodes = Union[
ParsedAnalysisNode,
@@ -930,5 +995,6 @@ ParsedResource = Union[
ParsedNode,
ParsedExposure,
ParsedMetric,
ParsedEntity,
ParsedSourceDefinition,
]

@@ -24,7 +24,6 @@ from typing import Optional, List, Union, Dict, Any, Sequence
@dataclass
class UnparsedBaseNode(dbtClassMixin, Replaceable):
package_name: str
root_path: str
path: str
original_file_path: str

@@ -364,7 +363,6 @@ class SourcePatch(dbtClassMixin, Replaceable):
@dataclass
class UnparsedDocumentation(dbtClassMixin, Replaceable):
package_name: str
root_path: str
path: str
original_file_path: str

@@ -525,3 +523,47 @@ class UnparsedMetric(dbtClassMixin, Replaceable):

if data.get("model") is not None and data.get("calculation_method") == "derived":
raise ValidationError("Derived metrics cannot have a 'model' property")

@dataclass
class EntityDimension(dbtClassMixin, Mergeable):
"""This class is used for the dimension information at the entity level. It
closely matches the implementation of columns for models."""
name: str
description: str = ""
column_name: Optional[str] = None
date_type: Optional[str] = None
default_timestamp: Optional[bool] = None
primary_key: Optional[bool] = None
time_grains: Optional[List[str]] = field(default_factory=list)
tags: List[str] = field(default_factory=list)
meta: Dict[str, Any] = field(default_factory=dict)

@dataclass
class EntityInheritence(EntityDimension):
"""This class is used for entity dimension inheritence. This class is optional
but if it is present then include needs to be present. Exclude cannot be present
without some idea of what is being included, whereas exclude is fully optional.
The acceptable inputs for include are either a list of columns/dimensions or *
to represent all fields. The acceptable inputs for exclude are a list of columns/
dimensions
"""
include: Union[List[str],str] = field(default_factory=list)
exclude: Optional[List[str]] = field(default_factory=list)

@dataclass
class UnparsedEntity(dbtClassMixin, Replaceable):
"""This class is used for entity information"""
name: str
model: str
description: str = ""
dimensions: Optional[Union[Optional[Sequence[EntityDimension]],Optional[EntityInheritence]]] = None
# dimensions: Optional[Sequence[EntityDimension]] = None
meta: Dict[str, Any] = field(default_factory=dict)
tags: List[str] = field(default_factory=list)
config: Dict[str, Any] = field(default_factory=dict)

@classmethod
def validate(cls, data):
super(UnparsedEntity, cls).validate(data)
errors = []
## TODO: Add validation here around include/exclude and others
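
A hedged sketch of how the dimensions union above could be interpreted when parsing an entity: either an explicit list of dimension specs, or an include/exclude "inheritance" spec where include may be "*" or a list. Function and key names are illustrative, not the dbt-core parser.

from typing import Any, Dict, List, Union

def resolve_dimensions(spec: Union[List[Dict[str, Any]], Dict[str, Any]], model_columns: List[str]) -> List[str]:
    # Explicit list of dimension definitions: take their names as given.
    if isinstance(spec, list):
        return [d["name"] for d in spec]
    # Inheritance spec: include is "*" or a list; exclude is optional.
    include = spec.get("include", [])
    exclude = set(spec.get("exclude", []))
    base = model_columns if include == "*" else list(include)
    return [c for c in base if c not in exclude]

assert resolve_dimensions([{"name": "country"}], ["country", "id"]) == ["country"]
assert resolve_dimensions({"include": "*", "exclude": ["id"]}, ["country", "id"]) == ["country"]
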
@@ -12,9 +12,7 @@ from dataclasses import dataclass, field
from typing import Optional, List, Dict, Union, Any
from mashumaro.types import SerializableType

PIN_PACKAGE_URL = (
"https://docs.getdbt.com/docs/package-management#section-specifying-package-versions" # noqa
)

DEFAULT_SEND_ANONYMOUS_USAGE_STATS = True


@@ -210,6 +208,7 @@ class Project(HyphenatedDbtClassMixin, Replaceable):
sources: Dict[str, Any] = field(default_factory=dict)
tests: Dict[str, Any] = field(default_factory=dict)
metrics: Dict[str, Any] = field(default_factory=dict)
entities: Dict[str, Any] = field(default_factory=dict)
exposures: Dict[str, Any] = field(default_factory=dict)
vars: Optional[Dict[str, Any]] = field(
default=None,

@@ -220,7 +220,9 @@ class RunResultsArtifact(ExecutionResult, ArtifactMixin):
generated_at: datetime,
args: Dict,
):
processed_results = [process_run_result(result) for result in results]
processed_results = [
process_run_result(result) for result in results if isinstance(result, RunResult)
]
meta = RunResultsMetadata(
dbt_schema_version=str(cls.dbt_schema_version),
generated_at=generated_at,
@@ -339,7 +341,7 @@ def process_freshness_result(result: FreshnessNodeResult) -> FreshnessNodeOutput
criteria = result.node.freshness
if criteria is None:
raise InternalException(
"Somehow evaluated a freshness result for a source " "that has no freshness criteria!"
"Somehow evaluated a freshness result for a source that has no freshness criteria!"
)
return SourceFreshnessOutput(
unique_id=unique_id,

@@ -240,13 +240,32 @@ def rename_sql_attr(node_content: dict) -> dict:
def upgrade_manifest_json(manifest: dict) -> dict:
for node_content in manifest.get("nodes", {}).values():
node_content = rename_sql_attr(node_content)
if node_content["resource_type"] != "seed" and "root_path" in node_content:
del node_content["root_path"]
for disabled in manifest.get("disabled", {}).values():
# There can be multiple disabled nodes for the same unique_id
# so make sure all the nodes get the attr renamed
disabled = [rename_sql_attr(n) for n in disabled]
for node_content in disabled:
rename_sql_attr(node_content)
if node_content["resource_type"] != "seed" and "root_path" in node_content:
del node_content["root_path"]
for metric_content in manifest.get("metrics", {}).values():
# handle attr renames + value translation ("expression" -> "derived")
metric_content = rename_metric_attr(metric_content)
if "root_path" in metric_content:
del metric_content["root_path"]
for exposure_content in manifest.get("exposures", {}).values():
if "root_path" in exposure_content:
del exposure_content["root_path"]
for source_content in manifest.get("sources", {}).values():
if "root_path" in exposure_content:
del source_content["root_path"]
for macro_content in manifest.get("macros", {}).values():
if "root_path" in macro_content:
del macro_content["root_path"]
for doc_content in manifest.get("docs", {}).values():
if "root_path" in doc_content:
del doc_content["root_path"]
return manifest


@@ -291,7 +310,7 @@ class VersionedSchema(dbtClassMixin):
expected=str(cls.dbt_schema_version),
found=previous_schema_version,
)
if get_manifest_schema_version(data) <= 6:
if get_manifest_schema_version(data) <= 7:
data = upgrade_manifest_json(data)
return cls.from_dict(data) # type: ignore


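A small sketch of the read path implied by the changes above: a consumer loading a serialized manifest checks the embedded schema version and, for schema 7 or older, runs an upgrade pass (which now also strips root_path from non-seed nodes) before deserializing. The loader function and its parsing are illustrative stand-ins, not the dbt-core API.

import json

def load_manifest_dict(path: str) -> dict:
    with open(path) as f:
        data = json.load(f)
    # dbt artifacts embed the schema id as a URL ending in /v<version>.json.
    schema_id = data["metadata"]["dbt_schema_version"]
    version = int(schema_id.split("/v")[-1].split(".")[0])
    if version <= 7:
        # Placeholder for an upgrade_manifest_json-style translation pass.
        for node in data.get("nodes", {}).values():
            if node.get("resource_type") != "seed":
                node.pop("root_path", None)
    return data
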
@@ -1,14 +1,14 @@
import abc
from typing import Optional, Set, List, Dict, ClassVar

import dbt.exceptions
from dbt import ui

import dbt.tracking


class DBTDeprecation:
_name: ClassVar[Optional[str]] = None
_description: ClassVar[Optional[str]] = None
_event: ClassVar[Optional[str]] = None

@property
def name(self) -> str:
@@ -21,66 +21,50 @@ class DBTDeprecation:
dbt.tracking.track_deprecation_warn({"deprecation_name": self.name})

@property
def description(self) -> str:
if self._description is not None:
return self._description
raise NotImplementedError("description not implemented for {}".format(self))
def event(self) -> abc.ABCMeta:
if self._event is not None:
module_path = dbt.events.types
class_name = self._event

try:
return getattr(module_path, class_name)
except AttributeError:
msg = f"Event Class `{class_name}` is not defined in `{module_path}`"
raise NameError(msg)
raise NotImplementedError("event not implemented for {}".format(self._event))

def show(self, *args, **kwargs) -> None:
if self.name not in active_deprecations:
desc = self.description.format(**kwargs)
msg = ui.line_wrap_message(desc, prefix="Deprecated functionality\n\n")
dbt.exceptions.warn_or_error(msg, log_fmt=ui.warning_tag("{}"))
event = self.event(**kwargs)
dbt.events.functions.warn_or_error(event)
self.track_deprecation_warn()
active_deprecations.add(self.name)


class PackageRedirectDeprecation(DBTDeprecation):
_name = "package-redirect"
_description = """\
The `{old_name}` package is deprecated in favor of `{new_name}`. Please update
your `packages.yml` configuration to use `{new_name}` instead.
"""
_event = "PackageRedirectDeprecation"


class PackageInstallPathDeprecation(DBTDeprecation):
_name = "install-packages-path"
_description = """\
The default package install path has changed from `dbt_modules` to `dbt_packages`.
Please update `clean-targets` in `dbt_project.yml` and check `.gitignore` as well.
Or, set `packages-install-path: dbt_modules` if you'd like to keep the current value.
"""
_event = "PackageInstallPathDeprecation"


class ConfigPathDeprecation(DBTDeprecation):
_description = """\
The `{deprecated_path}` config has been renamed to `{exp_path}`.
Please update your `dbt_project.yml` configuration to reflect this change.
"""


class ConfigSourcePathDeprecation(ConfigPathDeprecation):
class ConfigSourcePathDeprecation(DBTDeprecation):
_name = "project-config-source-paths"
_event = "ConfigSourcePathDeprecation"


class ConfigDataPathDeprecation(ConfigPathDeprecation):
class ConfigDataPathDeprecation(DBTDeprecation):
_name = "project-config-data-paths"


_adapter_renamed_description = """\
The adapter function `adapter.{old_name}` is deprecated and will be removed in
a future release of dbt. Please use `adapter.{new_name}` instead.

Documentation for {new_name} can be found here:

https://docs.getdbt.com/docs/adapter
"""
_event = "ConfigDataPathDeprecation"


def renamed_method(old_name: str, new_name: str):
class AdapterDeprecationWarning(DBTDeprecation):
_name = "adapter:{}".format(old_name)
_description = _adapter_renamed_description.format(old_name=old_name, new_name=new_name)
_event = "AdapterDeprecationWarning"

dep = AdapterDeprecationWarning()
deprecations_list.append(dep)
@@ -89,26 +73,12 @@ def renamed_method(old_name: str, new_name: str):

class MetricAttributesRenamed(DBTDeprecation):
_name = "metric-attr-renamed"
_description = """\
dbt-core v1.3 renamed attributes for metrics:
\n 'sql' -> 'expression'
\n 'type' -> 'calculation_method'
\n 'type: expression' -> 'calculation_method: derived'
\nThe old metric parameter names will be fully deprecated in v1.4.
\nPlease remove them from the metric definition of metric '{metric_name}'
\nRelevant issue here: https://github.com/dbt-labs/dbt-core/issues/5849
"""
_event = "MetricAttributesRenamed"


class ExposureNameDeprecation(DBTDeprecation):
_name = "exposure-name"
_description = """\
Starting in v1.3, the 'name' of an exposure should contain only letters, numbers, and underscores.
Exposures support a new property, 'label', which may contain spaces, capital letters, and special characters.
{exposure} does not follow this pattern.
Please update the 'name', and use the 'label' property for a human-friendly title.
This will raise an error in a future version of dbt-core.
"""
_event = "ExposureNameDeprecation"


def warn(name, *args, **kwargs):
@@ -125,12 +95,12 @@ def warn(name, *args, **kwargs):
active_deprecations: Set[str] = set()

deprecations_list: List[DBTDeprecation] = [
ExposureNameDeprecation(),
PackageRedirectDeprecation(),
PackageInstallPathDeprecation(),
ConfigSourcePathDeprecation(),
ConfigDataPathDeprecation(),
PackageInstallPathDeprecation(),
PackageRedirectDeprecation(),
MetricAttributesRenamed(),
ExposureNameDeprecation(),
]

deprecations: Dict[str, DBTDeprecation] = {d.name: d for d in deprecations_list}

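A self-contained sketch of the lookup pattern the refactored event() method above relies on: resolving an event class by name from a module of event types via getattr, and raising a clear error when the name is unknown. The stand-in "module" here is built with SimpleNamespace rather than dbt.events.types.

from types import SimpleNamespace

class PackageRedirectDeprecationEvent:
    pass

event_types = SimpleNamespace(PackageRedirectDeprecation=PackageRedirectDeprecationEvent)

def resolve_event_class(class_name: str):
    # Mirrors the getattr + NameError pattern shown in the hunk above.
    try:
        return getattr(event_types, class_name)
    except AttributeError:
        raise NameError(f"Event Class `{class_name}` is not defined in `{event_types}`")

assert resolve_event_class("PackageRedirectDeprecation") is PackageRedirectDeprecationEvent
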
@@ -74,7 +74,7 @@ class PinnedPackage(BasePackage):
raise NotImplementedError

@abc.abstractmethod
def install(self, project):
def install(self, project, renderer):
raise NotImplementedError

@abc.abstractmethod

@@ -9,14 +9,9 @@ from dbt.contracts.project import (
GitPackage,
)
from dbt.deps.base import PinnedPackage, UnpinnedPackage, get_downloads_path
from dbt.exceptions import ExecutableError, warn_or_error, raise_dependency_error
from dbt.events.functions import fire_event
from dbt.events.types import EnsureGitInstalled
from dbt import ui

PIN_PACKAGE_URL = (
"https://docs.getdbt.com/docs/package-management#section-specifying-package-versions" # noqa
)
from dbt.exceptions import ExecutableError, raise_dependency_error
from dbt.events.functions import fire_event, warn_or_error
from dbt.events.types import EnsureGitInstalled, DepsUnpinned


def md5sum(s: str):
@@ -62,14 +57,6 @@ class GitPinnedPackage(GitPackageMixin, PinnedPackage):
else:
return "revision {}".format(self.revision)

def unpinned_msg(self):
if self.revision == "HEAD":
return "not pinned, using HEAD (default branch)"
elif self.revision in ("main", "master"):
return f'pinned to the "{self.revision}" branch'
else:
return None

def _checkout(self):
"""Performs a shallow clone of the repository into the downloads
directory. This function can be called repeatedly. If the project has
@@ -92,14 +79,8 @@ class GitPinnedPackage(GitPackageMixin, PinnedPackage):
def _fetch_metadata(self, project, renderer) -> ProjectPackageMetadata:
path = self._checkout()

if self.unpinned_msg() and self.warn_unpinned:
warn_or_error(
'The git package "{}" \n\tis {}.\n\tThis can introduce '
"breaking changes into your project without warning!\n\nSee {}".format(
self.git, self.unpinned_msg(), PIN_PACKAGE_URL
),
log_fmt=ui.yellow("WARNING: {}"),
)
if (self.revision == "HEAD" or self.revision in ("main", "master")) and self.warn_unpinned:
warn_or_error(DepsUnpinned(git=self.git))
loaded = Project.from_project_root(path, renderer)
return ProjectPackageMetadata.from_project(loaded)


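A one-function sketch of the simplified unpinned check that replaces unpinned_msg() above: a git package counts as unpinned when its revision is HEAD or a default branch name, and the structured warning fires only when warn_unpinned is set. The function name is illustrative.

def should_warn_unpinned(revision: str, warn_unpinned: bool = True) -> bool:
    # Same condition as the new guard around warn_or_error(DepsUnpinned(...)).
    return warn_unpinned and (revision == "HEAD" or revision in ("main", "master"))

assert should_warn_unpinned("main")
assert not should_warn_unpinned("1.1.0")
assert not should_warn_unpinned("HEAD", warn_unpinned=False)
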
New vendored files added under core/dbt/docs/build/ (generated Sphinx documentation build output):

BIN  core/dbt/docs/build/doctrees/environment.pickle (vendored, new file; binary file not shown)
BIN  core/dbt/docs/build/doctrees/index.doctree (vendored, new file; binary file not shown)
4    core/dbt/docs/build/html/.buildinfo (vendored, new file; Sphinx build metadata)
4    core/dbt/docs/build/html/_sources/index.rst.txt (vendored, new file):

dbt-core's API documentation
============================

.. dbt_click:: dbt.cli.main:cli

134  core/dbt/docs/build/html/_static/_sphinx_javascript_frameworks_compat.js (vendored, new file; stock Sphinx jQuery/underscore compatibility shim, contents not reproduced)
701  core/dbt/docs/build/html/_static/alabaster.css (vendored, new file; stock Alabaster theme stylesheet, contents not reproduced)
900  core/dbt/docs/build/html/_static/basic.css (vendored, new file; stock Sphinx base stylesheet, contents not reproduced)
|
||||
|
||||
.classifier {
|
||||
font-style: oblique;
|
||||
}
|
||||
|
||||
.classifier:before {
|
||||
font-style: normal;
|
||||
margin: 0 0.5em;
|
||||
content: ":";
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
abbr, acronym {
|
||||
border-bottom: dotted 1px;
|
||||
cursor: help;
|
||||
}
|
||||
|
||||
/* -- code displays --------------------------------------------------------- */
|
||||
|
||||
pre {
|
||||
overflow: auto;
|
||||
overflow-y: hidden; /* fixes display issues on Chrome browsers */
|
||||
}
|
||||
|
||||
pre, div[class*="highlight-"] {
|
||||
clear: both;
|
||||
}
|
||||
|
||||
span.pre {
|
||||
-moz-hyphens: none;
|
||||
-ms-hyphens: none;
|
||||
-webkit-hyphens: none;
|
||||
hyphens: none;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
div[class*="highlight-"] {
|
||||
margin: 1em 0;
|
||||
}
|
||||
|
||||
td.linenos pre {
|
||||
border: 0;
|
||||
background-color: transparent;
|
||||
color: #aaa;
|
||||
}
|
||||
|
||||
table.highlighttable {
|
||||
display: block;
|
||||
}
|
||||
|
||||
table.highlighttable tbody {
|
||||
display: block;
|
||||
}
|
||||
|
||||
table.highlighttable tr {
|
||||
display: flex;
|
||||
}
|
||||
|
||||
table.highlighttable td {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
table.highlighttable td.linenos {
|
||||
padding-right: 0.5em;
|
||||
}
|
||||
|
||||
table.highlighttable td.code {
|
||||
flex: 1;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.highlight .hll {
|
||||
display: block;
|
||||
}
|
||||
|
||||
div.highlight pre,
|
||||
table.highlighttable pre {
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
div.code-block-caption + div {
|
||||
margin-top: 0;
|
||||
}
|
||||
|
||||
div.code-block-caption {
|
||||
margin-top: 1em;
|
||||
padding: 2px 5px;
|
||||
font-size: small;
|
||||
}
|
||||
|
||||
div.code-block-caption code {
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
table.highlighttable td.linenos,
|
||||
span.linenos,
|
||||
div.highlight span.gp { /* gp: Generic.Prompt */
|
||||
user-select: none;
|
||||
-webkit-user-select: text; /* Safari fallback only */
|
||||
-webkit-user-select: none; /* Chrome/Safari */
|
||||
-moz-user-select: none; /* Firefox */
|
||||
-ms-user-select: none; /* IE10+ */
|
||||
}
|
||||
|
||||
div.code-block-caption span.caption-number {
|
||||
padding: 0.1em 0.3em;
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
div.code-block-caption span.caption-text {
|
||||
}
|
||||
|
||||
div.literal-block-wrapper {
|
||||
margin: 1em 0;
|
||||
}
|
||||
|
||||
code.xref, a code {
|
||||
background-color: transparent;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
h1 code, h2 code, h3 code, h4 code, h5 code, h6 code {
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
.viewcode-link {
|
||||
float: right;
|
||||
}
|
||||
|
||||
.viewcode-back {
|
||||
float: right;
|
||||
font-family: sans-serif;
|
||||
}
|
||||
|
||||
div.viewcode-block:target {
|
||||
margin: -1px -10px;
|
||||
padding: 0 10px;
|
||||
}
|
||||
|
||||
/* -- math display ---------------------------------------------------------- */
|
||||
|
||||
img.math {
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
div.body div.math p {
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
span.eqno {
|
||||
float: right;
|
||||
}
|
||||
|
||||
span.eqno a.headerlink {
|
||||
position: absolute;
|
||||
z-index: 1;
|
||||
}
|
||||
|
||||
div.math:hover a.headerlink {
|
||||
visibility: visible;
|
||||
}
|
||||
|
||||
/* -- printout stylesheet --------------------------------------------------- */
|
||||
|
||||
@media print {
|
||||
div.document,
|
||||
div.documentwrapper,
|
||||
div.bodywrapper {
|
||||
margin: 0 !important;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
div.sphinxsidebar,
|
||||
div.related,
|
||||
div.footer,
|
||||
#top-link {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
1
core/dbt/docs/build/html/_static/custom.css
vendored
Normal file
@@ -0,0 +1 @@
|
||||
/* This file intentionally left blank. */
|
||||
156
core/dbt/docs/build/html/_static/doctools.js
vendored
Normal file
@@ -0,0 +1,156 @@
|
||||
/*
|
||||
* doctools.js
|
||||
* ~~~~~~~~~~~
|
||||
*
|
||||
* Base JavaScript utilities for all Sphinx HTML documentation.
|
||||
*
|
||||
* :copyright: Copyright 2007-2022 by the Sphinx team, see AUTHORS.
|
||||
* :license: BSD, see LICENSE for details.
|
||||
*
|
||||
*/
|
||||
"use strict";
|
||||
|
||||
const BLACKLISTED_KEY_CONTROL_ELEMENTS = new Set([
|
||||
"TEXTAREA",
|
||||
"INPUT",
|
||||
"SELECT",
|
||||
"BUTTON",
|
||||
]);
|
||||
|
||||
const _ready = (callback) => {
|
||||
if (document.readyState !== "loading") {
|
||||
callback();
|
||||
} else {
|
||||
document.addEventListener("DOMContentLoaded", callback);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Small JavaScript module for the documentation.
|
||||
*/
|
||||
const Documentation = {
|
||||
init: () => {
|
||||
Documentation.initDomainIndexTable();
|
||||
Documentation.initOnKeyListeners();
|
||||
},
|
||||
|
||||
/**
|
||||
* i18n support
|
||||
*/
|
||||
TRANSLATIONS: {},
|
||||
PLURAL_EXPR: (n) => (n === 1 ? 0 : 1),
|
||||
LOCALE: "unknown",
|
||||
|
||||
// gettext and ngettext don't access this so that the functions
|
||||
// can safely be bound to a different name (_ = Documentation.gettext)
|
||||
gettext: (string) => {
|
||||
const translated = Documentation.TRANSLATIONS[string];
|
||||
switch (typeof translated) {
|
||||
case "undefined":
|
||||
return string; // no translation
|
||||
case "string":
|
||||
return translated; // translation exists
|
||||
default:
|
||||
return translated[0]; // (singular, plural) translation tuple exists
|
||||
}
|
||||
},
|
||||
|
||||
ngettext: (singular, plural, n) => {
|
||||
const translated = Documentation.TRANSLATIONS[singular];
|
||||
if (typeof translated !== "undefined")
|
||||
return translated[Documentation.PLURAL_EXPR(n)];
|
||||
return n === 1 ? singular : plural;
|
||||
},
|
||||
|
||||
addTranslations: (catalog) => {
|
||||
Object.assign(Documentation.TRANSLATIONS, catalog.messages);
|
||||
Documentation.PLURAL_EXPR = new Function(
|
||||
"n",
|
||||
`return (${catalog.plural_expr})`
|
||||
);
|
||||
Documentation.LOCALE = catalog.locale;
|
||||
},
|
||||
|
||||
/**
|
||||
* helper function to focus on search bar
|
||||
*/
|
||||
focusSearchBar: () => {
|
||||
document.querySelectorAll("input[name=q]")[0]?.focus();
|
||||
},
|
||||
|
||||
/**
|
||||
* Initialise the domain index toggle buttons
|
||||
*/
|
||||
initDomainIndexTable: () => {
|
||||
const toggler = (el) => {
|
||||
const idNumber = el.id.substr(7);
|
||||
const toggledRows = document.querySelectorAll(`tr.cg-${idNumber}`);
|
||||
if (el.src.substr(-9) === "minus.png") {
|
||||
el.src = `${el.src.substr(0, el.src.length - 9)}plus.png`;
|
||||
toggledRows.forEach((el) => (el.style.display = "none"));
|
||||
} else {
|
||||
el.src = `${el.src.substr(0, el.src.length - 8)}minus.png`;
|
||||
toggledRows.forEach((el) => (el.style.display = ""));
|
||||
}
|
||||
};
|
||||
|
||||
const togglerElements = document.querySelectorAll("img.toggler");
|
||||
togglerElements.forEach((el) =>
|
||||
el.addEventListener("click", (event) => toggler(event.currentTarget))
|
||||
);
|
||||
togglerElements.forEach((el) => (el.style.display = ""));
|
||||
if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler);
|
||||
},
|
||||
|
||||
initOnKeyListeners: () => {
|
||||
// only install a listener if it is really needed
|
||||
if (
|
||||
!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS &&
|
||||
!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS
|
||||
)
|
||||
return;
|
||||
|
||||
document.addEventListener("keydown", (event) => {
|
||||
// bail for input elements
|
||||
if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return;
|
||||
// bail with special keys
|
||||
if (event.altKey || event.ctrlKey || event.metaKey) return;
|
||||
|
||||
if (!event.shiftKey) {
|
||||
switch (event.key) {
|
||||
case "ArrowLeft":
|
||||
if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break;
|
||||
|
||||
const prevLink = document.querySelector('link[rel="prev"]');
|
||||
if (prevLink && prevLink.href) {
|
||||
window.location.href = prevLink.href;
|
||||
event.preventDefault();
|
||||
}
|
||||
break;
|
||||
case "ArrowRight":
|
||||
if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break;
|
||||
|
||||
const nextLink = document.querySelector('link[rel="next"]');
|
||||
if (nextLink && nextLink.href) {
|
||||
window.location.href = nextLink.href;
|
||||
event.preventDefault();
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// some keyboard layouts may need Shift to get /
|
||||
switch (event.key) {
|
||||
case "/":
|
||||
if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) break;
|
||||
Documentation.focusSearchBar();
|
||||
event.preventDefault();
|
||||
}
|
||||
});
|
||||
},
|
||||
};
|
||||
|
||||
// quick alias for translations
|
||||
const _ = Documentation.gettext;
|
||||
|
||||
_ready(Documentation.init);
|
||||
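The i18n helpers above (gettext, ngettext, addTranslations) are normally fed by a generated translations catalog. A minimal usage sketch, assuming an illustrative German catalog that is not part of the committed file:

// Illustrative only: register a catalog, then translate through the `_` alias defined above.
Documentation.addTranslations({
  messages: { "Search Results": "Suchergebnisse" },
  plural_expr: "n === 1 ? 0 : 1", // compiled into Documentation.PLURAL_EXPR via new Function("n", ...)
  locale: "de",
});
_("Search Results");                        // -> "Suchergebnisse"
Documentation.ngettext("page", "pages", 2); // -> "pages" (no tuple in the catalog, so the plain fallback is used)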
14
core/dbt/docs/build/html/_static/documentation_options.js
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
var DOCUMENTATION_OPTIONS = {
|
||||
URL_ROOT: document.getElementById("documentation_options").getAttribute('data-url_root'),
|
||||
VERSION: '',
|
||||
LANGUAGE: 'en',
|
||||
COLLAPSE_INDEX: false,
|
||||
BUILDER: 'html',
|
||||
FILE_SUFFIX: '.html',
|
||||
LINK_SUFFIX: '.html',
|
||||
HAS_SOURCE: true,
|
||||
SOURCELINK_SUFFIX: '.txt',
|
||||
NAVIGATION_WITH_KEYS: false,
|
||||
SHOW_SEARCH_SUMMARY: true,
|
||||
ENABLE_SEARCH_SHORTCUTS: true,
|
||||
};
|
||||
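DOCUMENTATION_OPTIONS is plain global configuration consumed by the other static scripts. A small sketch of how it is typically consulted (illustrative only, not part of the committed file):

// Illustrative only: mirrors how doctools.js, searchtools.js and sphinx_highlight.js read these flags.
if (DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) {
  // the "/" and Escape keyboard shortcuts get wired up
}
// URL of a page named "index", as assembled by the search result renderer,
// given the data-url_root="./" used by the HTML pages in this build:
const pageUrl =
  DOCUMENTATION_OPTIONS.URL_ROOT + "index" + DOCUMENTATION_OPTIONS.FILE_SUFFIX; // "./index.html"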
BIN
core/dbt/docs/build/html/_static/file.png
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 286 B |
10881
core/dbt/docs/build/html/_static/jquery-3.6.0.js
vendored
Normal file
File diff suppressed because it is too large
2
core/dbt/docs/build/html/_static/jquery.js
vendored
Normal file
File diff suppressed because one or more lines are too long
199
core/dbt/docs/build/html/_static/language_data.js
vendored
Normal file
@@ -0,0 +1,199 @@
|
||||
/*
|
||||
* language_data.js
|
||||
* ~~~~~~~~~~~~~~~~
|
||||
*
|
||||
* This script contains the language-specific data used by searchtools.js,
|
||||
* namely the list of stopwords, stemmer, scorer and splitter.
|
||||
*
|
||||
* :copyright: Copyright 2007-2022 by the Sphinx team, see AUTHORS.
|
||||
* :license: BSD, see LICENSE for details.
|
||||
*
|
||||
*/
|
||||
|
||||
var stopwords = ["a", "and", "are", "as", "at", "be", "but", "by", "for", "if", "in", "into", "is", "it", "near", "no", "not", "of", "on", "or", "such", "that", "the", "their", "then", "there", "these", "they", "this", "to", "was", "will", "with"];
|
||||
|
||||
|
||||
/* A non-minified version is copied as a separate JS file, if available */
|
||||
|
||||
/**
|
||||
* Porter Stemmer
|
||||
*/
|
||||
var Stemmer = function() {
|
||||
|
||||
var step2list = {
|
||||
ational: 'ate',
|
||||
tional: 'tion',
|
||||
enci: 'ence',
|
||||
anci: 'ance',
|
||||
izer: 'ize',
|
||||
bli: 'ble',
|
||||
alli: 'al',
|
||||
entli: 'ent',
|
||||
eli: 'e',
|
||||
ousli: 'ous',
|
||||
ization: 'ize',
|
||||
ation: 'ate',
|
||||
ator: 'ate',
|
||||
alism: 'al',
|
||||
iveness: 'ive',
|
||||
fulness: 'ful',
|
||||
ousness: 'ous',
|
||||
aliti: 'al',
|
||||
iviti: 'ive',
|
||||
biliti: 'ble',
|
||||
logi: 'log'
|
||||
};
|
||||
|
||||
var step3list = {
|
||||
icate: 'ic',
|
||||
ative: '',
|
||||
alize: 'al',
|
||||
iciti: 'ic',
|
||||
ical: 'ic',
|
||||
ful: '',
|
||||
ness: ''
|
||||
};
|
||||
|
||||
var c = "[^aeiou]"; // consonant
|
||||
var v = "[aeiouy]"; // vowel
|
||||
var C = c + "[^aeiouy]*"; // consonant sequence
|
||||
var V = v + "[aeiou]*"; // vowel sequence
|
||||
|
||||
var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0
|
||||
var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1
|
||||
var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1
|
||||
var s_v = "^(" + C + ")?" + v; // vowel in stem
|
||||
|
||||
this.stemWord = function (w) {
|
||||
var stem;
|
||||
var suffix;
|
||||
var firstch;
|
||||
var origword = w;
|
||||
|
||||
if (w.length < 3)
|
||||
return w;
|
||||
|
||||
var re;
|
||||
var re2;
|
||||
var re3;
|
||||
var re4;
|
||||
|
||||
firstch = w.substr(0,1);
|
||||
if (firstch == "y")
|
||||
w = firstch.toUpperCase() + w.substr(1);
|
||||
|
||||
// Step 1a
|
||||
re = /^(.+?)(ss|i)es$/;
|
||||
re2 = /^(.+?)([^s])s$/;
|
||||
|
||||
if (re.test(w))
|
||||
w = w.replace(re,"$1$2");
|
||||
else if (re2.test(w))
|
||||
w = w.replace(re2,"$1$2");
|
||||
|
||||
// Step 1b
|
||||
re = /^(.+?)eed$/;
|
||||
re2 = /^(.+?)(ed|ing)$/;
|
||||
if (re.test(w)) {
|
||||
var fp = re.exec(w);
|
||||
re = new RegExp(mgr0);
|
||||
if (re.test(fp[1])) {
|
||||
re = /.$/;
|
||||
w = w.replace(re,"");
|
||||
}
|
||||
}
|
||||
else if (re2.test(w)) {
|
||||
var fp = re2.exec(w);
|
||||
stem = fp[1];
|
||||
re2 = new RegExp(s_v);
|
||||
if (re2.test(stem)) {
|
||||
w = stem;
|
||||
re2 = /(at|bl|iz)$/;
|
||||
re3 = new RegExp("([^aeiouylsz])\\1$");
|
||||
re4 = new RegExp("^" + C + v + "[^aeiouwxy]$");
|
||||
if (re2.test(w))
|
||||
w = w + "e";
|
||||
else if (re3.test(w)) {
|
||||
re = /.$/;
|
||||
w = w.replace(re,"");
|
||||
}
|
||||
else if (re4.test(w))
|
||||
w = w + "e";
|
||||
}
|
||||
}
|
||||
|
||||
// Step 1c
|
||||
re = /^(.+?)y$/;
|
||||
if (re.test(w)) {
|
||||
var fp = re.exec(w);
|
||||
stem = fp[1];
|
||||
re = new RegExp(s_v);
|
||||
if (re.test(stem))
|
||||
w = stem + "i";
|
||||
}
|
||||
|
||||
// Step 2
|
||||
re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/;
|
||||
if (re.test(w)) {
|
||||
var fp = re.exec(w);
|
||||
stem = fp[1];
|
||||
suffix = fp[2];
|
||||
re = new RegExp(mgr0);
|
||||
if (re.test(stem))
|
||||
w = stem + step2list[suffix];
|
||||
}
|
||||
|
||||
// Step 3
|
||||
re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/;
|
||||
if (re.test(w)) {
|
||||
var fp = re.exec(w);
|
||||
stem = fp[1];
|
||||
suffix = fp[2];
|
||||
re = new RegExp(mgr0);
|
||||
if (re.test(stem))
|
||||
w = stem + step3list[suffix];
|
||||
}
|
||||
|
||||
// Step 4
|
||||
re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/;
|
||||
re2 = /^(.+?)(s|t)(ion)$/;
|
||||
if (re.test(w)) {
|
||||
var fp = re.exec(w);
|
||||
stem = fp[1];
|
||||
re = new RegExp(mgr1);
|
||||
if (re.test(stem))
|
||||
w = stem;
|
||||
}
|
||||
else if (re2.test(w)) {
|
||||
var fp = re2.exec(w);
|
||||
stem = fp[1] + fp[2];
|
||||
re2 = new RegExp(mgr1);
|
||||
if (re2.test(stem))
|
||||
w = stem;
|
||||
}
|
||||
|
||||
// Step 5
|
||||
re = /^(.+?)e$/;
|
||||
if (re.test(w)) {
|
||||
var fp = re.exec(w);
|
||||
stem = fp[1];
|
||||
re = new RegExp(mgr1);
|
||||
re2 = new RegExp(meq1);
|
||||
re3 = new RegExp("^" + C + v + "[^aeiouwxy]$");
|
||||
if (re.test(stem) || (re2.test(stem) && !(re3.test(stem))))
|
||||
w = stem;
|
||||
}
|
||||
re = /ll$/;
|
||||
re2 = new RegExp(mgr1);
|
||||
if (re.test(w) && re2.test(w)) {
|
||||
re = /.$/;
|
||||
w = w.replace(re,"");
|
||||
}
|
||||
|
||||
// and turn initial Y back to y
|
||||
if (firstch == "y")
|
||||
w = firstch.toLowerCase() + w.substr(1);
|
||||
return w;
|
||||
}
|
||||
}
|
||||
|
||||
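As a quick illustration of the Porter stemmer above (the example words and results follow the step 1a rules shown in the code; this snippet is not part of the committed file):

// Illustrative only: searchtools.js creates one Stemmer per query and stems every term.
const stemmer = new Stemmer();
stemmer.stemWord("caresses"); // -> "caress" (step 1a: "sses" becomes "ss")
stemmer.stemWord("cats");     // -> "cat"    (step 1a: trailing "s" after a non-"s" is dropped)
stemmer.stemWord("db");       // -> "db"     (words shorter than 3 characters are returned unchanged)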
BIN
core/dbt/docs/build/html/_static/minus.png
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 90 B |
BIN
core/dbt/docs/build/html/_static/plus.png
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 90 B |
83
core/dbt/docs/build/html/_static/pygments.css
vendored
Normal file
@@ -0,0 +1,83 @@
|
||||
pre { line-height: 125%; }
|
||||
td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; }
|
||||
span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; }
|
||||
td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; }
|
||||
span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; }
|
||||
.highlight .hll { background-color: #ffffcc }
|
||||
.highlight { background: #f8f8f8; }
|
||||
.highlight .c { color: #8f5902; font-style: italic } /* Comment */
|
||||
.highlight .err { color: #a40000; border: 1px solid #ef2929 } /* Error */
|
||||
.highlight .g { color: #000000 } /* Generic */
|
||||
.highlight .k { color: #004461; font-weight: bold } /* Keyword */
|
||||
.highlight .l { color: #000000 } /* Literal */
|
||||
.highlight .n { color: #000000 } /* Name */
|
||||
.highlight .o { color: #582800 } /* Operator */
|
||||
.highlight .x { color: #000000 } /* Other */
|
||||
.highlight .p { color: #000000; font-weight: bold } /* Punctuation */
|
||||
.highlight .ch { color: #8f5902; font-style: italic } /* Comment.Hashbang */
|
||||
.highlight .cm { color: #8f5902; font-style: italic } /* Comment.Multiline */
|
||||
.highlight .cp { color: #8f5902 } /* Comment.Preproc */
|
||||
.highlight .cpf { color: #8f5902; font-style: italic } /* Comment.PreprocFile */
|
||||
.highlight .c1 { color: #8f5902; font-style: italic } /* Comment.Single */
|
||||
.highlight .cs { color: #8f5902; font-style: italic } /* Comment.Special */
|
||||
.highlight .gd { color: #a40000 } /* Generic.Deleted */
|
||||
.highlight .ge { color: #000000; font-style: italic } /* Generic.Emph */
|
||||
.highlight .gr { color: #ef2929 } /* Generic.Error */
|
||||
.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */
|
||||
.highlight .gi { color: #00A000 } /* Generic.Inserted */
|
||||
.highlight .go { color: #888888 } /* Generic.Output */
|
||||
.highlight .gp { color: #745334 } /* Generic.Prompt */
|
||||
.highlight .gs { color: #000000; font-weight: bold } /* Generic.Strong */
|
||||
.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */
|
||||
.highlight .gt { color: #a40000; font-weight: bold } /* Generic.Traceback */
|
||||
.highlight .kc { color: #004461; font-weight: bold } /* Keyword.Constant */
|
||||
.highlight .kd { color: #004461; font-weight: bold } /* Keyword.Declaration */
|
||||
.highlight .kn { color: #004461; font-weight: bold } /* Keyword.Namespace */
|
||||
.highlight .kp { color: #004461; font-weight: bold } /* Keyword.Pseudo */
|
||||
.highlight .kr { color: #004461; font-weight: bold } /* Keyword.Reserved */
|
||||
.highlight .kt { color: #004461; font-weight: bold } /* Keyword.Type */
|
||||
.highlight .ld { color: #000000 } /* Literal.Date */
|
||||
.highlight .m { color: #990000 } /* Literal.Number */
|
||||
.highlight .s { color: #4e9a06 } /* Literal.String */
|
||||
.highlight .na { color: #c4a000 } /* Name.Attribute */
|
||||
.highlight .nb { color: #004461 } /* Name.Builtin */
|
||||
.highlight .nc { color: #000000 } /* Name.Class */
|
||||
.highlight .no { color: #000000 } /* Name.Constant */
|
||||
.highlight .nd { color: #888888 } /* Name.Decorator */
|
||||
.highlight .ni { color: #ce5c00 } /* Name.Entity */
|
||||
.highlight .ne { color: #cc0000; font-weight: bold } /* Name.Exception */
|
||||
.highlight .nf { color: #000000 } /* Name.Function */
|
||||
.highlight .nl { color: #f57900 } /* Name.Label */
|
||||
.highlight .nn { color: #000000 } /* Name.Namespace */
|
||||
.highlight .nx { color: #000000 } /* Name.Other */
|
||||
.highlight .py { color: #000000 } /* Name.Property */
|
||||
.highlight .nt { color: #004461; font-weight: bold } /* Name.Tag */
|
||||
.highlight .nv { color: #000000 } /* Name.Variable */
|
||||
.highlight .ow { color: #004461; font-weight: bold } /* Operator.Word */
|
||||
.highlight .pm { color: #000000; font-weight: bold } /* Punctuation.Marker */
|
||||
.highlight .w { color: #f8f8f8; text-decoration: underline } /* Text.Whitespace */
|
||||
.highlight .mb { color: #990000 } /* Literal.Number.Bin */
|
||||
.highlight .mf { color: #990000 } /* Literal.Number.Float */
|
||||
.highlight .mh { color: #990000 } /* Literal.Number.Hex */
|
||||
.highlight .mi { color: #990000 } /* Literal.Number.Integer */
|
||||
.highlight .mo { color: #990000 } /* Literal.Number.Oct */
|
||||
.highlight .sa { color: #4e9a06 } /* Literal.String.Affix */
|
||||
.highlight .sb { color: #4e9a06 } /* Literal.String.Backtick */
|
||||
.highlight .sc { color: #4e9a06 } /* Literal.String.Char */
|
||||
.highlight .dl { color: #4e9a06 } /* Literal.String.Delimiter */
|
||||
.highlight .sd { color: #8f5902; font-style: italic } /* Literal.String.Doc */
|
||||
.highlight .s2 { color: #4e9a06 } /* Literal.String.Double */
|
||||
.highlight .se { color: #4e9a06 } /* Literal.String.Escape */
|
||||
.highlight .sh { color: #4e9a06 } /* Literal.String.Heredoc */
|
||||
.highlight .si { color: #4e9a06 } /* Literal.String.Interpol */
|
||||
.highlight .sx { color: #4e9a06 } /* Literal.String.Other */
|
||||
.highlight .sr { color: #4e9a06 } /* Literal.String.Regex */
|
||||
.highlight .s1 { color: #4e9a06 } /* Literal.String.Single */
|
||||
.highlight .ss { color: #4e9a06 } /* Literal.String.Symbol */
|
||||
.highlight .bp { color: #3465a4 } /* Name.Builtin.Pseudo */
|
||||
.highlight .fm { color: #000000 } /* Name.Function.Magic */
|
||||
.highlight .vc { color: #000000 } /* Name.Variable.Class */
|
||||
.highlight .vg { color: #000000 } /* Name.Variable.Global */
|
||||
.highlight .vi { color: #000000 } /* Name.Variable.Instance */
|
||||
.highlight .vm { color: #000000 } /* Name.Variable.Magic */
|
||||
.highlight .il { color: #990000 } /* Literal.Number.Integer.Long */
|
||||
566
core/dbt/docs/build/html/_static/searchtools.js
vendored
Normal file
@@ -0,0 +1,566 @@
|
||||
/*
|
||||
* searchtools.js
|
||||
* ~~~~~~~~~~~~~~~~
|
||||
*
|
||||
* Sphinx JavaScript utilities for the full-text search.
|
||||
*
|
||||
* :copyright: Copyright 2007-2022 by the Sphinx team, see AUTHORS.
|
||||
* :license: BSD, see LICENSE for details.
|
||||
*
|
||||
*/
|
||||
"use strict";
|
||||
|
||||
/**
|
||||
* Simple result scoring code.
|
||||
*/
|
||||
if (typeof Scorer === "undefined") {
|
||||
var Scorer = {
|
||||
// Implement the following function to further tweak the score for each result
|
||||
// The function takes a result array [docname, title, anchor, descr, score, filename]
|
||||
// and returns the new score.
|
||||
/*
|
||||
score: result => {
|
||||
const [docname, title, anchor, descr, score, filename] = result
|
||||
return score
|
||||
},
|
||||
*/
|
||||
|
||||
// query matches the full name of an object
|
||||
objNameMatch: 11,
|
||||
// or matches in the last dotted part of the object name
|
||||
objPartialMatch: 6,
|
||||
// Additive scores depending on the priority of the object
|
||||
objPrio: {
|
||||
0: 15, // used to be importantResults
|
||||
1: 5, // used to be objectResults
|
||||
2: -5, // used to be unimportantResults
|
||||
},
|
||||
// Used when the priority is not in the mapping.
|
||||
objPrioDefault: 0,
|
||||
|
||||
// query found in title
|
||||
title: 15,
|
||||
partialTitle: 7,
|
||||
// query found in terms
|
||||
term: 5,
|
||||
partialTerm: 2,
|
||||
};
|
||||
}
|
||||
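// Annotation (not part of the committed file): because of the `typeof Scorer === "undefined"`
// guard above, a theme or project can pre-define its own weights in a script loaded *before*
// searchtools.js, for example:
//
//   var Scorer = { objNameMatch: 11, objPartialMatch: 6, objPrio: { 0: 15, 1: 5, 2: -5 },
//                  objPrioDefault: 0, title: 30, partialTitle: 7, term: 5, partialTerm: 2 };
//
// The guard skips the whole default object, so a custom Scorer has to supply every field;
// the raised `title` weight here is only an illustrative tweak.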
|
||||
const _removeChildren = (element) => {
|
||||
while (element && element.lastChild) element.removeChild(element.lastChild);
|
||||
};
|
||||
|
||||
/**
|
||||
* See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#escaping
|
||||
*/
|
||||
const _escapeRegExp = (string) =>
|
||||
string.replace(/[.*+\-?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string
|
||||
|
||||
const _displayItem = (item, searchTerms) => {
|
||||
const docBuilder = DOCUMENTATION_OPTIONS.BUILDER;
|
||||
const docUrlRoot = DOCUMENTATION_OPTIONS.URL_ROOT;
|
||||
const docFileSuffix = DOCUMENTATION_OPTIONS.FILE_SUFFIX;
|
||||
const docLinkSuffix = DOCUMENTATION_OPTIONS.LINK_SUFFIX;
|
||||
const showSearchSummary = DOCUMENTATION_OPTIONS.SHOW_SEARCH_SUMMARY;
|
||||
|
||||
const [docName, title, anchor, descr, score, _filename] = item;
|
||||
|
||||
let listItem = document.createElement("li");
|
||||
let requestUrl;
|
||||
let linkUrl;
|
||||
if (docBuilder === "dirhtml") {
|
||||
// dirhtml builder
|
||||
let dirname = docName + "/";
|
||||
if (dirname.match(/\/index\/$/))
|
||||
dirname = dirname.substring(0, dirname.length - 6);
|
||||
else if (dirname === "index/") dirname = "";
|
||||
requestUrl = docUrlRoot + dirname;
|
||||
linkUrl = requestUrl;
|
||||
} else {
|
||||
// normal html builders
|
||||
requestUrl = docUrlRoot + docName + docFileSuffix;
|
||||
linkUrl = docName + docLinkSuffix;
|
||||
}
|
||||
let linkEl = listItem.appendChild(document.createElement("a"));
|
||||
linkEl.href = linkUrl + anchor;
|
||||
linkEl.dataset.score = score;
|
||||
linkEl.innerHTML = title;
|
||||
if (descr)
|
||||
listItem.appendChild(document.createElement("span")).innerHTML =
|
||||
" (" + descr + ")";
|
||||
else if (showSearchSummary)
|
||||
fetch(requestUrl)
|
||||
.then((responseData) => responseData.text())
|
||||
.then((data) => {
|
||||
if (data)
|
||||
listItem.appendChild(
|
||||
Search.makeSearchSummary(data, searchTerms)
|
||||
);
|
||||
});
|
||||
Search.output.appendChild(listItem);
|
||||
};
|
||||
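// Annotation (not part of the committed file): with the documentation_options.js values in this
// build (URL_ROOT "./", BUILDER "html", FILE_SUFFIX and LINK_SUFFIX ".html"), a result for
// docname "index" with anchor "#build|defer" is rendered by _displayItem as:
//   requestUrl  = "./index.html"            (fetched to build the search summary)
//   linkEl.href = "index.html#build|defer"  (the link shown in the result list)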
const _finishSearch = (resultCount) => {
|
||||
Search.stopPulse();
|
||||
Search.title.innerText = _("Search Results");
|
||||
if (!resultCount)
|
||||
Search.status.innerText = Documentation.gettext(
|
||||
"Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories."
|
||||
);
|
||||
else
|
||||
Search.status.innerText = _(
|
||||
`Search finished, found ${resultCount} page(s) matching the search query.`
|
||||
);
|
||||
};
|
||||
const _displayNextItem = (
|
||||
results,
|
||||
resultCount,
|
||||
searchTerms
|
||||
) => {
|
||||
// results left, load the summary and display it
|
||||
// this is intended to be dynamic (don't substitute resultCount)
|
||||
if (results.length) {
|
||||
_displayItem(results.pop(), searchTerms);
|
||||
setTimeout(
|
||||
() => _displayNextItem(results, resultCount, searchTerms),
|
||||
5
|
||||
);
|
||||
}
|
||||
// search finished, update title and status message
|
||||
else _finishSearch(resultCount);
|
||||
};
|
||||
|
||||
/**
|
||||
* Default splitQuery function. Can be overridden in ``sphinx.search`` with a
|
||||
* custom function per language.
|
||||
*
|
||||
* The regular expression works by splitting the string on consecutive characters
|
||||
* that are not Unicode letters, numbers, underscores, or emoji characters.
|
||||
* This is the same as ``\W+`` in Python, preserving the surrogate pair area.
|
||||
*/
|
||||
if (typeof splitQuery === "undefined") {
|
||||
var splitQuery = (query) => query
|
||||
.split(/[^\p{Letter}\p{Number}_\p{Emoji_Presentation}]+/gu)
|
||||
.filter(term => term) // remove remaining empty strings
|
||||
}
|
||||
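// Annotation (not part of the committed file): the default splitter above keeps letters, digits,
// underscores and emoji and splits on everything else, e.g.
//   splitQuery("dbt_core build-command 42")  ->  ["dbt_core", "build", "command", "42"]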
|
||||
/**
|
||||
* Search Module
|
||||
*/
|
||||
const Search = {
|
||||
_index: null,
|
||||
_queued_query: null,
|
||||
_pulse_status: -1,
|
||||
|
||||
htmlToText: (htmlString) => {
|
||||
const htmlElement = new DOMParser().parseFromString(htmlString, 'text/html');
|
||||
htmlElement.querySelectorAll(".headerlink").forEach((el) => { el.remove() });
|
||||
const docContent = htmlElement.querySelector('[role="main"]');
|
||||
if (docContent !== null) return docContent.textContent; // querySelector returns null, not undefined, when the block is missing
|
||||
console.warn(
|
||||
"Content block not found. Sphinx search tries to obtain it via '[role=main]'. Could you check your theme or template."
|
||||
);
|
||||
return "";
|
||||
},
|
||||
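// Annotation (not part of the committed file): a worked example of htmlToText() above — the "¶"
// permalink anchors are dropped and the text of the [role="main"] container is returned:
//   Search.htmlToText('<div role="main"><h1>Profile<a class="headerlink" href="#p">¶</a></h1></div>')
//   // -> "Profile"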
|
||||
init: () => {
|
||||
const query = new URLSearchParams(window.location.search).get("q");
|
||||
document
|
||||
.querySelectorAll('input[name="q"]')
|
||||
.forEach((el) => (el.value = query));
|
||||
if (query) Search.performSearch(query);
|
||||
},
|
||||
|
||||
loadIndex: (url) =>
|
||||
(document.body.appendChild(document.createElement("script")).src = url),
|
||||
|
||||
setIndex: (index) => {
|
||||
Search._index = index;
|
||||
if (Search._queued_query !== null) {
|
||||
const query = Search._queued_query;
|
||||
Search._queued_query = null;
|
||||
Search.query(query);
|
||||
}
|
||||
},
|
||||
|
||||
hasIndex: () => Search._index !== null,
|
||||
|
||||
deferQuery: (query) => (Search._queued_query = query),
|
||||
|
||||
stopPulse: () => (Search._pulse_status = -1),
|
||||
|
||||
startPulse: () => {
|
||||
if (Search._pulse_status >= 0) return;
|
||||
|
||||
const pulse = () => {
|
||||
Search._pulse_status = (Search._pulse_status + 1) % 4;
|
||||
Search.dots.innerText = ".".repeat(Search._pulse_status);
|
||||
if (Search._pulse_status >= 0) window.setTimeout(pulse, 500);
|
||||
};
|
||||
pulse();
|
||||
},
|
||||
|
||||
/**
|
||||
* perform a search for something (or wait until index is loaded)
|
||||
*/
|
||||
performSearch: (query) => {
|
||||
// create the required interface elements
|
||||
const searchText = document.createElement("h2");
|
||||
searchText.textContent = _("Searching");
|
||||
const searchSummary = document.createElement("p");
|
||||
searchSummary.classList.add("search-summary");
|
||||
searchSummary.innerText = "";
|
||||
const searchList = document.createElement("ul");
|
||||
searchList.classList.add("search");
|
||||
|
||||
const out = document.getElementById("search-results");
|
||||
Search.title = out.appendChild(searchText);
|
||||
Search.dots = Search.title.appendChild(document.createElement("span"));
|
||||
Search.status = out.appendChild(searchSummary);
|
||||
Search.output = out.appendChild(searchList);
|
||||
|
||||
const searchProgress = document.getElementById("search-progress");
|
||||
// Some themes don't use the search progress node
|
||||
if (searchProgress) {
|
||||
searchProgress.innerText = _("Preparing search...");
|
||||
}
|
||||
Search.startPulse();
|
||||
|
||||
// index already loaded, the browser was quick!
|
||||
if (Search.hasIndex()) Search.query(query);
|
||||
else Search.deferQuery(query);
|
||||
},
|
||||
|
||||
/**
|
||||
* execute search (requires search index to be loaded)
|
||||
*/
|
||||
query: (query) => {
|
||||
const filenames = Search._index.filenames;
|
||||
const docNames = Search._index.docnames;
|
||||
const titles = Search._index.titles;
|
||||
const allTitles = Search._index.alltitles;
|
||||
const indexEntries = Search._index.indexentries;
|
||||
|
||||
// stem the search terms and add them to the correct list
|
||||
const stemmer = new Stemmer();
|
||||
const searchTerms = new Set();
|
||||
const excludedTerms = new Set();
|
||||
const highlightTerms = new Set();
|
||||
const objectTerms = new Set(splitQuery(query.toLowerCase().trim()));
|
||||
splitQuery(query.trim()).forEach((queryTerm) => {
|
||||
const queryTermLower = queryTerm.toLowerCase();
|
||||
|
||||
// maybe skip this "word"
|
||||
// stopwords array is from language_data.js
|
||||
if (
|
||||
stopwords.indexOf(queryTermLower) !== -1 ||
|
||||
queryTerm.match(/^\d+$/)
|
||||
)
|
||||
return;
|
||||
|
||||
// stem the word
|
||||
let word = stemmer.stemWord(queryTermLower);
|
||||
// select the correct list
|
||||
if (word[0] === "-") excludedTerms.add(word.substr(1));
|
||||
else {
|
||||
searchTerms.add(word);
|
||||
highlightTerms.add(queryTermLower);
|
||||
}
|
||||
});
|
||||
|
||||
if (SPHINX_HIGHLIGHT_ENABLED) { // set in sphinx_highlight.js
|
||||
localStorage.setItem("sphinx_highlight_terms", [...highlightTerms].join(" "))
|
||||
}
|
||||
|
||||
// console.debug("SEARCH: searching for:");
|
||||
// console.info("required: ", [...searchTerms]);
|
||||
// console.info("excluded: ", [...excludedTerms]);
|
||||
|
||||
// array of [docname, title, anchor, descr, score, filename]
|
||||
let results = [];
|
||||
_removeChildren(document.getElementById("search-progress"));
|
||||
|
||||
const queryLower = query.toLowerCase();
|
||||
for (const [title, foundTitles] of Object.entries(allTitles)) {
|
||||
if (title.toLowerCase().includes(queryLower) && (queryLower.length >= title.length/2)) {
|
||||
for (const [file, id] of foundTitles) {
|
||||
let score = Math.round(100 * queryLower.length / title.length)
|
||||
results.push([
|
||||
docNames[file],
|
||||
titles[file] !== title ? `${titles[file]} > ${title}` : title,
|
||||
id !== null ? "#" + id : "",
|
||||
null,
|
||||
score,
|
||||
filenames[file],
|
||||
]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// search for explicit entries in index directives
|
||||
for (const [entry, foundEntries] of Object.entries(indexEntries)) {
|
||||
if (entry.includes(queryLower) && (queryLower.length >= entry.length/2)) {
|
||||
for (const [file, id] of foundEntries) {
|
||||
let score = Math.round(100 * queryLower.length / entry.length)
|
||||
results.push([
|
||||
docNames[file],
|
||||
titles[file],
|
||||
id ? "#" + id : "",
|
||||
null,
|
||||
score,
|
||||
filenames[file],
|
||||
]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// lookup as object
|
||||
objectTerms.forEach((term) =>
|
||||
results.push(...Search.performObjectSearch(term, objectTerms))
|
||||
);
|
||||
|
||||
// lookup as search terms in fulltext
|
||||
results.push(...Search.performTermsSearch(searchTerms, excludedTerms));
|
||||
|
||||
// let the scorer override scores with a custom scoring function
|
||||
if (Scorer.score) results.forEach((item) => (item[4] = Scorer.score(item)));
|
||||
|
||||
// now sort the results by score (in opposite order of appearance, since the
|
||||
// display function below uses pop() to retrieve items) and then
|
||||
// alphabetically
|
||||
results.sort((a, b) => {
|
||||
const leftScore = a[4];
|
||||
const rightScore = b[4];
|
||||
if (leftScore === rightScore) {
|
||||
// same score: sort alphabetically
|
||||
const leftTitle = a[1].toLowerCase();
|
||||
const rightTitle = b[1].toLowerCase();
|
||||
if (leftTitle === rightTitle) return 0;
|
||||
return leftTitle > rightTitle ? -1 : 1; // inverted is intentional
|
||||
}
|
||||
return leftScore > rightScore ? 1 : -1;
|
||||
});
|
||||
|
||||
// remove duplicate search results
|
||||
// note the reversing of results, so that in the case of duplicates, the highest-scoring entry is kept
|
||||
let seen = new Set();
|
||||
results = results.reverse().reduce((acc, result) => {
|
||||
let resultStr = result.slice(0, 4).concat([result[5]]).map(v => String(v)).join(',');
|
||||
if (!seen.has(resultStr)) {
|
||||
acc.push(result);
|
||||
seen.add(resultStr);
|
||||
}
|
||||
return acc;
|
||||
}, []);
|
||||
|
||||
results = results.reverse();
|
||||
|
||||
// for debugging
|
||||
//Search.lastresults = results.slice(); // a copy
|
||||
// console.info("search results:", Search.lastresults);
|
||||
|
||||
// print the results
|
||||
_displayNextItem(results, results.length, searchTerms);
|
||||
},
|
||||
|
||||
/**
|
||||
* search for object names
|
||||
*/
|
||||
performObjectSearch: (object, objectTerms) => {
|
||||
const filenames = Search._index.filenames;
|
||||
const docNames = Search._index.docnames;
|
||||
const objects = Search._index.objects;
|
||||
const objNames = Search._index.objnames;
|
||||
const titles = Search._index.titles;
|
||||
|
||||
const results = [];
|
||||
|
||||
const objectSearchCallback = (prefix, match) => {
|
||||
const name = match[4]
|
||||
const fullname = (prefix ? prefix + "." : "") + name;
|
||||
const fullnameLower = fullname.toLowerCase();
|
||||
if (fullnameLower.indexOf(object) < 0) return;
|
||||
|
||||
let score = 0;
|
||||
const parts = fullnameLower.split(".");
|
||||
|
||||
// check for different match types: exact matches of full name or
|
||||
// "last name" (i.e. last dotted part)
|
||||
if (fullnameLower === object || parts.slice(-1)[0] === object)
|
||||
score += Scorer.objNameMatch;
|
||||
else if (parts.slice(-1)[0].indexOf(object) > -1)
|
||||
score += Scorer.objPartialMatch; // matches in last name
|
||||
|
||||
const objName = objNames[match[1]][2];
|
||||
const title = titles[match[0]];
|
||||
|
||||
// If more than one term searched for, we require other words to be
|
||||
// found in the name/title/description
|
||||
const otherTerms = new Set(objectTerms);
|
||||
otherTerms.delete(object);
|
||||
if (otherTerms.size > 0) {
|
||||
const haystack = `${prefix} ${name} ${objName} ${title}`.toLowerCase();
|
||||
if (
|
||||
[...otherTerms].some((otherTerm) => haystack.indexOf(otherTerm) < 0)
|
||||
)
|
||||
return;
|
||||
}
|
||||
|
||||
let anchor = match[3];
|
||||
if (anchor === "") anchor = fullname;
|
||||
else if (anchor === "-") anchor = objNames[match[1]][1] + "-" + fullname;
|
||||
|
||||
const descr = objName + _(", in ") + title;
|
||||
|
||||
// add custom score for some objects according to scorer
|
||||
if (Scorer.objPrio.hasOwnProperty(match[2]))
|
||||
score += Scorer.objPrio[match[2]];
|
||||
else score += Scorer.objPrioDefault;
|
||||
|
||||
results.push([
|
||||
docNames[match[0]],
|
||||
fullname,
|
||||
"#" + anchor,
|
||||
descr,
|
||||
score,
|
||||
filenames[match[0]],
|
||||
]);
|
||||
};
|
||||
Object.keys(objects).forEach((prefix) =>
|
||||
objects[prefix].forEach((array) =>
|
||||
objectSearchCallback(prefix, array)
|
||||
)
|
||||
);
|
||||
return results;
|
||||
},
|
||||
|
||||
/**
|
||||
* search for full-text terms in the index
|
||||
*/
|
||||
performTermsSearch: (searchTerms, excludedTerms) => {
|
||||
// prepare search
|
||||
const terms = Search._index.terms;
|
||||
const titleTerms = Search._index.titleterms;
|
||||
const filenames = Search._index.filenames;
|
||||
const docNames = Search._index.docnames;
|
||||
const titles = Search._index.titles;
|
||||
|
||||
const scoreMap = new Map();
|
||||
const fileMap = new Map();
|
||||
|
||||
// perform the search on the required terms
|
||||
searchTerms.forEach((word) => {
|
||||
const files = [];
|
||||
const arr = [
|
||||
{ files: terms[word], score: Scorer.term },
|
||||
{ files: titleTerms[word], score: Scorer.title },
|
||||
];
|
||||
// add support for partial matches
|
||||
if (word.length > 2) {
|
||||
const escapedWord = _escapeRegExp(word);
|
||||
Object.keys(terms).forEach((term) => {
|
||||
if (term.match(escapedWord) && !terms[word])
|
||||
arr.push({ files: terms[term], score: Scorer.partialTerm });
|
||||
});
|
||||
Object.keys(titleTerms).forEach((term) => {
|
||||
if (term.match(escapedWord) && !titleTerms[word])
|
||||
arr.push({ files: titleTerms[term], score: Scorer.partialTitle }); // use the matched term, not the (unmatched) original word
|
||||
});
|
||||
}
|
||||
|
||||
// no match but word was a required one
|
||||
if (arr.every((record) => record.files === undefined)) return;
|
||||
|
||||
// found search word in contents
|
||||
arr.forEach((record) => {
|
||||
if (record.files === undefined) return;
|
||||
|
||||
let recordFiles = record.files;
|
||||
if (recordFiles.length === undefined) recordFiles = [recordFiles];
|
||||
files.push(...recordFiles);
|
||||
|
||||
// set score for the word in each file
|
||||
recordFiles.forEach((file) => {
|
||||
if (!scoreMap.has(file)) scoreMap.set(file, {});
|
||||
scoreMap.get(file)[word] = record.score;
|
||||
});
|
||||
});
|
||||
|
||||
// create the mapping
|
||||
files.forEach((file) => {
|
||||
if (fileMap.has(file) && fileMap.get(file).indexOf(word) === -1)
|
||||
fileMap.get(file).push(word);
|
||||
else fileMap.set(file, [word]);
|
||||
});
|
||||
});
|
||||
|
||||
// now check if the files don't contain excluded terms
|
||||
const results = [];
|
||||
for (const [file, wordList] of fileMap) {
|
||||
// check if all requirements are matched
|
||||
|
||||
// as search terms with length < 3 are discarded
|
||||
const filteredTermCount = [...searchTerms].filter(
|
||||
(term) => term.length > 2
|
||||
).length;
|
||||
if (
|
||||
wordList.length !== searchTerms.size &&
|
||||
wordList.length !== filteredTermCount
|
||||
)
|
||||
continue;
|
||||
|
||||
// ensure that none of the excluded terms is in the search result
|
||||
if (
|
||||
[...excludedTerms].some(
|
||||
(term) =>
|
||||
terms[term] === file ||
|
||||
titleTerms[term] === file ||
|
||||
(terms[term] || []).includes(file) ||
|
||||
(titleTerms[term] || []).includes(file)
|
||||
)
|
||||
)
|
||||
break;
|
||||
|
||||
// select one (max) score for the file.
|
||||
const score = Math.max(...wordList.map((w) => scoreMap.get(file)[w]));
|
||||
// add result to the result list
|
||||
results.push([
|
||||
docNames[file],
|
||||
titles[file],
|
||||
"",
|
||||
null,
|
||||
score,
|
||||
filenames[file],
|
||||
]);
|
||||
}
|
||||
return results;
|
||||
},
|
||||
|
||||
/**
|
||||
* helper function to return a node containing the
|
||||
* search summary for a given text. keywords is a list
|
||||
* of stemmed words.
|
||||
*/
|
||||
makeSearchSummary: (htmlText, keywords) => {
|
||||
const text = Search.htmlToText(htmlText);
|
||||
if (text === "") return null;
|
||||
|
||||
const textLower = text.toLowerCase();
|
||||
const actualStartPosition = [...keywords]
|
||||
.map((k) => textLower.indexOf(k.toLowerCase()))
|
||||
.filter((i) => i > -1)
|
||||
.slice(-1)[0];
|
||||
const startWithContext = Math.max(actualStartPosition - 120, 0);
|
||||
|
||||
const top = startWithContext === 0 ? "" : "...";
|
||||
const tail = startWithContext + 240 < text.length ? "..." : "";
|
||||
|
||||
let summary = document.createElement("p");
|
||||
summary.classList.add("context");
|
||||
summary.textContent = top + text.substr(startWithContext, 240).trim() + tail;
|
||||
|
||||
return summary;
|
||||
},
|
||||
};
|
||||
|
||||
_ready(Search.init);
|
||||
144
core/dbt/docs/build/html/_static/sphinx_highlight.js
vendored
Normal file
@@ -0,0 +1,144 @@
|
||||
/* Highlighting utilities for Sphinx HTML documentation. */
|
||||
"use strict";
|
||||
|
||||
const SPHINX_HIGHLIGHT_ENABLED = true
|
||||
|
||||
/**
|
||||
* highlight a given string on a node by wrapping it in
|
||||
* span elements with the given class name.
|
||||
*/
|
||||
const _highlight = (node, addItems, text, className) => {
|
||||
if (node.nodeType === Node.TEXT_NODE) {
|
||||
const val = node.nodeValue;
|
||||
const parent = node.parentNode;
|
||||
const pos = val.toLowerCase().indexOf(text);
|
||||
if (
|
||||
pos >= 0 &&
|
||||
!parent.classList.contains(className) &&
|
||||
!parent.classList.contains("nohighlight")
|
||||
) {
|
||||
let span;
|
||||
|
||||
const closestNode = parent.closest("body, svg, foreignObject");
|
||||
const isInSVG = closestNode && closestNode.matches("svg");
|
||||
if (isInSVG) {
|
||||
span = document.createElementNS("http://www.w3.org/2000/svg", "tspan");
|
||||
} else {
|
||||
span = document.createElement("span");
|
||||
span.classList.add(className);
|
||||
}
|
||||
|
||||
span.appendChild(document.createTextNode(val.substr(pos, text.length)));
|
||||
parent.insertBefore(
|
||||
span,
|
||||
parent.insertBefore(
|
||||
document.createTextNode(val.substr(pos + text.length)),
|
||||
node.nextSibling
|
||||
)
|
||||
);
|
||||
node.nodeValue = val.substr(0, pos);
|
||||
|
||||
if (isInSVG) {
|
||||
const rect = document.createElementNS(
|
||||
"http://www.w3.org/2000/svg",
|
||||
"rect"
|
||||
);
|
||||
const bbox = parent.getBBox();
|
||||
rect.x.baseVal.value = bbox.x;
|
||||
rect.y.baseVal.value = bbox.y;
|
||||
rect.width.baseVal.value = bbox.width;
|
||||
rect.height.baseVal.value = bbox.height;
|
||||
rect.setAttribute("class", className);
|
||||
addItems.push({ parent: parent, target: rect });
|
||||
}
|
||||
}
|
||||
} else if (node.matches && !node.matches("button, select, textarea")) {
|
||||
node.childNodes.forEach((el) => _highlight(el, addItems, text, className));
|
||||
}
|
||||
};
|
||||
const _highlightText = (thisNode, text, className) => {
|
||||
let addItems = [];
|
||||
_highlight(thisNode, addItems, text, className);
|
||||
addItems.forEach((obj) =>
|
||||
obj.parent.insertAdjacentElement("beforebegin", obj.target)
|
||||
);
|
||||
};
|
||||
|
||||
/**
|
||||
* Small JavaScript module for the documentation.
|
||||
*/
|
||||
const SphinxHighlight = {
|
||||
|
||||
/**
|
||||
* highlight the search words provided in localstorage in the text
|
||||
*/
|
||||
highlightSearchWords: () => {
|
||||
if (!SPHINX_HIGHLIGHT_ENABLED) return; // bail if no highlight
|
||||
|
||||
// get and clear terms from localstorage
|
||||
const url = new URL(window.location);
|
||||
const highlight =
|
||||
localStorage.getItem("sphinx_highlight_terms")
|
||||
|| url.searchParams.get("highlight")
|
||||
|| "";
|
||||
localStorage.removeItem("sphinx_highlight_terms")
|
||||
url.searchParams.delete("highlight");
|
||||
window.history.replaceState({}, "", url);
|
||||
|
||||
// get individual terms from highlight string
|
||||
const terms = highlight.toLowerCase().split(/\s+/).filter(x => x);
|
||||
if (terms.length === 0) return; // nothing to do
|
||||
|
||||
// There should never be more than one element matching "div.body"
|
||||
const divBody = document.querySelectorAll("div.body");
|
||||
const body = divBody.length ? divBody[0] : document.querySelector("body");
|
||||
window.setTimeout(() => {
|
||||
terms.forEach((term) => _highlightText(body, term, "highlighted"));
|
||||
}, 10);
|
||||
|
||||
const searchBox = document.getElementById("searchbox");
|
||||
if (searchBox === null) return;
|
||||
searchBox.appendChild(
|
||||
document
|
||||
.createRange()
|
||||
.createContextualFragment(
|
||||
'<p class="highlight-link">' +
|
||||
'<a href="javascript:SphinxHighlight.hideSearchWords()">' +
|
||||
_("Hide Search Matches") +
|
||||
"</a></p>"
|
||||
)
|
||||
);
|
||||
},
|
||||
|
||||
/**
|
||||
* helper function to hide the search marks again
|
||||
*/
|
||||
hideSearchWords: () => {
|
||||
document
|
||||
.querySelectorAll("#searchbox .highlight-link")
|
||||
.forEach((el) => el.remove());
|
||||
document
|
||||
.querySelectorAll("span.highlighted")
|
||||
.forEach((el) => el.classList.remove("highlighted"));
|
||||
localStorage.removeItem("sphinx_highlight_terms")
|
||||
},
|
||||
|
||||
initEscapeListener: () => {
|
||||
// only install a listener if it is really needed
|
||||
if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) return;
|
||||
|
||||
document.addEventListener("keydown", (event) => {
|
||||
// bail for input elements
|
||||
if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return;
|
||||
// bail with special keys
|
||||
if (event.shiftKey || event.altKey || event.ctrlKey || event.metaKey) return;
|
||||
if (DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS && (event.key === "Escape")) {
|
||||
SphinxHighlight.hideSearchWords();
|
||||
event.preventDefault();
|
||||
}
|
||||
});
|
||||
},
|
||||
};
|
||||
|
||||
_ready(SphinxHighlight.highlightSearchWords);
|
||||
_ready(SphinxHighlight.initEscapeListener);
|
||||
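The search and highlight scripts hand terms to each other through localStorage; a small sketch of driving that flow by hand (illustrative only, not part of the committed files):

// Illustrative only: mimic what a search submission stores, then highlight and clear it.
localStorage.setItem("sphinx_highlight_terms", "profile target");
SphinxHighlight.highlightSearchWords(); // wraps matches in the page body in <span class="highlighted">
SphinxHighlight.hideSearchWords();      // removes the marks again and clears the stored terms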
2042
core/dbt/docs/build/html/_static/underscore-1.13.1.js
vendored
Normal file
File diff suppressed because it is too large
6
core/dbt/docs/build/html/_static/underscore.js
vendored
Normal file
File diff suppressed because one or more lines are too long
102
core/dbt/docs/build/html/genindex.html
vendored
Normal file
@@ -0,0 +1,102 @@
|
||||
|
||||
<!DOCTYPE html>
|
||||
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>Index — dbt-core documentation</title>
|
||||
<link rel="stylesheet" type="text/css" href="_static/pygments.css" />
|
||||
<link rel="stylesheet" type="text/css" href="_static/alabaster.css" />
|
||||
<script data-url_root="./" id="documentation_options" src="_static/documentation_options.js"></script>
|
||||
<script src="_static/jquery.js"></script>
|
||||
<script src="_static/underscore.js"></script>
|
||||
<script src="_static/_sphinx_javascript_frameworks_compat.js"></script>
|
||||
<script src="_static/doctools.js"></script>
|
||||
<script src="_static/sphinx_highlight.js"></script>
|
||||
<link rel="index" title="Index" href="#" />
|
||||
<link rel="search" title="Search" href="search.html" />
|
||||
|
||||
<link rel="stylesheet" href="_static/custom.css" type="text/css" />
|
||||
|
||||
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.9, maximum-scale=0.9" />
|
||||
|
||||
</head><body>
|
||||
|
||||
|
||||
<div class="document">
|
||||
<div class="documentwrapper">
|
||||
<div class="bodywrapper">
|
||||
|
||||
|
||||
<div class="body" role="main">
|
||||
|
||||
|
||||
<h1 id="index">Index</h1>
|
||||
|
||||
<div class="genindex-jumpbox">
|
||||
|
||||
</div>
|
||||
|
||||
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</div>
|
||||
<div class="sphinxsidebar" role="navigation" aria-label="main navigation">
|
||||
<div class="sphinxsidebarwrapper">
|
||||
<h1 class="logo"><a href="index.html">dbt-core</a></h1>
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
<h3>Navigation</h3>
|
||||
|
||||
<div class="relations">
|
||||
<h3>Related Topics</h3>
|
||||
<ul>
|
||||
<li><a href="index.html">Documentation overview</a><ul>
|
||||
</ul></li>
|
||||
</ul>
|
||||
</div>
|
||||
<div id="searchbox" style="display: none" role="search">
|
||||
<h3 id="searchlabel">Quick search</h3>
|
||||
<div class="searchformwrapper">
|
||||
<form class="search" action="search.html" method="get">
|
||||
<input type="text" name="q" aria-labelledby="searchlabel" autocomplete="off" autocorrect="off" autocapitalize="off" spellcheck="false"/>
|
||||
<input type="submit" value="Go" />
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
<script>document.getElementById('searchbox').style.display = "block"</script>
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
</div>
|
||||
</div>
|
||||
<div class="clearer"></div>
|
||||
</div>
|
||||
<div class="footer">
|
||||
©2022, dbt Labs.
|
||||
|
||||
|
|
||||
Powered by <a href="http://sphinx-doc.org/">Sphinx 5.3.0</a>
|
||||
& <a href="https://github.com/bitprophet/alabaster">Alabaster 0.7.12</a>
|
||||
|
||||
</div>
|
||||
|
||||
|
||||
|
||||
|
||||
</body>
|
||||
</html>
|
||||
855
core/dbt/docs/build/html/index.html
vendored
Normal file
@@ -0,0 +1,855 @@
|
||||
|
||||
<!DOCTYPE html>
|
||||
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" /><meta name="generator" content="Docutils 0.19: https://docutils.sourceforge.io/" />
|
||||
|
||||
<title>dbt-core’s API documentation — dbt-core documentation</title>
|
||||
<link rel="stylesheet" type="text/css" href="_static/pygments.css" />
|
||||
<link rel="stylesheet" type="text/css" href="_static/alabaster.css" />
|
||||
<script data-url_root="./" id="documentation_options" src="_static/documentation_options.js"></script>
|
||||
<script src="_static/jquery.js"></script>
|
||||
<script src="_static/underscore.js"></script>
|
||||
<script src="_static/_sphinx_javascript_frameworks_compat.js"></script>
|
||||
<script src="_static/doctools.js"></script>
|
||||
<script src="_static/sphinx_highlight.js"></script>
|
||||
<link rel="index" title="Index" href="genindex.html" />
|
||||
<link rel="search" title="Search" href="search.html" />
|
||||
|
||||
<link rel="stylesheet" href="_static/custom.css" type="text/css" />
|
||||
|
||||
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.9, maximum-scale=0.9" />
|
||||
|
||||
</head><body>
|
||||
|
||||
|
||||
<div class="document">
|
||||
<div class="documentwrapper">
|
||||
<div class="bodywrapper">
|
||||
|
||||
|
||||
<div class="body" role="main">
|
||||
|
||||
<section id="dbt-core-s-api-documentation">
|
||||
<h1>dbt-core’s API documentation<a class="headerlink" href="#dbt-core-s-api-documentation" title="Permalink to this heading">¶</a></h1>
|
||||
<section id="dbt-section">
|
||||
<h2>Command: build<a class="headerlink" href="#dbt-section" title="Permalink to this heading">¶</a></h2>
|
||||
<section id="build|defer">
|
||||
<h3>defer<a class="headerlink" href="#build|defer" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: boolean</p>
|
||||
<p>If set, defer to the state variable for resolving unselected nodes.</p>
|
||||
</section>
|
||||
<section id="build|exclude">
|
||||
<h3>exclude<a class="headerlink" href="#build|exclude" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Specify the nodes to exclude.</p>
|
||||
</section>
|
||||
<section id="build|fail_fast">
|
||||
<h3>fail_fast<a class="headerlink" href="#build|fail_fast" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: boolean</p>
|
||||
<p>Stop execution on first failure.</p>
|
||||
</section>
|
||||
<section id="build|full_refresh">
|
||||
<h3>full_refresh<a class="headerlink" href="#build|full_refresh" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: boolean</p>
|
||||
<p>If specified, dbt will drop incremental models and fully-recalculate the incremental table from the model definition.</p>
|
||||
</section>
|
||||
<section id="build|indirect_selection">
|
||||
<h3>indirect_selection<a class="headerlink" href="#build|indirect_selection" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: choice: [‘eager’, ‘cautious’]</p>
|
||||
<p>Select all tests that are adjacent to selected resources, even if they also depend on resources that have not been explicitly selected.</p>
|
||||
</section>
|
||||
<section id="build|log_path">
|
||||
<h3>log_path<a class="headerlink" href="#build|log_path" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Configure the ‘log-path’. Only applies this setting for the current run. Overrides the ‘DBT_LOG_PATH’ if it is set.</p>
|
||||
</section>
|
||||
<section id="build|models">
|
||||
<h3>models<a class="headerlink" href="#build|models" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Specify the nodes to include.</p>
|
||||
</section>
|
||||
<section id="build|profile">
|
||||
<h3>profile<a class="headerlink" href="#build|profile" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Which profile to load. Overrides setting in dbt_project.yml.</p>
|
||||
</section>
|
||||
<section id="build|profiles_dir">
|
||||
<h3>profiles_dir<a class="headerlink" href="#build|profiles_dir" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Which directory to look in for the profiles.yml file. If not set, dbt will look in the current working directory first, then HOME/.dbt/</p>
|
||||
</section>
|
||||
<section id="build|project_dir">
|
||||
<h3>project_dir<a class="headerlink" href="#build|project_dir" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Which directory to look in for the dbt_project.yml file. Default is the current working directory and its parents.</p>
|
||||
</section>
|
||||
<section id="build|selector">
|
||||
<h3>selector<a class="headerlink" href="#build|selector" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>The selector name to use, as defined in selectors.yml</p>
|
||||
</section>
|
||||
<section id="build|show">
|
||||
<h3>show<a class="headerlink" href="#build|show" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: boolean</p>
|
||||
<p>Show a sample of the loaded data in the terminal</p>
|
||||
</section>
|
||||
<section id="build|state">
|
||||
<h3>state<a class="headerlink" href="#build|state" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>If set, use the given directory as the source for json files to compare with this project.</p>
|
||||
</section>
|
||||
<section id="build|store_failures">
|
||||
<h3>store_failures<a class="headerlink" href="#build|store_failures" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: boolean</p>
|
||||
<p>Store test results (failing rows) in the database</p>
|
||||
</section>
|
||||
<section id="build|target">
|
||||
<h3>target<a class="headerlink" href="#build|target" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Which target to load for the given profile</p>
|
||||
</section>
|
||||
<section id="build|target_path">
|
||||
<h3>target_path<a class="headerlink" href="#build|target_path" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Configure the ‘target-path’. Only applies this setting for the current run. Overrides the ‘DBT_TARGET_PATH’ if it is set.</p>
|
||||
</section>
|
||||
<section id="build|threads">
|
||||
<h3>threads<a class="headerlink" href="#build|threads" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: int</p>
|
||||
<p>Specify number of threads to use while executing models. Overrides settings in profiles.yml.</p>
|
||||
</section>
|
||||
<section id="build|vars">
|
||||
<h3>vars<a class="headerlink" href="#build|vars" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: YAML</p>
|
||||
<p>Supply variables to the project. This argument overrides variables defined in your dbt_project.yml file. This argument should be a YAML string, eg. ‘{my_variable: my_value}’</p>
|
||||
</section>
|
||||
<section id="build|version_check">
|
||||
<h3>version_check<a class="headerlink" href="#build|version_check" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: boolean</p>
|
||||
<p>Ensure dbt’s version matches the one specified in the dbt_project.yml file (‘require-dbt-version’)</p>
|
||||
</section>
|
||||
<h2>Command: clean<a class="headerlink" href="#dbt-section" title="Permalink to this heading">¶</a></h2>
|
||||
<section id="clean|profile">
|
||||
<h3>profile<a class="headerlink" href="#clean|profile" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Which profile to load. Overrides setting in dbt_project.yml.</p>
|
||||
</section>
|
||||
<section id="clean|profiles_dir">
|
||||
<h3>profiles_dir<a class="headerlink" href="#clean|profiles_dir" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Which directory to look in for the profiles.yml file. If not set, dbt will look in the current working directory first, then HOME/.dbt/</p>
|
||||
</section>
|
||||
<section id="clean|project_dir">
|
||||
<h3>project_dir<a class="headerlink" href="#clean|project_dir" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Which directory to look in for the dbt_project.yml file. Default is the current working directory and its parents.</p>
|
||||
</section>
|
||||
<section id="clean|target">
|
||||
<h3>target<a class="headerlink" href="#clean|target" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Which target to load for the given profile</p>
|
||||
</section>
|
||||
<section id="clean|vars">
|
||||
<h3>vars<a class="headerlink" href="#clean|vars" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: YAML</p>
|
||||
<p>Supply variables to the project. This argument overrides variables defined in your dbt_project.yml file. This argument should be a YAML string, eg. ‘{my_variable: my_value}’</p>
|
||||
</section>
|
||||
<h2>Command: compile<a class="headerlink" href="#dbt-section" title="Permalink to this heading">¶</a></h2>
|
||||
<section id="compile|defer">
|
||||
<h3>defer<a class="headerlink" href="#compile|defer" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: boolean</p>
|
||||
<p>If set, defer to the state variable for resolving unselected nodes.</p>
|
||||
</section>
|
||||
<section id="compile|exclude">
|
||||
<h3>exclude<a class="headerlink" href="#compile|exclude" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Specify the nodes to exclude.</p>
|
||||
</section>
|
||||
<section id="compile|full_refresh">
|
||||
<h3>full_refresh<a class="headerlink" href="#compile|full_refresh" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: boolean</p>
|
||||
<p>If specified, dbt will drop incremental models and fully-recalculate the incremental table from the model definition.</p>
|
||||
</section>
|
||||
<section id="compile|log_path">
|
||||
<h3>log_path<a class="headerlink" href="#compile|log_path" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Configure the ‘log-path’. Only applies this setting for the current run. Overrides the ‘DBT_LOG_PATH’ if it is set.</p>
|
||||
</section>
|
||||
<section id="compile|models">
|
||||
<h3>models<a class="headerlink" href="#compile|models" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Specify the nodes to include.</p>
|
||||
</section>
|
||||
<section id="compile|parse_only">
|
||||
<h3>parse_only<a class="headerlink" href="#compile|parse_only" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: boolean</p>
|
||||
<p>TODO: No help text currently available</p>
|
||||
</section>
|
||||
<section id="compile|profile">
|
||||
<h3>profile<a class="headerlink" href="#compile|profile" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Which profile to load. Overrides setting in dbt_project.yml.</p>
|
||||
</section>
|
||||
<section id="compile|profiles_dir">
|
||||
<h3>profiles_dir<a class="headerlink" href="#compile|profiles_dir" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Which directory to look in for the profiles.yml file. If not set, dbt will look in the current working directory first, then HOME/.dbt/</p>
|
||||
</section>
|
||||
<section id="compile|project_dir">
|
||||
<h3>project_dir<a class="headerlink" href="#compile|project_dir" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Which directory to look in for the dbt_project.yml file. Default is the current working directory and its parents.</p>
|
||||
</section>
|
||||
<section id="compile|selector">
|
||||
<h3>selector<a class="headerlink" href="#compile|selector" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>The selector name to use, as defined in selectors.yml</p>
|
||||
</section>
|
||||
<section id="compile|state">
|
||||
<h3>state<a class="headerlink" href="#compile|state" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>If set, use the given directory as the source for json files to compare with this project.</p>
|
||||
</section>
|
||||
<section id="compile|target">
|
||||
<h3>target<a class="headerlink" href="#compile|target" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Which target to load for the given profile</p>
|
||||
</section>
|
||||
<section id="compile|target_path">
|
||||
<h3>target_path<a class="headerlink" href="#compile|target_path" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Configure the ‘target-path’. Only applies this setting for the current run. Overrides the ‘DBT_TARGET_PATH’ if it is set.</p>
|
||||
</section>
|
||||
<section id="compile|threads">
|
||||
<h3>threads<a class="headerlink" href="#compile|threads" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: int</p>
|
||||
<p>Specify number of threads to use while executing models. Overrides settings in profiles.yml.</p>
|
||||
</section>
|
||||
<section id="compile|vars">
|
||||
<h3>vars<a class="headerlink" href="#compile|vars" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: YAML</p>
|
||||
<p>Supply variables to the project. This argument overrides variables defined in your dbt_project.yml file. This argument should be a YAML string, eg. ‘{my_variable: my_value}’</p>
|
||||
</section>
|
||||
<section id="compile|version_check">
|
||||
<h3>version_check<a class="headerlink" href="#compile|version_check" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: boolean</p>
|
||||
<p>Ensure dbt’s version matches the one specified in the dbt_project.yml file (‘require-dbt-version’)</p>
|
||||
</section>
|
||||
<h2>Command: debug<a class="headerlink" href="#dbt-section" title="Permalink to this heading">¶</a></h2>
|
||||
<section id="debug|config_dir">
|
||||
<h3>config_dir<a class="headerlink" href="#debug|config_dir" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>If specified, DBT will show path information for this project</p>
|
||||
</section>
|
||||
<section id="debug|profile">
|
||||
<h3>profile<a class="headerlink" href="#debug|profile" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Which profile to load. Overrides setting in dbt_project.yml.</p>
|
||||
</section>
|
||||
<section id="debug|profiles_dir">
|
||||
<h3>profiles_dir<a class="headerlink" href="#debug|profiles_dir" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Which directory to look in for the profiles.yml file. If not set, dbt will look in the current working directory first, then HOME/.dbt/</p>
|
||||
</section>
|
||||
<section id="debug|project_dir">
|
||||
<h3>project_dir<a class="headerlink" href="#debug|project_dir" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Which directory to look in for the dbt_project.yml file. Default is the current working directory and its parents.</p>
|
||||
</section>
|
||||
<section id="debug|target">
|
||||
<h3>target<a class="headerlink" href="#debug|target" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Which target to load for the given profile</p>
|
||||
</section>
|
||||
<section id="debug|vars">
|
||||
<h3>vars<a class="headerlink" href="#debug|vars" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: YAML</p>
|
||||
<p>Supply variables to the project. This argument overrides variables defined in your dbt_project.yml file. This argument should be a YAML string, eg. ‘{my_variable: my_value}’</p>
|
||||
</section>
|
||||
<section id="debug|version_check">
|
||||
<h3>version_check<a class="headerlink" href="#debug|version_check" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: boolean</p>
|
||||
<p>Ensure dbt’s version matches the one specified in the dbt_project.yml file (‘require-dbt-version’)</p>
|
||||
</section>
|
||||
<h2>Command: deps<a class="headerlink" href="#dbt-section" title="Permalink to this heading">¶</a></h2>
|
||||
<section id="deps|profile">
|
||||
<h3>profile<a class="headerlink" href="#deps|profile" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Which profile to load. Overrides setting in dbt_project.yml.</p>
|
||||
</section>
|
||||
<section id="deps|profiles_dir">
|
||||
<h3>profiles_dir<a class="headerlink" href="#deps|profiles_dir" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Which directory to look in for the profiles.yml file. If not set, dbt will look in the current working directory first, then HOME/.dbt/</p>
|
||||
</section>
|
||||
<section id="deps|project_dir">
|
||||
<h3>project_dir<a class="headerlink" href="#deps|project_dir" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Which directory to look in for the dbt_project.yml file. Default is the current working directory and its parents.</p>
|
||||
</section>
|
||||
<section id="deps|target">
|
||||
<h3>target<a class="headerlink" href="#deps|target" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Which target to load for the given profile</p>
|
||||
</section>
|
||||
<section id="deps|vars">
|
||||
<h3>vars<a class="headerlink" href="#deps|vars" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: YAML</p>
|
||||
<p>Supply variables to the project. This argument overrides variables defined in your dbt_project.yml file. This argument should be a YAML string, eg. ‘{my_variable: my_value}’</p>
|
||||
</section>
|
||||
<h2>Command: docs<a class="headerlink" href="#dbt-section" title="Permalink to this heading">¶</a></h2>
|
||||
<h2>Command: init<a class="headerlink" href="#dbt-section" title="Permalink to this heading">¶</a></h2>
|
||||
<section id="init|profile">
|
||||
<h3>profile<a class="headerlink" href="#init|profile" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Which profile to load. Overrides setting in dbt_project.yml.</p>
|
||||
</section>
|
||||
<section id="init|profiles_dir">
|
||||
<h3>profiles_dir<a class="headerlink" href="#init|profiles_dir" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Which directory to look in for the profiles.yml file. If not set, dbt will look in the current working directory first, then HOME/.dbt/</p>
|
||||
</section>
|
||||
<section id="init|project_dir">
|
||||
<h3>project_dir<a class="headerlink" href="#init|project_dir" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Which directory to look in for the dbt_project.yml file. Default is the current working directory and its parents.</p>
|
||||
</section>
|
||||
<section id="init|skip_profile_setup">
|
||||
<h3>skip_profile_setup<a class="headerlink" href="#init|skip_profile_setup" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: boolean</p>
|
||||
<p>Skip interactive profile setup.</p>
|
||||
</section>
|
||||
<section id="init|target">
|
||||
<h3>target<a class="headerlink" href="#init|target" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Which target to load for the given profile</p>
|
||||
</section>
|
||||
<section id="init|vars">
|
||||
<h3>vars<a class="headerlink" href="#init|vars" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: YAML</p>
|
||||
<p>Supply variables to the project. This argument overrides variables defined in your dbt_project.yml file. This argument should be a YAML string, eg. ‘{my_variable: my_value}’</p>
|
||||
</section>
|
||||
<h2>Command: list<a class="headerlink" href="#dbt-section" title="Permalink to this heading">¶</a></h2>
|
||||
<section id="list|exclude">
|
||||
<h3>exclude<a class="headerlink" href="#list|exclude" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Specify the nodes to exclude.</p>
|
||||
</section>
|
||||
<section id="list|indirect_selection">
|
||||
<h3>indirect_selection<a class="headerlink" href="#list|indirect_selection" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: choice: [‘eager’, ‘cautious’]</p>
|
||||
<p>Select all tests that are adjacent to selected resources, even if they also depend on resources that have not been explicitly selected.</p>
|
||||
</section>
|
||||
<section id="list|models">
|
||||
<h3>models<a class="headerlink" href="#list|models" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Specify the nodes to include.</p>
|
||||
</section>
|
||||
<section id="list|output">
|
||||
<h3>output<a class="headerlink" href="#list|output" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: choice: [‘json’, ‘name’, ‘path’, ‘selector’]</p>
|
||||
<p>TODO: No current help text</p>
|
||||
</section>
|
||||
<section id="list|output_keys">
|
||||
<h3>output_keys<a class="headerlink" href="#list|output_keys" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>TODO: No current help text</p>
|
||||
</section>
|
||||
<section id="list|profile">
|
||||
<h3>profile<a class="headerlink" href="#list|profile" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Which profile to load. Overrides setting in dbt_project.yml.</p>
|
||||
</section>
|
||||
<section id="list|profiles_dir">
|
||||
<h3>profiles_dir<a class="headerlink" href="#list|profiles_dir" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Which directory to look in for the profiles.yml file. If not set, dbt will look in the current working directory first, then HOME/.dbt/</p>
|
||||
</section>
|
||||
<section id="list|project_dir">
|
||||
<h3>project_dir<a class="headerlink" href="#list|project_dir" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Which directory to look in for the dbt_project.yml file. Default is the current working directory and its parents.</p>
|
||||
</section>
|
||||
<section id="list|resource_type">
|
||||
<h3>resource_type<a class="headerlink" href="#list|resource_type" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: choice: [‘metric’, ‘source’, ‘analysis’, ‘model’, ‘test’, ‘exposure’, ‘snapshot’, ‘seed’, ‘default’, ‘all’]</p>
|
||||
<p>TODO: No current help text</p>
|
||||
</section>
|
||||
<section id="list|selector">
|
||||
<h3>selector<a class="headerlink" href="#list|selector" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>The selector name to use, as defined in selectors.yml</p>
|
||||
</section>
|
||||
<section id="list|state">
|
||||
<h3>state<a class="headerlink" href="#list|state" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>If set, use the given directory as the source for json files to compare with this project.</p>
|
||||
</section>
|
||||
<section id="list|target">
|
||||
<h3>target<a class="headerlink" href="#list|target" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Which target to load for the given profile</p>
|
||||
</section>
|
||||
<section id="list|vars">
|
||||
<h3>vars<a class="headerlink" href="#list|vars" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: YAML</p>
|
||||
<p>Supply variables to the project. This argument overrides variables defined in your dbt_project.yml file. This argument should be a YAML string, eg. ‘{my_variable: my_value}’</p>
|
||||
</section>
|
||||
<h2>Command: parse<a class="headerlink" href="#dbt-section" title="Permalink to this heading">¶</a></h2>
|
||||
<section id="parse|compile">
|
||||
<h3>compile<a class="headerlink" href="#parse|compile" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: boolean</p>
|
||||
<p>TODO: No help text currently available</p>
|
||||
</section>
|
||||
<section id="parse|log_path">
|
||||
<h3>log_path<a class="headerlink" href="#parse|log_path" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Configure the ‘log-path’. Only applies this setting for the current run. Overrides the ‘DBT_LOG_PATH’ if it is set.</p>
|
||||
</section>
|
||||
<section id="parse|profile">
|
||||
<h3>profile<a class="headerlink" href="#parse|profile" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Which profile to load. Overrides setting in dbt_project.yml.</p>
|
||||
</section>
|
||||
<section id="parse|profiles_dir">
|
||||
<h3>profiles_dir<a class="headerlink" href="#parse|profiles_dir" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Which directory to look in for the profiles.yml file. If not set, dbt will look in the current working directory first, then HOME/.dbt/</p>
|
||||
</section>
|
||||
<section id="parse|project_dir">
|
||||
<h3>project_dir<a class="headerlink" href="#parse|project_dir" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Which directory to look in for the dbt_project.yml file. Default is the current working directory and its parents.</p>
|
||||
</section>
|
||||
<section id="parse|target">
|
||||
<h3>target<a class="headerlink" href="#parse|target" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Which target to load for the given profile</p>
|
||||
</section>
|
||||
<section id="parse|target_path">
|
||||
<h3>target_path<a class="headerlink" href="#parse|target_path" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Configure the ‘target-path’. Only applies this setting for the current run. Overrides the ‘DBT_TARGET_PATH’ if it is set.</p>
|
||||
</section>
|
||||
<section id="parse|threads">
|
||||
<h3>threads<a class="headerlink" href="#parse|threads" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: int</p>
|
||||
<p>Specify number of threads to use while executing models. Overrides settings in profiles.yml.</p>
|
||||
</section>
|
||||
<section id="parse|vars">
|
||||
<h3>vars<a class="headerlink" href="#parse|vars" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: YAML</p>
|
||||
<p>Supply variables to the project. This argument overrides variables defined in your dbt_project.yml file. This argument should be a YAML string, eg. ‘{my_variable: my_value}’</p>
|
||||
</section>
|
||||
<section id="parse|version_check">
|
||||
<h3>version_check<a class="headerlink" href="#parse|version_check" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: boolean</p>
|
||||
<p>Ensure dbt’s version matches the one specified in the dbt_project.yml file (‘require-dbt-version’)</p>
|
||||
</section>
|
||||
<section id="parse|write_manifest">
|
||||
<h3>write_manifest<a class="headerlink" href="#parse|write_manifest" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: boolean</p>
|
||||
<p>TODO: No help text currently available</p>
|
||||
</section>
|
||||
<h2>Command: run<a class="headerlink" href="#dbt-section" title="Permalink to this heading">¶</a></h2>
|
||||
<section id="run|defer">
|
||||
<h3>defer<a class="headerlink" href="#run|defer" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: boolean</p>
|
||||
<p>If set, defer to the state variable for resolving unselected nodes.</p>
|
||||
</section>
|
||||
<section id="run|exclude">
|
||||
<h3>exclude<a class="headerlink" href="#run|exclude" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Specify the nodes to exclude.</p>
|
||||
</section>
|
||||
<section id="run|fail_fast">
|
||||
<h3>fail_fast<a class="headerlink" href="#run|fail_fast" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: boolean</p>
|
||||
<p>Stop execution on first failure.</p>
|
||||
</section>
|
||||
<section id="run|full_refresh">
|
||||
<h3>full_refresh<a class="headerlink" href="#run|full_refresh" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: boolean</p>
|
||||
<p>If specified, dbt will drop incremental models and fully-recalculate the incremental table from the model definition.</p>
|
||||
</section>
|
||||
<section id="run|log_path">
|
||||
<h3>log_path<a class="headerlink" href="#run|log_path" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Configure the ‘log-path’. Only applies this setting for the current run. Overrides the ‘DBT_LOG_PATH’ if it is set.</p>
|
||||
</section>
|
||||
<section id="run|models">
|
||||
<h3>models<a class="headerlink" href="#run|models" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Specify the nodes to include.</p>
|
||||
</section>
|
||||
<section id="run|profile">
|
||||
<h3>profile<a class="headerlink" href="#run|profile" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Which profile to load. Overrides setting in dbt_project.yml.</p>
|
||||
</section>
|
||||
<section id="run|profiles_dir">
|
||||
<h3>profiles_dir<a class="headerlink" href="#run|profiles_dir" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Which directory to look in for the profiles.yml file. If not set, dbt will look in the current working directory first, then HOME/.dbt/</p>
|
||||
</section>
|
||||
<section id="run|project_dir">
|
||||
<h3>project_dir<a class="headerlink" href="#run|project_dir" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Which directory to look in for the dbt_project.yml file. Default is the current working directory and its parents.</p>
|
||||
</section>
|
||||
<section id="run|selector">
|
||||
<h3>selector<a class="headerlink" href="#run|selector" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>The selector name to use, as defined in selectors.yml</p>
|
||||
</section>
|
||||
<section id="run|state">
|
||||
<h3>state<a class="headerlink" href="#run|state" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>If set, use the given directory as the source for json files to compare with this project.</p>
|
||||
</section>
|
||||
<section id="run|target">
|
||||
<h3>target<a class="headerlink" href="#run|target" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Which target to load for the given profile</p>
|
||||
</section>
|
||||
<section id="run|target_path">
|
||||
<h3>target_path<a class="headerlink" href="#run|target_path" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Configure the ‘target-path’. Only applies this setting for the current run. Overrides the ‘DBT_TARGET_PATH’ if it is set.</p>
|
||||
</section>
|
||||
<section id="run|threads">
|
||||
<h3>threads<a class="headerlink" href="#run|threads" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: int</p>
|
||||
<p>Specify number of threads to use while executing models. Overrides settings in profiles.yml.</p>
|
||||
</section>
|
||||
<section id="run|vars">
|
||||
<h3>vars<a class="headerlink" href="#run|vars" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: YAML</p>
|
||||
<p>Supply variables to the project. This argument overrides variables defined in your dbt_project.yml file. This argument should be a YAML string, eg. ‘{my_variable: my_value}’</p>
|
||||
</section>
|
||||
<section id="run|version_check">
|
||||
<h3>version_check<a class="headerlink" href="#run|version_check" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: boolean</p>
|
||||
<p>Ensure dbt’s version matches the one specified in the dbt_project.yml file (‘require-dbt-version’)</p>
|
||||
</section>
|
||||
<h2>Command: run_operation<a class="headerlink" href="#dbt-section" title="Permalink to this heading">¶</a></h2>
|
||||
<section id="run-operation|args">
|
||||
<h3>args<a class="headerlink" href="#run-operation|args" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: YAML</p>
|
||||
<p>Supply arguments to the macro. This dictionary will be mapped to the keyword arguments defined in the selected macro. This argument should be a YAML string, eg. ‘{my_variable: my_value}’</p>
|
||||
</section>
|
||||
<section id="run-operation|profile">
|
||||
<h3>profile<a class="headerlink" href="#run-operation|profile" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Which profile to load. Overrides setting in dbt_project.yml.</p>
|
||||
</section>
|
||||
<section id="run-operation|profiles_dir">
|
||||
<h3>profiles_dir<a class="headerlink" href="#run-operation|profiles_dir" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Which directory to look in for the profiles.yml file. If not set, dbt will look in the current working directory first, then HOME/.dbt/</p>
|
||||
</section>
|
||||
<section id="run-operation|project_dir">
|
||||
<h3>project_dir<a class="headerlink" href="#run-operation|project_dir" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Which directory to look in for the dbt_project.yml file. Default is the current working directory and its parents.</p>
|
||||
</section>
|
||||
<section id="run-operation|target">
|
||||
<h3>target<a class="headerlink" href="#run-operation|target" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Which target to load for the given profile</p>
|
||||
</section>
|
||||
<section id="run-operation|vars">
|
||||
<h3>vars<a class="headerlink" href="#run-operation|vars" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: YAML</p>
|
||||
<p>Supply variables to the project. This argument overrides variables defined in your dbt_project.yml file. This argument should be a YAML string, eg. ‘{my_variable: my_value}’</p>
|
||||
</section>
|
||||
<h2>Command: seed<a class="headerlink" href="#dbt-section" title="Permalink to this heading">¶</a></h2>
|
||||
<section id="seed|exclude">
|
||||
<h3>exclude<a class="headerlink" href="#seed|exclude" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Specify the nodes to exclude.</p>
|
||||
</section>
|
||||
<section id="seed|full_refresh">
|
||||
<h3>full_refresh<a class="headerlink" href="#seed|full_refresh" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: boolean</p>
|
||||
<p>If specified, dbt will drop incremental models and fully-recalculate the incremental table from the model definition.</p>
|
||||
</section>
|
||||
<section id="seed|log_path">
|
||||
<h3>log_path<a class="headerlink" href="#seed|log_path" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Configure the ‘log-path’. Only applies this setting for the current run. Overrides the ‘DBT_LOG_PATH’ if it is set.</p>
|
||||
</section>
|
||||
<section id="seed|models">
|
||||
<h3>models<a class="headerlink" href="#seed|models" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Specify the nodes to include.</p>
|
||||
</section>
|
||||
<section id="seed|profile">
|
||||
<h3>profile<a class="headerlink" href="#seed|profile" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Which profile to load. Overrides setting in dbt_project.yml.</p>
|
||||
</section>
|
||||
<section id="seed|profiles_dir">
|
||||
<h3>profiles_dir<a class="headerlink" href="#seed|profiles_dir" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Which directory to look in for the profiles.yml file. If not set, dbt will look in the current working directory first, then HOME/.dbt/</p>
|
||||
</section>
|
||||
<section id="seed|project_dir">
|
||||
<h3>project_dir<a class="headerlink" href="#seed|project_dir" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Which directory to look in for the dbt_project.yml file. Default is the current working directory and its parents.</p>
|
||||
</section>
|
||||
<section id="seed|selector">
|
||||
<h3>selector<a class="headerlink" href="#seed|selector" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>The selector name to use, as defined in selectors.yml</p>
|
||||
</section>
|
||||
<section id="seed|show">
|
||||
<h3>show<a class="headerlink" href="#seed|show" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: boolean</p>
|
||||
<p>Show a sample of the loaded data in the terminal</p>
|
||||
</section>
|
||||
<section id="seed|state">
|
||||
<h3>state<a class="headerlink" href="#seed|state" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>If set, use the given directory as the source for json files to compare with this project.</p>
|
||||
</section>
|
||||
<section id="seed|target">
|
||||
<h3>target<a class="headerlink" href="#seed|target" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Which target to load for the given profile</p>
|
||||
</section>
|
||||
<section id="seed|target_path">
|
||||
<h3>target_path<a class="headerlink" href="#seed|target_path" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Configure the ‘target-path’. Only applies this setting for the current run. Overrides the ‘DBT_TARGET_PATH’ if it is set.</p>
|
||||
</section>
|
||||
<section id="seed|threads">
|
||||
<h3>threads<a class="headerlink" href="#seed|threads" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: int</p>
|
||||
<p>Specify number of threads to use while executing models. Overrides settings in profiles.yml.</p>
|
||||
</section>
|
||||
<section id="seed|vars">
|
||||
<h3>vars<a class="headerlink" href="#seed|vars" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: YAML</p>
|
||||
<p>Supply variables to the project. This argument overrides variables defined in your dbt_project.yml file. This argument should be a YAML string, eg. ‘{my_variable: my_value}’</p>
|
||||
</section>
|
||||
<section id="seed|version_check">
|
||||
<h3>version_check<a class="headerlink" href="#seed|version_check" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: boolean</p>
|
||||
<p>Ensure dbt’s version matches the one specified in the dbt_project.yml file (‘require-dbt-version’)</p>
|
||||
</section>
|
||||
<h2>Command: snapshot<a class="headerlink" href="#dbt-section" title="Permalink to this heading">¶</a></h2>
|
||||
<section id="snapshot|defer">
|
||||
<h3>defer<a class="headerlink" href="#snapshot|defer" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: boolean</p>
|
||||
<p>If set, defer to the state variable for resolving unselected nodes.</p>
|
||||
</section>
|
||||
<section id="snapshot|exclude">
|
||||
<h3>exclude<a class="headerlink" href="#snapshot|exclude" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Specify the nodes to exclude.</p>
|
||||
</section>
|
||||
<section id="snapshot|models">
|
||||
<h3>models<a class="headerlink" href="#snapshot|models" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Specify the nodes to include.</p>
|
||||
</section>
|
||||
<section id="snapshot|profile">
|
||||
<h3>profile<a class="headerlink" href="#snapshot|profile" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Which profile to load. Overrides setting in dbt_project.yml.</p>
|
||||
</section>
|
||||
<section id="snapshot|profiles_dir">
|
||||
<h3>profiles_dir<a class="headerlink" href="#snapshot|profiles_dir" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Which directory to look in for the profiles.yml file. If not set, dbt will look in the current working directory first, then HOME/.dbt/</p>
|
||||
</section>
|
||||
<section id="snapshot|project_dir">
|
||||
<h3>project_dir<a class="headerlink" href="#snapshot|project_dir" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Which directory to look in for the dbt_project.yml file. Default is the current working directory and its parents.</p>
|
||||
</section>
|
||||
<section id="snapshot|selector">
|
||||
<h3>selector<a class="headerlink" href="#snapshot|selector" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>The selector name to use, as defined in selectors.yml</p>
|
||||
</section>
|
||||
<section id="snapshot|state">
|
||||
<h3>state<a class="headerlink" href="#snapshot|state" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>If set, use the given directory as the source for json files to compare with this project.</p>
|
||||
</section>
|
||||
<section id="snapshot|target">
|
||||
<h3>target<a class="headerlink" href="#snapshot|target" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Which target to load for the given profile</p>
|
||||
</section>
|
||||
<section id="snapshot|threads">
|
||||
<h3>threads<a class="headerlink" href="#snapshot|threads" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: int</p>
|
||||
<p>Specify number of threads to use while executing models. Overrides settings in profiles.yml.</p>
|
||||
</section>
|
||||
<section id="snapshot|vars">
|
||||
<h3>vars<a class="headerlink" href="#snapshot|vars" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: YAML</p>
|
||||
<p>Supply variables to the project. This argument overrides variables defined in your dbt_project.yml file. This argument should be a YAML string, eg. ‘{my_variable: my_value}’</p>
|
||||
</section>
|
||||
<h2>Command: source<a class="headerlink" href="#dbt-section" title="Permalink to this heading">¶</a></h2>
|
||||
<h2>Command: test<a class="headerlink" href="#dbt-section" title="Permalink to this heading">¶</a></h2>
|
||||
<section id="test|defer">
|
||||
<h3>defer<a class="headerlink" href="#test|defer" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: boolean</p>
|
||||
<p>If set, defer to the state variable for resolving unselected nodes.</p>
|
||||
</section>
|
||||
<section id="test|exclude">
|
||||
<h3>exclude<a class="headerlink" href="#test|exclude" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Specify the nodes to exclude.</p>
|
||||
</section>
|
||||
<section id="test|fail_fast">
|
||||
<h3>fail_fast<a class="headerlink" href="#test|fail_fast" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: boolean</p>
|
||||
<p>Stop execution on first failure.</p>
|
||||
</section>
|
||||
<section id="test|indirect_selection">
|
||||
<h3>indirect_selection<a class="headerlink" href="#test|indirect_selection" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: choice: [‘eager’, ‘cautious’]</p>
|
||||
<p>Select all tests that are adjacent to selected resources, even if they also depend on resources that have not been explicitly selected.</p>
|
||||
</section>
|
||||
<section id="test|log_path">
|
||||
<h3>log_path<a class="headerlink" href="#test|log_path" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Configure the ‘log-path’. Only applies this setting for the current run. Overrides the ‘DBT_LOG_PATH’ if it is set.</p>
|
||||
</section>
|
||||
<section id="test|models">
|
||||
<h3>models<a class="headerlink" href="#test|models" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Specify the nodes to include.</p>
|
||||
</section>
|
||||
<section id="test|profile">
|
||||
<h3>profile<a class="headerlink" href="#test|profile" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Which profile to load. Overrides setting in dbt_project.yml.</p>
|
||||
</section>
|
||||
<section id="test|profiles_dir">
|
||||
<h3>profiles_dir<a class="headerlink" href="#test|profiles_dir" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Which directory to look in for the profiles.yml file. If not set, dbt will look in the current working directory first, then HOME/.dbt/</p>
|
||||
</section>
|
||||
<section id="test|project_dir">
|
||||
<h3>project_dir<a class="headerlink" href="#test|project_dir" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Which directory to look in for the dbt_project.yml file. Default is the current working directory and its parents.</p>
|
||||
</section>
|
||||
<section id="test|selector">
|
||||
<h3>selector<a class="headerlink" href="#test|selector" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>The selector name to use, as defined in selectors.yml</p>
|
||||
</section>
|
||||
<section id="test|state">
|
||||
<h3>state<a class="headerlink" href="#test|state" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>If set, use the given directory as the source for json files to compare with this project.</p>
|
||||
</section>
|
||||
<section id="test|store_failures">
|
||||
<h3>store_failures<a class="headerlink" href="#test|store_failures" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: boolean</p>
|
||||
<p>Store test results (failing rows) in the database</p>
|
||||
</section>
|
||||
<section id="test|target">
|
||||
<h3>target<a class="headerlink" href="#test|target" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: string</p>
|
||||
<p>Which target to load for the given profile</p>
|
||||
</section>
|
||||
<section id="test|target_path">
|
||||
<h3>target_path<a class="headerlink" href="#test|target_path" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: path</p>
|
||||
<p>Configure the ‘target-path’. Only applies this setting for the current run. Overrides the ‘DBT_TARGET_PATH’ if it is set.</p>
|
||||
</section>
|
||||
<section id="test|threads">
|
||||
<h3>threads<a class="headerlink" href="#test|threads" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: int</p>
|
||||
<p>Specify number of threads to use while executing models. Overrides settings in profiles.yml.</p>
|
||||
</section>
|
||||
<section id="test|vars">
|
||||
<h3>vars<a class="headerlink" href="#test|vars" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: YAML</p>
|
||||
<p>Supply variables to the project. This argument overrides variables defined in your dbt_project.yml file. This argument should be a YAML string, eg. ‘{my_variable: my_value}’</p>
|
||||
</section>
|
||||
<section id="test|version_check">
|
||||
<h3>version_check<a class="headerlink" href="#test|version_check" title="Permalink to this heading">¶</a></h3>
|
||||
<p>Type: boolean</p>
|
||||
<p>Ensure dbt’s version matches the one specified in the dbt_project.yml file (‘require-dbt-version’)</p>
|
||||
</section>
|
||||
</section>
|
||||
</section>
|
||||
|
||||
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</div>
|
||||
<div class="sphinxsidebar" role="navigation" aria-label="main navigation">
|
||||
<div class="sphinxsidebarwrapper">
|
||||
<h1 class="logo"><a href="#">dbt-core</a></h1>
<h3>Navigation</h3>
|
||||
|
||||
<div class="relations">
|
||||
<h3>Related Topics</h3>
|
||||
<ul>
|
||||
<li><a href="#">Documentation overview</a><ul>
|
||||
</ul></li>
|
||||
</ul>
|
||||
</div>
|
||||
<div id="searchbox" style="display: none" role="search">
|
||||
<h3 id="searchlabel">Quick search</h3>
|
||||
<div class="searchformwrapper">
|
||||
<form class="search" action="search.html" method="get">
|
||||
<input type="text" name="q" aria-labelledby="searchlabel" autocomplete="off" autocorrect="off" autocapitalize="off" spellcheck="false"/>
|
||||
<input type="submit" value="Go" />
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
<script>document.getElementById('searchbox').style.display = "block"</script>
</div>
|
||||
</div>
|
||||
<div class="clearer"></div>
|
||||
</div>
|
||||
<div class="footer">
|
||||
©2022, dbt Labs.
|
||||
|
||||
|
|
||||
Powered by <a href="http://sphinx-doc.org/">Sphinx 5.3.0</a>
|
||||
& <a href="https://github.com/bitprophet/alabaster">Alabaster 0.7.12</a>
|
||||
|
||||
|
|
||||
<a href="_sources/index.rst.txt"
|
||||
rel="nofollow">Page source</a>
|
||||
</div>
</body>
|
||||
</html>
|
||||
BIN
core/dbt/docs/build/html/objects.inv
vendored
Normal file
Binary file not shown.
121
core/dbt/docs/build/html/search.html
vendored
Normal file
@@ -0,0 +1,121 @@
|
||||
|
||||
<!DOCTYPE html>
|
||||
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>Search — dbt-core documentation</title>
|
||||
<link rel="stylesheet" type="text/css" href="_static/pygments.css" />
|
||||
<link rel="stylesheet" type="text/css" href="_static/alabaster.css" />
|
||||
|
||||
<script data-url_root="./" id="documentation_options" src="_static/documentation_options.js"></script>
|
||||
<script src="_static/jquery.js"></script>
|
||||
<script src="_static/underscore.js"></script>
|
||||
<script src="_static/_sphinx_javascript_frameworks_compat.js"></script>
|
||||
<script src="_static/doctools.js"></script>
|
||||
<script src="_static/sphinx_highlight.js"></script>
|
||||
<script src="_static/searchtools.js"></script>
|
||||
<script src="_static/language_data.js"></script>
|
||||
<link rel="index" title="Index" href="genindex.html" />
|
||||
<link rel="search" title="Search" href="#" />
|
||||
<script src="searchindex.js" defer></script>
|
||||
|
||||
|
||||
<link rel="stylesheet" href="_static/custom.css" type="text/css" />
|
||||
|
||||
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.9, maximum-scale=0.9" />
|
||||
|
||||
|
||||
</head><body>
|
||||
|
||||
|
||||
<div class="document">
|
||||
<div class="documentwrapper">
|
||||
<div class="bodywrapper">
|
||||
|
||||
|
||||
<div class="body" role="main">
|
||||
|
||||
<h1 id="search-documentation">Search</h1>
|
||||
|
||||
<noscript>
|
||||
<div class="admonition warning">
|
||||
<p>
|
||||
Please activate JavaScript to enable the search
|
||||
functionality.
|
||||
</p>
|
||||
</div>
|
||||
</noscript>
|
||||
|
||||
|
||||
<p>
|
||||
Searching for multiple words only shows matches that contain
|
||||
all words.
|
||||
</p>
|
||||
|
||||
|
||||
<form action="" method="get">
|
||||
<input type="text" name="q" aria-labelledby="search-documentation" value="" autocomplete="off" autocorrect="off" autocapitalize="off" spellcheck="false"/>
|
||||
<input type="submit" value="search" />
|
||||
<span id="search-progress" style="padding-left: 10px"></span>
|
||||
</form>
|
||||
|
||||
|
||||
|
||||
<div id="search-results">
|
||||
|
||||
</div>
|
||||
|
||||
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</div>
|
||||
<div class="sphinxsidebar" role="navigation" aria-label="main navigation">
|
||||
<div class="sphinxsidebarwrapper">
|
||||
<h1 class="logo"><a href="index.html">dbt-core</a></h1>
<h3>Navigation</h3>
|
||||
|
||||
<div class="relations">
|
||||
<h3>Related Topics</h3>
|
||||
<ul>
|
||||
<li><a href="index.html">Documentation overview</a><ul>
|
||||
</ul></li>
|
||||
</ul>
|
||||
</div>
</div>
|
||||
</div>
|
||||
<div class="clearer"></div>
|
||||
</div>
|
||||
<div class="footer">
|
||||
©2022, dbt Labs.
|
||||
|
||||
|
|
||||
Powered by <a href="http://sphinx-doc.org/">Sphinx 5.3.0</a>
|
||||
& <a href="https://github.com/bitprophet/alabaster">Alabaster 0.7.12</a>
|
||||
|
||||
</div>
</body>
|
||||
</html>
|
||||
1
core/dbt/docs/build/html/searchindex.js
vendored
Normal file
File diff suppressed because one or more lines are too long
@@ -7,7 +7,7 @@ import typing as t
# For the full list of built-in configuration values, see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html

sys.path.insert(0, os.path.abspath("../.."))
sys.path.insert(0, os.path.abspath("../../.."))
sys.path.insert(0, os.path.abspath("./_ext"))

# -- Project information -----------------------------------------------------
@@ -8,9 +8,10 @@ The event module provides types that represent what is happening in dbt in `even
When events are processed via `fire_event`, nearly everything is logged. Whether or not the user has enabled the debug flag, all debug messages are still logged to the file. However, some events are particularly time-consuming to construct because they return a huge amount of data. Today, the only messages in this category are cache events, and they are only logged if the `--log-cache-events` flag is on. This matters because these messages should not be created unless they are going to be logged, since constructing them causes a noticeable performance degradation. These events use the `fire_event_if` function.
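To make that concrete, here is a tiny self-contained sketch of the lazy-construction pattern. It only mirrors the `fire_event_if(conditional, lazy_e)` signature that appears in the functions.py diff further down; every other name in it is invented for illustration and is not a real dbt identifier:

```python
from dataclasses import dataclass
from typing import Callable


@dataclass
class SomeCacheEvent:          # stand-in for a real cache event type
    dump: str


def expensive_graph_dump() -> str:
    print("building the expensive payload...")  # only runs when the flag is on
    return "...large relation cache dump..."


def fire_event_if(conditional: bool, lazy_e: Callable[[], SomeCacheEvent]) -> None:
    # mirrors the real helper: the event is only constructed when `conditional` is True
    if conditional:
        print(lazy_e())


LOG_CACHE_EVENTS = False  # pretend --log-cache-events was not passed
fire_event_if(LOG_CACHE_EVENTS, lambda: SomeCacheEvent(dump=expensive_graph_dump()))
# prints nothing: neither the payload nor the event object was ever built
```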
# Adding a New Event
New events need to have a proto message definition created in core/dbt/events/types.proto. Every message must include EventInfo as the first field, named "info" and numbered 1. To update the proto_types.py file, run the following in the core/dbt/events directory: ```protoc --python_betterproto_out . types.proto```

A matching class needs to be created in the core/dbt/events/types.py file, with two superclasses: the "Level" mixin and the generated class from proto_types.py. These classes will also generally have two methods: a "code" method that returns the event code, and a "message" method that is used to construct the "msg" from the event fields. In addition, the "Level" mixin provides a "level_tag" method to set the level (which can also be overridden using the "info" convenience function from functions.py).
* Add a new message in types.proto with an EventInfo field first
* Run the protoc compiler to update proto_types.py: ```protoc --python_betterproto_out . types.proto```
* Add a wrapping class in core/dbt/events/types.py with a Level superclass and the superclass from proto_types.py, plus code and message methods
* Add the class to tests/unit/test_events.py

Note that no attributes can exist in these event classes except for fields defined in the protobuf definitions, because the betterproto metaclass will throw an error. Betterproto provides a to_dict() method to convert the generated classes to a dictionary, and from that to JSON. However, some attributes will successfully convert to dictionaries but not to serialized protobufs, so we need to test both output formats.
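A minimal sketch of the wrapping class described in the steps above, assuming a new `FooEvent` message with a single `thing` field has already been added to types.proto and compiled into proto_types.py. The event name, its field, and the code "Z999" are all made up for illustration, and `InfoLevel` is used here as one of the Level mixins:

```python
from dataclasses import dataclass

from dbt.events.base_types import InfoLevel   # one of the "Level" mixins
from dbt.events import proto_types as pt      # betterproto-generated classes


@dataclass  # type: ignore[misc]
class FooEvent(InfoLevel, pt.FooEvent):  # Level mixin plus the generated proto class
    def code(self) -> str:
        return "Z999"  # made-up, unused event code

    def message(self) -> str:
        # build the human-readable msg only from fields defined in the proto message
        return f"Did a thing: {self.thing}"
```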
@@ -9,6 +9,7 @@ from dbt.events.types import (
)


# N.B. No guarantees for what type param msg is.
@dataclass
class AdapterLogger:
    name: str
@@ -49,7 +49,9 @@ class BaseEvent:

    def __post_init__(self):
        super().__post_init__()
        self.info.level = self.level_tag()
        if not self.info.level:
            self.info.level = self.level_tag()
        assert self.info.level in ["info", "warn", "error", "debug", "test"]
        if not hasattr(self.info, "msg") or not self.info.msg:
            self.info.msg = self.message()
        self.info.invocation_id = get_invocation_id()
@@ -60,13 +62,25 @@ class BaseEvent:
        self.info.code = self.code()
        self.info.name = type(self).__name__

    def level_tag(self):
        raise Exception("level_tag() not implemented for event")
    def level_tag(self) -> str:
        return "debug"

    # This is here because although we know that info should always
    # exist, mypy doesn't.
    def log_level(self) -> str:
        return self.info.level # type: ignore

    def message(self):
        raise Exception("message() not implemented for event")


# DynamicLevel requires that the level be supplied on the
# event construction call using the "info" function from functions.py
@dataclass # type: ignore[misc]
class DynamicLevel(BaseEvent):
    pass


@dataclass
class TestLevel(BaseEvent):
    __test__ = False
@@ -99,6 +113,23 @@ class ErrorLevel(BaseEvent):
        return "error"


# Included to ensure classes with str-type message members are initialized correctly.
@dataclass # type: ignore[misc]
class AdapterEventStringFunctor:
    def __post_init__(self):
        super().__post_init__()
        if not isinstance(self.base_msg, str):
            self.base_msg = str(self.base_msg)


@dataclass # type: ignore[misc]
class EventStringFunctor:
    def __post_init__(self):
        super().__post_init__()
        if not isinstance(self.msg, str):
            self.msg = str(self.msg)


# prevents an event from going to the file
# This should rarely be used in core code. It is currently
# only used in integration tests and for the 'clean' command.
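To see what the `level_tag()` / `log_level()` split in the diff above buys, here is a tiny self-contained imitation (no dbt imports; all stub names are invented): the class supplies a default level, while a level already set on `info` at construction time wins, which is the behavior DynamicLevel relies on.

```python
from dataclasses import dataclass, field


@dataclass
class EventInfoStub:           # imitates the proto-generated EventInfo
    level: str = ""


@dataclass
class BaseEventStub:           # imitates BaseEvent after this change
    info: EventInfoStub = field(default_factory=EventInfoStub)

    def __post_init__(self):
        if not self.info.level:                   # only fall back when no level was supplied
            self.info.level = self.level_tag()
        assert self.info.level in ["info", "warn", "error", "debug", "test"]

    def level_tag(self) -> str:
        return "debug"                            # class-level default

    def log_level(self) -> str:
        return self.info.level                    # per-instance value used by the logger


print(BaseEventStub().log_level())                             # -> "debug"
print(BaseEventStub(EventInfoStub(level="warn")).log_level())  # -> "warn" (DynamicLevel-style)
```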
@@ -1,9 +1,13 @@
import betterproto
from colorama import Style

from dbt.events.base_types import NoStdOut, BaseEvent, NoFile, Cache
from dbt.events.types import EventBufferFull, MainReportVersion, EmptyLine
from dbt.events.proto_types import EventInfo
from dbt.events.helpers import env_secrets, scrub_secrets
import dbt.flags as flags
from dbt.constants import SECRET_ENV_PREFIX, METADATA_ENV_PREFIX
from dbt.constants import METADATA_ENV_PREFIX

from dbt.logger import make_log_dir_if_missing, GLOBAL_LOGGER
from datetime import datetime
@@ -18,7 +22,8 @@ from logging.handlers import RotatingFileHandler
import os
import uuid
import threading
from typing import List, Optional, Union, Callable, Dict
from typing import Optional, Union, Callable, Dict

from collections import deque

LOG_VERSION = 3
@@ -108,19 +113,6 @@ def stop_capture_stdout_logs() -> None:
]


def env_secrets() -> List[str]:
return [v for k, v in os.environ.items() if k.startswith(SECRET_ENV_PREFIX) and v.strip()]


def scrub_secrets(msg: str, secrets: List[str]) -> str:
scrubbed = msg

for secret in secrets:
scrubbed = scrubbed.replace(secret, "*****")

return scrubbed


# returns a dictionary representation of the event fields.
# the message may contain secrets which must be scrubbed at the usage site.
def event_to_json(
@@ -168,7 +160,7 @@ def create_debug_text_log_line(e: BaseEvent) -> str:
ts: str = get_ts().strftime("%H:%M:%S.%f")
scrubbed_msg: str = scrub_secrets(e.message(), env_secrets())
# Make the levels all 5 characters so they line up
level: str = f"{e.level_tag():<5}"
level: str = f"{e.log_level():<5}"
thread = ""
if threading.current_thread().name:
thread_name = threading.current_thread().name
@@ -200,26 +192,35 @@ def create_log_line(e: BaseEvent, file_output=False) -> Optional[str]:

# allows for reuse of this obnoxious if else tree.
# do not use for exceptions, it doesn't pass along exc_info, stack_info, or extra
def send_to_logger(l: Union[Logger, logbook.Logger], level_tag: str, log_line: str):
def send_to_logger(l: Union[Logger, logbook.Logger], level: str, log_line: str):
if not log_line:
return
if level_tag == "test":
if level == "test":
# TODO after implementing #3977 send to new test level
l.debug(log_line)
elif level_tag == "debug":
elif level == "debug":
l.debug(log_line)
elif level_tag == "info":
elif level == "info":
l.info(log_line)
elif level_tag == "warn":
elif level == "warn":
l.warning(log_line)
elif level_tag == "error":
elif level == "error":
l.error(log_line)
else:
raise AssertionError(
f"While attempting to log {log_line}, encountered the unhandled level: {level_tag}"
f"While attempting to log {log_line}, encountered the unhandled level: {level}"
)

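The renamed `level` parameter still drives a plain if/else dispatch onto the underlying logger. A self-contained sketch of that dispatch, using only the stdlib logger rather than dbt's `Logger`/`logbook.Logger` union:

```python
# Sketch of the level dispatch shown above, against Python's stdlib logging.
import logging

logger = logging.getLogger("sketch")


def send_to_logger(l: logging.Logger, level: str, log_line: str) -> None:
    if not log_line:
        return
    dispatch = {
        "test": l.debug,   # "test" currently routes to debug, per the TODO above
        "debug": l.debug,
        "info": l.info,
        "warn": l.warning,
        "error": l.error,
    }
    try:
        dispatch[level](log_line)
    except KeyError:
        raise AssertionError(f"unhandled level: {level}")


send_to_logger(logger, "info", "hello")
```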
def warn_or_error(event, node=None):
if flags.WARN_ERROR:
from dbt.exceptions import raise_compiler_error

raise_compiler_error(scrub_secrets(event.info.msg, env_secrets()), node)
else:
fire_event(event)


# an alternative to fire_event which only creates and logs the event value
# if the condition is met. Does nothing otherwise.
def fire_event_if(conditional: bool, lazy_e: Callable[[], BaseEvent]) -> None:
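`fire_event_if` takes a zero-argument callable so the event is only built when the condition holds. A small usage sketch; the helper and event class here are simplified placeholders, not dbt's real implementations:

```python
# Usage sketch for the lazy-construction pattern above.
from dataclasses import dataclass
from typing import Callable


@dataclass
class DebugOnlyEvent:  # hypothetical event whose construction is "expensive"
    msg: str


def fire_event_if(conditional: bool, lazy_e: Callable[[], DebugOnlyEvent]) -> None:
    if conditional:
        print(lazy_e())  # stand-in for fire_event(lazy_e())


debug_enabled = False
# Because a lambda is passed, DebugOnlyEvent is never constructed when debug is off.
fire_event_if(debug_enabled, lambda: DebugOnlyEvent(msg="only built when needed"))
```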
@@ -244,7 +245,7 @@ def fire_event(e: BaseEvent) -> None:
# destination
log_line = create_log_line(e)
if log_line:
send_to_logger(GLOBAL_LOGGER, e.level_tag(), log_line)
send_to_logger(GLOBAL_LOGGER, level=e.log_level(), log_line=log_line)
return  # exit the function to avoid using the current logger as well

# always logs debug level regardless of user input
@@ -252,19 +253,19 @@ def fire_event(e: BaseEvent) -> None:
log_line = create_log_line(e, file_output=True)
# doesn't send exceptions to exception logger
if log_line:
send_to_logger(FILE_LOG, level_tag=e.level_tag(), log_line=log_line)
send_to_logger(FILE_LOG, level=e.log_level(), log_line=log_line)

if not isinstance(e, NoStdOut):
# explicitly checking the debug flag here so that potentially expensive-to-construct
# log messages are not constructed if debug messages are never shown.
if e.level_tag() == "debug" and not flags.DEBUG:
if e.log_level() == "debug" and not flags.DEBUG:
return  # eat the message in case it was one of the expensive ones
if e.level_tag() != "error" and flags.QUIET:
if e.log_level() != "error" and flags.QUIET:
return  # eat all non-exception messages in quiet mode

log_line = create_log_line(e)
if log_line:
send_to_logger(STDOUT_LOG, level_tag=e.level_tag(), log_line=log_line)
send_to_logger(STDOUT_LOG, level=e.log_level(), log_line=log_line)


def get_metadata_vars() -> Dict[str, str]:
@@ -325,3 +326,11 @@ def add_to_event_history(event):
def reset_event_history():
global EVENT_HISTORY
EVENT_HISTORY = deque(maxlen=flags.EVENT_BUFFER_SIZE)


# Currently used to set the level in EventInfo, so logging events can
# provide more than one "level". Might be used in the future to set
# more fields in EventInfo, once some of that information is no longer global
def info(level="info"):
info = EventInfo(level=level)
return info
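The new `info(level=...)` helper pairs with the `DynamicLevel` base class added earlier: callers choose the level when they construct the event instead of it being baked into the class. A hedged sketch of that call pattern with simplified stand-ins, not dbt's actual classes:

```python
# Sketch of giving a DynamicLevel-style event its level at construction time.
from dataclasses import dataclass


@dataclass
class EventInfo:
    level: str = "info"


def info(level: str = "info") -> EventInfo:
    return EventInfo(level=level)


@dataclass
class CacheAction:  # hypothetical dynamic-level event
    info: EventInfo
    msg: str = ""

    def log_level(self) -> str:
        return self.info.level


event = CacheAction(info=info(level="debug"), msg="cache miss")
assert event.log_level() == "debug"
```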
16
core/dbt/events/helpers.py
Normal file
@@ -0,0 +1,16 @@
import os
from typing import List
from dbt.constants import SECRET_ENV_PREFIX


def env_secrets() -> List[str]:
return [v for k, v in os.environ.items() if k.startswith(SECRET_ENV_PREFIX) and v.strip()]


def scrub_secrets(msg: str, secrets: List[str]) -> str:
scrubbed = msg

for secret in secrets:
scrubbed = scrubbed.replace(secret, "*****")

return scrubbed
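The new helpers module makes secret scrubbing importable without pulling in the heavy functions module. The behavior itself is unchanged; a quick illustration with an invented secret value:

```python
# Illustration of the scrub_secrets behavior shown above; the secret value is
# invented for the example, and the function is re-declared to stay standalone.
from typing import List


def scrub_secrets(msg: str, secrets: List[str]) -> str:
    scrubbed = msg
    for secret in secrets:
        scrubbed = scrubbed.replace(secret, "*****")
    return scrubbed


print(scrub_secrets("password=hunter2 host=db", ["hunter2"]))
# -> "password=***** host=db"
```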
@@ -23,6 +23,7 @@ class EventInfo(betterproto.Message):
extra: Dict[str, str] = betterproto.map_field(
9, betterproto.TYPE_STRING, betterproto.TYPE_STRING
)
category: str = betterproto.string_field(10)


@dataclass
@@ -52,7 +53,6 @@ class NodeInfo(betterproto.Message):
class RunResultMsg(betterproto.Message):
"""RunResult"""

# status: Union[RunStatus, TestStatus, FreshnessStatus]
status: str = betterproto.string_field(1)
message: str = betterproto.string_field(2)
timing_info: List["TimingInfoMsg"] = betterproto.message_field(3)
@@ -281,6 +281,65 @@ class ProjectCreated(betterproto.Message):
slack_url: str = betterproto.string_field(4)


@dataclass
class PackageRedirectDeprecation(betterproto.Message):
"""D001"""

info: "EventInfo" = betterproto.message_field(1)
old_name: str = betterproto.string_field(2)
new_name: str = betterproto.string_field(3)


@dataclass
class PackageInstallPathDeprecation(betterproto.Message):
"""D002"""

info: "EventInfo" = betterproto.message_field(1)


@dataclass
class ConfigSourcePathDeprecation(betterproto.Message):
"""D003"""

info: "EventInfo" = betterproto.message_field(1)
deprecated_path: str = betterproto.string_field(2)
exp_path: str = betterproto.string_field(3)


@dataclass
class ConfigDataPathDeprecation(betterproto.Message):
"""D004"""

info: "EventInfo" = betterproto.message_field(1)
deprecated_path: str = betterproto.string_field(2)
exp_path: str = betterproto.string_field(3)


@dataclass
class AdapterDeprecationWarning(betterproto.Message):
"""D005"""

info: "EventInfo" = betterproto.message_field(1)
old_name: str = betterproto.string_field(2)
new_name: str = betterproto.string_field(3)


@dataclass
class MetricAttributesRenamed(betterproto.Message):
"""D006"""

info: "EventInfo" = betterproto.message_field(1)
metric_name: str = betterproto.string_field(2)


@dataclass
class ExposureNameDeprecation(betterproto.Message):
"""D007"""

info: "EventInfo" = betterproto.message_field(1)
exposure: str = betterproto.string_field(2)


@dataclass
class AdapterEventDebug(betterproto.Message):
"""E001"""
@@ -340,7 +399,7 @@ class ConnectionReused(betterproto.Message):


@dataclass
class ConnectionLeftOpen(betterproto.Message):
class ConnectionLeftOpenInCleanup(betterproto.Message):
"""E007"""

info: "EventInfo" = betterproto.message_field(1)
@@ -348,7 +407,7 @@ class ConnectionLeftOpen(betterproto.Message):


@dataclass
class ConnectionClosed(betterproto.Message):
class ConnectionClosedInCleanup(betterproto.Message):
"""E008"""

info: "EventInfo" = betterproto.message_field(1)
@@ -365,7 +424,7 @@ class RollbackFailed(betterproto.Message):


@dataclass
class ConnectionClosed2(betterproto.Message):
class ConnectionClosed(betterproto.Message):
"""E010"""

info: "EventInfo" = betterproto.message_field(1)
@@ -373,7 +432,7 @@ class ConnectionClosed2(betterproto.Message):


@dataclass
class ConnectionLeftOpen2(betterproto.Message):
class ConnectionLeftOpen(betterproto.Message):
"""E011"""

info: "EventInfo" = betterproto.message_field(1)
@@ -629,6 +688,14 @@ class CodeExecutionStatus(betterproto.Message):
elapsed: float = betterproto.float_field(3)


@dataclass
class CatalogGenerationError(betterproto.Message):
"""E040"""

info: "EventInfo" = betterproto.message_field(1)
exc: str = betterproto.string_field(2)


@dataclass
class WriteCatalogFailure(betterproto.Message):
"""E041"""
@@ -841,7 +908,6 @@ class PartialParsingDeletedMetric(betterproto.Message):
info: "EventInfo" = betterproto.message_field(1)
unique_id: str = betterproto.string_field(2)


@dataclass
class ManifestWrongMetadataVersion(betterproto.Message):
"""I022"""
@@ -1066,20 +1132,128 @@ class PartialParsingDeletedExposure(betterproto.Message):


@dataclass
class InvalidDisabledSourceInTestNode(betterproto.Message):
class InvalidDisabledTargetInTestNode(betterproto.Message):
"""I050"""

info: "EventInfo" = betterproto.message_field(1)
msg: str = betterproto.string_field(2)
resource_type_title: str = betterproto.string_field(2)
unique_id: str = betterproto.string_field(3)
original_file_path: str = betterproto.string_field(4)
target_kind: str = betterproto.string_field(5)
target_name: str = betterproto.string_field(6)
target_package: str = betterproto.string_field(7)


@dataclass
class InvalidRefInTestNode(betterproto.Message):
class UnusedResourceConfigPath(betterproto.Message):
"""I051"""

info: "EventInfo" = betterproto.message_field(1)
unused_config_paths: List[str] = betterproto.string_field(2)


@dataclass
class SeedIncreased(betterproto.Message):
"""I052"""

info: "EventInfo" = betterproto.message_field(1)
package_name: str = betterproto.string_field(2)
name: str = betterproto.string_field(3)


@dataclass
class SeedExceedsLimitSamePath(betterproto.Message):
"""I053"""

info: "EventInfo" = betterproto.message_field(1)
package_name: str = betterproto.string_field(2)
name: str = betterproto.string_field(3)


@dataclass
class SeedExceedsLimitAndPathChanged(betterproto.Message):
"""I054"""

info: "EventInfo" = betterproto.message_field(1)
package_name: str = betterproto.string_field(2)
name: str = betterproto.string_field(3)


@dataclass
class SeedExceedsLimitChecksumChanged(betterproto.Message):
"""I055"""

info: "EventInfo" = betterproto.message_field(1)
package_name: str = betterproto.string_field(2)
name: str = betterproto.string_field(3)
checksum_name: str = betterproto.string_field(4)


@dataclass
class UnusedTables(betterproto.Message):
"""I056"""

info: "EventInfo" = betterproto.message_field(1)
unused_tables: List[str] = betterproto.string_field(2)


@dataclass
class WrongResourceSchemaFile(betterproto.Message):
"""I057"""

info: "EventInfo" = betterproto.message_field(1)
patch_name: str = betterproto.string_field(2)
resource_type: str = betterproto.string_field(3)
plural_resource_type: str = betterproto.string_field(4)
yaml_key: str = betterproto.string_field(5)
file_path: str = betterproto.string_field(6)


@dataclass
class NoNodeForYamlKey(betterproto.Message):
"""I058"""

info: "EventInfo" = betterproto.message_field(1)
patch_name: str = betterproto.string_field(2)
yaml_key: str = betterproto.string_field(3)
file_path: str = betterproto.string_field(4)


@dataclass
class MacroPatchNotFound(betterproto.Message):
"""I059"""

info: "EventInfo" = betterproto.message_field(1)
patch_name: str = betterproto.string_field(2)


@dataclass
class NodeNotFoundOrDisabled(betterproto.Message):
"""I060"""

info: "EventInfo" = betterproto.message_field(1)
original_file_path: str = betterproto.string_field(2)
unique_id: str = betterproto.string_field(3)
resource_type_title: str = betterproto.string_field(4)
target_name: str = betterproto.string_field(5)
target_kind: str = betterproto.string_field(6)
target_package: str = betterproto.string_field(7)
disabled: str = betterproto.string_field(8)


@dataclass
class JinjaLogWarning(betterproto.Message):
"""I061"""

info: "EventInfo" = betterproto.message_field(1)
msg: str = betterproto.string_field(2)

@dataclass
class PartialParsingDeletedEntity(betterproto.Message):
"""I062"""

info: "EventInfo" = betterproto.message_field(1)
unique_id: str = betterproto.string_field(2)

@dataclass
class GitSparseCheckoutSubdirectory(betterproto.Message):
@@ -1166,7 +1340,7 @@ class SelectorReportInvalidSelector(betterproto.Message):


@dataclass
class MacroEventInfo(betterproto.Message):
class JinjaLogInfo(betterproto.Message):
"""M011"""

info: "EventInfo" = betterproto.message_field(1)
@@ -1174,7 +1348,7 @@ class MacroEventInfo(betterproto.Message):


@dataclass
class MacroEventDebug(betterproto.Message):
class JinjaLogDebug(betterproto.Message):
"""M012"""

info: "EventInfo" = betterproto.message_field(1)
@@ -1309,6 +1483,23 @@ class DepsSetDownloadDirectory(betterproto.Message):
path: str = betterproto.string_field(2)


@dataclass
class DepsUnpinned(betterproto.Message):
"""M029"""

info: "EventInfo" = betterproto.message_field(1)
revision: str = betterproto.string_field(2)
git: str = betterproto.string_field(3)


@dataclass
class NoNodesForSelectionCriteria(betterproto.Message):
"""M030"""

info: "EventInfo" = betterproto.message_field(1)
spec_raw: str = betterproto.string_field(2)


@dataclass
class RunningOperationCaughtError(betterproto.Message):
"""Q001"""
@@ -1357,57 +1548,21 @@ class SQLRunnerException(betterproto.Message):


@dataclass
class PrintErrorTestResult(betterproto.Message):
class LogTestResult(betterproto.Message):
"""Q007"""

info: "EventInfo" = betterproto.message_field(1)
node_info: "NodeInfo" = betterproto.message_field(2)
name: str = betterproto.string_field(3)
index: int = betterproto.int32_field(4)
num_models: int = betterproto.int32_field(5)
execution_time: float = betterproto.float_field(6)
status: str = betterproto.string_field(4)
index: int = betterproto.int32_field(5)
num_models: int = betterproto.int32_field(6)
execution_time: float = betterproto.float_field(7)
num_failures: int = betterproto.int32_field(8)


@dataclass
class PrintPassTestResult(betterproto.Message):
"""Q008"""

info: "EventInfo" = betterproto.message_field(1)
node_info: "NodeInfo" = betterproto.message_field(2)
name: str = betterproto.string_field(3)
index: int = betterproto.int32_field(4)
num_models: int = betterproto.int32_field(5)
execution_time: float = betterproto.float_field(6)


@dataclass
class PrintWarnTestResult(betterproto.Message):
"""Q009"""

info: "EventInfo" = betterproto.message_field(1)
node_info: "NodeInfo" = betterproto.message_field(2)
name: str = betterproto.string_field(3)
index: int = betterproto.int32_field(4)
num_models: int = betterproto.int32_field(5)
execution_time: float = betterproto.float_field(6)
num_failures: int = betterproto.int32_field(7)


@dataclass
class PrintFailureTestResult(betterproto.Message):
"""Q010"""

info: "EventInfo" = betterproto.message_field(1)
node_info: "NodeInfo" = betterproto.message_field(2)
name: str = betterproto.string_field(3)
index: int = betterproto.int32_field(4)
num_models: int = betterproto.int32_field(5)
execution_time: float = betterproto.float_field(6)
num_failures: int = betterproto.int32_field(7)


@dataclass
class PrintStartLine(betterproto.Message):
class LogStartLine(betterproto.Message):
"""Q011"""

info: "EventInfo" = betterproto.message_field(1)
@@ -1418,7 +1573,7 @@ class PrintStartLine(betterproto.Message):


@dataclass
class PrintModelResultLine(betterproto.Message):
class LogModelResult(betterproto.Message):
"""Q012"""

info: "EventInfo" = betterproto.message_field(1)
@@ -1427,40 +1582,11 @@ class PrintModelResultLine(betterproto.Message):
status: str = betterproto.string_field(4)
index: int = betterproto.int32_field(5)
total: int = betterproto.int32_field(6)
execution_time: float = betterproto.float_field(7)
execution_time: int = betterproto.int32_field(7)


@dataclass
class PrintModelErrorResultLine(betterproto.Message):
"""Q013"""

info: "EventInfo" = betterproto.message_field(1)
node_info: "NodeInfo" = betterproto.message_field(2)
description: str = betterproto.string_field(3)
status: str = betterproto.string_field(4)
index: int = betterproto.int32_field(5)
total: int = betterproto.int32_field(6)
execution_time: float = betterproto.float_field(7)


@dataclass
class PrintSnapshotErrorResultLine(betterproto.Message):
"""Q014"""

info: "EventInfo" = betterproto.message_field(1)
node_info: "NodeInfo" = betterproto.message_field(2)
description: str = betterproto.string_field(3)
status: str = betterproto.string_field(4)
index: int = betterproto.int32_field(5)
total: int = betterproto.int32_field(6)
execution_time: float = betterproto.float_field(7)
cfg: Dict[str, str] = betterproto.map_field(
8, betterproto.TYPE_STRING, betterproto.TYPE_STRING
)


@dataclass
class PrintSnapshotResultLine(betterproto.Message):
class LogSnapshotResult(betterproto.Message):
"""Q015"""

info: "EventInfo" = betterproto.message_field(1)
@@ -1476,87 +1602,36 @@ class PrintSnapshotResultLine(betterproto.Message):


@dataclass
class PrintSeedErrorResultLine(betterproto.Message):
class LogSeedResult(betterproto.Message):
"""Q016"""

info: "EventInfo" = betterproto.message_field(1)
node_info: "NodeInfo" = betterproto.message_field(2)
status: str = betterproto.string_field(3)
index: int = betterproto.int32_field(4)
total: int = betterproto.int32_field(5)
execution_time: float = betterproto.float_field(6)
schema: str = betterproto.string_field(7)
relation: str = betterproto.string_field(8)
result_message: str = betterproto.string_field(4)
index: int = betterproto.int32_field(5)
total: int = betterproto.int32_field(6)
execution_time: float = betterproto.float_field(7)
schema: str = betterproto.string_field(8)
relation: str = betterproto.string_field(9)


@dataclass
class PrintSeedResultLine(betterproto.Message):
"""Q017"""

info: "EventInfo" = betterproto.message_field(1)
node_info: "NodeInfo" = betterproto.message_field(2)
status: str = betterproto.string_field(3)
index: int = betterproto.int32_field(4)
total: int = betterproto.int32_field(5)
execution_time: float = betterproto.float_field(6)
schema: str = betterproto.string_field(7)
relation: str = betterproto.string_field(8)


@dataclass
class PrintFreshnessErrorLine(betterproto.Message):
class LogFreshnessResult(betterproto.Message):
"""Q018"""

info: "EventInfo" = betterproto.message_field(1)
node_info: "NodeInfo" = betterproto.message_field(2)
index: int = betterproto.int32_field(3)
total: int = betterproto.int32_field(4)
execution_time: float = betterproto.float_field(5)
source_name: str = betterproto.string_field(6)
table_name: str = betterproto.string_field(7)
status: str = betterproto.string_field(2)
node_info: "NodeInfo" = betterproto.message_field(3)
index: int = betterproto.int32_field(4)
total: int = betterproto.int32_field(5)
execution_time: float = betterproto.float_field(6)
source_name: str = betterproto.string_field(7)
table_name: str = betterproto.string_field(8)


@dataclass
class PrintFreshnessErrorStaleLine(betterproto.Message):
"""Q019"""

info: "EventInfo" = betterproto.message_field(1)
node_info: "NodeInfo" = betterproto.message_field(2)
index: int = betterproto.int32_field(3)
total: int = betterproto.int32_field(4)
execution_time: float = betterproto.float_field(5)
source_name: str = betterproto.string_field(6)
table_name: str = betterproto.string_field(7)


@dataclass
class PrintFreshnessWarnLine(betterproto.Message):
"""Q020"""

info: "EventInfo" = betterproto.message_field(1)
node_info: "NodeInfo" = betterproto.message_field(2)
index: int = betterproto.int32_field(3)
total: int = betterproto.int32_field(4)
execution_time: float = betterproto.float_field(5)
source_name: str = betterproto.string_field(6)
table_name: str = betterproto.string_field(7)


@dataclass
class PrintFreshnessPassLine(betterproto.Message):
"""Q021"""

info: "EventInfo" = betterproto.message_field(1)
node_info: "NodeInfo" = betterproto.message_field(2)
index: int = betterproto.int32_field(3)
total: int = betterproto.int32_field(4)
execution_time: float = betterproto.float_field(5)
source_name: str = betterproto.string_field(6)
table_name: str = betterproto.string_field(7)


@dataclass
class PrintCancelLine(betterproto.Message):
class LogCancelLine(betterproto.Message):
"""Q022"""

info: "EventInfo" = betterproto.message_field(1)
@@ -1642,7 +1717,7 @@ class NodeExecuting(betterproto.Message):


@dataclass
class PrintHookStartLine(betterproto.Message):
class LogHookStartLine(betterproto.Message):
"""Q032"""

info: "EventInfo" = betterproto.message_field(1)
@@ -1653,7 +1728,7 @@ class PrintHookStartLine(betterproto.Message):


@dataclass
class PrintHookEndLine(betterproto.Message):
class LogHookEndLine(betterproto.Message):
"""Q033"""

info: "EventInfo" = betterproto.message_field(1)
@@ -1678,6 +1753,13 @@ class SkippingDetails(betterproto.Message):
total: int = betterproto.int32_field(7)


@dataclass
class NothingToDo(betterproto.Message):
"""Q035"""

info: "EventInfo" = betterproto.message_field(1)


@dataclass
class RunningOperationUncaughtError(betterproto.Message):
"""Q036"""
@@ -1697,6 +1779,13 @@ class EndRunResult(betterproto.Message):
success: bool = betterproto.bool_field(5)


@dataclass
class NoNodesSelected(betterproto.Message):
"""Q038"""

info: "EventInfo" = betterproto.message_field(1)


@dataclass
class CatchableExceptionOnRun(betterproto.Message):
"""W002"""
@@ -1824,7 +1913,7 @@ class TimingInfoCollected(betterproto.Message):


@dataclass
class PrintDebugStackTrace(betterproto.Message):
class LogDebugStackTrace(betterproto.Message):
"""Z011"""

info: "EventInfo" = betterproto.message_field(1)
@@ -1991,7 +2080,7 @@ class EndOfRunSummary(betterproto.Message):


@dataclass
class PrintSkipBecauseError(betterproto.Message):
class LogSkipBecauseError(betterproto.Message):
"""Z034"""

info: "EventInfo" = betterproto.message_field(1)
@@ -2066,34 +2155,16 @@ class TrackingInitializeFailure(betterproto.Message):
exc_info: str = betterproto.string_field(2)


@dataclass
class GeneralWarningMsg(betterproto.Message):
"""Z046"""

info: "EventInfo" = betterproto.message_field(1)
msg: str = betterproto.string_field(2)
log_fmt: str = betterproto.string_field(3)


@dataclass
class GeneralWarningException(betterproto.Message):
"""Z047"""

info: "EventInfo" = betterproto.message_field(1)
exc: str = betterproto.string_field(2)
log_fmt: str = betterproto.string_field(3)


@dataclass
class EventBufferFull(betterproto.Message):
"""Z048"""
"""Z045"""

info: "EventInfo" = betterproto.message_field(1)


@dataclass
class RunResultWarningMessage(betterproto.Message):
"""Z049"""
"""Z046"""

info: "EventInfo" = betterproto.message_field(1)
msg: str = betterproto.string_field(2)
@@ -61,18 +61,3 @@ class UnitTestInfo(InfoLevel, NoFile, pl.UnitTestInfo):

def message(self) -> str:
return f"Unit Test: {self.msg}"


# since mypy doesn't run on every file we need to suggest to mypy that every
# class gets instantiated. But we don't actually want to run this code.
# making the conditional `if False` causes mypy to skip it as dead code so
# we need to skirt around that by computing something it doesn't check statically.
#
# TODO remove these lines once we run mypy everywhere.
if 1 == 0:
IntegrationTestInfo(msg="")
IntegrationTestDebug(msg="")
IntegrationTestWarn(msg="")
IntegrationTestError(msg="")
IntegrationTestException(msg="")
UnitTestInfo(msg="")
@@ -15,6 +15,7 @@ message EventInfo {
string thread = 7;
google.protobuf.Timestamp ts = 8;
map<string, string> extra = 9;
string category = 10;
}

// TimingInfo
@@ -38,7 +39,6 @@ message NodeInfo {

// RunResult
message RunResultMsg {
// status: Union[RunStatus, TestStatus, FreshnessStatus]
string status = 1;
string message = 2;
repeated TimingInfoMsg timing_info = 3;
@@ -213,6 +213,53 @@ message ProjectCreated {
string slack_url = 4;
}

// D - Deprecation

// D001
message PackageRedirectDeprecation {
EventInfo info = 1;
string old_name = 2;
string new_name = 3;
}

// D002
message PackageInstallPathDeprecation {
EventInfo info = 1;
}

// D003
message ConfigSourcePathDeprecation {
EventInfo info = 1;
string deprecated_path = 2;
string exp_path = 3;
}

// D004
message ConfigDataPathDeprecation {
EventInfo info = 1;
string deprecated_path = 2;
string exp_path = 3;
}

//D005
message AdapterDeprecationWarning {
EventInfo info = 1;
string old_name = 2;
string new_name = 3;
}

//D006
message MetricAttributesRenamed {
EventInfo info = 1;
string metric_name = 2;
}

//D007
message ExposureNameDeprecation {
EventInfo info = 1;
string exposure = 2;
}

// E - DB Adapter

// E001
@@ -262,13 +309,13 @@ message ConnectionReused {
}

// E007
message ConnectionLeftOpen {
message ConnectionLeftOpenInCleanup {
EventInfo info = 1;
string conn_name = 2;
}

// E008
message ConnectionClosed {
message ConnectionClosedInCleanup {
EventInfo info = 1;
string conn_name = 2;
}
@@ -281,13 +328,13 @@ message RollbackFailed {
}

// E010
message ConnectionClosed2 {
message ConnectionClosed {
EventInfo info = 1;
string conn_name = 2;
}

// E011
message ConnectionLeftOpen2 {
message ConnectionLeftOpen {
EventInfo info = 1;
string conn_name = 2;
}
@@ -455,7 +502,6 @@ message AdapterImportError {
message PluginLoadError {
EventInfo info = 1;
string exc_info = 2;

}

// E037
@@ -478,7 +524,11 @@ message CodeExecutionStatus {
float elapsed = 3;
}

// Skipped E040
// E040
message CatalogGenerationError {
EventInfo info = 1;
string exc = 2;
}

// E041
message WriteCatalogFailure {
@@ -806,18 +856,99 @@ message PartialParsingDeletedExposure {
}

// I050
message InvalidDisabledSourceInTestNode {
message InvalidDisabledTargetInTestNode {
EventInfo info = 1;
string msg = 2;
string resource_type_title = 2;
string unique_id = 3;
string original_file_path = 4;
string target_kind = 5;
string target_name = 6;
string target_package = 7;
}

// I051
message InvalidRefInTestNode {
message UnusedResourceConfigPath {
EventInfo info = 1;
repeated string unused_config_paths = 2;
}

// I052
message SeedIncreased {
EventInfo info = 1;
string package_name = 2;
string name = 3;
}

// I053
message SeedExceedsLimitSamePath {
EventInfo info = 1;
string package_name = 2;
string name = 3;
}

// I054
message SeedExceedsLimitAndPathChanged {
EventInfo info = 1;
string package_name = 2;
string name = 3;
}

// I055
message SeedExceedsLimitChecksumChanged {
EventInfo info = 1;
string package_name = 2;
string name = 3;
string checksum_name = 4;
}

// I056
message UnusedTables {
EventInfo info = 1;
repeated string unused_tables = 2;
}

// I057
message WrongResourceSchemaFile {
EventInfo info = 1;
string patch_name = 2;
string resource_type = 3;
string plural_resource_type = 4;
string yaml_key = 5;
string file_path = 6;
}

// I058
message NoNodeForYamlKey {
EventInfo info = 1;
string patch_name = 2;
string yaml_key = 3;
string file_path = 4;
}

// I059
message MacroPatchNotFound {
EventInfo info = 1;
string patch_name = 2;
}

// I060
message NodeNotFoundOrDisabled {
EventInfo info = 1;
string original_file_path = 2;
string unique_id = 3;
string resource_type_title = 4;
string target_name = 5;
string target_kind = 6;
string target_package = 7;
string disabled = 8;
}

// I061
message JinjaLogWarning {
EventInfo info = 1;
string msg = 2;
}


// M - Deps generation

// M001
@@ -885,13 +1016,13 @@ message SelectorReportInvalidSelector {
}

// M011
message MacroEventInfo {
message JinjaLogInfo {
EventInfo info = 1;
string msg = 2;
}

// M012
message MacroEventDebug {
message JinjaLogDebug {
EventInfo info = 1;
string msg = 2;
}
@@ -992,6 +1123,19 @@ message DepsSetDownloadDirectory {
string path = 2;
}

// M029
message DepsUnpinned {
EventInfo info = 1;
string revision = 2;
string git = 3;
}

// M030
message NoNodesForSelectionCriteria {
EventInfo info = 1;
string spec_raw = 2;
}

// Q - Node execution

// Q001
@@ -1030,49 +1174,23 @@ message SQLRunnerException {
}

// Q007
message PrintErrorTestResult {
message LogTestResult {
EventInfo info = 1;
NodeInfo node_info = 2;
string name = 3;
int32 index = 4;
int32 num_models = 5;
float execution_time = 6;
string status = 4;
int32 index = 5;
int32 num_models = 6;
float execution_time = 7;
int32 num_failures = 8;
}

// Q008
message PrintPassTestResult {
EventInfo info = 1;
NodeInfo node_info = 2;
string name = 3;
int32 index = 4;
int32 num_models = 5;
float execution_time = 6;
}

// Q009
message PrintWarnTestResult {
EventInfo info = 1;
NodeInfo node_info = 2;
string name = 3;
int32 index = 4;
int32 num_models = 5;
float execution_time = 6;
int32 num_failures = 7;
}
// Skipped Q008, Q009, Q010

// Q010
message PrintFailureTestResult {
EventInfo info = 1;
NodeInfo node_info = 2;
string name = 3;
int32 index = 4;
int32 num_models = 5;
float execution_time = 6;
int32 num_failures = 7;
}

// Q011
message PrintStartLine {
message LogStartLine {
EventInfo info = 1;
NodeInfo node_info = 2;
string description = 3;
@@ -1081,41 +1199,20 @@ message PrintStartLine {
}

// Q012
message PrintModelResultLine {
message LogModelResult {
EventInfo info = 1;
NodeInfo node_info = 2;
string description = 3;
string status = 4;
int32 index = 5;
int32 total = 6;
float execution_time = 7;
int32 execution_time = 7;
}

// Q013
message PrintModelErrorResultLine {
EventInfo info = 1;
NodeInfo node_info = 2;
string description = 3;
string status = 4;
int32 index = 5;
int32 total = 6;
float execution_time = 7;
}

// Q014
message PrintSnapshotErrorResultLine {
EventInfo info = 1;
NodeInfo node_info = 2;
string description = 3;
string status = 4;
int32 index = 5;
int32 total = 6;
float execution_time = 7;
map<string, string> cfg = 8;
}
// skipped Q013, Q014

// Q015
message PrintSnapshotResultLine {
message LogSnapshotResult {
EventInfo info = 1;
NodeInfo node_info = 2;
string description = 3;
@@ -1127,75 +1224,38 @@ message PrintSnapshotResultLine {
}

// Q016
message PrintSeedErrorResultLine {
message LogSeedResult {
EventInfo info = 1;
NodeInfo node_info = 2;
string status = 3;
int32 index = 4;
int32 total = 5;
float execution_time = 6;
string schema = 7;
string relation = 8;
string result_message = 4;
int32 index = 5;
int32 total = 6;
float execution_time = 7;
string schema = 8;
string relation = 9;
}

// Q017
message PrintSeedResultLine {
EventInfo info = 1;
NodeInfo node_info = 2;
string status = 3;
int32 index = 4;
int32 total = 5;
float execution_time = 6;
string schema = 7;
string relation = 8;
}
// Skipped Q017

// Q018
message PrintFreshnessErrorLine {
message LogFreshnessResult {
EventInfo info = 1;
NodeInfo node_info = 2;
int32 index = 3;
int32 total = 4;
float execution_time = 5;
string source_name = 6;
string table_name = 7;
string status = 2;
NodeInfo node_info = 3;
int32 index = 4;
int32 total = 5;
float execution_time = 6;
string source_name = 7;
string table_name = 8;
}

// Q019
message PrintFreshnessErrorStaleLine {
EventInfo info = 1;
NodeInfo node_info = 2;
int32 index = 3;
int32 total = 4;
float execution_time = 5;
string source_name = 6;
string table_name = 7;
}

// Q020
message PrintFreshnessWarnLine {
EventInfo info = 1;
NodeInfo node_info = 2;
int32 index = 3;
int32 total = 4;
float execution_time = 5;
string source_name = 6;
string table_name = 7;
}
// Skipped Q019, Q020, Q021

// Q021
message PrintFreshnessPassLine {
EventInfo info = 1;
NodeInfo node_info = 2;
int32 index = 3;
int32 total = 4;
float execution_time = 5;
string source_name = 6;
string table_name = 7;
}

// Q022
message PrintCancelLine {
message LogCancelLine {
EventInfo info = 1;
string conn_name = 2;
}
@@ -1261,7 +1321,7 @@ message NodeExecuting {
}

// Q032
message PrintHookStartLine {
message LogHookStartLine {
EventInfo info = 1;
NodeInfo node_info = 2;
string statement = 3;
@@ -1270,7 +1330,7 @@ message PrintHookStartLine {
}

// Q033
message PrintHookEndLine {
message LogHookEndLine {
EventInfo info = 1;
NodeInfo node_info = 2;
string statement = 3;
@@ -1291,7 +1351,10 @@ message SkippingDetails {
int32 total = 7;
}

// Skipped Q035
// Q035
message NothingToDo {
EventInfo info = 1;
}

// Q036
message RunningOperationUncaughtError {
@@ -1308,6 +1371,11 @@ message EndRunResult {
bool success = 5;
}

// Q038
message NoNodesSelected {
EventInfo info = 1;
}

// W - Node testing

// Skipped W001
@@ -1411,7 +1479,7 @@ message TimingInfoCollected {
}

// Z011
message PrintDebugStackTrace {
message LogDebugStackTrace {
EventInfo info = 1;
string exc_info = 2;
}
@@ -1538,7 +1606,7 @@ message EndOfRunSummary {
// Skipped Z031, Z032, Z033

// Z034
message PrintSkipBecauseError {
message LogSkipBecauseError {
EventInfo info = 1;
string schema = 2;
string relation = 3;
@@ -1593,28 +1661,12 @@ message TrackingInitializeFailure {
string exc_info = 2;
}

// Skipped Z045

// Z046
message GeneralWarningMsg {
EventInfo info = 1;
string msg = 2;
string log_fmt = 3;
}

// Z047
message GeneralWarningException {
EventInfo info = 1;
string exc = 2;
string log_fmt = 3;
}

// Z048
// Z045
message EventBufferFull {
EventInfo info = 1;
}

// Z049
// Z046
message RunResultWarningMessage {
EventInfo info = 1;
string msg = 2;
File diff suppressed because it is too large
@@ -2,11 +2,9 @@ import builtins
import functools
from typing import NoReturn, Optional, Mapping, Any

from dbt.events.functions import fire_event, scrub_secrets, env_secrets
from dbt.events.types import GeneralWarningMsg, GeneralWarningException
from dbt.events.helpers import env_secrets, scrub_secrets
from dbt.events.types import JinjaLogWarning
from dbt.node_types import NodeType
from dbt import flags
from dbt.ui import line_wrap_message, warning_tag

import dbt.dataclass_schema

@@ -570,74 +568,11 @@ def doc_target_not_found(
raise_compiler_error(msg, model)


def _get_target_failure_msg(
def get_not_found_or_disabled_msg(
original_file_path,
unique_id,
resource_type_title,
target_name: str,
target_model_package: Optional[str],
include_path: bool,
reason: str,
target_kind: str,
) -> str:
target_package_string = ""
if target_model_package is not None:
target_package_string = "in package '{}' ".format(target_model_package)

source_path_string = ""
if include_path:
source_path_string = " ({})".format(original_file_path)

return "{} '{}'{} depends on a {} named '{}' {}which {}".format(
resource_type_title,
unique_id,
source_path_string,
target_kind,
target_name,
target_package_string,
reason,
)


def get_target_not_found_or_disabled_msg(
node,
target_name: str,
target_package: Optional[str],
disabled: Optional[bool] = None,
) -> str:
if disabled is None:
reason = "was not found or is disabled"
elif disabled is True:
reason = "is disabled"
else:
reason = "was not found"
return _get_target_failure_msg(
node.original_file_path,
node.unique_id,
node.resource_type.title(),
target_name,
target_package,
include_path=True,
reason=reason,
target_kind="node",
)


def ref_target_not_found(
model,
target_model_name: str,
target_model_package: Optional[str],
disabled: Optional[bool] = None,
) -> NoReturn:
msg = get_target_not_found_or_disabled_msg(
model, target_model_name, target_model_package, disabled
)
raise_compiler_error(msg, model)


def get_not_found_or_disabled_msg(
node,
target_name: str,
target_kind: str,
target_package: Optional[str] = None,
disabled: Optional[bool] = None,
@@ -648,15 +583,19 @@ def get_not_found_or_disabled_msg(
reason = "is disabled"
else:
reason = "was not found"
return _get_target_failure_msg(
node.original_file_path,
node.unique_id,
node.resource_type.title(),

target_package_string = ""
if target_package is not None:
target_package_string = "in package '{}' ".format(target_package)

return "{} '{}' ({}) depends on a {} named '{}' {}which {}".format(
resource_type_title,
unique_id,
original_file_path,
target_kind,
target_name,
target_package,
include_path=True,
reason=reason,
target_kind=target_kind,
target_package_string,
reason,
)


@@ -668,7 +607,9 @@ def target_not_found(
disabled: Optional[bool] = None,
) -> NoReturn:
msg = get_not_found_or_disabled_msg(
node=node,
original_file_path=node.original_file_path,
unique_id=node.unique_id,
resource_type_title=node.resource_type.title(),
target_name=target_name,
target_kind=target_kind,
target_package=target_package,
@@ -976,9 +917,7 @@ def raise_patch_targets_not_found(patches):

def _fix_dupe_msg(path_1: str, path_2: str, name: str, type_name: str) -> str:
if path_1 == path_2:
return (
f"remove one of the {type_name} entries for {name} in this file:\n" f"  - {path_1!s}\n"
)
return f"remove one of the {type_name} entries for {name} in this file:\n - {path_1!s}\n"
else:
return (
f"remove the {type_name} entry for {name} in one of these files:\n"
@@ -1043,19 +982,6 @@ def raise_unrecognized_credentials_type(typename, supported_types):
)


def warn_invalid_patch(patch, resource_type):
msg = line_wrap_message(
f"""\
'{patch.name}' is a {resource_type} node, but it is
specified in the {patch.yaml_key} section of
{patch.original_file_path}.
To fix this error, place the `{patch.name}`
specification under the {resource_type.pluralize()} key instead.
"""
)
warn_or_error(msg, log_fmt=warning_tag("{}"))


def raise_not_implemented(msg):
raise NotImplementedException("ERROR: {}".format(msg))

@@ -1069,24 +995,8 @@ def raise_duplicate_alias(
raise AliasException(f'Got duplicate keys: ({key_names}) all map to "{canonical_key}"')


def warn_or_error(msg, node=None, log_fmt=None):
if flags.WARN_ERROR:
raise_compiler_error(scrub_secrets(msg, env_secrets()), node)
else:
fire_event(GeneralWarningMsg(msg=msg, log_fmt=log_fmt))


def warn_or_raise(exc, log_fmt=None):
if flags.WARN_ERROR:
raise exc
else:
fire_event(GeneralWarningException(exc=str(exc), log_fmt=log_fmt))


def warn(msg, node=None):
# there's no reason to expose log_fmt to macros - it's only useful for
# handling colors
warn_or_error(msg, node=node)
dbt.events.functions.warn_or_error(JinjaLogWarning(msg=msg), node=node)
return ""
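The refactor above inlines the package and path formatting directly into `get_not_found_or_disabled_msg`, so the old `_get_target_failure_msg` helper disappears. A standalone sketch of the resulting message shape; the node values below are invented for illustration and this is a re-declaration, not an import from dbt:

```python
# Standalone sketch of the message format produced by the refactored helper.
from typing import Optional


def get_not_found_or_disabled_msg(
    original_file_path: str,
    unique_id: str,
    resource_type_title: str,
    target_name: str,
    target_kind: str,
    target_package: Optional[str] = None,
    disabled: Optional[bool] = None,
) -> str:
    if disabled is None:
        reason = "was not found or is disabled"
    elif disabled is True:
        reason = "is disabled"
    else:
        reason = "was not found"
    target_package_string = ""
    if target_package is not None:
        target_package_string = "in package '{}' ".format(target_package)
    return "{} '{}' ({}) depends on a {} named '{}' {}which {}".format(
        resource_type_title,
        unique_id,
        original_file_path,
        target_kind,
        target_name,
        target_package_string,
        reason,
    )


print(
    get_not_found_or_disabled_msg(
        original_file_path="models/orders.sql",        # invented example values
        unique_id="model.example_project.orders",
        resource_type_title="Model",
        target_name="stg_orders",
        target_kind="node",
        disabled=True,
    )
)
# -> "Model 'model.example_project.orders' (models/orders.sql) depends on a node named 'stg_orders' which is disabled"
```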
@@ -113,6 +113,7 @@ def env_set_path(key: str) -> Optional[Path]:

MACRO_DEBUGGING = env_set_truthy("DBT_MACRO_DEBUGGING")
DEFER_MODE = env_set_truthy("DBT_DEFER_TO_STATE")
FAVOR_STATE_MODE = env_set_truthy("DBT_FAVOR_STATE_STATE")
ARTIFACT_STATE_PATH = env_set_path("DBT_ARTIFACT_STATE_PATH")
ENABLE_LEGACY_LOGGER = env_set_truthy("DBT_ENABLE_LEGACY_LOGGER")
@@ -20,7 +20,7 @@ from .selector_spec import (

INTERSECTION_DELIMITER = ","

DEFAULT_INCLUDES: List[str] = ["fqn:*", "source:*", "exposure:*", "metric:*"]
DEFAULT_INCLUDES: List[str] = ["fqn:*", "source:*", "exposure:*", "metric:*", "entity:*"]
DEFAULT_EXCLUDES: List[str] = []


@@ -90,7 +90,7 @@ class Graph:
for node in include_nodes:
if node not in new_graph:
raise ValueError(
"Couldn't find model '{}' -- does it exist or is " "it disabled?".format(node)
"Couldn't find model '{}' -- does it exist or is it disabled?".format(node)
)

return Graph(new_graph)

@@ -5,7 +5,7 @@ from queue import PriorityQueue
from typing import Dict, Set, List, Generator, Optional

from .graph import UniqueId
from dbt.contracts.graph.parsed import ParsedSourceDefinition, ParsedExposure, ParsedMetric
from dbt.contracts.graph.parsed import ParsedSourceDefinition, ParsedExposure, ParsedMetric, ParsedEntity
from dbt.contracts.graph.compiled import GraphMemberNode
from dbt.contracts.graph.manifest import Manifest
from dbt.node_types import NodeType
@@ -48,7 +48,7 @@ class GraphQueue:
if node.resource_type != NodeType.Model:
return False
# must be a Model - tell mypy this won't be a Source or Exposure or Metric
assert not isinstance(node, (ParsedSourceDefinition, ParsedExposure, ParsedMetric))
assert not isinstance(node, (ParsedSourceDefinition, ParsedExposure, ParsedMetric, ParsedEntity))
if node.is_ephemeral:
return False
return True

@@ -5,13 +5,12 @@ from .queue import GraphQueue
from .selector_methods import MethodManager
from .selector_spec import SelectionCriteria, SelectionSpec, IndirectSelection

from dbt.events.functions import fire_event
from dbt.events.types import SelectorReportInvalidSelector
from dbt.events.functions import fire_event, warn_or_error
from dbt.events.types import SelectorReportInvalidSelector, NoNodesForSelectionCriteria
from dbt.node_types import NodeType
from dbt.exceptions import (
InternalException,
InvalidSelectorException,
warn_or_error,
)
from dbt.contracts.graph.compiled import GraphMemberNode
from dbt.contracts.graph.manifest import Manifest
@@ -24,11 +23,6 @@ def get_package_names(nodes):
return set([node.split(".")[1] for node in nodes])


def alert_non_existence(raw_spec, nodes):
if len(nodes) == 0:
warn_or_error(f"The selection criterion '{str(raw_spec)}' does not match" f" any nodes")


def can_select_indirectly(node):
"""If a node is not selected itself, but its parent(s) are, it may qualify
for indirect selection.
@@ -142,8 +136,8 @@ class NodeSelector(MethodManager):

direct_nodes = self.incorporate_indirect_nodes(initial_direct, indirect_nodes)

if spec.expect_exists:
alert_non_existence(spec.raw, direct_nodes)
if spec.expect_exists and len(direct_nodes) == 0:
warn_or_error(NoNodesForSelectionCriteria(spec_raw=str(spec.raw)))

return direct_nodes, indirect_nodes

@@ -167,6 +161,9 @@ class NodeSelector(MethodManager):
elif unique_id in self.manifest.metrics:
metric = self.manifest.metrics[unique_id]
return metric.config.enabled
elif unique_id in self.manifest.entities:
metric = self.manifest.entities[unique_id]
return metric.config.enabled
node = self.manifest.nodes[unique_id]
return not node.empty and node.config.enabled

@@ -186,6 +183,8 @@ class NodeSelector(MethodManager):
node = self.manifest.exposures[unique_id]
elif unique_id in self.manifest.metrics:
node = self.manifest.metrics[unique_id]
elif unique_id in self.manifest.entities:
node = self.manifest.entities[unique_id]
else:
raise InternalException(f"Node {unique_id} not found in the manifest!")
return self.node_is_match(node)

@@ -19,6 +19,7 @@ from dbt.contracts.graph.parsed import (
ParsedSingularTestNode,
ParsedExposure,
ParsedMetric,
ParsedEntity,
ParsedGenericTestNode,
ParsedSourceDefinition,
)
@@ -48,6 +49,7 @@ class MethodName(StrEnum):
State = "state"
Exposure = "exposure"
Metric = "metric"
Entity = "entity"
Result = "result"
SourceStatus = "source_status"

@@ -76,7 +78,7 @@ def is_selected_node(fqn: List[str], node_selector: str):
return True


SelectorTarget = Union[ParsedSourceDefinition, ManifestNode, ParsedExposure, ParsedMetric]
SelectorTarget = Union[ParsedSourceDefinition, ManifestNode, ParsedExposure, ParsedMetric, ParsedEntity]


class SelectorMethod(metaclass=abc.ABCMeta):
@@ -127,6 +129,16 @@ class SelectorMethod(metaclass=abc.ABCMeta):
continue
yield unique_id, metric

def entity_nodes(
self, included_nodes: Set[UniqueId]
) -> Iterator[Tuple[UniqueId, ParsedEntity]]:

for key, metric in self.manifest.entities.items():
unique_id = UniqueId(key)
if unique_id not in included_nodes:
continue
yield unique_id, metric

def all_nodes(
self, included_nodes: Set[UniqueId]
) -> Iterator[Tuple[UniqueId, SelectorTarget]]:
@@ -135,6 +147,7 @@ class SelectorMethod(metaclass=abc.ABCMeta):
self.source_nodes(included_nodes),
self.exposure_nodes(included_nodes),
self.metric_nodes(included_nodes),
self.entity_nodes(included_nodes),
)

def configurable_nodes(
@@ -145,11 +158,12 @@ class SelectorMethod(metaclass=abc.ABCMeta):
def non_source_nodes(
self,
included_nodes: Set[UniqueId],
) -> Iterator[Tuple[UniqueId, Union[ParsedExposure, ManifestNode, ParsedMetric]]]:
) -> Iterator[Tuple[UniqueId, Union[ParsedExposure, ManifestNode, ParsedMetric, ParsedEntity]]]:
yield from chain(
self.parsed_nodes(included_nodes),
self.exposure_nodes(included_nodes),
self.metric_nodes(included_nodes),
self.entity_nodes(included_nodes),
)

@abc.abstractmethod
@@ -278,6 +292,30 @@ class MetricSelectorMethod(SelectorMethod):

yield node

class EntitySelectorMethod(SelectorMethod):
"""TODO: Add a description of what this selector method is doing"""
def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[UniqueId]:
parts = selector.split(".")
target_package = SELECTOR_GLOB
if len(parts) == 1:
target_name = parts[0]
elif len(parts) == 2:
target_package, target_name = parts
else:
msg = (
'Invalid entity selector value "{}". Entities must be of '
"the form ${{entity_name}} or "
"${{entity_package.entity_name}}"
).format(selector)
raise RuntimeException(msg)

for node, real_node in self.entity_nodes(included_nodes):
if target_package not in (real_node.package_name, SELECTOR_GLOB):
continue
if target_name not in (real_node.name, SELECTOR_GLOB):
continue

yield node

class PathSelectorMethod(SelectorMethod):
def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[UniqueId]:
@@ -286,8 +324,6 @@ class PathSelectorMethod(SelectorMethod):
root = Path.cwd()
paths = set(p.relative_to(root) for p in root.glob(selector))
for node, real_node in self.all_nodes(included_nodes):
if Path(real_node.root_path) != root:
continue
ofp = Path(real_node.original_file_path)
if ofp in paths:
yield node
@@ -538,6 +574,8 @@ class StateSelectorMethod(SelectorMethod):
previous_node = manifest.exposures[node]
elif node in manifest.metrics:
previous_node = manifest.metrics[node]
elif node in manifest.entities:
previous_node = manifest.entities[node]

if checker(previous_node, real_node):
yield node
@@ -626,6 +664,7 @@ class MethodManager:
MethodName.State: StateSelectorMethod,
MethodName.Exposure: ExposureSelectorMethod,
MethodName.Metric: MetricSelectorMethod,
MethodName.Entity: EntitySelectorMethod,
MethodName.Result: ResultSelectorMethod,
MethodName.SourceStatus: SourceStatusSelectorMethod,
}
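The new `EntitySelectorMethod` parses `entity:` selector values the same way the metric method does: an optional package prefix separated by a dot from the entity name. A small standalone parsing sketch; `SELECTOR_GLOB` is assumed to be `"*"` here and the error type is simplified to `ValueError`:

```python
# Standalone sketch of the selector parsing used above.
from typing import Tuple

SELECTOR_GLOB = "*"  # assumed value for this sketch


def parse_entity_selector(selector: str) -> Tuple[str, str]:
    parts = selector.split(".")
    if len(parts) == 1:
        return SELECTOR_GLOB, parts[0]     # any package, named entity
    if len(parts) == 2:
        return parts[0], parts[1]          # entity_package.entity_name
    raise ValueError(f'Invalid entity selector value "{selector}"')


assert parse_entity_selector("customers") == ("*", "customers")
assert parse_entity_selector("my_package.customers") == ("my_package", "customers")
```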
File diff suppressed because one or more lines are too long
@@ -1,4 +1,6 @@
|
||||
import os
|
||||
from dbt.config.project import Project
|
||||
from dbt.config.renderer import DbtProjectYamlRenderer
|
||||
from dbt.contracts.results import RunningStatus, collect_timing_info
|
||||
from dbt.events.functions import fire_event
|
||||
from dbt.events.types import NodeCompiling, NodeExecuting
|
||||
@@ -71,16 +73,22 @@ def get_dbt_config(project_dir, args=None, single_threaded=False):
    else:
        profiles_dir = flags.DEFAULT_PROFILES_DIR

    profile_name = getattr(args, "profile", None)

    runtime_args = RuntimeArgs(
        project_dir=project_dir,
        profiles_dir=profiles_dir,
        single_threaded=single_threaded,
        profile=getattr(args, "profile", None),
        profile=profile_name,
        target=getattr(args, "target", None),
    )

    # Construct a RuntimeConfig from phony args
    config = RuntimeConfig.from_args(runtime_args)
    profile = RuntimeConfig.collect_profile(args=runtime_args, profile_name=profile_name)
    project_renderer = DbtProjectYamlRenderer(profile, None)
    project = RuntimeConfig.collect_project(args=runtime_args, project_renderer=project_renderer)
    assert type(project) is Project

    config = RuntimeConfig.from_parts(project, profile, runtime_args)

    # Set global flags from arguments
    flags.set_from_args(args, config)
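With this change, the profile name flows from the caller's args into both the RuntimeArgs and the explicit profile/project collection, so programmatic callers can target a non-default profile. A hedged usage sketch; the SimpleNamespace args object and the commented call are illustrative, and the get_dbt_config location is assumed from the hunk header above rather than stated in the diff:

```python
from types import SimpleNamespace

# Any object with optional .profile / .target / .profiles_dir attributes works,
# since the function above reads them with getattr(args, ..., None).
args = SimpleNamespace(profile="my_profile", target="dev", profiles_dir=None)

# Illustrative call only:
# config = get_dbt_config("/path/to/dbt/project", args=args)
```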
@@ -28,9 +28,7 @@ if sys.platform == "win32" and (not os.getenv("TERM") or os.getenv("TERM") == "N
    colorama.init(wrap=True)

STDOUT_LOG_FORMAT = "{record.message}"
DEBUG_LOG_FORMAT = (
    "{record.time:%Y-%m-%d %H:%M:%S.%f%z} " "({record.thread_name}): " "{record.message}"
)
DEBUG_LOG_FORMAT = "{record.time:%Y-%m-%d %H:%M:%S.%f%z} ({record.thread_name}): {record.message}"


def get_secret_env() -> List[str]:
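The DEBUG_LOG_FORMAT rewrite above only collapses Python's implicit string-literal concatenation into a single literal; the resulting format string is byte-for-byte identical, which a quick check confirms:

```python
old = (
    "{record.time:%Y-%m-%d %H:%M:%S.%f%z} " "({record.thread_name}): " "{record.message}"
)
new = "{record.time:%Y-%m-%d %H:%M:%S.%f%z} ({record.thread_name}): {record.message}"
assert old == new
```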
@@ -501,6 +501,20 @@ def _add_defer_argument(*subparsers):
    )


def _add_favor_state_argument(*subparsers):
    for sub in subparsers:
        sub.add_optional_argument_inverse(
            "--favor-state",
            enable_help="""
            If set, defer to the state variable for resolving unselected nodes, even if the node exists as a database object in the current environment.
            """,
            disable_help="""
            If defer is set, expect standard defer behaviour.
            """,
            default=flags.FAVOR_STATE_MODE,
        )
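add_optional_argument_inverse is dbt's own parser helper; assuming it registers a paired --favor-state / --no-favor-state switch (an assumption, since its definition is not part of this diff), a minimal plain-argparse equivalent of the pattern looks like this:

```python
import argparse

parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group()
group.add_argument("--favor-state", dest="favor_state", action="store_true",
                   help="Favor state nodes when resolving unselected nodes.")
group.add_argument("--no-favor-state", dest="favor_state", action="store_false",
                   help="Keep standard defer behaviour.")
parser.set_defaults(favor_state=False)  # stand-in for flags.FAVOR_STATE_MODE

print(parser.parse_args(["--favor-state"]).favor_state)     # True
print(parser.parse_args(["--no-favor-state"]).favor_state)  # False
```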
def _build_run_subparser(subparsers, base_subparser):
    run_sub = subparsers.add_parser(
        "run",
@@ -1173,6 +1187,8 @@ def parse_args(args, cls=DBTArgumentParser):
    _add_selection_arguments(run_sub, compile_sub, generate_sub, test_sub, snapshot_sub, seed_sub)
    # --defer
    _add_defer_argument(run_sub, test_sub, build_sub, snapshot_sub, compile_sub)
    # --favor-state
    _add_favor_state_argument(run_sub, test_sub, build_sub, snapshot_sub)
    # --full-refresh
    _add_table_mutability_arguments(run_sub, compile_sub, build_sub)
@@ -18,6 +18,7 @@ class NodeType(StrEnum):
    Macro = "macro"
    Exposure = "exposure"
    Metric = "metric"
    Entity = "entity"

    @classmethod
    def executable(cls) -> List["NodeType"]:
@@ -52,11 +53,14 @@ class NodeType(StrEnum):
            cls.Analysis,
            cls.Exposure,
            cls.Metric,
            cls.Entity,
        ]

    def pluralize(self) -> str:
        if self is self.Analysis:
            return "analyses"
        if self is self.Entity:
            return "entities"
        return f"{self}s"
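A stripped-down, standalone version of the pluralize() special-casing added above, using a plain str-mixin Enum instead of dbt's StrEnum and formatting .value explicitly so the sketch behaves the same across Python versions:

```python
from enum import Enum

class NodeTypeSketch(str, Enum):
    Analysis = "analysis"
    Entity = "entity"
    Model = "model"

    def pluralize(self) -> str:
        # Irregular plurals are special-cased; everything else just appends "s".
        if self is NodeTypeSketch.Analysis:
            return "analyses"
        if self is NodeTypeSketch.Entity:
            return "entities"
        return f"{self.value}s"

assert NodeTypeSketch.Entity.pluralize() == "entities"
assert NodeTypeSketch.Analysis.pluralize() == "analyses"
assert NodeTypeSketch.Model.pluralize() == "models"
```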
@@ -169,7 +169,6 @@ class ConfiguredParser(
            resource_type=self.resource_type,
            path=path,
            original_file_path=original_file_path,
            root_path=self.project.project_root,
            package_name=self.project.project_name,
            raw_code=raw_code,
            language=language,
@@ -202,7 +201,6 @@ class ConfiguredParser(
            "database": self.default_database,
            "fqn": fqn,
            "name": name,
            "root_path": self.project.project_root,
            "resource_type": self.resource_type,
            "path": path,
            "original_file_path": block.path.original_file_path,
@@ -347,7 +345,7 @@ class ConfiguredParser(
            )
        else:
            raise InternalException(
                f"Got an unexpected project version={config_version}, " f"expected 2"
                f"Got an unexpected project version={config_version}, expected 2"
            )

    def config_dict(
@@ -32,7 +32,6 @@ class DocumentationParser(Parser[ParsedDocumentation]):
        contents = get_rendered(block.contents, {}).strip()

        doc = ParsedDocumentation(
            root_path=self.project.project_root,
            path=block.file.path.relative_path,
            original_file_path=block.path.original_file_path,
            package_name=self.project.project_name,
Some files were not shown because too many files have changed in this diff.