Mirror of https://github.com/dbt-labs/dbt-core, synced 2025-12-17 19:31:34 +00:00
Compare commits: v1.6.0 ... option_par (44 commits)
Commit SHA1s (44):

586cba243b · 3885024873 · 8232feb616 · 2eb24685bb · 227c2c3f0c · a20b09b1e5
e0f811222e · 7c020278a3 · 1e875fea3e · e150626612 · 48ba14c89f · bfb054082f
78bb854d0a · 7faebbcfc3 · 5372157ac4 · 0d64bd947f · 011f19f07e · 2764fe7d77
de646cc23a · 2191deb01f · 03a52317d6 · 4cfc662cbf · e2d77fff9e · 1f003f5881
435c85ca8f · bdeab91f5d · ed1b23adc7 · 8bad441abd · 0293d24b9a · e726a3cc61
f8a1cea693 · b003e7d08b · 0f9f5dd454 · a00ef23c85 · 8ace3bebdb · 86b4409064
40c3a44dc7 · e44dd76b0a · 8f163fa155 · 8bd5b21e19 · 94e72cb5d2 · 27078124b4
71f253c2cb · 4e0322cc0f
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 1.6.0
+current_version = 1.6.2
 parse = (?P<major>[\d]+) # major version number
     \.(?P<minor>[\d]+) # minor version number
     \.(?P<patch>[\d]+) # patch version number
.changes/1.6.1.md (new file, 27 lines)
@@ -0,0 +1,27 @@
+## dbt-core 1.6.1 - August 23, 2023
+
+### Fixes
+
+- Add status to Parse Inline Error ([#8173](https://github.com/dbt-labs/dbt-core/issues/8173))
+- Fix retry not working with log-file-max-bytes ([#8297](https://github.com/dbt-labs/dbt-core/issues/8297))
+- Detect changes to model access, version, or latest_version in state:modified ([#8189](https://github.com/dbt-labs/dbt-core/issues/8189))
+- fix fqn-selection for external versioned models ([#8374](https://github.com/dbt-labs/dbt-core/issues/8374))
+- Fix: DbtInternalError after model that previously ref'd external model is deleted ([#8375](https://github.com/dbt-labs/dbt-core/issues/8375))
+- Fix using list command with path selector and project-dir ([#8385](https://github.com/dbt-labs/dbt-core/issues/8385))
+- Remedy performance regression by only writing run_results.json once. ([#8360](https://github.com/dbt-labs/dbt-core/issues/8360))
+- Ensure parsing does not break when `window_groupings` is not specified for `non_additive_dimension` ([#8453](https://github.com/dbt-labs/dbt-core/issues/8453))
+
+### Docs
+
+- Display contract and column constraints on the model page ([dbt-docs/#433](https://github.com/dbt-labs/dbt-docs/issues/433))
+- Display semantic model details in docs ([dbt-docs/#431](https://github.com/dbt-labs/dbt-docs/issues/431))
+
+### Under the Hood
+
+- Refactor flaky test pp_versioned_models ([#7781](https://github.com/dbt-labs/dbt-core/issues/7781))
+- format exception from dbtPlugin.initialize ([#8152](https://github.com/dbt-labs/dbt-core/issues/8152))
+- Update manifest v10 ([#8333](https://github.com/dbt-labs/dbt-core/issues/8333))
+- add tracking for plugin.get_nodes calls ([#8344](https://github.com/dbt-labs/dbt-core/issues/8344))
+- add internal flag: --no-partial-parse-file-diff to inform whether to compute a file diff during partial parsing ([#8363](https://github.com/dbt-labs/dbt-core/issues/8363))
+- Use python version 3.10.7 in Docker image. ([#8444](https://github.com/dbt-labs/dbt-core/issues/8444))
+- Check for existing_relation immediately prior to renaming ([#7781](https://github.com/dbt-labs/dbt-core/issues/7781))
.changes/1.6.2.md (new file, 20 lines)
@@ -0,0 +1,20 @@
+## dbt-core 1.6.2 - September 07, 2023
+
+### Breaking Changes
+
+- Removed the FirstRunResultError and AfterFirstRunResultError event types, using the existing RunResultError in their place. ([#7963](https://github.com/dbt-labs/dbt-core/issues/7963))
+
+### Features
+
+- Accept a `dbt-cloud` config in dbt_project.yml ([#8438](https://github.com/dbt-labs/dbt-core/issues/8438))
+
+### Fixes
+
+- Copy dir during `dbt deps` if symlink fails ([#7428](https://github.com/dbt-labs/dbt-core/issues/7428), [#8223](https://github.com/dbt-labs/dbt-core/issues/8223))
+- fix ambiguous reference error for tests and versions when model name is duplicated across packages ([#8327](https://github.com/dbt-labs/dbt-core/issues/8327), [#8493](https://github.com/dbt-labs/dbt-core/issues/8493))
+- Fix "Internal Error: Expected node <unique-id> not found in manifest" when depends_on set on ModelNodeArgs ([#8506](https://github.com/dbt-labs/dbt-core/issues/8506))
+- Fix snapshot success message ([#7583](https://github.com/dbt-labs/dbt-core/issues/7583))
+- Parse the correct schema version from manifest ([#8544](https://github.com/dbt-labs/dbt-core/issues/8544))
+
+### Contributors
+- [@anjutiwari](https://github.com/anjutiwari) ([#7428](https://github.com/dbt-labs/dbt-core/issues/7428), [#8223](https://github.com/dbt-labs/dbt-core/issues/8223))
.changes/unreleased/Features-20230823-140407.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Features
+body: Add --no-inject-ephemeral-ctes flag for `compile` command, for usage by linting.
+time: 2023-08-23T14:04:07.617476-04:00
+custom:
+  Author: benmosher
+  Issue: "8480"
.changes/unreleased/Fixes-20230803-093502.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Add explicit support for integers for the show command
+time: 2023-08-03T09:35:02.163968-05:00
+custom:
+  Author: dave-connors-3
+  Issue: "8153"
.changes/unreleased/Fixes-20230906-142213.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: make version comparison insensitive to order
+time: 2023-09-06T14:22:13.114549-04:00
+custom:
+  Author: michelleark
+  Issue: "8571"
.changes/unreleased/Under the Hood-20230913-141651.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Fix test_numeric_values to look for more specific strings
+time: 2023-09-13T14:16:51.453247-04:00
+custom:
+  Author: gshank
+  Issue: "8470"
.github/workflows/main.yml (vendored, 74 lines changed)
@@ -33,6 +33,11 @@ defaults:
   run:
     shell: bash
 
+# top-level adjustments can be made here
+env:
+  # number of parallel processes to spawn for python integration testing
+  PYTHON_INTEGRATION_TEST_WORKERS: 5
+
 jobs:
   code-quality:
     name: code-quality
@@ -107,23 +112,55 @@ jobs:
           name: unit_results_${{ matrix.python-version }}-${{ steps.date.outputs.date }}.csv
           path: unit_results.csv
 
+  integration-metadata:
+    name: integration test metadata generation
+    runs-on: ubuntu-latest
+    outputs:
+      split-groups: ${{ steps.generate-split-groups.outputs.split-groups }}
+      include: ${{ steps.generate-include.outputs.include }}
+
+    steps:
+      - name: generate split-groups
+        id: generate-split-groups
+        run: |
+          MATRIX_JSON="["
+          for B in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
+            MATRIX_JSON+=$(sed 's/^/"/;s/$/"/' <<< "${B}")
+          done
+          MATRIX_JSON="${MATRIX_JSON//\"\"/\", \"}"
+          MATRIX_JSON+="]"
+          echo "split-groups=${MATRIX_JSON}"
+          echo "split-groups=${MATRIX_JSON}" >> $GITHUB_OUTPUT
+
+      - name: generate include
+        id: generate-include
+        run: |
+          INCLUDE=('"python-version":"3.8","os":"windows-latest"' '"python-version":"3.8","os":"macos-latest"' )
+          INCLUDE_GROUPS="["
+          for include in ${INCLUDE[@]}; do
+            for group in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
+              INCLUDE_GROUPS+=$(sed 's/$/, /' <<< "{\"split-group\":\"${group}\",${include}}")
+            done
+          done
+          INCLUDE_GROUPS=$(echo $INCLUDE_GROUPS | sed 's/,*$//g')
+          INCLUDE_GROUPS+="]"
+          echo "include=${INCLUDE_GROUPS}"
+          echo "include=${INCLUDE_GROUPS}" >> $GITHUB_OUTPUT
+
   integration:
-    name: integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}
+    name: (${{ matrix.split-group }}) integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}
 
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 45
-
+    timeout-minutes: 30
+    needs:
+      - integration-metadata
     strategy:
       fail-fast: false
       matrix:
        python-version: ["3.8", "3.9", "3.10", "3.11"]
        os: [ubuntu-20.04]
-        include:
-          - python-version: 3.8
-            os: windows-latest
-          - python-version: 3.8
-            os: macos-latest
+        split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
+        include: ${{ fromJson(needs.integration-metadata.outputs.include) }}
     env:
       TOXENV: integration
       PYTEST_ADDOPTS: "-v --color=yes -n4 --csv integration_results.csv"
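For reference, the two `run:` scripts above only assemble JSON strings for GitHub's `fromJson()`. A minimal Python sketch (not part of the diff) of what they compute with the default of five workers:

```python
# Sketch of the bash above: build the split-groups list and the include
# matrix that fromJson() consumes in the job strategy.
import json

workers = 5
split_groups = [str(g) for g in range(1, workers + 1)]
print(json.dumps(split_groups))  # ["1", "2", "3", "4", "5"]

extra_platforms = [
    {"python-version": "3.8", "os": "windows-latest"},
    {"python-version": "3.8", "os": "macos-latest"},
]
include = [
    {"split-group": str(g), **platform}
    for platform in extra_platforms
    for g in range(1, workers + 1)
]
print(json.dumps(include))  # 10 entries, one per platform/group pair
```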
@@ -167,6 +204,8 @@ jobs:
 
       - name: Run tests
         run: tox -- --ddtrace
+        env:
+          PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}
 
       - name: Get current date
         if: always()
@@ -187,6 +226,23 @@ jobs:
           name: integration_results_${{ matrix.python-version }}_${{ matrix.os }}_${{ steps.date.outputs.date }}.csv
           path: integration_results.csv
 
+  integration-report:
+    if: ${{ always() }}
+    name: Integration Test Suite
+    runs-on: ubuntu-latest
+    needs: integration
+    steps:
+      - name: "Integration Tests Failed"
+        if: ${{ contains(needs.integration.result, 'failure') || contains(needs.integration.result, 'cancelled') }}
+        # when this is true the next step won't execute
+        run: |
+          echo "::notice title='Integration test suite failed'"
+          exit 1
+
+      - name: "Integration Tests Passed"
+        run: |
+          echo "::notice title='Integration test suite passed'"
+
   build:
     name: build packages
@@ -18,11 +18,41 @@ on:
 
 permissions: read-all
 
+# top-level adjustments can be made here
+env:
+  # number of parallel processes to spawn for python testing
+  PYTHON_INTEGRATION_TEST_WORKERS: 5
+
 jobs:
+  integration-metadata:
+    name: integration test metadata generation
+    runs-on: ubuntu-latest
+    outputs:
+      split-groups: ${{ steps.generate-split-groups.outputs.split-groups }}
+
+    steps:
+      - name: generate split-groups
+        id: generate-split-groups
+        run: |
+          MATRIX_JSON="["
+          for B in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
+            MATRIX_JSON+=$(sed 's/^/"/;s/$/"/' <<< "${B}")
+          done
+          MATRIX_JSON="${MATRIX_JSON//\"\"/\", \"}"
+          MATRIX_JSON+="]"
+          echo "split-groups=${MATRIX_JSON}" >> $GITHUB_OUTPUT
+
   # run the performance measurements on the current or default branch
   test-schema:
     name: Test Log Schema
     runs-on: ubuntu-20.04
     timeout-minutes: 30
+    needs:
+      - integration-metadata
+    strategy:
+      fail-fast: false
+      matrix:
+        split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
     env:
       # turns warnings into errors
       RUSTFLAGS: "-D warnings"
@@ -65,3 +95,14 @@ jobs:
       # we actually care if these pass, because the normal test run doesn't usually include many json log outputs
       - name: Run integration tests
         run: tox -e integration -- -nauto
+        env:
+          PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}
+
+  test-schema-report:
+    name: Log Schema Test Suite
+    runs-on: ubuntu-latest
+    needs: test-schema
+    steps:
+      - name: "[Notification] Log test suite passes"
+        run: |
+          echo "::notice title="Log test suite passes""
CHANGELOG.md (51 lines changed)
@@ -5,6 +5,56 @@
 - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version.
 - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry)
 
+## dbt-core 1.6.2 - September 07, 2023
+
+### Breaking Changes
+
+- Removed the FirstRunResultError and AfterFirstRunResultError event types, using the existing RunResultError in their place. ([#7963](https://github.com/dbt-labs/dbt-core/issues/7963))
+
+### Features
+
+- Accept a `dbt-cloud` config in dbt_project.yml ([#8438](https://github.com/dbt-labs/dbt-core/issues/8438))
+
+### Fixes
+
+- Copy dir during `dbt deps` if symlink fails ([#7428](https://github.com/dbt-labs/dbt-core/issues/7428), [#8223](https://github.com/dbt-labs/dbt-core/issues/8223))
+- fix ambiguous reference error for tests and versions when model name is duplicated across packages ([#8327](https://github.com/dbt-labs/dbt-core/issues/8327), [#8493](https://github.com/dbt-labs/dbt-core/issues/8493))
+- Fix "Internal Error: Expected node <unique-id> not found in manifest" when depends_on set on ModelNodeArgs ([#8506](https://github.com/dbt-labs/dbt-core/issues/8506))
+- Fix snapshot success message ([#7583](https://github.com/dbt-labs/dbt-core/issues/7583))
+- Parse the correct schema version from manifest ([#8544](https://github.com/dbt-labs/dbt-core/issues/8544))
+
+### Contributors
+- [@anjutiwari](https://github.com/anjutiwari) ([#7428](https://github.com/dbt-labs/dbt-core/issues/7428), [#8223](https://github.com/dbt-labs/dbt-core/issues/8223))
+
+
+## dbt-core 1.6.1 - August 23, 2023
+
+### Fixes
+
+- Add status to Parse Inline Error ([#8173](https://github.com/dbt-labs/dbt-core/issues/8173))
+- Fix retry not working with log-file-max-bytes ([#8297](https://github.com/dbt-labs/dbt-core/issues/8297))
+- Detect changes to model access, version, or latest_version in state:modified ([#8189](https://github.com/dbt-labs/dbt-core/issues/8189))
+- fix fqn-selection for external versioned models ([#8374](https://github.com/dbt-labs/dbt-core/issues/8374))
+- Fix: DbtInternalError after model that previously ref'd external model is deleted ([#8375](https://github.com/dbt-labs/dbt-core/issues/8375))
+- Fix using list command with path selector and project-dir ([#8385](https://github.com/dbt-labs/dbt-core/issues/8385))
+- Remedy performance regression by only writing run_results.json once. ([#8360](https://github.com/dbt-labs/dbt-core/issues/8360))
+- Ensure parsing does not break when `window_groupings` is not specified for `non_additive_dimension` ([#8453](https://github.com/dbt-labs/dbt-core/issues/8453))
+
+### Docs
+
+- Display contract and column constraints on the model page ([dbt-docs/#433](https://github.com/dbt-labs/dbt-docs/issues/433))
+- Display semantic model details in docs ([dbt-docs/#431](https://github.com/dbt-labs/dbt-docs/issues/431))
+
+### Under the Hood
+
+- Refactor flaky test pp_versioned_models ([#7781](https://github.com/dbt-labs/dbt-core/issues/7781))
+- format exception from dbtPlugin.initialize ([#8152](https://github.com/dbt-labs/dbt-core/issues/8152))
+- Update manifest v10 ([#8333](https://github.com/dbt-labs/dbt-core/issues/8333))
+- add tracking for plugin.get_nodes calls ([#8344](https://github.com/dbt-labs/dbt-core/issues/8344))
+- add internal flag: --no-partial-parse-file-diff to inform whether to compute a file diff during partial parsing ([#8363](https://github.com/dbt-labs/dbt-core/issues/8363))
+- Use python version 3.10.7 in Docker image. ([#8444](https://github.com/dbt-labs/dbt-core/issues/8444))
+- Check for existing_relation immediately prior to renaming ([#7781](https://github.com/dbt-labs/dbt-core/issues/7781))
+
 ## dbt-core 1.6.0 - July 31, 2023
 
 ### Breaking Changes
@@ -210,7 +260,6 @@
 - [@trouze](https://github.com/trouze) ([#7564](https://github.com/dbt-labs/dbt-core/issues/7564))
 - [@willbryant](https://github.com/willbryant) ([#7350](https://github.com/dbt-labs/dbt-core/issues/7350))
 
-
 ## Previous Releases
 
 For information on prior major and minor releases, see their changelogs:
@@ -61,7 +61,6 @@ def args_to_context(args: List[str]) -> Context:
     if len(args) == 1 and "," in args[0]:
         args = args[0].split(",")
     sub_command_name, sub_command, args = cli.resolve_command(cli_ctx, args)
-
     # Handle source and docs group.
     if isinstance(sub_command, Group):
         sub_command_name, sub_command, args = sub_command.resolve_command(cli_ctx, args)
@@ -319,7 +318,6 @@ def command_params(command: CliCommand, args_dict: Dict[str, Any]) -> CommandParams:
 
     for k, v in args_dict.items():
         k = k.lower()
-
         # if a "which" value exists in the args dict, it should match the command provided
         if k == WHICH_KEY:
             if v != command.value:
@@ -344,7 +342,8 @@ def command_params(command: CliCommand, args_dict: Dict[str, Any]) -> CommandParams:
 
         if k == "macro" and command == CliCommand.RUN_OPERATION:
             add_fn(v)
-        elif v in (None, False):
+        # None is a Singleton, False is a Flyweight, only one instance of each.
+        elif v is None or v is False:
             add_fn(f"--no-{spinal_cased}")
         elif v is True:
             add_fn(f"--{spinal_cased}")
@@ -141,6 +141,7 @@ class dbtRunner:
 @p.macro_debugging
 @p.partial_parse
 @p.partial_parse_file_path
+@p.partial_parse_file_diff
 @p.populate_cache
 @p.print
 @p.printer_width
@@ -329,6 +330,7 @@ def docs_serve(ctx, **kwargs):
 @p.state
 @p.defer_state
 @p.deprecated_state
+@p.compile_inject_ephemeral_ctes
 @p.target
 @p.target_path
 @p.threads
@@ -40,6 +40,14 @@ compile_docs = click.option(
     default=True,
 )
 
+compile_inject_ephemeral_ctes = click.option(
+    "--inject-ephemeral-ctes/--no-inject-ephemeral-ctes",
+    envvar=None,
+    help="Internal flag controlling injection of referenced ephemeral models' CTEs during `compile`.",
+    hidden=True,
+    default=True,
+)
+
 config_dir = click.option(
     "--config-dir",
     envvar=None,
@@ -257,6 +265,14 @@ partial_parse_file_path = click.option(
     type=click.Path(exists=True, dir_okay=False, resolve_path=True),
 )
 
+partial_parse_file_diff = click.option(
+    "--partial-parse-file-diff/--no-partial-parse-file-diff",
+    envvar="DBT_PARTIAL_PARSE_FILE_DIFF",
+    help="Internal flag for whether to compute a file diff during partial parsing.",
+    hidden=True,
+    default=True,
+)
+
 populate_cache = click.option(
     "--populate-cache/--no-populate-cache",
     envvar="DBT_POPULATE_CACHE",
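Both options above are hidden boolean flags, so they only appear when passed explicitly. A hedged sketch of exercising them through dbt's programmatic runner; the project path is a placeholder, and this assumes `--no-partial-parse-file-diff` is accepted at the top-level group (where the decorator was added) while `--no-inject-ephemeral-ctes` belongs to `compile`:

```python
# Minimal sketch, assuming dbt-core 1.6.x is installed.
from dbt.cli.main import dbtRunner

res = dbtRunner().invoke(
    [
        "--no-partial-parse-file-diff",  # skip the file diff during partial parsing
        "compile",
        "--project-dir", "/path/to/project",  # placeholder path
        "--no-inject-ephemeral-ctes",    # leave ephemeral models' CTEs uninjected (linting use case)
    ]
)
print(res.success)
```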
@@ -9,10 +9,23 @@ from typing import Iterable, List, Dict, Union, Optional, Any
 
 from dbt.exceptions import DbtRuntimeError
 
 
 BOM = BOM_UTF8.decode("utf-8")  # '\ufeff'
 
 
+class Integer(agate.data_types.DataType):
+    def cast(self, d):
+        # by default agate will cast none as a Number
+        # but we need to cast it as an Integer to preserve
+        # the type when merging and unioning tables
+        if type(d) == int or d is None:
+            return d
+        else:
+            raise agate.exceptions.CastError('Can not parse value "%s" as Integer.' % d)
+
+    def jsonify(self, d):
+        return d
+
+
 class Number(agate.data_types.Number):
     # undo the change in https://github.com/wireservice/agate/pull/733
     # i.e. do not cast True and False to numeric 1 and 0
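A small usage sketch (not from the diff, assuming the class lands in `dbt.clients.agate_helper` as the surrounding hunks suggest): genuine ints and None pass through the new type, anything else raises.

```python
import agate
from dbt.clients.agate_helper import Integer

t = Integer(null_values=("null", ""))
print(t.cast(42))    # 42
print(t.cast(None))  # None preserved as-is, not coerced to a Number
try:
    t.cast("3.14")
except agate.exceptions.CastError as e:
    print(e)         # Can not parse value "3.14" as Integer.
```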
@@ -48,6 +61,7 @@ def build_type_tester(
 ) -> agate.TypeTester:
 
     types = [
+        Integer(null_values=("null", "")),
         Number(null_values=("null", "")),
         agate.data_types.Date(null_values=("null", ""), date_format="%Y-%m-%d"),
         agate.data_types.DateTime(null_values=("null", ""), datetime_format="%Y-%m-%d %H:%M:%S"),
@@ -166,6 +180,13 @@ class ColumnTypeBuilder(Dict[str, NullableAgateType]):
         elif isinstance(value, _NullMarker):
             # use the existing value
             return
+        # when one table column is Number while another is Integer, force the column to Number on merge
+        elif isinstance(value, Integer) and isinstance(existing_type, agate.data_types.Number):
+            # use the existing value
+            return
+        elif isinstance(existing_type, Integer) and isinstance(value, agate.data_types.Number):
+            # overwrite
+            super().__setitem__(key, value)
         elif not isinstance(value, type(existing_type)):
             # actual type mismatch!
             raise DbtRuntimeError(
@@ -177,8 +198,9 @@ class ColumnTypeBuilder(Dict[str, NullableAgateType]):
         result: Dict[str, agate.data_types.DataType] = {}
         for key, value in self.items():
             if isinstance(value, _NullMarker):
-                # this is what agate would do.
-                result[key] = agate.data_types.Number()
+                # agate would make it a Number but we'll make it Integer so that if this column
+                # gets merged with another Integer column, it won't get forced to a Number
+                result[key] = Integer()
             else:
                 result[key] = value
         return result
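A hypothetical illustration of the merge rules in these two hunks, assuming `ColumnTypeBuilder`, `Integer`, and `Number` are all importable from `dbt.clients.agate_helper`: Integer yields to Number when the two meet, so unioned tables settle on the wider type.

```python
from dbt.clients.agate_helper import ColumnTypeBuilder, Integer, Number

builder = ColumnTypeBuilder()
builder["id"] = Integer(null_values=("null", ""))  # first table: integer column
builder["id"] = Number(null_values=("null", ""))   # second table: Integer + Number -> Number
print(type(builder["id"]).__name__)                # Number
```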
@@ -320,6 +320,10 @@ class Compiler:
         if model.compiled_code is None:
             raise DbtRuntimeError("Cannot inject ctes into an uncompiled node", model)
 
+        # tech debt: safe flag/arg access (#6259)
+        if not getattr(self.config.args, "inject_ephemeral_ctes", True):
+            return (model, [])
+
         # extra_ctes_injected flag says that we've already recursively injected the ctes
         if model.extra_ctes_injected:
             return (model, model.extra_ctes)
@@ -428,6 +428,7 @@ class PartialProject(RenderComponents):
     metrics: Dict[str, Any]
     exposures: Dict[str, Any]
     vars_value: VarProvider
+    dbt_cloud: Dict[str, Any]
 
     dispatch = cfg.dispatch
     models = cfg.models
@@ -459,6 +460,8 @@ class PartialProject(RenderComponents):
             manifest_selectors = SelectorDict.parse_from_selectors_list(
                 rendered.selectors_dict["selectors"]
             )
+        dbt_cloud = cfg.dbt_cloud
+
         project = Project(
             project_name=name,
             version=version,
@@ -498,6 +501,7 @@ class PartialProject(RenderComponents):
             unrendered=unrendered,
             project_env_vars=project_env_vars,
             restrict_access=cfg.restrict_access,
+            dbt_cloud=dbt_cloud,
         )
         # sanity check - this means an internal issue
         project.validate()
@@ -609,6 +613,7 @@ class Project:
     unrendered: RenderComponents
     project_env_vars: Dict[str, Any]
     restrict_access: bool
+    dbt_cloud: Dict[str, Any]
 
     @property
     def all_source_paths(self) -> List[str]:
@@ -678,6 +683,7 @@ class Project:
                 "require-dbt-version": [v.to_version_string() for v in self.dbt_version],
                 "config-version": self.config_version,
                 "restrict-access": self.restrict_access,
+                "dbt-cloud": self.dbt_cloud,
             }
         )
         if self.query_comment:
@@ -182,6 +182,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
             args=args,
             cli_vars=cli_vars,
             dependencies=dependencies,
+            dbt_cloud=project.dbt_cloud,
         )
 
     # Called by 'load_projects' in this class
@@ -1510,7 +1510,15 @@ def get_manifest_schema_version(dct: dict) -> int:
     schema_version = dct.get("metadata", {}).get("dbt_schema_version", None)
     if not schema_version:
         raise ValueError("Manifest doesn't have schema version")
-    return int(schema_version.split(".")[-2][-1])
+
+    # schema_version is in this format: https://schemas.getdbt.com/dbt/manifest/v10.json
+    # What the code below is doing:
+    # 1. Split on "/" – v10.json
+    # 2. Split on "." – v10
+    # 3. Skip first character – 10
+    # 4. Convert to int
+    # TODO: If this gets more complicated, turn into a regex
+    return int(schema_version.split("/")[-1].split(".")[0][1:])
 
 
 def _check_duplicates(value: BaseNode, src: Mapping[str, BaseNode]):
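A quick worked example (not in the diff) of why the old parsing broke once schema versions reached two digits, using the real schema URL format:

```python
url = "https://schemas.getdbt.com/dbt/manifest/v10.json"

# old: take the second-to-last "."-field ("com/dbt/manifest/v10"),
# then its last character -> "0", silently dropping the leading "1"
old = int(url.split(".")[-2][-1])                 # 0

# new: isolate "v10.json", strip the extension, skip the leading "v"
new = int(url.split("/")[-1].split(".")[0][1:])   # 10

print(old, new)  # 0 10
```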
@@ -29,3 +29,11 @@ class ModelNodeArgs:
             unique_id = f"{unique_id}.v{self.version}"
 
         return unique_id
+
+    @property
+    def fqn(self) -> List[str]:
+        fqn = [self.package_name, self.name]
+        if self.version:
+            fqn.append(f"v{self.version}")
+
+        return fqn
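A short sketch of what the new `fqn` property yields for an external versioned model; the field names assume the 1.6 `ModelNodeArgs` dataclass takes `name`, `package_name`, `identifier`, and `schema` as its required fields, and the values are invented:

```python
from dbt.plugins.manifest import ModelNodeArgs

args = ModelNodeArgs(
    name="orders",
    package_name="upstream_project",
    identifier="orders_v2",
    schema="analytics",
    version=2,
)
print(args.fqn)  # ['upstream_project', 'orders', 'v2'] -- version suffix included
```

This is what lets fqn-based selectors match external versioned models instead of the bare two-part name.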
@@ -590,7 +590,7 @@ class ModelNode(CompiledNode):
             name=args.name,
             package_name=args.package_name,
             unique_id=unique_id,
-            fqn=[args.package_name, args.name],
+            fqn=args.fqn,
             version=args.version,
             latest_version=args.latest_version,
             relation_name=args.relation_name,
@@ -626,6 +626,18 @@ class ModelNode(CompiledNode):
     def materialization_enforces_constraints(self) -> bool:
         return self.config.materialized in ["table", "incremental"]
 
+    def same_contents(self, old, adapter_type) -> bool:
+        return super().same_contents(old, adapter_type) and self.same_ref_representation(old)
+
+    def same_ref_representation(self, old) -> bool:
+        return (
+            # Changing the latest_version may break downstream unpinned refs
+            self.latest_version == old.latest_version
+            # Changes to access or deprecation_date may lead to ref-related parsing errors
+            and self.access == old.access
+            and self.deprecation_date == old.deprecation_date
+        )
+
     def build_contract_checksum(self):
         # We don't need to construct the checksum if the model does not
         # have contract enforced, because it won't be used.
@@ -163,14 +163,9 @@ class UnparsedVersion(dbtClassMixin):
 
     def __lt__(self, other):
         try:
-            v = type(other.v)(self.v)
-            return v < other.v
+            return float(self.v) < float(other.v)
         except ValueError:
-            try:
-                other_v = type(self.v)(other.v)
-                return self.v < other_v
-            except ValueError:
-                return str(self.v) < str(other.v)
+            return str(self.v) < str(other.v)
 
     @property
     def include_exclude(self) -> dbt.helper_types.IncludeExclude:
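A worked example (values invented) of the order sensitivity this fix removes: coercing `self.v` to the *other* operand's type means an int-vs-str pair compares as strings one way and as ints the other way, while casting both sides to float gives a consistent answer.

```python
a, b = 2, "10"

# old behaviour, written out inline
print(type(b)(a) < b)   # str(2) < "10"  -> False (lexicographic: "2" > "1")
print(type(a)(b) < a)   # int("10") < 2 -> False; both directions say False!

# new behaviour: numeric comparison, insensitive to operand order
print(float(a) < float(b))  # True
print(float(b) < float(a))  # False
```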
@@ -689,7 +684,7 @@ class UnparsedEntity(dbtClassMixin):
 class UnparsedNonAdditiveDimension(dbtClassMixin):
     name: str
     window_choice: str  # AggregationType enum
-    window_groupings: List[str]
+    window_groupings: List[str] = field(default_factory=list)
 
 
 @dataclass
@@ -224,6 +224,7 @@ class Project(HyphenatedDbtClassMixin, Replaceable):
     packages: List[PackageSpec] = field(default_factory=list)
     query_comment: Optional[Union[QueryComment, NoValue, str]] = field(default_factory=NoValue)
     restrict_access: bool = False
+    dbt_cloud: Optional[Dict[str, Any]] = None
 
     @classmethod
     def validate(cls, data):
@@ -240,6 +241,10 @@ class Project(HyphenatedDbtClassMixin, Replaceable):
                 or not isinstance(entry["search_order"], list)
             ):
                 raise ValidationError(f"Invalid project dispatch config: {entry}")
+        if "dbt_cloud" in data and not isinstance(data["dbt_cloud"], dict):
+            raise ValidationError(
+                f"Invalid dbt_cloud config. Expected a 'dict' but got '{type(data['dbt_cloud'])}'"
+            )
 
 
 @dataclass
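A standalone sketch of the check just added (it mirrors the new lines; the sample configs are made up): any `dbt_cloud` entry must parse to a mapping, so a bare string is rejected.

```python
def check_dbt_cloud(data: dict) -> None:
    # same shape as the validation added to Project.validate above
    if "dbt_cloud" in data and not isinstance(data["dbt_cloud"], dict):
        raise ValueError(
            f"Invalid dbt_cloud config. Expected a 'dict' but got '{type(data['dbt_cloud'])}'"
        )

check_dbt_cloud({"dbt_cloud": {"project-id": "12345"}})  # passes
check_dbt_cloud({"dbt_cloud": "12345"})                  # raises ValueError
```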
@@ -51,19 +51,15 @@ class LocalPinnedPackage(LocalPackageMixin, PinnedPackage):
         src_path = self.resolve_path(project)
         dest_path = self.get_installation_path(project, renderer)
 
-        can_create_symlink = system.supports_symlinks()
-
         if system.path_exists(dest_path):
             if not system.path_is_symlink(dest_path):
                 system.rmdir(dest_path)
             else:
                 system.remove_file(dest_path)
 
-        if can_create_symlink:
+        try:
             fire_event(DepsCreatingLocalSymlink())
             system.make_symlink(src_path, dest_path)
-
-        else:
+        except OSError:
             fire_event(DepsSymlinkNotAvailable())
             shutil.copytree(src_path, dest_path)
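The same try/fall-back pattern, sketched outside dbt for clarity (paths are placeholders): attempt the symlink and copy the tree when the OS refuses, e.g. on Windows without symlink privileges, rather than probing for support up front.

```python
import os
import shutil

def link_or_copy(src: str, dest: str) -> None:
    try:
        os.symlink(src, dest)       # may raise OSError where symlinks are unavailable
    except OSError:
        shutil.copytree(src, dest)  # fall back to a real copy of the package dir

link_or_copy("local_dep", "dbt_packages/local_dep")
```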
@@ -8,7 +8,7 @@ import logging
 from logging.handlers import RotatingFileHandler
 import threading
 import traceback
-from typing import Any, Callable, List, Optional, TextIO
+from typing import Any, Callable, List, Optional, TextIO, Protocol
 from uuid import uuid4
 
 from dbt.events.format import timestamp_to_datetime_string
@@ -206,7 +206,7 @@ class EventManager:
         for callback in self.callbacks:
             callback(msg)
 
-    def add_logger(self, config: LoggerConfig):
+    def add_logger(self, config: LoggerConfig) -> None:
         logger = (
             _JsonLogger(self, config)
             if config.line_format == LineFormat.Json
@@ -218,3 +218,25 @@ class EventManager:
     def flush(self):
         for logger in self.loggers:
             logger.flush()
+
+
+class IEventManager(Protocol):
+    callbacks: List[Callable[[EventMsg], None]]
+    invocation_id: str
+
+    def fire_event(self, e: BaseEvent, level: Optional[EventLevel] = None) -> None:
+        ...
+
+    def add_logger(self, config: LoggerConfig) -> None:
+        ...
+
+
+class TestEventManager(IEventManager):
+    def __init__(self):
+        self.event_history = []
+
+    def fire_event(self, e: BaseEvent, level: Optional[EventLevel] = None) -> None:
+        self.event_history.append((e, level))
+
+    def add_logger(self, config: LoggerConfig) -> None:
+        raise NotImplementedError()
@@ -1,6 +1,6 @@
 from dbt.constants import METADATA_ENV_PREFIX
 from dbt.events.base_types import BaseEvent, EventLevel, EventMsg
-from dbt.events.eventmgr import EventManager, LoggerConfig, LineFormat, NoFilter
+from dbt.events.eventmgr import EventManager, LoggerConfig, LineFormat, NoFilter, IEventManager
 from dbt.events.helpers import env_secrets, scrub_secrets
 from dbt.events.types import Formatting, Note
 from dbt.flags import get_flags, ENABLE_LEGACY_LOGGER
@@ -182,7 +182,7 @@ def cleanup_event_logger():
 # Since dbt-rpc does not do its own log setup, and since some events can
 # currently fire before logs can be configured by setup_event_logger(), we
 # create a default configuration with default settings and no file output.
-EVENT_MANAGER: EventManager = EventManager()
+EVENT_MANAGER: IEventManager = EventManager()
 EVENT_MANAGER.add_logger(
     _get_logbook_log_config(False, True, False, False)  # type: ignore
     if ENABLE_LEGACY_LOGGER
@@ -295,3 +295,8 @@ def set_invocation_id() -> None:
     # This is primarily for setting the invocation_id for separate
     # commands in the dbt servers. It shouldn't be necessary for the CLI.
     EVENT_MANAGER.invocation_id = str(uuid.uuid4())
+
+
+def ctx_set_event_manager(event_manager: IEventManager):
+    global EVENT_MANAGER
+    EVENT_MANAGER = event_manager
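A hedged sketch of what the new `IEventManager` Protocol plus `ctx_set_event_manager` enable in tests: swap in the capturing `TestEventManager`, fire an event through the normal module-level path, and inspect the history. The `Note` event is assumed here because the diff itself imports it.

```python
from dbt.events.eventmgr import TestEventManager
from dbt.events.functions import ctx_set_event_manager, fire_event
from dbt.events.types import Note

capture = TestEventManager()
ctx_set_event_manager(capture)          # rebinds the module-level EVENT_MANAGER

fire_event(Note(msg="hello from a test"))
print(capture.event_history)            # [(Note(...), None)]
```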
@@ -1650,6 +1650,7 @@ message LogSnapshotResult {
     int32 total = 5;
     float execution_time = 6;
     map<string, string> cfg = 7;
+    string result_message = 8;
 }
 
 message LogSnapshotResultMsg {
@@ -2245,25 +2246,7 @@ message CheckNodeTestFailureMsg {
   CheckNodeTestFailure data = 2;
 }
 
-// Z028
-message FirstRunResultError {
-  string msg = 1;
-}
-
-message FirstRunResultErrorMsg {
-  EventInfo info = 1;
-  FirstRunResultError data = 2;
-}
-
-// Z029
-message AfterFirstRunResultError {
-  string msg = 1;
-}
-
-message AfterFirstRunResultErrorMsg {
-  EventInfo info = 1;
-  AfterFirstRunResultError data = 2;
-}
+// Skipped Z028, Z029
 
 // Z030
 message EndOfRunSummary {
@@ -1614,7 +1614,7 @@ class LogSnapshotResult(DynamicLevel):
             status = red(self.status.upper())
         else:
             info = "OK snapshotted"
-            status = green(self.status)
+            status = green(self.result_message)
 
         msg = "{info} {description}".format(info=info, description=self.description, **self.cfg)
         return format_fancy_output_line(
@@ -2171,25 +2171,7 @@ class CheckNodeTestFailure(InfoLevel):
         return f" See test failures:\n {border}\n {msg}\n {border}"
 
 
-# FirstRunResultError and AfterFirstRunResultError are just splitting the message from the result
-# object into multiple log lines
-# TODO: is this reallly needed? See printer.py
-
-
-class FirstRunResultError(ErrorLevel):
-    def code(self):
-        return "Z028"
-
-    def message(self) -> str:
-        return yellow(self.msg)
-
-
-class AfterFirstRunResultError(ErrorLevel):
-    def code(self):
-        return "Z029"
-
-    def message(self) -> str:
-        return self.msg
+# Skipped Z028, Z029
 
 
 class EndOfRunSummary(InfoLevel):
File diff suppressed because one or more lines are too long
@@ -33,7 +33,12 @@
 
   -- cleanup
   {% if existing_relation is not none %}
-     {{ adapter.rename_relation(existing_relation, backup_relation) }}
+     /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped
+        since the variable was first set. */
+     {% set existing_relation = load_cached_relation(existing_relation) %}
+     {% if existing_relation is not none %}
+         {{ adapter.rename_relation(existing_relation, backup_relation) }}
+     {% endif %}
   {% endif %}
 
   {{ adapter.rename_relation(intermediate_relation, target_relation) }}
@@ -45,7 +45,12 @@
   -- cleanup
   -- move the existing view out of the way
   {% if existing_relation is not none %}
-      {{ adapter.rename_relation(existing_relation, backup_relation) }}
+      /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped
+         since the variable was first set. */
+      {% set existing_relation = load_cached_relation(existing_relation) %}
+      {% if existing_relation is not none %}
+          {{ adapter.rename_relation(existing_relation, backup_relation) }}
+      {% endif %}
   {% endif %}
   {{ adapter.rename_relation(intermediate_relation, target_relation) }}
File diff suppressed because one or more lines are too long
@@ -4,8 +4,8 @@ from dbt.dataclass_schema import StrEnum
 
 
 class AccessType(StrEnum):
-    Protected = "protected"
     Private = "private"
+    Protected = "protected"
     Public = "public"
 
     @classmethod
@@ -122,7 +122,7 @@ from dbt.parser.sources import SourcePatcher
 from dbt.version import __version__
 
 from dbt.dataclass_schema import StrEnum, dbtClassMixin
-from dbt.plugins import get_plugin_manager
+from dbt import plugins
 
 from dbt_semantic_interfaces.enum_extension import assert_values_exhausted
 from dbt_semantic_interfaces.type_enums import MetricType
@@ -284,8 +284,17 @@ class ManifestLoader:
             adapter.clear_macro_manifest()
         macro_hook = adapter.connections.set_query_header
 
+        flags = get_flags()
+        if not flags.PARTIAL_PARSE_FILE_DIFF:
+            file_diff = FileDiff.from_dict(
+                {
+                    "deleted": [],
+                    "changed": [],
+                    "added": [],
+                }
+            )
         # Hack to test file_diffs
-        if os.environ.get("DBT_PP_FILE_DIFF_TEST"):
+        elif os.environ.get("DBT_PP_FILE_DIFF_TEST"):
             file_diff_path = "file_diff.json"
             if path_exists(file_diff_path):
                 file_diff_dct = read_json(file_diff_path)
@@ -503,6 +512,7 @@ class ManifestLoader:
             self.manifest.selectors = self.root_project.manifest_selectors
 
             # inject any available external nodes
+            self.manifest.build_parent_and_child_maps()
             external_nodes_modified = self.inject_external_nodes()
             if external_nodes_modified:
                 self.manifest.rebuild_ref_lookup()
@@ -547,7 +557,7 @@ class ManifestLoader:
             )
             # parent and child maps will be rebuilt by write_manifest
 
-            if not skip_parsing:
+            if not skip_parsing or external_nodes_modified:
                 # write out the fully parsed manifest
                 self.write_manifest_for_partial_parse()
@@ -745,13 +755,16 @@ class ManifestLoader:
     def inject_external_nodes(self) -> bool:
         # Remove previously existing external nodes since we are regenerating them
         manifest_nodes_modified = False
+        # Remove all dependent nodes before removing referencing nodes
         for unique_id in self.manifest.external_node_unique_ids:
-            self.manifest.nodes.pop(unique_id)
             remove_dependent_project_references(self.manifest, unique_id)
             manifest_nodes_modified = True
+        for unique_id in self.manifest.external_node_unique_ids:
+            # remove external nodes from manifest only after dependent project references safely removed
+            self.manifest.nodes.pop(unique_id)
 
         # Inject any newly-available external nodes
-        pm = get_plugin_manager(self.root_project.project_name)
+        pm = plugins.get_plugin_manager(self.root_project.project_name)
         plugin_model_nodes = pm.get_nodes().models
         for node_arg in plugin_model_nodes.values():
            node = ModelNode.from_args(node_arg)
@@ -233,7 +233,7 @@ class SchemaGenericTestParser(SimpleParser):
         attached_node = None  # type: Optional[Union[ManifestNode, GraphMemberNode]]
         if not isinstance(target, UnpatchedSourceDefinition):
             attached_node_unique_id = self.manifest.ref_lookup.get_unique_id(
-                target.name, None, version
+                target.name, target.package_name, version
             )
             if attached_node_unique_id:
                 attached_node = self.manifest.nodes[attached_node_unique_id]
@@ -693,7 +693,7 @@ class ModelPatchParser(NodePatchParser[UnparsedModelUpdate]):
             )
             # ref lookup without version - version is not set yet
             versioned_model_unique_id = self.manifest.ref_lookup.get_unique_id(
-                versioned_model_name, None, None
+                versioned_model_name, target.package_name, None
             )
 
             versioned_model_node = None
@@ -702,7 +702,7 @@ class ModelPatchParser(NodePatchParser[UnparsedModelUpdate]):
             # If this is the latest version, it's allowed to define itself in a model file name that doesn't have a suffix
             if versioned_model_unique_id is None and unparsed_version.v == latest_version:
                 versioned_model_unique_id = self.manifest.ref_lookup.get_unique_id(
-                    block.name, None, None
+                    block.name, target.package_name, None
                 )
 
             if versioned_model_unique_id is None:
@@ -6,6 +6,7 @@ from dbt.contracts.graph.manifest import Manifest
 from dbt.exceptions import DbtRuntimeError
 from dbt.plugins.contracts import PluginArtifacts
 from dbt.plugins.manifest import PluginNodes
+import dbt.tracking
 
 
 def dbt_hook(func):
@@ -29,8 +30,11 @@ class dbtPlugin:
         self.project_name = project_name
         try:
             self.initialize()
+        except DbtRuntimeError as e:
+            # Remove the first line of DbtRuntimeError to avoid redundant "Runtime Error" line
+            raise DbtRuntimeError("\n".join(str(e).split("\n")[1:]))
         except Exception as e:
-            raise DbtRuntimeError(f"initialize: {e}")
+            raise DbtRuntimeError(str(e))
 
     @property
     def name(self) -> str:
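A tiny standalone sketch of the string surgery above: dropping the first line of a nested error message so "Runtime Error" is not printed twice when the exception is rewrapped. The message text is invented.

```python
nested = "Runtime Error\n  something went wrong in plugin setup"
trimmed = "\n".join(nested.split("\n")[1:])
print(trimmed)  # "  something went wrong in plugin setup"
```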
@@ -116,5 +120,14 @@ class PluginManager:
         all_plugin_nodes = PluginNodes()
         for hook_method in self.hooks.get("get_nodes", []):
             plugin_nodes = hook_method()
+            dbt.tracking.track_plugin_get_nodes(
+                {
+                    "plugin_name": hook_method.__self__.name,  # type: ignore
+                    "num_model_nodes": len(plugin_nodes.models),
+                    "num_model_packages": len(
+                        {model.package_name for model in plugin_nodes.models.values()}
+                    ),
+                }
+            )
             all_plugin_nodes.update(plugin_nodes)
         return all_plugin_nodes
@@ -139,6 +139,7 @@ class CompileTask(GraphRunnableTask):
                     "node_path": "sql/inline_query",
                     "node_name": "inline_query",
                     "unique_id": "sqloperation.test.inline_query",
+                    "node_status": "failed",
                 },
             )
         )
@@ -15,6 +15,7 @@ from dbt.events.types import (
     ListCmdOut,
 )
 from dbt.exceptions import DbtRuntimeError, DbtInternalError
+from dbt.events.contextvars import task_contextvars
 
 
 class ListTask(GraphRunnableTask):
@@ -123,20 +124,23 @@ class ListTask(GraphRunnableTask):
             yield node.original_file_path
 
     def run(self):
-        self.compile_manifest()
-        output = self.args.output
-        if output == "selector":
-            generator = self.generate_selectors
-        elif output == "name":
-            generator = self.generate_names
-        elif output == "json":
-            generator = self.generate_json
-        elif output == "path":
-            generator = self.generate_paths
-        else:
-            raise DbtInternalError("Invalid output {}".format(output))
+        # We set up a context manager here with "task_contextvars" because we
+        # we need the project_root in compile_manifest.
+        with task_contextvars(project_root=self.config.project_root):
+            self.compile_manifest()
+            output = self.args.output
+            if output == "selector":
+                generator = self.generate_selectors
+            elif output == "name":
+                generator = self.generate_names
+            elif output == "json":
+                generator = self.generate_json
+            elif output == "path":
+                generator = self.generate_paths
+            else:
+                raise DbtInternalError("Invalid output {}".format(output))
 
-        return self.output_results(generator())
+            return self.output_results(generator())
 
     def output_results(self, results):
         """Log, or output a plain, newline-delimited, and ready-to-pipe list of nodes found."""
@@ -14,8 +14,6 @@ from dbt.events.types import (
     RunResultErrorNoMessage,
     SQLCompiledPath,
     CheckNodeTestFailure,
-    FirstRunResultError,
-    AfterFirstRunResultError,
     EndOfRunSummary,
 )
@@ -118,15 +116,7 @@ def print_run_result_error(result, newline: bool = True, is_warning: bool = False):
             fire_event(CheckNodeTestFailure(relation_name=result.node.relation_name))
 
     elif result.message is not None:
-        first = True
-        for line in result.message.split("\n"):
-            # TODO: why do we format like this? Is there a reason this needs to
-            # be split instead of sending it as a single log line?
-            if first:
-                fire_event(FirstRunResultError(msg=line))
-                first = False
-            else:
-                fire_event(AfterFirstRunResultError(msg=line))
+        fire_event(RunResultError(msg=result.message))
 
 
 def print_run_end_messages(results, keyboard_interrupt: bool = False) -> None:
@@ -313,15 +313,6 @@ class GraphRunnableTask(ConfiguredTask):
                 cause = None
             self._mark_dependent_errors(node.unique_id, result, cause)
 
-        interim_run_result = self.get_result(
-            results=self.node_results,
-            elapsed_time=time.time() - self.started_at,
-            generated_at=datetime.utcnow(),
-        )
-
-        if self.args.write_json and hasattr(interim_run_result, "write"):
-            interim_run_result.write(self.result_path())
-
     def _cancel_connections(self, pool):
         """Given a pool, cancel all adapter connections and wait until all
         runners gentle terminates.
@@ -378,8 +369,18 @@ class GraphRunnableTask(ConfiguredTask):
                 # ensure information about all nodes is propagated to run results when failing fast
                 return self.node_results
             except KeyboardInterrupt:
+                run_result = self.get_result(
+                    results=self.node_results,
+                    elapsed_time=time.time() - self.started_at,
+                    generated_at=datetime.utcnow(),
+                )
+
+                if self.args.write_json and hasattr(run_result, "write"):
+                    run_result.write(self.result_path())
+
                 self._cancel_connections(pool)
                 print_run_end_messages(self.node_results, keyboard_interrupt=True)
 
                 raise
 
         pool.close()
@@ -443,7 +444,7 @@ class GraphRunnableTask(ConfiguredTask):
         Run dbt for the query, based on the graph.
         """
         # We set up a context manager here with "task_contextvars" because we
-        # we need the project_root in runtime_initialize.
+        # need the project_root in runtime_initialize.
         with task_contextvars(project_root=self.config.project_root):
             self._runtime_initialize()
@@ -584,7 +585,7 @@ class GraphRunnableTask(ConfiguredTask):
                 create_futures.append(fut)
 
             for create_future in as_completed(create_futures):
-                # trigger/re-raise any excceptions while creating schemas
+                # trigger/re-raise any exceptions while creating schemas
                 create_future.result()
 
     def get_result(self, results, elapsed_time, generated_at):
@@ -27,6 +27,7 @@ class SnapshotRunner(ModelRunner):
                 total=self.num_nodes,
                 execution_time=result.execution_time,
                 node_info=model.node_info,
+                result_message=result.message,
             ),
             level=level,
         )
@@ -46,6 +46,7 @@ RESOURCE_COUNTS = "iglu:com.dbt/resource_counts/jsonschema/1-0-1"
 RPC_REQUEST_SPEC = "iglu:com.dbt/rpc_request/jsonschema/1-0-1"
 RUNNABLE_TIMING = "iglu:com.dbt/runnable/jsonschema/1-0-0"
 RUN_MODEL_SPEC = "iglu:com.dbt/run_model/jsonschema/1-0-3"
+PLUGIN_GET_NODES = "iglu:com.dbt/plugin_get_nodes/jsonschema/1-0-0"
 
 
 class TimeoutEmitter(Emitter):
@@ -409,6 +410,19 @@ def track_partial_parser(options):
     )
 
 
+def track_plugin_get_nodes(options):
+    context = [SelfDescribingJson(PLUGIN_GET_NODES, options)]
+    assert active_user is not None, "Cannot track plugin node info when active user is None"
+
+    track(
+        active_user,
+        category="dbt",
+        action="plugin_get_nodes",
+        label=get_invocation_id(),
+        context=context,
+    )
+
+
 def track_runnable_timing(options):
     context = [SelfDescribingJson(RUNNABLE_TIMING, options)]
     assert active_user is not None, "Cannot track runnable info when active user is None"
@@ -232,5 +232,5 @@ def _get_adapter_plugin_names() -> Iterator[str]:
         yield plugin_name
 
 
-__version__ = "1.6.0"
+__version__ = "1.6.2"
 installed = get_installed_version()
@@ -25,7 +25,7 @@ with open(os.path.join(this_directory, "README.md")) as f:
 
 
 package_name = "dbt-core"
-package_version = "1.6.0"
+package_version = "1.6.2"
 description = """With dbt, data analysts and engineers can build analytics \
 the way engineers build applications."""
@@ -16,6 +16,7 @@ pytest-csv
 pytest-dotenv
 pytest-logbook
 pytest-mock
+pytest-split
 pytest-xdist
 sphinx
 tox>=3.13
@@ -9,17 +9,19 @@ ARG build_for=linux/amd64
 ##
 # base image (abstract)
 ##
-FROM --platform=$build_for python:3.11.2-slim-bullseye as base
+# Please do not upgrade beyond python3.10.7 currently as dbt-spark does not support
+# 3.11py and images do not get made properly
+FROM --platform=$build_for python:3.10.7-slim-bullseye as base
 
 # N.B. The refs updated automagically every release via bumpversion
 # N.B. dbt-postgres is currently found in the core codebase so a value of dbt-core@<some_version> is correct
 
-ARG dbt_core_ref=dbt-core@v1.6.0
-ARG dbt_postgres_ref=dbt-core@v1.6.0
-ARG dbt_redshift_ref=dbt-redshift@v1.6.0
-ARG dbt_bigquery_ref=dbt-bigquery@v1.6.0
-ARG dbt_snowflake_ref=dbt-snowflake@v1.6.0
-ARG dbt_spark_ref=dbt-spark@v1.6.0
+ARG dbt_core_ref=dbt-core@v1.6.2
+ARG dbt_postgres_ref=dbt-core@v1.6.2
+ARG dbt_redshift_ref=dbt-redshift@v1.6.2
+ARG dbt_bigquery_ref=dbt-bigquery@v1.6.2
+ARG dbt_snowflake_ref=dbt-snowflake@v1.6.2
+ARG dbt_spark_ref=dbt-spark@v1.6.2
 # special case args
 ARG dbt_spark_version=all
 ARG dbt_third_party
@@ -1 +1 @@
-version = "1.6.0"
+version = "1.6.2"
@@ -32,7 +32,10 @@ class PostgresCredentials(Credentials):
     sslkey: Optional[str] = None
     sslrootcert: Optional[str] = None
     application_name: Optional[str] = "dbt"
+    endpoint: Optional[str] = None
     retries: int = 1
+    options: Optional[str] = None
+    # options: Dict[str, Any] = field(default_factory=dict)
 
     _ALIASES = {"dbname": "database", "pass": "password"}
@@ -130,6 +133,12 @@ class PostgresConnectionManager(SQLConnectionManager):
         if credentials.application_name:
             kwargs["application_name"] = credentials.application_name
 
+        if credentials.options:
+            kwargs["options"] = credentials.options
+
+        if credentials.endpoint:
+            kwargs["endpoint"] = credentials.endpoint
+
         def connect():
             handle = psycopg2.connect(
                 dbname=credentials.database,
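A hedged sketch of what the new credential fields feed into: extra keyword arguments on `psycopg2.connect`. `options` is a real libpq startup-parameter string; `endpoint` is simply whatever key this branch forwards (it is not a standard libpq parameter), so only `options` is demonstrated here, and all connection values below are invented.

```python
import psycopg2

conn = psycopg2.connect(
    dbname="analytics",
    user="dbt_user",
    password="...",
    host="localhost",
    port=5432,
    options="-c search_path=staging",  # libpq command-line options string
)
```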
@@ -41,7 +41,7 @@ def _dbt_psycopg2_name():
 
 
 package_name = "dbt-postgres"
-package_version = "1.6.0"
+package_version = "1.6.2"
 description = """The postgres adapter plugin for dbt (data build tool)"""
 
 this_directory = os.path.abspath(os.path.dirname(__file__))
@@ -141,6 +141,9 @@
       },
       {
         "$ref": "#/definitions/Metric"
+      },
+      {
+        "$ref": "#/definitions/SemanticModel"
       }
     ]
   }
@@ -212,7 +215,7 @@
     }
   },
   "additionalProperties": false,
-  "description": "WritableManifest(metadata: dbt.contracts.graph.manifest.ManifestMetadata, nodes: Mapping[str, Union[dbt.contracts.graph.nodes.AnalysisNode, dbt.contracts.graph.nodes.SingularTestNode, dbt.contracts.graph.nodes.HookNode, dbt.contracts.graph.nodes.ModelNode, dbt.contracts.graph.nodes.RPCNode, dbt.contracts.graph.nodes.SqlNode, dbt.contracts.graph.nodes.GenericTestNode, dbt.contracts.graph.nodes.SnapshotNode, dbt.contracts.graph.nodes.SeedNode]], sources: Mapping[str, dbt.contracts.graph.nodes.SourceDefinition], macros: Mapping[str, dbt.contracts.graph.nodes.Macro], docs: Mapping[str, dbt.contracts.graph.nodes.Documentation], exposures: Mapping[str, dbt.contracts.graph.nodes.Exposure], metrics: Mapping[str, dbt.contracts.graph.nodes.Metric], groups: Mapping[str, dbt.contracts.graph.nodes.Group], selectors: Mapping[str, Any], disabled: Union[Mapping[str, List[Union[dbt.contracts.graph.nodes.AnalysisNode, dbt.contracts.graph.nodes.SingularTestNode, dbt.contracts.graph.nodes.HookNode, dbt.contracts.graph.nodes.ModelNode, dbt.contracts.graph.nodes.RPCNode, dbt.contracts.graph.nodes.SqlNode, dbt.contracts.graph.nodes.GenericTestNode, dbt.contracts.graph.nodes.SnapshotNode, dbt.contracts.graph.nodes.SeedNode, dbt.contracts.graph.nodes.SourceDefinition, dbt.contracts.graph.nodes.Exposure, dbt.contracts.graph.nodes.Metric]]], NoneType], parent_map: Union[Dict[str, List[str]], NoneType], child_map: Union[Dict[str, List[str]], NoneType], group_map: Union[Dict[str, List[str]], NoneType], semantic_models: Mapping[str, dbt.contracts.graph.nodes.SemanticModel])",
+  "description": "WritableManifest(metadata: dbt.contracts.graph.manifest.ManifestMetadata, nodes: Mapping[str, Union[dbt.contracts.graph.nodes.AnalysisNode, dbt.contracts.graph.nodes.SingularTestNode, dbt.contracts.graph.nodes.HookNode, dbt.contracts.graph.nodes.ModelNode, dbt.contracts.graph.nodes.RPCNode, dbt.contracts.graph.nodes.SqlNode, dbt.contracts.graph.nodes.GenericTestNode, dbt.contracts.graph.nodes.SnapshotNode, dbt.contracts.graph.nodes.SeedNode]], sources: Mapping[str, dbt.contracts.graph.nodes.SourceDefinition], macros: Mapping[str, dbt.contracts.graph.nodes.Macro], docs: Mapping[str, dbt.contracts.graph.nodes.Documentation], exposures: Mapping[str, dbt.contracts.graph.nodes.Exposure], metrics: Mapping[str, dbt.contracts.graph.nodes.Metric], groups: Mapping[str, dbt.contracts.graph.nodes.Group], selectors: Mapping[str, Any], disabled: Union[Mapping[str, List[Union[dbt.contracts.graph.nodes.AnalysisNode, dbt.contracts.graph.nodes.SingularTestNode, dbt.contracts.graph.nodes.HookNode, dbt.contracts.graph.nodes.ModelNode, dbt.contracts.graph.nodes.RPCNode, dbt.contracts.graph.nodes.SqlNode, dbt.contracts.graph.nodes.GenericTestNode, dbt.contracts.graph.nodes.SnapshotNode, dbt.contracts.graph.nodes.SeedNode, dbt.contracts.graph.nodes.SourceDefinition, dbt.contracts.graph.nodes.Exposure, dbt.contracts.graph.nodes.Metric, dbt.contracts.graph.nodes.SemanticModel]]], NoneType], parent_map: Union[Dict[str, List[str]], NoneType], child_map: Union[Dict[str, List[str]], NoneType], group_map: Union[Dict[str, List[str]], NoneType], semantic_models: Mapping[str, dbt.contracts.graph.nodes.SemanticModel])",
   "definitions": {
     "ManifestMetadata": {
       "type": "object",
@@ -224,12 +227,12 @@
       },
       "dbt_version": {
         "type": "string",
-        "default": "1.6.0b4"
+        "default": "1.6.0"
       },
       "generated_at": {
         "type": "string",
         "format": "date-time",
-        "default": "2023-06-15T20:32:38.802488Z"
+        "default": "2023-08-07T20:10:03.381822Z"
       },
       "invocation_id": {
         "oneOf": [
@@ -240,7 +243,7 @@
           {
             "type": "null"
           }
         ],
-        "default": "fe95e4d0-61ff-487d-8293-092f543fcab2"
+        "default": "03dee192-ff77-43cc-bc3f-5eeaf6d36344"
       },
       "env": {
         "type": "object",
@@ -471,7 +474,7 @@
       },
       "created_at": {
         "type": "number",
-        "default": 1686861158.804467
+        "default": 1691439003.386713
       },
       "config_call_dict": {
         "type": "object",
@@ -1184,7 +1187,7 @@
       },
       "created_at": {
         "type": "number",
-        "default": 1686861158.805745
+        "default": 1691439003.389955
       },
       "config_call_dict": {
         "type": "object",
@@ -1572,7 +1575,7 @@
       },
       "created_at": {
         "type": "number",
-        "default": 1686861158.806452
+        "default": 1691439003.3916101
      },
      "config_call_dict": {
        "type": "object",
@@ -1848,7 +1851,7 @@
       },
       "created_at": {
         "type": "number",
-        "default": 1686861158.807143
+        "default": 1691439003.393298
       },
       "config_call_dict": {
         "type": "object",
@@ -2001,10 +2004,10 @@
         }
       ]
     },
-    "state_relation": {
+    "defer_relation": {
       "oneOf": [
         {
-          "$ref": "#/definitions/StateRelation"
+          "$ref": "#/definitions/DeferRelation"
         },
         {
           "type": "null"
@@ -2013,7 +2016,7 @@
     }
   },
   "additionalProperties": false,
-  "description": "ModelNode(database: Union[str, NoneType], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.NodeConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, group: Union[str, NoneType] = None, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Union[str, NoneType] = None, build_path: Union[str, NoneType] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Union[str, NoneType] = None, raw_code: str = '', language: str = 'sql', refs: List[dbt.contracts.graph.nodes.RefArgs] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Union[str, NoneType] = None, compiled: bool = False, compiled_code: Union[str, NoneType] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Union[str, NoneType] = None, contract: dbt.contracts.graph.nodes.Contract = <factory>, access: dbt.node_types.AccessType = <AccessType.Protected: 'protected'>, constraints: List[dbt.contracts.graph.nodes.ModelLevelConstraint] = <factory>, version: Union[str, float, NoneType] = None, latest_version: Union[str, float, NoneType] = None, deprecation_date: Union[datetime.datetime, NoneType] = None, state_relation: Union[dbt.contracts.graph.nodes.StateRelation, NoneType] = None)"
+  "description": "ModelNode(database: Union[str, NoneType], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.NodeConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, group: Union[str, NoneType] = None, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Union[str, NoneType] = None, build_path: Union[str, NoneType] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Union[str, NoneType] = None, raw_code: str = '', language: str = 'sql', refs: List[dbt.contracts.graph.nodes.RefArgs] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Union[str, NoneType] = None, compiled: bool = False, compiled_code: Union[str, NoneType] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Union[str, NoneType] = None, contract: dbt.contracts.graph.nodes.Contract = <factory>, access: dbt.node_types.AccessType = <AccessType.Protected: 'protected'>, constraints: List[dbt.contracts.graph.nodes.ModelLevelConstraint] = <factory>, version: Union[str, float, NoneType] = None, latest_version: Union[str, float, NoneType] = None, deprecation_date: Union[datetime.datetime, NoneType] = None, defer_relation: Union[dbt.contracts.graph.nodes.DeferRelation, NoneType] = None)"
 },
 "ModelLevelConstraint": {
   "type": "object",
@@ -2071,16 +2074,13 @@
|
||||
"additionalProperties": false,
|
||||
"description": "ModelLevelConstraint(type: dbt.contracts.graph.nodes.ConstraintType, name: Union[str, NoneType] = None, expression: Union[str, NoneType] = None, warn_unenforced: bool = True, warn_unsupported: bool = True, columns: List[str] = <factory>)"
|
||||
},
|
||||
"StateRelation": {
|
||||
"DeferRelation": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
"alias",
|
||||
"schema"
|
||||
"schema",
|
||||
"alias"
|
||||
],
|
||||
"properties": {
|
||||
"alias": {
|
||||
"type": "string"
|
||||
},
|
||||
"database": {
|
||||
"oneOf": [
|
||||
{
|
||||
@@ -2093,10 +2093,23 @@
|
||||
},
|
||||
"schema": {
|
||||
"type": "string"
|
||||
},
|
||||
"alias": {
|
||||
"type": "string"
|
||||
},
|
||||
"relation_name": {
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"additionalProperties": false,
|
||||
"description": "StateRelation(alias: str, database: Union[str, NoneType], schema: str)"
|
||||
"description": "DeferRelation(database: Union[str, NoneType], schema: str, alias: str, relation_name: Union[str, NoneType])"
|
||||
},
|
||||
"RPCNode": {
|
||||
"type": "object",
|
||||
@@ -2260,7 +2273,7 @@
|
||||
},
|
||||
"created_at": {
|
||||
"type": "number",
|
||||
"default": 1686861158.808148
|
||||
"default": 1691439003.39583
|
||||
},
|
||||
"config_call_dict": {
|
||||
"type": "object",
|
||||
@@ -2398,7 +2411,7 @@
|
||||
"resource_type": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"sqloperation"
|
||||
"sql_operation"
|
||||
]
|
||||
},
|
||||
"package_name": {
|
||||
@@ -2526,7 +2539,7 @@
|
||||
},
|
||||
"created_at": {
|
||||
"type": "number",
|
||||
"default": 1686861158.8088078
|
||||
"default": 1691439003.3974268
|
||||
},
|
||||
"config_call_dict": {
|
||||
"type": "object",
|
||||
@@ -2784,7 +2797,7 @@
|
||||
},
|
||||
"created_at": {
|
||||
"type": "number",
|
||||
"default": 1686861158.8095539
|
||||
"default": 1691439003.399393
|
||||
},
|
||||
"config_call_dict": {
|
||||
"type": "object",
|
||||
@@ -3079,7 +3092,7 @@
|
||||
},
|
||||
"created_at": {
|
||||
"type": "number",
|
||||
"default": 1686861158.810841
|
||||
"default": 1691439003.4026701
|
||||
},
|
||||
"config_call_dict": {
|
||||
"type": "object",
|
||||
@@ -3179,10 +3192,10 @@
|
||||
"checksum": null
|
||||
}
|
||||
},
|
||||
"state_relation": {
|
||||
"defer_relation": {
|
||||
"oneOf": [
|
||||
{
|
||||
"$ref": "#/definitions/StateRelation"
|
||||
"$ref": "#/definitions/DeferRelation"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
@@ -3191,7 +3204,7 @@
|
||||
}
|
||||
},
|
||||
"additionalProperties": false,
|
||||
"description": "SnapshotNode(database: Union[str, NoneType], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.SnapshotConfig, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, group: Union[str, NoneType] = None, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Union[str, NoneType] = None, build_path: Union[str, NoneType] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Union[str, NoneType] = None, raw_code: str = '', language: str = 'sql', refs: List[dbt.contracts.graph.nodes.RefArgs] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Union[str, NoneType] = None, compiled: bool = False, compiled_code: Union[str, NoneType] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Union[str, NoneType] = None, contract: dbt.contracts.graph.nodes.Contract = <factory>, state_relation: Union[dbt.contracts.graph.nodes.StateRelation, NoneType] = None)"
|
||||
"description": "SnapshotNode(database: Union[str, NoneType], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.SnapshotConfig, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, group: Union[str, NoneType] = None, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Union[str, NoneType] = None, build_path: Union[str, NoneType] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Union[str, NoneType] = None, raw_code: str = '', language: str = 'sql', refs: List[dbt.contracts.graph.nodes.RefArgs] = <factory>, sources: List[List[str]] = <factory>, metrics: List[List[str]] = <factory>, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, compiled_path: Union[str, NoneType] = None, compiled: bool = False, compiled_code: Union[str, NoneType] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = <factory>, _pre_injected_sql: Union[str, NoneType] = None, contract: dbt.contracts.graph.nodes.Contract = <factory>, defer_relation: Union[dbt.contracts.graph.nodes.DeferRelation, NoneType] = None)"
|
||||
},
|
||||
"SnapshotConfig": {
|
||||
"type": "object",
|
||||
@@ -3586,7 +3599,7 @@
|
||||
},
|
||||
"created_at": {
|
||||
"type": "number",
|
||||
"default": 1686861158.812035
|
||||
"default": 1691439003.4056058
|
||||
},
|
||||
"config_call_dict": {
|
||||
"type": "object",
|
||||
@@ -3622,10 +3635,10 @@
|
||||
"macros": []
|
||||
}
|
||||
},
|
||||
"state_relation": {
|
||||
"defer_relation": {
|
||||
"oneOf": [
|
||||
{
|
||||
"$ref": "#/definitions/StateRelation"
|
||||
"$ref": "#/definitions/DeferRelation"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
@@ -3634,7 +3647,7 @@
|
||||
}
|
||||
},
|
||||
"additionalProperties": false,
|
||||
"description": "SeedNode(database: Union[str, NoneType], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.SeedConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, group: Union[str, NoneType] = None, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Union[str, NoneType] = None, build_path: Union[str, NoneType] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Union[str, NoneType] = None, raw_code: str = '', root_path: Union[str, NoneType] = None, depends_on: dbt.contracts.graph.nodes.MacroDependsOn = <factory>, state_relation: Union[dbt.contracts.graph.nodes.StateRelation, NoneType] = None)"
|
||||
"description": "SeedNode(database: Union[str, NoneType], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.SeedConfig = <factory>, _event_status: Dict[str, Any] = <factory>, tags: List[str] = <factory>, description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = <factory>, meta: Dict[str, Any] = <factory>, group: Union[str, NoneType] = None, docs: dbt.contracts.graph.unparsed.Docs = <factory>, patch_path: Union[str, NoneType] = None, build_path: Union[str, NoneType] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = <factory>, created_at: float = <factory>, config_call_dict: Dict[str, Any] = <factory>, relation_name: Union[str, NoneType] = None, raw_code: str = '', root_path: Union[str, NoneType] = None, depends_on: dbt.contracts.graph.nodes.MacroDependsOn = <factory>, defer_relation: Union[dbt.contracts.graph.nodes.DeferRelation, NoneType] = None)"
|
||||
},
|
||||
"SeedConfig": {
|
||||
"type": "object",
|
||||
@@ -4007,7 +4020,7 @@
|
||||
},
|
||||
"created_at": {
|
||||
"type": "number",
|
||||
"default": 1686861158.8133152
|
||||
"default": 1691439003.408927
|
||||
}
|
||||
},
|
||||
"additionalProperties": false,
|
||||
@@ -4319,7 +4332,7 @@
|
||||
},
|
||||
"created_at": {
|
||||
"type": "number",
|
||||
"default": 1686861158.8135822
|
||||
"default": 1691439003.409885
|
||||
},
|
||||
"supported_languages": {
|
||||
"oneOf": [
|
||||
@@ -4559,7 +4572,7 @@
|
||||
},
|
||||
"created_at": {
|
||||
"type": "number",
|
||||
"default": 1686861158.814228
|
||||
"default": 1691439003.411563
|
||||
}
|
||||
},
|
||||
"additionalProperties": false,
|
||||
@@ -4659,7 +4672,6 @@
|
||||
"enum": [
|
||||
"simple",
|
||||
"ratio",
|
||||
"expr",
|
||||
"cumulative",
|
||||
"derived"
|
||||
]
|
||||
@@ -4745,7 +4757,7 @@
|
||||
},
|
||||
"created_at": {
|
||||
"type": "number",
|
||||
"default": 1686861158.815338
|
||||
"default": 1691439003.41419
|
||||
},
|
||||
"group": {
|
||||
"oneOf": [
|
||||
@@ -4775,23 +4787,17 @@
|
||||
}
|
||||
]
|
||||
},
|
||||
"measures": {
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/MetricInputMeasure"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
"input_measures": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/MetricInputMeasure"
|
||||
},
|
||||
"default": []
|
||||
},
|
||||
"numerator": {
|
||||
"oneOf": [
|
||||
{
|
||||
"$ref": "#/definitions/MetricInputMeasure"
|
||||
"$ref": "#/definitions/MetricInput"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
@@ -4801,7 +4807,7 @@
|
||||
"denominator": {
|
||||
"oneOf": [
|
||||
{
|
||||
"$ref": "#/definitions/MetricInputMeasure"
|
||||
"$ref": "#/definitions/MetricInput"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
@@ -4860,7 +4866,7 @@
|
||||
}
|
||||
},
|
||||
"additionalProperties": false,
|
||||
"description": "MetricTypeParams(measure: Union[dbt.contracts.graph.nodes.MetricInputMeasure, NoneType] = None, measures: Union[List[dbt.contracts.graph.nodes.MetricInputMeasure], NoneType] = None, numerator: Union[dbt.contracts.graph.nodes.MetricInputMeasure, NoneType] = None, denominator: Union[dbt.contracts.graph.nodes.MetricInputMeasure, NoneType] = None, expr: Union[str, NoneType] = None, window: Union[dbt.contracts.graph.nodes.MetricTimeWindow, NoneType] = None, grain_to_date: Union[dbt_semantic_interfaces.type_enums.time_granularity.TimeGranularity, NoneType] = None, metrics: Union[List[dbt.contracts.graph.nodes.MetricInput], NoneType] = None)"
|
||||
"description": "MetricTypeParams(measure: Union[dbt.contracts.graph.nodes.MetricInputMeasure, NoneType] = None, input_measures: List[dbt.contracts.graph.nodes.MetricInputMeasure] = <factory>, numerator: Union[dbt.contracts.graph.nodes.MetricInput, NoneType] = None, denominator: Union[dbt.contracts.graph.nodes.MetricInput, NoneType] = None, expr: Union[str, NoneType] = None, window: Union[dbt.contracts.graph.nodes.MetricTimeWindow, NoneType] = None, grain_to_date: Union[dbt_semantic_interfaces.type_enums.time_granularity.TimeGranularity, NoneType] = None, metrics: Union[List[dbt.contracts.graph.nodes.MetricInput], NoneType] = None)"
|
||||
},
|
||||
"MetricInputMeasure": {
|
||||
"type": "object",
|
||||
@@ -4908,30 +4914,6 @@
|
||||
"additionalProperties": false,
|
||||
"description": "WhereFilter(where_sql_template: str)"
|
||||
},
|
||||
"MetricTimeWindow": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
"count",
|
||||
"granularity"
|
||||
],
|
||||
"properties": {
|
||||
"count": {
|
||||
"type": "integer"
|
||||
},
|
||||
"granularity": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"day",
|
||||
"week",
|
||||
"month",
|
||||
"quarter",
|
||||
"year"
|
||||
]
|
||||
}
|
||||
},
|
||||
"additionalProperties": false,
|
||||
"description": "MetricTimeWindow(count: int, granularity: dbt_semantic_interfaces.type_enums.time_granularity.TimeGranularity)"
|
||||
},
|
||||
"MetricInput": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
@@ -4992,6 +4974,30 @@
|
||||
"additionalProperties": false,
|
||||
"description": "MetricInput(name: str, filter: Union[dbt.contracts.graph.nodes.WhereFilter, NoneType] = None, alias: Union[str, NoneType] = None, offset_window: Union[dbt.contracts.graph.nodes.MetricTimeWindow, NoneType] = None, offset_to_grain: Union[dbt_semantic_interfaces.type_enums.time_granularity.TimeGranularity, NoneType] = None)"
|
||||
},
|
||||
"MetricTimeWindow": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
"count",
|
||||
"granularity"
|
||||
],
|
||||
"properties": {
|
||||
"count": {
|
||||
"type": "integer"
|
||||
},
|
||||
"granularity": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"day",
|
||||
"week",
|
||||
"month",
|
||||
"quarter",
|
||||
"year"
|
||||
]
|
||||
}
|
||||
},
|
||||
"additionalProperties": false,
|
||||
"description": "MetricTimeWindow(count: int, granularity: dbt_semantic_interfaces.type_enums.time_granularity.TimeGranularity)"
|
||||
},
|
||||
"SourceFileMetadata": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
@@ -5122,14 +5128,14 @@
|
||||
"operation",
|
||||
"seed",
|
||||
"rpc",
|
||||
"sqloperation",
|
||||
"sql_operation",
|
||||
"doc",
|
||||
"source",
|
||||
"macro",
|
||||
"exposure",
|
||||
"metric",
|
||||
"group",
|
||||
"semanticmodel"
|
||||
"semantic_model"
|
||||
]
|
||||
},
|
||||
"package_name": {
|
||||
@@ -5213,10 +5219,44 @@
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
},
|
||||
"depends_on": {
|
||||
"$ref": "#/definitions/DependsOn",
|
||||
"default": {
|
||||
"macros": [],
|
||||
"nodes": []
|
||||
}
|
||||
},
|
||||
"refs": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/RefArgs"
|
||||
},
|
||||
"default": []
|
||||
},
|
||||
"created_at": {
|
||||
"type": "number",
|
||||
"default": 1691439003.4182558
|
||||
},
|
||||
"config": {
|
||||
"$ref": "#/definitions/SemanticModelConfig",
|
||||
"default": {
|
||||
"enabled": true
|
||||
}
|
||||
},
|
||||
"primary_entity": {
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"additionalProperties": false,
|
||||
"description": "SemanticModel(name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], model: str, node_relation: Union[dbt.contracts.graph.nodes.NodeRelation, NoneType], description: Union[str, NoneType] = None, defaults: Union[dbt.contracts.graph.semantic_models.Defaults, NoneType] = None, entities: Sequence[dbt.contracts.graph.semantic_models.Entity] = <factory>, measures: Sequence[dbt.contracts.graph.semantic_models.Measure] = <factory>, dimensions: Sequence[dbt.contracts.graph.semantic_models.Dimension] = <factory>, metadata: Union[dbt.contracts.graph.semantic_models.SourceFileMetadata, NoneType] = None)"
|
||||
"description": "SemanticModel(name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], model: str, node_relation: Union[dbt.contracts.graph.nodes.NodeRelation, NoneType], description: Union[str, NoneType] = None, defaults: Union[dbt.contracts.graph.semantic_models.Defaults, NoneType] = None, entities: Sequence[dbt.contracts.graph.semantic_models.Entity] = <factory>, measures: Sequence[dbt.contracts.graph.semantic_models.Measure] = <factory>, dimensions: Sequence[dbt.contracts.graph.semantic_models.Dimension] = <factory>, metadata: Union[dbt.contracts.graph.semantic_models.SourceFileMetadata, NoneType] = None, depends_on: dbt.contracts.graph.nodes.DependsOn = <factory>, refs: List[dbt.contracts.graph.nodes.RefArgs] = <factory>, created_at: float = <factory>, config: dbt.contracts.graph.model_config.SemanticModelConfig = <factory>, primary_entity: Union[str, NoneType] = None)"
|
||||
},
|
||||
"NodeRelation": {
|
||||
"type": "object",
|
||||
@@ -5240,10 +5280,20 @@
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
},
|
||||
"relation_name": {
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"additionalProperties": false,
|
||||
"description": "NodeRelation(alias: str, schema_name: str, database: Union[str, NoneType] = None)"
|
||||
"description": "NodeRelation(alias: str, schema_name: str, database: Union[str, NoneType] = None, relation_name: Union[str, NoneType] = None)"
|
||||
},
|
||||
"Defaults": {
|
||||
"type": "object",
|
||||
@@ -5413,35 +5463,23 @@
|
||||
]
|
||||
},
|
||||
"use_discrete_percentile": {
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "boolean"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"use_approximate_percentile": {
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "boolean"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
}
|
||||
},
|
||||
"additionalProperties": false,
|
||||
"description": "MeasureAggregationParameters(percentile: Union[float, NoneType] = None, use_discrete_percentile: Union[bool, NoneType] = None, use_approximate_percentile: Union[bool, NoneType] = None)"
|
||||
"description": "MeasureAggregationParameters(percentile: Union[float, NoneType] = None, use_discrete_percentile: bool = False, use_approximate_percentile: bool = False)"
|
||||
},
|
||||
"NonAdditiveDimension": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
"name",
|
||||
"window_choice",
|
||||
"window_grouples"
|
||||
"window_groupings"
|
||||
],
|
||||
"properties": {
|
||||
"name": {
|
||||
@@ -5461,7 +5499,7 @@
|
||||
"count"
|
||||
]
|
||||
},
|
||||
"window_grouples": {
|
||||
"window_groupings": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
@@ -5469,7 +5507,7 @@
|
||||
}
|
||||
},
|
||||
"additionalProperties": false,
|
||||
"description": "NonAdditiveDimension(name: str, window_choice: dbt_semantic_interfaces.type_enums.aggregation_type.AggregationType, window_grouples: List[str])"
|
||||
"description": "NonAdditiveDimension(name: str, window_choice: dbt_semantic_interfaces.type_enums.aggregation_type.AggregationType, window_groupings: List[str])"
|
||||
},
|
||||
"Dimension": {
|
||||
"type": "object",
|
||||
@@ -5581,6 +5619,18 @@
|
||||
},
|
||||
"additionalProperties": false,
|
||||
"description": "DimensionValidityParams(is_start: bool = False, is_end: bool = False)"
|
||||
},
|
||||
"SemanticModelConfig": {
|
||||
"type": "object",
|
||||
"required": [],
|
||||
"properties": {
|
||||
"enabled": {
|
||||
"type": "boolean",
|
||||
"default": true
|
||||
}
|
||||
},
|
||||
"additionalProperties": true,
|
||||
"description": "SemanticModelConfig(_extra: Dict[str, Any] = <factory>, enabled: bool = True)"
|
||||
}
|
||||
},
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
|
||||
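Note: the net effect of the schema changes above is that `state_relation` becomes `defer_relation` (with a new optional `relation_name` field) on model, seed, and snapshot nodes. A minimal sketch of reading the renamed field from a manifest produced under this schema; the artifact path and node unique_id are hypothetical:

import json

# Hypothetical path to an artifact written by `dbt parse` / `dbt run`.
with open("target/manifest.json") as f:
    manifest = json.load(f)

node = manifest["nodes"]["model.my_project.my_model"]  # hypothetical unique_id
defer_relation = node.get("defer_relation")  # null unless populated for deferral
if defer_relation is not None:
    # Per the DeferRelation definition above: database, schema, alias, relation_name.
    print(defer_relation["schema"], defer_relation["alias"], defer_relation.get("relation_name"))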
@@ -1 +1 @@
-version = "1.6.0"
+version = "1.6.2"

@@ -20,7 +20,7 @@ except ImportError:


 package_name = "dbt-tests-adapter"
-package_version = "1.6.0"
+package_version = "1.6.2"
 description = """The dbt adapter tests for adapter plugins"""

 this_directory = os.path.abspath(os.path.dirname(__file__))

File diff suppressed because one or more lines are too long
@@ -1,9 +1,12 @@
-import pytest
+import json
+import os
+import shutil
-from dbt.tests.util import run_dbt, get_manifest

+import pytest
+
+from dbt.contracts.graph.manifest import WritableManifest, get_manifest_schema_version
 from dbt.exceptions import IncompatibleSchemaError
-from dbt.contracts.graph.manifest import WritableManifest
+from dbt.tests.util import run_dbt, get_manifest

 # This project must have one of each kind of node type, plus disabled versions, for
 # test coverage to be complete.
@@ -351,3 +354,13 @@ class TestPreviousVersionState:
        # schema versions 1, 2, 3 are all not forward compatible
        for schema_version in range(1, 4):
            self.compare_previous_state(project, schema_version, False)

    def test_get_manifest_schema_version(self, project):
        for schema_version in range(1, self.CURRENT_EXPECTED_MANIFEST_VERSION):
            manifest_path = os.path.join(
                project.test_data_dir, f"state/v{schema_version}/manifest.json"
            )
            manifest = json.load(open(manifest_path))

            manifest_version = get_manifest_schema_version(manifest)
            assert manifest_version == schema_version
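For context on the new test: a serialized manifest embeds its schema id in `metadata.dbt_schema_version` (e.g. `https://schemas.getdbt.com/dbt/manifest/v10.json`), and `get_manifest_schema_version` extracts the integer from that URL. An illustrative reimplementation of that extraction, not the library's actual code:

def manifest_schema_version(manifest: dict) -> int:
    # e.g. "https://schemas.getdbt.com/dbt/manifest/v10.json" -> 10
    schema_id = manifest["metadata"]["dbt_schema_version"]
    return int(schema_id.split("/")[-1].split(".")[0].lstrip("v"))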
@@ -2,7 +2,6 @@ from multiprocessing import Process
 from pathlib import Path
 import json
 import pytest
-import platform
 from dbt.tests.util import run_dbt

 good_model_sql = """
@@ -41,7 +40,7 @@ class TestRunResultsTimingFailure:
        assert len(results.results[0].timing) > 0


-@pytest.mark.skipif(platform.system() != "Darwin", reason="Fails on linux in github actions")
+@pytest.mark.skip()
 class TestRunResultsWritesFileOnSignal:
    @pytest.fixture(scope="class")
    def models(self):
@@ -1,5 +1,8 @@
 import os
 import pytest
-from dbt.tests.util import run_dbt, update_config_file
+import yaml
+from pathlib import Path
+from dbt.tests.util import run_dbt, update_config_file, write_config_file
 from dbt.exceptions import ProjectContractError


@@ -62,3 +65,50 @@ class TestProjectYamlVersionInvalid:
        assert "at path ['version']: 'invalid' is not valid under any of the given schemas" in str(
            excinfo.value
        )


class TestProjectDbtCloudConfig:
    @pytest.fixture(scope="class")
    def models(self):
        return {"simple_model.sql": simple_model_sql, "simple_model.yml": simple_model_yml}

    def test_dbt_cloud(self, project):
        run_dbt(["parse"], expect_pass=True)
        conf = yaml.safe_load(
            Path(os.path.join(project.project_root, "dbt_project.yml")).read_text()
        )
        assert conf == {"name": "test", "profile": "test"}

        config = {
            "name": "test",
            "profile": "test",
            "dbt-cloud": {
                "account_id": "123",
                "application": "test",
                "environment": "test",
                "api_key": "test",
            },
        }
        write_config_file(config, project.project_root, "dbt_project.yml")
        run_dbt(["parse"], expect_pass=True)
        conf = yaml.safe_load(
            Path(os.path.join(project.project_root, "dbt_project.yml")).read_text()
        )
        assert conf == config


class TestProjectDbtCloudConfigString:
    @pytest.fixture(scope="class")
    def models(self):
        return {"simple_model.sql": simple_model_sql, "simple_model.yml": simple_model_yml}

    def test_dbt_cloud_invalid(self, project):
        run_dbt()
        config = {"name": "test", "profile": "test", "dbt-cloud": "Some string"}
        update_config_file(config, "dbt_project.yml")
        expected_err = (
            "at path ['dbt-cloud']: 'Some string' is not valid under any of the given schemas"
        )
        with pytest.raises(ProjectContractError) as excinfo:
            run_dbt()
        assert expected_err in str(excinfo.value)
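The contract validated above requires `dbt-cloud` to be a mapping, not a scalar. A quick standalone check mirroring that shape, using only pyyaml (the field names are taken from the test fixture; the exact keys accepted are not fixed by the contract):

import yaml

valid = yaml.safe_load("""
name: test
profile: test
dbt-cloud:
  account_id: "123"
""")
invalid = yaml.safe_load("""
name: test
profile: test
dbt-cloud: Some string
""")
assert isinstance(valid["dbt-cloud"], dict)
# The scalar shape below is what makes dbt raise ProjectContractError.
assert not isinstance(invalid["dbt-cloud"], dict)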
@@ -781,3 +781,108 @@ class TestModifiedBodyAndContract:
        # The model's contract has changed, even if non-breaking, so it should be selected by 'state:modified.contract'
        results = run_dbt(["list", "-s", "state:modified.contract", "--state", "./state"])
        assert results == ["test.my_model"]


modified_table_model_access_yml = """
version: 2
models:
  - name: table_model
    access: public
"""


class TestModifiedAccess(BaseModifiedState):
    def test_changed_access(self, project):
        self.run_and_save_state()

        # No access change
        assert not run_dbt(["list", "-s", "state:modified", "--state", "./state"])

        # Modify access (protected -> public)
        write_file(modified_table_model_access_yml, "models", "schema.yml")
        assert run_dbt(["list", "-s", "state:modified", "--state", "./state"])

        results = run_dbt(["list", "-s", "state:modified", "--state", "./state"])
        assert results == ["test.table_model"]


modified_table_model_deprecation_date_yml = """
version: 2
models:
  - name: table_model
    deprecation_date: 2020-01-01
"""


class TestModifiedDeprecationDate(BaseModifiedState):
    def test_changed_deprecation_date(self, project):
        self.run_and_save_state()

        # No deprecation_date change
        assert not run_dbt(["list", "-s", "state:modified", "--state", "./state"])

        # Modify deprecation_date (None -> 2020-01-01)
        write_file(modified_table_model_deprecation_date_yml, "models", "schema.yml")
        assert run_dbt(["list", "-s", "state:modified", "--state", "./state"])

        results = run_dbt(["list", "-s", "state:modified", "--state", "./state"])
        assert results == ["test.table_model"]


modified_table_model_version_yml = """
version: 2
models:
  - name: table_model
    versions:
      - v: 1
        defined_in: table_model
"""


class TestModifiedVersion(BaseModifiedState):
    def test_changed_version(self, project):
        self.run_and_save_state()

        # Change version (null -> v1)
        write_file(modified_table_model_version_yml, "models", "schema.yml")

        results = run_dbt(["list", "-s", "state:modified", "--state", "./state"])
        assert results == ["test.table_model.v1"]


table_model_latest_version_yml = """
version: 2
models:
  - name: table_model
    latest_version: 1
    versions:
      - v: 1
        defined_in: table_model
"""


modified_table_model_latest_version_yml = """
version: 2
models:
  - name: table_model
    latest_version: 2
    versions:
      - v: 1
        defined_in: table_model
      - v: 2
"""


class TestModifiedLatestVersion(BaseModifiedState):
    def test_changed_latest_version(self, project):
        # Setup initial latest_version: 1
        write_file(table_model_latest_version_yml, "models", "schema.yml")

        self.run_and_save_state()

        # Bump latest version
        write_file(table_model_sql, "models", "table_model_v2.sql")
        write_file(modified_table_model_latest_version_yml, "models", "schema.yml")

        results = run_dbt(["list", "-s", "state:modified", "--state", "./state"])
        assert results == ["test.table_model.v1", "test.table_model.v2"]
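All of these tests follow the same pattern from `BaseModifiedState`: build once, keep the artifacts as the comparison state, edit a schema file, then ask `dbt list` what changed. A condensed sketch of that workflow under the assumption that `run_and_save_state` simply copies the freshly built `target/` into `./state`:

import shutil

from dbt.tests.util import run_dbt, write_file

def save_state():
    # Build, then keep the manifest around as the baseline for state comparison.
    run_dbt(["run"])
    shutil.copytree("target", "state", dirs_exist_ok=True)

def modified_nodes(new_schema_yml):
    write_file(new_schema_yml, "models", "schema.yml")
    # Nodes whose access/version/latest_version/deprecation_date changed are selected.
    return run_dbt(["list", "-s", "state:modified", "--state", "./state"])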
@@ -43,6 +43,26 @@ seeds:

"""

local_dep_schema_yml = """
models:
  - name: table_model
    config:
      alias: table_model_local_dep
    columns:
      - name: id
        tests:
          - unique
"""

local_dep_versions_schema_yml = """
models:
  - name: table_model
    config:
      alias: table_model_local_dep
    versions:
      - v: 1
"""


class TestDuplicateModelEnabled:
    @pytest.fixture(scope="class")
@@ -142,6 +162,72 @@ class TestDuplicateModelDisabledAcrossPackages:
        assert model_id in manifest.disabled


class TestDuplicateModelNameWithTestAcrossPackages:
    @pytest.fixture(scope="class", autouse=True)
    def setUp(self, project_root):
        local_dependency_files = {
            "dbt_project.yml": dbt_project_yml,
            "models": {"table_model.sql": enabled_model_sql, "schema.yml": local_dep_schema_yml},
        }
        write_project_files(project_root, "local_dependency", local_dependency_files)

    @pytest.fixture(scope="class")
    def models(self):
        return {"table_model.sql": enabled_model_sql}

    @pytest.fixture(scope="class")
    def packages(self):
        return {"packages": [{"local": "local_dependency"}]}

    def test_duplicate_model_name_with_test_across_packages(self, project):
        run_dbt(["deps"])
        manifest = run_dbt(["parse"])
        assert len(manifest.nodes) == 3

        # model nodes with duplicate names exist
        local_dep_model_node_id = "model.local_dep.table_model"
        root_model_node_id = "model.test.table_model"
        assert local_dep_model_node_id in manifest.nodes
        assert root_model_node_id in manifest.nodes

        # test node exists and is attached to correct node
        test_node_id = "test.local_dep.unique_table_model_id.1da9e464d9"
        assert test_node_id in manifest.nodes
        assert manifest.nodes[test_node_id].attached_node == local_dep_model_node_id


class TestDuplicateModelNameWithVersionAcrossPackages:
    @pytest.fixture(scope="class", autouse=True)
    def setUp(self, project_root):
        local_dependency_files = {
            "dbt_project.yml": dbt_project_yml,
            "models": {
                "table_model.sql": enabled_model_sql,
                "schema.yml": local_dep_versions_schema_yml,
            },
        }
        write_project_files(project_root, "local_dependency", local_dependency_files)

    @pytest.fixture(scope="class")
    def models(self):
        return {"table_model.sql": enabled_model_sql}

    @pytest.fixture(scope="class")
    def packages(self):
        return {"packages": [{"local": "local_dependency"}]}

    def test_duplicate_model_name_with_version_across_packages(self, project):
        run_dbt(["deps"])
        manifest = run_dbt(["parse"])
        assert len(manifest.nodes) == 2

        # model nodes with duplicate names exist
        local_dep_model_node_id = "model.local_dep.table_model.v1"
        root_model_node_id = "model.test.table_model"
        assert local_dep_model_node_id in manifest.nodes
        assert root_model_node_id in manifest.nodes


class TestModelTestOverlap:
    @pytest.fixture(scope="class")
    def models(self):
@@ -142,6 +142,24 @@ class TestGraphSelection(SelectionFixtures):
        check_result_nodes_by_name(results, ["subdir"])
        assert_correct_schemas(project)

        # Check that list command works
        os.chdir(
            project.profiles_dir
        )  # Change to random directory to test that Path selector works with project-dir
        results = run_dbt(
            [
                "-q",
                "ls",
                "-s",
                "path:models/test/subdir.sql",
                "--project-dir",
                str(project.project_root),
            ]
            # ["list", "--project-dir", str(project.project_root), "--select", "models/test/subdir*"]
        )
        print(f"--- results: {results}")
        assert len(results) == 1

    def test_locally_qualified_name_model_with_dots(self, project):
        results = run_dbt(["run", "--select", "alternative.users"], expect_pass=False)
        check_result_nodes_by_name(results, ["alternative.users"])
@@ -268,3 +286,22 @@ class TestGraphSelection(SelectionFixtures):
                "users",
            ],
        )


class TestListPathGraphSelection(SelectionFixtures):
    def test_list_select_with_project_dir(self, project):
        # Check that list command works
        os.chdir(
            project.profiles_dir
        )  # Change to random directory to test that Path selector works with project-dir
        results = run_dbt(
            [
                "-q",
                "ls",
                "-s",
                "path:models/test/subdir.sql",
                "--project-dir",
                str(project.project_root),
            ]
        )
        assert results == ["test.test.subdir"]
@@ -1,3 +1,5 @@
+from dbt.contracts.graph.nodes import ModelNode
+from dbt.contracts.results import RunExecutionResult, RunResult
 import pytest
 from dbt.tests.util import run_dbt

@@ -53,6 +55,16 @@ models:
"""


SUPPRESSED_CTE_EXPECTED_OUTPUT = """-- fct_eph_first.sql


with int_eph_first as(
    select * from __dbt__cte__int_eph_first
)

select * from int_eph_first"""


class TestEphemeralCompilation:
    @pytest.fixture(scope="class")
    def models(self):
@@ -67,5 +79,13 @@ class TestEphemeralCompilation:
        results = run_dbt(["run"])
        assert len(results) == 0

        results = run_dbt(["test"])
        assert len(results) == 4

    def test__suppress_injected_ctes(self, project):
        compile_output = run_dbt(
            ["compile", "--no-inject-ephemeral-ctes", "--select", "fct_eph_first"]
        )
        assert isinstance(compile_output, RunExecutionResult)
        node_result = compile_output.results[0]
        assert isinstance(node_result, RunResult)
        node = node_result.node
        assert isinstance(node, ModelNode)
        assert node.compiled_code == SUPPRESSED_CTE_EXPECTED_OUTPUT
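For contrast with `SUPPRESSED_CTE_EXPECTED_OUTPUT`: by default, `dbt compile` injects each ephemeral dependency as a `with __dbt__cte__... as (...)` block at the top of the compiled SQL, and the flag leaves only the reference behind. Roughly what the same model would compile to without the flag, written fixture-style; the body of the injected CTE is hypothetical:

DEFAULT_CTE_EXPECTED_OUTPUT = """-- fct_eph_first.sql
with __dbt__cte__int_eph_first as (
    select 1 as id  -- hypothetical body of the ephemeral model
), int_eph_first as(
    select * from __dbt__cte__int_eph_first
)

select * from int_eph_first"""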
@@ -1,6 +1,8 @@
 import os
 import pytest

-from dbt.tests.util import run_dbt, write_artifact
+from dbt.tests.util import run_dbt, write_artifact, write_file
+from tests.functional.partial_parsing.fixtures import model_one_sql, model_two_sql


 first_file_diff = {
@@ -17,7 +19,7 @@ second_file_diff = {
}


-class TestFileDiffs:
+class TestFileDiffPaths:
    def test_file_diffs(self, project):

        os.environ["DBT_PP_FILE_DIFF_TEST"] = "true"
@@ -35,3 +37,27 @@ class TestFileDiffs:
        write_artifact(second_file_diff, "file_diff.json")
        results = run_dbt()
        assert len(results) == 2


class TestFileDiffs:
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "model_one.sql": model_one_sql,
        }

    def test_no_file_diffs(self, project):
        # We start with a project with one model
        manifest = run_dbt(["parse"])
        assert len(manifest.nodes) == 1

        # add a model file
        write_file(model_two_sql, project.project_root, "models", "model_two.sql")

        # parse without computing a file diff
        manifest = run_dbt(["--partial-parse", "--no-partial-parse-file-diff", "parse"])
        assert len(manifest.nodes) == 1

        # default behaviour - parse with computing a file diff
        manifest = run_dbt(["--partial-parse", "parse"])
        assert len(manifest.nodes) == 2
@@ -1,4 +1,5 @@
 import pytest
+from unittest import mock

 from dbt.tests.util import run_dbt, get_manifest, write_file, rm_file, run_dbt_and_capture
 from dbt.tests.fixtures.project import write_project_files
@@ -8,9 +9,6 @@ from tests.functional.partial_parsing.fixtures import (
    models_schema1_yml,
    models_schema2_yml,
    models_schema2b_yml,
-    models_versions_schema_yml,
-    models_versions_defined_in_schema_yml,
-    models_versions_updated_schema_yml,
    model_three_sql,
    model_three_modified_sql,
    model_four1_sql,
@@ -71,9 +69,10 @@ from tests.functional.partial_parsing.fixtures import (
    groups_schema_yml_two_groups_private_orders_invalid_access,
)

-from dbt.exceptions import CompilationError, ParsingError, DuplicateVersionedUnversionedError
+from dbt.exceptions import CompilationError, ParsingError
 from dbt.contracts.files import ParseFileType
 from dbt.contracts.results import TestStatus
+from dbt.plugins.manifest import PluginNodes, ModelNodeArgs

 import re
 import os
@@ -303,72 +302,6 @@ class TestModels:
        assert model_id not in manifest.disabled


-class TestVersionedModels:
-    @pytest.fixture(scope="class")
-    def models(self):
-        return {
-            "model_one_v1.sql": model_one_sql,
-            "model_one.sql": model_one_sql,
-            "model_one_downstream.sql": model_four2_sql,
-            "schema.yml": models_versions_schema_yml,
-        }
-
-    def test_pp_versioned_models(self, project):
-        results = run_dbt(["run"])
-        assert len(results) == 3
-
-        manifest = get_manifest(project.project_root)
-        model_one_node = manifest.nodes["model.test.model_one.v1"]
-        assert not model_one_node.is_latest_version
-        model_two_node = manifest.nodes["model.test.model_one.v2"]
-        assert model_two_node.is_latest_version
-        # assert unpinned ref points to latest version
-        model_one_downstream_node = manifest.nodes["model.test.model_one_downstream"]
-        assert model_one_downstream_node.depends_on.nodes == ["model.test.model_one.v2"]
-
-        # update schema.yml block - model_one is now 'defined_in: model_one_different'
-        rm_file(project.project_root, "models", "model_one.sql")
-        write_file(model_one_sql, project.project_root, "models", "model_one_different.sql")
-        write_file(
-            models_versions_defined_in_schema_yml, project.project_root, "models", "schema.yml"
-        )
-        results = run_dbt(["--partial-parse", "run"])
-        assert len(results) == 3
-
-        # update versions schema.yml block - latest_version from 2 to 1
-        write_file(
-            models_versions_updated_schema_yml, project.project_root, "models", "schema.yml"
-        )
-        results, log_output = run_dbt_and_capture(
-            ["--partial-parse", "--log-format", "json", "run"]
-        )
-        assert len(results) == 3
-
-        manifest = get_manifest(project.project_root)
-        model_one_node = manifest.nodes["model.test.model_one.v1"]
-        assert model_one_node.is_latest_version
-        model_two_node = manifest.nodes["model.test.model_one.v2"]
-        assert not model_two_node.is_latest_version
-        # assert unpinned ref points to latest version
-        model_one_downstream_node = manifest.nodes["model.test.model_one_downstream"]
-        assert model_one_downstream_node.depends_on.nodes == ["model.test.model_one.v1"]
-        # assert unpinned ref to latest-not-max version yields an "FYI" info-level log
-        assert "UnpinnedRefNewVersionAvailable" in log_output
-
-        # update versioned model
-        write_file(model_two_sql, project.project_root, "models", "model_one_different.sql")
-        results = run_dbt(["--partial-parse", "run"])
-        assert len(results) == 3
-        manifest = get_manifest(project.project_root)
-        assert len(manifest.nodes) == 3
-        print(f"--- nodes: {manifest.nodes.keys()}")
-
-        # create a new model_one in model_one.sql and re-parse
-        write_file(model_one_sql, project.project_root, "models", "model_one.sql")
-        with pytest.raises(DuplicateVersionedUnversionedError):
-            run_dbt(["parse"])


class TestSources:
    @pytest.fixture(scope="class")
    def models(self):
@@ -805,3 +738,111 @@ class TestGroups:
        )
        with pytest.raises(ParsingError):
            results = run_dbt(["--partial-parse", "run"])


class TestExternalModels:
    @pytest.fixture(scope="class")
    def external_model_node(self):
        return ModelNodeArgs(
            name="external_model",
            package_name="external",
            identifier="test_identifier",
            schema="test_schema",
        )

    @pytest.fixture(scope="class")
    def external_model_node_versioned(self):
        return ModelNodeArgs(
            name="external_model_versioned",
            package_name="external",
            identifier="test_identifier_v1",
            schema="test_schema",
            version=1,
        )

    @pytest.fixture(scope="class")
    def external_model_node_depends_on(self):
        return ModelNodeArgs(
            name="external_model_depends_on",
            package_name="external",
            identifier="test_identifier_depends_on",
            schema="test_schema",
            depends_on_nodes=["model.external.external_model_depends_on_parent"],
        )

    @pytest.fixture(scope="class")
    def external_model_node_depends_on_parent(self):
        return ModelNodeArgs(
            name="external_model_depends_on_parent",
            package_name="external",
            identifier="test_identifier_depends_on_parent",
            schema="test_schema",
        )

    @pytest.fixture(scope="class")
    def models(self):
        return {"model_one.sql": model_one_sql}

    @mock.patch("dbt.plugins.get_plugin_manager")
    def test_pp_external_models(
        self,
        get_plugin_manager,
        project,
        external_model_node,
        external_model_node_versioned,
        external_model_node_depends_on,
        external_model_node_depends_on_parent,
    ):
        # initial plugin - one external model
        external_nodes = PluginNodes()
        external_nodes.add_model(external_model_node)
        get_plugin_manager.return_value.get_nodes.return_value = external_nodes

        # initial parse
        manifest = run_dbt(["parse"])
        assert len(manifest.nodes) == 2
        assert set(manifest.nodes.keys()) == {
            "model.external.external_model",
            "model.test.model_one",
        }
        assert len(manifest.external_node_unique_ids) == 1
        assert manifest.external_node_unique_ids == ["model.external.external_model"]

        # add a model file
        write_file(model_two_sql, project.project_root, "models", "model_two.sql")
        manifest = run_dbt(["--partial-parse", "parse"])
        assert len(manifest.nodes) == 3

        # add an external model
        external_nodes.add_model(external_model_node_versioned)
        manifest = run_dbt(["--partial-parse", "parse"])
        assert len(manifest.nodes) == 4
        assert len(manifest.external_node_unique_ids) == 2

        # add a model file that depends on external model
        write_file(
            "SELECT * FROM {{ref('external', 'external_model')}}",
            project.project_root,
            "models",
            "model_depends_on_external.sql",
        )
        manifest = run_dbt(["--partial-parse", "parse"])
        assert len(manifest.nodes) == 5
        assert len(manifest.external_node_unique_ids) == 2

        # remove a model file that depends on external model
        rm_file(project.project_root, "models", "model_depends_on_external.sql")
        manifest = run_dbt(["--partial-parse", "parse"])
        assert len(manifest.nodes) == 4

        # add an external node with depends on
        external_nodes.add_model(external_model_node_depends_on)
        external_nodes.add_model(external_model_node_depends_on_parent)
        manifest = run_dbt(["--partial-parse", "parse"])
        assert len(manifest.nodes) == 6
        assert len(manifest.external_node_unique_ids) == 4

        # skip files parsing - ensure no issues
        run_dbt(["--partial-parse", "parse"])
        assert len(manifest.nodes) == 6
        assert len(manifest.external_node_unique_ids) == 4
tests/functional/partial_parsing/test_versioned_models.py (new file, 126 lines)
@@ -0,0 +1,126 @@
import pytest
import pathlib
from dbt.tests.util import (
    run_dbt,
    get_manifest,
    write_file,
    rm_file,
    read_file,
)
from dbt.exceptions import DuplicateVersionedUnversionedError

model_one_sql = """
select 1 as fun
"""

model_one_downstream_sql = """
select fun from {{ ref('model_one') }}
"""

models_versions_schema_yml = """

models:
  - name: model_one
    description: "The first model"
    versions:
      - v: 1
      - v: 2
"""

models_versions_defined_in_schema_yml = """
models:
  - name: model_one
    description: "The first model"
    versions:
      - v: 1
      - v: 2
        defined_in: model_one_different
"""

models_versions_updated_schema_yml = """
models:
  - name: model_one
    latest_version: 1
    description: "The first model"
    versions:
      - v: 1
      - v: 2
        defined_in: model_one_different
"""

model_two_sql = """
select 1 as notfun
"""


class TestVersionedModels:
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "model_one_v1.sql": model_one_sql,
            "model_one.sql": model_one_sql,
            "model_one_downstream.sql": model_one_downstream_sql,
            "schema.yml": models_versions_schema_yml,
        }

    def test_pp_versioned_models(self, project):
        results = run_dbt(["run"])
        assert len(results) == 3

        manifest = get_manifest(project.project_root)
        model_one_node = manifest.nodes["model.test.model_one.v1"]
        assert not model_one_node.is_latest_version
        model_two_node = manifest.nodes["model.test.model_one.v2"]
        assert model_two_node.is_latest_version
        # assert unpinned ref points to latest version
        model_one_downstream_node = manifest.nodes["model.test.model_one_downstream"]
        assert model_one_downstream_node.depends_on.nodes == ["model.test.model_one.v2"]

        # update schema.yml block - model_one is now 'defined_in: model_one_different'
        rm_file(project.project_root, "models", "model_one.sql")
        write_file(model_one_sql, project.project_root, "models", "model_one_different.sql")
        write_file(
            models_versions_defined_in_schema_yml, project.project_root, "models", "schema.yml"
        )
        results = run_dbt(["--partial-parse", "run"])
        assert len(results) == 3

        # update versions schema.yml block - latest_version from 2 to 1
        write_file(
            models_versions_updated_schema_yml, project.project_root, "models", "schema.yml"
        )
        # This is where the test was failing in a CI run with:
        # relation \"test..._test_partial_parsing.model_one_downstream\" does not exist
        # because in core/dbt/include/global_project/macros/materializations/models/view/view.sql
        # "existing_relation" didn't actually exist by the time it gets to the rename of the
        # existing relation.
        (pathlib.Path(project.project_root) / "log_output").mkdir(parents=True, exist_ok=True)
        results = run_dbt(
            ["--partial-parse", "--log-format-file", "json", "--log-path", "log_output", "run"]
        )
        assert len(results) == 3

        manifest = get_manifest(project.project_root)
        model_one_node = manifest.nodes["model.test.model_one.v1"]
        assert model_one_node.is_latest_version
        model_two_node = manifest.nodes["model.test.model_one.v2"]
        assert not model_two_node.is_latest_version
        # assert unpinned ref points to latest version
        model_one_downstream_node = manifest.nodes["model.test.model_one_downstream"]
        assert model_one_downstream_node.depends_on.nodes == ["model.test.model_one.v1"]

        # assert unpinned ref to latest-not-max version yields an "FYI" info-level log
        log_output = read_file("log_output", "dbt.log").replace("\n", " ").replace("\\n", " ")
        assert "UnpinnedRefNewVersionAvailable" in log_output

        # update versioned model
        write_file(model_two_sql, project.project_root, "models", "model_one_different.sql")
        results = run_dbt(["--partial-parse", "run"])
        assert len(results) == 3
        manifest = get_manifest(project.project_root)
        assert len(manifest.nodes) == 3

        # create a new model_one in model_one.sql and re-parse
        write_file(model_one_sql, project.project_root, "models", "model_one.sql")
        with pytest.raises(DuplicateVersionedUnversionedError):
            run_dbt(["parse"])
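The unpinned `ref('model_one')` in the downstream fixture is what re-resolves when `latest_version` flips from 2 to 1. A pinned ref, by contrast, is unaffected by that change; both forms shown as fixture-style strings (the pinned variant is illustrative and not used by the test above):

model_one_downstream_unpinned_sql = """
select fun from {{ ref('model_one') }}
"""

# Pinning keeps the edge on model_one.v2 regardless of latest_version
# (illustrative only; not part of this test file).
model_one_downstream_pinned_sql = """
select fun from {{ ref('model_one', v=2) }}
"""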
@@ -49,6 +49,12 @@ semantic_models:
          agg_time_dimension: ds
          agg_params:
            percentile: 0.99
        - name: test_non_additive
          expr: txn_revenue
          agg: sum
          non_additive_dimension:
            name: ds
            window_choice: max

      dimensions:
        - name: ds
@@ -125,7 +131,7 @@ class TestSemanticModelParsing:
            semantic_model.node_relation.relation_name
            == f'"dbt"."{project.test_schema}"."fct_revenue"'
        )
-        assert len(semantic_model.measures) == 5
+        assert len(semantic_model.measures) == 6

    def test_semantic_model_error(self, project):
        # Next, modify the default schema.yml to remove the semantic model.
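The new `test_non_additive` measure exercises parsing of a `non_additive_dimension` that omits `window_groupings`. When groupings are wanted, the fuller shape (field names per the NonAdditiveDimension definition earlier in this diff) would look like the fixture-style string below; the grouping entity name is hypothetical:

measure_with_groupings_yml = """
        - name: test_non_additive_grouped
          expr: txn_revenue
          agg: sum
          non_additive_dimension:
            name: ds
            window_choice: max
            window_groupings:
              - user  # hypothetical entity to group the window by
"""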
@@ -2,6 +2,31 @@ models__sample_model = """
select * from {{ ref('sample_seed') }}
"""

models__sample_number_model = """
select
    cast(1.0 as int) as float_to_int_field,
    3.0 as float_field,
    4.3 as float_with_dec_field,
    5 as int_field
"""

models__sample_number_model_with_nulls = """
select
    cast(1.0 as int) as float_to_int_field,
    3.0 as float_field,
    4.3 as float_with_dec_field,
    5 as int_field

union all

select
    cast(null as int) as float_to_int_field,
    cast(null as float) as float_field,
    cast(null as float) as float_with_dec_field,
    cast(null as int) as int_field

"""

models__second_model = """
select
    sample_num as col_one,
@@ -6,6 +6,8 @@ from tests.functional.show.fixtures import (
|
||||
models__second_ephemeral_model,
|
||||
seeds__sample_seed,
|
||||
models__sample_model,
|
||||
models__sample_number_model,
|
||||
models__sample_number_model_with_nulls,
|
||||
models__second_model,
|
||||
models__ephemeral_model,
|
||||
schema_yml,
|
||||
@@ -14,11 +16,13 @@ from tests.functional.show.fixtures import (
|
||||
)
|
||||
|
||||
|
||||
class TestShow:
|
||||
class ShowBase:
|
||||
@pytest.fixture(scope="class")
|
||||
def models(self):
|
||||
return {
|
||||
"sample_model.sql": models__sample_model,
|
||||
"sample_number_model.sql": models__sample_number_model,
|
||||
"sample_number_model_with_nulls.sql": models__sample_number_model_with_nulls,
|
||||
"second_model.sql": models__second_model,
|
||||
"ephemeral_model.sql": models__ephemeral_model,
|
||||
"sql_header.sql": models__sql_header,
|
||||
@@ -28,69 +32,122 @@ class TestShow:
|
||||
def seeds(self):
|
||||
return {"sample_seed.csv": seeds__sample_seed}
|
||||
|
||||
@pytest.fixture(scope="class", autouse=True)
|
||||
def setup(self, project):
|
||||
run_dbt(["seed"])
|
||||
|
||||
|
||||
class TestShowNone(ShowBase):
|
||||
def test_none(self, project):
|
||||
with pytest.raises(
|
||||
DbtRuntimeError, match="Either --select or --inline must be passed to show"
|
||||
):
|
||||
run_dbt(["seed"])
|
||||
run_dbt(["show"])
|
||||
|
||||
|
||||
class TestShowSelectText(ShowBase):
|
||||
def test_select_model_text(self, project):
|
||||
run_dbt(["build"])
|
||||
(results, log_output) = run_dbt_and_capture(["show", "--select", "second_model"])
|
||||
(_, log_output) = run_dbt_and_capture(["show", "--select", "second_model"])
|
||||
assert "Previewing node 'sample_model'" not in log_output
|
||||
assert "Previewing node 'second_model'" in log_output
|
||||
assert "col_one" in log_output
|
||||
assert "col_two" in log_output
|
||||
assert "answer" in log_output
|
||||
|
||||
|
||||
class TestShowMultiple(ShowBase):
|
||||
def test_select_multiple_model_text(self, project):
|
||||
run_dbt(["build"])
|
||||
(results, log_output) = run_dbt_and_capture(
|
||||
["show", "--select", "sample_model second_model"]
|
||||
)
|
||||
(_, log_output) = run_dbt_and_capture(["show", "--select", "sample_model second_model"])
|
||||
assert "Previewing node 'sample_model'" in log_output
|
||||
assert "sample_num" in log_output
|
||||
assert "sample_bool" in log_output
|
||||
|
||||
|
||||
class TestShowSingle(ShowBase):
|
||||
def test_select_single_model_json(self, project):
|
||||
run_dbt(["build"])
|
||||
(results, log_output) = run_dbt_and_capture(
|
||||
(_, log_output) = run_dbt_and_capture(
|
||||
["show", "--select", "sample_model", "--output", "json"]
|
||||
)
|
||||
assert "Previewing node 'sample_model'" not in log_output
|
||||
assert "sample_num" in log_output
|
||||
assert "sample_bool" in log_output
|
||||
|
||||
|
||||
class TestShowNumeric(ShowBase):
|
||||
def test_numeric_values(self, project):
|
||||
run_dbt(["build"])
|
||||
(_, log_output) = run_dbt_and_capture(
|
||||
["show", "--select", "sample_number_model", "--output", "json"]
|
||||
)
|
||||
# json log output needs the escapes removed for string matching
|
||||
log_output = log_output.replace("\\", "")
|
||||
assert "Previewing node 'sample_number_model'" not in log_output
|
||||
assert '"float_to_int_field": 1.0' not in log_output
|
||||
assert '"float_to_int_field": 1' in log_output
|
||||
assert '"float_field": 3.0' in log_output
|
||||
assert '"float_with_dec_field": 4.3' in log_output
|
||||
assert '"int_field": 5' in log_output
|
||||
assert '"int_field": 5.0' not in log_output
|
||||
|
||||
|
||||
class TestShowNumericNulls(ShowBase):
|
||||
def test_numeric_values_with_nulls(self, project):
|
||||
run_dbt(["build"])
|
||||
(_, log_output) = run_dbt_and_capture(
|
||||
["show", "--select", "sample_number_model_with_nulls", "--output", "json"]
|
||||
)
|
||||
# json log output needs the escapes removed for string matching
|
||||
log_output = log_output.replace("\\", "")
|
||||
assert "Previewing node 'sample_number_model_with_nulls'" not in log_output
|
||||
assert '"float_to_int_field": 1.0' not in log_output
|
||||
assert '"float_to_int_field": 1' in log_output
|
||||
assert '"float_field": 3.0' in log_output
|
||||
assert '"float_with_dec_field": 4.3' in log_output
|
||||
assert '"int_field": 5' in log_output
|
||||
assert '"int_field": 5.0' not in log_output
|
||||
|
||||
|
||||
class TestShowInline(ShowBase):
|
||||
def test_inline_pass(self, project):
|
||||
run_dbt(["build"])
|
||||
(results, log_output) = run_dbt_and_capture(
|
||||
(_, log_output) = run_dbt_and_capture(
|
||||
["show", "--inline", "select * from {{ ref('sample_model') }}"]
|
||||
)
|
||||
assert "Previewing inline node" in log_output
|
||||
assert "sample_num" in log_output
|
||||
assert "sample_bool" in log_output
|
||||
|
||||
|
||||
class TestShowInlineFail(ShowBase):
|
||||
def test_inline_fail(self, project):
|
||||
with pytest.raises(DbtException, match="Error parsing inline query"):
|
||||
run_dbt(["show", "--inline", "select * from {{ ref('third_model') }}"])
|
||||
|
||||
|
||||
class TestShowInlineFailDB(ShowBase):
|
||||
def test_inline_fail_database_error(self, project):
|
||||
with pytest.raises(DbtRuntimeError, match="Database Error"):
|
||||
run_dbt(["show", "--inline", "slect asdlkjfsld;j"])
|
||||
|
||||
|
||||
class TestShowEphemeral(ShowBase):
|
||||
def test_ephemeral_model(self, project):
|
||||
run_dbt(["build"])
|
||||
(results, log_output) = run_dbt_and_capture(["show", "--select", "ephemeral_model"])
|
||||
(_, log_output) = run_dbt_and_capture(["show", "--select", "ephemeral_model"])
|
||||
assert "col_deci" in log_output
|
||||
|
||||
|
||||
class TestShowSecondEphemeral(ShowBase):
|
||||
def test_second_ephemeral_model(self, project):
|
||||
run_dbt(["build"])
|
||||
(results, log_output) = run_dbt_and_capture(
|
||||
["show", "--inline", models__second_ephemeral_model]
|
||||
)
|
||||
(_, log_output) = run_dbt_and_capture(["show", "--inline", models__second_ephemeral_model])
|
||||
assert "col_hundo" in log_output
|
||||
|
||||
|
||||
class TestShowLimit(ShowBase):
|
||||
@pytest.mark.parametrize(
|
||||
"args,expected",
|
||||
[
|
||||
@@ -102,16 +159,20 @@ class TestShow:
|
||||
def test_limit(self, project, args, expected):
|
||||
run_dbt(["build"])
|
||||
dbt_args = ["show", "--inline", models__second_ephemeral_model, *args]
|
||||
results, log_output = run_dbt_and_capture(dbt_args)
|
||||
results = run_dbt(dbt_args)
|
||||
assert len(results.results[0].agate_table) == expected
|
||||
|
||||
|
||||
class TestShowSeed(ShowBase):
|
||||
def test_seed(self, project):
|
||||
(results, log_output) = run_dbt_and_capture(["show", "--select", "sample_seed"])
|
||||
(_, log_output) = run_dbt_and_capture(["show", "--select", "sample_seed"])
|
||||
assert "Previewing node 'sample_seed'" in log_output
|
||||
|
||||
|
||||
class TestShowSqlHeader(ShowBase):
|
||||
def test_sql_header(self, project):
|
||||
run_dbt(["build"])
|
||||
(results, log_output) = run_dbt_and_capture(["show", "--select", "sql_header"])
|
||||
(_, log_output) = run_dbt_and_capture(["show", "--select", "sql_header"])
|
||||
assert "Asia/Kolkata" in log_output
|
||||
|
||||
|
||||
|
||||
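These classes all inherit from a `ShowBase` class whose definition falls outside the hunks shown here. A minimal sketch of what such a base likely provides, given that every test above assumes seeds are already loaded (the fixture name and scope are inferred, not taken from this diff):

```python
import pytest
from dbt.tests.util import run_dbt


class ShowBase:
    # hypothetical reconstruction: load seeds once per test class so each
    # refactored Test* class above starts from the same project state
    @pytest.fixture(scope="class", autouse=True)
    def setup(self, project):
        run_dbt(["seed"])
```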
@@ -121,39 +121,64 @@ class TestAgateHelper(unittest.TestCase):
        self.assertEqual(tbl[0][0], expected)

    def test_merge_allnull(self):
        t1 = agate.Table([(1, "a", None), (2, "b", None)], ("a", "b", "c"))
        t2 = agate.Table([(3, "c", None), (4, "d", None)], ("a", "b", "c"))
        t1 = agate_helper.table_from_rows([(1, "a", None), (2, "b", None)], ("a", "b", "c"))
        t2 = agate_helper.table_from_rows([(3, "c", None), (4, "d", None)], ("a", "b", "c"))
        result = agate_helper.merge_tables([t1, t2])
        self.assertEqual(result.column_names, ("a", "b", "c"))
        assert isinstance(result.column_types[0], agate.data_types.Number)
        assert isinstance(result.column_types[0], agate_helper.Integer)
        assert isinstance(result.column_types[1], agate.data_types.Text)
        assert isinstance(result.column_types[2], agate.data_types.Number)
        assert isinstance(result.column_types[2], agate_helper.Integer)
        self.assertEqual(len(result), 4)

    def test_merge_mixed(self):
        t1 = agate.Table([(1, "a", None), (2, "b", None)], ("a", "b", "c"))
        t2 = agate.Table([(3, "c", "dog"), (4, "d", "cat")], ("a", "b", "c"))
        t3 = agate.Table([(3, "c", None), (4, "d", None)], ("a", "b", "c"))
        t1 = agate_helper.table_from_rows(
            [(1, "a", None, None), (2, "b", None, None)], ("a", "b", "c", "d")
        )
        t2 = agate_helper.table_from_rows(
            [(3, "c", "dog", 1), (4, "d", "cat", 5)], ("a", "b", "c", "d")
        )
        t3 = agate_helper.table_from_rows(
            [(3, "c", None, 1.5), (4, "d", None, 3.5)], ("a", "b", "c", "d")
        )

        result = agate_helper.merge_tables([t1, t2])
        self.assertEqual(result.column_names, ("a", "b", "c"))
        assert isinstance(result.column_types[0], agate.data_types.Number)
        self.assertEqual(result.column_names, ("a", "b", "c", "d"))
        assert isinstance(result.column_types[0], agate_helper.Integer)
        assert isinstance(result.column_types[1], agate.data_types.Text)
        assert isinstance(result.column_types[2], agate.data_types.Text)
        assert isinstance(result.column_types[3], agate_helper.Integer)
        self.assertEqual(len(result), 4)

        result = agate_helper.merge_tables([t1, t3])
        self.assertEqual(result.column_names, ("a", "b", "c", "d"))
        assert isinstance(result.column_types[0], agate_helper.Integer)
        assert isinstance(result.column_types[1], agate.data_types.Text)
        assert isinstance(result.column_types[2], agate_helper.Integer)
        assert isinstance(result.column_types[3], agate.data_types.Number)
        self.assertEqual(len(result), 4)

        result = agate_helper.merge_tables([t2, t3])
        self.assertEqual(result.column_names, ("a", "b", "c"))
        assert isinstance(result.column_types[0], agate.data_types.Number)
        self.assertEqual(result.column_names, ("a", "b", "c", "d"))
        assert isinstance(result.column_types[0], agate_helper.Integer)
        assert isinstance(result.column_types[1], agate.data_types.Text)
        assert isinstance(result.column_types[2], agate.data_types.Text)
        assert isinstance(result.column_types[3], agate.data_types.Number)
        self.assertEqual(len(result), 4)

        result = agate_helper.merge_tables([t3, t2])
        self.assertEqual(result.column_names, ("a", "b", "c", "d"))
        assert isinstance(result.column_types[0], agate_helper.Integer)
        assert isinstance(result.column_types[1], agate.data_types.Text)
        assert isinstance(result.column_types[2], agate.data_types.Text)
        assert isinstance(result.column_types[3], agate.data_types.Number)
        self.assertEqual(len(result), 4)

        result = agate_helper.merge_tables([t1, t2, t3])
        self.assertEqual(result.column_names, ("a", "b", "c"))
        assert isinstance(result.column_types[0], agate.data_types.Number)
        self.assertEqual(result.column_names, ("a", "b", "c", "d"))
        assert isinstance(result.column_types[0], agate_helper.Integer)
        assert isinstance(result.column_types[1], agate.data_types.Text)
        assert isinstance(result.column_types[2], agate.data_types.Text)
        assert isinstance(result.column_types[3], agate.data_types.Number)
        self.assertEqual(len(result), 6)

    def test_nocast_string_types(self):
@@ -191,7 +216,7 @@ class TestAgateHelper(unittest.TestCase):
        self.assertEqual(len(tbl), len(result_set))

        assert isinstance(tbl.column_types[0], agate.data_types.Boolean)
        assert isinstance(tbl.column_types[1], agate.data_types.Number)
        assert isinstance(tbl.column_types[1], agate_helper.Integer)

        expected = [
            [True, Decimal(1)],
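The updated assertions consistently expect the new `agate_helper.Integer` type wherever a column is all-null or integral: a column with no non-null values carries no type evidence, so the narrowest type is assumed, and merging can widen it when another table supplies real values (`Integer` + floats gives `Number`, `Integer` + strings gives `Text`). A deliberately simplified sketch of that widening idea — `WIDENING_ORDER` and `merge_column_type` are illustrative, not dbt's actual code:

```python
# widening order from most to least specific (simplified)
WIDENING_ORDER = ["Integer", "Number", "Text"]


def merge_column_type(t1: str, t2: str) -> str:
    # the merged column takes the wider of the two inferred types
    return max(t1, t2, key=WIDENING_ORDER.index)


assert merge_column_type("Integer", "Number") == "Number"  # all-null + floats
assert merge_column_type("Integer", "Text") == "Text"      # all-null + strings
```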
@@ -391,3 +391,8 @@ class TestFlags:
        args_dict = {"which": "some bad command"}
        with pytest.raises(DbtInternalError, match=r"does not match value of which"):
            self._create_flags_from_dict(Command.RUN, args_dict)

    def test_from_dict_0_value(self):
        args_dict = {"log_file_max_bytes": 0}
        flags = Flags.from_dict(Command.RUN, args_dict)
        assert flags.LOG_FILE_MAX_BYTES == 0
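The new test pins down that an explicit `0` must survive `Flags.from_dict` rather than being dropped as falsy. The classic pitfall it guards against, in a generic sketch (the default constant here is illustrative, not dbt's):

```python
args_dict = {"log_file_max_bytes": 0}
DEFAULT_MAX_BYTES = 10 * 1024 * 1024  # illustrative default only

# buggy: `or` treats an explicit 0 the same as "not provided"
max_bytes = args_dict.get("log_file_max_bytes") or DEFAULT_MAX_BYTES
assert max_bytes == DEFAULT_MAX_BYTES  # the user's 0 was silently discarded

# correct: fall back only when the key is genuinely absent
max_bytes = args_dict.get("log_file_max_bytes")
if max_bytes is None:
    max_bytes = DEFAULT_MAX_BYTES
assert max_bytes == 0
```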
@@ -17,3 +17,22 @@ class TestModelNodeArgs:
            version="1",
        )
        assert model_node_args.unique_id == "model.package.name.v1"

    def test_model_node_args_fqn(self) -> None:
        model_node_args = ModelNodeArgs(
            name="name",
            package_name="package",
            identifier="identifier",
            schema="schema",
        )
        assert model_node_args.fqn == ["package", "name"]

    def test_model_node_args_fqn_with_version(self) -> None:
        model_node_args = ModelNodeArgs(
            name="name",
            package_name="package",
            identifier="identifier",
            schema="schema",
            version="1",
        )
        assert model_node_args.fqn == ["package", "name", "v1"]
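The two assertions fully determine how the fqn is derived from the constructor arguments. A minimal sketch of logic consistent with them — a reconstruction for illustration, not the actual property implementation:

```python
from typing import List, Optional


def build_fqn(package_name: str, name: str, version: Optional[str] = None) -> List[str]:
    # ["package", "name"], plus a "v<version>" leaf for versioned models
    fqn = [package_name, name]
    if version:
        fqn.append(f"v{version}")
    return fqn


assert build_fqn("package", "name") == ["package", "name"]
assert build_fqn("package", "name", "1") == ["package", "name", "v1"]
```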
@@ -452,6 +452,8 @@ unchanged_nodes = [
    lambda u: (u, u.replace(alias="other")),
    lambda u: (u, u.replace(schema="other")),
    lambda u: (u, u.replace(database="other")),
    # unchanged ref representations - protected is default
    lambda u: (u, u.replace(access=AccessType.Protected)),
]


@@ -485,6 +487,10 @@ changed_nodes = [
    lambda u: (u, replace_config(u, alias="other")),
    lambda u: (u, replace_config(u, schema="other")),
    lambda u: (u, replace_config(u, database="other")),
    # changed ref representations
    lambda u: (u, replace_config(u, access=AccessType.Public)),
    lambda u: (u, replace_config(u, latest_version=2)),
    lambda u: (u, replace_config(u, version=2)),
]
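Each lambda maps a base node to a `(before, after)` pair; a test elsewhere then asserts whether `state:modified` selects the `after` node. Setting `access=AccessType.Protected` lands in the unchanged list because protected is already the default, while `Public`, `version`, and `latest_version` mutations must be detected. The pattern relies on immutable-style node copies, roughly analogous to `dataclasses.replace` (the `Node` class below is a stand-in, not dbt's node type):

```python
import dataclasses


@dataclasses.dataclass(frozen=True)
class Node:  # stand-in for a dbt model node
    name: str
    access: str = "protected"


base = Node(name="my_model")
same = dataclasses.replace(base, access="protected")  # no-op: already the default
modified = dataclasses.replace(base, access="public")  # should trip state:modified

assert same == base
assert modified != base
```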
@@ -1,5 +1,6 @@
import pickle
from datetime import timedelta
import pickle
import pytest

from dbt.contracts.graph.unparsed import (
    UnparsedNode,
@@ -940,3 +941,25 @@ class TestUnparsedVersion(ContractTestCase):
        version = self.get_ok_dict()
        del version["v"]
        self.assert_fails_validation(version)


@pytest.mark.parametrize(
    "left,right,expected_lt",
    [
        # same types
        (2, 12, True),
        (12, 2, False),
        ("a", "b", True),
        ("b", "a", False),
        # mismatched types - numeric
        (2, 12.0, True),
        (12.0, 2, False),
        (2, "12", True),
        ("12", 2, False),
        # mismatched types
        (1, "test", True),
        ("test", 1, False),
    ],
)
def test_unparsed_version_lt(left, right, expected_lt):
    assert (UnparsedVersion(left) < UnparsedVersion(right)) == expected_lt
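The parametrized cases define a total order across mixed version types: values that look numeric compare by magnitude (so `2 < "12"` rather than the lexicographic `"12" < "2"`), and numeric versions sort before non-numeric ones. One way to realize that ordering, offered as a sketch consistent with the cases above rather than dbt's implementation:

```python
def _version_sort_key(value):
    s = str(value)
    try:
        # numeric-looking versions compare by magnitude and sort first
        return (0, float(s), "")
    except ValueError:
        # everything else falls back to string comparison
        return (1, 0.0, s)


assert _version_sort_key(2) < _version_sort_key("12")    # 2 < 12 numerically
assert _version_sort_key(1) < _version_sort_key("test")  # numbers before text
assert _version_sort_key("a") < _version_sort_key("b")   # plain string order
```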
@@ -6,7 +6,7 @@ from unittest import mock
import dbt.deps
import dbt.exceptions
from dbt.deps.git import GitUnpinnedPackage
from dbt.deps.local import LocalUnpinnedPackage
from dbt.deps.local import LocalUnpinnedPackage, LocalPinnedPackage
from dbt.deps.tarball import TarballUnpinnedPackage
from dbt.deps.registry import RegistryUnpinnedPackage
from dbt.clients.registry import is_compatible_version
@@ -92,6 +92,21 @@ class TestGitPackage(unittest.TestCase):
        self.assertEqual(a_pinned.source_type(), "git")
        self.assertIs(a_pinned.warn_unpinned, True)

    @mock.patch("shutil.copytree")
    @mock.patch("dbt.deps.local.system.make_symlink")
    @mock.patch("dbt.deps.local.LocalPinnedPackage.get_installation_path")
    @mock.patch("dbt.deps.local.LocalPinnedPackage.resolve_path")
    def test_deps_install(
        self, mock_resolve_path, mock_get_installation_path, mock_symlink, mock_shutil
    ):
        mock_resolve_path.return_value = "/tmp/source"
        mock_get_installation_path.return_value = "/tmp/dest"
        mock_symlink.side_effect = OSError("Install deps symlink error")

        LocalPinnedPackage("local").install("dummy", "dummy")
        self.assertEqual(mock_shutil.call_count, 1)
        mock_shutil.assert_called_once_with("/tmp/source", "/tmp/dest")

    def test_invalid(self):
        with self.assertRaises(ValidationError):
            GitPackage.validate(
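The test encodes a fallback contract for local package installs: prefer a symlink, and copy the tree when symlinking raises `OSError` (for example on filesystems that do not support symlinks). A sketch of that shape, inferred from what the mocks patch rather than copied from dbt's source:

```python
import shutil


def install_local_package(source: str, dest: str, make_symlink) -> None:
    # behavior the mocks imply: symlink when the OS allows it,
    # fall back to a full recursive copy otherwise
    try:
        make_symlink(source, dest)
    except OSError:
        shutil.copytree(source, dest)
```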
@@ -2,7 +2,7 @@ import pytest
import re
from typing import TypeVar

from dbt.contracts.results import TimingInfo
from dbt.contracts.results import TimingInfo, RunResult, RunStatus
from dbt.events import AdapterLogger, types
from dbt.events.base_types import (
    BaseEvent,
@@ -14,11 +14,15 @@ from dbt.events.base_types import (
    WarnLevel,
    msg_from_base_event,
)
from dbt.events.functions import msg_to_dict, msg_to_json
from dbt.events.eventmgr import TestEventManager, EventManager
from dbt.events.functions import msg_to_dict, msg_to_json, ctx_set_event_manager
from dbt.events.helpers import get_json_string_utcnow
from dbt.events.types import RunResultError
from dbt.flags import set_from_args
from argparse import Namespace

from dbt.task.printer import print_run_result_error

set_from_args(Namespace(WARN_ERROR=False), None)


@@ -388,8 +392,6 @@ sample_values = [
    types.RunResultErrorNoMessage(status=""),
    types.SQLCompiledPath(path=""),
    types.CheckNodeTestFailure(relation_name=""),
    types.FirstRunResultError(msg=""),
    types.AfterFirstRunResultError(msg=""),
    types.EndOfRunSummary(num_errors=0, num_warnings=0, keyboard_interrupt=False),
    types.LogSkipBecauseError(schema="", relation="", index=0, total=0),
    types.EnsureGitInstalled(),
@@ -485,3 +487,34 @@ def test_bad_serialization():
        str(excinfo.value)
        == "[Note]: Unable to parse dict {'param_event_doesnt_have': 'This should break'}"
    )


def test_single_run_error():

    try:
        # Add a recording event manager to the context, so we can test events.
        event_mgr = TestEventManager()
        ctx_set_event_manager(event_mgr)

        error_result = RunResult(
            status=RunStatus.Error,
            timing=[],
            thread_id="",
            execution_time=0.0,
            node=None,
            adapter_response=dict(),
            message="oh no!",
            failures=[],
        )

        print_run_result_error(error_result)
        events = [e for e in event_mgr.event_history if isinstance(e[0], RunResultError)]

        assert len(events) == 1
        assert events[0][0].msg == "oh no!"

    finally:
        # Set an empty event manager unconditionally on exit. This is an early
        # attempt at unit testing events, and we need to think about how it
        # could be done in a thread safe way in the long run.
        ctx_set_event_manager(EventManager())
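Because the event manager lives in shared context, the try/finally swap is essential: a failing assertion must not leave the recording manager installed for later tests. The pattern could be factored into a context manager, sketched here using only names already imported in the diff above:

```python
from contextlib import contextmanager


@contextmanager
def capture_events():
    # install a recording manager; always restore a fresh one on exit
    event_mgr = TestEventManager()
    ctx_set_event_manager(event_mgr)
    try:
        yield event_mgr
    finally:
        ctx_set_event_manager(EventManager())
```

Usage would then read `with capture_events() as mgr:` followed by assertions over `mgr.event_history`, with cleanup guaranteed.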
@@ -24,6 +24,7 @@ from dbt.contracts.graph.nodes import (
    TestConfig,
    TestMetadata,
    ColumnInfo,
    AccessType,
)
from dbt.contracts.graph.manifest import Manifest, ManifestMetadata
from dbt.contracts.graph.unparsed import ExposureType, Owner
@@ -125,7 +126,7 @@ def make_model(
        checksum=FileHash.from_contents(""),
        version=version,
        latest_version=latest_version,
        access=access,
        access=access or AccessType.Protected,
    )


@@ -637,6 +638,21 @@ def versioned_model_v3(seed):
    )


@pytest.fixture
def versioned_model_v12_string(seed):
    return make_model(
        "pkg",
        "versioned_model",
        'select * from {{ ref("seed") }}',
        config_kwargs={"materialized": "table"},
        refs=[seed],
        sources=[],
        path="subdirectory/versioned_model_v12.sql",
        version="12",
        latest_version=2,
    )


@pytest.fixture
def versioned_model_v4_nested_dir(seed):
    return make_model(
@@ -731,6 +747,7 @@ def manifest(
    versioned_model_v2,
    versioned_model_v3,
    versioned_model_v4_nested_dir,
    versioned_model_v12_string,
    ext_source_2,
    ext_source_other,
    ext_source_other_2,
@@ -759,6 +776,7 @@ def manifest(
    versioned_model_v2,
    versioned_model_v3,
    versioned_model_v4_nested_dir,
    versioned_model_v12_string,
    ext_model,
    table_id_unique,
    table_id_not_null,
@@ -822,6 +840,7 @@ def test_select_fqn(manifest):
        "versioned_model.v2",
        "versioned_model.v3",
        "versioned_model.v4",
        "versioned_model.v12",
        "table_model",
        "table_model_py",
        "table_model_csv",
@@ -839,6 +858,7 @@ def test_select_fqn(manifest):
        "versioned_model.v2",
        "versioned_model.v3",
        "versioned_model.v4",
        "versioned_model.v12",
    }
    assert search_manifest_using_method(manifest, method, "versioned_model.v1") == {
        "versioned_model.v1"
@@ -1050,6 +1070,7 @@ def test_select_package(manifest):
        "versioned_model.v2",
        "versioned_model.v3",
        "versioned_model.v4",
        "versioned_model.v12",
        "table_model",
        "table_model_py",
        "table_model_csv",
@@ -1102,6 +1123,7 @@ def test_select_config_materialized(manifest):
        "versioned_model.v2",
        "versioned_model.v3",
        "versioned_model.v4",
        "versioned_model.v12",
        "mynamespace.union_model",
    }

@@ -1188,6 +1210,7 @@ def test_select_version(manifest):
    assert search_manifest_using_method(manifest, method, "prerelease") == {
        "versioned_model.v3",
        "versioned_model.v4",
        "versioned_model.v12",
    }
    assert search_manifest_using_method(manifest, method, "none") == {
        "table_model_py",
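Worth noting why the fixture uses the string version `"12"` with `latest_version=2`: under the numeric-aware ordering exercised in the `UnparsedVersion` tests above, 12 sorts above 2, so `versioned_model.v12` correctly lands in the `prerelease` set. A naive lexicographic comparison would get this backwards, since `"12" < "2"` as strings.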
@@ -612,7 +612,7 @@ class SchemaParserVersionedModels(SchemaParserTest):
    def setUp(self):
        super().setUp()
        my_model_v1_node = MockNode(
            package="root",
            package="snowplow",
            name="arbitrary_file_name",
            config=mock.MagicMock(enabled=True),
            refs=[],
@@ -621,7 +621,7 @@ class SchemaParserVersionedModels(SchemaParserTest):
            file_id="snowplow://models/arbitrary_file_name.sql",
        )
        my_model_v2_node = MockNode(
            package="root",
            package="snowplow",
            name="my_model_v2",
            config=mock.MagicMock(enabled=True),
            refs=[],
@@ -1,7 +1,22 @@
import pytest

from dbt.exceptions import DbtRuntimeError
from unittest import mock

from dbt.plugins import PluginManager, dbtPlugin, dbt_hook
from dbt.plugins.manifest import PluginNodes, ModelNodeArgs
from dbt.plugins.contracts import PluginArtifacts, PluginArtifact
from dbt.plugins.exceptions import dbtPluginError


class ExceptionInitializePlugin(dbtPlugin):
    def initialize(self) -> None:
        raise Exception("plugin error message")


class dbtRuntimeErrorInitializePlugin(dbtPlugin):
    def initialize(self) -> None:
        raise dbtPluginError("plugin error message")


class GetNodesPlugin(dbtPlugin):
@@ -42,6 +57,14 @@ class TestPluginManager:
    def get_artifacts_plugins(self, get_artifacts_plugin):
        return [get_artifacts_plugin, GetArtifactsPlugin(project_name="test2")]

    def test_plugin_manager_init_exception(self):
        with pytest.raises(DbtRuntimeError, match="plugin error message"):
            PluginManager(plugins=[ExceptionInitializePlugin(project_name="test")])

    def test_plugin_manager_init_plugin_exception(self):
        with pytest.raises(DbtRuntimeError, match="^Runtime Error\n plugin error message"):
            PluginManager(plugins=[dbtRuntimeErrorInitializePlugin(project_name="test")])

    def test_plugin_manager_init_single_hook(self, get_nodes_plugin):
        pm = PluginManager(plugins=[get_nodes_plugin])
        assert len(pm.hooks) == 1
@@ -71,10 +94,21 @@ class TestPluginManager:
        assert len(pm.hooks["get_manifest_artifacts"]) == 1
        assert pm.hooks["get_manifest_artifacts"][0] == get_artifacts_plugin.get_manifest_artifacts

    def test_get_nodes(self, get_nodes_plugins):
    @mock.patch("dbt.tracking")
    def test_get_nodes(self, tracking, get_nodes_plugins):
        tracking.active_user = mock.Mock()
        pm = PluginManager(plugins=get_nodes_plugins)

        nodes = pm.get_nodes()

        assert len(nodes.models) == 2
        assert tracking.track_plugin_get_nodes.called_once_with(
            {
                "plugin_name": get_nodes_plugins[0].name,
                "num_model_nodes": 2,
                "num_model_packages": 1,
            }
        )

    def test_get_manifest_artifact(self, get_artifacts_plugins):
        pm = PluginManager(plugins=get_artifacts_plugins)
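One caveat about the new tracking assertion: `called_once_with` is not a real `unittest.mock` assertion method. Accessing it merely auto-creates a truthy child mock, so `assert m.called_once_with(...)` can never fail. Only the `assert_`-prefixed method actually verifies the call (the dict argument below is abbreviated for illustration):

```python
from unittest import mock

tracking = mock.Mock()

# always "passes": `called_once_with` is an auto-created attribute on the
# Mock, and Mocks are truthy, so this assert cannot fail
assert tracking.track_plugin_get_nodes.called_once_with({"num_model_nodes": 2})

# actually verifies the call and its arguments, failing loudly otherwise
tracking.track_plugin_get_nodes({"num_model_nodes": 2})
tracking.track_plugin_get_nodes.assert_called_once_with({"num_model_nodes": 2})
```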
tox.ini
@@ -23,8 +23,11 @@ passenv =
    DBT_*
    POSTGRES_TEST_*
    PYTEST_ADDOPTS
    DD_SERVICE
    DD_CIVISIBILITY_AGENTLESS_ENABLED
    DD_API_KEY
    DD_SITE
    DD_ENV
    DD_SERVICE
commands =
    {envpython} -m pytest --cov=core {posargs} tests/functional -k "not tests/functional/graph_selection"
    {envpython} -m pytest --cov=core {posargs} tests/functional/graph_selection
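The added `DD_*` entries pass Datadog CI Visibility configuration (API key, site, environment, service name, and the agentless toggle) through to the test environment, presumably so a Datadog pytest integration can report results from CI. The single pytest invocation is also split in two, apparently to run the graph_selection suite in its own process separate from the rest of the functional tests.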