forked from repo-mirrors/dbt-core
Compare commits
20 Commits
jerco/upda ... arky/add-p
| Author | SHA1 | Date |
|---|---|---|
| | b29709b4d7 | |
| | 23b16ad6d2 | |
| | fdeccfaf24 | |
| | fecde23da5 | |
| | b1d931337e | |
| | 39542336b8 | |
| | 799588cada | |
| | f392add4b8 | |
| | 49560bf2a2 | |
| | 44b3ed5ae9 | |
| | 6235145641 | |
| | ff5cb7ba51 | |
| | 1e2b9ae962 | |
| | 8cab58d248 | |
| | 0d645c227f | |
| | fb6c349677 | |
| | eeb057085c | |
| | 121371f4a4 | |
| | a32713198b | |
| | a1b067c683 | |
6 .changes/unreleased/Dependencies-20230621-005752.yaml Normal file

@@ -0,0 +1,6 @@
kind: "Dependencies"
body: "Bump mypy from 1.3.0 to 1.4.0"
time: 2023-06-21T00:57:52.00000Z
custom:
  Author: dependabot[bot]
  PR: 7912
@@ -1,6 +0,0 @@
kind: Docs
body: Fix for column tests not rendering on quoted columns
time: 2023-05-31T11:54:19.687363-04:00
custom:
  Author: drewbanin
  Issue: "201"
6 .changes/unreleased/Docs-20230715-200907.yaml Normal file

@@ -0,0 +1,6 @@
kind: Docs
body: Corrected spelling of "Partiton"
time: 2023-07-15T20:09:07.057361092+02:00
custom:
  Author: pgoslatara
  Issue: "8100"
6 .changes/unreleased/Fixes-20230625-142731.yaml Normal file

@@ -0,0 +1,6 @@
kind: Fixes
body: Fixed double-underline
time: 2023-06-25T14:27:31.231253719+08:00
custom:
  Author: lllong33
  Issue: "5301"
6 .changes/unreleased/Fixes-20230720-122723.yaml Normal file

@@ -0,0 +1,6 @@
kind: Fixes
body: Add status to Parse Inline Error
time: 2023-07-20T12:27:23.085084-07:00
custom:
  Author: ChenyuLInx
  Issue: "8173"
6 .changes/unreleased/Fixes-20230720-161513.yaml Normal file

@@ -0,0 +1,6 @@
kind: Fixes
body: Ensure `warn_error_options` get serialized in `invocation_args_dict`
time: 2023-07-20T16:15:13.761813-07:00
custom:
  Author: QMalcolm
  Issue: "7694"
6 .changes/unreleased/Fixes-20230720-170112.yaml Normal file

@@ -0,0 +1,6 @@
kind: Fixes
body: Stop detecting materialization macros based on macro name
time: 2023-07-20T17:01:12.496238-07:00
custom:
  Author: QMalcolm
  Issue: "6231"
6 .changes/unreleased/Fixes-20230720-172422.yaml Normal file

@@ -0,0 +1,6 @@
kind: Fixes
body: Update `dbt deps` download retry logic to handle `EOFError` exceptions
time: 2023-07-20T17:24:22.969951-07:00
custom:
  Author: QMalcolm
  Issue: "6653"
6 .changes/unreleased/Fixes-20230726-104448.yaml Normal file

@@ -0,0 +1,6 @@
kind: Fixes
body: Improve handling of CTE injection with ephemeral models
time: 2023-07-26T10:44:48.888451-04:00
custom:
  Author: gshank
  Issue: "8213"
6 .changes/unreleased/Under the Hood-20230719-124611.yaml Normal file

@@ -0,0 +1,6 @@
kind: Under the Hood
body: Refactor flaky test pp_versioned_models
time: 2023-07-19T12:46:11.972481-04:00
custom:
  Author: gshank
  Issue: "7781"
6 .changes/unreleased/Under the Hood-20230719-163334.yaml Normal file

@@ -0,0 +1,6 @@
kind: Under the Hood
body: format exception from dbtPlugin.initialize
time: 2023-07-19T16:33:34.586377-04:00
custom:
  Author: michelleark
  Issue: "8152"
6 .changes/unreleased/Under the Hood-20230724-150654.yaml Normal file

@@ -0,0 +1,6 @@
kind: Under the Hood
body: A way to control maxBytes for a single dbt.log file
time: 2023-07-24T15:06:54.263822-07:00
custom:
  Author: ChenyuLInx
  Issue: "8199"
7 .changes/unreleased/Under the Hood-20230725-102609.yaml Normal file

@@ -0,0 +1,7 @@
kind: Under the Hood
body: Ref expressions with version can now be processed by the latest version of the
  high-performance dbt-extractor library.
time: 2023-07-25T10:26:09.902878-04:00
custom:
  Author: peterallenwebb
  Issue: "7688"
40 .github/ISSUE_TEMPLATE/implementation-ticket.yml vendored Normal file

@@ -0,0 +1,40 @@
name: 🛠️ Implementation
description: This is an implementation ticket intended for use by the maintainers of dbt-core
title: "[<project>] <title>"
labels: ["user_docs"]
body:
  - type: markdown
    attributes:
      value: This is an implementation ticket intended for use by the maintainers of dbt-core
  - type: checkboxes
    attributes:
      label: Housekeeping
      description: >
        A couple friendly reminders:
        1. Remove the `user_docs` label if the scope of this work does not require changes to https://docs.getdbt.com/docs: no end-user interface (e.g. yml spec, CLI, error messages, etc) or functional changes
        2. Link any blocking issues in the "Blocked on" field under the "Core devs & maintainers" project.
      options:
        - label: I am a maintainer of dbt-core
          required: true
  - type: textarea
    attributes:
      label: Short description
      description: |
        Describe the scope of the ticket, a high-level implementation approach and any tradeoffs to consider
    validations:
      required: true
  - type: textarea
    attributes:
      label: Acceptance criteria
      description: |
        What is the definition of done for this ticket? Include any relevant edge cases and/or test cases
    validations:
      required: true
  - type: textarea
    attributes:
      label: Context
      description: |
        Provide the "why", motivation, and alternative approaches considered -- linking to previous refinement issues, spikes, Notion docs as appropriate
    validations:
      required: false
66 .github/workflows/main.yml vendored

@@ -33,6 +33,11 @@ defaults:
  run:
    shell: bash

# top-level adjustments can be made here
env:
  # number of parallel processes to spawn for python integration testing
  PYTHON_INTEGRATION_TEST_WORKERS: ${{ vars.PYTHON_INTEGRATION_TEST_WORKERS }}

jobs:
  code-quality:
    name: code-quality
@@ -106,23 +111,55 @@ jobs:
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

  integration-metadata:
    name: integration test metadata generation
    runs-on: ubuntu-latest
    outputs:
      split-groups: ${{ steps.generate-split-groups.outputs.split-groups }}
      include: ${{ steps.generate-include.outputs.include }}

    steps:
      - name: generate split-groups
        id: generate-split-groups
        run: |
          MATRIX_JSON="["
          for B in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
            MATRIX_JSON+=$(sed 's/^/"/;s/$/"/' <<< "${B}")
          done
          MATRIX_JSON="${MATRIX_JSON//\"\"/\", \"}"
          MATRIX_JSON+="]"
          echo "split-groups=${MATRIX_JSON}"
          echo "split-groups=${MATRIX_JSON}" >> $GITHUB_OUTPUT

      - name: generate include
        id: generate-include
        run: |
          INCLUDE=('"python-version":"3.8","os":"windows-latest"' '"python-version":"3.8","os":"macos-latest"' )
          INCLUDE_GROUPS="["
          for include in ${INCLUDE[@]}; do
            for group in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
              INCLUDE_GROUPS+=$(sed 's/$/, /' <<< "{\"split-group\":\"${group}\",${include}}")
            done
          done
          INCLUDE_GROUPS=$(echo $INCLUDE_GROUPS | sed 's/,*$//g')
          INCLUDE_GROUPS+="]"
          echo "include=${INCLUDE_GROUPS}"
          echo "include=${INCLUDE_GROUPS}" >> $GITHUB_OUTPUT

  integration:
    name: integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}
    name: (${{ matrix.split-group }}) integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}

    runs-on: ${{ matrix.os }}
    timeout-minutes: 60

    timeout-minutes: 30
    needs:
      - integration-metadata
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.8", "3.9", "3.10", "3.11"]
        os: [ubuntu-20.04]
        include:
          - python-version: 3.8
            os: windows-latest
          - python-version: 3.8
            os: macos-latest

        split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
        include: ${{ fromJson(needs.integration-metadata.outputs.include) }}
    env:
      TOXENV: integration
      DBT_INVOCATION_ENV: github-actions
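For readers skimming the workflow change: the two `generate` steps above only build JSON strings that feed the job matrix. A rough Python sketch of the values they emit (illustration only, assuming `PYTHON_INTEGRATION_TEST_WORKERS` is 5; the variable names below are not from the diff):

```python
# Sketch of the JSON produced by the "generate split-groups" and
# "generate include" steps, for 5 parallel workers.
import json

workers = 5
split_groups = [str(n) for n in range(1, workers + 1)]
include = [
    {"split-group": str(n), "python-version": "3.8", "os": os_name}
    for os_name in ("windows-latest", "macos-latest")
    for n in range(1, workers + 1)
]

print(json.dumps(split_groups))  # ["1", "2", "3", "4", "5"]
print(json.dumps(include))       # [{"split-group": "1", "python-version": "3.8", "os": "windows-latest"}, ...]
```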
@@ -165,6 +202,8 @@ jobs:

      - name: Run tests
        run: tox -- --ddtrace
        env:
          PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}

      - name: Get current date
        if: always()
@@ -185,6 +224,15 @@ jobs:
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

  integration-report:
    name: integration test suite
    runs-on: ubuntu-latest
    needs: integration
    steps:
      - name: "[Notification] Integration test suite passes"
        run: |
          echo "::notice title="Integration test suite passes""

  build:
    name: build packages

@@ -18,11 +18,41 @@ on:

permissions: read-all

# top-level adjustments can be made here
env:
  # number of parallel processes to spawn for python testing
  PYTHON_INTEGRATION_TEST_WORKERS: ${{ vars.PYTHON_INTEGRATION_TEST_WORKERS }}

jobs:
  integration-metadata:
    name: integration test metadata generation
    runs-on: ubuntu-latest
    outputs:
      split-groups: ${{ steps.generate-split-groups.outputs.split-groups }}

    steps:
      - name: generate split-groups
        id: generate-split-groups
        run: |
          MATRIX_JSON="["
          for B in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
            MATRIX_JSON+=$(sed 's/^/"/;s/$/"/' <<< "${B}")
          done
          MATRIX_JSON="${MATRIX_JSON//\"\"/\", \"}"
          MATRIX_JSON+="]"
          echo "split-groups=${MATRIX_JSON}" >> $GITHUB_OUTPUT

  # run the performance measurements on the current or default branch
  test-schema:
    name: Test Log Schema
    runs-on: ubuntu-20.04
    timeout-minutes: 30
    needs:
      - integration-metadata
    strategy:
      fail-fast: false
      matrix:
        split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
    env:
      # turns warnings into errors
      RUSTFLAGS: "-D warnings"
@@ -65,3 +95,14 @@
      # we actually care if these pass, because the normal test run doesn't usually include many json log outputs
      - name: Run integration tests
        run: tox -e integration -- -nauto
        env:
          PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}

  test-schema-report:
    name: Log Schema Test Suite
    runs-on: ubuntu-latest
    needs: test-schema
    steps:
      - name: "[Notification] Log test suite passes"
        run: |
          echo "::notice title="Log test suite passes""

@@ -37,7 +37,7 @@ repos:
    alias: flake8-check
    stages: [manual]
- repo: https://github.com/pre-commit/mirrors-mypy
  rev: v1.3.0
  rev: v1.4.0
  hooks:
  - id: mypy
    # N.B.: Mypy is... a bit fragile.

@@ -132,6 +132,7 @@ class dbtRunner:
@p.enable_legacy_logger
@p.fail_fast
@p.log_cache_events
@p.log_file_max_bytes
@p.log_format
@p.log_format_file
@p.log_level

@@ -171,6 +171,15 @@ use_colors_file = click.option(
    default=True,
)

log_file_max_bytes = click.option(
    "--log-file-max-bytes",
    envvar="DBT_LOG_FILE_MAX_BYTES",
    help="Configure the max file size in bytes for a single dbt.log file, before rolling over. 0 means no limit.",
    default=10 * 1024 * 1024,  # 10mb
    type=click.INT,
    hidden=True,
)

log_path = click.option(
    "--log-path",
    envvar="DBT_LOG_PATH",
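The new `--log-file-max-bytes` option is hidden from `--help` but behaves like any other click option, with `DBT_LOG_FILE_MAX_BYTES` as its environment variable. A minimal usage sketch (an assumption about how one might exercise the flag programmatically; this is not code from the changeset):

```python
# Sketch only: cap dbt.log at ~1 MiB for a single invocation.
# Assumes the hidden click option is still accepted on the command line.
from dbt.cli.main import dbtRunner

res = dbtRunner().invoke(["run", "--log-file-max-bytes", str(1024 * 1024)])
print(res.success)
```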
@@ -4,7 +4,6 @@ import json
import networkx as nx  # type: ignore
import os
import pickle
import sqlparse

from collections import defaultdict
from typing import List, Dict, Any, Tuple, Optional
@@ -36,6 +35,7 @@ from dbt.node_types import NodeType, ModelLanguage
from dbt.events.format import pluralize
import dbt.tracking
import dbt.task.list as list_task
import sqlparse

graph_file_name = "graph.gpickle"

@@ -378,16 +378,16 @@ class Compiler:

            _add_prepended_cte(prepended_ctes, InjectedCTE(id=cte.id, sql=sql))

        injected_sql = inject_ctes_into_sql(
            model.compiled_code,
            prepended_ctes,
        )
        # Check again before updating for multi-threading
        if not model.extra_ctes_injected:
            injected_sql = inject_ctes_into_sql(
                model.compiled_code,
                prepended_ctes,
            )
            model.extra_ctes_injected = True
            model._pre_injected_sql = model.compiled_code
            model.compiled_code = injected_sql
            model.extra_ctes = prepended_ctes
        model.extra_ctes_injected = True

        # if model.extra_ctes is not set to prepended ctes, something went wrong
        return model, model.extra_ctes
@@ -523,6 +523,12 @@ class Compiler:
        the node's raw_code into compiled_code, and then calls the
        recursive method to "prepend" the ctes.
        """
        # Make sure Lexer for sqlparse 0.4.4 is initialized
        from sqlparse.lexer import Lexer  # type: ignore

        if hasattr(Lexer, "get_default_instance"):
            Lexer.get_default_instance()

        node = self._compile_code(node, manifest, extra_context)

        node, _ = self._recursively_prepend_ctes(node, manifest, extra_context)

@@ -13,7 +13,7 @@ from uuid import uuid4
from dbt.events.format import timestamp_to_datetime_string

from dbt.events.base_types import BaseEvent, EventLevel, msg_from_base_event, EventMsg

import dbt.utils

# A Filter is a function which takes a BaseEvent and returns True if the event
# should be logged, False otherwise.
@@ -80,6 +80,7 @@ class LoggerConfig:
    use_colors: bool = False
    output_stream: Optional[TextIO] = None
    output_file_name: Optional[str] = None
    output_file_max_bytes: Optional[int] = 10 * 1024 * 1024  # 10 mb
    logger: Optional[Any] = None


@@ -100,7 +101,7 @@ class _Logger:
            file_handler = RotatingFileHandler(
                filename=str(config.output_file_name),
                encoding="utf8",
                maxBytes=10 * 1024 * 1024,  # 10 mb
                maxBytes=config.output_file_max_bytes,  # type: ignore
                backupCount=5,
            )
            self._python_logger = self._get_python_log_for_handler(file_handler)
@@ -175,7 +176,7 @@ class _JsonLogger(_Logger):
        from dbt.events.functions import msg_to_dict

        msg_dict = msg_to_dict(msg)
        raw_log_line = json.dumps(msg_dict, sort_keys=True)
        raw_log_line = json.dumps(msg_dict, sort_keys=True, cls=dbt.utils.ForgivingJSONEncoder)
        line = self.scrubber(raw_log_line)  # type: ignore
        return line

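For context, `RotatingFileHandler` is the standard-library handler being parameterized above; the only change is that `maxBytes` now comes from `LoggerConfig` instead of being hard-coded. A standalone sketch of the same rollover behavior (file name and sizes are illustrative):

```python
# Sketch: rotate the log file once it reaches ~10 MiB, keeping 5 backups,
# mirroring the handler configuration in eventmgr.py above.
import logging
from logging.handlers import RotatingFileHandler

handler = RotatingFileHandler(
    filename="dbt.log",          # illustrative path
    encoding="utf8",
    maxBytes=10 * 1024 * 1024,   # roll over after ~10 MiB; 0 means never roll over
    backupCount=5,               # keep dbt.log.1 .. dbt.log.5
)
logger = logging.getLogger("example")
logger.addHandler(handler)
logger.warning("this line is written to dbt.log")
```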
@@ -13,6 +13,7 @@ from typing import Callable, Dict, List, Optional, TextIO
import uuid
from google.protobuf.json_format import MessageToDict

import dbt.utils

LOG_VERSION = 3
metadata_vars: Optional[Dict[str, str]] = None
@@ -67,7 +68,11 @@ def setup_event_logger(flags, callbacks: List[Callable[[EventMsg], None]] = [])
        log_level_file = EventLevel.DEBUG if flags.DEBUG else EventLevel(flags.LOG_LEVEL_FILE)
        EVENT_MANAGER.add_logger(
            _get_logfile_config(
                log_file, flags.USE_COLORS_FILE, log_file_format, log_level_file
                log_file,
                flags.USE_COLORS_FILE,
                log_file_format,
                log_level_file,
                flags.LOG_FILE_MAX_BYTES,
            )
        )

@@ -116,7 +121,11 @@ def _stdout_filter(


def _get_logfile_config(
    log_path: str, use_colors: bool, line_format: LineFormat, level: EventLevel
    log_path: str,
    use_colors: bool,
    line_format: LineFormat,
    level: EventLevel,
    log_file_max_bytes: int,
) -> LoggerConfig:
    return LoggerConfig(
        name="file_log",
@@ -126,6 +135,7 @@ def _get_logfile_config(
        scrubber=env_scrubber,
        filter=partial(_logfile_filter, bool(get_flags().LOG_CACHE_EVENTS), line_format),
        output_file_name=log_path,
        output_file_max_bytes=log_file_max_bytes,
    )


@@ -200,7 +210,7 @@ def stop_capture_stdout_logs():
# the message may contain secrets which must be scrubbed at the usage site.
def msg_to_json(msg: EventMsg) -> str:
    msg_dict = msg_to_dict(msg)
    raw_log_line = json.dumps(msg_dict, sort_keys=True)
    raw_log_line = json.dumps(msg_dict, sort_keys=True, cls=dbt.utils.ForgivingJSONEncoder)
    return raw_log_line

@@ -63,3 +63,12 @@
    {{ exceptions.raise_not_implemented(
    'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}
{% endmacro %}

{% macro get_relations() %}
    {{ return(adapter.dispatch('get_relations', 'dbt')()) }}
{% endmacro %}

{% macro default__get_relations() %}
    {{ exceptions.raise_not_implemented(
    'get_relations macro not implemented for adapter '+adapter.type()) }}
{% endmacro %}

@@ -19,7 +19,7 @@
    {% set day_count = (end_date - start_date).days %}
    {% if day_count < 0 %}
        {% set msg -%}
            Partiton start date is after the end date ({{ start_date }}, {{ end_date }})
            Partition start date is after the end date ({{ start_date }}, {{ end_date }})
        {%- endset %}

        {{ exceptions.raise_compiler_error(msg, model) }}

@@ -33,7 +33,12 @@

  -- cleanup
  {% if existing_relation is not none %}
      {{ adapter.rename_relation(existing_relation, backup_relation) }}
      /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped
         since the variable was first set. */
      {% set existing_relation = load_cached_relation(existing_relation) %}
      {% if existing_relation is not none %}
          {{ adapter.rename_relation(existing_relation, backup_relation) }}
      {% endif %}
  {% endif %}

  {{ adapter.rename_relation(intermediate_relation, target_relation) }}

@@ -45,7 +45,12 @@
  -- cleanup
  -- move the existing view out of the way
  {% if existing_relation is not none %}
      {{ adapter.rename_relation(existing_relation, backup_relation) }}
      /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped
         since the variable was first set. */
      {% set existing_relation = load_cached_relation(existing_relation) %}
      {% if existing_relation is not none %}
          {{ adapter.rename_relation(existing_relation, backup_relation) }}
      {% endif %}
  {% endif %}
  {{ adapter.rename_relation(intermediate_relation, target_relation) }}

@@ -81,7 +81,7 @@ class MacroParser(BaseParser[Macro]):
            name: str = macro.name.replace(MACRO_PREFIX, "")
            node = self.parse_macro(block, base_node, name)
            # get supported_languages for materialization macro
            if "materialization" in name:
            if block.block_type_name == "materialization":
                node.supported_languages = jinja.get_supported_languages(macro)
            yield node

@@ -497,12 +497,10 @@ class ModelParser(SimpleSQLParser[ModelNode]):
        # set refs and sources on the node object
        refs: List[RefArgs] = []
        for ref in statically_parsed["refs"]:
            if len(ref) == 1:
                package, name = None, ref[0]
            else:
                package, name = ref

            refs.append(RefArgs(package=package, name=name))
            name = ref.get("name")
            package = ref.get("package")
            version = ref.get("version")
            refs.append(RefArgs(name, package, version))

        node.refs += refs
        node.sources += statically_parsed["sources"]

@@ -29,8 +29,11 @@ class dbtPlugin:
        self.project_name = project_name
        try:
            self.initialize()
        except DbtRuntimeError as e:
            # Remove the first line of DbtRuntimeError to avoid redundant "Runtime Error" line
            raise DbtRuntimeError("\n".join(str(e).split("\n")[1:]))
        except Exception as e:
            raise DbtRuntimeError(f"initialize: {e}")
            raise DbtRuntimeError(str(e))

    @property
    def name(self) -> str:

@@ -139,6 +139,7 @@ class CompileTask(GraphRunnableTask):
                    "node_path": "sql/inline_query",
                    "node_name": "inline_query",
                    "unique_id": "sqloperation.test.inline_query",
                    "node_status": "failed",
                },
            )
        )

1 core/dbt/tests/fixtures/project.py vendored

@@ -502,6 +502,7 @@ def project(
        DEBUG=False,
        LOG_CACHE_EVENTS=False,
        QUIET=False,
        LOG_FILE_MAX_BYTES=1000000,
    )
    setup_event_logger(log_flags)
    orig_cwd = os.getcwd()

@@ -16,9 +16,8 @@ import time
from pathlib import PosixPath, WindowsPath

from contextlib import contextmanager
from dbt.exceptions import ConnectionError, DuplicateAliasError
from dbt.events.functions import fire_event
from dbt.events.types import RetryExternalCall, RecordRetryException
from dbt.helper_types import WarnErrorOptions
from dbt import flags
from enum import Enum
from typing_extensions import Protocol
@@ -40,6 +39,7 @@ from typing import (
    Sequence,
)

import dbt.events.functions
import dbt.exceptions

DECIMALS: Tuple[Type[Any], ...]
@@ -337,15 +337,18 @@ class JSONEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, DECIMALS):
            return float(obj)
        if isinstance(obj, (datetime.datetime, datetime.date, datetime.time)):
        elif isinstance(obj, (datetime.datetime, datetime.date, datetime.time)):
            return obj.isoformat()
        if isinstance(obj, jinja2.Undefined):
        elif isinstance(obj, jinja2.Undefined):
            return ""
        if hasattr(obj, "to_dict"):
        elif isinstance(obj, Exception):
            return repr(obj)
        elif hasattr(obj, "to_dict"):
            # if we have a to_dict we should try to serialize the result of
            # that!
            return obj.to_dict(omit_none=True)
        return super().default(obj)
        else:
            return super().default(obj)


class ForgivingJSONEncoder(JSONEncoder):
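The rewritten `default()` above turns the checks into a single `if/elif` chain and adds an `Exception` branch so errors serialize as their `repr()` instead of raising. A minimal standalone sketch of that pattern (not dbt's class, just the idea):

```python
# Sketch: serialize exception values instead of raising TypeError,
# mirroring the new elif branch in JSONEncoder.default.
import json


class ExampleEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, Exception):
            return repr(obj)
        return super().default(obj)


print(json.dumps({"exc": ValueError("boom")}, cls=ExampleEncoder))
# -> {"exc": "ValueError('boom')"}
```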
@@ -369,7 +372,7 @@ class Translator:
        for key, value in kwargs.items():
            canonical_key = self.aliases.get(key, key)
            if canonical_key in result:
                raise DuplicateAliasError(kwargs, self.aliases, canonical_key)
                raise dbt.exceptions.DuplicateAliasError(kwargs, self.aliases, canonical_key)
            result[canonical_key] = self.translate_value(value)
        return result

@@ -389,9 +392,7 @@ class Translator:
            return self.translate_mapping(value)
        except RuntimeError as exc:
            if "maximum recursion depth exceeded" in str(exc):
                raise dbt.exceptions.RecursionError(
                    "Cycle detected in a value passed to translate!"
                )
                raise RecursionError("Cycle detected in a value passed to translate!")
            raise

@@ -601,14 +602,17 @@ def _connection_exception_retry(fn, max_attempts: int, attempt: int = 0):
    except (
        requests.exceptions.RequestException,
        ReadError,
        EOFError,
    ) as exc:
        if attempt <= max_attempts - 1:
            fire_event(RecordRetryException(exc=str(exc)))
            fire_event(RetryExternalCall(attempt=attempt, max=max_attempts))
            dbt.events.functions.fire_event(RecordRetryException(exc=str(exc)))
            dbt.events.functions.fire_event(RetryExternalCall(attempt=attempt, max=max_attempts))
            time.sleep(1)
            return _connection_exception_retry(fn, max_attempts, attempt + 1)
        else:
            raise ConnectionError("External connection exception occurred: " + str(exc))
            raise dbt.exceptions.ConnectionError(
                "External connection exception occurred: " + str(exc)
            )


# This is used to serialize the args in the run_results and in the logs.
@@ -652,6 +656,9 @@ def args_to_dict(args):
        # this was required for a test case
        if isinstance(var_args[key], PosixPath) or isinstance(var_args[key], WindowsPath):
            var_args[key] = str(var_args[key])
        if isinstance(var_args[key], WarnErrorOptions):
            var_args[key] = var_args[key].to_dict()

        dict_args[key] = var_args[key]
    return dict_args

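The retry hunk above adds `EOFError` to the exceptions that trigger another download attempt. A generic standalone sketch of the same retry shape (names are illustrative, not dbt's API):

```python
# Sketch: retry a callable on EOFError, sleeping between attempts,
# and raise a wrapped error once the attempts are exhausted.
import time


def retry_on_eof(fn, max_attempts=5, attempt=0):
    try:
        return fn()
    except EOFError as exc:
        if attempt <= max_attempts - 1:
            time.sleep(1)
            return retry_on_eof(fn, max_attempts, attempt + 1)
        raise ConnectionError(f"External connection exception occurred: {exc}")
```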
@@ -73,7 +73,7 @@ setup(
        "sqlparse>=0.2.3",
        # ----
        # These are major-version-0 packages also maintained by dbt-labs. Accept patches.
        "dbt-extractor~=0.4.1",
        "dbt-extractor~=0.5.0",
        "hologram~=0.0.16",  # includes transitive dependencies on python-dateutil and jsonschema
        "minimal-snowplow-tracker~=0.0.2",
        # DSI is under active development, so we're pinning to specific dev versions for now.

@@ -6,7 +6,7 @@ flake8
flaky
freezegun==0.3.12
ipdb
mypy==1.3.0
mypy==1.4.0
pip-tools
pre-commit
protobuf>=4.0.0
@@ -16,7 +16,9 @@ pytest-csv
pytest-dotenv
pytest-logbook
pytest-mock
pytest-split
pytest-xdist
python-dev-tools
sphinx
tox>=3.13
twine

@@ -20,8 +20,7 @@ from dbt.exceptions import (
import dbt.utils


# note that this isn't an adapter macro, so just a single underscore
GET_RELATIONS_MACRO_NAME = "postgres_get_relations"
GET_RELATIONS_MACRO_NAME = "postgres__get_relations"


@dataclass

@@ -1,4 +1,4 @@
{% macro postgres_get_relations () -%}
{% macro postgres__get_relations() -%}

  {#
      -- in pg_depend, objid is the dependent, refobjid is the referenced object
@@ -74,3 +74,7 @@

  {{ return(load_result('relations').table) }}
{% endmacro %}

{% macro postgres_get_relations() %}
  {{ return(postgres__get_relations()) }}
{% endmacro %}

File diff suppressed because one or more lines are too long
@@ -4,6 +4,12 @@ models__dep_macro = """
}}
"""

models__materialization_macro = """
{{
  materialization_macro()
}}
"""

models__with_undefined_macro = """
{{ dispatch_to_nowhere() }}
select 1 as id
@@ -75,6 +81,12 @@ macros__my_macros = """
{% endmacro %}
"""

macros__named_materialization = """
{% macro materialization_macro() %}
select 1 as foo
{% endmacro %}
"""

macros__no_default_macros = """
{% macro do_something2(foo2, bar2) %}

@@ -20,12 +20,14 @@ from tests.functional.macros.fixtures import (
    models__override_get_columns_macros,
    models__deprecated_adapter_macro_model,
    models__incorrect_dispatch,
    models__materialization_macro,
    macros__my_macros,
    macros__no_default_macros,
    macros__override_get_columns_macros,
    macros__package_override_get_columns_macros,
    macros__deprecated_adapter_macro,
    macros__incorrect_dispatch,
    macros__named_materialization,
)


@@ -78,6 +80,21 @@ class TestMacros:
        check_relations_equal(project.adapter, ["expected_local_macro", "local_macro"])


class TestMacrosNamedMaterialization:
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "models_materialization_macro.sql": models__materialization_macro,
        }

    @pytest.fixture(scope="class")
    def macros(self):
        return {"macros_named_materialization.sql": macros__named_materialization}

    def test_macro_with_materialization_in_name_works(self, project):
        run_dbt(expect_pass=True)


class TestInvalidMacros:
    @pytest.fixture(scope="class")
    def models(self):

@@ -8,9 +8,6 @@ from tests.functional.partial_parsing.fixtures import (
    models_schema1_yml,
    models_schema2_yml,
    models_schema2b_yml,
    models_versions_schema_yml,
    models_versions_defined_in_schema_yml,
    models_versions_updated_schema_yml,
    model_three_sql,
    model_three_modified_sql,
    model_four1_sql,
@@ -71,7 +68,7 @@ from tests.functional.partial_parsing.fixtures import (
    groups_schema_yml_two_groups_private_orders_invalid_access,
)

from dbt.exceptions import CompilationError, ParsingError, DuplicateVersionedUnversionedError
from dbt.exceptions import CompilationError, ParsingError
from dbt.contracts.files import ParseFileType
from dbt.contracts.results import TestStatus

@@ -303,72 +300,6 @@ class TestModels:
        assert model_id not in manifest.disabled


class TestVersionedModels:
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "model_one_v1.sql": model_one_sql,
            "model_one.sql": model_one_sql,
            "model_one_downstream.sql": model_four2_sql,
            "schema.yml": models_versions_schema_yml,
        }

    def test_pp_versioned_models(self, project):
        results = run_dbt(["run"])
        assert len(results) == 3

        manifest = get_manifest(project.project_root)
        model_one_node = manifest.nodes["model.test.model_one.v1"]
        assert not model_one_node.is_latest_version
        model_two_node = manifest.nodes["model.test.model_one.v2"]
        assert model_two_node.is_latest_version
        # assert unpinned ref points to latest version
        model_one_downstream_node = manifest.nodes["model.test.model_one_downstream"]
        assert model_one_downstream_node.depends_on.nodes == ["model.test.model_one.v2"]

        # update schema.yml block - model_one is now 'defined_in: model_one_different'
        rm_file(project.project_root, "models", "model_one.sql")
        write_file(model_one_sql, project.project_root, "models", "model_one_different.sql")
        write_file(
            models_versions_defined_in_schema_yml, project.project_root, "models", "schema.yml"
        )
        results = run_dbt(["--partial-parse", "run"])
        assert len(results) == 3

        # update versions schema.yml block - latest_version from 2 to 1
        write_file(
            models_versions_updated_schema_yml, project.project_root, "models", "schema.yml"
        )
        results, log_output = run_dbt_and_capture(
            ["--partial-parse", "--log-format", "json", "run"]
        )
        assert len(results) == 3

        manifest = get_manifest(project.project_root)
        model_one_node = manifest.nodes["model.test.model_one.v1"]
        assert model_one_node.is_latest_version
        model_two_node = manifest.nodes["model.test.model_one.v2"]
        assert not model_two_node.is_latest_version
        # assert unpinned ref points to latest version
        model_one_downstream_node = manifest.nodes["model.test.model_one_downstream"]
        assert model_one_downstream_node.depends_on.nodes == ["model.test.model_one.v1"]
        # assert unpinned ref to latest-not-max version yields an "FYI" info-level log
        assert "UnpinnedRefNewVersionAvailable" in log_output

        # update versioned model
        write_file(model_two_sql, project.project_root, "models", "model_one_different.sql")
        results = run_dbt(["--partial-parse", "run"])
        assert len(results) == 3
        manifest = get_manifest(project.project_root)
        assert len(manifest.nodes) == 3
        print(f"--- nodes: {manifest.nodes.keys()}")

        # create a new model_one in model_one.sql and re-parse
        write_file(model_one_sql, project.project_root, "models", "model_one.sql")
        with pytest.raises(DuplicateVersionedUnversionedError):
            run_dbt(["parse"])


class TestSources:
    @pytest.fixture(scope="class")
    def models(self):

126 tests/functional/partial_parsing/test_versioned_models.py Normal file

@@ -0,0 +1,126 @@
import pytest
import pathlib
from dbt.tests.util import (
    run_dbt,
    get_manifest,
    write_file,
    rm_file,
    read_file,
)
from dbt.exceptions import DuplicateVersionedUnversionedError

model_one_sql = """
select 1 as fun
"""

model_one_downstream_sql = """
select fun from {{ ref('model_one') }}
"""

models_versions_schema_yml = """

models:
    - name: model_one
      description: "The first model"
      versions:
        - v: 1
        - v: 2
"""

models_versions_defined_in_schema_yml = """
models:
    - name: model_one
      description: "The first model"
      versions:
        - v: 1
        - v: 2
          defined_in: model_one_different
"""

models_versions_updated_schema_yml = """
models:
    - name: model_one
      latest_version: 1
      description: "The first model"
      versions:
        - v: 1
        - v: 2
          defined_in: model_one_different
"""

model_two_sql = """
select 1 as notfun
"""

class TestVersionedModels:
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "model_one_v1.sql": model_one_sql,
            "model_one.sql": model_one_sql,
            "model_one_downstream.sql": model_one_downstream_sql,
            "schema.yml": models_versions_schema_yml,
        }

    def test_pp_versioned_models(self, project):
        results = run_dbt(["run"])
        assert len(results) == 3

        manifest = get_manifest(project.project_root)
        model_one_node = manifest.nodes["model.test.model_one.v1"]
        assert not model_one_node.is_latest_version
        model_two_node = manifest.nodes["model.test.model_one.v2"]
        assert model_two_node.is_latest_version
        # assert unpinned ref points to latest version
        model_one_downstream_node = manifest.nodes["model.test.model_one_downstream"]
        assert model_one_downstream_node.depends_on.nodes == ["model.test.model_one.v2"]

        # update schema.yml block - model_one is now 'defined_in: model_one_different'
        rm_file(project.project_root, "models", "model_one.sql")
        write_file(model_one_sql, project.project_root, "models", "model_one_different.sql")
        write_file(
            models_versions_defined_in_schema_yml, project.project_root, "models", "schema.yml"
        )
        results = run_dbt(["--partial-parse", "run"])
        assert len(results) == 3

        # update versions schema.yml block - latest_version from 2 to 1
        write_file(
            models_versions_updated_schema_yml, project.project_root, "models", "schema.yml"
        )
        # This is where the test was failing in a CI run with:
        # relation \"test..._test_partial_parsing.model_one_downstream\" does not exist
        # because in core/dbt/include/global_project/macros/materializations/models/view/view.sql
        # "existing_relation" didn't actually exist by the time it gets to the rename of the
        # existing relation.
        (pathlib.Path(project.project_root) / "log_output").mkdir(parents=True, exist_ok=True)
        results = run_dbt(
            ["--partial-parse", "--log-format-file", "json", "--log-path", "log_output", "run"]
        )
        assert len(results) == 3

        manifest = get_manifest(project.project_root)
        model_one_node = manifest.nodes["model.test.model_one.v1"]
        assert model_one_node.is_latest_version
        model_two_node = manifest.nodes["model.test.model_one.v2"]
        assert not model_two_node.is_latest_version
        # assert unpinned ref points to latest version
        model_one_downstream_node = manifest.nodes["model.test.model_one_downstream"]
        assert model_one_downstream_node.depends_on.nodes == ["model.test.model_one.v1"]

        # assert unpinned ref to latest-not-max version yields an "FYI" info-level log
        log_output = read_file("log_output", "dbt.log").replace("\n", " ").replace("\\n", " ")
        assert "UnpinnedRefNewVersionAvailable" in log_output

        # update versioned model
        write_file(model_two_sql, project.project_root, "models", "model_one_different.sql")
        results = run_dbt(["--partial-parse", "run"])
        assert len(results) == 3
        manifest = get_manifest(project.project_root)
        assert len(manifest.nodes) == 3

        # create a new model_one in model_one.sql and re-parse
        write_file(model_one_sql, project.project_root, "models", "model_one.sql")
        with pytest.raises(DuplicateVersionedUnversionedError):
            run_dbt(["parse"])
@@ -57,6 +57,11 @@ class TestFlags:
        assert hasattr(flags, "LOG_PATH")
        assert getattr(flags, "LOG_PATH") == Path("logs")

    def test_log_file_max_size_default(self, run_context):
        flags = Flags(run_context)
        assert hasattr(flags, "LOG_FILE_MAX_BYTES")
        assert getattr(flags, "LOG_FILE_MAX_BYTES") == 10 * 1024 * 1024

    @pytest.mark.parametrize(
        "set_stats_param,do_not_track,expected_anonymous_usage_stats",
        [

@@ -424,6 +424,9 @@ def test_invocation_args_to_dict_in_macro_runtime_context(
    # Comes from unit/utils.py config_from_parts_or_dicts method
    assert ctx["invocation_args_dict"]["profile_dir"] == "/dev/null"

    assert isinstance(ctx["invocation_args_dict"]["warn_error_options"], Dict)
    assert ctx["invocation_args_dict"]["warn_error_options"] == {"include": [], "exclude": []}


def test_model_parse_context(config_postgres, manifest_fx, get_adapter, get_include_paths):
    ctx = providers.generate_parser_model_context(

@@ -28,6 +28,11 @@ class TestCoreDbtUtils(unittest.TestCase):
        connection_exception_retry(lambda: Counter._add_with_untar_exception(), 5)
        self.assertEqual(2, counter)  # 2 = original attempt returned ReadError, plus 1 retry

    def test_connection_exception_retry_success_failed_eofexception(self):
        Counter._reset()
        connection_exception_retry(lambda: Counter._add_with_eof_exception(), 5)
        self.assertEqual(2, counter)  # 2 = original attempt returned EOFError, plus 1 retry


counter: int = 0

@@ -57,6 +62,12 @@ class Counter:
        if counter < 2:
            raise tarfile.ReadError

    def _add_with_eof_exception():
        global counter
        counter += 1
        if counter < 2:
            raise EOFError

    def _reset():
        global counter
        counter = 0

@@ -2,7 +2,7 @@ from argparse import Namespace
import pytest

import dbt.flags as flags
from dbt.events.functions import msg_to_dict, warn_or_error
from dbt.events.functions import msg_to_dict, warn_or_error, setup_event_logger
from dbt.events.types import InfoLevel, NoNodesForSelectionCriteria
from dbt.exceptions import EventCompilationError

@@ -59,3 +59,13 @@ def test_msg_to_dict_handles_exceptions_gracefully():
    assert (
        False
    ), f"We expect `msg_to_dict` to gracefully handle exceptions, but it raised {exc}"


def test_setup_event_logger_specify_max_bytes(mocker):
    patched_file_handler = mocker.patch("dbt.events.eventmgr.RotatingFileHandler")
    args = Namespace(log_file_max_bytes=1234567)
    flags.set_from_args(args, {})
    setup_event_logger(flags.get_flags())
    patched_file_handler.assert_called_once_with(
        filename="logs/dbt.log", encoding="utf8", maxBytes=1234567, backupCount=5
    )

@@ -18,6 +18,7 @@ from dbt import tracking
from dbt.contracts.files import SourceFile, FileHash, FilePath
from dbt.contracts.graph.manifest import MacroManifest, ManifestStateCheck
from dbt.graph import NodeSelector, parse_difference
from dbt.events.functions import setup_event_logger

try:
    from queue import Empty
@@ -140,6 +141,7 @@ class GraphTest(unittest.TestCase):

        config = config_from_parts_or_dicts(project=cfg, profile=self.profile)
        dbt.flags.set_from_args(Namespace(), config)
        setup_event_logger(dbt.flags.get_flags())
        object.__setattr__(dbt.flags.get_flags(), "PARTIAL_PARSE", False)
        return config

@@ -1,7 +1,20 @@
import pytest

from dbt.exceptions import DbtRuntimeError
from dbt.plugins import PluginManager, dbtPlugin, dbt_hook
from dbt.plugins.manifest import PluginNodes, ModelNodeArgs
from dbt.plugins.contracts import PluginArtifacts, PluginArtifact
from dbt.plugins.exceptions import dbtPluginError


class ExceptionInitializePlugin(dbtPlugin):
    def initialize(self) -> None:
        raise Exception("plugin error message")


class dbtRuntimeErrorInitializePlugin(dbtPlugin):
    def initialize(self) -> None:
        raise dbtPluginError("plugin error message")


class GetNodesPlugin(dbtPlugin):
@@ -42,6 +55,14 @@ class TestPluginManager:
    def get_artifacts_plugins(self, get_artifacts_plugin):
        return [get_artifacts_plugin, GetArtifactsPlugin(project_name="test2")]

    def test_plugin_manager_init_exception(self):
        with pytest.raises(DbtRuntimeError, match="plugin error message"):
            PluginManager(plugins=[ExceptionInitializePlugin(project_name="test")])

    def test_plugin_manager_init_plugin_exception(self):
        with pytest.raises(DbtRuntimeError, match="^Runtime Error\n plugin error message"):
            PluginManager(plugins=[dbtRuntimeErrorInitializePlugin(project_name="test")])

    def test_plugin_manager_init_single_hook(self, get_nodes_plugin):
        pm = PluginManager(plugins=[get_nodes_plugin])
        assert len(pm.hooks) == 1
